diff --git a/benchmarks/build.gradle b/benchmarks/build.gradle index 3002819235a13..351b0b9e97118 100644 --- a/benchmarks/build.gradle +++ b/benchmarks/build.gradle @@ -53,7 +53,7 @@ forbiddenApisMain.enabled = false dependencyLicenses.enabled = false dependenciesInfo.enabled = false -thirdPartyAudit.excludes = [ +thirdPartyAudit.ignoreViolations ( // these classes intentionally use JDK internal API (and this is ok since the project is maintained by Oracle employees) 'org.openjdk.jmh.profile.AbstractHotspotProfiler', 'org.openjdk.jmh.profile.HotspotThreadProfiler', @@ -62,4 +62,4 @@ thirdPartyAudit.excludes = [ 'org.openjdk.jmh.profile.HotspotMemoryProfiler', 'org.openjdk.jmh.profile.HotspotRuntimeProfiler', 'org.openjdk.jmh.util.Utils' -] +) diff --git a/benchmarks/src/main/java/org/elasticsearch/benchmark/time/DateFormatterBenchmark.java b/benchmarks/src/main/java/org/elasticsearch/benchmark/time/DateFormatterBenchmark.java new file mode 100644 index 0000000000000..b30b3ada0ab64 --- /dev/null +++ b/benchmarks/src/main/java/org/elasticsearch/benchmark/time/DateFormatterBenchmark.java @@ -0,0 +1,58 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +package org.elasticsearch.benchmark.time; + +import org.elasticsearch.common.joda.Joda; +import org.elasticsearch.common.time.DateFormatter; +import org.openjdk.jmh.annotations.Benchmark; +import org.openjdk.jmh.annotations.BenchmarkMode; +import org.openjdk.jmh.annotations.Fork; +import org.openjdk.jmh.annotations.Measurement; +import org.openjdk.jmh.annotations.Mode; +import org.openjdk.jmh.annotations.OutputTimeUnit; +import org.openjdk.jmh.annotations.Scope; +import org.openjdk.jmh.annotations.State; +import org.openjdk.jmh.annotations.Warmup; + +import java.time.temporal.TemporalAccessor; +import java.util.concurrent.TimeUnit; + +@Fork(3) +@Warmup(iterations = 10) +@Measurement(iterations = 10) +@BenchmarkMode(Mode.AverageTime) +@OutputTimeUnit(TimeUnit.NANOSECONDS) +@State(Scope.Benchmark) +@SuppressWarnings("unused") // invoked by benchmarking framework +public class DateFormatterBenchmark { + + private final DateFormatter javaFormatter = DateFormatter.forPattern("8year_month_day||ordinal_date||epoch_millis"); + private final DateFormatter jodaFormatter = Joda.forPattern("year_month_day||ordinal_date||epoch_millis"); + + @Benchmark + public TemporalAccessor parseJavaDate() { + return javaFormatter.parse("1234567890"); + } + + @Benchmark + public TemporalAccessor parseJodaDate() { + return jodaFormatter.parse("1234567890"); + } +} + diff --git a/build.gradle b/build.gradle index 7e067b8997805..439bd32727d18 100644 --- a/build.gradle +++ b/build.gradle @@ -24,13 +24,9 @@ import org.elasticsearch.gradle.Version import org.elasticsearch.gradle.VersionCollection import org.elasticsearch.gradle.VersionProperties import org.elasticsearch.gradle.plugin.PluginBuildPlugin -import org.gradle.api.tasks.options.Option -import org.gradle.util.GradleVersion -import org.gradle.util.DistributionLocator +import org.gradle.util.GradleVersion +import org.gradle.util.DistributionLocator import org.gradle.plugins.ide.eclipse.model.SourceFolder -import com.carrotsearch.gradle.junit4.RandomizedTestingTask - -import java.util.function.Predicate plugins { id 'com.gradle.build-scan' version '2.0.2' @@ -433,7 +429,7 @@ tasks.idea.doLast { ideaMarker.setText('', 'UTF-8') } if (System.getProperty('idea.active') != null && ideaMarker.exists() == false) { - throw new GradleException('You must run gradle idea from the root of elasticsearch before importing into IntelliJ') + throw new GradleException('You must run `./gradlew idea` from the root of elasticsearch before importing into IntelliJ') } // eclipse configuration diff --git a/buildSrc/build.gradle b/buildSrc/build.gradle index cd6c97078b8ab..b9ff1fd485e05 100644 --- a/buildSrc/build.gradle +++ b/buildSrc/build.gradle @@ -16,7 +16,6 @@ * specific language governing permissions and limitations * under the License. */ -import java.nio.file.Files import org.gradle.util.GradleVersion plugins { @@ -224,6 +223,18 @@ if (project != rootProject) { integTestClass = 'org.elasticsearch.gradle.test.GradleIntegrationTestCase' } + testingConventions { + naming.clear() + naming { + Tests { + baseClass 'org.elasticsearch.gradle.test.GradleUnitTestCase' + } + IT { + baseClass 'org.elasticsearch.gradle.test.GradleIntegrationTestCase' + } + } + } + /* * We already configure publication and we don't need or want this one that * comes from the java-gradle-plugin.
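The `testingConventions { naming { ... } }` block used above is the new DSL introduced by this PR: each entry under `naming` declares a suffix rule (`Tests`, `IT`) plus the base classes that tests with that suffix must extend. As a minimal sketch, a consuming project could override the inherited defaults like this (the base class names below are illustrative placeholders, not classes from this PR):

    testingConventions {
        naming.clear()                                   // drop the inherited rules first
        naming {
            Tests {
                baseClass 'org.example.MyUnitTestCase'   // hypothetical unit test base
            }
            IT {
                baseClass 'org.example.MyIntegTestCase'  // hypothetical integration base
                baseClass 'org.example.MyRestTestCase'   // a rule may list several bases
            }
        }
    }

Calling `naming.clear()` first matters because the container is pre-populated with defaults by PrecommitTasks (LuceneTestCase/ESIntegTestCase, as seen in the next hunks); without it the overrides are merged into the defaults rather than replacing them.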
diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/PluginBuildPlugin.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/PluginBuildPlugin.groovy index 28d18e9b876f5..306ac4a05e87a 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/PluginBuildPlugin.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/PluginBuildPlugin.groovy @@ -78,6 +78,19 @@ public class PluginBuildPlugin extends BuildPlugin { skipIntegTestInDisguise = true } } + project.testingConventions { + naming.clear() + naming { + Tests { + baseClass 'org.apache.lucene.util.LuceneTestCase' + } + IT { + baseClass 'org.elasticsearch.test.ESIntegTestCase' + baseClass 'org.elasticsearch.test.rest.ESRestTestCase' + baseClass 'org.elasticsearch.test.ESSingleNodeTestCase' + } + } + } createIntegTestTask(project) createBundleTask(project) project.configurations.getByName('default').extendsFrom(project.configurations.getByName('runtime')) @@ -101,7 +114,7 @@ public class PluginBuildPlugin extends BuildPlugin { generatePOMTask.ext.pomFileName = "${project.archivesBaseName}-client-${project.versions.elasticsearch}.pom" } } else { - project.plugins.withType(MavenPublishPlugin).whenPluginAdded { + if (project.plugins.hasPlugin(MavenPublishPlugin)) { project.publishing.publications.nebula(MavenPublication).artifactId( project.pluginProperties.extension.name ) diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/PrecommitTasks.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/PrecommitTasks.groovy index 9fdb1b41ec0d2..28c86a28f713c 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/PrecommitTasks.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/PrecommitTasks.groovy @@ -91,7 +91,17 @@ class PrecommitTasks { } static Task configureTestingConventions(Project project) { - project.getTasks().create("testingConventions", TestingConventionsTasks.class) + TestingConventionsTasks task = project.getTasks().create("testingConventions", TestingConventionsTasks.class) + task.naming { + Tests { + baseClass "org.apache.lucene.util.LuceneTestCase" + } + IT { + baseClass "org.elasticsearch.test.ESIntegTestCase" + baseClass 'org.elasticsearch.test.rest.ESRestTestCase' + } + } + return task } private static Task configureJarHell(Project project) { @@ -123,8 +133,16 @@ class PrecommitTasks { project.tasks.withType(CheckForbiddenApis) { dependsOn(buildResources) targetCompatibility = project.runtimeJavaVersion >= JavaVersion.VERSION_1_9 ? - project.runtimeJavaVersion.getMajorVersion() : - project.runtimeJavaVersion + project.runtimeJavaVersion.getMajorVersion() : project.runtimeJavaVersion + if (project.runtimeJavaVersion > JavaVersion.VERSION_11) { + doLast { + project.logger.info( + "Forbidden APIs does not support Java versions past 11. 
Will use the signatures from Java 11 for {}", + project.runtimeJavaVersion + ) + } + targetCompatibility = JavaVersion.VERSION_11.getMajorVersion() + } bundledSignatures = [ "jdk-unsafe", "jdk-deprecated", "jdk-non-portable", "jdk-system-out" ] diff --git a/buildSrc/src/main/java/org/elasticsearch/gradle/precommit/TestingConventionRule.java b/buildSrc/src/main/java/org/elasticsearch/gradle/precommit/TestingConventionRule.java new file mode 100644 index 0000000000000..5fec05d945ed5 --- /dev/null +++ b/buildSrc/src/main/java/org/elasticsearch/gradle/precommit/TestingConventionRule.java @@ -0,0 +1,99 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.gradle.precommit; + +import java.io.Serializable; +import java.util.Collection; +import java.util.HashSet; +import java.util.Objects; +import java.util.Set; +import java.util.regex.Pattern; + +/** + * Represents rules for tests enforced by {@link TestingConventionsTasks}. + * + * Rules are identified by name; tests must have this name as a suffix, extend one of the base classes, + * and be part of all the specified tasks. 
+ */ +public class TestingConventionRule implements Serializable { + + private final String suffix; + + private Set<String> baseClasses = new HashSet<>(); + + private Set<Pattern> taskNames = new HashSet<>(); + + public TestingConventionRule(String suffix) { + this.suffix = suffix; + } + + public String getSuffix() { + return suffix; + } + + /** + * Alias for {@link #getSuffix()} as Gradle requires a name property + * + */ + public String getName() { + return suffix; + } + + public void baseClass(String clazz) { + baseClasses.add(clazz); + } + + public void setBaseClasses(Collection<String> baseClasses) { + this.baseClasses.clear(); + this.baseClasses.addAll(baseClasses); + } + + public void taskName(Pattern expression) { + taskNames.add(expression); + } + public void taskName(String expression) { + taskNames.add(Pattern.compile(expression)); + } + + public void setTaskNames(Collection<Pattern> expressions) { + taskNames.clear(); + taskNames.addAll(expressions); + } + + public Set<String> getBaseClasses() { + return baseClasses; + } + + public Set<Pattern> getTaskNames() { + return taskNames; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + TestingConventionRule that = (TestingConventionRule) o; + return Objects.equals(suffix, that.suffix); + } + + @Override + public int hashCode() { + return Objects.hash(suffix); + } +} diff --git a/buildSrc/src/main/java/org/elasticsearch/gradle/precommit/TestingConventionsTasks.java b/buildSrc/src/main/java/org/elasticsearch/gradle/precommit/TestingConventionsTasks.java index 105deabfd40fd..0f207ad3fe1af 100644 --- a/buildSrc/src/main/java/org/elasticsearch/gradle/precommit/TestingConventionsTasks.java +++ b/buildSrc/src/main/java/org/elasticsearch/gradle/precommit/TestingConventionsTasks.java @@ -18,8 +18,10 @@ */ package org.elasticsearch.gradle.precommit; +import groovy.lang.Closure; import org.elasticsearch.gradle.tool.Boilerplate; import org.gradle.api.DefaultTask; +import org.gradle.api.NamedDomainObjectContainer; import org.gradle.api.Task; import org.gradle.api.file.FileCollection; import org.gradle.api.file.FileTree; @@ -54,50 +56,37 @@ public class TestingConventionsTasks extends DefaultTask { - private static final String TEST_CLASS_SUFIX = "Tests"; - private static final String INTEG_TEST_CLASS_SUFIX = "IT"; private static final String TEST_METHOD_PREFIX = "test"; - /** - * Are there tests to execute ? Accounts for @Ignore and @AwaitsFix - */ - private Boolean activeTestsExists; - private Map<String, File> testClassNames; + private final NamedDomainObjectContainer<TestingConventionRule> naming; + public TestingConventionsTasks() { setDescription("Tests various testing conventions"); // Run only after everything is compiled Boilerplate.getJavaSourceSets(getProject()).all(sourceSet -> dependsOn(sourceSet.getClassesTaskName())); naming = getProject().container(TestingConventionRule.class); } - + @Input - public Map<String, Set<File>> classFilesPerTask(FileTree testClassFiles) { + public Map<String, Set<File>> classFilesPerEnabledTask(FileTree testClassFiles) { Map<String, Set<File>> collector = new HashMap<>(); + // RandomizedTestingTask collector.putAll( - Stream.concat( - getProject().getTasks().withType(getRandomizedTestingTask()).stream(), - // Look at sub-projects too. 
As sometimes tests are implemented in parent but ran in sub-projects against - // different configurations - getProject().getSubprojects().stream().flatMap(subproject -> - subproject.getTasks().withType(getRandomizedTestingTask()).stream() - ) - ) + getProject().getTasks().withType(getRandomizedTestingTask()).stream() .filter(Task::getEnabled) .collect(Collectors.toMap( Task::getPath, task -> testClassFiles.matching(getRandomizedTestingPatternSet(task)).getFiles() - )) + ) + ) ); + // Gradle Test collector.putAll( - Stream.concat( - getProject().getTasks().withType(Test.class).stream(), - getProject().getSubprojects().stream().flatMap(subproject -> - subproject.getTasks().withType(Test.class).stream() - ) - ) + getProject().getTasks().withType(Test.class).stream() .filter(Task::getEnabled) .collect(Collectors.toMap( Task::getPath, @@ -119,14 +108,22 @@ public Map<String, File> getTestClassNames() { return testClassNames; } + @Input + public NamedDomainObjectContainer<TestingConventionRule> getNaming() { + return naming; + } + @OutputFile public File getSuccessMarker() { return new File(getProject().getBuildDir(), "markers/" + getName()); } + public void naming(Closure action) { + naming.configure(action); + } + @TaskAction public void doCheck() throws IOException { - activeTestsExists = false; final String problems; try (URLClassLoader isolatedClassLoader = new URLClassLoader( )) { Predicate<Class<?>> isStaticClass = clazz -> Modifier.isStatic(clazz.getModifiers()); Predicate<Class<?>> isPublicClass = clazz -> Modifier.isPublic(clazz.getModifiers()); - Predicate<Class<?>> implementsNamingConvention = clazz -> - clazz.getName().endsWith(TEST_CLASS_SUFIX) || - clazz.getName().endsWith(INTEG_TEST_CLASS_SUFIX); + Predicate<Class<?>> isAbstractClass = clazz -> Modifier.isAbstract(clazz.getModifiers()); - Map<File, Class<?>> classes = getTestClassNames().entrySet().stream() + final Map<File, Class<?>> classes = getTestClassNames().entrySet().stream() .collect(Collectors.toMap( Map.Entry::getValue, entry -> loadClassWithoutInitializing(entry.getKey(), isolatedClassLoader)) ); - FileTree allTestClassFiles = getProject().files( + final FileTree allTestClassFiles = getProject().files( classes.values().stream() .filter(isStaticClass.negate()) .filter(isPublicClass) - .filter(implementsNamingConvention) + .filter((Predicate<Class<?>>) this::implementsNamingConvention) .map(clazz -> testClassNames.get(clazz.getName())) .collect(Collectors.toList()) ).getAsFileTree(); - final Map<String, Set<File>> classFilesPerTask = classFilesPerTask(allTestClassFiles); + final Map<String, Set<File>> classFilesPerTask = classFilesPerEnabledTask(allTestClassFiles); - Map<String, Set<Class<?>>> testClassesPerTask = classFilesPerTask.entrySet().stream() + final Map<String, Set<Class<?>>> testClassesPerTask = classFilesPerTask.entrySet().stream() .collect( Collectors.toMap( Map.Entry::getKey, entry -> entry.getValue().stream() .map(classes::get) - .filter(implementsNamingConvention) + .filter(this::implementsNamingConvention) .collect(Collectors.toSet()) ) ); + final Map<String, Set<Class<?>>> suffixToBaseClass; + if (classes.isEmpty()) { + // Don't load base classes if we don't have any tests. 
+ // This allows defaults to be configured for projects that don't have any tests + // + suffixToBaseClass = Collections.emptyMap(); + } else { + suffixToBaseClass = naming.stream() + .collect( + Collectors.toMap( + TestingConventionRule::getSuffix, + rule -> rule.getBaseClasses().stream() + .map(each -> loadClassWithoutInitializing(each, isolatedClassLoader)) + .collect(Collectors.toSet()) + )); + } + problems = collectProblems( checkNoneExists( "Test classes implemented by inner classes will not run", classes.values().stream() .filter(isStaticClass) - .filter(implementsNamingConvention.or(this::seemsLikeATest)) + .filter(isPublicClass) + .filter(((Predicate<Class<?>>) this::implementsNamingConvention).or(this::seemsLikeATest)) ), checkNoneExists( "Seem like test classes but don't match naming convention", classes.values().stream() .filter(isStaticClass.negate()) .filter(isPublicClass) - .filter(this::seemsLikeATest) - .filter(implementsNamingConvention.negate()) + .filter(isAbstractClass.negate()) + .filter(this::seemsLikeATest) // TODO when base classes are set, check for classes that extend them + .filter(((Predicate<Class<?>>) this::implementsNamingConvention).negate()) ), + // TODO: check for non public classes that seem like tests + // TODO: check for abstract classes that implement the naming conventions + // No empty enabled tasks collectProblems( testClassesPerTask.entrySet().stream() - .map( entry -> + .map(entry -> checkAtLeastOneExists( - "test class in " + entry.getKey(), + "test class included in task " + entry.getKey(), entry.getValue().stream() ) ) - .collect(Collectors.joining()) + .sorted() + .collect(Collectors.joining("\n")) ), checkNoneExists( "Test classes are not included in any enabled task (" + @@ -201,25 +219,43 @@ public void doCheck() throws IOException { .anyMatch(fileSet -> fileSet.contains(testFile)) == false ) .map(classes::get) + ), + collectProblems( + suffixToBaseClass.entrySet().stream() + .filter(entry -> entry.getValue().isEmpty() == false) + .map(entry -> { + return checkNoneExists( + "Test classes with suffix `" + entry.getKey() + "` should extend " + + entry.getValue().stream().map(Class::getName).collect(Collectors.joining(" or ")) + + " but the following classes do not", + classes.values().stream() + .filter(clazz -> clazz.getName().endsWith(entry.getKey())) + .filter(clazz -> entry.getValue().stream() + .anyMatch(test -> test.isAssignableFrom(clazz)) == false) + ); + }).sorted() + .collect(Collectors.joining("\n")) + ) + // TODO: check that the testing tasks are included in the right task based on the name ( from the rule ) + // TODO: check to make sure that the main source set doesn't have classes that match + // the naming convention (just the names, don't load classes) ); } if (problems.isEmpty()) { - getLogger().error(problems); - throw new IllegalStateException("Testing conventions are not honored"); - } else { getSuccessMarker().getParentFile().mkdirs(); Files.write(getSuccessMarker().toPath(), new byte[]{}, StandardOpenOption.CREATE); + } else { + getLogger().error(problems); + throw new IllegalStateException("Testing conventions are not honored"); } } private String collectProblems(String... 
problems) { return Stream.of(problems) .map(String::trim) - .filter(String::isEmpty) - .map(each -> each + "\n") - .collect(Collectors.joining()); + .filter(s -> s.isEmpty() == false) + .collect(Collectors.joining("\n")); } @SuppressWarnings("unchecked") @@ -251,10 +287,11 @@ private Class<? extends Task> getRandomizedTestingTask() { private String checkNoneExists(String message, Stream<? extends Class<?>> stream) { String problem = stream .map(each -> " * " + each.getName()) + .sorted() .collect(Collectors.joining("\n")); if (problem.isEmpty() == false) { return message + ":\n" + problem; - } else{ + } else { return ""; } } @@ -263,28 +300,33 @@ private String checkAtLeastOneExists(String message, Stream<? extends Class<?>> if (stream.findAny().isPresent()) { return ""; } else { - return "Expected at least one " + message + ", but found none.\n"; + return "Expected at least one " + message + ", but found none."; } } private boolean seemsLikeATest(Class<?> clazz) { try { ClassLoader classLoader = clazz.getClassLoader(); - Class<?> junitTest; - try { - junitTest = classLoader.loadClass("junit.framework.Test"); - } catch (ClassNotFoundException e) { - throw new IllegalStateException("Could not load junit.framework.Test. It's expected that this class is " + - "available on the tests classpath"); - } + + Class<?> junitTest = loadClassWithoutInitializing("org.junit.Assert", classLoader); if (junitTest.isAssignableFrom(clazz)) { - getLogger().info("{} is a test because it extends junit.framework.Test", clazz.getName()); + getLogger().debug("{} is a test because it extends {}", clazz.getName(), junitTest.getName()); return true; } + + Class<?> junitAnnotation = loadClassWithoutInitializing("org.junit.Test", classLoader); for (Method method : clazz.getMethods()) { - if (matchesTestMethodNamingConvention(clazz, method)) return true; - if (isAnnotated(clazz, method, junitTest)) return true; + if (matchesTestMethodNamingConvention(method)) { + getLogger().debug("{} is a test because it has method named '{}'", clazz.getName(), method.getName()); + return true; + } + if (isAnnotated(method, junitAnnotation)) { + getLogger().debug("{} is a test because it has method '{}' annotated with '{}'", + clazz.getName(), method.getName(), junitAnnotation.getName()); + return true; + } } + return false; } catch (NoClassDefFoundError e) { // Include the message to get a more useful message when running Gradle without -s @@ -294,23 +336,25 @@ private boolean seemsLikeATest(Class<?> clazz) { } } - private boolean matchesTestMethodNamingConvention(Class<?> clazz, Method method) { - if (method.getName().startsWith(TEST_METHOD_PREFIX) && - Modifier.isStatic(method.getModifiers()) == false && - method.getReturnType().equals(Void.class) - ) { - getLogger().info("{} is a test because it has method: {}", clazz.getName(), method.getName()); + private boolean implementsNamingConvention(Class<?> clazz) { + if (naming.stream() + .map(TestingConventionRule::getSuffix) + .anyMatch(suffix -> clazz.getName().endsWith(suffix))) { + getLogger().debug("{} is a test because it matches the naming convention", clazz.getName()); return true; } return false; } - private boolean isAnnotated(Class<?> clazz, Method method, Class<?> annotation) { + private boolean matchesTestMethodNamingConvention(Method method) { + return method.getName().startsWith(TEST_METHOD_PREFIX) && + Modifier.isStatic(method.getModifiers()) == false; + } + + private boolean isAnnotated(Method method, Class<?> annotation) { for (Annotation presentAnnotation : method.getAnnotations()) { if 
(annotation.isAssignableFrom(presentAnnotation.getClass())) { - getLogger().info("{} is a test because {} is annotated with junit.framework.Test", - clazz.getName(), method.getName() - ); return true; } } @@ -380,14 +424,14 @@ public FileVisitResult visitFileFailed(Path file, IOException exc) throws IOExce private Class<?> loadClassWithoutInitializing(String name, ClassLoader isolatedClassLoader) { try { - return Class.forName(name, + return Class.forName( + name, // Don't initialize the class to save time. Not needed for this test and this doesn't share a VM with any other tests. false, isolatedClassLoader ); } catch (ClassNotFoundException e) { - // Will not get here as the exception will be loaded by isolatedClassLoader - throw new RuntimeException("Failed to load class " + name, e); + throw new RuntimeException("Failed to load class " + name + ". Incorrect test runtime classpath?", e); } } diff --git a/buildSrc/src/main/java/org/elasticsearch/gradle/precommit/ThirdPartyAuditTask.java b/buildSrc/src/main/java/org/elasticsearch/gradle/precommit/ThirdPartyAuditTask.java index 90b10c95bc2c2..13cfb8ea61701 100644 --- a/buildSrc/src/main/java/org/elasticsearch/gradle/precommit/ThirdPartyAuditTask.java +++ b/buildSrc/src/main/java/org/elasticsearch/gradle/precommit/ThirdPartyAuditTask.java @@ -51,6 +51,7 @@ import java.util.regex.Pattern; import java.util.stream.Collectors; import java.util.stream.IntStream; +import java.util.stream.Stream; @CacheableTask public class ThirdPartyAuditTask extends DefaultTask { @@ -63,10 +64,11 @@ public class ThirdPartyAuditTask extends DefaultTask { "\\s\\sin ([a-zA-Z0-9$.]+) \\(.*\\)" ); - /** - * patterns for classes to exclude, because we understand their issues - */ - private Set<String> excludes = new TreeSet<>(); + private Set<String> missingClassExcludes = new TreeSet<>(); + + private Set<String> violationsExcludes = new TreeSet<>(); + + private Set<String> jdkJarHellExcludes = new TreeSet<>(); private File signatureFile; @@ -115,19 +117,40 @@ public File getJarExpandDir() { ); } - public void setExcludes(String... classes) { - excludes.clear(); + public void ignoreMissingClasses(String... classesOrPackages) { + if (classesOrPackages.length == 0) { + missingClassExcludes = null; + return; + } + if (missingClassExcludes == null) { + missingClassExcludes = new TreeSet<>(); + } + for (String each : classesOrPackages) { + missingClassExcludes.add(each); + } + } + + public void ignoreViolations(String... 
violatingClasses) { + for (String each : violatingClasses) { + violationsExcludes.add(each); + } + } + + public void ignoreJarHellWithJDK(String... classes) { for (String each : classes) { - if (each.indexOf('*') != -1) { - throw new IllegalArgumentException("illegal third party audit exclusion: '" + each + "', wildcards are not permitted!"); - } - excludes.add(each); + jdkJarHellExcludes.add(each); } } @Input - public Set<String> getExcludes() { - return Collections.unmodifiableSet(excludes); + public Set<String> getJdkJarHellExcludes() { + return jdkJarHellExcludes; + } + + @Input + @Optional + public Set<String> getMissingClassExcludes() { + return missingClassExcludes; } @InputFiles @@ -172,14 +195,55 @@ public void runThirdPartyAudit() throws IOException { Set<String> jdkJarHellClasses = runJdkJarHellCheck(); - try { - assertNoPointlessExclusions(missingClasses, violationsClasses, jdkJarHellClasses); - assertNoMissingAndViolations(missingClasses, violationsClasses); - assertNoJarHell(jdkJarHellClasses); - } catch (IllegalStateException e) { - getLogger().error(forbiddenApisOutput); - throw e; + if (missingClassExcludes != null) { + long bogusExcludesCount = Stream.concat(missingClassExcludes.stream(), violationsExcludes.stream()) + .filter(each -> missingClasses.contains(each) == false) + .filter(each -> violationsClasses.contains(each) == false) + .count(); + if (bogusExcludesCount != 0 && bogusExcludesCount == missingClassExcludes.size() + violationsExcludes.size()) { + logForbiddenAPIsOutput(forbiddenApisOutput); + throw new IllegalStateException( + "All excluded classes seem to have no issues. " + + "This is sometimes an indication that the check silently failed" + ); + } + assertNoPointlessExclusions("are not missing", missingClassExcludes, missingClasses); + missingClasses.removeAll(missingClassExcludes); + } + assertNoPointlessExclusions("have no violations", violationsExcludes, violationsClasses); + assertNoPointlessExclusions("do not generate jar hell with the JDK", jdkJarHellExcludes, jdkJarHellClasses); + + if (missingClassExcludes == null && (missingClasses.isEmpty() == false)) { + getLogger().info( + "Found missing classes, but task is configured to ignore all of them:\n {}", + formatClassList(missingClasses) + ); + missingClasses.clear(); + } + + violationsClasses.removeAll(violationsExcludes); + if (missingClasses.isEmpty() && violationsClasses.isEmpty()) { + getLogger().info("Third party audit passed successfully"); + } else { + logForbiddenAPIsOutput(forbiddenApisOutput); + if (missingClasses.isEmpty() == false) { + getLogger().error("Missing classes:\n{}", formatClassList(missingClasses)); + } + if (violationsClasses.isEmpty() == false) { + getLogger().error("Classes with violations:\n{}", formatClassList(violationsClasses)); + } + throw new IllegalStateException("Audit of third party dependencies failed"); } + + assertNoJarHell(jdkJarHellClasses); + } + + private void logForbiddenAPIsOutput(String forbiddenApisOutput) { + getLogger().error("Forbidden APIs output:\n{}==end of forbidden APIs==", forbiddenApisOutput); + } + + private void throwNotConfiguredCorrectlyException() { + throw new IllegalArgumentException("Audit of third party dependencies is not configured correctly"); } private void extractJars(Set<File> jars) { @@ -221,7 +285,7 @@ private void extractJars(Set<File> jars) { } private void assertNoJarHell(Set<String> jdkJarHellClasses) { - jdkJarHellClasses.removeAll(excludes); + jdkJarHellClasses.removeAll(jdkJarHellExcludes); if (jdkJarHellClasses.isEmpty() == false) { throw new IllegalStateException( 
"Audit of third party dependencies failed:\n" + @@ -230,33 +294,22 @@ private void assertNoJarHell(Set jdkJarHellClasses) { } } - private void assertNoMissingAndViolations(Set missingClasses, Set violationsClasses) { - missingClasses.removeAll(excludes); - violationsClasses.removeAll(excludes); - String missingText = formatClassList(missingClasses); - String violationsText = formatClassList(violationsClasses); - if (missingText.isEmpty() && violationsText.isEmpty()) { - getLogger().info("Third party audit passed successfully"); - } else { - throw new IllegalStateException( - "Audit of third party dependencies failed:\n" + - (missingText.isEmpty() ? "" : "Missing classes:\n" + missingText) + - (violationsText.isEmpty() ? "" : "Classes with violations:\n" + violationsText) - ); + private void assertNoPointlessExclusions(String specifics, Set excludes, Set problematic) { + String notMissing = excludes.stream() + .filter(each -> problematic.contains(each) == false) + .map(each -> " * " + each) + .collect(Collectors.joining("\n")); + if (notMissing.isEmpty() == false) { + getLogger().error("Unnecessary exclusions, following classes " + specifics + ":\n {}", notMissing); + throw new IllegalStateException("Third party audit task is not configured correctly"); } } - private void assertNoPointlessExclusions(Set missingClasses, Set violationsClasses, Set jdkJarHellClasses) { - // keep our whitelist up to date - Set bogusExclusions = new TreeSet<>(excludes); - bogusExclusions.removeAll(missingClasses); - bogusExclusions.removeAll(jdkJarHellClasses); - bogusExclusions.removeAll(violationsClasses); - if (bogusExclusions.isEmpty() == false) { - throw new IllegalStateException( - "Invalid exclusions, nothing is wrong with these classes: " + formatClassList(bogusExclusions) - ); - } + private String formatClassList(Set classList) { + return classList.stream() + .map(name -> " * " + name) + .sorted() + .collect(Collectors.joining("\n")); } private String runForbiddenAPIsCli() throws IOException { @@ -289,12 +342,6 @@ private String runForbiddenAPIsCli() throws IOException { return forbiddenApisOutput; } - private String formatClassList(Set classList) { - return classList.stream() - .map(name -> " * " + name) - .collect(Collectors.joining("\n")); - } - private Set runJdkJarHellCheck() throws IOException { ByteArrayOutputStream standardOut = new ByteArrayOutputStream(); ExecResult execResult = getProject().javaexec(spec -> { diff --git a/buildSrc/src/main/java/org/elasticsearch/gradle/testclusters/TestClustersPlugin.java b/buildSrc/src/main/java/org/elasticsearch/gradle/testclusters/TestClustersPlugin.java index 1fe8bec1902f6..8926f74ca39a7 100644 --- a/buildSrc/src/main/java/org/elasticsearch/gradle/testclusters/TestClustersPlugin.java +++ b/buildSrc/src/main/java/org/elasticsearch/gradle/testclusters/TestClustersPlugin.java @@ -311,7 +311,7 @@ private static void configureCleanupHooks(Project project) { shutdownExecutorService(); }); // When the Daemon is not used, or runs into issues, rely on a shutdown hook - // When the daemon is used, but does not work correctly and eventually dies off (e.x. due to non interruptable + // When the daemon is used, but does not work correctly and eventually dies off (e.x. due to non interruptible // thread in the build) process will be stopped eventually when the daemon dies. 
Runtime.getRuntime().addShutdownHook(new Thread(TestClustersPlugin::shutDownAllClusters)); } diff --git a/buildSrc/src/main/java/org/elasticsearch/gradle/testfixtures/TestFixturesPlugin.java b/buildSrc/src/main/java/org/elasticsearch/gradle/testfixtures/TestFixturesPlugin.java index 73b3baf66ddeb..c13bcc02cbe89 100644 --- a/buildSrc/src/main/java/org/elasticsearch/gradle/testfixtures/TestFixturesPlugin.java +++ b/buildSrc/src/main/java/org/elasticsearch/gradle/testfixtures/TestFixturesPlugin.java @@ -21,6 +21,7 @@ import com.avast.gradle.dockercompose.ComposeExtension; import com.avast.gradle.dockercompose.DockerComposePlugin; import org.elasticsearch.gradle.precommit.JarHellTask; +import org.elasticsearch.gradle.precommit.TestingConventionsTasks; import org.elasticsearch.gradle.precommit.ThirdPartyAuditTask; import org.gradle.api.DefaultTask; import org.gradle.api.Plugin; @@ -100,6 +101,10 @@ public void apply(Project project) { tasks.withType(getTaskClass("com.carrotsearch.gradle.junit4.RandomizedTestingTask"), task -> task.setEnabled(false) ); + // conventions are not honored when the tasks are disabled + tasks.withType(TestingConventionsTasks.class, task -> + task.setEnabled(false) + ); return; } tasks.withType(getTaskClass("com.carrotsearch.gradle.junit4.RandomizedTestingTask"), task -> diff --git a/buildSrc/src/main/resources/checkstyle_suppressions.xml b/buildSrc/src/main/resources/checkstyle_suppressions.xml index b74f2b71e39f8..370a6bbcac901 100644 --- a/buildSrc/src/main/resources/checkstyle_suppressions.xml +++ b/buildSrc/src/main/resources/checkstyle_suppressions.xml @@ -21,6 +21,9 @@ configuration of classes that aren't in packages. --> + + + - + + diff --git a/buildSrc/src/main/resources/forbidden/es-server-signatures.txt b/buildSrc/src/main/resources/forbidden/es-server-signatures.txt index 01c7d18907346..f884e7c1a48e0 100644 --- a/buildSrc/src/main/resources/forbidden/es-server-signatures.txt +++ b/buildSrc/src/main/resources/forbidden/es-server-signatures.txt @@ -146,16 +146,4 @@ org.apache.logging.log4j.Logger#warn(java.lang.Object, java.lang.Throwable) org.apache.logging.log4j.Logger#error(java.lang.Object) org.apache.logging.log4j.Logger#error(java.lang.Object, java.lang.Throwable) org.apache.logging.log4j.Logger#fatal(java.lang.Object) -org.apache.logging.log4j.Logger#fatal(java.lang.Object, java.lang.Throwable) - -# Remove once Lucene 7.7 is integrated -@defaultMessage Use org.apache.lucene.document.XLatLonShape classes instead -org.apache.lucene.document.LatLonShape -org.apache.lucene.document.LatLonShapeBoundingBoxQuery -org.apache.lucene.document.LatLonShapeLineQuery -org.apache.lucene.document.LatLonShapePolygonQuery -org.apache.lucene.document.LatLonShapeQuery - -org.apache.lucene.geo.Rectangle2D @ use @org.apache.lucene.geo.XRectangle2D instead - -org.apache.lucene.geo.Tessellator @ use @org.apache.lucene.geo.XTessellator instead \ No newline at end of file +org.apache.logging.log4j.Logger#fatal(java.lang.Object, java.lang.Throwable) \ No newline at end of file diff --git a/buildSrc/src/main/resources/minimumGradleVersion b/buildSrc/src/main/resources/minimumGradleVersion index 6e6366051638f..83364075e010b 100644 --- a/buildSrc/src/main/resources/minimumGradleVersion +++ b/buildSrc/src/main/resources/minimumGradleVersion @@ -1 +1 @@ -5.0 \ No newline at end of file +5.1 \ No newline at end of file diff --git a/buildSrc/src/test/java/org/elasticsearch/gradle/precommit/TestingConventionsTasksIT.java 
b/buildSrc/src/test/java/org/elasticsearch/gradle/precommit/TestingConventionsTasksIT.java new file mode 100644 index 0000000000000..dbe06287782f7 --- /dev/null +++ b/buildSrc/src/test/java/org/elasticsearch/gradle/precommit/TestingConventionsTasksIT.java @@ -0,0 +1,108 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.gradle.precommit; + +import org.elasticsearch.gradle.test.GradleIntegrationTestCase; +import org.gradle.testkit.runner.BuildResult; +import org.gradle.testkit.runner.GradleRunner; +import org.junit.Before; + +public class TestingConventionsTasksIT extends GradleIntegrationTestCase { + + @Before + public void setUp() { + } + + public void testInnerClasses() { + GradleRunner runner = getGradleRunner("testingConventions") + .withArguments("clean", ":no_tests_in_inner_classes:testingConventions", "-i", "-s"); + BuildResult result = runner.buildAndFail(); + assertOutputContains(result.getOutput(), + "Test classes implemented by inner classes will not run:", + " * org.elasticsearch.gradle.testkit.NastyInnerClasses$LooksLikeATestWithoutNamingConvention1", + " * org.elasticsearch.gradle.testkit.NastyInnerClasses$LooksLikeATestWithoutNamingConvention2", + " * org.elasticsearch.gradle.testkit.NastyInnerClasses$LooksLikeATestWithoutNamingConvention3", + " * org.elasticsearch.gradle.testkit.NastyInnerClasses$NamingConventionIT", + " * org.elasticsearch.gradle.testkit.NastyInnerClasses$NamingConventionTests" + ); + } + + public void testNamingConvention() { + GradleRunner runner = getGradleRunner("testingConventions") + .withArguments("clean", ":incorrect_naming_conventions:testingConventions", "-i", "-s"); + BuildResult result = runner.buildAndFail(); + assertOutputContains(result.getOutput(), + "Seem like test classes but don't match naming convention:", + " * org.elasticsearch.gradle.testkit.LooksLikeATestWithoutNamingConvention1", + " * org.elasticsearch.gradle.testkit.LooksLikeATestWithoutNamingConvention2", + " * org.elasticsearch.gradle.testkit.LooksLikeATestWithoutNamingConvention3" + ); + assertOutputDoesNotContain(result.getOutput(), "LooksLikeTestsButAbstract"); + } + + public void testNoEmptyTasks() { + GradleRunner runner = getGradleRunner("testingConventions") + .withArguments("clean", ":empty_test_task:testingConventions", "-i", "-s"); + BuildResult result = runner.buildAndFail(); + assertOutputContains(result.getOutput(), + "Expected at least one test class included in task :empty_test_task:emptyTest, but found none.", + "Expected at least one test class included in task :empty_test_task:emptyTestRandomized, but found none." 
+ ); + } + + public void testAllTestTasksIncluded() { + GradleRunner runner = getGradleRunner("testingConventions") + .withArguments("clean", ":all_classes_in_tasks:testingConventions", "-i", "-s"); + BuildResult result = runner.buildAndFail(); + assertOutputContains(result.getOutput(), + "Test classes are not included in any enabled task (:all_classes_in_tasks:emptyTestRandomized):", + " * org.elasticsearch.gradle.testkit.NamingConventionIT", + " * org.elasticsearch.gradle.testkit.NamingConventionTests" + ); + } + + public void testTaskNotImplementBaseClass() { + GradleRunner runner = getGradleRunner("testingConventions") + .withArguments("clean", ":not_implementing_base:testingConventions", "-i", "-s"); + BuildResult result = runner.buildAndFail(); + assertOutputContains(result.getOutput(), + "Test classes with suffix `IT` should extend org.elasticsearch.gradle.testkit.Integration but the following classes do not:", + " * org.elasticsearch.gradle.testkit.NamingConventionIT", + " * org.elasticsearch.gradle.testkit.NamingConventionMissmatchIT", + "Test classes with suffix `Tests` should extend org.elasticsearch.gradle.testkit.Unit but the following classes do not:", + " * org.elasticsearch.gradle.testkit.NamingConventionMissmatchTests", + " * org.elasticsearch.gradle.testkit.NamingConventionTests" + ); + } + + public void testValidSetupWithoutBaseClass() { + GradleRunner runner = getGradleRunner("testingConventions") + .withArguments("clean", ":valid_setup_no_base:testingConventions", "-i", "-s"); + BuildResult result = runner.build(); + assertTaskSuccessful(result, ":valid_setup_no_base:testingConventions"); + } + + public void testValidSetupWithBaseClass() { + GradleRunner runner = getGradleRunner("testingConventions") + .withArguments("clean", ":valid_setup_with_base:testingConventions", "-i", "-s"); + BuildResult result = runner.build(); + assertTaskSuccessful(result, ":valid_setup_with_base:testingConventions"); + } + +} diff --git a/buildSrc/src/test/java/org/elasticsearch/gradle/precommit/ThirdPartyAuditTaskIT.java b/buildSrc/src/test/java/org/elasticsearch/gradle/precommit/ThirdPartyAuditTaskIT.java index 3d8796900ed83..b14b3538fc276 100644 --- a/buildSrc/src/test/java/org/elasticsearch/gradle/precommit/ThirdPartyAuditTaskIT.java +++ b/buildSrc/src/test/java/org/elasticsearch/gradle/precommit/ThirdPartyAuditTaskIT.java @@ -81,9 +81,9 @@ public void testViolationFoundAndCompileOnlyIgnored() { assertTaskFailed(result, ":absurd"); assertOutputContains(result.getOutput(), - "> Audit of third party dependencies failed:", - " Classes with violations:", - " * TestingIO" + "Classes with violations:", + " * TestingIO", + "> Audit of third party dependencies failed" ); assertOutputDoesNotContain(result.getOutput(),"Missing classes:"); } @@ -98,9 +98,9 @@ public void testClassNotFoundAndCompileOnlyIgnored() { assertTaskFailed(result, ":absurd"); assertOutputContains(result.getOutput(), - "> Audit of third party dependencies failed:", - " Missing classes:", - " * org.apache.logging.log4j.LogManager" + "Missing classes:", + " * org.apache.logging.log4j.LogManager", + "> Audit of third party dependencies failed" ); assertOutputDoesNotContain(result.getOutput(), "Classes with violations:"); } diff --git a/buildSrc/src/test/java/org/elasticsearch/gradle/test/GradleIntegrationTestCase.java b/buildSrc/src/test/java/org/elasticsearch/gradle/test/GradleIntegrationTestCase.java index ab29a33a62eda..3e1d0b176b011 100644 --- 
a/buildSrc/src/test/java/org/elasticsearch/gradle/test/GradleIntegrationTestCase.java +++ b/buildSrc/src/test/java/org/elasticsearch/gradle/test/GradleIntegrationTestCase.java @@ -43,7 +43,7 @@ protected void assertOutputContains(String output, String... lines) { if (index.equals(index.stream().sorted().collect(Collectors.toList())) == false) { fail("Expected the following lines to appear in this order:\n" + Stream.of(lines).map(line -> " - `" + line + "`").collect(Collectors.joining("\n")) + - "\nBut they did not. Output is:\n\n```" + output + "\n```\n" + "\nBut the order was different. Output is:\n\n```" + output + "\n```\n" ); } } diff --git a/buildSrc/src/test/java/org/elasticsearch/gradle/testclusters/TestClustersPluginIT.java b/buildSrc/src/test/java/org/elasticsearch/gradle/testclusters/TestClustersPluginIT.java index ee366ac7b7c65..514f75eaa86e9 100644 --- a/buildSrc/src/test/java/org/elasticsearch/gradle/testclusters/TestClustersPluginIT.java +++ b/buildSrc/src/test/java/org/elasticsearch/gradle/testclusters/TestClustersPluginIT.java @@ -21,9 +21,11 @@ import org.elasticsearch.gradle.test.GradleIntegrationTestCase; import org.gradle.testkit.runner.BuildResult; import org.gradle.testkit.runner.GradleRunner; +import org.junit.Ignore; import java.util.Arrays; +@Ignore // https://github.com/elastic/elasticsearch/issues/37218 public class TestClustersPluginIT extends GradleIntegrationTestCase { public void testListClusters() { diff --git a/buildSrc/src/testKit/testingConventions/all_classes_in_tasks/src/test/java/org/elasticsearch/gradle/testkit/NamingConventionIT.java b/buildSrc/src/testKit/testingConventions/all_classes_in_tasks/src/test/java/org/elasticsearch/gradle/testkit/NamingConventionIT.java new file mode 100644 index 0000000000000..48a4f7adfd99e --- /dev/null +++ b/buildSrc/src/testKit/testingConventions/all_classes_in_tasks/src/test/java/org/elasticsearch/gradle/testkit/NamingConventionIT.java @@ -0,0 +1,23 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.gradle.testkit; + +public class NamingConventionTests { + +} diff --git a/buildSrc/src/testKit/testingConventions/build.gradle b/buildSrc/src/testKit/testingConventions/build.gradle new file mode 100644 index 0000000000000..d1a21a1ead0e7 --- /dev/null +++ b/buildSrc/src/testKit/testingConventions/build.gradle @@ -0,0 +1,86 @@ +plugins { + id 'elasticsearch.build' apply false +} + +allprojects { + apply plugin: 'java' + apply plugin: 'elasticsearch.build' + + repositories { + jcenter() + } + dependencies { + testCompile "junit:junit:4.12" + } + + ext.licenseFile = file("$buildDir/dummy/license") + ext.noticeFile = file("$buildDir/dummy/notice") + + testingConventions.naming { + // Reset default to no baseClass checks + Tests { + baseClasses = [] + } + IT { + baseClasses = [] + } + } + + unitTest.enabled = false +} + +project(':empty_test_task') { + task emptyTest(type: Test) { + + } + + task emptyTestRandomized(type: com.carrotsearch.gradle.junit4.RandomizedTestingTask) { + + } +} + +project(':all_classes_in_tasks') { + task emptyTestRandomized(type: com.carrotsearch.gradle.junit4.RandomizedTestingTask) { + include "**/Convention*" + } +} + +project(':not_implementing_base') { + testingConventions.naming { + Tests { + baseClass 'org.elasticsearch.gradle.testkit.Unit' + } + IT { + baseClass 'org.elasticsearch.gradle.testkit.Integration' + } + } + task randomized(type: com.carrotsearch.gradle.junit4.RandomizedTestingTask) { + include "**/*IT.class" + include "**/*Tests.class" + } +} + +project(':valid_setup_no_base') { + task randomized(type: com.carrotsearch.gradle.junit4.RandomizedTestingTask) { + include "**/*IT.class" + include "**/*Tests.class" + } +} + +project (':valid_setup_with_base') { + task randomized(type: com.carrotsearch.gradle.junit4.RandomizedTestingTask) { + include "**/*IT.class" + include "**/*Tests.class" + } + testingConventions.naming { + Tests { + baseClass 'org.elasticsearch.gradle.testkit.Unit' + } + IT { + baseClass 'org.elasticsearch.gradle.testkit.Integration' + } + } +} + + + diff --git a/buildSrc/src/testKit/testingConventions/empty_test_task/.gitignore b/buildSrc/src/testKit/testingConventions/empty_test_task/.gitignore new file mode 100644 index 0000000000000..e69de29bb2d1d diff --git a/buildSrc/src/testKit/testingConventions/incorrect_naming_conventions/src/test/java/org/elasticsearch/gradle/testkit/LooksLikeATestWithoutNamingConvention1.java b/buildSrc/src/testKit/testingConventions/incorrect_naming_conventions/src/test/java/org/elasticsearch/gradle/testkit/LooksLikeATestWithoutNamingConvention1.java new file mode 100644 index 0000000000000..35d60d8a56b53 --- /dev/null +++ 
b/buildSrc/src/testKit/testingConventions/incorrect_naming_conventions/src/test/java/org/elasticsearch/gradle/testkit/LooksLikeATestWithoutNamingConvention1.java @@ -0,0 +1,30 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.gradle.testkit; + +import org.junit.Test; + +public class LooksLikeATestWithoutNamingConvention1 { + + @Test + public void annotatedTestMethod() { + + } + +} \ No newline at end of file diff --git a/buildSrc/src/testKit/testingConventions/incorrect_naming_conventions/src/test/java/org/elasticsearch/gradle/testkit/LooksLikeATestWithoutNamingConvention2.java b/buildSrc/src/testKit/testingConventions/incorrect_naming_conventions/src/test/java/org/elasticsearch/gradle/testkit/LooksLikeATestWithoutNamingConvention2.java new file mode 100644 index 0000000000000..1de116d21538d --- /dev/null +++ b/buildSrc/src/testKit/testingConventions/incorrect_naming_conventions/src/test/java/org/elasticsearch/gradle/testkit/LooksLikeATestWithoutNamingConvention2.java @@ -0,0 +1,25 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.gradle.testkit; + +import org.junit.Assert; + +public class LooksLikeATestWithoutNamingConvention2 extends Assert { + +} diff --git a/buildSrc/src/testKit/testingConventions/incorrect_naming_conventions/src/test/java/org/elasticsearch/gradle/testkit/LooksLikeATestWithoutNamingConvention3.java b/buildSrc/src/testKit/testingConventions/incorrect_naming_conventions/src/test/java/org/elasticsearch/gradle/testkit/LooksLikeATestWithoutNamingConvention3.java new file mode 100644 index 0000000000000..4a946c3aeb0ae --- /dev/null +++ b/buildSrc/src/testKit/testingConventions/incorrect_naming_conventions/src/test/java/org/elasticsearch/gradle/testkit/LooksLikeATestWithoutNamingConvention3.java @@ -0,0 +1,30 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. 
Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.gradle.testkit; + +import org.junit.Assert; +import org.junit.Test; + +public class LooksLikeATestWithoutNamingConvention3 { + + public void testMethod() { + + } + +} \ No newline at end of file diff --git a/buildSrc/src/testKit/testingConventions/incorrect_naming_conventions/src/test/java/org/elasticsearch/gradle/testkit/LooksLikeTestsButAbstract.java b/buildSrc/src/testKit/testingConventions/incorrect_naming_conventions/src/test/java/org/elasticsearch/gradle/testkit/LooksLikeTestsButAbstract.java new file mode 100644 index 0000000000000..15718cff841e1 --- /dev/null +++ b/buildSrc/src/testKit/testingConventions/incorrect_naming_conventions/src/test/java/org/elasticsearch/gradle/testkit/LooksLikeTestsButAbstract.java @@ -0,0 +1,30 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.gradle.testkit; + +import org.junit.Assert; +import org.junit.Test; + +public abstract class LooksLikeTestsButAbstract { + + public void testMethod() { + + } + +} diff --git a/buildSrc/src/testKit/testingConventions/incorrect_naming_conventions/src/test/java/org/elasticsearch/gradle/testkit/NamingConventionIT.java b/buildSrc/src/testKit/testingConventions/incorrect_naming_conventions/src/test/java/org/elasticsearch/gradle/testkit/NamingConventionIT.java new file mode 100644 index 0000000000000..48a4f7adfd99e --- /dev/null +++ b/buildSrc/src/testKit/testingConventions/incorrect_naming_conventions/src/test/java/org/elasticsearch/gradle/testkit/NamingConventionIT.java @@ -0,0 +1,23 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.gradle.testkit; + +public class NamingConventionIT { + +} \ No newline at end of file diff --git a/buildSrc/src/testKit/testingConventions/incorrect_naming_conventions/src/test/java/org/elasticsearch/gradle/testkit/NamingConventionTests.java b/buildSrc/src/testKit/testingConventions/incorrect_naming_conventions/src/test/java/org/elasticsearch/gradle/testkit/NamingConventionTests.java new file mode 100644 index 0000000000000..95152520a3f2d --- /dev/null +++ b/buildSrc/src/testKit/testingConventions/incorrect_naming_conventions/src/test/java/org/elasticsearch/gradle/testkit/NamingConventionTests.java @@ -0,0 +1,23 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.gradle.testkit; + +public class NamingConventionTests { + +} \ No newline at end of file diff --git a/buildSrc/src/testKit/testingConventions/no_tests_in_inner_classes/src/test/java/org/elasticsearch/gradle/testkit/NastyInnerClasses.java b/buildSrc/src/testKit/testingConventions/no_tests_in_inner_classes/src/test/java/org/elasticsearch/gradle/testkit/NastyInnerClasses.java new file mode 100644 index 0000000000000..c09cd9292926d --- /dev/null +++ b/buildSrc/src/testKit/testingConventions/no_tests_in_inner_classes/src/test/java/org/elasticsearch/gradle/testkit/NastyInnerClasses.java @@ -0,0 +1,64 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +package org.elasticsearch.gradle.testkit; + +import org.junit.Assert; +import org.junit.Test; + +public class NastyInnerClasses { + + public static class NamingConventionTests { + + } + + public static class NamingConventionIT { + + } + + public static class LooksLikeATestWithoutNamingConvention1 { + @Test + public void annotatedTestMethod() { + + } + } + + public static class LooksLikeATestWithoutNamingConvention2 extends Assert { + + } + + public static class LooksLikeATestWithoutNamingConvention3 { + + public void testMethod() { + + } + + } + + static abstract public class NonOffendingAbstractTests { + + } + + private static class NonOffendingPrivateTests { + + } + + static class NonOffendingPackageTests { + + } +} diff --git a/buildSrc/src/testKit/testingConventions/not_implementing_base/src/test/java/org/elasticsearch/gradle/testkit/AbstractIT.java b/buildSrc/src/testKit/testingConventions/not_implementing_base/src/test/java/org/elasticsearch/gradle/testkit/AbstractIT.java new file mode 100644 index 0000000000000..6abba7fd52776 --- /dev/null +++ b/buildSrc/src/testKit/testingConventions/not_implementing_base/src/test/java/org/elasticsearch/gradle/testkit/AbstractIT.java @@ -0,0 +1,23 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.gradle.testkit; + +public abstract class AbstractIT { + +} diff --git a/buildSrc/src/testKit/testingConventions/not_implementing_base/src/test/java/org/elasticsearch/gradle/testkit/Integration.java b/buildSrc/src/testKit/testingConventions/not_implementing_base/src/test/java/org/elasticsearch/gradle/testkit/Integration.java new file mode 100644 index 0000000000000..80522be3fb5ab --- /dev/null +++ b/buildSrc/src/testKit/testingConventions/not_implementing_base/src/test/java/org/elasticsearch/gradle/testkit/Integration.java @@ -0,0 +1,23 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
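The NastyInnerClasses fixture above also pins down the exemptions: a nested class that cannot run as a top-level test does not trip the check even when its name matches the convention. A short sketch of the exempt shapes, assuming the same rules the fixture encodes:

public class ExemptionSketch {
    public abstract static class ExemptAbstractTests {}  // abstract: cannot be instantiated by a runner
    private static class ExemptPrivateTests {}           // private: not discoverable
    static class ExemptPackageTests {}                   // package-private: not part of the public surface
}
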
+ */ +package org.elasticsearch.gradle.testkit; + +public class Integration { + +} diff --git a/buildSrc/src/testKit/testingConventions/not_implementing_base/src/test/java/org/elasticsearch/gradle/testkit/NamingConventionIT.java b/buildSrc/src/testKit/testingConventions/not_implementing_base/src/test/java/org/elasticsearch/gradle/testkit/NamingConventionIT.java new file mode 100644 index 0000000000000..48a4f7adfd99e --- /dev/null +++ b/buildSrc/src/testKit/testingConventions/not_implementing_base/src/test/java/org/elasticsearch/gradle/testkit/NamingConventionIT.java @@ -0,0 +1,23 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.gradle.testkit; + +public class NamingConventionIT { + +} \ No newline at end of file diff --git a/buildSrc/src/testKit/testingConventions/not_implementing_base/src/test/java/org/elasticsearch/gradle/testkit/NamingConventionMissmatchIT.java b/buildSrc/src/testKit/testingConventions/not_implementing_base/src/test/java/org/elasticsearch/gradle/testkit/NamingConventionMissmatchIT.java new file mode 100644 index 0000000000000..ea7f921372f9b --- /dev/null +++ b/buildSrc/src/testKit/testingConventions/not_implementing_base/src/test/java/org/elasticsearch/gradle/testkit/NamingConventionMissmatchIT.java @@ -0,0 +1,23 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +package org.elasticsearch.gradle.testkit; + +public class NamingConventionMissmatchIT extends Unit { + +} diff --git a/buildSrc/src/testKit/testingConventions/not_implementing_base/src/test/java/org/elasticsearch/gradle/testkit/NamingConventionMissmatchTests.java b/buildSrc/src/testKit/testingConventions/not_implementing_base/src/test/java/org/elasticsearch/gradle/testkit/NamingConventionMissmatchTests.java new file mode 100644 index 0000000000000..76e8b25c69f73 --- /dev/null +++ b/buildSrc/src/testKit/testingConventions/not_implementing_base/src/test/java/org/elasticsearch/gradle/testkit/NamingConventionMissmatchTests.java @@ -0,0 +1,23 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.gradle.testkit; + +public class NamingConventionMissmatchTests extends Integration { + +} diff --git a/buildSrc/src/testKit/testingConventions/not_implementing_base/src/test/java/org/elasticsearch/gradle/testkit/NamingConventionTests.java b/buildSrc/src/testKit/testingConventions/not_implementing_base/src/test/java/org/elasticsearch/gradle/testkit/NamingConventionTests.java new file mode 100644 index 0000000000000..6afb89ddf56b0 --- /dev/null +++ b/buildSrc/src/testKit/testingConventions/not_implementing_base/src/test/java/org/elasticsearch/gradle/testkit/NamingConventionTests.java @@ -0,0 +1,23 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.gradle.testkit; + +public class NamingConventionTests { + +} diff --git a/buildSrc/src/testKit/testingConventions/not_implementing_base/src/test/java/org/elasticsearch/gradle/testkit/Unit.java b/buildSrc/src/testKit/testingConventions/not_implementing_base/src/test/java/org/elasticsearch/gradle/testkit/Unit.java new file mode 100644 index 0000000000000..6a8ca7f758876 --- /dev/null +++ b/buildSrc/src/testKit/testingConventions/not_implementing_base/src/test/java/org/elasticsearch/gradle/testkit/Unit.java @@ -0,0 +1,23 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. 
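The two Missmatch fixtures above deliberately cross the suffixes against the configured base classes, so the check should flag both. Assuming the Unit and Integration bases defined in this fixture set, the pairing the check accepts looks like this:

public class CorrectlyPairedTests extends Unit {}        // *Tests classes extend the unit-test base
public class CorrectlyPairedIT extends Integration {}    // *IT classes extend the integration-test base
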
See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.gradle.testkit; + +public class Unit { + +} diff --git a/buildSrc/src/testKit/testingConventions/settings.gradle b/buildSrc/src/testKit/testingConventions/settings.gradle new file mode 100644 index 0000000000000..2baec09d27c8e --- /dev/null +++ b/buildSrc/src/testKit/testingConventions/settings.gradle @@ -0,0 +1,7 @@ +include 'no_tests_in_inner_classes' +include 'incorrect_naming_conventions' +include 'empty_test_task' +include 'all_classes_in_tasks' +include 'not_implementing_base' +include 'valid_setup_no_base' +include 'valid_setup_with_base' \ No newline at end of file diff --git a/buildSrc/src/testKit/testingConventions/valid_setup_no_base/src/test/java/org/elasticsearch/gradle/testkit/NamingConventionIT.java b/buildSrc/src/testKit/testingConventions/valid_setup_no_base/src/test/java/org/elasticsearch/gradle/testkit/NamingConventionIT.java new file mode 100644 index 0000000000000..48a4f7adfd99e --- /dev/null +++ b/buildSrc/src/testKit/testingConventions/valid_setup_no_base/src/test/java/org/elasticsearch/gradle/testkit/NamingConventionIT.java @@ -0,0 +1,23 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.gradle.testkit; + +public class NamingConventionIT { + +} \ No newline at end of file diff --git a/buildSrc/src/testKit/testingConventions/valid_setup_no_base/src/test/java/org/elasticsearch/gradle/testkit/NamingConventionTests.java b/buildSrc/src/testKit/testingConventions/valid_setup_no_base/src/test/java/org/elasticsearch/gradle/testkit/NamingConventionTests.java new file mode 100644 index 0000000000000..6afb89ddf56b0 --- /dev/null +++ b/buildSrc/src/testKit/testingConventions/valid_setup_no_base/src/test/java/org/elasticsearch/gradle/testkit/NamingConventionTests.java @@ -0,0 +1,23 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. 
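The settings.gradle above wires each scenario in as its own subproject, so a plugin test can run one expectation at a time. A hedged sketch of how such a fixture is typically driven with Gradle TestKit; the actual test class is not part of this diff, and the per-project task name here is an assumption:

import org.gradle.testkit.runner.BuildResult;
import org.gradle.testkit.runner.GradleRunner;

import java.io.File;

public class TestingConventionsFixtureDemo {
    public static void main(String[] args) {
        BuildResult result = GradleRunner.create()
            .withProjectDir(new File("buildSrc/src/testKit/testingConventions"))
            .withArguments(":incorrect_naming_conventions:testingConventions") // assumed task name
            .buildAndFail(); // this fixture is expected to violate the conventions
        System.out.println(result.getOutput());
    }
}
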
Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.gradle.testkit; + +public class NamingConventionTests { + +} diff --git a/buildSrc/src/testKit/testingConventions/valid_setup_with_base/src/test/java/org/elasticsearch/gradle/testkit/Integration.java b/buildSrc/src/testKit/testingConventions/valid_setup_with_base/src/test/java/org/elasticsearch/gradle/testkit/Integration.java new file mode 100644 index 0000000000000..508adc24bc73a --- /dev/null +++ b/buildSrc/src/testKit/testingConventions/valid_setup_with_base/src/test/java/org/elasticsearch/gradle/testkit/Integration.java @@ -0,0 +1,23 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.gradle.testkit; + +public class Integration { + +} diff --git a/buildSrc/src/testKit/testingConventions/valid_setup_with_base/src/test/java/org/elasticsearch/gradle/testkit/NamingConventionIT.java b/buildSrc/src/testKit/testingConventions/valid_setup_with_base/src/test/java/org/elasticsearch/gradle/testkit/NamingConventionIT.java new file mode 100644 index 0000000000000..cdb7ff1f1a8f3 --- /dev/null +++ b/buildSrc/src/testKit/testingConventions/valid_setup_with_base/src/test/java/org/elasticsearch/gradle/testkit/NamingConventionIT.java @@ -0,0 +1,23 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +package org.elasticsearch.gradle.testkit; + +public class NamingConventionIT extends Integration { + +} diff --git a/buildSrc/src/testKit/testingConventions/valid_setup_with_base/src/test/java/org/elasticsearch/gradle/testkit/NamingConventionTests.java b/buildSrc/src/testKit/testingConventions/valid_setup_with_base/src/test/java/org/elasticsearch/gradle/testkit/NamingConventionTests.java new file mode 100644 index 0000000000000..12060909b1df4 --- /dev/null +++ b/buildSrc/src/testKit/testingConventions/valid_setup_with_base/src/test/java/org/elasticsearch/gradle/testkit/NamingConventionTests.java @@ -0,0 +1,23 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.gradle.testkit; + +public class NamingConventionTests extends Unit { + +} diff --git a/buildSrc/src/testKit/testingConventions/valid_setup_with_base/src/test/java/org/elasticsearch/gradle/testkit/Unit.java b/buildSrc/src/testKit/testingConventions/valid_setup_with_base/src/test/java/org/elasticsearch/gradle/testkit/Unit.java new file mode 100644 index 0000000000000..6a8ca7f758876 --- /dev/null +++ b/buildSrc/src/testKit/testingConventions/valid_setup_with_base/src/test/java/org/elasticsearch/gradle/testkit/Unit.java @@ -0,0 +1,23 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +package org.elasticsearch.gradle.testkit; + +public class Unit { + +} diff --git a/buildSrc/version.properties b/buildSrc/version.properties index 05dfe546f788c..40380af0823b6 100644 --- a/buildSrc/version.properties +++ b/buildSrc/version.properties @@ -1,5 +1,5 @@ elasticsearch = 7.0.0 -lucene = 8.0.0-snapshot-774e9aefbc +lucene = 8.0.0-snapshot-a1c6e642aa # optional dependencies spatial4j = 0.7 diff --git a/client/client-benchmark-noop-api-plugin/src/main/java/org/elasticsearch/plugin/noop/action/bulk/TransportNoopBulkAction.java b/client/client-benchmark-noop-api-plugin/src/main/java/org/elasticsearch/plugin/noop/action/bulk/TransportNoopBulkAction.java index 6580d213548f9..671e6b338e2df 100644 --- a/client/client-benchmark-noop-api-plugin/src/main/java/org/elasticsearch/plugin/noop/action/bulk/TransportNoopBulkAction.java +++ b/client/client-benchmark-noop-api-plugin/src/main/java/org/elasticsearch/plugin/noop/action/bulk/TransportNoopBulkAction.java @@ -32,13 +32,15 @@ import org.elasticsearch.tasks.Task; import org.elasticsearch.transport.TransportService; +import java.util.function.Supplier; + public class TransportNoopBulkAction extends HandledTransportAction<BulkRequest, BulkResponse> { private static final BulkItemResponse ITEM_RESPONSE = new BulkItemResponse(1, DocWriteRequest.OpType.UPDATE, new UpdateResponse(new ShardId("mock", "", 1), "mock_type", "1", 1L, DocWriteResponse.Result.CREATED)); @Inject public TransportNoopBulkAction(TransportService transportService, ActionFilters actionFilters) { - super(NoopBulkAction.NAME, transportService, actionFilters, BulkRequest::new); + super(NoopBulkAction.NAME, transportService, actionFilters, (Supplier<BulkRequest>) BulkRequest::new); } @Override diff --git a/client/rest-high-level/build.gradle b/client/rest-high-level/build.gradle index e5dbf74cec582..ed9b4451db350 100644 --- a/client/rest-high-level/build.gradle +++ b/client/rest-high-level/build.gradle @@ -16,9 +16,6 @@ * specific language governing permissions and limitations * under the License.
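The explicit (Supplier<BulkRequest>) cast above is the interesting part of the TransportNoopBulkAction hunk: a no-arg constructor reference matches more than one functional shape, so once the superclass offers overloads for both, the compiler needs the target type spelled out. A self-contained sketch of the same ambiguity; Reader here is a stand-in for Writeable.Reader, not the Elasticsearch type:

import java.util.function.Supplier;

public class OverloadDemo {
    interface Reader<T> { T read(); }                 // same () -> T shape as Supplier

    static <T> void accept(Supplier<T> supplier) {}
    static <T> void accept(Reader<T> reader) {}

    static class Thing {}

    public static void main(String[] args) {
        // accept(Thing::new);                        // does not compile: both overloads apply
        accept((Supplier<Thing>) Thing::new);         // the cast selects the Supplier overload
    }
}
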
*/ -import org.elasticsearch.gradle.test.RestIntegTestTask -import org.gradle.api.internal.provider.Providers - apply plugin: 'elasticsearch.build' apply plugin: 'elasticsearch.rest-test' apply plugin: 'nebula.maven-base-publish' @@ -103,6 +100,7 @@ integTestRunner { integTestCluster { systemProperty 'es.scripting.update.ctx_in_params', 'false' + setting 'reindex.remote.whitelist', ['"[::1]:*"', '"127.0.0.1:*"'] setting 'xpack.license.self_generated.type', 'trial' setting 'xpack.security.enabled', 'true' setting 'xpack.security.authc.token.enabled', 'true' diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/MLRequestConverters.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/MLRequestConverters.java index f24f12dd6b7a0..6b0a5d2642f02 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/MLRequestConverters.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/MLRequestConverters.java @@ -24,7 +24,7 @@ import org.apache.http.client.methods.HttpGet; import org.apache.http.client.methods.HttpPost; import org.apache.http.client.methods.HttpPut; -import org.apache.http.entity.ByteArrayEntity; +import org.apache.http.nio.entity.NByteArrayEntity; import org.apache.lucene.util.BytesRef; import org.elasticsearch.client.RequestConverters.EndpointBuilder; import org.elasticsearch.client.ml.CloseJobRequest; @@ -462,7 +462,7 @@ static Request postData(PostDataRequest postDataRequest) { BytesReference content = postDataRequest.getContent(); if (content != null) { BytesRef source = postDataRequest.getContent().toBytesRef(); - HttpEntity byteEntity = new ByteArrayEntity(source.bytes, + HttpEntity byteEntity = new NByteArrayEntity(source.bytes, source.offset, source.length, createContentType(postDataRequest.getXContentType())); @@ -686,7 +686,7 @@ static Request findFileStructure(FindFileStructureRequest findFileStructureReque BytesReference sample = findFileStructureRequest.getSample(); BytesRef source = sample.toBytesRef(); - HttpEntity byteEntity = new ByteArrayEntity(source.bytes, source.offset, source.length, createContentType(XContentType.JSON)); + HttpEntity byteEntity = new NByteArrayEntity(source.bytes, source.offset, source.length, createContentType(XContentType.JSON)); request.setEntity(byteEntity); return request; } diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java index 63620bd987712..7a6a562e4fb7f 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java @@ -25,8 +25,8 @@ import org.apache.http.client.methods.HttpHead; import org.apache.http.client.methods.HttpPost; import org.apache.http.client.methods.HttpPut; -import org.apache.http.entity.ByteArrayEntity; import org.apache.http.entity.ContentType; +import org.apache.http.nio.entity.NByteArrayEntity; import org.apache.lucene.util.BytesRef; import org.elasticsearch.action.DocWriteRequest; import org.elasticsearch.action.admin.cluster.health.ClusterHealthRequest; @@ -165,7 +165,9 @@ static Request bulk(BulkRequest bulkRequest) throws IOException { metadata.field("_index", action.index()); } if (Strings.hasLength(action.type())) { - metadata.field("_type", action.type()); + if (MapperService.SINGLE_MAPPING_NAME.equals(action.type()) == false) { + metadata.field("_type", action.type()); + } } if 
(Strings.hasLength(action.id())) { metadata.field("_id", action.id()); @@ -239,7 +241,7 @@ static Request bulk(BulkRequest bulkRequest) throws IOException { content.write(separator); } } - request.setEntity(new ByteArrayEntity(content.toByteArray(), 0, content.size(), requestContentType)); + request.setEntity(new NByteArrayEntity(content.toByteArray(), 0, content.size(), requestContentType)); return request; } @@ -322,7 +324,7 @@ static Request index(IndexRequest indexRequest) { BytesRef source = indexRequest.source().toBytesRef(); ContentType contentType = createContentType(indexRequest.getContentType()); - request.setEntity(new ByteArrayEntity(source.bytes, source.offset, source.length, contentType)); + request.setEntity(new NByteArrayEntity(source.bytes, source.offset, source.length, contentType)); return request; } @@ -431,7 +433,7 @@ static Request multiSearch(MultiSearchRequest multiSearchRequest) throws IOExcep XContent xContent = REQUEST_BODY_CONTENT_TYPE.xContent(); byte[] source = MultiSearchRequest.writeMultiLineFormat(multiSearchRequest, xContent); - request.setEntity(new ByteArrayEntity(source, createContentType(xContent.type()))); + request.setEntity(new NByteArrayEntity(source, createContentType(xContent.type()))); return request; } @@ -464,7 +466,7 @@ static Request multiSearchTemplate(MultiSearchTemplateRequest multiSearchTemplat XContent xContent = REQUEST_BODY_CONTENT_TYPE.xContent(); byte[] source = MultiSearchTemplateRequest.writeMultiLineFormat(multiSearchTemplateRequest, xContent); - request.setEntity(new ByteArrayEntity(source, createContentType(xContent.type()))); + request.setEntity(new NByteArrayEntity(source, createContentType(xContent.type()))); return request; } @@ -694,7 +696,7 @@ static HttpEntity createEntity(ToXContent toXContent, XContentType xContentType) static HttpEntity createEntity(ToXContent toXContent, XContentType xContentType, ToXContent.Params toXContentParams) throws IOException { BytesRef source = XContentHelper.toXContent(toXContent, xContentType, toXContentParams, false).toBytesRef(); - return new ByteArrayEntity(source.bytes, source.offset, source.length, createContentType(xContentType)); + return new NByteArrayEntity(source.bytes, source.offset, source.length, createContentType(xContentType)); } static String endpoint(String index, String type, String id) { diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/RestHighLevelClient.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/RestHighLevelClient.java index 9ab614cbc43f5..a9c6901d9820a 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/RestHighLevelClient.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/RestHighLevelClient.java @@ -158,11 +158,13 @@ import org.elasticsearch.search.aggregations.metrics.ParsedTDigestPercentiles; import org.elasticsearch.search.aggregations.metrics.ParsedTopHits; import org.elasticsearch.search.aggregations.metrics.ParsedValueCount; +import org.elasticsearch.search.aggregations.metrics.ParsedWeightedAvg; import org.elasticsearch.search.aggregations.metrics.ScriptedMetricAggregationBuilder; import org.elasticsearch.search.aggregations.metrics.StatsAggregationBuilder; import org.elasticsearch.search.aggregations.metrics.SumAggregationBuilder; import org.elasticsearch.search.aggregations.metrics.TopHitsAggregationBuilder; import org.elasticsearch.search.aggregations.metrics.ValueCountAggregationBuilder; +import 
org.elasticsearch.search.aggregations.metrics.WeightedAvgAggregationBuilder; import org.elasticsearch.search.aggregations.pipeline.DerivativePipelineAggregationBuilder; import org.elasticsearch.search.aggregations.pipeline.ExtendedStatsBucketPipelineAggregationBuilder; import org.elasticsearch.search.aggregations.pipeline.InternalBucketMetricValue; @@ -1732,6 +1734,7 @@ static List getDefaultNamedXContents() { map.put(MaxAggregationBuilder.NAME, (p, c) -> ParsedMax.fromXContent(p, (String) c)); map.put(SumAggregationBuilder.NAME, (p, c) -> ParsedSum.fromXContent(p, (String) c)); map.put(AvgAggregationBuilder.NAME, (p, c) -> ParsedAvg.fromXContent(p, (String) c)); + map.put(WeightedAvgAggregationBuilder.NAME, (p, c) -> ParsedWeightedAvg.fromXContent(p, (String) c)); map.put(ValueCountAggregationBuilder.NAME, (p, c) -> ParsedValueCount.fromXContent(p, (String) c)); map.put(InternalSimpleValue.NAME, (p, c) -> ParsedSimpleValue.fromXContent(p, (String) c)); map.put(DerivativePipelineAggregationBuilder.NAME, (p, c) -> ParsedDerivative.fromXContent(p, (String) c)); diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/WatcherRequestConverters.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/WatcherRequestConverters.java index 33051ae0e432a..a5c6d0dd1810e 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/WatcherRequestConverters.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/WatcherRequestConverters.java @@ -23,8 +23,8 @@ import org.apache.http.client.methods.HttpGet; import org.apache.http.client.methods.HttpPost; import org.apache.http.client.methods.HttpPut; -import org.apache.http.entity.ByteArrayEntity; import org.apache.http.entity.ContentType; +import org.apache.http.nio.entity.NByteArrayEntity; import org.elasticsearch.client.watcher.AckWatchRequest; import org.elasticsearch.client.watcher.ActivateWatchRequest; import org.elasticsearch.client.watcher.DeactivateWatchRequest; @@ -75,7 +75,7 @@ static Request putWatch(PutWatchRequest putWatchRequest) { } ContentType contentType = RequestConverters.createContentType(putWatchRequest.xContentType()); BytesReference source = putWatchRequest.getSource(); - request.setEntity(new ByteArrayEntity(source.toBytesRef().bytes, 0, source.length(), contentType)); + request.setEntity(new NByteArrayEntity(source.toBytesRef().bytes, 0, source.length(), contentType)); return request; } diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ccr/AutoFollowStats.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ccr/AutoFollowStats.java index bb286b6e5d59b..394adb3b19182 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ccr/AutoFollowStats.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/ccr/AutoFollowStats.java @@ -47,7 +47,9 @@ public final class AutoFollowStats { static final ParseField LAST_SEEN_METADATA_VERSION = new ParseField("last_seen_metadata_version"); @SuppressWarnings("unchecked") - static final ConstructingObjectParser STATS_PARSER = new ConstructingObjectParser<>("auto_follow_stats", + static final ConstructingObjectParser STATS_PARSER = new ConstructingObjectParser<>( + "auto_follow_stats", + true, args -> new AutoFollowStats( (Long) args[0], (Long) args[1], @@ -65,11 +67,13 @@ public final class AutoFollowStats { static final ConstructingObjectParser>, Void> AUTO_FOLLOW_EXCEPTIONS_PARSER = new ConstructingObjectParser<>( "auto_follow_stats_errors", + true, args -> new 
AbstractMap.SimpleEntry<>((String) args[0], Tuple.tuple((Long) args[1], (ElasticsearchException) args[2]))); private static final ConstructingObjectParser, Void> AUTO_FOLLOWED_CLUSTERS_PARSER = new ConstructingObjectParser<>( "auto_followed_clusters", + true, args -> new AbstractMap.SimpleEntry<>((String) args[0], new AutoFollowedCluster((Long) args[1], (Long) args[2]))); static { diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ccr/CcrStatsResponse.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ccr/CcrStatsResponse.java index 889a96683bfb3..54f79892c1d08 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ccr/CcrStatsResponse.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/ccr/CcrStatsResponse.java @@ -28,7 +28,9 @@ public final class CcrStatsResponse { static final ParseField AUTO_FOLLOW_STATS_FIELD = new ParseField("auto_follow_stats"); static final ParseField FOLLOW_STATS_FIELD = new ParseField("follow_stats"); - private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>("indices", + private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( + "indices", + true, args -> { AutoFollowStats autoFollowStats = (AutoFollowStats) args[0]; IndicesFollowStats indicesFollowStats = (IndicesFollowStats) args[1]; diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ccr/GetAutoFollowPatternResponse.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ccr/GetAutoFollowPatternResponse.java index ce42c98e57c41..d05ab3f3ee363 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ccr/GetAutoFollowPatternResponse.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/ccr/GetAutoFollowPatternResponse.java @@ -42,7 +42,7 @@ public final class GetAutoFollowPatternResponse { static final ParseField PATTERN_FIELD = new ParseField("pattern"); private static final ConstructingObjectParser, Void> ENTRY_PARSER = new ConstructingObjectParser<>( - "get_auto_follow_pattern_response", args -> new AbstractMap.SimpleEntry<>((String) args[0], (Pattern) args[1])); + "get_auto_follow_pattern_response", true, args -> new AbstractMap.SimpleEntry<>((String) args[0], (Pattern) args[1])); static { ENTRY_PARSER.declareString(ConstructingObjectParser.constructorArg(), NAME_FIELD); @@ -50,7 +50,7 @@ public final class GetAutoFollowPatternResponse { } private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "get_auto_follow_pattern_response", args -> { + "get_auto_follow_pattern_response", true, args -> { @SuppressWarnings("unchecked") List> entries = (List>) args[0]; return new GetAutoFollowPatternResponse(new TreeMap<>(entries.stream() @@ -92,7 +92,7 @@ public static class Pattern extends FollowConfig { @SuppressWarnings("unchecked") private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "pattern", args -> new Pattern((String) args[0], (List) args[1], (String) args[2])); + "pattern", true, args -> new Pattern((String) args[0], (List) args[1], (String) args[2])); static { PARSER.declareString(ConstructingObjectParser.constructorArg(), PutFollowRequest.REMOTE_CLUSTER_FIELD); diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ccr/IndicesFollowStats.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ccr/IndicesFollowStats.java index 02e2fc4f4ed18..7d3af08577b16 100644 --- 
a/client/rest-high-level/src/main/java/org/elasticsearch/client/ccr/IndicesFollowStats.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/ccr/IndicesFollowStats.java @@ -41,6 +41,7 @@ public final class IndicesFollowStats { private static final ConstructingObjectParser>, Void> ENTRY_PARSER = new ConstructingObjectParser<>( "entry", + true, args -> { String index = (String) args[0]; @SuppressWarnings("unchecked") @@ -54,7 +55,9 @@ public final class IndicesFollowStats { ENTRY_PARSER.declareObjectArray(ConstructingObjectParser.constructorArg(), ShardFollowStats.PARSER, SHARDS_FIELD); } - static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>("indices", + static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( + "indices", + true, args -> { @SuppressWarnings("unchecked") List>> entries = (List>>) args[0]; @@ -116,6 +119,7 @@ public static final class ShardFollowStats { static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( "shard-follow-stats", + true, args -> new ShardFollowStats( (String) args[0], (String) args[1], @@ -152,6 +156,7 @@ public static final class ShardFollowStats { static final ConstructingObjectParser>, Void> READ_EXCEPTIONS_ENTRY_PARSER = new ConstructingObjectParser<>( "shard-follow-stats-read-exceptions-entry", + true, args -> new AbstractMap.SimpleEntry<>((long) args[0], Tuple.tuple((Integer) args[1], (ElasticsearchException)args[2]))); static { diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ccr/PutFollowResponse.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ccr/PutFollowResponse.java index 2d928b859882a..3841b868e73ab 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ccr/PutFollowResponse.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/ccr/PutFollowResponse.java @@ -33,7 +33,7 @@ public final class PutFollowResponse { static final ParseField INDEX_FOLLOWING_STARTED = new ParseField("index_following_started"); private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "put_follow_response", args -> new PutFollowResponse((boolean) args[0], (boolean) args[1], (boolean) args[2])); + "put_follow_response", true, args -> new PutFollowResponse((boolean) args[0], (boolean) args[1], (boolean) args[2])); static { PARSER.declareBoolean(ConstructingObjectParser.constructorArg(), FOLLOW_INDEX_CREATED); diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/indexlifecycle/FreezeAction.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/indexlifecycle/FreezeAction.java new file mode 100644 index 0000000000000..ecc054c132d67 --- /dev/null +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/indexlifecycle/FreezeAction.java @@ -0,0 +1,73 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. 
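A recurring edit in the CCR response classes above is the extra boolean argument to ConstructingObjectParser: passing true makes the parser lenient, so unknown fields added by newer servers are skipped instead of failing the client. A minimal sketch of the pattern, using a hypothetical MyResponse class:

import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;

public class MyResponse {
    private static final ConstructingObjectParser<MyResponse, Void> PARSER =
        new ConstructingObjectParser<>(
            "my_response",
            true, // lenient: ignore fields this client version does not know about
            args -> new MyResponse((String) args[0]));

    static {
        PARSER.declareString(ConstructingObjectParser.constructorArg(), new ParseField("name"));
    }

    private final String name;

    MyResponse(String name) {
        this.name = name;
    }
}
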
See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.client.indexlifecycle; + +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.xcontent.ObjectParser; +import org.elasticsearch.common.xcontent.ToXContentObject; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; + +import java.io.IOException; + +public class FreezeAction implements LifecycleAction, ToXContentObject { + public static final String NAME = "freeze"; + + private static final ObjectParser PARSER = new ObjectParser<>(NAME, FreezeAction::new); + + public static FreezeAction parse(XContentParser parser) { + return PARSER.apply(parser, null); + } + + public FreezeAction() { + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + builder.endObject(); + return builder; + } + + @Override + public String getName() { + return NAME; + } + + @Override + public int hashCode() { + return 1; + } + + @Override + public boolean equals(Object obj) { + if (obj == null) { + return false; + } + if (obj.getClass() != getClass()) { + return false; + } + return true; + } + + @Override + public String toString() { + return Strings.toString(this); + } +} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/indexlifecycle/IndexLifecycleNamedXContentProvider.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/indexlifecycle/IndexLifecycleNamedXContentProvider.java index 22935f197731c..1c22f1e0654f8 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/indexlifecycle/IndexLifecycleNamedXContentProvider.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/indexlifecycle/IndexLifecycleNamedXContentProvider.java @@ -50,7 +50,10 @@ public List getNamedXContentParsers() { RolloverAction::parse), new NamedXContentRegistry.Entry(LifecycleAction.class, new ParseField(ShrinkAction.NAME), - ShrinkAction::parse) + ShrinkAction::parse), + new NamedXContentRegistry.Entry(LifecycleAction.class, + new ParseField(FreezeAction.NAME), + FreezeAction::parse) ); } } diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/indexlifecycle/LifecyclePolicy.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/indexlifecycle/LifecyclePolicy.java index 2dc4e3644d1e4..21a052500a4ae 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/indexlifecycle/LifecyclePolicy.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/indexlifecycle/LifecyclePolicy.java @@ -59,7 +59,7 @@ public class LifecyclePolicy implements ToXContentObject { ALLOWED_ACTIONS.put("hot", Sets.newHashSet(RolloverAction.NAME)); ALLOWED_ACTIONS.put("warm", Sets.newHashSet(AllocateAction.NAME, ForceMergeAction.NAME, ReadOnlyAction.NAME, ShrinkAction.NAME)); - ALLOWED_ACTIONS.put("cold", Sets.newHashSet(AllocateAction.NAME)); + ALLOWED_ACTIONS.put("cold", Sets.newHashSet(AllocateAction.NAME, FreezeAction.NAME)); ALLOWED_ACTIONS.put("delete", Sets.newHashSet(DeleteAction.NAME)); } diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/datafeed/DatafeedConfig.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/datafeed/DatafeedConfig.java index b5bd1367beb61..f56da88303d88 100644 --- 
a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/datafeed/DatafeedConfig.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/datafeed/DatafeedConfig.java @@ -56,7 +56,6 @@ public class DatafeedConfig implements ToXContentObject { public static final ParseField FREQUENCY = new ParseField("frequency"); public static final ParseField INDEXES = new ParseField("indexes"); public static final ParseField INDICES = new ParseField("indices"); - public static final ParseField TYPES = new ParseField("types"); public static final ParseField QUERY = new ParseField("query"); public static final ParseField SCROLL_SIZE = new ParseField("scroll_size"); public static final ParseField AGGREGATIONS = new ParseField("aggregations"); @@ -73,7 +72,6 @@ public class DatafeedConfig implements ToXContentObject { PARSER.declareStringArray(Builder::setIndices, INDEXES); PARSER.declareStringArray(Builder::setIndices, INDICES); - PARSER.declareStringArray(Builder::setTypes, TYPES); PARSER.declareString((builder, val) -> builder.setQueryDelay(TimeValue.parseTimeValue(val, QUERY_DELAY.getPreferredName())), QUERY_DELAY); PARSER.declareString((builder, val) -> @@ -103,7 +101,6 @@ private static BytesReference parseBytes(XContentParser parser) throws IOExcepti private final TimeValue queryDelay; private final TimeValue frequency; private final List indices; - private final List types; private final BytesReference query; private final BytesReference aggregations; private final List scriptFields; @@ -112,15 +109,14 @@ private static BytesReference parseBytes(XContentParser parser) throws IOExcepti private final DelayedDataCheckConfig delayedDataCheckConfig; - private DatafeedConfig(String id, String jobId, TimeValue queryDelay, TimeValue frequency, List indices, List types, - BytesReference query, BytesReference aggregations, List scriptFields, - Integer scrollSize, ChunkingConfig chunkingConfig, DelayedDataCheckConfig delayedDataCheckConfig) { + private DatafeedConfig(String id, String jobId, TimeValue queryDelay, TimeValue frequency, List indices, BytesReference query, + BytesReference aggregations, List scriptFields, Integer scrollSize, + ChunkingConfig chunkingConfig, DelayedDataCheckConfig delayedDataCheckConfig) { this.id = id; this.jobId = jobId; this.queryDelay = queryDelay; this.frequency = frequency; this.indices = indices == null ? null : Collections.unmodifiableList(indices); - this.types = types == null ? null : Collections.unmodifiableList(types); this.query = query; this.aggregations = aggregations; this.scriptFields = scriptFields == null ? 
null : Collections.unmodifiableList(scriptFields); @@ -149,10 +145,6 @@ public List getIndices() { return indices; } - public List getTypes() { - return types; - } - public Integer getScrollSize() { return scrollSize; } @@ -191,9 +183,6 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws if (indices != null) { builder.field(INDICES.getPreferredName(), indices); } - if (types != null) { - builder.field(TYPES.getPreferredName(), types); - } if (query != null) { builder.field(QUERY.getPreferredName(), asMap(query)); } @@ -251,7 +240,6 @@ public boolean equals(Object other) { && Objects.equals(this.frequency, that.frequency) && Objects.equals(this.queryDelay, that.queryDelay) && Objects.equals(this.indices, that.indices) - && Objects.equals(this.types, that.types) && Objects.equals(asMap(this.query), asMap(that.query)) && Objects.equals(this.scrollSize, that.scrollSize) && Objects.equals(asMap(this.aggregations), asMap(that.aggregations)) @@ -267,7 +255,7 @@ public boolean equals(Object other) { */ @Override public int hashCode() { - return Objects.hash(id, jobId, frequency, queryDelay, indices, types, asMap(query), scrollSize, asMap(aggregations), scriptFields, + return Objects.hash(id, jobId, frequency, queryDelay, indices, asMap(query), scrollSize, asMap(aggregations), scriptFields, chunkingConfig, delayedDataCheckConfig); } @@ -282,7 +270,6 @@ public static class Builder { private TimeValue queryDelay; private TimeValue frequency; private List indices; - private List types; private BytesReference query; private BytesReference aggregations; private List scriptFields; @@ -301,7 +288,6 @@ public Builder(DatafeedConfig config) { this.queryDelay = config.queryDelay; this.frequency = config.frequency; this.indices = config.indices == null ? null : new ArrayList<>(config.indices); - this.types = config.types == null ? null : new ArrayList<>(config.types); this.query = config.query; this.aggregations = config.aggregations; this.scriptFields = config.scriptFields == null ? null : new ArrayList<>(config.scriptFields); @@ -319,11 +305,6 @@ public Builder setIndices(String... 
indices) { return setIndices(Arrays.asList(indices)); } - public Builder setTypes(List types) { - this.types = types; - return this; - } - public Builder setQueryDelay(TimeValue queryDelay) { this.queryDelay = queryDelay; return this; @@ -396,7 +377,7 @@ public Builder setDelayedDataCheckConfig(DelayedDataCheckConfig delayedDataCheck } public DatafeedConfig build() { - return new DatafeedConfig(id, jobId, queryDelay, frequency, indices, types, query, aggregations, scriptFields, scrollSize, + return new DatafeedConfig(id, jobId, queryDelay, frequency, indices, query, aggregations, scriptFields, scrollSize, chunkingConfig, delayedDataCheckConfig); } diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/datafeed/DatafeedUpdate.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/datafeed/DatafeedUpdate.java index 5daacdd9a0588..15598de91d468 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/datafeed/DatafeedUpdate.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/datafeed/DatafeedUpdate.java @@ -60,7 +60,6 @@ public class DatafeedUpdate implements ToXContentObject { PARSER.declareString(Builder::setJobId, Job.ID); PARSER.declareStringArray(Builder::setIndices, DatafeedConfig.INDEXES); PARSER.declareStringArray(Builder::setIndices, DatafeedConfig.INDICES); - PARSER.declareStringArray(Builder::setTypes, DatafeedConfig.TYPES); PARSER.declareString((builder, val) -> builder.setQueryDelay( TimeValue.parseTimeValue(val, DatafeedConfig.QUERY_DELAY.getPreferredName())), DatafeedConfig.QUERY_DELAY); PARSER.declareString((builder, val) -> builder.setFrequency( @@ -93,7 +92,6 @@ private static BytesReference parseBytes(XContentParser parser) throws IOExcepti private final TimeValue queryDelay; private final TimeValue frequency; private final List indices; - private final List types; private final BytesReference query; private final BytesReference aggregations; private final List scriptFields; @@ -101,15 +99,14 @@ private static BytesReference parseBytes(XContentParser parser) throws IOExcepti private final ChunkingConfig chunkingConfig; private final DelayedDataCheckConfig delayedDataCheckConfig; - private DatafeedUpdate(String id, String jobId, TimeValue queryDelay, TimeValue frequency, List indices, List types, - BytesReference query, BytesReference aggregations, List scriptFields, - Integer scrollSize, ChunkingConfig chunkingConfig, DelayedDataCheckConfig delayedDataCheckConfig) { + private DatafeedUpdate(String id, String jobId, TimeValue queryDelay, TimeValue frequency, List indices, BytesReference query, + BytesReference aggregations, List scriptFields, Integer scrollSize, + ChunkingConfig chunkingConfig, DelayedDataCheckConfig delayedDataCheckConfig) { this.id = id; this.jobId = jobId; this.queryDelay = queryDelay; this.frequency = frequency; this.indices = indices; - this.types = types; this.query = query; this.aggregations = aggregations; this.scriptFields = scriptFields; @@ -143,7 +140,6 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws if (aggregations != null) { builder.field(DatafeedConfig.AGGREGATIONS.getPreferredName(), asMap(aggregations)); } - addOptionalField(builder, DatafeedConfig.TYPES, types); if (scriptFields != null) { builder.startObject(DatafeedConfig.SCRIPT_FIELDS.getPreferredName()); for (SearchSourceBuilder.ScriptField scriptField : scriptFields) { @@ -182,10 +178,6 @@ public List getIndices() { return indices; } - public List getTypes() { - 
return types; - } - public Integer getScrollSize() { return scrollSize; } @@ -240,7 +232,6 @@ public boolean equals(Object other) { && Objects.equals(this.frequency, that.frequency) && Objects.equals(this.queryDelay, that.queryDelay) && Objects.equals(this.indices, that.indices) - && Objects.equals(this.types, that.types) && Objects.equals(asMap(this.query), asMap(that.query)) && Objects.equals(this.scrollSize, that.scrollSize) && Objects.equals(asMap(this.aggregations), asMap(that.aggregations)) @@ -256,7 +247,7 @@ public boolean equals(Object other) { */ @Override public int hashCode() { - return Objects.hash(id, jobId, frequency, queryDelay, indices, types, asMap(query), scrollSize, asMap(aggregations), scriptFields, + return Objects.hash(id, jobId, frequency, queryDelay, indices, asMap(query), scrollSize, asMap(aggregations), scriptFields, chunkingConfig, delayedDataCheckConfig); } @@ -271,7 +262,6 @@ public static class Builder { private TimeValue queryDelay; private TimeValue frequency; private List indices; - private List types; private BytesReference query; private BytesReference aggregations; private List scriptFields; @@ -289,7 +279,6 @@ public Builder(DatafeedUpdate config) { this.queryDelay = config.queryDelay; this.frequency = config.frequency; this.indices = config.indices; - this.types = config.types; this.query = config.query; this.aggregations = config.aggregations; this.scriptFields = config.scriptFields; @@ -312,11 +301,6 @@ public Builder setIndices(String... indices) { return setIndices(Arrays.asList(indices)); } - public Builder setTypes(List types) { - this.types = types; - return this; - } - public Builder setQueryDelay(TimeValue queryDelay) { this.queryDelay = queryDelay; return this; @@ -380,7 +364,7 @@ public Builder setDelayedDataCheckConfig(DelayedDataCheckConfig delayedDataCheck } public DatafeedUpdate build() { - return new DatafeedUpdate(id, jobId, queryDelay, frequency, indices, types, query, aggregations, scriptFields, scrollSize, + return new DatafeedUpdate(id, jobId, queryDelay, frequency, indices, query, aggregations, scriptFields, scrollSize, chunkingConfig, delayedDataCheckConfig); } diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/security/InvalidateTokenResponse.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/security/InvalidateTokenResponse.java index 8efc527d2142d..e70036ff3030d 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/security/InvalidateTokenResponse.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/security/InvalidateTokenResponse.java @@ -42,13 +42,11 @@ */ public final class InvalidateTokenResponse { - public static final ParseField CREATED = new ParseField("created"); public static final ParseField INVALIDATED_TOKENS = new ParseField("invalidated_tokens"); public static final ParseField PREVIOUSLY_INVALIDATED_TOKENS = new ParseField("previously_invalidated_tokens"); public static final ParseField ERROR_COUNT = new ParseField("error_count"); public static final ParseField ERRORS = new ParseField("error_details"); - private final boolean created; private final int invalidatedTokens; private final int previouslyInvalidatedTokens; private List errors; @@ -57,19 +55,17 @@ public final class InvalidateTokenResponse { private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( "tokens_invalidation_result", true, // we parse but do not use the count of errors as we implicitly have this in the size of the Exceptions list - 
args -> new InvalidateTokenResponse((boolean) args[0], (int) args[1], (int) args[2], (List) args[4])); + args -> new InvalidateTokenResponse((int) args[0], (int) args[1], (List) args[3])); static { - PARSER.declareBoolean(constructorArg(), CREATED); PARSER.declareInt(constructorArg(), INVALIDATED_TOKENS); PARSER.declareInt(constructorArg(), PREVIOUSLY_INVALIDATED_TOKENS); PARSER.declareInt(constructorArg(), ERROR_COUNT); PARSER.declareObjectArray(optionalConstructorArg(), (p, c) -> ElasticsearchException.fromXContent(p), ERRORS); } - public InvalidateTokenResponse(boolean created, int invalidatedTokens, int previouslyInvalidatedTokens, + public InvalidateTokenResponse(int invalidatedTokens, int previouslyInvalidatedTokens, @Nullable List errors) { - this.created = created; this.invalidatedTokens = invalidatedTokens; this.previouslyInvalidatedTokens = previouslyInvalidatedTokens; if (null == errors) { @@ -79,10 +75,6 @@ public InvalidateTokenResponse(boolean created, int invalidatedTokens, int previ } } - public boolean isCreated() { - return created; - } - public int getInvalidatedTokens() { return invalidatedTokens; } @@ -104,15 +96,14 @@ public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; InvalidateTokenResponse that = (InvalidateTokenResponse) o; - return created == that.created && - invalidatedTokens == that.invalidatedTokens && + return invalidatedTokens == that.invalidatedTokens && previouslyInvalidatedTokens == that.previouslyInvalidatedTokens && Objects.equals(errors, that.errors); } @Override public int hashCode() { - return Objects.hash(created, invalidatedTokens, previouslyInvalidatedTokens, errors); + return Objects.hash(invalidatedTokens, previouslyInvalidatedTokens, errors); } public static InvalidateTokenResponse fromXContent(XContentParser parser) throws IOException { diff --git a/client/rest-high-level/src/main/resources/forbidden/rest-high-level-signatures.txt b/client/rest-high-level/src/main/resources/forbidden/rest-high-level-signatures.txt index a9214e9333c4e..b9224ffe64971 100644 --- a/client/rest-high-level/src/main/resources/forbidden/rest-high-level-signatures.txt +++ b/client/rest-high-level/src/main/resources/forbidden/rest-high-level-signatures.txt @@ -30,3 +30,7 @@ org.elasticsearch.common.logging.PrefixLogger @defaultMessage We can't rely on log4j2 being on the classpath so don't log deprecations! 
org.elasticsearch.common.xcontent.LoggingDeprecationHandler + +@defaultMessage Use Nonblocking org.apache.http.nio.entity.NByteArrayEntity +org.apache.http.entity.ByteArrayEntity +org.apache.http.entity.StringEntity diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/BulkProcessorIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/BulkProcessorIT.java index 2b870dbc475ea..72ffcd7c5062e 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/BulkProcessorIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/BulkProcessorIT.java @@ -35,6 +35,8 @@ import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.index.mapper.MapperService; +import org.elasticsearch.rest.action.document.RestBulkAction; import org.elasticsearch.search.SearchHit; import org.hamcrest.Matcher; import org.hamcrest.Matchers; @@ -70,8 +72,15 @@ public class BulkProcessorIT extends ESRestHighLevelClientTestCase { private static BulkProcessor.Builder initBulkProcessorBuilder(BulkProcessor.Listener listener) { return BulkProcessor.builder( - (request, bulkListener) -> highLevelClient().bulkAsync(request, RequestOptions.DEFAULT, bulkListener), listener); + (request, bulkListener) -> highLevelClient().bulkAsync(request, RequestOptions.DEFAULT, + bulkListener), listener); } + + private static BulkProcessor.Builder initBulkProcessorBuilderUsingTypes(BulkProcessor.Listener listener) { + return BulkProcessor.builder( + (request, bulkListener) -> highLevelClient().bulkAsync(request, expectWarnings(RestBulkAction.TYPES_DEPRECATION_MESSAGE), + bulkListener), listener); + } public void testThatBulkProcessorCountIsCorrect() throws Exception { final CountDownLatch latch = new CountDownLatch(1); @@ -320,35 +329,105 @@ public void testGlobalParametersAndSingleRequest() throws Exception { public void testGlobalParametersAndBulkProcessor() throws Exception { createIndexWithMultipleShards("test"); - final CountDownLatch latch = new CountDownLatch(1); - BulkProcessorTestListener listener = new BulkProcessorTestListener(latch); createFieldAddingPipleine("pipeline_id", "fieldNameXYZ", "valueXYZ"); + final String customType = "testType"; + final String ignoredType = "ignoredType"; int numDocs = randomIntBetween(10, 10); - try (BulkProcessor processor = initBulkProcessorBuilder(listener) - //let's make sure that the bulk action limit trips, one single execution will index all the documents - .setConcurrentRequests(randomIntBetween(0, 1)).setBulkActions(numDocs) - .setFlushInterval(TimeValue.timeValueHours(24)).setBulkSize(new ByteSizeValue(1, ByteSizeUnit.GB)) - .setGlobalIndex("test") - .setGlobalType("_doc") - .setGlobalRouting("routing") - .setGlobalPipeline("pipeline_id") - .build()) { - - indexDocs(processor, numDocs, null, null, "test", "pipeline_id"); - latch.await(); - - assertThat(listener.beforeCounts.get(), equalTo(1)); - assertThat(listener.afterCounts.get(), equalTo(1)); - assertThat(listener.bulkFailures.size(), equalTo(0)); - assertResponseItems(listener.bulkItems, numDocs); - - Iterable hits = searchAll(new SearchRequest("test").routing("routing")); + { + final CountDownLatch latch = new CountDownLatch(1); + BulkProcessorTestListener listener = new BulkProcessorTestListener(latch); + //Check that untyped document additions inherit the global type + String globalType = customType; + String localType = null; + try (BulkProcessor 
processor = initBulkProcessorBuilderUsingTypes(listener) + //let's make sure that the bulk action limit trips, one single execution will index all the documents + .setConcurrentRequests(randomIntBetween(0, 1)).setBulkActions(numDocs) + .setFlushInterval(TimeValue.timeValueHours(24)).setBulkSize(new ByteSizeValue(1, ByteSizeUnit.GB)) + .setGlobalIndex("test") + .setGlobalType(globalType) + .setGlobalRouting("routing") + .setGlobalPipeline("pipeline_id") + .build()) { + + indexDocs(processor, numDocs, null, localType, "test", globalType, "pipeline_id"); + latch.await(); + + assertThat(listener.beforeCounts.get(), equalTo(1)); + assertThat(listener.afterCounts.get(), equalTo(1)); + assertThat(listener.bulkFailures.size(), equalTo(0)); + assertResponseItems(listener.bulkItems, numDocs, globalType); + + Iterable hits = searchAll(new SearchRequest("test").routing("routing")); + + assertThat(hits, everyItem(hasProperty(fieldFromSource("fieldNameXYZ"), equalTo("valueXYZ")))); + assertThat(hits, everyItem(Matchers.allOf(hasIndex("test"), hasType(globalType)))); + assertThat(hits, containsInAnyOrder(expectedIds(numDocs))); + } - assertThat(hits, everyItem(hasProperty(fieldFromSource("fieldNameXYZ"), equalTo("valueXYZ")))); - assertThat(hits, everyItem(Matchers.allOf(hasIndex("test"), hasType("_doc")))); - assertThat(hits, containsInAnyOrder(expectedIds(numDocs))); } + { + //Check that typed document additions don't inherit the global type + String globalType = ignoredType; + String localType = customType; + final CountDownLatch latch = new CountDownLatch(1); + BulkProcessorTestListener listener = new BulkProcessorTestListener(latch); + try (BulkProcessor processor = initBulkProcessorBuilderUsingTypes(listener) + //let's make sure that the bulk action limit trips, one single execution will index all the documents + .setConcurrentRequests(randomIntBetween(0, 1)).setBulkActions(numDocs) + .setFlushInterval(TimeValue.timeValueHours(24)).setBulkSize(new ByteSizeValue(1, ByteSizeUnit.GB)) + .setGlobalIndex("test") + .setGlobalType(globalType) + .setGlobalRouting("routing") + .setGlobalPipeline("pipeline_id") + .build()) { + indexDocs(processor, numDocs, null, localType, "test", globalType, "pipeline_id"); + latch.await(); + + assertThat(listener.beforeCounts.get(), equalTo(1)); + assertThat(listener.afterCounts.get(), equalTo(1)); + assertThat(listener.bulkFailures.size(), equalTo(0)); + assertResponseItems(listener.bulkItems, numDocs, localType); + + Iterable hits = searchAll(new SearchRequest("test").routing("routing")); + + assertThat(hits, everyItem(hasProperty(fieldFromSource("fieldNameXYZ"), equalTo("valueXYZ")))); + assertThat(hits, everyItem(Matchers.allOf(hasIndex("test"), hasType(localType)))); + assertThat(hits, containsInAnyOrder(expectedIds(numDocs))); + } + } + { + //Check that untyped document additions and untyped global inherit the established custom type + // (the custom document type introduced to the mapping by the earlier code in this test) + String globalType = null; + String localType = null; + final CountDownLatch latch = new CountDownLatch(1); + BulkProcessorTestListener listener = new BulkProcessorTestListener(latch); + try (BulkProcessor processor = initBulkProcessorBuilder(listener) + //let's make sure that the bulk action limit trips, one single execution will index all the documents + .setConcurrentRequests(randomIntBetween(0, 1)).setBulkActions(numDocs) + .setFlushInterval(TimeValue.timeValueHours(24)).setBulkSize(new ByteSizeValue(1, ByteSizeUnit.GB)) + 
.setGlobalIndex("test") + .setGlobalType(globalType) + .setGlobalRouting("routing") + .setGlobalPipeline("pipeline_id") + .build()) { + indexDocs(processor, numDocs, null, localType, "test", globalType, "pipeline_id"); + latch.await(); + + assertThat(listener.beforeCounts.get(), equalTo(1)); + assertThat(listener.afterCounts.get(), equalTo(1)); + assertThat(listener.bulkFailures.size(), equalTo(0)); + assertResponseItems(listener.bulkItems, numDocs, MapperService.SINGLE_MAPPING_NAME); + + Iterable hits = searchAll(new SearchRequest("test").routing("routing")); + + assertThat(hits, everyItem(hasProperty(fieldFromSource("fieldNameXYZ"), equalTo("valueXYZ")))); + assertThat(hits, everyItem(Matchers.allOf(hasIndex("test"), hasType(customType)))); + assertThat(hits, containsInAnyOrder(expectedIds(numDocs))); + } + } + assertWarnings(RestBulkAction.TYPES_DEPRECATION_MESSAGE); } @SuppressWarnings("unchecked") @@ -359,15 +438,15 @@ private Matcher[] expectedIds(int numDocs) { .>toArray(Matcher[]::new); } - private static MultiGetRequest indexDocs(BulkProcessor processor, int numDocs, String localIndex, + private static MultiGetRequest indexDocs(BulkProcessor processor, int numDocs, String localIndex, String localType, String globalIndex, String globalType, String globalPipeline) throws Exception { MultiGetRequest multiGetRequest = new MultiGetRequest(); for (int i = 1; i <= numDocs; i++) { if (randomBoolean()) { - processor.add(new IndexRequest(localIndex).id(Integer.toString(i)) + processor.add(new IndexRequest(localIndex, localType, Integer.toString(i)) .source(XContentType.JSON, "field", randomRealisticUnicodeOfLengthBetween(1, 30))); } else { - BytesArray data = bytesBulkRequest(localIndex, "_doc", i); + BytesArray data = bytesBulkRequest(localIndex, localType, i); processor.add(data, globalIndex, globalType, globalPipeline, null, XContentType.JSON); } multiGetRequest.add(localIndex, Integer.toString(i)); @@ -396,15 +475,19 @@ private static BytesArray bytesBulkRequest(String localIndex, String localType, } private static MultiGetRequest indexDocs(BulkProcessor processor, int numDocs) throws Exception { - return indexDocs(processor, numDocs, "test", null, null, null); + return indexDocs(processor, numDocs, "test", null, null, null, null); } - + private static void assertResponseItems(List bulkItemResponses, int numDocs) { + assertResponseItems(bulkItemResponses, numDocs, MapperService.SINGLE_MAPPING_NAME); + } + + private static void assertResponseItems(List bulkItemResponses, int numDocs, String expectedType) { assertThat(bulkItemResponses.size(), is(numDocs)); int i = 1; for (BulkItemResponse bulkItemResponse : bulkItemResponses) { assertThat(bulkItemResponse.getIndex(), equalTo("test")); - assertThat(bulkItemResponse.getType(), equalTo("_doc")); + assertThat(bulkItemResponse.getType(), equalTo(expectedType)); assertThat(bulkItemResponse.getId(), equalTo(Integer.toString(i++))); assertThat("item " + i + " failed with cause: " + bulkItemResponse.getFailureMessage(), bulkItemResponse.isFailed(), equalTo(false)); diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/BulkProcessorRetryIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/BulkProcessorRetryIT.java index 6262a5b968513..e3b280225cb7b 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/BulkProcessorRetryIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/BulkProcessorRetryIT.java @@ -143,7 +143,7 @@ public void afterBulk(long executionId, 
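Taken together, the three blocks above pin down the precedence rules for global parameters on BulkProcessor: a type set on the individual request always wins, the global type fills in only for untyped requests, and when neither is set the server falls back to the single mapping type (MapperService.SINGLE_MAPPING_NAME, i.e. "_doc"). A condensed sketch of the builder wiring all three blocks share, with globalType varying per block:

    BulkProcessor processor = initBulkProcessorBuilderUsingTypes(listener) // plain builder in the typeless block
        // trip the flush on document count, never on size or interval
        .setConcurrentRequests(randomIntBetween(0, 1)).setBulkActions(numDocs)
        .setFlushInterval(TimeValue.timeValueHours(24)).setBulkSize(new ByteSizeValue(1, ByteSizeUnit.GB))
        .setGlobalIndex("test")
        .setGlobalType(globalType)   // customType, ignoredType, or null
        .setGlobalRouting("routing")
        .setGlobalPipeline("pipeline_id")
        .build();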
BulkRequest request, Throwable failure) private static MultiGetRequest indexDocs(BulkProcessor processor, int numDocs) { MultiGetRequest multiGetRequest = new MultiGetRequest(); for (int i = 1; i <= numDocs; i++) { - processor.add(new IndexRequest(INDEX_NAME, "_doc", Integer.toString(i)) + processor.add(new IndexRequest(INDEX_NAME).id(Integer.toString(i)) .source(XContentType.JSON, "field", randomRealisticUnicodeOfCodepointLengthBetween(1, 30))); multiGetRequest.add(INDEX_NAME, Integer.toString(i)); } diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/BulkRequestWithGlobalParametersIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/BulkRequestWithGlobalParametersIT.java index bb9f78622c821..3020eb0329b5c 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/BulkRequestWithGlobalParametersIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/BulkRequestWithGlobalParametersIT.java @@ -24,6 +24,7 @@ import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.rest.action.document.RestBulkAction; import org.elasticsearch.search.SearchHit; import java.io.IOException; @@ -140,20 +141,19 @@ public void testIndexGlobalAndPerRequest() throws IOException { } public void testGlobalType() throws IOException { - BulkRequest request = new BulkRequest(null, "_doc"); + BulkRequest request = new BulkRequest(null, "global_type"); request.add(new IndexRequest("index").id("1") .source(XContentType.JSON, "field", "bulk1")); request.add(new IndexRequest("index").id("2") .source(XContentType.JSON, "field", "bulk2")); - bulk(request); + bulkWithTypes(request); Iterable hits = searchAll("index"); - assertThat(hits, everyItem(hasType("_doc"))); + assertThat(hits, everyItem(hasType("global_type"))); } @SuppressWarnings("unchecked") - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/pull/36549") public void testTypeGlobalAndPerRequest() throws IOException { BulkRequest request = new BulkRequest(null, "global_type"); request.add(new IndexRequest("index1", "local_type", "1") @@ -161,7 +161,7 @@ public void testTypeGlobalAndPerRequest() throws IOException { request.add(new IndexRequest("index2").id("2") // will take global type .source(XContentType.JSON, "field", "bulk2")); - bulk(request); + bulkWithTypes(request); Iterable hits = searchAll("index1", "index2"); assertThat(hits, containsInAnyOrder( @@ -174,7 +174,7 @@ public void testTypeGlobalAndPerRequest() throws IOException { @SuppressWarnings("unchecked") public void testGlobalRouting() throws IOException { createIndexWithMultipleShards("index"); - BulkRequest request = new BulkRequest(null, null); + BulkRequest request = new BulkRequest(null); request.add(new IndexRequest("index").id("1") .source(XContentType.JSON, "field", "bulk1")); request.add(new IndexRequest("index").id("2") @@ -191,7 +191,7 @@ public void testGlobalRouting() throws IOException { @SuppressWarnings("unchecked") public void testMixLocalAndGlobalRouting() throws IOException { - BulkRequest request = new BulkRequest(null, null); + BulkRequest request = new BulkRequest(null); request.routing("globalRouting"); request.add(new IndexRequest("index").id("1") .source(XContentType.JSON, "field", "bulk1")); @@ -204,12 +204,32 @@ public void testMixLocalAndGlobalRouting() throws IOException { Iterable hits = searchAll(new SearchRequest("index").routing("globalRouting", 
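The one-line change in BulkProcessorRetryIT shows the mechanical substitution this patch applies throughout: the deprecated three-argument (index, type, id) constructor becomes an index-only constructor plus an id(...) call, leaving the type to the server's single-mapping default. Side by side (value stands in for the random payload the tests generate):

    // before (typed, deprecated)
    processor.add(new IndexRequest(INDEX_NAME, "_doc", Integer.toString(i))
        .source(XContentType.JSON, "field", value));

    // after (typeless)
    processor.add(new IndexRequest(INDEX_NAME).id(Integer.toString(i))
        .source(XContentType.JSON, "field", value));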
"localRouting")); assertThat(hits, containsInAnyOrder(hasId("1"), hasId("2"))); } + + public void testGlobalIndexNoTypes() throws IOException { + BulkRequest request = new BulkRequest("global_index"); + request.add(new IndexRequest().id("1") + .source(XContentType.JSON, "field", "bulk1")); + request.add(new IndexRequest().id("2") + .source(XContentType.JSON, "field", "bulk2")); - private BulkResponse bulk(BulkRequest request) throws IOException { - BulkResponse bulkResponse = execute(request, highLevelClient()::bulk, highLevelClient()::bulkAsync); + bulk(request); + + Iterable hits = searchAll("global_index"); + assertThat(hits, everyItem(hasIndex("global_index"))); + } + + private BulkResponse bulkWithTypes(BulkRequest request) throws IOException { + BulkResponse bulkResponse = execute(request, highLevelClient()::bulk, highLevelClient()::bulkAsync, + expectWarnings(RestBulkAction.TYPES_DEPRECATION_MESSAGE)); assertFalse(bulkResponse.hasFailures()); return bulkResponse; } + + private BulkResponse bulk(BulkRequest request) throws IOException { + BulkResponse bulkResponse = execute(request, highLevelClient()::bulk, highLevelClient()::bulkAsync, RequestOptions.DEFAULT); + assertFalse(bulkResponse.hasFailures()); + return bulkResponse; + } @SuppressWarnings("unchecked") private static Function fieldFromSource(String fieldName) { diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/CrudIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/CrudIT.java index 59135204c5be1..19a5b03b6d6cd 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/CrudIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/CrudIT.java @@ -65,6 +65,7 @@ import org.elasticsearch.index.reindex.UpdateByQueryAction; import org.elasticsearch.index.reindex.UpdateByQueryRequest; import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.rest.action.document.RestBulkAction; import org.elasticsearch.rest.action.document.RestDeleteAction; import org.elasticsearch.rest.action.document.RestGetAction; import org.elasticsearch.rest.action.document.RestMultiGetAction; @@ -449,7 +450,7 @@ public void testMultiGetWithTypes() throws IOException { bulk.add(new IndexRequest("index", "type", "id2") .source("{\"field\":\"value2\"}", XContentType.JSON)); - highLevelClient().bulk(bulk, RequestOptions.DEFAULT); + highLevelClient().bulk(bulk, expectWarnings(RestBulkAction.TYPES_DEPRECATION_MESSAGE)); MultiGetRequest multiGetRequest = new MultiGetRequest(); multiGetRequest.add("index", "id1"); multiGetRequest.add("index", "type", "id2"); @@ -819,7 +820,7 @@ public void testBulk() throws IOException { } } - BulkResponse bulkResponse = execute(bulkRequest, highLevelClient()::bulk, highLevelClient()::bulkAsync); + BulkResponse bulkResponse = execute(bulkRequest, highLevelClient()::bulk, highLevelClient()::bulkAsync, RequestOptions.DEFAULT); assertEquals(RestStatus.OK, bulkResponse.status()); assertTrue(bulkResponse.getTook().getMillis() > 0); assertEquals(nbItems, bulkResponse.getItems().length); @@ -875,7 +876,7 @@ public void testUpdateByQuery() throws Exception { // test1: create one doc in dest UpdateByQueryRequest updateByQueryRequest = new UpdateByQueryRequest(); updateByQueryRequest.indices(sourceIndex); - updateByQueryRequest.setQuery(new IdsQueryBuilder().addIds("1").types("_doc")); + updateByQueryRequest.setQuery(new IdsQueryBuilder().addIds("1")); updateByQueryRequest.setRefresh(true); BulkByScrollResponse bulkResponse = execute(updateByQueryRequest, 
highLevelClient()::updateByQuery, highLevelClient()::updateByQueryAsync); @@ -917,7 +918,7 @@ public void testUpdateByQuery() throws Exception { // test update-by-query rethrottling UpdateByQueryRequest updateByQueryRequest = new UpdateByQueryRequest(); updateByQueryRequest.indices(sourceIndex); - updateByQueryRequest.setQuery(new IdsQueryBuilder().addIds("1").types("_doc")); + updateByQueryRequest.setQuery(new IdsQueryBuilder().addIds("1")); updateByQueryRequest.setRefresh(true); // this following settings are supposed to halt reindexing after first document @@ -987,7 +988,7 @@ public void testDeleteByQuery() throws Exception { // test1: delete one doc DeleteByQueryRequest deleteByQueryRequest = new DeleteByQueryRequest(); deleteByQueryRequest.indices(sourceIndex); - deleteByQueryRequest.setQuery(new IdsQueryBuilder().addIds("1").types("_doc")); + deleteByQueryRequest.setQuery(new IdsQueryBuilder().addIds("1")); deleteByQueryRequest.setRefresh(true); BulkByScrollResponse bulkResponse = execute(deleteByQueryRequest, highLevelClient()::deleteByQuery, highLevelClient()::deleteByQueryAsync); @@ -1009,7 +1010,7 @@ public void testDeleteByQuery() throws Exception { // test delete-by-query rethrottling DeleteByQueryRequest deleteByQueryRequest = new DeleteByQueryRequest(); deleteByQueryRequest.indices(sourceIndex); - deleteByQueryRequest.setQuery(new IdsQueryBuilder().addIds("2", "3").types("_doc")); + deleteByQueryRequest.setQuery(new IdsQueryBuilder().addIds("2", "3")); deleteByQueryRequest.setRefresh(true); // this following settings are supposed to halt reindexing after first document @@ -1080,7 +1081,8 @@ public void afterBulk(long executionId, BulkRequest request, Throwable failure) }; try (BulkProcessor processor = BulkProcessor.builder( - (request, bulkListener) -> highLevelClient().bulkAsync(request, RequestOptions.DEFAULT, bulkListener), listener) + (request, bulkListener) -> highLevelClient().bulkAsync(request, + RequestOptions.DEFAULT, bulkListener), listener) .setConcurrentRequests(0) .setBulkSize(new ByteSizeValue(5, ByteSizeUnit.GB)) .setBulkActions(nbItems + 1) diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/CustomRestHighLevelClientTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/CustomRestHighLevelClientTests.java index 3b69f10344a0d..316de885fa136 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/CustomRestHighLevelClientTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/CustomRestHighLevelClientTests.java @@ -24,10 +24,10 @@ import org.apache.http.ProtocolVersion; import org.apache.http.RequestLine; import org.apache.http.client.methods.HttpGet; -import org.apache.http.entity.ByteArrayEntity; import org.apache.http.entity.ContentType; import org.apache.http.message.BasicRequestLine; import org.apache.http.message.BasicStatusLine; +import org.apache.http.nio.entity.NByteArrayEntity; import org.apache.lucene.util.BytesRef; import org.elasticsearch.Build; import org.elasticsearch.Version; @@ -166,7 +166,7 @@ private Response mockPerformRequest(Request request) throws IOException { MainResponse response = new MainResponse(httpHeader.getValue(), Version.CURRENT, ClusterName.DEFAULT, "_na", Build.CURRENT); BytesRef bytesRef = XContentHelper.toXContent(response, XContentType.JSON, false).toBytesRef(); - when(mockResponse.getEntity()).thenReturn(new ByteArrayEntity(bytesRef.bytes, ContentType.APPLICATION_JSON)); + when(mockResponse.getEntity()).thenReturn(new 
NByteArrayEntity(bytesRef.bytes, ContentType.APPLICATION_JSON)); RequestLine requestLine = new BasicRequestLine(HttpGet.METHOD_NAME, ENDPOINT, protocol); when(mockResponse.getRequestLine()).thenReturn(requestLine); diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/IndicesClientIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/IndicesClientIT.java index a639a09b3cc53..92d7e94394594 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/IndicesClientIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/IndicesClientIT.java @@ -89,6 +89,7 @@ import org.elasticsearch.common.xcontent.json.JsonXContent; import org.elasticsearch.common.xcontent.support.XContentMapValues; import org.elasticsearch.index.IndexSettings; +import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.rest.RestStatus; @@ -209,7 +210,7 @@ public void testCreateIndex() throws IOException { mappingBuilder.startObject().startObject("properties").startObject("field"); mappingBuilder.field("type", "text"); mappingBuilder.endObject().endObject().endObject(); - createIndexRequest.mapping("type_name", mappingBuilder); + createIndexRequest.mapping(MapperService.SINGLE_MAPPING_NAME, mappingBuilder); CreateIndexResponse createIndexResponse = execute(createIndexRequest, highLevelClient().indices()::create, highLevelClient().indices()::createAsync); @@ -226,7 +227,7 @@ public void testCreateIndex() throws IOException { Map term = (Map) filter.get("term"); assertEquals(2016, term.get("year")); - assertEquals("text", XContentMapValues.extractValue(indexName + ".mappings.type_name.properties.field.type", getIndexResponse)); + assertEquals("text", XContentMapValues.extractValue(indexName + ".mappings.properties.field.type", getIndexResponse)); } } @@ -340,7 +341,7 @@ public void testGetIndex() throws IOException { .put(SETTING_NUMBER_OF_SHARDS, 1) .put(SETTING_NUMBER_OF_REPLICAS, 0) .build(); - String mappings = "\"type-1\":{\"properties\":{\"field-1\":{\"type\":\"integer\"}}}"; + String mappings = "\"_doc\":{\"properties\":{\"field-1\":{\"type\":\"integer\"}}}"; createIndex(indexName, basicSettings, mappings); GetIndexRequest getIndexRequest = new GetIndexRequest() @@ -353,8 +354,8 @@ public void testGetIndex() throws IOException { assertEquals("1", getIndexResponse.getSetting(indexName, SETTING_NUMBER_OF_SHARDS)); assertEquals("0", getIndexResponse.getSetting(indexName, SETTING_NUMBER_OF_REPLICAS)); assertNotNull(getIndexResponse.getMappings().get(indexName)); - assertNotNull(getIndexResponse.getMappings().get(indexName).get("type-1")); - Object o = getIndexResponse.getMappings().get(indexName).get("type-1").getSourceAsMap().get("properties"); + assertNotNull(getIndexResponse.getMappings().get(indexName).get("_doc")); + Object o = getIndexResponse.getMappings().get(indexName).get("_doc").getSourceAsMap().get("properties"); assertThat(o, instanceOf(Map.class)); //noinspection unchecked assertThat(((Map) o).get("field-1"), instanceOf(Map.class)); @@ -370,7 +371,7 @@ public void testGetIndexWithDefaults() throws IOException { .put(SETTING_NUMBER_OF_SHARDS, 1) .put(SETTING_NUMBER_OF_REPLICAS, 0) .build(); - String mappings = "\"type-1\":{\"properties\":{\"field-1\":{\"type\":\"integer\"}}}"; + String mappings = "\"_doc\":{\"properties\":{\"field-1\":{\"type\":\"integer\"}}}"; createIndex(indexName, basicSettings, mappings); 
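The IndicesClientIT updates track the server-side response shape: once an index carries only the single _doc type, GET-index and GET-mapping responses nest properties directly under mappings, so the extraction paths lose their type segment. A sketch of the put-mapping side, assuming the same mappingBuilder fixture as the test:

    PutMappingRequest putMappingRequest = new PutMappingRequest(indexName);
    putMappingRequest.type(MapperService.SINGLE_MAPPING_NAME); // "_doc"
    putMappingRequest.source(mappingBuilder);

    // before: indexName + ".mappings._doc.properties.field.type"
    // after:  indexName + ".mappings.properties.field.type"
    assertEquals("text", XContentMapValues.extractValue(
        indexName + ".mappings.properties.field.type", getAsMap(indexName)));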
GetIndexRequest getIndexRequest = new GetIndexRequest() @@ -384,8 +385,8 @@ public void testGetIndexWithDefaults() throws IOException { assertEquals("1", getIndexResponse.getSetting(indexName, SETTING_NUMBER_OF_SHARDS)); assertEquals("0", getIndexResponse.getSetting(indexName, SETTING_NUMBER_OF_REPLICAS)); assertNotNull(getIndexResponse.getMappings().get(indexName)); - assertNotNull(getIndexResponse.getMappings().get(indexName).get("type-1")); - Object o = getIndexResponse.getMappings().get(indexName).get("type-1").getSourceAsMap().get("properties"); + assertNotNull(getIndexResponse.getMappings().get(indexName).get("_doc")); + Object o = getIndexResponse.getMappings().get(indexName).get("_doc").getSourceAsMap().get("properties"); assertThat(o, instanceOf(Map.class)); assertThat(((Map) o).get("field-1"), instanceOf(Map.class)); Map fieldMapping = (Map) ((Map) o).get("field-1"); @@ -408,7 +409,7 @@ public void testPutMapping() throws IOException { createIndex(indexName, Settings.EMPTY); PutMappingRequest putMappingRequest = new PutMappingRequest(indexName); - putMappingRequest.type("type_name"); + putMappingRequest.type("_doc"); XContentBuilder mappingBuilder = JsonXContent.contentBuilder(); mappingBuilder.startObject().startObject("properties").startObject("field"); mappingBuilder.field("type", "text"); @@ -420,7 +421,7 @@ public void testPutMapping() throws IOException { assertTrue(putMappingResponse.isAcknowledged()); Map getIndexResponse = getAsMap(indexName); - assertEquals("text", XContentMapValues.extractValue(indexName + ".mappings.type_name.properties.field.type", getIndexResponse)); + assertEquals("text", XContentMapValues.extractValue(indexName + ".mappings.properties.field.type", getIndexResponse)); } public void testGetMapping() throws IOException { @@ -440,7 +441,7 @@ public void testGetMapping() throws IOException { assertTrue(putMappingResponse.isAcknowledged()); Map getIndexResponse = getAsMap(indexName); - assertEquals("text", XContentMapValues.extractValue(indexName + ".mappings._doc.properties.field.type", getIndexResponse)); + assertEquals("text", XContentMapValues.extractValue(indexName + ".mappings.properties.field.type", getIndexResponse)); GetMappingsRequest request = new GetMappingsRequest() .indices(indexName) diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/MachineLearningGetResultsIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/MachineLearningGetResultsIT.java index 2ee09b496fc85..9b364975c773a 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/MachineLearningGetResultsIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/MachineLearningGetResultsIT.java @@ -65,7 +65,6 @@ public class MachineLearningGetResultsIT extends ESRestHighLevelClientTestCase { private static final String RESULTS_INDEX = ".ml-anomalies-shared"; - private static final String DOC = "doc"; private static final String JOB_ID = "get-results-it-job"; @@ -100,7 +99,7 @@ public void createJobAndIndexResults() throws IOException { } private void addBucketIndexRequest(long timestamp, boolean isInterim, BulkRequest bulkRequest) { - IndexRequest indexRequest = new IndexRequest(RESULTS_INDEX, DOC); + IndexRequest indexRequest = new IndexRequest(RESULTS_INDEX); double bucketScore = randomDoubleBetween(0.0, 100.0, true); bucketStats.report(bucketScore); indexRequest.source("{\"job_id\":\"" + JOB_ID + "\", \"result_type\":\"bucket\", \"timestamp\": " + timestamp + "," + @@ -122,7 +121,7 @@ private void 
addRecordIndexRequests(long timestamp, boolean isInterim, BulkReque } private void addRecordIndexRequest(long timestamp, boolean isInterim, BulkRequest bulkRequest) { - IndexRequest indexRequest = new IndexRequest(RESULTS_INDEX, DOC); + IndexRequest indexRequest = new IndexRequest(RESULTS_INDEX); double recordScore = randomDoubleBetween(0.0, 100.0, true); recordStats.report(recordScore); double p = randomDoubleBetween(0.0, 0.05, false); @@ -133,7 +132,7 @@ private void addRecordIndexRequest(long timestamp, boolean isInterim, BulkReques } private void addCategoryIndexRequest(long categoryId, String categoryName, BulkRequest bulkRequest) { - IndexRequest indexRequest = new IndexRequest(RESULTS_INDEX, DOC); + IndexRequest indexRequest = new IndexRequest(RESULTS_INDEX); indexRequest.source("{\"job_id\":\"" + JOB_ID + "\", \"category_id\": " + categoryId + ", \"terms\": \"" + categoryName + "\", \"regex\": \".*?" + categoryName + ".*\", \"max_matching_length\": 3, \"examples\": [\"" + categoryName + "\"]}", XContentType.JSON); @@ -151,7 +150,7 @@ private void addCategoriesIndexRequests(BulkRequest bulkRequest) { private void addModelSnapshotIndexRequests(BulkRequest bulkRequest) { { - IndexRequest indexRequest = new IndexRequest(RESULTS_INDEX, DOC); + IndexRequest indexRequest = new IndexRequest(RESULTS_INDEX); indexRequest.source("{\"job_id\":\"" + JOB_ID + "\", \"timestamp\":1541587919000, " + "\"description\":\"State persisted due to job close at 2018-11-07T10:51:59+0000\", \"snapshot_id\":\"1541587919\"," + "\"snapshot_doc_count\":1, \"model_size_stats\":{\"job_id\":\"" + JOB_ID + "\", \"result_type\":\"model_size_stats\"," + @@ -162,7 +161,7 @@ private void addModelSnapshotIndexRequests(BulkRequest bulkRequest) { bulkRequest.add(indexRequest); } { - IndexRequest indexRequest = new IndexRequest(RESULTS_INDEX, DOC); + IndexRequest indexRequest = new IndexRequest(RESULTS_INDEX); indexRequest.source("{\"job_id\":\"" + JOB_ID + "\", \"timestamp\":1541588919000, " + "\"description\":\"State persisted due to job close at 2018-11-07T11:08:39+0000\", \"snapshot_id\":\"1541588919\"," + "\"snapshot_doc_count\":1, \"model_size_stats\":{\"job_id\":\"" + JOB_ID + "\", \"result_type\":\"model_size_stats\"," + @@ -173,7 +172,7 @@ private void addModelSnapshotIndexRequests(BulkRequest bulkRequest) { bulkRequest.add(indexRequest); } { - IndexRequest indexRequest = new IndexRequest(RESULTS_INDEX, DOC); + IndexRequest indexRequest = new IndexRequest(RESULTS_INDEX); indexRequest.source("{\"job_id\":\"" + JOB_ID + "\", \"timestamp\":1541589919000, " + "\"description\":\"State persisted due to job close at 2018-11-07T11:25:19+0000\", \"snapshot_id\":\"1541589919\"," + "\"snapshot_doc_count\":1, \"model_size_stats\":{\"job_id\":\"" + JOB_ID + "\", \"result_type\":\"model_size_stats\"," + @@ -752,7 +751,7 @@ public void testGetOverallBuckets() throws IOException { BulkRequest bulkRequest = new BulkRequest(); bulkRequest.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE); for (Bucket bucket : firstBuckets) { - IndexRequest indexRequest = new IndexRequest(RESULTS_INDEX, DOC); + IndexRequest indexRequest = new IndexRequest(RESULTS_INDEX); indexRequest.source("{\"job_id\":\"" + anotherJobId + "\", \"result_type\":\"bucket\", \"timestamp\": " + bucket.getTimestamp().getTime() + "," + "\"bucket_span\": 3600,\"is_interim\": " + bucket.isInterim() + ", \"anomaly_score\": " + String.valueOf(bucket.getAnomalyScore() + 10.0) + "}", XContentType.JSON); @@ -923,7 +922,7 @@ public void testGetInfluencers() throws 
IOException { // Last one score is higher double score = isLast ? 90.0 : 42.0; - IndexRequest indexRequest = new IndexRequest(RESULTS_INDEX, DOC); + IndexRequest indexRequest = new IndexRequest(RESULTS_INDEX); indexRequest.source("{\"job_id\":\"" + JOB_ID + "\", \"result_type\":\"influencer\", \"timestamp\": " + timestamp + "," + "\"bucket_span\": 3600,\"is_interim\": " + isInterim + ", \"influencer_score\": " + score + ", " + "\"influencer_field_name\":\"my_influencer\", \"influencer_field_value\": \"inf_1\", \"probability\":" diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/MachineLearningIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/MachineLearningIT.java index 841b5399bfb1b..1971ff6461a74 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/MachineLearningIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/MachineLearningIT.java @@ -538,7 +538,6 @@ public void testStartDatafeed() throws Exception { while(pastCopy < now) { IndexRequest doc = new IndexRequest(); doc.index(indexName); - doc.type("_doc"); doc.id("id" + i); doc.source("{\"total\":" +randomInt(1000) + ",\"timestamp\":"+ pastCopy +"}", XContentType.JSON); bulk.add(doc); @@ -558,7 +557,6 @@ public void testStartDatafeed() throws Exception { DatafeedConfig datafeed = DatafeedConfig.builder(datafeedId, jobId) .setIndices(indexName) .setQueryDelay(TimeValue.timeValueSeconds(1)) - .setTypes(Arrays.asList("_doc")) .setFrequency(TimeValue.timeValueSeconds(1)).build(); machineLearningClient.putDatafeed(new PutDatafeedRequest(datafeed), RequestOptions.DEFAULT); @@ -748,7 +746,6 @@ public void testPreviewDatafeed() throws Exception { Integer total = randomInt(1000); IndexRequest doc = new IndexRequest(); doc.index(indexName); - doc.type("_doc"); doc.id("id" + i); doc.source("{\"total\":" + total + ",\"timestamp\":"+ thePast +"}", XContentType.JSON); bulk.add(doc); @@ -768,7 +765,6 @@ public void testPreviewDatafeed() throws Exception { DatafeedConfig datafeed = DatafeedConfig.builder(datafeedId, jobId) .setIndices(indexName) .setQueryDelay(TimeValue.timeValueSeconds(1)) - .setTypes(Collections.singletonList("_doc")) .setFrequency(TimeValue.timeValueSeconds(1)).build(); machineLearningClient.putDatafeed(new PutDatafeedRequest(datafeed), RequestOptions.DEFAULT); @@ -809,7 +805,7 @@ private String createExpiredData(String jobId) throws Exception { long timestamp = nowMillis - TimeValue.timeValueHours(totalBuckets - bucket).getMillis(); int bucketRate = bucket == anomalousBucket ? 
anomalousRate : normalRate; for (int point = 0; point < bucketRate; point++) { - IndexRequest indexRequest = new IndexRequest(indexId, "_doc"); + IndexRequest indexRequest = new IndexRequest(indexId); indexRequest.source(XContentType.JSON, "timestamp", timestamp, "total", randomInt(1000)); bulk.add(indexRequest); } @@ -819,7 +815,7 @@ private String createExpiredData(String jobId) throws Exception { { // Index a randomly named unused state document String docId = "non_existing_job_" + randomFrom("model_state_1234567#1", "quantiles", "categorizer_state#1"); - IndexRequest indexRequest = new IndexRequest(".ml-state", "_doc", docId); + IndexRequest indexRequest = new IndexRequest(".ml-state").id(docId); indexRequest.source(Collections.emptyMap(), XContentType.JSON); indexRequest.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE); highLevelClient().index(indexRequest, RequestOptions.DEFAULT); @@ -1403,7 +1399,6 @@ private String createAndPutDatafeed(String jobId, String indexName) throws IOExc DatafeedConfig datafeed = DatafeedConfig.builder(datafeedId, jobId) .setIndices(indexName) .setQueryDelay(TimeValue.timeValueSeconds(1)) - .setTypes(Arrays.asList("_doc")) .setFrequency(TimeValue.timeValueSeconds(1)).build(); highLevelClient().machineLearning().putDatafeed(new PutDatafeedRequest(datafeed), RequestOptions.DEFAULT); return datafeedId; diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/ReindexIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/ReindexIT.java index f94cc41432c4c..cfdd29cdfbfbf 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/ReindexIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/ReindexIT.java @@ -23,6 +23,7 @@ import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.support.WriteRequest; import org.elasticsearch.client.tasks.TaskSubmissionResponse; +import org.elasticsearch.common.CheckedRunnable; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.index.query.IdsQueryBuilder; @@ -32,7 +33,6 @@ import java.io.IOException; import java.util.Collections; -import java.util.function.BooleanSupplier; public class ReindexIT extends ESRestHighLevelClientTestCase { @@ -48,8 +48,8 @@ public void testReindex() throws IOException { createIndex(sourceIndex, settings); createIndex(destinationIndex, settings); BulkRequest bulkRequest = new BulkRequest() - .add(new IndexRequest(sourceIndex, "type", "1").source(Collections.singletonMap("foo", "bar"), XContentType.JSON)) - .add(new IndexRequest(sourceIndex, "type", "2").source(Collections.singletonMap("foo2", "bar2"), XContentType.JSON)) + .add(new IndexRequest(sourceIndex).id("1").source(Collections.singletonMap("foo", "bar"), XContentType.JSON)) + .add(new IndexRequest(sourceIndex).id("2").source(Collections.singletonMap("foo2", "bar2"), XContentType.JSON)) .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE); assertEquals( RestStatus.OK, @@ -64,7 +64,7 @@ public void testReindex() throws IOException { ReindexRequest reindexRequest = new ReindexRequest(); reindexRequest.setSourceIndices(sourceIndex); reindexRequest.setDestIndex(destinationIndex); - reindexRequest.setSourceQuery(new IdsQueryBuilder().addIds("1").types("type")); + reindexRequest.setSourceQuery(new IdsQueryBuilder().addIds("1")); reindexRequest.setRefresh(true); BulkByScrollResponse bulkResponse = execute(reindexRequest, highLevelClient()::reindex, 
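Further down, ReindexIT also replaces its hand-rolled awaitBusy/BooleanSupplier polling with assertBusy over a CheckedRunnable<Exception>: the runnable throws or fails an assertion, and assertBusy retries it until it passes, so IOExceptions no longer have to be swallowed and converted to false inside a supplier. The new helper, reassembled from the hunks for readability:

    static CheckedRunnable<Exception> checkCompletionStatus(RestClient client, String taskId) {
        return () -> {
            Response response = client.performRequest(new Request("GET", "/_tasks/" + taskId));
            assertTrue((boolean) entityAsMap(response).get("completed"));
        };
    }

    // usage: retried until the task reports completed, or assertBusy times out
    assertBusy(checkCompletionStatus(client(), taskId));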
highLevelClient()::reindexAsync); @@ -82,7 +82,7 @@ public void testReindex() throws IOException { } } - public void testReindexTask() throws IOException, InterruptedException { + public void testReindexTask() throws Exception { final String sourceIndex = "source123"; final String destinationIndex = "dest2"; { @@ -94,8 +94,8 @@ public void testReindexTask() throws IOException, InterruptedException { createIndex(sourceIndex, settings); createIndex(destinationIndex, settings); BulkRequest bulkRequest = new BulkRequest() - .add(new IndexRequest(sourceIndex, "type", "1").source(Collections.singletonMap("foo", "bar"), XContentType.JSON)) - .add(new IndexRequest(sourceIndex, "type", "2").source(Collections.singletonMap("foo2", "bar2"), XContentType.JSON)) + .add(new IndexRequest(sourceIndex).id("1").source(Collections.singletonMap("foo", "bar"), XContentType.JSON)) + .add(new IndexRequest(sourceIndex).id("2").source(Collections.singletonMap("foo2", "bar2"), XContentType.JSON)) .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE); assertEquals( RestStatus.OK, @@ -118,20 +118,14 @@ public void testReindexTask() throws IOException, InterruptedException { String taskId = reindexSubmission.getTask(); // <3> // end::submit-reindex-task - BooleanSupplier hasUpgradeCompleted = checkCompletionStatus(taskId); - awaitBusy(hasUpgradeCompleted); + assertBusy(checkCompletionStatus(client(), taskId)); } } - private BooleanSupplier checkCompletionStatus(String taskId) { + static CheckedRunnable checkCompletionStatus(RestClient client, String taskId) { return () -> { - try { - Response response = client().performRequest(new Request("GET", "/_tasks/" + taskId)); - return (boolean) entityAsMap(response).get("completed"); - } catch (IOException e) { - fail(e.getMessage()); - return false; - } + Response response = client.performRequest(new Request("GET", "/_tasks/" + taskId)); + assertTrue((boolean) entityAsMap(response).get("completed")); }; } } diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestConvertersTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestConvertersTests.java index 698f7557c1306..f31c562332687 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestConvertersTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestConvertersTests.java @@ -25,7 +25,7 @@ import org.apache.http.client.methods.HttpHead; import org.apache.http.client.methods.HttpPost; import org.apache.http.client.methods.HttpPut; -import org.apache.http.entity.ByteArrayEntity; +import org.apache.http.nio.entity.NByteArrayEntity; import org.apache.http.util.EntityUtils; import org.elasticsearch.action.DocWriteRequest; import org.elasticsearch.action.admin.cluster.storedscripts.DeleteStoredScriptRequest; @@ -671,7 +671,7 @@ public void testIndex() throws IOException { assertEquals(method, request.getMethod()); HttpEntity entity = request.getEntity(); - assertTrue(entity instanceof ByteArrayEntity); + assertTrue(entity instanceof NByteArrayEntity); assertEquals(indexRequest.getContentType().mediaTypeWithoutParameters(), entity.getContentType().getValue()); try (XContentParser parser = createParser(xContentType.xContent(), entity.getContent())) { assertEquals(nbFields, parser.map().size()); @@ -714,7 +714,7 @@ public void testIndexWithType() throws IOException { assertEquals(method, request.getMethod()); HttpEntity entity = request.getEntity(); - assertTrue(entity instanceof ByteArrayEntity); + assertTrue(entity 
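The entity assertions in RequestConvertersTests pair with the forbidden-apis entries added at the top of this change: request bodies in this module must now use the non-blocking entity classes from org.apache.http.nio.entity rather than the blocking org.apache.http.entity ones, which fits the async low-level client. That rationale is the conventional one for the NIO variants; the diff itself only enforces the swap. The substitution pattern:

    // forbidden (blocking):
    //   new ByteArrayEntity(bytes, ContentType.APPLICATION_JSON)
    //   new StringEntity(json, ContentType.APPLICATION_JSON)

    // allowed (non-blocking):
    HttpEntity binary = new NByteArrayEntity(bytes, ContentType.APPLICATION_JSON);
    HttpEntity text = new NStringEntity(json, ContentType.APPLICATION_JSON);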
instanceof NByteArrayEntity); assertEquals(indexRequest.getContentType().mediaTypeWithoutParameters(), entity.getContentType().getValue()); try (XContentParser parser = createParser(xContentType.xContent(), entity.getContent())) { assertEquals(nbFields, parser.map().size()); @@ -787,7 +787,7 @@ public void testUpdate() throws IOException { assertEquals(HttpPost.METHOD_NAME, request.getMethod()); HttpEntity entity = request.getEntity(); - assertTrue(entity instanceof ByteArrayEntity); + assertTrue(entity instanceof NByteArrayEntity); UpdateRequest parsedUpdateRequest = new UpdateRequest(); @@ -860,7 +860,6 @@ public void testBulk() throws IOException { int nbItems = randomIntBetween(10, 100); for (int i = 0; i < nbItems; i++) { String index = randomAlphaOfLength(5); - String type = randomAlphaOfLength(5); String id = randomAlphaOfLength(5); BytesReference source = RandomObjects.randomSource(random(), xContentType); @@ -868,16 +867,16 @@ public void testBulk() throws IOException { DocWriteRequest docWriteRequest; if (opType == DocWriteRequest.OpType.INDEX) { - IndexRequest indexRequest = new IndexRequest(index, type, id).source(source, xContentType); + IndexRequest indexRequest = new IndexRequest(index).id(id).source(source, xContentType); docWriteRequest = indexRequest; if (randomBoolean()) { indexRequest.setPipeline(randomAlphaOfLength(5)); } } else if (opType == DocWriteRequest.OpType.CREATE) { - IndexRequest createRequest = new IndexRequest(index, type, id).source(source, xContentType).create(true); + IndexRequest createRequest = new IndexRequest(index).id(id).source(source, xContentType).create(true); docWriteRequest = createRequest; } else if (opType == DocWriteRequest.OpType.UPDATE) { - final UpdateRequest updateRequest = new UpdateRequest(index, type, id).doc(new IndexRequest().source(source, xContentType)); + final UpdateRequest updateRequest = new UpdateRequest(index, id).doc(new IndexRequest().source(source, xContentType)); docWriteRequest = updateRequest; if (randomBoolean()) { updateRequest.retryOnConflict(randomIntBetween(1, 5)); @@ -886,7 +885,7 @@ public void testBulk() throws IOException { randomizeFetchSourceContextParams(updateRequest::fetchSource, new HashMap<>()); } } else if (opType == DocWriteRequest.OpType.DELETE) { - docWriteRequest = new DeleteRequest(index, type, id); + docWriteRequest = new DeleteRequest(index, id); } else { throw new UnsupportedOperationException("optype [" + opType + "] not supported"); } @@ -954,9 +953,9 @@ public void testBulk() throws IOException { public void testBulkWithDifferentContentTypes() throws IOException { { BulkRequest bulkRequest = new BulkRequest(); - bulkRequest.add(new DeleteRequest("index", "type", "0")); - bulkRequest.add(new UpdateRequest("index", "type", "1").script(mockScript("test"))); - bulkRequest.add(new DeleteRequest("index", "type", "2")); + bulkRequest.add(new DeleteRequest("index", "0")); + bulkRequest.add(new UpdateRequest("index", "1").script(mockScript("test"))); + bulkRequest.add(new DeleteRequest("index", "2")); Request request = RequestConverters.bulk(bulkRequest); assertEquals(XContentType.JSON.mediaTypeWithoutParameters(), request.getEntity().getContentType().getValue()); @@ -964,16 +963,16 @@ public void testBulkWithDifferentContentTypes() throws IOException { { XContentType xContentType = randomFrom(XContentType.JSON, XContentType.SMILE); BulkRequest bulkRequest = new BulkRequest(); - bulkRequest.add(new DeleteRequest("index", "type", "0")); - bulkRequest.add(new IndexRequest("index", "type", 
"0").source(singletonMap("field", "value"), xContentType)); - bulkRequest.add(new DeleteRequest("index", "type", "2")); + bulkRequest.add(new DeleteRequest("index", "0")); + bulkRequest.add(new IndexRequest("index").id("0").source(singletonMap("field", "value"), xContentType)); + bulkRequest.add(new DeleteRequest("index", "2")); Request request = RequestConverters.bulk(bulkRequest); assertEquals(xContentType.mediaTypeWithoutParameters(), request.getEntity().getContentType().getValue()); } { XContentType xContentType = randomFrom(XContentType.JSON, XContentType.SMILE); - UpdateRequest updateRequest = new UpdateRequest("index", "type", "0"); + UpdateRequest updateRequest = new UpdateRequest("index", "0"); if (randomBoolean()) { updateRequest.doc(new IndexRequest().source(singletonMap("field", "value"), xContentType)); } else { @@ -985,8 +984,8 @@ public void testBulkWithDifferentContentTypes() throws IOException { } { BulkRequest bulkRequest = new BulkRequest(); - bulkRequest.add(new IndexRequest("index", "type", "0").source(singletonMap("field", "value"), XContentType.SMILE)); - bulkRequest.add(new IndexRequest("index", "type", "1").source(singletonMap("field", "value"), XContentType.JSON)); + bulkRequest.add(new IndexRequest("index").id("0").source(singletonMap("field", "value"), XContentType.SMILE)); + bulkRequest.add(new IndexRequest("index").id("1").source(singletonMap("field", "value"), XContentType.JSON)); IllegalArgumentException exception = expectThrows(IllegalArgumentException.class, () -> RequestConverters.bulk(bulkRequest)); assertEquals( "Mismatching content-type found for request with content-type [JSON], " + "previous requests have content-type [SMILE]", @@ -994,9 +993,9 @@ public void testBulkWithDifferentContentTypes() throws IOException { } { BulkRequest bulkRequest = new BulkRequest(); - bulkRequest.add(new IndexRequest("index", "type", "0").source(singletonMap("field", "value"), XContentType.JSON)); - bulkRequest.add(new IndexRequest("index", "type", "1").source(singletonMap("field", "value"), XContentType.JSON)); - bulkRequest.add(new UpdateRequest("index", "type", "2") + bulkRequest.add(new IndexRequest("index").id("0").source(singletonMap("field", "value"), XContentType.JSON)); + bulkRequest.add(new IndexRequest("index").id("1").source(singletonMap("field", "value"), XContentType.JSON)); + bulkRequest.add(new UpdateRequest("index", "2") .doc(new IndexRequest().source(singletonMap("field", "value"), XContentType.JSON)) .upsert(new IndexRequest().source(singletonMap("field", "value"), XContentType.SMILE))); IllegalArgumentException exception = expectThrows(IllegalArgumentException.class, () -> RequestConverters.bulk(bulkRequest)); @@ -1007,12 +1006,12 @@ public void testBulkWithDifferentContentTypes() throws IOException { { XContentType xContentType = randomFrom(XContentType.CBOR, XContentType.YAML); BulkRequest bulkRequest = new BulkRequest(); - bulkRequest.add(new DeleteRequest("index", "type", "0")); - bulkRequest.add(new IndexRequest("index", "type", "1").source(singletonMap("field", "value"), XContentType.JSON)); - bulkRequest.add(new DeleteRequest("index", "type", "2")); - bulkRequest.add(new DeleteRequest("index", "type", "3")); - bulkRequest.add(new IndexRequest("index", "type", "4").source(singletonMap("field", "value"), XContentType.JSON)); - bulkRequest.add(new IndexRequest("index", "type", "1").source(singletonMap("field", "value"), xContentType)); + bulkRequest.add(new DeleteRequest("index", "0")); + bulkRequest.add(new 
IndexRequest("index").id("1").source(singletonMap("field", "value"), XContentType.JSON)); + bulkRequest.add(new DeleteRequest("index", "2")); + bulkRequest.add(new DeleteRequest("index", "3")); + bulkRequest.add(new IndexRequest("index").id("4").source(singletonMap("field", "value"), XContentType.JSON)); + bulkRequest.add(new IndexRequest("index").id("1").source(singletonMap("field", "value"), xContentType)); IllegalArgumentException exception = expectThrows(IllegalArgumentException.class, () -> RequestConverters.bulk(bulkRequest)); assertEquals("Unsupported content-type found for request with content-type [" + xContentType + "], only JSON and SMILE are supported", exception.getMessage()); @@ -1022,11 +1021,11 @@ public void testBulkWithDifferentContentTypes() throws IOException { public void testGlobalPipelineOnBulkRequest() throws IOException { BulkRequest bulkRequest = new BulkRequest(); bulkRequest.pipeline("xyz"); - bulkRequest.add(new IndexRequest("test", "doc", "11") + bulkRequest.add(new IndexRequest("test").id("11") .source(XContentType.JSON, "field", "bulk1")); - bulkRequest.add(new IndexRequest("test", "doc", "12") + bulkRequest.add(new IndexRequest("test").id("12") .source(XContentType.JSON, "field", "bulk2")); - bulkRequest.add(new IndexRequest("test", "doc", "13") + bulkRequest.add(new IndexRequest("test").id("13") .source(XContentType.JSON, "field", "bulk3")); Request request = RequestConverters.bulk(bulkRequest); diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/RestHighLevelClientExtTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/RestHighLevelClientExtTests.java index 970ffb15f083b..130692a5fd8bc 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/RestHighLevelClientExtTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/RestHighLevelClientExtTests.java @@ -21,7 +21,7 @@ import org.apache.http.HttpEntity; import org.apache.http.entity.ContentType; -import org.apache.http.entity.StringEntity; +import org.apache.http.nio.entity.NStringEntity; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.common.xcontent.XContentParser; @@ -51,14 +51,14 @@ public void initClient() { public void testParseEntityCustomResponseSection() throws IOException { { - HttpEntity jsonEntity = new StringEntity("{\"custom1\":{ \"field\":\"value\"}}", ContentType.APPLICATION_JSON); + HttpEntity jsonEntity = new NStringEntity("{\"custom1\":{ \"field\":\"value\"}}", ContentType.APPLICATION_JSON); BaseCustomResponseSection customSection = restHighLevelClient.parseEntity(jsonEntity, BaseCustomResponseSection::fromXContent); assertThat(customSection, instanceOf(CustomResponseSection1.class)); CustomResponseSection1 customResponseSection1 = (CustomResponseSection1) customSection; assertEquals("value", customResponseSection1.value); } { - HttpEntity jsonEntity = new StringEntity("{\"custom2\":{ \"array\": [\"item1\", \"item2\"]}}", ContentType.APPLICATION_JSON); + HttpEntity jsonEntity = new NStringEntity("{\"custom2\":{ \"array\": [\"item1\", \"item2\"]}}", ContentType.APPLICATION_JSON); BaseCustomResponseSection customSection = restHighLevelClient.parseEntity(jsonEntity, BaseCustomResponseSection::fromXContent); assertThat(customSection, instanceOf(CustomResponseSection2.class)); CustomResponseSection2 customResponseSection2 = (CustomResponseSection2) customSection; diff --git 
a/client/rest-high-level/src/test/java/org/elasticsearch/client/RestHighLevelClientTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/RestHighLevelClientTests.java index 155a54288ab79..a94ab4541f0f9 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/RestHighLevelClientTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/RestHighLevelClientTests.java @@ -27,12 +27,11 @@ import org.apache.http.RequestLine; import org.apache.http.StatusLine; import org.apache.http.client.methods.HttpGet; -import org.apache.http.entity.ByteArrayEntity; import org.apache.http.entity.ContentType; -import org.apache.http.entity.StringEntity; import org.apache.http.message.BasicHttpResponse; import org.apache.http.message.BasicRequestLine; import org.apache.http.message.BasicStatusLine; +import org.apache.http.nio.entity.NByteArrayEntity; import org.apache.http.nio.entity.NStringEntity; import org.elasticsearch.Build; import org.elasticsearch.ElasticsearchException; @@ -51,6 +50,7 @@ import org.elasticsearch.client.indexlifecycle.AllocateAction; import org.elasticsearch.client.indexlifecycle.DeleteAction; import org.elasticsearch.client.indexlifecycle.ForceMergeAction; +import org.elasticsearch.client.indexlifecycle.FreezeAction; import org.elasticsearch.client.indexlifecycle.LifecycleAction; import org.elasticsearch.client.indexlifecycle.ReadOnlyAction; import org.elasticsearch.client.indexlifecycle.RolloverAction; @@ -242,11 +242,11 @@ public void testParseEntity() throws IOException { } { IllegalStateException ise = expectThrows(IllegalStateException.class, - () -> restHighLevelClient.parseEntity(new StringEntity("", (ContentType) null), null)); + () -> restHighLevelClient.parseEntity(new NStringEntity("", (ContentType) null), null)); assertEquals("Elasticsearch didn't return the [Content-Type] header, unable to parse response body", ise.getMessage()); } { - StringEntity entity = new StringEntity("", ContentType.APPLICATION_SVG_XML); + NStringEntity entity = new NStringEntity("", ContentType.APPLICATION_SVG_XML); IllegalStateException ise = expectThrows(IllegalStateException.class, () -> restHighLevelClient.parseEntity(entity, null)); assertEquals("Unsupported Content-Type: " + entity.getContentType().getValue(), ise.getMessage()); } @@ -259,9 +259,9 @@ public void testParseEntity() throws IOException { assertEquals(XContentParser.Token.END_OBJECT, parser.nextToken()); return value; }; - HttpEntity jsonEntity = new StringEntity("{\"field\":\"value\"}", ContentType.APPLICATION_JSON); + HttpEntity jsonEntity = new NStringEntity("{\"field\":\"value\"}", ContentType.APPLICATION_JSON); assertEquals("value", restHighLevelClient.parseEntity(jsonEntity, entityParser)); - HttpEntity yamlEntity = new StringEntity("---\nfield: value\n", ContentType.create("application/yaml")); + HttpEntity yamlEntity = new NStringEntity("---\nfield: value\n", ContentType.create("application/yaml")); assertEquals("value", restHighLevelClient.parseEntity(yamlEntity, entityParser)); HttpEntity smileEntity = createBinaryEntity(SmileXContent.contentBuilder(), ContentType.create("application/smile")); assertEquals("value", restHighLevelClient.parseEntity(smileEntity, entityParser)); @@ -275,7 +275,7 @@ private static HttpEntity createBinaryEntity(XContentBuilder xContentBuilder, Co builder.startObject(); builder.field("field", "value"); builder.endObject(); - return new ByteArrayEntity(BytesReference.bytes(builder).toBytesRef().bytes, contentType); + return new 
NByteArrayEntity(BytesReference.bytes(builder).toBytesRef().bytes, contentType); } } @@ -301,7 +301,7 @@ public void testParseResponseException() throws IOException { { RestStatus restStatus = randomFrom(RestStatus.values()); HttpResponse httpResponse = new BasicHttpResponse(newStatusLine(restStatus)); - httpResponse.setEntity(new StringEntity("{\"error\":\"test error message\",\"status\":" + restStatus.getStatus() + "}", + httpResponse.setEntity(new NStringEntity("{\"error\":\"test error message\",\"status\":" + restStatus.getStatus() + "}", ContentType.APPLICATION_JSON)); Response response = new Response(REQUEST_LINE, new HttpHost("localhost", 9200), httpResponse); ResponseException responseException = new ResponseException(response); @@ -313,7 +313,7 @@ public void testParseResponseException() throws IOException { { RestStatus restStatus = randomFrom(RestStatus.values()); HttpResponse httpResponse = new BasicHttpResponse(newStatusLine(restStatus)); - httpResponse.setEntity(new StringEntity("{\"error\":", ContentType.APPLICATION_JSON)); + httpResponse.setEntity(new NStringEntity("{\"error\":", ContentType.APPLICATION_JSON)); Response response = new Response(REQUEST_LINE, new HttpHost("localhost", 9200), httpResponse); ResponseException responseException = new ResponseException(response); ElasticsearchException elasticsearchException = restHighLevelClient.parseResponseException(responseException); @@ -325,7 +325,7 @@ public void testParseResponseException() throws IOException { { RestStatus restStatus = randomFrom(RestStatus.values()); HttpResponse httpResponse = new BasicHttpResponse(newStatusLine(restStatus)); - httpResponse.setEntity(new StringEntity("{\"status\":" + restStatus.getStatus() + "}", ContentType.APPLICATION_JSON)); + httpResponse.setEntity(new NStringEntity("{\"status\":" + restStatus.getStatus() + "}", ContentType.APPLICATION_JSON)); Response response = new Response(REQUEST_LINE, new HttpHost("localhost", 9200), httpResponse); ResponseException responseException = new ResponseException(response); ElasticsearchException elasticsearchException = restHighLevelClient.parseResponseException(responseException); @@ -377,7 +377,7 @@ public void testPerformRequestOnResponseExceptionWithEntity() throws IOException CheckedFunction requestConverter = request -> new Request(HttpGet.METHOD_NAME, "/"); RestStatus restStatus = randomFrom(RestStatus.values()); HttpResponse httpResponse = new BasicHttpResponse(newStatusLine(restStatus)); - httpResponse.setEntity(new StringEntity("{\"error\":\"test error message\",\"status\":" + restStatus.getStatus() + "}", + httpResponse.setEntity(new NStringEntity("{\"error\":\"test error message\",\"status\":" + restStatus.getStatus() + "}", ContentType.APPLICATION_JSON)); Response mockResponse = new Response(REQUEST_LINE, new HttpHost("localhost", 9200), httpResponse); ResponseException responseException = new ResponseException(mockResponse); @@ -395,7 +395,7 @@ public void testPerformRequestOnResponseExceptionWithBrokenEntity() throws IOExc CheckedFunction requestConverter = request -> new Request(HttpGet.METHOD_NAME, "/"); RestStatus restStatus = randomFrom(RestStatus.values()); HttpResponse httpResponse = new BasicHttpResponse(newStatusLine(restStatus)); - httpResponse.setEntity(new StringEntity("{\"error\":", ContentType.APPLICATION_JSON)); + httpResponse.setEntity(new NStringEntity("{\"error\":", ContentType.APPLICATION_JSON)); Response mockResponse = new Response(REQUEST_LINE, new HttpHost("localhost", 9200), httpResponse); ResponseException 
responseException = new ResponseException(mockResponse); when(restClient.performRequest(any(Request.class))).thenThrow(responseException); @@ -413,7 +413,7 @@ public void testPerformRequestOnResponseExceptionWithBrokenEntity2() throws IOEx CheckedFunction requestConverter = request -> new Request(HttpGet.METHOD_NAME, "/"); RestStatus restStatus = randomFrom(RestStatus.values()); HttpResponse httpResponse = new BasicHttpResponse(newStatusLine(restStatus)); - httpResponse.setEntity(new StringEntity("{\"status\":" + restStatus.getStatus() + "}", ContentType.APPLICATION_JSON)); + httpResponse.setEntity(new NStringEntity("{\"status\":" + restStatus.getStatus() + "}", ContentType.APPLICATION_JSON)); Response mockResponse = new Response(REQUEST_LINE, new HttpHost("localhost", 9200), httpResponse); ResponseException responseException = new ResponseException(mockResponse); when(restClient.performRequest(any(Request.class))).thenThrow(responseException); @@ -457,7 +457,7 @@ public void testPerformRequestOnResponseExceptionWithIgnoresErrorValidBody() thr MainRequest mainRequest = new MainRequest(); CheckedFunction requestConverter = request -> new Request(HttpGet.METHOD_NAME, "/"); HttpResponse httpResponse = new BasicHttpResponse(newStatusLine(RestStatus.NOT_FOUND)); - httpResponse.setEntity(new StringEntity("{\"error\":\"test error message\",\"status\":404}", + httpResponse.setEntity(new NStringEntity("{\"error\":\"test error message\",\"status\":404}", ContentType.APPLICATION_JSON)); Response mockResponse = new Response(REQUEST_LINE, new HttpHost("localhost", 9200), httpResponse); ResponseException responseException = new ResponseException(mockResponse); @@ -527,7 +527,7 @@ public void testWrapResponseListenerOnResponseExceptionWithEntity() throws IOExc response -> response.getStatusLine().getStatusCode(), trackingActionListener, Collections.emptySet()); RestStatus restStatus = randomFrom(RestStatus.values()); HttpResponse httpResponse = new BasicHttpResponse(newStatusLine(restStatus)); - httpResponse.setEntity(new StringEntity("{\"error\":\"test error message\",\"status\":" + restStatus.getStatus() + "}", + httpResponse.setEntity(new NStringEntity("{\"error\":\"test error message\",\"status\":" + restStatus.getStatus() + "}", ContentType.APPLICATION_JSON)); Response response = new Response(REQUEST_LINE, new HttpHost("localhost", 9200), httpResponse); ResponseException responseException = new ResponseException(response); @@ -546,7 +546,7 @@ public void testWrapResponseListenerOnResponseExceptionWithBrokenEntity() throws response -> response.getStatusLine().getStatusCode(), trackingActionListener, Collections.emptySet()); RestStatus restStatus = randomFrom(RestStatus.values()); HttpResponse httpResponse = new BasicHttpResponse(newStatusLine(restStatus)); - httpResponse.setEntity(new StringEntity("{\"error\":", ContentType.APPLICATION_JSON)); + httpResponse.setEntity(new NStringEntity("{\"error\":", ContentType.APPLICATION_JSON)); Response response = new Response(REQUEST_LINE, new HttpHost("localhost", 9200), httpResponse); ResponseException responseException = new ResponseException(response); responseListener.onFailure(responseException); @@ -563,7 +563,7 @@ public void testWrapResponseListenerOnResponseExceptionWithBrokenEntity() throws response -> response.getStatusLine().getStatusCode(), trackingActionListener, Collections.emptySet()); RestStatus restStatus = randomFrom(RestStatus.values()); HttpResponse httpResponse = new BasicHttpResponse(newStatusLine(restStatus)); - 
httpResponse.setEntity(new StringEntity("{\"status\":" + restStatus.getStatus() + "}", ContentType.APPLICATION_JSON)); + httpResponse.setEntity(new NStringEntity("{\"status\":" + restStatus.getStatus() + "}", ContentType.APPLICATION_JSON)); Response response = new Response(REQUEST_LINE, new HttpHost("localhost", 9200), httpResponse); ResponseException responseException = new ResponseException(response); responseListener.onFailure(responseException); @@ -613,7 +613,7 @@ public void testWrapResponseListenerOnResponseExceptionWithIgnoresErrorValidBody ResponseListener responseListener = restHighLevelClient.wrapResponseListener( response -> { throw new IllegalStateException(); }, trackingActionListener, Collections.singleton(404)); HttpResponse httpResponse = new BasicHttpResponse(newStatusLine(RestStatus.NOT_FOUND)); - httpResponse.setEntity(new StringEntity("{\"error\":\"test error message\",\"status\":404}", + httpResponse.setEntity(new NStringEntity("{\"error\":\"test error message\",\"status\":404}", ContentType.APPLICATION_JSON)); Response response = new Response(REQUEST_LINE, new HttpHost("localhost", 9200), httpResponse); ResponseException responseException = new ResponseException(response); @@ -644,7 +644,7 @@ public void testDefaultNamedXContents() { public void testProvidedNamedXContents() { List namedXContents = RestHighLevelClient.getProvidedNamedXContents(); - assertEquals(17, namedXContents.size()); + assertEquals(18, namedXContents.size()); Map, Integer> categories = new HashMap<>(); List names = new ArrayList<>(); for (NamedXContentRegistry.Entry namedXContent : namedXContents) { @@ -668,13 +668,14 @@ public void testProvidedNamedXContents() { assertTrue(names.contains(MeanReciprocalRank.NAME)); assertTrue(names.contains(DiscountedCumulativeGain.NAME)); assertTrue(names.contains(ExpectedReciprocalRank.NAME)); - assertEquals(Integer.valueOf(6), categories.get(LifecycleAction.class)); + assertEquals(Integer.valueOf(7), categories.get(LifecycleAction.class)); assertTrue(names.contains(AllocateAction.NAME)); assertTrue(names.contains(DeleteAction.NAME)); assertTrue(names.contains(ForceMergeAction.NAME)); assertTrue(names.contains(ReadOnlyAction.NAME)); assertTrue(names.contains(RolloverAction.NAME)); assertTrue(names.contains(ShrinkAction.NAME)); + assertTrue(names.contains(FreezeAction.NAME)); } public void testApiNamingConventions() throws Exception { diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/RollupIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/RollupIT.java index 429b36d1d10f2..e57493acdf302 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/RollupIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/RollupIT.java @@ -114,7 +114,7 @@ public int indexDocs() throws Exception { for (int second = 0; second < 60; second = second + 10) { final int value = randomIntBetween(0, 100); - final IndexRequest indexRequest = new IndexRequest("docs", "doc"); + final IndexRequest indexRequest = new IndexRequest("docs"); indexRequest.source(jsonBuilder() .startObject() .field("value", value) @@ -293,7 +293,7 @@ public void testGetRollupCaps() throws Exception { for (int second = 0; second < 60; second = second + 10) { final int value = randomIntBetween(0, 100); - final IndexRequest indexRequest = new IndexRequest("docs", "doc"); + final IndexRequest indexRequest = new IndexRequest("docs"); indexRequest.source(jsonBuilder() .startObject() .field("value", value) @@ -405,7 +405,7 @@ public void 
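The `RollupIT` hunks here and below replace the two-argument `new IndexRequest(index, type)` with the typeless single-argument form. A hedged sketch of what the typeless request looks like (the field name and value are illustrative):

[source,java]
--------------------------------------------------
import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.common.xcontent.XContentType;

public class TypelessIndexRequest {
    // Typeless form: only the index name; the server files the document
    // under the implicit "_doc" type.
    static IndexRequest docsRequest(int value) {
        // Before this change: new IndexRequest("docs", "doc")
        return new IndexRequest("docs")
                .source(XContentType.JSON, "value", value);
    }
}
--------------------------------------------------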
testGetRollupIndexCaps() throws Exception { for (int second = 0; second < 60; second = second + 10) { final int value = randomIntBetween(0, 100); - final IndexRequest indexRequest = new IndexRequest("docs", "doc"); + final IndexRequest indexRequest = new IndexRequest("docs"); indexRequest.source(jsonBuilder() .startObject() .field("value", value) diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/SearchIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/SearchIT.java index 26b5b286e8945..fad42d3c44c45 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/SearchIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/SearchIT.java @@ -66,6 +66,9 @@ import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregationBuilder; import org.elasticsearch.search.aggregations.matrix.stats.MatrixStats; import org.elasticsearch.search.aggregations.matrix.stats.MatrixStatsAggregationBuilder; +import org.elasticsearch.search.aggregations.metrics.WeightedAvg; +import org.elasticsearch.search.aggregations.metrics.WeightedAvgAggregationBuilder; +import org.elasticsearch.search.aggregations.support.MultiValuesSourceFieldConfig; import org.elasticsearch.search.aggregations.support.ValueType; import org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.search.fetch.subphase.FetchSourceContext; @@ -371,6 +374,42 @@ public void testSearchWithTermsAndRangeAgg() throws IOException { } } + public void testSearchWithTermsAndWeightedAvg() throws IOException { + SearchRequest searchRequest = new SearchRequest("index"); + SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder(); + TermsAggregationBuilder agg = new TermsAggregationBuilder("agg1", ValueType.STRING).field("type.keyword"); + agg.subAggregation(new WeightedAvgAggregationBuilder("subagg") + .value(new MultiValuesSourceFieldConfig.Builder().setFieldName("num").build()) + .weight(new MultiValuesSourceFieldConfig.Builder().setFieldName("num2").build()) + ); + searchSourceBuilder.aggregation(agg); + searchSourceBuilder.size(0); + searchRequest.source(searchSourceBuilder); + SearchResponse searchResponse = execute(searchRequest, highLevelClient()::search, highLevelClient()::searchAsync); + assertSearchHeader(searchResponse); + assertNull(searchResponse.getSuggest()); + assertEquals(Collections.emptyMap(), searchResponse.getProfileResults()); + assertEquals(0, searchResponse.getHits().getHits().length); + assertEquals(Float.NaN, searchResponse.getHits().getMaxScore(), 0f); + Terms termsAgg = searchResponse.getAggregations().get("agg1"); + assertEquals("agg1", termsAgg.getName()); + assertEquals(2, termsAgg.getBuckets().size()); + Terms.Bucket type1 = termsAgg.getBucketByKey("type1"); + assertEquals(3, type1.getDocCount()); + assertEquals(1, type1.getAggregations().asList().size()); + { + WeightedAvg weightedAvg = type1.getAggregations().get("subagg"); + assertEquals(24.4, weightedAvg.getValue(), 0f); + } + Terms.Bucket type2 = termsAgg.getBucketByKey("type2"); + assertEquals(2, type2.getDocCount()); + assertEquals(1, type2.getAggregations().asList().size()); + { + WeightedAvg weightedAvg = type2.getAggregations().get("subagg"); + assertEquals(100, weightedAvg.getValue(), 0f); + } + } + public void testSearchWithMatrixStats() throws IOException { SearchRequest searchRequest = new SearchRequest("index"); SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder(); diff --git 
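The new `testSearchWithTermsAndWeightedAvg` exercises the `weighted_avg` metric, which computes `sum(value * weight) / sum(weight)` over each bucket and is read back through `Terms.Bucket.getAggregations().get("subagg")` as the assertions above do. The builder wiring, condensed into one method:

[source,java]
--------------------------------------------------
import org.elasticsearch.search.aggregations.metrics.WeightedAvgAggregationBuilder;
import org.elasticsearch.search.aggregations.support.MultiValuesSourceFieldConfig;

public class WeightedAvgSketch {
    // One weighted_avg metric: "num" supplies the values, "num2" the weights;
    // the server computes sum(num * num2) / sum(num2) per bucket.
    static WeightedAvgAggregationBuilder weightedAvgOfNumByNum2() {
        return new WeightedAvgAggregationBuilder("subagg")
                .value(new MultiValuesSourceFieldConfig.Builder().setFieldName("num").build())
                .weight(new MultiValuesSourceFieldConfig.Builder().setFieldName("num2").build());
    }
}
--------------------------------------------------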
a/client/rest-high-level/src/test/java/org/elasticsearch/client/TasksIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/TasksIT.java index 28b909df5d420..5dc168eadfe5e 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/TasksIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/TasksIT.java @@ -72,7 +72,7 @@ public void testListTasks() throws IOException { assertTrue("List tasks were not found", listTasksFound); } - public void testGetValidTask() throws IOException { + public void testGetValidTask() throws Exception { // Run a Reindex to create a task @@ -112,7 +112,10 @@ public void testGetValidTask() throws IOException { TaskInfo info = taskResponse.getTaskInfo(); assertTrue(info.isCancellable()); assertEquals("reindex from [source1] to [dest][_doc]", info.getDescription()); - assertEquals("indices:data/write/reindex", info.getAction()); + assertEquals("indices:data/write/reindex", info.getAction()); + if (taskResponse.isCompleted() == false) { + assertBusy(ReindexIT.checkCompletionStatus(client(), taskId.toString())); + } } public void testGetInvalidTask() throws IOException { diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/ccr/CcrStatsResponseTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/ccr/CcrStatsResponseTests.java index b818b7ae4219c..cb8072f6bafb3 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/ccr/CcrStatsResponseTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/ccr/CcrStatsResponseTests.java @@ -51,7 +51,7 @@ public void testFromXContent() throws IOException { CcrStatsResponseTests::createTestInstance, CcrStatsResponseTests::toXContent, CcrStatsResponse::fromXContent) - .supportsUnknownFields(false) + .supportsUnknownFields(true) .assertEqualsConsumer(CcrStatsResponseTests::assertEqualInstances) .assertToXContentEquivalence(false) .test(); diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/ccr/FollowStatsResponseTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/ccr/FollowStatsResponseTests.java index c8d15bc508503..5ec3cb4edcf07 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/ccr/FollowStatsResponseTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/ccr/FollowStatsResponseTests.java @@ -48,7 +48,7 @@ public void testFromXContent() throws IOException { FollowStatsResponseTests::createTestInstance, FollowStatsResponseTests::toXContent, FollowStatsResponse::fromXContent) - .supportsUnknownFields(false) + .supportsUnknownFields(true) .assertEqualsConsumer(FollowStatsResponseTests::assertEqualInstances) .assertToXContentEquivalence(false) .test(); diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/ccr/GetAutoFollowPatternResponseTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/ccr/GetAutoFollowPatternResponseTests.java index b4a37286b4ace..f6f0f1747e2a2 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/ccr/GetAutoFollowPatternResponseTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/ccr/GetAutoFollowPatternResponseTests.java @@ -43,7 +43,7 @@ public void testFromXContent() throws IOException { this::createTestInstance, GetAutoFollowPatternResponseTests::toXContent, GetAutoFollowPatternResponse::fromXContent) - .supportsUnknownFields(false) + .supportsUnknownFields(true) .test(); } diff --git 
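`testGetValidTask` above now copes with the reindex task still being in flight by polling with `assertBusy`, which re-runs an assertion until it stops throwing or a timeout expires. A simplified, self-contained sketch of that idiom, not the actual `ESTestCase` implementation:

[source,java]
--------------------------------------------------
public class AssertBusySketch {
    // Rerun the assertion until it passes or the deadline is reached,
    // backing off between attempts; surface the last failure on timeout.
    static void assertBusy(Runnable assertion, long timeoutMillis) throws InterruptedException {
        long deadline = System.currentTimeMillis() + timeoutMillis;
        long sleep = 1;
        while (true) {
            try {
                assertion.run();
                return; // assertion passed
            } catch (AssertionError e) {
                if (System.currentTimeMillis() >= deadline) {
                    throw e;
                }
            }
            Thread.sleep(sleep);
            sleep = Math.min(sleep * 2, 500);
        }
    }
}
--------------------------------------------------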
a/client/rest-high-level/src/test/java/org/elasticsearch/client/ccr/PutFollowRequestTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/ccr/PutFollowRequestTests.java index 0814278a0cf59..35353ce4a96f9 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/ccr/PutFollowRequestTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/ccr/PutFollowRequestTests.java @@ -31,7 +31,7 @@ public class PutFollowRequestTests extends AbstractXContentTestCase { private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>("test_parser", - (args) -> new PutFollowRequest((String) args[0], (String) args[1], (String) args[2])); + true, (args) -> new PutFollowRequest((String) args[0], (String) args[1], (String) args[2])); static { PARSER.declareString(ConstructingObjectParser.constructorArg(), PutFollowRequest.REMOTE_CLUSTER_FIELD); diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/ccr/PutFollowResponseTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/ccr/PutFollowResponseTests.java index 48eb15717c599..00bcf535f08af 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/ccr/PutFollowResponseTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/ccr/PutFollowResponseTests.java @@ -33,7 +33,7 @@ public void testFromXContent() throws IOException { this::createTestInstance, PutFollowResponseTests::toXContent, PutFollowResponse::fromXContent) - .supportsUnknownFields(false) + .supportsUnknownFields(true) .test(); } diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/CRUDDocumentationIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/CRUDDocumentationIT.java index 6d0ceba00b557..894b569f614f3 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/CRUDDocumentationIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/CRUDDocumentationIT.java @@ -19,6 +19,7 @@ package org.elasticsearch.client.documentation; +import org.apache.http.HttpHost; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.DocWriteRequest; @@ -700,7 +701,7 @@ public void testBulk() throws Exception { request.add(new IndexRequest("posts").id("4") // <3> .source(XContentType.JSON,"field", "baz")); // end::bulk-request-with-mixed-operations - BulkResponse bulkResponse = client.bulk(request, RequestOptions.DEFAULT); + BulkResponse bulkResponse = client.bulk(request, RequestOptions.DEFAULT); assertSame(RestStatus.OK, bulkResponse.status()); assertFalse(bulkResponse.hasFailures()); @@ -757,7 +758,7 @@ public void testBulk() throws Exception { // end::bulk-request-routing // tag::bulk-request-index-type - BulkRequest defaulted = new BulkRequest("posts","_doc"); // <1> + BulkRequest defaulted = new BulkRequest("posts"); // <1> // end::bulk-request-index-type // tag::bulk-execute-listener @@ -823,9 +824,6 @@ public void testReindex() throws Exception { // tag::reindex-request-conflicts request.setConflicts("proceed"); // <1> // end::reindex-request-conflicts - // tag::reindex-request-query - request.setSourceQuery(new TermQueryBuilder("user", "kimchy")); // <1> - // end::reindex-request-query // tag::reindex-request-size request.setSize(10); // <1> // end::reindex-request-size @@ -846,27 +844,29 @@ public void testReindex() throws Exception { "if 
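The `true` argument added to the `ConstructingObjectParser` constructor above enables lenient parsing, the same property the `supportsUnknownFields(true)` flips assert: unknown fields in a response are skipped instead of failing the parse, so older clients tolerate fields added by newer servers. A small sketch with a hypothetical `Greeting` object:

[source,java]
--------------------------------------------------
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;

public class LenientParserExample {
    static final class Greeting {
        final String name;
        Greeting(String name) { this.name = name; }
    }

    // The boolean second argument turns on ignoreUnknownFields: a payload such as
    // {"name":"es","brand_new_field":1} parses cleanly instead of throwing.
    static final ConstructingObjectParser<Greeting, Void> PARSER =
            new ConstructingObjectParser<>("greeting", true, args -> new Greeting((String) args[0]));

    static {
        PARSER.declareString(ConstructingObjectParser.constructorArg(), new ParseField("name"));
    }
}
--------------------------------------------------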
(ctx._source.user == 'kimchy') {ctx._source.likes++;}", Collections.emptyMap())); // <1> // end::reindex-request-script + HttpHost host = getClusterHosts().get(0); + Integer remotePort = host.getPort(); + String remoteHost = host.getHostName(); + String user = "test_user"; + String password = "test-password"; + // tag::reindex-request-remote request.setRemoteInfo( new RemoteInfo( - "https", "localhost", 9002, null, + "http", remoteHost, remotePort, null, new BytesArray(new MatchAllQueryBuilder().toString()), - "user", "pass", Collections.emptyMap(), + user, password, Collections.emptyMap(), new TimeValue(100, TimeUnit.MILLISECONDS), new TimeValue(100, TimeUnit.SECONDS) ) ); // <1> // end::reindex-request-remote - request.setRemoteInfo(null); // Remove it for tests // tag::reindex-request-timeout request.setTimeout(TimeValue.timeValueMinutes(2)); // <1> // end::reindex-request-timeout // tag::reindex-request-refresh request.setRefresh(true); // <1> // end::reindex-request-refresh - // tag::reindex-request-slices - request.setSlices(2); // <1> - // end::reindex-request-slices // tag::reindex-request-scroll request.setScroll(TimeValue.timeValueMinutes(10)); // <1> // end::reindex-request-scroll @@ -904,6 +904,14 @@ public void testReindex() throws Exception { request.setSourceIndices("source1"); request.setDestIndex("dest"); + // These cannot be set when a remote is configured, so they are set here instead for the docs + // tag::reindex-request-query + request.setSourceQuery(new TermQueryBuilder("user", "kimchy")); // <1> + // end::reindex-request-query + // tag::reindex-request-slices + request.setSlices(2); // <1> + // end::reindex-request-slices + ActionListener listener; // tag::reindex-execute-listener listener = new ActionListener() { diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/IndicesClientDocumentationIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/IndicesClientDocumentationIT.java index 3ff4466649ff2..8f9d8a069fd48 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/IndicesClientDocumentationIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/IndicesClientDocumentationIT.java @@ -70,15 +70,15 @@ import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.action.support.master.AcknowledgedResponse; import org.elasticsearch.client.ESRestHighLevelClientTestCase; -import org.elasticsearch.client.indices.FreezeIndexRequest; import org.elasticsearch.client.GetAliasesResponse; import org.elasticsearch.client.RequestOptions; import org.elasticsearch.client.RestHighLevelClient; import org.elasticsearch.client.SyncedFlushResponse; +import org.elasticsearch.client.core.ShardsAcknowledgedResponse; +import org.elasticsearch.client.indices.FreezeIndexRequest; import org.elasticsearch.client.indices.GetIndexTemplatesRequest; import org.elasticsearch.client.indices.IndexTemplatesExistRequest; import org.elasticsearch.client.indices.UnfreezeIndexRequest; -import org.elasticsearch.client.core.ShardsAcknowledgedResponse; import org.elasticsearch.cluster.metadata.AliasMetaData; import org.elasticsearch.cluster.metadata.IndexTemplateMetaData; import org.elasticsearch.cluster.metadata.MappingMetaData; @@ -1249,7 +1249,7 @@ public void testGetIndex() throws Exception { Settings settings = Settings.builder().put("number_of_shards", 3).build(); String mappings = "{\"properties\":{\"field-1\":{\"type\":\"integer\"}}}"; CreateIndexResponse 
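For reference, the remote-reindex wiring the documentation test now builds, condensed into one method; when `RemoteInfo` is set the search executes on the remote cluster, which is why the source query and slicing are configured separately above. Host, credentials, and timeouts are placeholders:

[source,java]
--------------------------------------------------
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.index.query.MatchAllQueryBuilder;
import org.elasticsearch.index.reindex.ReindexRequest;
import org.elasticsearch.index.reindex.RemoteInfo;

import java.util.Collections;
import java.util.concurrent.TimeUnit;

public class RemoteReindexSketch {
    // Remote reindex pulls from another cluster over HTTP; the match_all query below
    // is serialized and shipped to the remote side as the search body.
    static ReindexRequest remoteReindex(String host, int port, String user, String password) {
        ReindexRequest request = new ReindexRequest();
        request.setSourceIndices("source1");
        request.setDestIndex("dest");
        request.setRemoteInfo(new RemoteInfo(
                "http", host, port, null,
                new BytesArray(new MatchAllQueryBuilder().toString()),
                user, password, Collections.emptyMap(),
                new TimeValue(100, TimeUnit.MILLISECONDS),
                new TimeValue(100, TimeUnit.SECONDS)));
        return request;
    }
}
--------------------------------------------------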
createIndexResponse = client.indices().create( - new CreateIndexRequest("index", settings).mapping("doc", mappings, XContentType.JSON), + new CreateIndexRequest("index", settings).mapping("_doc", mappings, XContentType.JSON), RequestOptions.DEFAULT); assertTrue(createIndexResponse.isAcknowledged()); } @@ -1272,7 +1272,7 @@ public void testGetIndex() throws Exception { // tag::get-index-response ImmutableOpenMap indexMappings = getIndexResponse.getMappings().get("index"); // <1> - Map indexTypeMappings = indexMappings.get("doc").getSourceAsMap(); // <2> + Map indexTypeMappings = indexMappings.get("_doc").getSourceAsMap(); // <2> List indexAliases = getIndexResponse.getAliases().get("index"); // <3> String numberOfShardsString = getIndexResponse.getSetting("index", "index.number_of_shards"); // <4> Settings indexSettings = getIndexResponse.getSettings().get("index"); // <5> diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/MlClientDocumentationIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/MlClientDocumentationIT.java index 5a4df9ecff482..eb74421c7a1c6 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/MlClientDocumentationIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/MlClientDocumentationIT.java @@ -872,7 +872,6 @@ public void testPreviewDatafeed() throws Exception { createIndexRequest.mapping("_doc", "timestamp", "type=date", "total", "type=long"); highLevelClient().indices().create(createIndexRequest, RequestOptions.DEFAULT); DatafeedConfig datafeed = DatafeedConfig.builder(datafeedId, job.getId()) - .setTypes(Arrays.asList("_doc")) .setIndices(indexName) .build(); client.machineLearning().putDatafeed(new PutDatafeedRequest(datafeed), RequestOptions.DEFAULT); @@ -932,7 +931,6 @@ public void testStartDatafeed() throws Exception { createIndexRequest.mapping("_doc", "timestamp", "type=date", "total", "type=long"); highLevelClient().indices().create(createIndexRequest, RequestOptions.DEFAULT); DatafeedConfig datafeed = DatafeedConfig.builder(datafeedId, job.getId()) - .setTypes(Arrays.asList("_doc")) .setIndices(indexName) .build(); client.machineLearning().putDatafeed(new PutDatafeedRequest(datafeed), RequestOptions.DEFAULT); @@ -1053,14 +1051,12 @@ public void testGetDatafeedStats() throws Exception { createIndexRequest.mapping("_doc", "timestamp", "type=date", "total", "type=long"); highLevelClient().indices().create(createIndexRequest, RequestOptions.DEFAULT); DatafeedConfig datafeed = DatafeedConfig.builder(datafeedId1, job.getId()) - .setTypes(Arrays.asList("_doc")) .setIndices(indexName) .build(); client.machineLearning().putDatafeed(new PutDatafeedRequest(datafeed), RequestOptions.DEFAULT); String datafeedId2 = secondJob.getId() + "-feed"; DatafeedConfig secondDatafeed = DatafeedConfig.builder(datafeedId2, secondJob.getId()) - .setTypes(Arrays.asList("_doc")) .setIndices(indexName) .build(); client.machineLearning().putDatafeed(new PutDatafeedRequest(secondDatafeed), RequestOptions.DEFAULT); diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/QueryDSLDocumentationTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/QueryDSLDocumentationTests.java index 789d237c5a3bc..cfe9e98f643e6 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/QueryDSLDocumentationTests.java +++ 
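With the `setTypes` calls gone from the ML documentation tests above, a datafeed is defined purely by its indices. A minimal sketch of the post-change builder usage (ids and index name are illustrative):

[source,java]
--------------------------------------------------
import org.elasticsearch.client.ml.datafeed.DatafeedConfig;

public class DatafeedWithoutTypes {
    // Datafeed setup after the change: indices only, no mapping types.
    static DatafeedConfig datafeed(String datafeedId, String jobId, String indexName) {
        return DatafeedConfig.builder(datafeedId, jobId)
                .setIndices(indexName)
                .build();
    }
}
--------------------------------------------------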
b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/QueryDSLDocumentationTests.java @@ -207,11 +207,10 @@ public void testGeoShape() throws IOException { // Using pre-indexed shapes GeoShapeQueryBuilder qb = geoShapeQuery( "pin.location", // <1> - "DEU", // <2> - "countries"); // <3> - qb.relation(ShapeRelation.WITHIN) // <4> - .indexedShapeIndex("shapes") // <5> - .indexedShapePath("location"); // <6> + "DEU"); // <2> + qb.relation(ShapeRelation.WITHIN) // <3> + .indexedShapeIndex("shapes") // <4> + .indexedShapePath("location"); // <5> // end::indexed_geo_shape } } @@ -236,9 +235,6 @@ public void testHasParent() { public void testIds() { // tag::ids - idsQuery("my_type", "type2") - .addIds("1", "4", "100"); - idsQuery() // <1> .addIds("1", "4", "100"); // end::ids diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/RollupDocumentationIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/RollupDocumentationIT.java index 2163b188ee0e2..a1fcdbb7bfc83 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/RollupDocumentationIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/RollupDocumentationIT.java @@ -89,7 +89,7 @@ public void setUpDocs() throws IOException { final BulkRequest bulkRequest = new BulkRequest(); bulkRequest.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE); for (int i = 0; i < 50; i++) { - final IndexRequest indexRequest = new IndexRequest("docs", "doc"); + final IndexRequest indexRequest = new IndexRequest("docs"); indexRequest.source(jsonBuilder() .startObject() .field("timestamp", String.format(Locale.ROOT, "2018-01-01T00:%02d:00Z", i)) @@ -103,7 +103,7 @@ public void setUpDocs() throws IOException { .endObject()); bulkRequest.add(indexRequest); } - BulkResponse bulkResponse = highLevelClient().bulk(bulkRequest, RequestOptions.DEFAULT); + BulkResponse bulkResponse = highLevelClient().bulk(bulkRequest, RequestOptions.DEFAULT); assertEquals(RestStatus.OK, bulkResponse.status()); assertFalse(bulkResponse.hasFailures()); diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/indexlifecycle/FreezeActionTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/indexlifecycle/FreezeActionTests.java new file mode 100644 index 0000000000000..3fc40ee137b53 --- /dev/null +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/indexlifecycle/FreezeActionTests.java @@ -0,0 +1,40 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
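The query-DSL documentation test drops the type arguments from `idsQuery` and from the pre-indexed-shape form of `geoShapeQuery`. The typeless forms, sketched (field names, ids, and the shape index mirror the snippets above):

[source,java]
--------------------------------------------------
import org.elasticsearch.common.geo.ShapeRelation;
import org.elasticsearch.index.query.GeoShapeQueryBuilder;
import org.elasticsearch.index.query.IdsQueryBuilder;

import static org.elasticsearch.index.query.QueryBuilders.geoShapeQuery;
import static org.elasticsearch.index.query.QueryBuilders.idsQuery;

public class TypelessQueries {
    // ids query: document ids only, no type list.
    static IdsQueryBuilder byIds() {
        return idsQuery().addIds("1", "4", "100");
    }

    // Pre-indexed shape lookup: field name and shape id; the shape type argument is gone.
    static GeoShapeQueryBuilder byIndexedShape() {
        return geoShapeQuery("pin.location", "DEU")
                .relation(ShapeRelation.WITHIN)
                .indexedShapeIndex("shapes")
                .indexedShapePath("location");
    }
}
--------------------------------------------------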
+ */ +package org.elasticsearch.client.indexlifecycle; + +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.test.AbstractXContentTestCase; + +public class FreezeActionTests extends AbstractXContentTestCase { + + @Override + protected FreezeAction createTestInstance() { + return new FreezeAction(); + } + + @Override + protected FreezeAction doParseInstance(XContentParser parser) { + return FreezeAction.parse(parser); + } + + @Override + protected boolean supportsUnknownFields() { + return false; + } +} diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/indexlifecycle/GetLifecyclePolicyResponseTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/indexlifecycle/GetLifecyclePolicyResponseTests.java index 89dfbb8635332..d703d90d95ed9 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/indexlifecycle/GetLifecyclePolicyResponseTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/indexlifecycle/GetLifecyclePolicyResponseTests.java @@ -66,7 +66,8 @@ protected NamedXContentRegistry xContentRegistry() { new NamedXContentRegistry.Entry(LifecycleAction.class, new ParseField(ForceMergeAction.NAME), ForceMergeAction::parse), new NamedXContentRegistry.Entry(LifecycleAction.class, new ParseField(ReadOnlyAction.NAME), ReadOnlyAction::parse), new NamedXContentRegistry.Entry(LifecycleAction.class, new ParseField(RolloverAction.NAME), RolloverAction::parse), - new NamedXContentRegistry.Entry(LifecycleAction.class, new ParseField(ShrinkAction.NAME), ShrinkAction::parse) + new NamedXContentRegistry.Entry(LifecycleAction.class, new ParseField(ShrinkAction.NAME), ShrinkAction::parse), + new NamedXContentRegistry.Entry(LifecycleAction.class, new ParseField(FreezeAction.NAME), FreezeAction::parse) )); return new NamedXContentRegistry(entries); } diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/indexlifecycle/LifecyclePolicyMetadataTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/indexlifecycle/LifecyclePolicyMetadataTests.java index 548ba366b640e..93fb69c2ab47d 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/indexlifecycle/LifecyclePolicyMetadataTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/indexlifecycle/LifecyclePolicyMetadataTests.java @@ -62,7 +62,8 @@ protected NamedXContentRegistry xContentRegistry() { new NamedXContentRegistry.Entry(LifecycleAction.class, new ParseField(ForceMergeAction.NAME), ForceMergeAction::parse), new NamedXContentRegistry.Entry(LifecycleAction.class, new ParseField(ReadOnlyAction.NAME), ReadOnlyAction::parse), new NamedXContentRegistry.Entry(LifecycleAction.class, new ParseField(RolloverAction.NAME), RolloverAction::parse), - new NamedXContentRegistry.Entry(LifecycleAction.class, new ParseField(ShrinkAction.NAME), ShrinkAction::parse) + new NamedXContentRegistry.Entry(LifecycleAction.class, new ParseField(ShrinkAction.NAME), ShrinkAction::parse), + new NamedXContentRegistry.Entry(LifecycleAction.class, new ParseField(FreezeAction.NAME), FreezeAction::parse) )); return new NamedXContentRegistry(entries); } diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/indexlifecycle/LifecyclePolicyTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/indexlifecycle/LifecyclePolicyTests.java index 024cb13d8df37..97c98919d8a88 100644 --- 
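Each registry in these tests gains one `NamedXContentRegistry.Entry` so that `"freeze"` inside a lifecycle phase resolves to `FreezeAction`; the bumped counts in `RestHighLevelClientTests` (18 provided entries, 7 `LifecycleAction` parsers) track the same addition. Condensed to its essence:

[source,java]
--------------------------------------------------
import org.elasticsearch.client.indexlifecycle.FreezeAction;
import org.elasticsearch.client.indexlifecycle.LifecycleAction;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;

import java.util.Collections;

public class FreezeXContentRegistration {
    // Category + on-wire name + parser: with this entry registered, a phase body
    // containing {"freeze": {}} parses into a FreezeAction instance.
    static NamedXContentRegistry.Entry freezeEntry() {
        return new NamedXContentRegistry.Entry(
                LifecycleAction.class, new ParseField(FreezeAction.NAME), FreezeAction::parse);
    }

    static NamedXContentRegistry registryWithFreeze() {
        return new NamedXContentRegistry(Collections.singletonList(freezeEntry()));
    }
}
--------------------------------------------------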
a/client/rest-high-level/src/test/java/org/elasticsearch/client/indexlifecycle/LifecyclePolicyTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/indexlifecycle/LifecyclePolicyTests.java @@ -42,7 +42,7 @@ public class LifecyclePolicyTests extends AbstractXContentTestCase VALID_HOT_ACTIONS = Sets.newHashSet(RolloverAction.NAME); private static final Set VALID_WARM_ACTIONS = Sets.newHashSet(AllocateAction.NAME, ForceMergeAction.NAME, ReadOnlyAction.NAME, ShrinkAction.NAME); - private static final Set VALID_COLD_ACTIONS = Sets.newHashSet(AllocateAction.NAME); + private static final Set VALID_COLD_ACTIONS = Sets.newHashSet(AllocateAction.NAME, FreezeAction.NAME); private static final Set VALID_DELETE_ACTIONS = Sets.newHashSet(DeleteAction.NAME); private String lifecycleName; @@ -66,7 +66,8 @@ protected NamedXContentRegistry xContentRegistry() { new NamedXContentRegistry.Entry(LifecycleAction.class, new ParseField(ForceMergeAction.NAME), ForceMergeAction::parse), new NamedXContentRegistry.Entry(LifecycleAction.class, new ParseField(ReadOnlyAction.NAME), ReadOnlyAction::parse), new NamedXContentRegistry.Entry(LifecycleAction.class, new ParseField(RolloverAction.NAME), RolloverAction::parse), - new NamedXContentRegistry.Entry(LifecycleAction.class, new ParseField(ShrinkAction.NAME), ShrinkAction::parse) + new NamedXContentRegistry.Entry(LifecycleAction.class, new ParseField(ShrinkAction.NAME), ShrinkAction::parse), + new NamedXContentRegistry.Entry(LifecycleAction.class, new ParseField(FreezeAction.NAME), FreezeAction::parse) )); return new NamedXContentRegistry(entries); } @@ -207,6 +208,8 @@ public static LifecyclePolicy createRandomPolicy(String lifecycleName) { return RolloverActionTests.randomInstance(); case ShrinkAction.NAME: return ShrinkActionTests.randomInstance(); + case FreezeAction.NAME: + return new FreezeAction(); default: throw new IllegalArgumentException("invalid action [" + action + "]"); }}; @@ -236,6 +239,8 @@ private LifecycleAction getTestAction(String actionName) { return RolloverActionTests.randomInstance(); case ShrinkAction.NAME: return ShrinkActionTests.randomInstance(); + case FreezeAction.NAME: + return new FreezeAction(); default: throw new IllegalArgumentException("unsupported phase action [" + actionName + "]"); } diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/datafeed/DatafeedConfigTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/datafeed/DatafeedConfigTests.java index c7fb1c80388be..0b0ed52d0ff67 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/datafeed/DatafeedConfigTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/datafeed/DatafeedConfigTests.java @@ -51,7 +51,6 @@ public static DatafeedConfig.Builder createRandomBuilder() { long bucketSpanMillis = 3600000; DatafeedConfig.Builder builder = constructBuilder(); builder.setIndices(randomStringList(1, 10)); - builder.setTypes(randomStringList(0, 10)); if (randomBoolean()) { try { builder.setQuery(QueryBuilders.termQuery(randomAlphaOfLength(10), randomAlphaOfLength(10))); diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/datafeed/DatafeedUpdateTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/datafeed/DatafeedUpdateTests.java index 85a96b0a9e244..3b7ac657e3426 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/datafeed/DatafeedUpdateTests.java +++ 
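Adding `FreezeAction.NAME` to `VALID_COLD_ACTIONS` means a policy may now freeze an index in its cold phase. A sketch of such a policy, assuming the `Phase(name, after, actions)` and `LifecyclePolicy(name, phases)` constructor shapes used elsewhere in these tests; the 30-day delay is illustrative:

[source,java]
--------------------------------------------------
import org.elasticsearch.client.indexlifecycle.FreezeAction;
import org.elasticsearch.client.indexlifecycle.LifecycleAction;
import org.elasticsearch.client.indexlifecycle.LifecyclePolicy;
import org.elasticsearch.client.indexlifecycle.Phase;
import org.elasticsearch.common.unit.TimeValue;

import java.util.Collections;
import java.util.Map;

public class ColdPhaseFreezePolicy {
    // A policy whose cold phase freezes the index 30 days in; freeze is now
    // one of the actions the cold phase accepts.
    static LifecyclePolicy coldFreezePolicy() {
        Map<String, LifecycleAction> actions =
                Collections.singletonMap(FreezeAction.NAME, new FreezeAction());
        Phase cold = new Phase("cold", TimeValue.timeValueDays(30), actions);
        return new LifecyclePolicy("freeze-policy", Collections.singletonMap("cold", cold));
    }
}
--------------------------------------------------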
b/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/datafeed/DatafeedUpdateTests.java @@ -46,9 +46,6 @@ public static DatafeedUpdate createRandom() { if (randomBoolean()) { builder.setIndices(DatafeedConfigTests.randomStringList(1, 10)); } - if (randomBoolean()) { - builder.setTypes(DatafeedConfigTests.randomStringList(1, 10)); - } if (randomBoolean()) { try { builder.setQuery(QueryBuilders.termQuery(randomAlphaOfLength(10), randomAlphaOfLength(10))); diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/security/InvalidateTokenResponseTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/security/InvalidateTokenResponseTests.java index cbefb6fba6b5f..b514a67edb98b 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/security/InvalidateTokenResponseTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/security/InvalidateTokenResponseTests.java @@ -41,7 +41,6 @@ public void testFromXContent() throws IOException { final int invalidatedTokens = randomInt(32); final int previouslyInvalidatedTokens = randomInt(32); builder.startObject() - .field("created", false) .field("invalidated_tokens", invalidatedTokens) .field("previously_invalidated_tokens", previouslyInvalidatedTokens) .field("error_count", 0) @@ -50,7 +49,6 @@ public void testFromXContent() throws IOException { try (XContentParser parser = createParser(xContentType.xContent(), xContent)) { final InvalidateTokenResponse response = InvalidateTokenResponse.fromXContent(parser); - assertThat(response.isCreated(), Matchers.equalTo(false)); assertThat(response.getInvalidatedTokens(), Matchers.equalTo(invalidatedTokens)); assertThat(response.getPreviouslyInvalidatedTokens(), Matchers.equalTo(previouslyInvalidatedTokens)); assertThat(response.getErrorsCount(), Matchers.equalTo(0)); @@ -64,7 +62,6 @@ public void testFromXContentWithErrors() throws IOException { final int invalidatedTokens = randomInt(32); final int previouslyInvalidatedTokens = randomInt(32); builder.startObject() - .field("created", false) .field("invalidated_tokens", invalidatedTokens) .field("previously_invalidated_tokens", previouslyInvalidatedTokens) .field("error_count", 0) @@ -82,7 +79,6 @@ public void testFromXContentWithErrors() throws IOException { try (XContentParser parser = createParser(xContentType.xContent(), xContent)) { final InvalidateTokenResponse response = InvalidateTokenResponse.fromXContent(parser); - assertThat(response.isCreated(), Matchers.equalTo(false)); assertThat(response.getInvalidatedTokens(), Matchers.equalTo(invalidatedTokens)); assertThat(response.getPreviouslyInvalidatedTokens(), Matchers.equalTo(previouslyInvalidatedTokens)); assertThat(response.getErrorsCount(), Matchers.equalTo(2)); diff --git a/client/rest/build.gradle b/client/rest/build.gradle index f07ff4240ba4b..a6d8eb8467dab 100644 --- a/client/rest/build.gradle +++ b/client/rest/build.gradle @@ -77,7 +77,16 @@ namingConventions { skipIntegTestInDisguise = true } -thirdPartyAudit.excludes = [ +testingConventions { + naming.clear() + naming { + Tests { + baseClass 'org.elasticsearch.client.RestClientTestCase' + } + } +} + +thirdPartyAudit.ignoreMissingClasses ( //commons-logging optional dependencies 'org.apache.avalon.framework.logger.Logger', 'org.apache.log.Hierarchy', @@ -89,4 +98,4 @@ thirdPartyAudit.excludes = [ //commons-logging provided dependencies 'javax.servlet.ServletContextEvent', 'javax.servlet.ServletContextListener' -] +) diff --git 
a/client/rest/src/test/java/org/elasticsearch/client/documentation/RestClientDocumentation.java b/client/rest/src/test/java/org/elasticsearch/client/documentation/RestClientDocumentation.java index 9191f5025581b..7eae17d83cf2b 100644 --- a/client/rest/src/test/java/org/elasticsearch/client/documentation/RestClientDocumentation.java +++ b/client/rest/src/test/java/org/elasticsearch/client/documentation/RestClientDocumentation.java @@ -91,7 +91,7 @@ public class RestClientDocumentation { // end::rest-client-options-singleton @SuppressWarnings("unused") - public void testUsage() throws IOException, InterruptedException { + public void usage() throws IOException, InterruptedException { //tag::rest-client-init RestClient restClient = RestClient.builder( @@ -291,7 +291,7 @@ public void onFailure(Exception exception) { } @SuppressWarnings("unused") - public void testCommonConfiguration() throws Exception { + public void commonConfiguration() throws Exception { { //tag::rest-client-config-timeouts RestClientBuilder builder = RestClient.builder( diff --git a/client/sniffer/build.gradle b/client/sniffer/build.gradle index 6ba69c5713c57..9f2dd73c5c8e7 100644 --- a/client/sniffer/build.gradle +++ b/client/sniffer/build.gradle @@ -78,13 +78,23 @@ namingConventions { skipIntegTestInDisguise = true } +testingConventions { + naming.clear() + naming { + Tests { + baseClass 'org.elasticsearch.client.RestClientTestCase' + } + } +} + + dependencyLicenses { dependencies = project.configurations.runtime.fileCollection { it.group.startsWith('org.elasticsearch') == false } } -thirdPartyAudit.excludes = [ +thirdPartyAudit.ignoreMissingClasses ( //commons-logging optional dependencies 'org.apache.avalon.framework.logger.Logger', 'org.apache.log.Hierarchy', @@ -96,4 +106,4 @@ thirdPartyAudit.excludes = [ //commons-logging provided dependencies 'javax.servlet.ServletContextEvent', 'javax.servlet.ServletContextListener' -] +) diff --git a/client/sniffer/src/test/java/org/elasticsearch/client/sniff/documentation/SnifferDocumentation.java b/client/sniffer/src/test/java/org/elasticsearch/client/sniff/documentation/SnifferDocumentation.java index 70d7373dfc9eb..24d0d404988ec 100644 --- a/client/sniffer/src/test/java/org/elasticsearch/client/sniff/documentation/SnifferDocumentation.java +++ b/client/sniffer/src/test/java/org/elasticsearch/client/sniff/documentation/SnifferDocumentation.java @@ -52,7 +52,7 @@ public class SnifferDocumentation { @SuppressWarnings("unused") - public void testUsage() throws IOException { + public void usage() throws IOException { { //tag::sniffer-init RestClient restClient = RestClient.builder( diff --git a/client/transport/build.gradle b/client/transport/build.gradle index 269a37105fb19..7516e5eb89cce 100644 --- a/client/transport/build.gradle +++ b/client/transport/build.gradle @@ -52,3 +52,12 @@ namingConventions { //we don't have integration tests skipIntegTestInDisguise = true } + +testingConventions { + naming.clear() + naming { + Tests { + baseClass 'com.carrotsearch.randomizedtesting.RandomizedTest' + } + } +} diff --git a/distribution/archives/integ-test-zip/src/test/java/org/elasticsearch/test/rest/WaitForRefreshAndCloseIT.java b/distribution/archives/integ-test-zip/src/test/java/org/elasticsearch/test/rest/WaitForRefreshAndCloseIT.java index 23fe042321726..52b1a8c52b58e 100644 --- a/distribution/archives/integ-test-zip/src/test/java/org/elasticsearch/test/rest/WaitForRefreshAndCloseIT.java +++ 
b/distribution/archives/integ-test-zip/src/test/java/org/elasticsearch/test/rest/WaitForRefreshAndCloseIT.java @@ -22,10 +22,10 @@ import org.apache.http.util.EntityUtils; import org.elasticsearch.action.ActionFuture; import org.elasticsearch.action.support.PlainActionFuture; +import org.elasticsearch.client.Request; import org.elasticsearch.client.Response; import org.elasticsearch.client.ResponseException; import org.elasticsearch.client.ResponseListener; -import org.elasticsearch.client.Request; import org.junit.After; import org.junit.Before; diff --git a/distribution/build.gradle b/distribution/build.gradle index 317ece6bf2b50..0e439b3586bc3 100644 --- a/distribution/build.gradle +++ b/distribution/build.gradle @@ -26,8 +26,6 @@ import org.apache.tools.ant.filters.FixCrLfFilter import java.nio.file.Files import java.nio.file.Path -Collection distributions = project('archives').subprojects + project('packages').subprojects - /***************************************************************************** * Third party dependencies report * *****************************************************************************/ diff --git a/distribution/bwc/build.gradle b/distribution/bwc/build.gradle index ec7d84d7a3084..47f3b78c43f5d 100644 --- a/distribution/bwc/build.gradle +++ b/distribution/bwc/build.gradle @@ -197,21 +197,15 @@ bwcVersions.forPreviousUnreleased { VersionCollection.UnreleasedVersionInfo unre } } - if (gradle.startParameter.taskNames == ["assemble"]) { - // Gradle needs the `artifacts` declaration, including `builtBy` bellow to make projects dependencies on this - // project work, but it will also trigger the build of these for the `assemble` task. - // Since these are only used for testing, we don't want to assemble them if `assemble` is the single command being - // ran. - logger.info("Skipping BWC builds since `assemble` is the only task name provided on the command line") - } else { - artifacts { - for (File artifactFile : artifactFiles) { - String artifactName = artifactFile.name.contains('oss') ? 'elasticsearch-oss' : 'elasticsearch' - String suffix = artifactFile.toString()[-3..-1] - 'default' file: artifactFile, name: artifactName, type: suffix, builtBy: buildBwcVersion - } - } + artifacts { + for (File artifactFile : artifactFiles) { + String artifactName = artifactFile.name.contains('oss') ? 'elasticsearch-oss' : 'elasticsearch' + String suffix = artifactFile.toString()[-3..-1] + 'default' file: artifactFile, name: artifactName, type: suffix, builtBy: buildBwcVersion + } } + // make sure no dependencies were added to assemble; we want it to be a no-op + assemble.dependsOn = [] }} class IndentingOutputStream extends OutputStream { @@ -237,4 +231,4 @@ class IndentingOutputStream extends OutputStream { } } } -} \ No newline at end of file +} diff --git a/distribution/docker/src/docker/Dockerfile b/distribution/docker/src/docker/Dockerfile index e578e63bd962d..37bbfe6688597 100644 --- a/distribution/docker/src/docker/Dockerfile +++ b/distribution/docker/src/docker/Dockerfile @@ -16,7 +16,7 @@ FROM centos:7 AS builder ENV PATH /usr/share/elasticsearch/bin:$PATH ENV JAVA_HOME /opt/jdk-${jdkVersion} -RUN curl -s ${jdkUrl} | tar -C /opt -zxf - +RUN curl --retry 8 -s ${jdkUrl} | tar -C /opt -zxf - # Replace OpenJDK's built-in CA certificate keystore with the one from the OS # vendor. The latter is superior in several ways. 
diff --git a/distribution/tools/launchers/build.gradle b/distribution/tools/launchers/build.gradle index f933c04278e7b..4c7d171663a0f 100644 --- a/distribution/tools/launchers/build.gradle +++ b/distribution/tools/launchers/build.gradle @@ -38,6 +38,15 @@ namingConventions { skipIntegTestInDisguise = true } +testingConventions { + naming.clear() + naming { + Tests { + baseClass 'org.elasticsearch.tools.launchers.LaunchersTestCase' + } + } +} + javadoc.enabled = false loggerUsageCheck.enabled = false jarHell.enabled = false diff --git a/docs/build.gradle b/docs/build.gradle index 630226639337a..035667ee84309 100644 --- a/docs/build.gradle +++ b/docs/build.gradle @@ -287,8 +287,8 @@ buildRestTests.setups['stackoverflow'] = ''' body: |''' // Make Kibana strongly connected to elasticsearch and logstash -// Make Kibana rarer (and therefore higher-ranking) than Javascript -// Make Javascript strongly connected to jquery and angular +// Make Kibana rarer (and therefore higher-ranking) than JavaScript +// Make JavaScript strongly connected to jquery and angular // Make Cabana strongly connected to elasticsearch but only as a result of a single author for (int i = 0; i < 150; i++) { diff --git a/docs/java-api/docs/update-by-query.asciidoc b/docs/java-api/docs/update-by-query.asciidoc index d4fe7c1c0419f..ef58d3754276e 100644 --- a/docs/java-api/docs/update-by-query.asciidoc +++ b/docs/java-api/docs/update-by-query.asciidoc @@ -72,7 +72,7 @@ operation that executes: `noop`:: Set `ctx.op = "noop"` if your script doesn't make any -changes. The `updateByQuery` operaton then omits that document from the updates. +changes. The `updateByQuery` operation then omits that document from the updates. This behavior increments the `noop` counter in the response body. `delete`:: diff --git a/docs/java-api/query-dsl/geo-shape-query.asciidoc b/docs/java-api/query-dsl/geo-shape-query.asciidoc index 803f1849b5cdf..c2cd4c14e3adc 100644 --- a/docs/java-api/query-dsl/geo-shape-query.asciidoc +++ b/docs/java-api/query-dsl/geo-shape-query.asciidoc @@ -51,7 +51,6 @@ include-tagged::{query-dsl-test}[indexed_geo_shape] -------------------------------------------------- <1> field <2> The ID of the document that containing the pre-indexed shape. -<3> Index type where the pre-indexed shape is. -<4> relation -<5> Name of the index where the pre-indexed shape is. Defaults to 'shapes'. -<6> The field specified as path containing the pre-indexed shape. Defaults to 'shape'. +<3> relation +<4> Name of the index where the pre-indexed shape is. Defaults to 'shapes'. +<5> The field specified as path containing the pre-indexed shape. Defaults to 'shape'. 
diff --git a/docs/java-api/query-dsl/ids-query.asciidoc b/docs/java-api/query-dsl/ids-query.asciidoc index 9abc8ed9fed7c..ba12a5df38b0e 100644 --- a/docs/java-api/query-dsl/ids-query.asciidoc +++ b/docs/java-api/query-dsl/ids-query.asciidoc @@ -8,4 +8,3 @@ See {ref}/query-dsl-ids-query.html[Ids Query] -------------------------------------------------- include-tagged::{query-dsl-test}[ids] -------------------------------------------------- -<1> type is optional diff --git a/docs/java-rest/high-level/document/bulk.asciidoc b/docs/java-rest/high-level/document/bulk.asciidoc index c50a1f790583b..061516388c4bb 100644 --- a/docs/java-rest/high-level/document/bulk.asciidoc +++ b/docs/java-rest/high-level/document/bulk.asciidoc @@ -86,8 +86,8 @@ include-tagged::{doc-tests-file}[{api}-request-routing] -------------------------------------------------- include-tagged::{doc-tests-file}[{api}-request-index-type] -------------------------------------------------- -<1> A bulk request with global index and type used on all sub requests, unless overridden on a sub request. -Both parameters are @Nullable and can only be set during +{request}+ creation. +<1> A bulk request with a global index used on all sub requests, unless overridden on a sub request. +This parameter is @Nullable and can only be set during +{request}+ creation. include::../execution.asciidoc[] diff --git a/docs/java-rest/high-level/document/multi-get.asciidoc b/docs/java-rest/high-level/document/multi-get.asciidoc index f63bf898922f4..ca26139d23057 100644 --- a/docs/java-rest/high-level/document/multi-get.asciidoc +++ b/docs/java-rest/high-level/document/multi-get.asciidoc @@ -128,7 +128,7 @@ include-tagged::{doc-tests-file}[{api}-conflict] -------------------------------------------------- <1> `getResponse` is null. <2> `getFailure` isn't and contains an `Exception`. -<3> That `Exception` is actuall and `ElasticsearchException` +<3> That `Exception` is actually an `ElasticsearchException` <4> and it has a status of `CONFLICT`. It'd have been an HTTP 409 if this wasn't a multi get. <5> `getMessage` explains the actual cause, ` diff --git a/docs/java-rest/high-level/document/update-by-query.asciidoc b/docs/java-rest/high-level/document/update-by-query.asciidoc index fdf50148df4c8..be1692c4e9f6a 100644 --- a/docs/java-rest/high-level/document/update-by-query.asciidoc +++ b/docs/java-rest/high-level/document/update-by-query.asciidoc @@ -125,7 +125,7 @@ include::../execution.asciidoc[] [id="{upid}-{api}-response"] ==== Update By Query Response -The returned +{resposne}+ contains information about the executed operations and +The returned +{response}+ contains information about the executed operations and allows to iterate over each result as follows: ["source","java",subs="attributes,callouts,macros"] diff --git a/docs/java-rest/high-level/getting-started.asciidoc b/docs/java-rest/high-level/getting-started.asciidoc index 92a61febb6864..f65a264cc33fc 100644 --- a/docs/java-rest/high-level/getting-started.asciidoc +++ b/docs/java-rest/high-level/getting-started.asciidoc @@ -144,7 +144,7 @@ include-tagged::{doc-tests}/MiscellaneousDocumentationIT.java[rest-high-level-cl In the rest of this documentation about the Java High Level Client, the `RestHighLevelClient` instance will be referenced as `client`. 
-[[java-rest-hight-getting-started-request-options]] +[[java-rest-high-getting-started-request-options]] === RequestOptions All APIs in the `RestHighLevelClient` accept a `RequestOptions` which you can diff --git a/docs/java-rest/high-level/indices/unfreeze_index.asciidoc b/docs/java-rest/high-level/indices/unfreeze_index.asciidoc index d2c477b33316d..27e98581f0c72 100644 --- a/docs/java-rest/high-level/indices/unfreeze_index.asciidoc +++ b/docs/java-rest/high-level/indices/unfreeze_index.asciidoc @@ -15,7 +15,7 @@ An +{request}+ requires an `index` argument: -------------------------------------------------- include-tagged::{doc-tests-file}[{api}-request] -------------------------------------------------- -<1> The index to unreeze +<1> The index to unfreeze ==== Optional arguments The following arguments can optionally be provided: diff --git a/docs/java-rest/high-level/migration/get-assistance.asciidoc b/docs/java-rest/high-level/migration/get-assistance.asciidoc index 20f857eb1fb41..723eb7d09053d 100644 --- a/docs/java-rest/high-level/migration/get-assistance.asciidoc +++ b/docs/java-rest/high-level/migration/get-assistance.asciidoc @@ -1,7 +1,7 @@ [[java-rest-high-migration-get-assistance]] === Migration Get Assistance -[[java-rest-high-migraton-get-assistance-request]] +[[java-rest-high-migration-get-assistance-request]] ==== Index Upgrade Info Request An `IndexUpgradeInfoRequest` does not require any argument: diff --git a/docs/java-rest/high-level/migration/upgrade.asciidoc b/docs/java-rest/high-level/migration/upgrade.asciidoc index b5bd33d693601..feabfa4ee48e5 100644 --- a/docs/java-rest/high-level/migration/upgrade.asciidoc +++ b/docs/java-rest/high-level/migration/upgrade.asciidoc @@ -8,7 +8,7 @@ [[java-rest-high-migration-upgrade]] === Migration Upgrade -[[java-rest-high-migraton-upgrade-request]] +[[java-rest-high-migration-upgrade-request]] ==== Index Upgrade Request An +{request}+ requires an index argument. 
Only one index at the time should be upgraded: @@ -32,7 +32,7 @@ include-tagged::{doc-tests-file}[{api}-execute] The returned +{response}+ contains information about the executed operation -[[java-rest-high-migraton-async-upgrade-request]] +[[java-rest-high-migration-async-upgrade-request]] ==== Asynchronous Execution The asynchronous execution of an upgrade request requires both the +{request}+ diff --git a/docs/java-rest/high-level/search/rank-eval.asciidoc b/docs/java-rest/high-level/search/rank-eval.asciidoc index 6db0dadd00ed7..195e1f92f3bfb 100644 --- a/docs/java-rest/high-level/search/rank-eval.asciidoc +++ b/docs/java-rest/high-level/search/rank-eval.asciidoc @@ -82,7 +82,7 @@ include-tagged::{doc-tests}/SearchDocumentationIT.java[rank-eval-response] <2> Partial results that are keyed by their query id <3> The metric score for each partial result <4> Rated search hits contain a fully fledged `SearchHit` -<5> Rated search hits also contain an `Optional` rating that +<5> Rated search hits also contain an `Optional` rating that is not present if the document did not get a rating in the request <6> Metric details are named after the metric used in the request <7> After casting to the metric used in the request, the diff --git a/docs/java-rest/high-level/security/get-privileges.asciidoc b/docs/java-rest/high-level/security/get-privileges.asciidoc index 06ae51e669081..6eee8bbc3c1f5 100644 --- a/docs/java-rest/high-level/security/get-privileges.asciidoc +++ b/docs/java-rest/high-level/security/get-privileges.asciidoc @@ -2,7 +2,7 @@ -- :api: get-privileges :request: GetPrivilegesRequest -:respnse: GetPrivilegesResponse +:response: GetPrivilegesResponse -- [id="{upid}-{api}"] diff --git a/docs/java-rest/high-level/security/get-roles.asciidoc b/docs/java-rest/high-level/security/get-roles.asciidoc index 9ecf36353c3e8..777349222992e 100644 --- a/docs/java-rest/high-level/security/get-roles.asciidoc +++ b/docs/java-rest/high-level/security/get-roles.asciidoc @@ -2,7 +2,7 @@ -- :api: get-roles :request: GetRolesRequest -:respnse: GetRolesResponse +:response: GetRolesResponse -- [id="{upid}-{api}"] diff --git a/docs/java-rest/high-level/security/get-users.asciidoc b/docs/java-rest/high-level/security/get-users.asciidoc index e9e4a0d94911b..1d41bd76166b4 100644 --- a/docs/java-rest/high-level/security/get-users.asciidoc +++ b/docs/java-rest/high-level/security/get-users.asciidoc @@ -2,7 +2,7 @@ -- :api: get-users :request: GetUsersRequest -:respnse: GetUsersResponse +:response: GetUsersResponse -- [id="{upid}-{api}"] diff --git a/docs/java-rest/high-level/watcher/deactivate-watch.asciidoc b/docs/java-rest/high-level/watcher/deactivate-watch.asciidoc index 673423b69b983..ca2178e5c05e2 100644 --- a/docs/java-rest/high-level/watcher/deactivate-watch.asciidoc +++ b/docs/java-rest/high-level/watcher/deactivate-watch.asciidoc @@ -1,6 +1,6 @@ -- :api: deactivate-watch -:request: deactivateWatchRequet +:request: deactivateWatchRequest :response: deactivateWatchResponse :doc-tests-file: {doc-tests}/WatcherDocumentationIT.java -- diff --git a/docs/java-rest/low-level/usage.asciidoc b/docs/java-rest/low-level/usage.asciidoc index 38104215720d7..3747314b6ecd3 100644 --- a/docs/java-rest/low-level/usage.asciidoc +++ b/docs/java-rest/low-level/usage.asciidoc @@ -328,7 +328,7 @@ include-tagged::{doc-tests}/RestClientDocumentation.java[rest-client-options-cus The client is quite happy to execute many actions in parallel. The following example indexes many documents in parallel. 
In a real world scenario you'd -probably want to use the `_bulk` API instead, but the example is illustative. +probably want to use the `_bulk` API instead, but the example is illustrative. ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- diff --git a/docs/painless/painless-execute-script.asciidoc b/docs/painless/painless-execute-script.asciidoc index 30320def79b2d..200dd3f63d56f 100644 --- a/docs/painless/painless-execute-script.asciidoc +++ b/docs/painless/painless-execute-script.asciidoc @@ -65,7 +65,7 @@ are available in the script being tested. The following parameters may be specified in `context_setup` for a filter context: document:: Contains the document that will be temporarily indexed in-memory and is accessible from the script. -index:: The name of an index containing a mapping that is compatable with the document being indexed. +index:: The name of an index containing a mapping that is compatible with the document being indexed. *Example* @@ -122,7 +122,7 @@ The `score` context executes scripts as if they were executed inside a `script_s The following parameters may be specified in `context_setup` for a score context: document:: Contains the document that will be temporarily indexed in-memory and is accessible from the script. -index:: The name of an index containing a mapping that is compatable with the document being indexed. +index:: The name of an index containing a mapping that is compatible with the document being indexed. query:: If `_score` is used in the script then a query can specified that will be used to compute a score. *Example* diff --git a/docs/perl/index.asciidoc b/docs/perl/index.asciidoc index fc487c735ebd6..d009b3d0460ac 100644 --- a/docs/perl/index.asciidoc +++ b/docs/perl/index.asciidoc @@ -28,7 +28,7 @@ This client provides: * Logging support via Log::Any -* Compatibility with the official clients for Python, Ruby, PHP and Javascript +* Compatibility with the official clients for Python, Ruby, PHP and JavaScript * Easy extensibility diff --git a/docs/plugins/analysis-phonetic.asciidoc b/docs/plugins/analysis-phonetic.asciidoc index 9d9df4827fd4e..e22f819e1eb3e 100644 --- a/docs/plugins/analysis-phonetic.asciidoc +++ b/docs/plugins/analysis-phonetic.asciidoc @@ -93,6 +93,6 @@ supported: `languageset`:: An array of languages to check. If not specified, then the language will - be guessed. Accepts: `any`, `comomon`, `cyrillic`, `english`, `french`, + be guessed. Accepts: `any`, `common`, `cyrillic`, `english`, `french`, `german`, `hebrew`, `hungarian`, `polish`, `romanian`, `russian`, `spanish`. diff --git a/docs/plugins/discovery-ec2.asciidoc b/docs/plugins/discovery-ec2.asciidoc index 2710cf46bff4c..57b5b8468fafb 100644 --- a/docs/plugins/discovery-ec2.asciidoc +++ b/docs/plugins/discovery-ec2.asciidoc @@ -185,9 +185,9 @@ Management Console. It should look similar to this. ===== Filtering by Tags The ec2 discovery can also filter machines to include in the cluster based on tags (and not just groups). The settings -to use include the `discovery.ec2.tag.` prefix. For example, setting `discovery.ec2.tag.stage` to `dev` will only -filter instances with a tag key set to `stage`, and a value of `dev`. Several tags set will require all of those tags -to be set for the instance to be included. +to use include the `discovery.ec2.tag.` prefix. 
For example, if you defined a tag `stage` in EC2 and set it to `dev`, +setting `discovery.ec2.tag.stage` to `dev` will only filter instances with a tag key set to `stage`, and a value +of `dev`. Adding multiple `discovery.ec2.tag` settings will require all of those tags to be set for the instance to be included. One practical use for tag filtering is when an ec2 cluster contains many nodes that are not running Elasticsearch. In this case (particularly with high `discovery.zen.ping_timeout` values) there is a risk that a new node's discovery phase diff --git a/docs/plugins/index.asciidoc b/docs/plugins/index.asciidoc index 77e97396b0e5e..4d51ff147d7a6 100644 --- a/docs/plugins/index.asciidoc +++ b/docs/plugins/index.asciidoc @@ -33,7 +33,7 @@ Issues and bug reports can usually be reported on the community plugin's web sit For advice on writing your own plugin, see <>. -IMPORTANT: Site plugins -- plugins containing HTML, CSS and Javascript -- are +IMPORTANT: Site plugins -- plugins containing HTML, CSS and JavaScript -- are no longer supported. include::plugin-script.asciidoc[] diff --git a/docs/plugins/integrations.asciidoc b/docs/plugins/integrations.asciidoc index 912d9df2f4bd3..6d543408f679f 100644 --- a/docs/plugins/integrations.asciidoc +++ b/docs/plugins/integrations.asciidoc @@ -191,7 +191,7 @@ releases 2.0 and later do not support rivers. ==== Supported by the community: * https://github.com/kodcu/pes[Pes]: - A pluggable elastic Javascript query DSL builder for Elasticsearch + A pluggable elastic JavaScript query DSL builder for Elasticsearch * https://www.wireshark.org/[Wireshark]: Protocol dissection for Zen discovery, HTTP and the binary protocol diff --git a/docs/plugins/repository-azure.asciidoc b/docs/plugins/repository-azure.asciidoc index 13c6a7b62ccbd..df09b28093c80 100644 --- a/docs/plugins/repository-azure.asciidoc +++ b/docs/plugins/repository-azure.asciidoc @@ -139,7 +139,7 @@ Some examples, using scripts: [source,js] ---- -# The simpliest one +# The simplest one PUT _snapshot/my_backup1 { "type": "azure" diff --git a/docs/plugins/repository-hdfs.asciidoc b/docs/plugins/repository-hdfs.asciidoc index 1b975ef761d4a..005cc30895552 100644 --- a/docs/plugins/repository-hdfs.asciidoc +++ b/docs/plugins/repository-hdfs.asciidoc @@ -78,7 +78,7 @@ The following settings are supported: [[repository-hdfs-availability]] [float] -===== A Note on HDFS Availablility +===== A Note on HDFS Availability When you initialize a repository, its settings are persisted in the cluster state. When a node comes online, it will attempt to initialize all repositories for which it has settings. If your cluster has an HDFS repository configured, then all nodes in the cluster must be able to reach HDFS when starting. If not, then the node will fail to initialize the diff --git a/docs/plugins/repository-s3.asciidoc b/docs/plugins/repository-s3.asciidoc index ddbe0b16cc6ad..b0b87dda792fe 100644 --- a/docs/plugins/repository-s3.asciidoc +++ b/docs/plugins/repository-s3.asciidoc @@ -34,10 +34,10 @@ PUT _snapshot/my_s3_repository [[repository-s3-client]] ==== Client Settings -The client used to connect to S3 has a number of settings available. Client setting names are of -the form `s3.client.CLIENT_NAME.SETTING_NAME`. The default client name, which is looked up by -an `s3` repository, is called `default`. It can be modified using the -<> `client`. For example: +The client that you use to connect to S3 has a number of settings available. The +settings have the form `s3.client.CLIENT_NAME.SETTING_NAME`. 
The default client +name that is looked up by an `s3` repository is `default`. It can be modified +using the <> `client`. For example: [source,js] ---- @@ -53,11 +53,13 @@ PUT _snapshot/my_s3_repository // CONSOLE // TEST[skip:we don't have s3 setup while testing this] -Most client settings are specified inside `elasticsearch.yml`, but some are -sensitive and must be stored in the {ref}/secure-settings.html[elasticsearch keystore]. +Most client settings can be added to the `elasticsearch.yml` configuration file +with the exception of the secure settings, which you add to the {es} keystore. +For more information about creating and updating the {es} keystore, see +{ref}/secure-settings.html[Secure settings]. -For example, before you start the node, run these commands to add AWS access -key settings to the keystore: +For example, before you start the node, run these commands to add AWS access key +settings to the keystore: [source,sh] ---- @@ -76,16 +78,17 @@ NOTE: In progress snapshot/restore tasks will not be preempted by a *reload* of the client's secure settings. The task will complete using the client as it was built when the operation started. -The following is the list of all the available client settings. -Those that must be stored in the keystore are marked as `Secure` and are *reloadable*. +The following list contains the available client settings. Those that must be +stored in the keystore are marked as "secure" and are *reloadable*; the other +settings belong in the `elasticsearch.yml` file. -`access_key`:: +`access_key` ({ref}/secure-settings.html[Secure]):: - An s3 access key. The `secret_key` setting must also be specified. (Secure) + An s3 access key. The `secret_key` setting must also be specified. -`secret_key`:: +`secret_key` ({ref}/secure-settings.html[Secure]):: - An s3 secret key. The `access_key` setting must also be specified. (Secure) + An s3 secret key. The `access_key` setting must also be specified. `session_token`:: An s3 session token. The `access_key` and `secret_key` settings must also @@ -110,13 +113,13 @@ Those that must be stored in the keystore are marked as `Secure` and are *reload The port of a proxy to connect to s3 through. -`proxy.username`:: +`proxy.username` ({ref}/secure-settings.html[Secure]):: - The username to connect to the `proxy.host` with. (Secure) + The username to connect to the `proxy.host` with. -`proxy.password`:: +`proxy.password` ({ref}/secure-settings.html[Secure]):: - The password to connect to the `proxy.host` with. (Secure) + The password to connect to the `proxy.host` with. `read_timeout`:: diff --git a/docs/reference/aggregations/bucket/geohashgrid-aggregation.asciidoc b/docs/reference/aggregations/bucket/geohashgrid-aggregation.asciidoc index e1a1ce6b3b193..91c6688b10bd5 100644 --- a/docs/reference/aggregations/bucket/geohashgrid-aggregation.asciidoc +++ b/docs/reference/aggregations/bucket/geohashgrid-aggregation.asciidoc @@ -121,6 +121,82 @@ POST /museums/_search?size=0 // CONSOLE // TEST[continued] +The geohashes returned by the `geohash_grid` aggregation can also be used for zooming in. 
To zoom into the +first geohash `u17` returned in the previous example, it should be specified as both the `top_left` and `bottom_right` corners: + +[source,js] +-------------------------------------------------- +POST /museums/_search?size=0 +{ + "aggregations" : { + "zoomed-in" : { + "filter" : { + "geo_bounding_box" : { + "location" : { + "top_left" : "u17", + "bottom_right" : "u17" + } + } + }, + "aggregations":{ + "zoom1":{ + "geohash_grid" : { + "field": "location", + "precision": 8 + } + } + } + } + } +} +-------------------------------------------------- +// CONSOLE +// TEST[continued] + +[source,js] +-------------------------------------------------- +{ + ... + "aggregations" : { + "zoomed-in" : { + "doc_count" : 3, + "zoom1" : { + "buckets" : [ + { + "key" : "u173zy3j", + "doc_count" : 1 + }, + { + "key" : "u173zvfz", + "doc_count" : 1 + }, + { + "key" : "u173zt90", + "doc_count" : 1 + } + ] + } + } + } +} +-------------------------------------------------- +// TESTRESPONSE[s/\.\.\./"took": $body.took,"_shards": $body._shards,"hits":$body.hits,"timed_out":false,/] + +For "zooming in" on systems that don't support geohashes, the bucket keys should be translated into bounding boxes using +one of the available geohash libraries. For example, for JavaScript the https://github.com/sunng87/node-geohash[node-geohash] library +can be used: + +[source,js] +-------------------------------------------------- +var geohash = require('ngeohash'); + +// bbox will contain [ 52.03125, 4.21875, 53.4375, 5.625 ] +// [ minlat, minlon, maxlat, maxlon] +var bbox = geohash.decode_bbox('u17'); +-------------------------------------------------- +// NOTCONSOLE + + ==== Cell dimensions at the equator The table below shows the metric dimensions for cells covered by various string lengths of geohash. Cell dimensions vary with latitude and so the table is for the worst-case scenario at the equator. diff --git a/docs/reference/aggregations/bucket/significantterms-aggregation.asciidoc b/docs/reference/aggregations/bucket/significantterms-aggregation.asciidoc index 0a8a46a0b67b6..bfaeecc1f82d3 100644 --- a/docs/reference/aggregations/bucket/significantterms-aggregation.asciidoc +++ b/docs/reference/aggregations/bucket/significantterms-aggregation.asciidoc @@ -448,7 +448,7 @@ If the number of unique terms is greater than `size`, the returned list can be s size buckets was not returned). To ensure better accuracy a multiple of the final `size` is used as the number of terms to request from each shard -using a heuristic based on the number of shards. To take manual control of this setting the `shard_size` parameter +(`2 * (size * 1.5 + 10)`). To take manual control of this setting the `shard_size` parameter can be used to control the volumes of candidate terms produced by each shard. Low-frequency terms can turn out to be the most interesting ones once all results are combined so the diff --git a/docs/reference/aggregations/bucket/significanttext-aggregation.asciidoc b/docs/reference/aggregations/bucket/significanttext-aggregation.asciidoc index a541eb0ac14db..429c822d3623d 100644 --- a/docs/reference/aggregations/bucket/significanttext-aggregation.asciidoc +++ b/docs/reference/aggregations/bucket/significanttext-aggregation.asciidoc @@ -364,7 +364,7 @@ If the number of unique terms is greater than `size`, the returned list can be s size buckets was not returned). 
To ensure better accuracy a multiple of the final `size` is used as the number of terms to request from each shard -using a heuristic based on the number of shards. To take manual control of this setting the `shard_size` parameter +(`2 * (size * 1.5 + 10)`). To take manual control of this setting the `shard_size` parameter can be used to control the volumes of candidate terms produced by each shard. Low-frequency terms can turn out to be the most interesting ones once all results are combined so the diff --git a/docs/reference/aggregations/bucket/terms-aggregation.asciidoc b/docs/reference/aggregations/bucket/terms-aggregation.asciidoc index 1562bf41074e0..188b2ed3774c0 100644 --- a/docs/reference/aggregations/bucket/terms-aggregation.asciidoc +++ b/docs/reference/aggregations/bucket/terms-aggregation.asciidoc @@ -220,8 +220,7 @@ NOTE: `shard_size` cannot be smaller than `size` (as it doesn't make much sens override it and reset it to be equal to `size`. -The default `shard_size` will be `size` if the search request needs to go to a single shard, and `(size * 1.5 + 10)` -otherwise. +The default `shard_size` is `(size * 1.5 + 10)`. ==== Calculating Document Count Error diff --git a/docs/reference/aggregations/pipeline/movavg-aggregation.asciidoc b/docs/reference/aggregations/pipeline/movavg-aggregation.asciidoc index 39a8255c90705..06641391ced32 100644 --- a/docs/reference/aggregations/pipeline/movavg-aggregation.asciidoc +++ b/docs/reference/aggregations/pipeline/movavg-aggregation.asciidoc @@ -263,7 +263,7 @@ image::images/pipeline_movavg/linear_100window.png[] The `ewma` model (aka "single-exponential") is similar to the `linear` model, except older data-points become exponentially less important, rather than linearly less important. The speed at which the importance decays can be controlled with an `alpha` setting. Small values make the weight decay slowly, which provides greater smoothing and takes into account a larger -portion of the window. Larger valuers make the weight decay quickly, which reduces the impact of older values on the +portion of the window. Larger values make the weight decay quickly, which reduces the impact of older values on the moving average. This tends to make the moving average track the data more closely but with less smoothing. The default value of `alpha` is `0.3`, and the setting accepts any float from 0-1 inclusive. diff --git a/docs/reference/aggregations/pipeline/movfn-aggregation.asciidoc b/docs/reference/aggregations/pipeline/movfn-aggregation.asciidoc index febd9bc8a55d2..5745527bddd6f 100644 --- a/docs/reference/aggregations/pipeline/movfn-aggregation.asciidoc +++ b/docs/reference/aggregations/pipeline/movfn-aggregation.asciidoc @@ -449,7 +449,7 @@ The `ewma` function (aka "single-exponential") is similar to the `linearMovAvg` except older data-points become exponentially less important, rather than linearly less important. The speed at which the importance decays can be controlled with an `alpha` setting. Small values make the weight decay slowly, which provides greater smoothing and takes into account a larger -portion of the window. Larger valuers make the weight decay quickly, which reduces the impact of older values on the +portion of the window. Larger values make the weight decay quickly, which reduces the impact of older values on the moving average. This tends to make the moving average track the data more closely but with less smoothing. `null` and `NaN` values are ignored; the average is only calculated over the real values. 
If the window is empty, or all values are diff --git a/docs/reference/analysis/analyzers/fingerprint-analyzer.asciidoc b/docs/reference/analysis/analyzers/fingerprint-analyzer.asciidoc index cc873a4fe89ff..cc82d2eb8179f 100644 --- a/docs/reference/analysis/analyzers/fingerprint-analyzer.asciidoc +++ b/docs/reference/analysis/analyzers/fingerprint-analyzer.asciidoc @@ -58,7 +58,7 @@ The `fingerprint` analyzer accepts the following parameters: [horizontal] `separator`:: - The character to use to concate the terms. Defaults to a space. + The character to use to concatenate the terms. Defaults to a space. `max_output_size`:: diff --git a/docs/reference/analysis/tokenizers/chargroup-tokenizer.asciidoc b/docs/reference/analysis/tokenizers/chargroup-tokenizer.asciidoc index e6bf79b0e961f..2c18e94878fb2 100644 --- a/docs/reference/analysis/tokenizers/chargroup-tokenizer.asciidoc +++ b/docs/reference/analysis/tokenizers/chargroup-tokenizer.asciidoc @@ -15,7 +15,7 @@ The `char_group` tokenizer accepts one parameter: `tokenize_on_chars`:: A list containing a list of characters to tokenize the string on. Whenever a character from this list is encountered, a new token is started. This accepts either single - characters like eg. `-`, or character groups: `whitespace`, `letter`, `digit`, + characters like e.g. `-`, or character groups: `whitespace`, `letter`, `digit`, `punctuation`, `symbol`. diff --git a/docs/reference/cat/alias.asciidoc b/docs/reference/cat/alias.asciidoc index 84d567d110a60..394231e448dc0 100644 --- a/docs/reference/cat/alias.asciidoc +++ b/docs/reference/cat/alias.asciidoc @@ -56,4 +56,4 @@ configurations in `alias3` and `alias4`. If you only want to get information about specific aliases, you can specify the aliases in comma-delimited format as a URL parameter, e.g., -/_cat/aliases/aliases/alias1,alias2. \ No newline at end of file +/_cat/aliases/alias1,alias2. diff --git a/docs/reference/cat/nodeattrs.asciidoc b/docs/reference/cat/nodeattrs.asciidoc index 6c474c2117943..2b893a4c79b11 100644 --- a/docs/reference/cat/nodeattrs.asciidoc +++ b/docs/reference/cat/nodeattrs.asciidoc @@ -10,7 +10,7 @@ GET /_cat/nodeattrs?v -------------------------------------------------- // CONSOLE // TEST[s/\?v/\?v&s=node,attr/] -// Sort the resulting attributes so we can assert on them more easilly +// Sort the resulting attributes so we can assert on them more easily Could look like: @@ -55,7 +55,7 @@ GET /_cat/nodeattrs?v&h=name,pid,attr,value -------------------------------------------------- // CONSOLE // TEST[s/,value/,value&s=node,attr/] -// Sort the resulting attributes so we can assert on them more easilly +// Sort the resulting attributes so we can assert on them more easily Might look like: diff --git a/docs/reference/cat/templates.asciidoc b/docs/reference/cat/templates.asciidoc index 076e84b72b5d3..304459ba96edb 100644 --- a/docs/reference/cat/templates.asciidoc +++ b/docs/reference/cat/templates.asciidoc @@ -12,7 +12,7 @@ GET /_cat/templates?v&s=name // TEST[s/^/PUT _template\/template0\n{"index_patterns": "te*", "order": 0}\n/] // TEST[s/^/PUT _template\/template1\n{"index_patterns": "tea*", "order": 1}\n/] // TEST[s/^/PUT _template\/template2\n{"index_patterns": "teak*", "order": 2, "version": 7}\n/] -// The substitions do two things: +// The substitutions do two things: // 1. Filter the response to just templates matching the te* pattern // so that we only get the templates we expect regardless of which // templates exist. 
If xpack is installed there will be unexpected diff --git a/docs/reference/ccr/apis/follow/get-follow-stats.asciidoc b/docs/reference/ccr/apis/follow/get-follow-stats.asciidoc index 755bf63f0183f..766f502ff93a3 100644 --- a/docs/reference/ccr/apis/follow/get-follow-stats.asciidoc +++ b/docs/reference/ccr/apis/follow/get-follow-stats.asciidoc @@ -47,7 +47,7 @@ GET //_ccr/stats // CONSOLE // TEST[s//follower_index/] -==== Path Parmeters +==== Path Parameters `index` :: (string) a comma-delimited list of index patterns diff --git a/docs/reference/ccr/apis/follow/post-unfollow.asciidoc b/docs/reference/ccr/apis/follow/post-unfollow.asciidoc index 1bc2c3ef28838..6507c04ac5026 100644 --- a/docs/reference/ccr/apis/follow/post-unfollow.asciidoc +++ b/docs/reference/ccr/apis/follow/post-unfollow.asciidoc @@ -50,7 +50,7 @@ POST //_ccr/unfollow // CONSOLE // TEST[s//follower_index/] -==== Path Parmeters +==== Path Parameters `follower_index` (required):: (string) the name of the follower index diff --git a/docs/reference/cluster.asciidoc b/docs/reference/cluster.asciidoc index f92e364bae102..cfa2d5a6488d7 100644 --- a/docs/reference/cluster.asciidoc +++ b/docs/reference/cluster.asciidoc @@ -104,3 +104,5 @@ include::cluster/tasks.asciidoc[] include::cluster/nodes-hot-threads.asciidoc[] include::cluster/allocation-explain.asciidoc[] + +include::cluster/voting-exclusions.asciidoc[] diff --git a/docs/reference/cluster/nodes-reload-secure-settings.asciidoc b/docs/reference/cluster/nodes-reload-secure-settings.asciidoc index f02ac8e46576b..68bca72be248c 100644 --- a/docs/reference/cluster/nodes-reload-secure-settings.asciidoc +++ b/docs/reference/cluster/nodes-reload-secure-settings.asciidoc @@ -3,10 +3,10 @@ The cluster nodes reload secure settings API is used to re-read the local node's encrypted keystore. Specifically, it will prompt the keystore -decryption and reading accross the cluster. The keystore's plain content is +decryption and reading across the cluster. The keystore's plain content is used to reinitialize all compatible plugins. A compatible plugin can be -reinitilized without restarting the node. The operation is -complete when all compatible plugins have finished reinitilizing. Subsequently, +reinitialized without restarting the node. The operation is +complete when all compatible plugins have finished reinitializing. Subsequently, the keystore is closed and any changes to it will not be reflected on the node. [source,js] diff --git a/docs/reference/cluster/nodes-stats.asciidoc b/docs/reference/cluster/nodes-stats.asciidoc index eb3abb19d1adf..4bd3c2c9647a5 100644 --- a/docs/reference/cluster/nodes-stats.asciidoc +++ b/docs/reference/cluster/nodes-stats.asciidoc @@ -279,7 +279,7 @@ the operating system: `os.cgroup.memory.limit_in_bytes`. NOTE: For the cgroup stats to be visible, cgroups must be compiled into -the kernal, the `cpu` and `cpuacct` cgroup subsystems must be +the kernel, the `cpu` and `cpuacct` cgroup subsystems must be configured and stats must be readable from `/sys/fs/cgroup/cpu` and `/sys/fs/cgroup/cpuacct`. diff --git a/docs/reference/cluster/voting-exclusions.asciidoc b/docs/reference/cluster/voting-exclusions.asciidoc new file mode 100644 index 0000000000000..fcef8113912c4 --- /dev/null +++ b/docs/reference/cluster/voting-exclusions.asciidoc @@ -0,0 +1,76 @@ +[[voting-config-exclusions]] +== Voting configuration exclusions API +++++ +Voting Configuration Exclusions +++++ + +Adds or removes master-eligible nodes from the +<>. 
+ +[float] +=== Request + +`POST _cluster/voting_config_exclusions/` + + +`DELETE _cluster/voting_config_exclusions` + +[float] +=== Path parameters + +`node_name`:: + A <> that identifies {es} nodes. + +[float] +=== Description + +By default, if there are more than three master-eligible nodes in the cluster +and you remove fewer than half of the master-eligible nodes in the cluster at +once, the <> automatically +shrinks. + +If you want to shrink the voting configuration to contain fewer than three nodes +or to remove half or more of the master-eligible nodes in the cluster at once, +you must use this API to remove departed nodes from the voting configuration +manually. It adds an entry for that node in the voting configuration exclusions +list. The cluster then tries to reconfigure the voting configuration to remove +that node and to prevent it from returning. + +If the API fails, you can safely retry it. Only a successful response +guarantees that the node has been removed from the voting configuration and will +not be reinstated. + +NOTE: Voting exclusions are required only when you remove at least half of the +master-eligible nodes from a cluster in a short time period. They are not +required when removing master-ineligible nodes or fewer than half of the +master-eligible nodes. + +The <> limits the size of the voting configuration exclusion list. The +default value is `10`. Since voting configuration exclusions are persistent and +limited in number, you must clear the voting config exclusions list once the +exclusions are no longer required. + +There is also a +<>, +which is set to true by default. If it is set to false, you must use this API to +maintain the voting configuration. + +For more information, see <>. + +[float] +=== Examples + +Add `nodeId1` to the voting configuration exclusions list: +[source,js] +-------------------------------------------------- +POST /_cluster/voting_config_exclusions/nodeId1 +-------------------------------------------------- +// CONSOLE +// TEST[catch:bad_request] + +Remove all exclusions from the list: +[source,js] +-------------------------------------------------- +DELETE /_cluster/voting_config_exclusions +-------------------------------------------------- +// CONSOLE \ No newline at end of file diff --git a/docs/reference/docs/reindex.asciidoc b/docs/reference/docs/reindex.asciidoc index 642bcb20518e7..b12a27d2e601b 100644 --- a/docs/reference/docs/reindex.asciidoc +++ b/docs/reference/docs/reindex.asciidoc @@ -138,8 +138,8 @@ POST _reindex // CONSOLE // TEST[setup:twitter] -You can limit the documents by adding a type to the `source` or by adding a -query. This will only copy tweets made by `kimchy` into `new_twitter`: +You can limit the documents by adding a query to the `source`. +This will only copy tweets made by `kimchy` into `new_twitter`: [source,js] -------------------------------------------------- @@ -147,7 +147,6 @@ POST _reindex { "source": { "index": "twitter", - "type": "_doc", "query": { "term": { "user": "kimchy" @@ -162,21 +161,19 @@ POST _reindex // CONSOLE // TEST[setup:twitter] -`index` and `type` in `source` can both be lists, allowing you to copy from -lots of sources in one request. This will copy documents from the `_doc` and -`post` types in the `twitter` and `blog` indices. +`index` in `source` can be a list, allowing you to copy from lots +of sources in one request. 
This will copy documents from the +`twitter` and `blog` indices: [source,js] -------------------------------------------------- POST _reindex { "source": { - "index": ["twitter", "blog"], - "type": ["_doc", "post"] + "index": ["twitter", "blog"] }, "dest": { - "index": "all_together", - "type": "_doc" + "index": "all_together" } } -------------------------------------------------- @@ -299,7 +296,6 @@ Think of the possibilities! Just be careful; you are able to change: * `_id` - * `_type` * `_index` * `_version` * `_routing` diff --git a/docs/reference/frozen-indices.asciidoc b/docs/reference/frozen-indices.asciidoc index 4002df887665d..19a8e4318b0c2 100644 --- a/docs/reference/frozen-indices.asciidoc +++ b/docs/reference/frozen-indices.asciidoc @@ -64,4 +64,24 @@ The default value for `pre_filter_shard_size` is `128` but it's recommended to s significant overhead associated with this pre-filter phase. ================================ +== Monitoring frozen indices + +Frozen indices are ordinary indices that use search throttling and a memory-efficient shard implementation. For APIs like the +`<>`, frozen indices may be identified by an index's `search.throttled` property (`sth`). + +[source,js] +-------------------------------------------------- +GET /_cat/indices/twitter?v&h=i,sth +-------------------------------------------------- +// CONSOLE +// TEST[s/^/PUT twitter\nPOST twitter\/_freeze\n/] + +The response looks like: + +[source,txt] +-------------------------------------------------- +i sth +twitter true +-------------------------------------------------- +// TESTRESPONSE[_cat] diff --git a/docs/reference/how-to/search-speed.asciidoc b/docs/reference/how-to/search-speed.asciidoc index bb5b0edd2e6bb..f89eb4ed3cd39 100644 --- a/docs/reference/how-to/search-speed.asciidoc +++ b/docs/reference/how-to/search-speed.asciidoc @@ -400,3 +400,28 @@ So what is the right number of replicas? If you have a cluster that has be able to cope with `max_failures` node failures at once at most, then the right number of replicas for you is `max(max_failures, ceil(num_nodes / num_primaries) - 1)`. + +[float] +=== Turn on adaptive replica selection + +When multiple copies of data are present, Elasticsearch can use a set of +criteria called <> to select +the best copy of the data based on response time, service time, and queue size +of the node containing each copy of the shard. This can improve query throughput +and reduce latency for search-heavy applications. + +[float] +=== Tune your queries with the Profile API + +You can also use the {ref}/search-profile.html[Profile API] to analyse how expensive +each component of your queries and aggregations is. This might +allow you to tune your queries to be less expensive, resulting in better +performance and reduced load. Also note that Profile API payloads can be +easily visualised for better readability in the +{kibana-ref}/xpack-profiler.html[Search Profiler], which is a Kibana dev tools +UI available in all X-Pack licenses, including the free X-Pack Basic license. 
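+For example, a minimal profiled query (the `twitter` index and `user` field
+are placeholders for your own data):
+
+[source,js]
+--------------------------------------------------
+GET /twitter/_search
+{
+  "profile": true,
+  "query" : {
+    "term" : { "user" : "kimchy" }
+  }
+}
+--------------------------------------------------
+// CONSOLE
+// TEST[setup:twitter]
+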
+ +Some caveats to the Profile API are that: + + - the Profile API as a debugging tool adds significant overhead to search execution and can also have a very verbose output - given the added overhead, the resulting took times are not reliable indicators of actual took time, but can be used comparatively between clauses for relative timing differences - the Profile API is best for exploring possible reasons behind the most costly clauses of a query but isn't intended for accurately measuring absolute timings of each clause diff --git a/docs/reference/ilm/apis/delete-lifecycle.asciidoc b/docs/reference/ilm/apis/delete-lifecycle.asciidoc index 0b9fce20d10d0..9c45542836096 100644 --- a/docs/reference/ilm/apis/delete-lifecycle.asciidoc +++ b/docs/reference/ilm/apis/delete-lifecycle.asciidoc @@ -6,8 +6,6 @@ Delete policy ++++ -beta[] - Deletes a lifecycle policy. ==== Request diff --git a/docs/reference/ilm/apis/explain.asciidoc b/docs/reference/ilm/apis/explain.asciidoc index 1c8f5c9d861f1..66762ead9eb32 100644 --- a/docs/reference/ilm/apis/explain.asciidoc +++ b/docs/reference/ilm/apis/explain.asciidoc @@ -6,8 +6,6 @@ Explain lifecycle ++++ -beta[] - Shows an index's current lifecycle status. ==== Request diff --git a/docs/reference/ilm/apis/get-lifecycle.asciidoc b/docs/reference/ilm/apis/get-lifecycle.asciidoc index 161e82b091b3e..9bdf14d970caa 100644 --- a/docs/reference/ilm/apis/get-lifecycle.asciidoc +++ b/docs/reference/ilm/apis/get-lifecycle.asciidoc @@ -6,8 +6,6 @@ Get policy ++++ -beta[] - Retrieves a lifecycle policy. ==== Request diff --git a/docs/reference/ilm/apis/get-status.asciidoc b/docs/reference/ilm/apis/get-status.asciidoc index 4dceb18a3611a..5e67e26cb160c 100644 --- a/docs/reference/ilm/apis/get-status.asciidoc +++ b/docs/reference/ilm/apis/get-status.asciidoc @@ -6,8 +6,6 @@ Get {ilm} status ++++ -beta[] - Retrieves the current {ilm} ({ilm-init}) status. ==== Request diff --git a/docs/reference/ilm/apis/ilm-api.asciidoc b/docs/reference/ilm/apis/ilm-api.asciidoc index dcc3d1962cb64..edfc96d113fc7 100644 --- a/docs/reference/ilm/apis/ilm-api.asciidoc +++ b/docs/reference/ilm/apis/ilm-api.asciidoc @@ -1,9 +1,9 @@ [[index-lifecycle-management-api]] == {ilm-cap} API -beta[] - -You can use the following APIs to manage policies on indices. +You can use the following APIs to manage policies on indices. See +<> for more information +about Index Lifecycle Management. [float] [[ilm-api-policy-endpoint]] diff --git a/docs/reference/ilm/apis/move-to-step.asciidoc b/docs/reference/ilm/apis/move-to-step.asciidoc index 6d648f5270209..57ea1a226ea40 100644 --- a/docs/reference/ilm/apis/move-to-step.asciidoc +++ b/docs/reference/ilm/apis/move-to-step.asciidoc @@ -6,8 +6,6 @@ Move to step ++++ -beta[] - Triggers execution of a specific step in the lifecycle policy. ==== Request diff --git a/docs/reference/ilm/apis/put-lifecycle.asciidoc b/docs/reference/ilm/apis/put-lifecycle.asciidoc index 83facc6f42b01..3e07e5f0f03ee 100644 --- a/docs/reference/ilm/apis/put-lifecycle.asciidoc +++ b/docs/reference/ilm/apis/put-lifecycle.asciidoc @@ -6,9 +6,8 @@ Create policy ++++ -beta[] - -Creates or updates lifecycle policy. +Creates or updates a lifecycle policy. See <> +for definitions of policy components. 
==== Request diff --git a/docs/reference/ilm/apis/remove-policy-from-index.asciidoc b/docs/reference/ilm/apis/remove-policy-from-index.asciidoc index b8b44c3d0fe28..888d3f17eecac 100644 --- a/docs/reference/ilm/apis/remove-policy-from-index.asciidoc +++ b/docs/reference/ilm/apis/remove-policy-from-index.asciidoc @@ -6,8 +6,6 @@ Remove policy ++++ -beta[] - Removes the assigned lifecycle policy from an index. ==== Request diff --git a/docs/reference/ilm/apis/retry-policy.asciidoc b/docs/reference/ilm/apis/retry-policy.asciidoc index 2d4944a7ac40d..c67540d2003f7 100644 --- a/docs/reference/ilm/apis/retry-policy.asciidoc +++ b/docs/reference/ilm/apis/retry-policy.asciidoc @@ -6,8 +6,6 @@ Retry policy ++++ -beta[] - Retry executing the policy for an index that is in the ERROR step. ==== Request diff --git a/docs/reference/ilm/apis/start.asciidoc b/docs/reference/ilm/apis/start.asciidoc index 97b7985c65428..92ca2a9806379 100644 --- a/docs/reference/ilm/apis/start.asciidoc +++ b/docs/reference/ilm/apis/start.asciidoc @@ -6,8 +6,6 @@ Start {ilm} ++++ -beta[] - Start the {ilm} ({ilm-init}) plugin. ==== Request diff --git a/docs/reference/ilm/apis/stop.asciidoc b/docs/reference/ilm/apis/stop.asciidoc index b2a1dba411a16..dee1cf4fa3579 100644 --- a/docs/reference/ilm/apis/stop.asciidoc +++ b/docs/reference/ilm/apis/stop.asciidoc @@ -6,8 +6,6 @@ Stop {ilm} ++++ -beta[] - Stop the {ilm} ({ilm-init}) plugin. ==== Request diff --git a/docs/reference/ilm/error-handling.asciidoc b/docs/reference/ilm/error-handling.asciidoc index 639c2fbaddd6d..abe643255bf95 100644 --- a/docs/reference/ilm/error-handling.asciidoc +++ b/docs/reference/ilm/error-handling.asciidoc @@ -3,8 +3,6 @@ [[index-lifecycle-error-handling]] == Index lifecycle error handling -beta[] - During Index Lifecycle Management's execution of the policy for an index, it's possible for a step to encounter an error during its execution. When this happens, ILM will move the management state into an "error" step. This halts @@ -119,7 +117,7 @@ Which returns the following information: <9> the definition of the phase (in this case, the "warm" phase) that the index is currently on The index here has been moved to the error step because the shrink definition in -the policy is using an incorrect number of shards. So rectifing that in the +the policy is using an incorrect number of shards. So rectifying that in the policy entails updating the existing policy to use one instead of four for the targeted number of shards. diff --git a/docs/reference/ilm/getting-started-ilm.asciidoc b/docs/reference/ilm/getting-started-ilm.asciidoc index 63193544bfff5..f06c95f49c067 100644 --- a/docs/reference/ilm/getting-started-ilm.asciidoc +++ b/docs/reference/ilm/getting-started-ilm.asciidoc @@ -3,8 +3,6 @@ [[getting-started-index-lifecycle-management]] == Getting started with {ilm} -beta[] - Let's jump into {ilm} ({ilm-init}) by working through a hands-on scenario. This section will leverage many new concepts unique to {ilm-init} that you may not be familiar with. The following sections will explore @@ -19,8 +17,6 @@ after 90 days. === Setting up a new policy -beta[] - There are many new features introduced by {ilm-init}, but we will only focus on a few that are needed for our example. For starters, we will use the <> API to define our first policy. Lifecycle @@ -70,8 +66,6 @@ The index will be deleted 90 days after it is rolled over. === Applying a policy to our index -beta[] - There are <> to associate a policy to an index. 
Since we wish specific settings to be applied to the new index created from Rollover, we will set the policy via @@ -143,8 +137,6 @@ alias to be read-only for the source index. === Checking progress -beta[] - Now that we have an index managed by our policy, how do we tell what is going on? Which phase are we in? Is something broken? This section will go over a few APIs and their responses to help us inspect our indices with respect diff --git a/docs/reference/ilm/ilm-and-snapshots.asciidoc b/docs/reference/ilm/ilm-and-snapshots.asciidoc index 45028b4f1f85c..c71fbe0471da2 100644 --- a/docs/reference/ilm/ilm-and-snapshots.asciidoc +++ b/docs/reference/ilm/ilm-and-snapshots.asciidoc @@ -3,8 +3,6 @@ [[index-lifecycle-and-snapshots]] == Restoring snapshots of managed indices -beta[] - When restoring a snapshot that contains indices managed by Index Lifecycle Management, the lifecycle will automatically continue to execute after the snapshot is restored. Notably, the `min_age` is relative to the original @@ -32,4 +30,4 @@ prevent the execution of the lifecycle policy for an index: 3. Perform whatever operations you wish before resuming lifecycle execution, or remove the lifecycle policy from the index using the <> -4. Resume execution of lifecycle policies using the <> \ No newline at end of file +4. Resume execution of lifecycle policies using the <> diff --git a/docs/reference/ilm/index.asciidoc b/docs/reference/ilm/index.asciidoc index aaf7e2eede54e..aa27ab1386b80 100644 --- a/docs/reference/ilm/index.asciidoc +++ b/docs/reference/ilm/index.asciidoc @@ -5,7 +5,6 @@ [partintro] -- -beta[] The <> enable you to automate how you want to manage your indices over time. Rather than simply @@ -41,7 +40,7 @@ replicas can be reduced. For example, if you are indexing metrics data from a fleet of ATMs into Elasticsearch, you might define a policy that says: -. When the index reaches 5GB, roll over to a new index. +. When the index reaches 50GB, roll over to a new index. . Move the old index into the warm stage, mark it read only, and shrink it down to a single shard. . After 7 days, move the index into the cold stage and move it to less expensive diff --git a/docs/reference/ilm/policy-definitions.asciidoc b/docs/reference/ilm/policy-definitions.asciidoc index 2e2aababad647..ab2c0a039f9d0 100644 --- a/docs/reference/ilm/policy-definitions.asciidoc +++ b/docs/reference/ilm/policy-definitions.asciidoc @@ -1,11 +1,8 @@ -beta[] [role="xpack"] [testenv="basic"] [[ilm-policy-definition]] == Policy phases and actions -beta[] - There are four stages in the index lifecycle, in the order they are executed. @@ -26,8 +23,6 @@ phase and the delete phase, while another may define all four phases. === Timing -beta[] - Indices enter phases based on a phase's `min_age` parameter. The index will not enter the phase until the index's age is older than that of the `min_age`. The parameter is configured using a time @@ -76,8 +71,6 @@ and transition into the next phase. === Phase Execution -beta[] - The current phase definition, of an index's policy being executed, is stored in the index's metadata. The phase and its actions are compiled into a series of discrete steps that are executed sequentially. Since some {ilm-init} actions @@ -89,8 +82,6 @@ executing. === Actions -beta[] - The below list shows the actions which are available in each phase. * Hot @@ -284,6 +275,39 @@ PUT _ilm/policy/my_policy -------------------------------------------------- // CONSOLE +[[ilm-freeze-action]] +==== Freeze + +Phases allowed: cold. 
+ +This action will <> the index +by calling the <>. + +[source,js] +-------------------------------------------------- +PUT _ilm/policy/my_policy +{ + "policy": { + "phases": { + "cold": { + "actions": { + "freeze" : { } + } + } + } + } +} +-------------------------------------------------- +// CONSOLE + +[IMPORTANT] +================================ + Freezing an index will close the index and reopen it within the same API call. + This causes primaries to not be allocated for a short amount of time and + causes the cluster to go red until the primaries are allocated again. + This limitation might be removed in the future. +================================ + [[ilm-readonly-action]] ==== Read-Only @@ -393,7 +417,7 @@ PUT _ilm/policy/my_policy ===== Example: Rollover when index has too many documents This example rolls the index over when it contains at least -1000000 documents. +100000000 documents. [source,js] -------------------------------------------------- @@ -404,7 +428,7 @@ PUT _ilm/policy/my_policy "hot": { "actions": { "rollover" : { - "max_docs": 1000000 + "max_docs": 100000000 } } } @@ -549,11 +573,9 @@ PUT _ilm/policy/my_policy === Full Policy -beta[] - With all of these actions, we can support complex management strategies for our indices. This policy will define an index that will start in the hot phase, -rolling over every 20g or 7 days. After 30 days it enters the warm phase +rolling over every 50 GB or 7 days. After 30 days it enters the warm phase and increases the replicas to 2, force merges and shrinks. After 60 days it enters the cold phase and allocates to "cold" nodes, and after 90 days the index is deleted. @@ -568,7 +590,7 @@ PUT _ilm/policy/full_policy "actions": { "rollover": { "max_age": "7d", - "max_size": "20G" + "max_size": "50G" } } }, diff --git a/docs/reference/ilm/set-up-lifecycle-policy.asciidoc b/docs/reference/ilm/set-up-lifecycle-policy.asciidoc index 1ab8d4399d5e7..7af686238f334 100644 --- a/docs/reference/ilm/set-up-lifecycle-policy.asciidoc +++ b/docs/reference/ilm/set-up-lifecycle-policy.asciidoc @@ -3,8 +3,6 @@ [[set-up-lifecycle-policy]] == Set up {ilm} policy -beta[] - In order for an index to use an {ilm} policy to manage its lifecycle we must first define a lifecycle policy for it to use. The following request creates a policy called `my_policy` in Elasticsearch which we can later use to manage our @@ -49,8 +47,6 @@ To set the policy for an index there are two options: [[applying-policy-to-template]] === Applying a policy to an index template -beta[] - The `index.lifecycle.name` setting can be set in an index template so that it is automatically applied to indexes matching the templates index pattern: @@ -95,8 +91,6 @@ create a new index and roll the alias over to use the new index automatically. === Apply a policy to a create index request -beta[] - The `index.lifecycle.name` setting can be set on an individual create index request so {ilm} immediately starts managing the index: diff --git a/docs/reference/ilm/start-stop-ilm.asciidoc b/docs/reference/ilm/start-stop-ilm.asciidoc index 4414c13ee0f5c..e5366f028a9c7 100644 --- a/docs/reference/ilm/start-stop-ilm.asciidoc +++ b/docs/reference/ilm/start-stop-ilm.asciidoc @@ -3,8 +3,6 @@ [[start-stop-ilm]] == Start and stop {ilm} -beta[] - All indices that are managed by ILM will continue to execute their policies. There may be times when this is not desired on certain indices, or maybe even all the indices in a cluster. 
For example, diff --git a/docs/reference/ilm/update-lifecycle-policy.asciidoc b/docs/reference/ilm/update-lifecycle-policy.asciidoc index a566c737b7c0d..3e6627fdd3a7e 100644 --- a/docs/reference/ilm/update-lifecycle-policy.asciidoc +++ b/docs/reference/ilm/update-lifecycle-policy.asciidoc @@ -6,8 +6,6 @@ Update policy ++++ -beta[] - You can update an existing lifecycle policy to fix mistakes or change strategies for newly created indices. It is possible to update policy definitions and an index's `index.lifecycle.name` settings independently. To prevent the situation @@ -22,8 +20,6 @@ their effects on policy execution on indices. === Updates to policies not managing indices -beta[] - Indices not referencing an existing policy that is updated will not be affected. If an index is assigned to the policy, it will be assigned the latest version of that policy @@ -137,8 +133,6 @@ the policy. === Updates to executing policies -beta[] - Indices preserve the phase definition from the latest policy version that existed at the time that it entered that phase. Changes to the currently-executing phase within policy updates will not be reflected during execution. This means that updates to the `hot` phase, for example, will not affect @@ -445,8 +439,6 @@ GET my_index/_ilm/explain === Switching policies for an index -beta[] - Setting `index.lifecycle.name` to a different policy behaves much like a policy update, but instead of just switching to a different version, it switches to a different policy. diff --git a/docs/reference/ilm/using-policies-rollover.asciidoc b/docs/reference/ilm/using-policies-rollover.asciidoc index 3af6e125fcd99..266346fb8629f 100644 --- a/docs/reference/ilm/using-policies-rollover.asciidoc +++ b/docs/reference/ilm/using-policies-rollover.asciidoc @@ -3,8 +3,6 @@ [[using-policies-rollover]] == Using policies to manage index rollover -beta[] - The rollover action enables you to automatically roll over to a new index based on the index size, document count, or age. When a rollover is triggered, a new index is created, the write alias is updated to point to the new index, and all @@ -18,7 +16,7 @@ resource usage. You control when the rollover action is triggered by specifying one or more rollover parameters. The rollover is performed once any of the criteria are met. Because the criteria are checked periodically, the index might grow -slightly beyond the specified threshold. To control how often the critera are +slightly beyond the specified threshold. To control how often the criteria are checked, specify the `indices.lifecycle.poll_interval` cluster setting. IMPORTANT: New indices created via rollover will not automatically inherit the @@ -34,9 +32,9 @@ The rollover action takes the following parameters: |=== |Name |Description |max_size |The maximum estimated size the index is allowed to grow -to. Defaults tonull. Optional. +to. Defaults to `null`. Optional. |max_docs |The maximum number of document the index should -contain. Defaults tonull. Optional. +contain. Defaults to `null`. Optional. |max_age |The maximum age of the index. Defaults to `null`. Optional. |=== @@ -127,8 +125,6 @@ the new index, enabling indexing to continue uninterrupted. === Skipping Rollover -beta[] - The `index.lifecycle.indexing_complete` setting indicates to {ilm} whether this index has already been rolled over. 
If it is set to `true`, that indicates that this index has already been rolled over and does not need to be rolled over diff --git a/docs/reference/images/sql/odbc/administrator_drivers.png b/docs/reference/images/sql/odbc/administrator_drivers.png new file mode 100644 index 0000000000000..9f4a26b178ff0 Binary files /dev/null and b/docs/reference/images/sql/odbc/administrator_drivers.png differ diff --git a/docs/reference/images/sql/odbc/administrator_launch_editor.png b/docs/reference/images/sql/odbc/administrator_launch_editor.png new file mode 100644 index 0000000000000..3bb93af29f912 Binary files /dev/null and b/docs/reference/images/sql/odbc/administrator_launch_editor.png differ diff --git a/docs/reference/images/sql/odbc/administrator_system_add.png b/docs/reference/images/sql/odbc/administrator_system_add.png new file mode 100644 index 0000000000000..64d47b67f8146 Binary files /dev/null and b/docs/reference/images/sql/odbc/administrator_system_add.png differ diff --git a/docs/reference/images/sql/odbc/administrator_system_added.png b/docs/reference/images/sql/odbc/administrator_system_added.png new file mode 100644 index 0000000000000..6797264a89e25 Binary files /dev/null and b/docs/reference/images/sql/odbc/administrator_system_added.png differ diff --git a/docs/reference/images/sql/odbc/administrator_tracing.png b/docs/reference/images/sql/odbc/administrator_tracing.png new file mode 100644 index 0000000000000..14493ba8d5a2c Binary files /dev/null and b/docs/reference/images/sql/odbc/administrator_tracing.png differ diff --git a/docs/reference/images/sql/odbc/apps_excel_cred.png b/docs/reference/images/sql/odbc/apps_excel_cred.png new file mode 100644 index 0000000000000..a3da36dbf6e29 Binary files /dev/null and b/docs/reference/images/sql/odbc/apps_excel_cred.png differ diff --git a/docs/reference/images/sql/odbc/apps_excel_dsn.png b/docs/reference/images/sql/odbc/apps_excel_dsn.png new file mode 100644 index 0000000000000..7e81cc01f1211 Binary files /dev/null and b/docs/reference/images/sql/odbc/apps_excel_dsn.png differ diff --git a/docs/reference/images/sql/odbc/apps_excel_fromodbc.png b/docs/reference/images/sql/odbc/apps_excel_fromodbc.png new file mode 100644 index 0000000000000..603af4dfc7253 Binary files /dev/null and b/docs/reference/images/sql/odbc/apps_excel_fromodbc.png differ diff --git a/docs/reference/images/sql/odbc/apps_excel_loaded.png b/docs/reference/images/sql/odbc/apps_excel_loaded.png new file mode 100644 index 0000000000000..7d7ea86c8cc49 Binary files /dev/null and b/docs/reference/images/sql/odbc/apps_excel_loaded.png differ diff --git a/docs/reference/images/sql/odbc/apps_excel_picktable.png b/docs/reference/images/sql/odbc/apps_excel_picktable.png new file mode 100644 index 0000000000000..fd7aecc412871 Binary files /dev/null and b/docs/reference/images/sql/odbc/apps_excel_picktable.png differ diff --git a/docs/reference/images/sql/odbc/apps_microstrat_databases.png b/docs/reference/images/sql/odbc/apps_microstrat_databases.png new file mode 100644 index 0000000000000..9f1c69b796887 Binary files /dev/null and b/docs/reference/images/sql/odbc/apps_microstrat_databases.png differ diff --git a/docs/reference/images/sql/odbc/apps_microstrat_dsn.png b/docs/reference/images/sql/odbc/apps_microstrat_dsn.png new file mode 100644 index 0000000000000..4fa4c90947fa8 Binary files /dev/null and b/docs/reference/images/sql/odbc/apps_microstrat_dsn.png differ diff --git a/docs/reference/images/sql/odbc/apps_microstrat_inmem.png 
b/docs/reference/images/sql/odbc/apps_microstrat_inmem.png new file mode 100644 index 0000000000000..3e97c03111547 Binary files /dev/null and b/docs/reference/images/sql/odbc/apps_microstrat_inmem.png differ diff --git a/docs/reference/images/sql/odbc/apps_microstrat_live.png b/docs/reference/images/sql/odbc/apps_microstrat_live.png new file mode 100644 index 0000000000000..2a3e0fa02a361 Binary files /dev/null and b/docs/reference/images/sql/odbc/apps_microstrat_live.png differ diff --git a/docs/reference/images/sql/odbc/apps_microstrat_loadtable.png b/docs/reference/images/sql/odbc/apps_microstrat_loadtable.png new file mode 100644 index 0000000000000..a1502c4e9f31e Binary files /dev/null and b/docs/reference/images/sql/odbc/apps_microstrat_loadtable.png differ diff --git a/docs/reference/images/sql/odbc/apps_microstrat_newdata.png b/docs/reference/images/sql/odbc/apps_microstrat_newdata.png new file mode 100644 index 0000000000000..3a00c6dffe2c6 Binary files /dev/null and b/docs/reference/images/sql/odbc/apps_microstrat_newdata.png differ diff --git a/docs/reference/images/sql/odbc/apps_microstrat_newdossier.png b/docs/reference/images/sql/odbc/apps_microstrat_newdossier.png new file mode 100644 index 0000000000000..275588a7fe0c2 Binary files /dev/null and b/docs/reference/images/sql/odbc/apps_microstrat_newdossier.png differ diff --git a/docs/reference/images/sql/odbc/apps_microstrat_newds.png b/docs/reference/images/sql/odbc/apps_microstrat_newds.png new file mode 100644 index 0000000000000..45e3666eae63a Binary files /dev/null and b/docs/reference/images/sql/odbc/apps_microstrat_newds.png differ diff --git a/docs/reference/images/sql/odbc/apps_microstrat_tables.png b/docs/reference/images/sql/odbc/apps_microstrat_tables.png new file mode 100644 index 0000000000000..71283d05e5c88 Binary files /dev/null and b/docs/reference/images/sql/odbc/apps_microstrat_tables.png differ diff --git a/docs/reference/images/sql/odbc/apps_microstrat_visualize.png b/docs/reference/images/sql/odbc/apps_microstrat_visualize.png new file mode 100644 index 0000000000000..3e15946f0f1b4 Binary files /dev/null and b/docs/reference/images/sql/odbc/apps_microstrat_visualize.png differ diff --git a/docs/reference/images/sql/odbc/apps_pbi_dsn.png b/docs/reference/images/sql/odbc/apps_pbi_dsn.png new file mode 100644 index 0000000000000..9e9512ec40f5c Binary files /dev/null and b/docs/reference/images/sql/odbc/apps_pbi_dsn.png differ diff --git a/docs/reference/images/sql/odbc/apps_pbi_fromodbc1.png b/docs/reference/images/sql/odbc/apps_pbi_fromodbc1.png new file mode 100644 index 0000000000000..313b1edbc74f5 Binary files /dev/null and b/docs/reference/images/sql/odbc/apps_pbi_fromodbc1.png differ diff --git a/docs/reference/images/sql/odbc/apps_pbi_fromodbc2.png b/docs/reference/images/sql/odbc/apps_pbi_fromodbc2.png new file mode 100644 index 0000000000000..fade98f4ad576 Binary files /dev/null and b/docs/reference/images/sql/odbc/apps_pbi_fromodbc2.png differ diff --git a/docs/reference/images/sql/odbc/apps_pbi_loaded.png b/docs/reference/images/sql/odbc/apps_pbi_loaded.png new file mode 100644 index 0000000000000..c1927d2200c40 Binary files /dev/null and b/docs/reference/images/sql/odbc/apps_pbi_loaded.png differ diff --git a/docs/reference/images/sql/odbc/apps_pbi_picktable.png b/docs/reference/images/sql/odbc/apps_pbi_picktable.png new file mode 100644 index 0000000000000..2b2e1c8e4e501 Binary files /dev/null and b/docs/reference/images/sql/odbc/apps_pbi_picktable.png differ diff --git 
a/docs/reference/images/sql/odbc/apps_ps_exed.png b/docs/reference/images/sql/odbc/apps_ps_exed.png new file mode 100644 index 0000000000000..84c3c12ec4867 Binary files /dev/null and b/docs/reference/images/sql/odbc/apps_ps_exed.png differ diff --git a/docs/reference/images/sql/odbc/apps_qlik_adddata.png b/docs/reference/images/sql/odbc/apps_qlik_adddata.png new file mode 100644 index 0000000000000..b32596c1c011a Binary files /dev/null and b/docs/reference/images/sql/odbc/apps_qlik_adddata.png differ diff --git a/docs/reference/images/sql/odbc/apps_qlik_create.png b/docs/reference/images/sql/odbc/apps_qlik_create.png new file mode 100644 index 0000000000000..4a2438c1cfae8 Binary files /dev/null and b/docs/reference/images/sql/odbc/apps_qlik_create.png differ diff --git a/docs/reference/images/sql/odbc/apps_qlik_dsn.png b/docs/reference/images/sql/odbc/apps_qlik_dsn.png new file mode 100644 index 0000000000000..79852e5016843 Binary files /dev/null and b/docs/reference/images/sql/odbc/apps_qlik_dsn.png differ diff --git a/docs/reference/images/sql/odbc/apps_qlik_newapp.png b/docs/reference/images/sql/odbc/apps_qlik_newapp.png new file mode 100644 index 0000000000000..1909707825ac3 Binary files /dev/null and b/docs/reference/images/sql/odbc/apps_qlik_newapp.png differ diff --git a/docs/reference/images/sql/odbc/apps_qlik_odbc.png b/docs/reference/images/sql/odbc/apps_qlik_odbc.png new file mode 100644 index 0000000000000..9b56fe6bcbe3a Binary files /dev/null and b/docs/reference/images/sql/odbc/apps_qlik_odbc.png differ diff --git a/docs/reference/images/sql/odbc/apps_qlik_open.png b/docs/reference/images/sql/odbc/apps_qlik_open.png new file mode 100644 index 0000000000000..f4e33230ecc64 Binary files /dev/null and b/docs/reference/images/sql/odbc/apps_qlik_open.png differ diff --git a/docs/reference/images/sql/odbc/apps_qlik_selecttable.png b/docs/reference/images/sql/odbc/apps_qlik_selecttable.png new file mode 100644 index 0000000000000..c6a485cb85c79 Binary files /dev/null and b/docs/reference/images/sql/odbc/apps_qlik_selecttable.png differ diff --git a/docs/reference/images/sql/odbc/apps_qlik_visualize.png b/docs/reference/images/sql/odbc/apps_qlik_visualize.png new file mode 100644 index 0000000000000..c87cd505de348 Binary files /dev/null and b/docs/reference/images/sql/odbc/apps_qlik_visualize.png differ diff --git a/docs/reference/images/sql/odbc/apps_tableau_connd.png b/docs/reference/images/sql/odbc/apps_tableau_connd.png new file mode 100644 index 0000000000000..ae34f673aa318 Binary files /dev/null and b/docs/reference/images/sql/odbc/apps_tableau_connd.png differ diff --git a/docs/reference/images/sql/odbc/apps_tableau_fromodbc.png b/docs/reference/images/sql/odbc/apps_tableau_fromodbc.png new file mode 100644 index 0000000000000..717c5e4a8861e Binary files /dev/null and b/docs/reference/images/sql/odbc/apps_tableau_fromodbc.png differ diff --git a/docs/reference/images/sql/odbc/apps_tableau_loaded.png b/docs/reference/images/sql/odbc/apps_tableau_loaded.png new file mode 100644 index 0000000000000..61e80be627ff0 Binary files /dev/null and b/docs/reference/images/sql/odbc/apps_tableau_loaded.png differ diff --git a/docs/reference/images/sql/odbc/dsn_editor_basic.png b/docs/reference/images/sql/odbc/dsn_editor_basic.png new file mode 100644 index 0000000000000..9359d45a3e7da Binary files /dev/null and b/docs/reference/images/sql/odbc/dsn_editor_basic.png differ diff --git a/docs/reference/images/sql/odbc/dsn_editor_conntest.png 
b/docs/reference/images/sql/odbc/dsn_editor_conntest.png new file mode 100644 index 0000000000000..7430966c9dabf Binary files /dev/null and b/docs/reference/images/sql/odbc/dsn_editor_conntest.png differ diff --git a/docs/reference/images/sql/odbc/dsn_editor_logging.png b/docs/reference/images/sql/odbc/dsn_editor_logging.png new file mode 100644 index 0000000000000..dcc62dd7d26f8 Binary files /dev/null and b/docs/reference/images/sql/odbc/dsn_editor_logging.png differ diff --git a/docs/reference/images/sql/odbc/dsn_editor_security.png b/docs/reference/images/sql/odbc/dsn_editor_security.png new file mode 100644 index 0000000000000..33bac80aff7ee Binary files /dev/null and b/docs/reference/images/sql/odbc/dsn_editor_security.png differ diff --git a/docs/reference/images/sql/odbc/dsn_editor_security_cert.png b/docs/reference/images/sql/odbc/dsn_editor_security_cert.png new file mode 100644 index 0000000000000..c37c03c6cbc5d Binary files /dev/null and b/docs/reference/images/sql/odbc/dsn_editor_security_cert.png differ diff --git a/docs/reference/images/sql/odbc/env_var_log.png b/docs/reference/images/sql/odbc/env_var_log.png new file mode 100644 index 0000000000000..739f9650d9564 Binary files /dev/null and b/docs/reference/images/sql/odbc/env_var_log.png differ diff --git a/docs/reference/images/sql/odbc/installer_accept_license.png b/docs/reference/images/sql/odbc/installer_accept_license.png new file mode 100644 index 0000000000000..4fc51d1373c8d Binary files /dev/null and b/docs/reference/images/sql/odbc/installer_accept_license.png differ diff --git a/docs/reference/images/sql/odbc/installer_choose_destination.png b/docs/reference/images/sql/odbc/installer_choose_destination.png new file mode 100644 index 0000000000000..12419b180d97c Binary files /dev/null and b/docs/reference/images/sql/odbc/installer_choose_destination.png differ diff --git a/docs/reference/images/sql/odbc/installer_finish.png b/docs/reference/images/sql/odbc/installer_finish.png new file mode 100644 index 0000000000000..a7ec3606dc296 Binary files /dev/null and b/docs/reference/images/sql/odbc/installer_finish.png differ diff --git a/docs/reference/images/sql/odbc/installer_installing.png b/docs/reference/images/sql/odbc/installer_installing.png new file mode 100644 index 0000000000000..21bec24b1bc74 Binary files /dev/null and b/docs/reference/images/sql/odbc/installer_installing.png differ diff --git a/docs/reference/images/sql/odbc/installer_preparing.png b/docs/reference/images/sql/odbc/installer_preparing.png new file mode 100644 index 0000000000000..4bb218548744e Binary files /dev/null and b/docs/reference/images/sql/odbc/installer_preparing.png differ diff --git a/docs/reference/images/sql/odbc/installer_ready_install.png b/docs/reference/images/sql/odbc/installer_ready_install.png new file mode 100644 index 0000000000000..9ad0e52abb4a8 Binary files /dev/null and b/docs/reference/images/sql/odbc/installer_ready_install.png differ diff --git a/docs/reference/images/sql/odbc/installer_started.png b/docs/reference/images/sql/odbc/installer_started.png new file mode 100644 index 0000000000000..e713594e3cc75 Binary files /dev/null and b/docs/reference/images/sql/odbc/installer_started.png differ diff --git a/docs/reference/images/sql/odbc/launch_administrator.png b/docs/reference/images/sql/odbc/launch_administrator.png new file mode 100644 index 0000000000000..f7cc37120d7ff Binary files /dev/null and b/docs/reference/images/sql/odbc/launch_administrator.png differ diff --git 
a/docs/reference/images/sql/odbc/msi_icon.png b/docs/reference/images/sql/odbc/msi_icon.png new file mode 100644 index 0000000000000..a45bfa28d0e7f Binary files /dev/null and b/docs/reference/images/sql/odbc/msi_icon.png differ diff --git a/docs/reference/images/sql/odbc/uninstall.png b/docs/reference/images/sql/odbc/uninstall.png new file mode 100644 index 0000000000000..5bd2ccb7fde2a Binary files /dev/null and b/docs/reference/images/sql/odbc/uninstall.png differ diff --git a/docs/reference/index-modules/allocation/prioritization.asciidoc b/docs/reference/index-modules/allocation/prioritization.asciidoc index 92051cc4dbc57..6693e6adb755e 100644 --- a/docs/reference/index-modules/allocation/prioritization.asciidoc +++ b/docs/reference/index-modules/allocation/prioritization.asciidoc @@ -10,7 +10,7 @@ Indices are sorted into priority order as follows: This means that, by default, newer indices will be recovered before older indices. -Use the per-index dynamically updateable `index.priority` setting to customise +Use the per-index dynamically updatable `index.priority` setting to customise the index prioritization order. For instance: [source,js] diff --git a/docs/reference/index-modules/store.asciidoc b/docs/reference/index-modules/store.asciidoc index c2b3d700e9b7c..8c1b99a42f2a6 100644 --- a/docs/reference/index-modules/store.asciidoc +++ b/docs/reference/index-modules/store.asciidoc @@ -40,7 +40,7 @@ The following sections lists all the different storage types supported. `fs`:: Default file system implementation. This will pick the best implementation -depending on the operating environment, which is currently `mmapfs` on all +depending on the operating environment, which is currently `hybridfs` on all supported systems but is subject to change. [[simplefs]]`simplefs`:: @@ -67,12 +67,22 @@ process equal to the size of the file being mapped. Before using this class, be sure you have allowed plenty of <>. -[[allow-mmapfs]] -You can restrict the use of the `mmapfs` store type via the setting -`node.store.allow_mmapfs`. This is a boolean setting indicating whether or not -`mmapfs` is allowed. The default is to allow `mmapfs`. This setting is useful, -for example, if you are in an environment where you can not control the ability -to create a lot of memory maps so you need disable the ability to use `mmapfs`. +[[hybridfs]]`hybridfs`:: + +The `hybridfs` type is a hybrid of `niofs` and `mmapfs`, which chooses the best +file system type for each type of file based on the read access pattern. +Currently only the Lucene term dictionary, norms and doc values files are +memory mapped. All other files are opened using Lucene `NIOFSDirectory`. +Similarly to `mmapfs`, be sure you have allowed plenty of +<>. + +[[allow-mmap]] +You can restrict the use of the `mmapfs` and the related `hybridfs` store type +via the setting `node.store.allow_mmap`. This is a boolean setting indicating +whether or not memory-mapping is allowed. The default is to allow it. This +setting is useful, for example, if you are in an environment where you cannot +control the ability to create a lot of memory maps, so you need to disable the +ability to use memory-mapping. === Pre-loading data into the file system cache diff --git a/docs/reference/indices/flush.asciidoc b/docs/reference/indices/flush.asciidoc index 37d48eec2a215..c6bf60182fd76 100644 --- a/docs/reference/indices/flush.asciidoc +++ b/docs/reference/indices/flush.asciidoc @@ -27,7 +27,7 @@ flush can be executed if another flush operation is already executing. 
The default is `false` and will cause an exception to be thrown on the shard level if another flush operation is already running. -`force`:: Whether a flush should be forced even if it is not necessarily needed ie. +`force`:: Whether a flush should be forced even if it is not necessarily needed i.e. if no changes will be committed to the index. This is useful if transaction log IDs should be incremented even if no uncommitted changes are present. (This setting can be considered as internal) diff --git a/docs/reference/indices/get-index.asciidoc b/docs/reference/indices/get-index.asciidoc index ffa512dca6363..8e254a576c11d 100644 --- a/docs/reference/indices/get-index.asciidoc +++ b/docs/reference/indices/get-index.asciidoc @@ -15,3 +15,63 @@ alias or wildcard expression is required. The get index API can also be applied to more than one index, or on all indices by using `_all` or `*` as index. + +[float] +=== Skipping types + +Types are scheduled to be fully removed in Elasticsearch 8.0 and will not appear +in requests or responses anymore. You can opt in to this future behaviour by +setting `include_type_name=false` in the request, which will return mappings +directly under `mappings` without keying by the type name. + +Here is an example: + +[source,js] +-------------------------------------------------- +PUT test?include_type_name=false +{ + "mappings": { + "properties": { + "foo": { + "type": "keyword" + } + } + } +} + +GET test?include_type_name=false +-------------------------------------------------- +// CONSOLE + +which returns + +[source,js] +-------------------------------------------------- +{ + "test": { + "aliases": {}, + "mappings": { + "properties": { + "foo": { + "type": "keyword" + } + } + }, + "settings": { + "index": { + "creation_date": "1547028674905", + "number_of_shards": "1", + "number_of_replicas": "1", + "uuid": "u1YpkPqLSqGIn3kNAvY8cA", + "version": { + "created": ... + }, + "provided_name": "test" + } + } + } +} +-------------------------------------------------- +// TESTRESPONSE[s/1547028674905/$body.test.settings.index.creation_date/] +// TESTRESPONSE[s/u1YpkPqLSqGIn3kNAvY8cA/$body.test.settings.index.uuid/] +// TESTRESPONSE[s/"created": \.\.\./"created": $body.test.settings.index.version.created/] diff --git a/docs/reference/indices/stats.asciidoc b/docs/reference/indices/stats.asciidoc index a95b1c81ae189..9ccd78f8f5df2 100644 --- a/docs/reference/indices/stats.asciidoc +++ b/docs/reference/indices/stats.asciidoc @@ -88,7 +88,7 @@ GET /_stats/search?groups=group1,group2 The stats returned are aggregated on the index level, with `primaries` and `total` aggregations, where `primaries` are the values for only the -primary shards, and `total` are the cumulated values for both primary and replica shards. +primary shards, and `total` are the accumulated values for both primary and replica shards. In order to get back shard level stats, set the `level` parameter to `shards`. diff --git a/docs/reference/ingest/ingest-node.asciidoc b/docs/reference/ingest/ingest-node.asciidoc index 7f6bbb5302af9..578bf35cb2446 100644 --- a/docs/reference/ingest/ingest-node.asciidoc +++ b/docs/reference/ingest/ingest-node.asciidoc @@ -428,7 +428,7 @@ For example: `'Guest'.equalsIgnoreCase(ctx.network?.name)` is null safe because `ctx.network?.name` can return null. Some situations require an explicit null check. In the following example there -is not null safe alternative, so an explict null check is needed. +is no null safe alternative, so an explicit null check is needed.
[source,js] -------------------------------------------------- diff --git a/docs/reference/ingest/processors/dissect.asciidoc b/docs/reference/ingest/processors/dissect.asciidoc index 0bcd1a27c7437..0c04e7ed07396 100644 --- a/docs/reference/ingest/processors/dissect.asciidoc +++ b/docs/reference/ingest/processors/dissect.asciidoc @@ -47,7 +47,7 @@ Later dissect matches the `[` and then `]` and then assigns `@timestamp` to ever Paying special attention to the parts of the string to discard will help build successful dissect patterns. Successful matches require all keys in a pattern to have a value. If any of the `%{keyname}` defined in the pattern do -not have a value, then an exception is thrown and may be handled by the <> directive. +not have a value, then an exception is thrown and may be handled by the <> directive. An empty key `%{}` or a <> can be used to match values, but exclude the value from the final document. All matched values are represented as string data types. The <> may be used to convert to the expected data type. diff --git a/docs/reference/ingest/processors/dot-expand.asciidoc b/docs/reference/ingest/processors/dot-expand.asciidoc index b3322c96a25f8..1e8eb7da6cf03 100644 --- a/docs/reference/ingest/processors/dot-expand.asciidoc +++ b/docs/reference/ingest/processors/dot-expand.asciidoc @@ -5,7 +5,7 @@ Expands a field with dots into an object field. This processor allows fields with dots in the name to be accessible by other processors in the pipeline. Otherwise these <> can't be accessed by any processor. -[[dot-expender-options]] +[[dot-expander-options]] .Dot Expand Options [options="header"] |====== diff --git a/docs/reference/ingest/processors/geoip.asciidoc b/docs/reference/ingest/processors/geoip.asciidoc index fabd58a7096ff..7b80422c3aa2f 100644 --- a/docs/reference/ingest/processors/geoip.asciidoc +++ b/docs/reference/ingest/processors/geoip.asciidoc @@ -189,7 +189,7 @@ Which returns: ===== Recognizing Location as a Geopoint Although this processor enriches your document with a `location` field containing the estimated latitude and longitude of the IP address, this field will not be -indexed as a {ref}/geo-point.html[`geo_point`] type in Elasticsearch without explicitely defining it +indexed as a {ref}/geo-point.html[`geo_point`] type in Elasticsearch without explicitly defining it as such in the mapping. You can use the following mapping for the example index above: diff --git a/docs/reference/ingest/processors/grok.asciidoc b/docs/reference/ingest/processors/grok.asciidoc index 315caff0dc6e4..b266879e40b16 100644 --- a/docs/reference/ingest/processors/grok.asciidoc +++ b/docs/reference/ingest/processors/grok.asciidoc @@ -17,7 +17,7 @@ If you need help building patterns to match your logs, you will find the {kibana Grok sits on top of regular expressions, so any regular expressions are valid in grok as well. The regular expression library is Oniguruma, and you can see the full supported regexp syntax -https://github.com/kkos/oniguruma/blob/master/doc/RE[on the Onigiruma site]. +https://github.com/kkos/oniguruma/blob/master/doc/RE[on the Oniguruma site]. Grok works by leveraging this regular expression language to allow naming existing patterns and combining them into more complex patterns that match your fields. @@ -119,8 +119,8 @@ This pipeline will insert these named captures as new fields within the document [[custom-patterns]] ==== Custom Patterns -The Grok processor comes pre-packaged with a base set of pattern.
These patterns may not always have -what you are looking for. Pattern have a very basic format. Each entry describes has a name and the pattern itself. +The Grok processor comes pre-packaged with a base set of patterns. These patterns may not always have +what you are looking for. Patterns have a very basic format. Each entry has a name and the pattern itself. You can add your own patterns to a processor definition under the `pattern_definitions` option. Here is an example of a pipeline specifying custom pattern definitions: diff --git a/docs/reference/mapping/dynamic/templates.asciidoc b/docs/reference/mapping/dynamic/templates.asciidoc index 4fbed66449800..3ad7da6e17744 100644 --- a/docs/reference/mapping/dynamic/templates.asciidoc +++ b/docs/reference/mapping/dynamic/templates.asciidoc @@ -48,7 +48,7 @@ reordered or deleted after they were initially added. The `match_mapping_type` is the datatype detected by the json parser. Since JSON doesn't allow distinguishing a `long` from an `integer` or a `double` from -a `float`, it will always choose the wider datatype, ie. `long` for integers +a `float`, it will always choose the wider datatype, i.e. `long` for integers and `double` for floating-point numbers. The following datatypes may be automatically detected: diff --git a/docs/reference/mapping/types/geo-shape.asciidoc b/docs/reference/mapping/types/geo-shape.asciidoc index 8efb184afa6ba..92ee2d1065100 100644 --- a/docs/reference/mapping/types/geo-shape.asciidoc +++ b/docs/reference/mapping/types/geo-shape.asciidoc @@ -121,7 +121,7 @@ near perfect spatial resolution (down to 1e-7 decimal degree precision) since all spatial relations are computed using an encoded vector representation of the original shape instead of a raster-grid representation as used by the <> indexing approach. Performance of the tessellator primarily -depends on the number of vertices that define the polygon/multi-polyogn. While +depends on the number of vertices that define the polygon/multi-polygon. While this is the default indexing technique, prefix trees can still be used by setting the `tree` or `strategy` parameters according to the appropriate <>. Note that these parameters are now deprecated diff --git a/docs/reference/migration/migrate_7_0/analysis.asciidoc b/docs/reference/migration/migrate_7_0/analysis.asciidoc index e4b27def9f8cf..36ad41be09aa1 100644 --- a/docs/reference/migration/migrate_7_0/analysis.asciidoc +++ b/docs/reference/migration/migrate_7_0/analysis.asciidoc @@ -31,3 +31,11 @@ instead. ==== `standard` filter has been removed The `standard` token filter has been removed because it doesn't change anything in the stream. + +[float] +==== Deprecated standard_html_strip analyzer + +The `standard_html_strip` analyzer has been deprecated, and should be replaced +with a combination of the `standard` tokenizer and `html_strip` char_filter. +Indexes created using this analyzer will still be readable in Elasticsearch 7.0, +but it will not be possible to create new indexes using it. \ No newline at end of file diff --git a/docs/reference/migration/migrate_7_0/search.asciidoc b/docs/reference/migration/migrate_7_0/search.asciidoc index 2ca7c6787bfdf..61cbee851304c 100644 --- a/docs/reference/migration/migrate_7_0/search.asciidoc +++ b/docs/reference/migration/migrate_7_0/search.asciidoc @@ -174,9 +174,10 @@ major version. ==== `hits.total` is now an object in the search response The total hits that match the search request is now returned as an object -with a `value` and a `relation`.
`value indicates the number of hits that -match and `relation indicates whether the value is accurate (`eq`) or a lower bound +with a `value` and a `relation`. `value` indicates the number of hits that +match and `relation` indicates whether the value is accurate (`eq`) or a lower bound (`gte`): + ``` { "hits": { @@ -200,7 +201,7 @@ will be removed in the next major version (8.0). [float] ==== `hits.total` is omitted in the response if `track_total_hits` is disabled (false) -If `track_total_hits` is set to `false in the search request the search response +If `track_total_hits` is set to `false` in the search request the search response will set `hits.total` to null and the object will not be displayed in the rest layer. You can add `rest_total_hits_as_int=true` in the search request parameters -to get the old format back (`"total": -1`). \ No newline at end of file +to get the old format back (`"total": -1`). diff --git a/docs/reference/migration/migrate_7_0/settings.asciidoc b/docs/reference/migration/migrate_7_0/settings.asciidoc index d983994b0c517..6144888fb545d 100644 --- a/docs/reference/migration/migrate_7_0/settings.asciidoc +++ b/docs/reference/migration/migrate_7_0/settings.asciidoc @@ -36,6 +36,11 @@ of the node id. It can still be configured explicitly in `elasticsearch.yml`. available to keep the display output in APIs as `bulk` instead of `write`. These fallback settings and this system property have been removed. +[float] +==== Disabling memory-mapping + +* The setting `node.store.allow_mmapfs` has been renamed to `node.store.allow_mmap`. + [float] [[remove-http-enabled]] ==== Http enabled setting removed diff --git a/docs/reference/ml/aggregations.asciidoc b/docs/reference/ml/aggregations.asciidoc index 7a9d30a518fbb..3f09022d17eaa 100644 --- a/docs/reference/ml/aggregations.asciidoc +++ b/docs/reference/ml/aggregations.asciidoc @@ -8,7 +8,7 @@ and to configure your jobs to analyze aggregated data. One of the benefits of aggregating data this way is that {es} automatically distributes these calculations across your cluster. You can then feed this -aggregated data into {xpackml} instead of raw results, which +aggregated data into the {ml-features} instead of raw results, which reduces the volume of data that must be considered while detecting anomalies. There are some limitations to using aggregations in {dfeeds}, however. @@ -59,7 +59,6 @@ PUT _ml/datafeeds/datafeed-farequote { "job_id":"farequote", "indices": ["farequote"], - "types": ["response"], "aggregations": { "buckets": { "date_histogram": { diff --git a/docs/reference/ml/apis/datafeedresource.asciidoc b/docs/reference/ml/apis/datafeedresource.asciidoc index 25d407ed00f70..8e1251067dd9f 100644 --- a/docs/reference/ml/apis/datafeedresource.asciidoc +++ b/docs/reference/ml/apis/datafeedresource.asciidoc @@ -59,11 +59,6 @@ A {dfeed} resource has the following properties: (unsigned integer) The `size` parameter that is used in {es} searches. The default value is `1000`. -`types`:: - (array) A list of types to search for within the specified indices. For - example: `[]`. This property is provided for backwards compatibility with - releases earlier than 6.0.0. For more information, see <>. - `delayed_data_check_config`:: (object) Specifies whether the data feed checks for missing data and the size of the window.
For example: diff --git a/docs/reference/ml/apis/get-datafeed.asciidoc b/docs/reference/ml/apis/get-datafeed.asciidoc index 437a56b86dfbf..b54eb59bb7a52 100644 --- a/docs/reference/ml/apis/get-datafeed.asciidoc +++ b/docs/reference/ml/apis/get-datafeed.asciidoc @@ -76,7 +76,6 @@ The API returns the following results: "indices": [ "server-metrics" ], - "types": [], "query": { "match_all": { "boost": 1.0 diff --git a/docs/reference/ml/apis/put-datafeed.asciidoc b/docs/reference/ml/apis/put-datafeed.asciidoc index 791a51486c801..d2c935ba6d32d 100644 --- a/docs/reference/ml/apis/put-datafeed.asciidoc +++ b/docs/reference/ml/apis/put-datafeed.asciidoc @@ -73,11 +73,6 @@ You must create a job before you create a {dfeed}. You can associate only one (unsigned integer) The `size` parameter that is used in {es} searches. The default value is `1000`. -`types`:: - (array) A list of types to search for within the specified indices. - For example: `[]`. This property is provided for backwards compatibility with - releases earlier than 6.0.0. For more information, see <>. - `delayed_data_check_config`:: (object) Specifies whether the data feed checks for missing data and, if so, the size of the window. See <>. @@ -125,7 +120,6 @@ When the {dfeed} is created, you receive the following results: "indices": [ "server-metrics" ], - "types": [], "query": { "match_all": { "boost": 1.0 diff --git a/docs/reference/ml/apis/resultsresource.asciidoc b/docs/reference/ml/apis/resultsresource.asciidoc index 8962129c73966..f2533bbd07345 100644 --- a/docs/reference/ml/apis/resultsresource.asciidoc +++ b/docs/reference/ml/apis/resultsresource.asciidoc @@ -269,7 +269,7 @@ probability of this occurrence. There can be many anomaly records depending on the characteristics and size of the input data. In practice, there are often too many to be able to manually -process them. The {xpackml} features therefore perform a sophisticated +process them. The {ml-features} therefore perform a sophisticated aggregation of the anomaly records into buckets. The number of record results depends on the number of anomalies found in each diff --git a/docs/reference/ml/apis/update-datafeed.asciidoc b/docs/reference/ml/apis/update-datafeed.asciidoc index 37b489e6ef596..1e888f823ff72 100644 --- a/docs/reference/ml/apis/update-datafeed.asciidoc +++ b/docs/reference/ml/apis/update-datafeed.asciidoc @@ -68,11 +68,6 @@ The following properties can be updated after the {dfeed} is created: (unsigned integer) The `size` parameter that is used in {es} searches. The default value is `1000`. -`types`:: - (array) A list of types to search for within the specified indices. - For example: `[]`. This property is provided for backwards compatibility with - releases earlier than 6.0.0. For more information, see <>. - For more information about these properties, see <>. @@ -120,7 +115,6 @@ with the updated values: "job_id": "total-requests", "query_delay": "83474ms", "indices": ["server-metrics"], - "types": [], "query": { "term": { "level": { diff --git a/docs/reference/ml/configuring.asciidoc b/docs/reference/ml/configuring.asciidoc index a7773b5681f89..9304a93d360c7 100644 --- a/docs/reference/ml/configuring.asciidoc +++ b/docs/reference/ml/configuring.asciidoc @@ -2,12 +2,12 @@ [[ml-configuring]] == Configuring machine learning -If you want to use {xpackml} features, there must be at least one {ml} node in +If you want to use {ml-features}, there must be at least one {ml} node in your cluster and all master-eligible nodes must have {ml} enabled.
By default, all nodes are {ml} nodes. For more information about these settings, see {ref}/modules-node.html#modules-node-xpack[{ml} nodes]. -To use the {xpackml} features to analyze your data, you must create a job and +To use the {ml-features} to analyze your data, you must create a job and send your data to that job. * If your data is stored in {es}: diff --git a/docs/reference/ml/functions.asciidoc b/docs/reference/ml/functions.asciidoc index e32470c6827b6..48e56bb4627ee 100644 --- a/docs/reference/ml/functions.asciidoc +++ b/docs/reference/ml/functions.asciidoc @@ -2,7 +2,7 @@ [[ml-functions]] == Function reference -The {xpackml} features include analysis functions that provide a wide variety of +The {ml-features} include analysis functions that provide a wide variety of flexible ways to analyze data for anomalies. When you create jobs, you specify one or more detectors, which define the type of diff --git a/docs/reference/ml/functions/count.asciidoc b/docs/reference/ml/functions/count.asciidoc index 3365a0923a8b0..404ed7f2d94a3 100644 --- a/docs/reference/ml/functions/count.asciidoc +++ b/docs/reference/ml/functions/count.asciidoc @@ -14,7 +14,7 @@ in one field is unusual, as opposed to the total count. Use high-sided functions if you want to monitor unusually high event rates. Use low-sided functions if you want to look at drops in event rate. -The {xpackml} features include the following count functions: +The {ml-features} include the following count functions: * xref:ml-count[`count`, `high_count`, `low_count`] * xref:ml-nonzero-count[`non_zero_count`, `high_non_zero_count`, `low_non_zero_count`] diff --git a/docs/reference/ml/functions/geo.asciidoc b/docs/reference/ml/functions/geo.asciidoc index 3698ab7c0590e..130e17d85dcfe 100644 --- a/docs/reference/ml/functions/geo.asciidoc +++ b/docs/reference/ml/functions/geo.asciidoc @@ -5,7 +5,7 @@ The geographic functions detect anomalies in the geographic location of the input data. -The {xpackml} features include the following geographic function: `lat_long`. +The {ml-features} include the following geographic function: `lat_long`. NOTE: You cannot create forecasts for jobs that contain geographic functions. You also cannot add rules with conditions to detectors that use geographic @@ -72,7 +72,7 @@ For example, JSON data might contain the following transaction coordinates: In {es}, location data is likely to be stored in `geo_point` fields. For more information, see {ref}/geo-point.html[Geo-point datatype]. This data type is not -supported natively in {xpackml} features. You can, however, use Painless scripts +supported natively in {ml-features}. You can, however, use Painless scripts in `script_fields` in your {dfeed} to transform the data into an appropriate format. For example, the following Painless script transforms `"coords": {"lat" : 41.44, "lon":90.5}` into `"lat-lon": "41.44,90.5"`: diff --git a/docs/reference/ml/functions/info.asciidoc b/docs/reference/ml/functions/info.asciidoc index 2c3117e0e5644..c75440f238ff5 100644 --- a/docs/reference/ml/functions/info.asciidoc +++ b/docs/reference/ml/functions/info.asciidoc @@ -6,7 +6,7 @@ that is contained in strings within a bucket. These functions can be used as a more sophisticated method to identify incidences of data exfiltration or C2C activity, when analyzing the size in bytes of the data might not be sufficient. 
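For instance, information content analysis is often used to spot DNS tunnelling, where data is exfiltrated by encoding it into subdomain strings. The following job definition is a minimal sketch of that idea; the job name and the `sub_domain` and `highest_registered_domain` field names are assumptions for this example, not fields defined by these docs:

[source,js]
--------------------------------------------------
PUT _ml/anomaly_detectors/dns-data-exfiltration
{
  "analysis_config": {
    "bucket_span": "5m",
    "detectors": [
      {
        "function": "high_info_content", <1>
        "field_name": "sub_domain", <2>
        "over_field_name": "highest_registered_domain"
      }
    ]
  },
  "data_description": {
    "time_field": "timestamp"
  }
}
--------------------------------------------------
<1> Flags buckets whose strings carry unusually high information content.
<2> The information content of the `sub_domain` values is analyzed relative to the population of `highest_registered_domain` values.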
-The {xpackml} features include the following information content functions: +The {ml-features} include the following information content functions: * `info_content`, `high_info_content`, `low_info_content` diff --git a/docs/reference/ml/functions/metric.asciidoc b/docs/reference/ml/functions/metric.asciidoc index 9d6f3515a029c..7868d4b780a40 100644 --- a/docs/reference/ml/functions/metric.asciidoc +++ b/docs/reference/ml/functions/metric.asciidoc @@ -6,7 +6,7 @@ The metric functions include functions such as mean, min and max. These values are calculated for each bucket. Field values that cannot be converted to double precision floating point numbers are ignored. -The {xpackml} features include the following metric functions: +The {ml-features} include the following metric functions: * <> * <> diff --git a/docs/reference/ml/functions/rare.asciidoc b/docs/reference/ml/functions/rare.asciidoc index 1531285a7add2..87c212fbd1275 100644 --- a/docs/reference/ml/functions/rare.asciidoc +++ b/docs/reference/ml/functions/rare.asciidoc @@ -27,7 +27,7 @@ with shorter bucket spans typically being measured in minutes, not hours. for typical data. ==== -The {xpackml} features include the following rare functions: +The {ml-features} include the following rare functions: * <> * <> @@ -85,7 +85,7 @@ different rare status codes compared to the population is regarded as highly anomalous. This analysis is based on the number of different status code values, not the count of occurrences. -NOTE: To define a status code as rare the {xpackml} features look at the number +NOTE: To define a status code as rare, the {ml-features} look at the number of distinct status codes that occur, not the number of times the status code occurs. If a single client IP experiences a single unique status code, this is rare, even if it occurs for that client IP in every bucket. diff --git a/docs/reference/ml/functions/sum.asciidoc b/docs/reference/ml/functions/sum.asciidoc index 7a95ad63fccee..9313a60a01a6c 100644 --- a/docs/reference/ml/functions/sum.asciidoc +++ b/docs/reference/ml/functions/sum.asciidoc @@ -11,7 +11,7 @@ If you want to look at drops in totals, use low-sided functions. If your data is sparse, use `non_null_sum` functions. Buckets without values are ignored; buckets with a zero value are analyzed. -The {xpackml} features include the following sum functions: +The {ml-features} include the following sum functions: * xref:ml-sum[`sum`, `high_sum`, `low_sum`] * xref:ml-nonnull-sum[`non_null_sum`, `high_non_null_sum`, `low_non_null_sum`] diff --git a/docs/reference/ml/functions/time.asciidoc b/docs/reference/ml/functions/time.asciidoc index ac8199307f130..026d29d85d3d7 100644 --- a/docs/reference/ml/functions/time.asciidoc +++ b/docs/reference/ml/functions/time.asciidoc @@ -6,7 +6,7 @@ The time functions detect events that happen at unusual times, either of the day or of the week. These functions can be used to find unusual patterns of behavior, typically associated with suspicious user activity.
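As an illustration, a `time_of_day` detector models when each entity is normally active and flags events that occur outside those hours. A minimal sketch follows; the job name and the `user` field name are placeholders for this example:

[source,js]
--------------------------------------------------
PUT _ml/anomaly_detectors/unusual-activity-times
{
  "analysis_config": {
    "bucket_span": "15m",
    "detectors": [
      {
        "function": "time_of_day", <1>
        "by_field_name": "user" <2>
      }
    ]
  },
  "data_description": {
    "time_field": "timestamp"
  }
}
--------------------------------------------------
<1> Models the time of day at which events typically occur.
<2> Each user gets an individual activity profile, so overnight activity from an account that is normally active during office hours stands out.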
-The {xpackml} features include the following time functions: +The {ml-features} include the following time functions: * <> * <> diff --git a/docs/reference/ml/images/ml-data-dates.jpg b/docs/reference/ml/images/ml-data-dates.jpg deleted file mode 100644 index e00b765402aae..0000000000000 Binary files a/docs/reference/ml/images/ml-data-dates.jpg and /dev/null differ diff --git a/docs/reference/ml/images/ml-data-keywords.jpg b/docs/reference/ml/images/ml-data-keywords.jpg deleted file mode 100644 index 8eb28cbedcf76..0000000000000 Binary files a/docs/reference/ml/images/ml-data-keywords.jpg and /dev/null differ diff --git a/docs/reference/ml/images/ml-data-metrics.jpg b/docs/reference/ml/images/ml-data-metrics.jpg deleted file mode 100644 index eeb83a76e1cb2..0000000000000 Binary files a/docs/reference/ml/images/ml-data-metrics.jpg and /dev/null differ diff --git a/docs/reference/ml/images/ml-data-topmetrics.jpg b/docs/reference/ml/images/ml-data-topmetrics.jpg deleted file mode 100644 index 80eab8f203730..0000000000000 Binary files a/docs/reference/ml/images/ml-data-topmetrics.jpg and /dev/null differ diff --git a/docs/reference/ml/images/ml-start-feed.jpg b/docs/reference/ml/images/ml-start-feed.jpg deleted file mode 100644 index 7ee09fd81bc6e..0000000000000 Binary files a/docs/reference/ml/images/ml-start-feed.jpg and /dev/null differ diff --git a/docs/reference/ml/transforms.asciidoc b/docs/reference/ml/transforms.asciidoc index 566f3685513bd..6fc67fa7c4e4b 100644 --- a/docs/reference/ml/transforms.asciidoc +++ b/docs/reference/ml/transforms.asciidoc @@ -127,7 +127,6 @@ PUT _ml/datafeeds/datafeed-test1 { "job_id": "test1", "indices": ["my_index"], - "types": ["_doc"], "query": { "match_all": { "boost": 1 @@ -233,7 +232,6 @@ PUT _ml/datafeeds/datafeed-test2 { "job_id": "test2", "indices": ["my_index"], - "types": ["_doc"], "query": { "match_all": { "boost": 1 @@ -482,7 +480,6 @@ PUT _ml/datafeeds/datafeed-test3 { "job_id": "test3", "indices": ["my_index"], - "types": ["_doc"], "query": { "match_all": { "boost": 1 @@ -551,7 +548,6 @@ PUT _ml/datafeeds/datafeed-test4 { "job_id": "test4", "indices": ["my_index"], - "types": ["_doc"], "query": { "match_all": { "boost": 1 @@ -573,7 +569,7 @@ GET _ml/datafeeds/datafeed-test4/_preview // TEST[skip:needs-licence] In {es}, location data can be stored in `geo_point` fields but this data type is -not supported natively in {xpackml} analytics. This example of a script field +not supported natively in {ml} analytics. This example of a script field transforms the data into an appropriate format. For more information, see <>. diff --git a/docs/reference/modules/discovery.asciidoc b/docs/reference/modules/discovery.asciidoc index 78e8e82f7e84f..d3e0d4fe84751 100644 --- a/docs/reference/modules/discovery.asciidoc +++ b/docs/reference/modules/discovery.asciidoc @@ -13,6 +13,16 @@ module. This module is divided into the following sections: unknown, such as when a node has just started up or when the previous master has failed. +<>:: + + This section describes how {es} uses a quorum-based voting mechanism to + make decisions even if some nodes are unavailable. + +<>:: + + This section describes the concept of voting configurations, which {es} + automatically updates as nodes leave and join the cluster. + <>:: Bootstrapping a cluster is required when an Elasticsearch cluster starts up @@ -40,11 +50,10 @@ module. 
This module is divided into the following sections: Cluster state publishing is the process by which the elected master node updates the cluster state on all the other nodes in the cluster. -<>:: +<>:: + + {es} performs health checks to detect and remove faulty nodes. - This section describes the detailed design behind the master election and - auto-reconfiguration logic. - <>:: There are settings that enable users to influence the discovery, cluster @@ -52,14 +61,16 @@ module. This module is divided into the following sections: include::discovery/discovery.asciidoc[] +include::discovery/quorums.asciidoc[] + +include::discovery/voting.asciidoc[] + include::discovery/bootstrapping.asciidoc[] include::discovery/adding-removing-nodes.asciidoc[] include::discovery/publishing.asciidoc[] -include::discovery/quorums.asciidoc[] - include::discovery/fault-detection.asciidoc[] -include::discovery/discovery-settings.asciidoc[] \ No newline at end of file +include::discovery/discovery-settings.asciidoc[] diff --git a/docs/reference/modules/discovery/adding-removing-nodes.asciidoc b/docs/reference/modules/discovery/adding-removing-nodes.asciidoc index a52cf1e2e7467..3b416ea51d223 100644 --- a/docs/reference/modules/discovery/adding-removing-nodes.asciidoc +++ b/docs/reference/modules/discovery/adding-removing-nodes.asciidoc @@ -12,6 +12,7 @@ cluster, and to scale the cluster up and down by adding and removing master-ineligible nodes only. However there are situations in which it may be desirable to add or remove some master-eligible nodes to or from a cluster. +[[modules-discovery-adding-nodes]] ==== Adding master-eligible nodes If you wish to add some nodes to your cluster, simply configure the new nodes @@ -24,6 +25,7 @@ cluster. You can use the `cluster.join.timeout` setting to configure how long a node waits after sending a request to join a cluster. Its default value is `30s`. See <>. +[[modules-discovery-removing-nodes]] ==== Removing master-eligible nodes When removing master-eligible nodes, it is important not to remove too many all @@ -50,7 +52,7 @@ will never automatically move a node on the voting exclusions list back into the voting configuration. Once an excluded node has been successfully auto-reconfigured out of the voting configuration, it is safe to shut it down without affecting the cluster's master-level availability. A node can be added -to the voting configuration exclusion list using the following API: +to the voting configuration exclusion list using the <> API. For example: [source,js] -------------------------------------------------- diff --git a/docs/reference/modules/discovery/discovery-settings.asciidoc b/docs/reference/modules/discovery/discovery-settings.asciidoc index 381974b5498d8..494c5ac225b87 100644 --- a/docs/reference/modules/discovery/discovery-settings.asciidoc +++ b/docs/reference/modules/discovery/discovery-settings.asciidoc @@ -3,6 +3,15 @@ Discovery and cluster formation are affected by the following settings: +`cluster.auto_shrink_voting_configuration`:: + + Controls whether the <> + sheds departed nodes automatically, as long as it still contains at least 3 + nodes. The default value is `true`. If set to `false`, the voting + configuration never shrinks automatically and you must remove departed + nodes manually with the <>. + [[master-election-settings]]`cluster.election.back_off_time`:: Sets the amount to increase the upper bound on the wait before an election @@ -152,9 +161,11 @@ APIs are not blocked and can run on any available node.
Provides a list of master-eligible nodes in the cluster. The list contains either an array of hosts or a comma-delimited string. Each value has the - format `host:port` or `host`, where `port` defaults to the setting `transport.profiles.default.port`. Note that IPv6 hosts must be bracketed. + format `host:port` or `host`, where `port` defaults to the setting + `transport.profiles.default.port`. Note that IPv6 hosts must be bracketed. The default value is `127.0.0.1, [::1]`. See <>. `discovery.zen.ping.unicast.hosts.resolve_timeout`:: - Sets the amount of time to wait for DNS lookups on each round of discovery. This is specified as a <> and defaults to `5s`. \ No newline at end of file + Sets the amount of time to wait for DNS lookups on each round of discovery. + This is specified as a <> and defaults to `5s`. diff --git a/docs/reference/modules/discovery/fault-detection.asciidoc b/docs/reference/modules/discovery/fault-detection.asciidoc index b696cdb8f7ca2..9062444b80d6c 100644 --- a/docs/reference/modules/discovery/fault-detection.asciidoc +++ b/docs/reference/modules/discovery/fault-detection.asciidoc @@ -2,8 +2,9 @@ === Cluster fault detection The elected master periodically checks each of the nodes in the cluster to -ensure that they are still connected and healthy. Each node in the cluster also periodically checks the health of the elected master. These checks -are known respectively as _follower checks_ and _leader checks_. +ensure that they are still connected and healthy. Each node in the cluster also +periodically checks the health of the elected master. These checks are known +respectively as _follower checks_ and _leader checks_. Elasticsearch allows these checks to occasionally fail or timeout without taking any action. It considers a node to be faulty only after a number of @@ -16,4 +17,4 @@ and retry setting values and attempts to remove the node from the cluster. Similarly, if a node detects that the elected master has disconnected, this situation is treated as an immediate failure. The node bypasses the timeout and retry settings and restarts its discovery phase to try and find or elect a new -master. \ No newline at end of file +master. diff --git a/docs/reference/modules/discovery/quorums.asciidoc b/docs/reference/modules/discovery/quorums.asciidoc index 8f3b74be05d9d..1a1954454268c 100644 --- a/docs/reference/modules/discovery/quorums.asciidoc +++ b/docs/reference/modules/discovery/quorums.asciidoc @@ -18,13 +18,13 @@ cluster. In many cases you can do this simply by starting or stopping the nodes as required. See <>. As nodes are added or removed Elasticsearch maintains an optimal level of fault -tolerance by updating the cluster's _voting configuration_, which is the set of -master-eligible nodes whose responses are counted when making decisions such as -electing a new master or committing a new cluster state. A decision is made only -after more than half of the nodes in the voting configuration have responded. -Usually the voting configuration is the same as the set of all the -master-eligible nodes that are currently in the cluster. However, there are some -situations in which they may be different. +tolerance by updating the cluster's <>, which is the set of master-eligible nodes whose responses are +counted when making decisions such as electing a new master or committing a new +cluster state. A decision is made only after more than half of the nodes in the +voting configuration have responded. 
Usually the voting configuration is the +same as the set of all the master-eligible nodes that are currently in the +cluster. However, there are some situations in which they may be different. To be sure that the cluster remains available you **must not stop half or more of the nodes in the voting configuration at the same time**. As long as more @@ -38,46 +38,6 @@ cluster-state update that adjusts the voting configuration to match, and this can take a short time to complete. It is important to wait for this adjustment to complete before removing more nodes from the cluster. -[float] -==== Setting the initial quorum - -When a brand-new cluster starts up for the first time, it must elect its first -master node. To do this election, it needs to know the set of master-eligible -nodes whose votes should count. This initial voting configuration is known as -the _bootstrap configuration_ and is set in the -<>. - -It is important that the bootstrap configuration identifies exactly which nodes -should vote in the first election. It is not sufficient to configure each node -with an expectation of how many nodes there should be in the cluster. It is also -important to note that the bootstrap configuration must come from outside the -cluster: there is no safe way for the cluster to determine the bootstrap -configuration correctly on its own. - -If the bootstrap configuration is not set correctly, when you start a brand-new -cluster there is a risk that you will accidentally form two separate clusters -instead of one. This situation can lead to data loss: you might start using both -clusters before you notice that anything has gone wrong and it is impossible to -merge them together later. - -NOTE: To illustrate the problem with configuring each node to expect a certain -cluster size, imagine starting up a three-node cluster in which each node knows -that it is going to be part of a three-node cluster. A majority of three nodes -is two, so normally the first two nodes to discover each other form a cluster -and the third node joins them a short time later. However, imagine that four -nodes were erroneously started instead of three. In this case, there are enough -nodes to form two separate clusters. Of course if each node is started manually -then it's unlikely that too many nodes are started. If you're using an automated -orchestrator, however, it's certainly possible to get into this situation-- -particularly if the orchestrator is not resilient to failures such as network -partitions. - -The initial quorum is only required the very first time a whole cluster starts -up. New nodes joining an established cluster can safely obtain all the -information they need from the elected master. Nodes that have previously been -part of a cluster will have stored to disk all the information that is required -when they restart. - [float] ==== Master elections @@ -104,92 +64,3 @@ and then started again then it will automatically recover, such as during a action with the APIs described here in these cases, because the set of master nodes is not changing permanently. -[float] -==== Automatic changes to the voting configuration - -Nodes may join or leave the cluster, and Elasticsearch reacts by automatically -making corresponding changes to the voting configuration in order to ensure that -the cluster is as resilient as possible. - -The default auto-reconfiguration -behaviour is expected to give the best results in most situations. 
The current -voting configuration is stored in the cluster state so you can inspect its -current contents as follows: - -[source,js] --------------------------------------------------- -GET /_cluster/state?filter_path=metadata.cluster_coordination.last_committed_config --------------------------------------------------- -// CONSOLE - -NOTE: The current voting configuration is not necessarily the same as the set of -all available master-eligible nodes in the cluster. Altering the voting -configuration involves taking a vote, so it takes some time to adjust the -configuration as nodes join or leave the cluster. Also, there are situations -where the most resilient configuration includes unavailable nodes, or does not -include some available nodes, and in these situations the voting configuration -differs from the set of available master-eligible nodes in the cluster. - -Larger voting configurations are usually more resilient, so Elasticsearch -normally prefers to add master-eligible nodes to the voting configuration after -they join the cluster. Similarly, if a node in the voting configuration -leaves the cluster and there is another master-eligible node in the cluster that -is not in the voting configuration then it is preferable to swap these two nodes -over. The size of the voting configuration is thus unchanged but its -resilience increases. - -It is not so straightforward to automatically remove nodes from the voting -configuration after they have left the cluster. Different strategies have -different benefits and drawbacks, so the right choice depends on how the cluster -will be used. You can control whether the voting configuration automatically shrinks by using the following setting: - -`cluster.auto_shrink_voting_configuration`:: - - Defaults to `true`, meaning that the voting configuration will automatically - shrink, shedding departed nodes, as long as it still contains at least 3 - nodes. If set to `false`, the voting configuration never automatically - shrinks; departed nodes must be removed manually using the - <>. - -NOTE: If `cluster.auto_shrink_voting_configuration` is set to `true`, the -recommended and default setting, and there are at least three master-eligible -nodes in the cluster, then Elasticsearch remains capable of processing -cluster-state updates as long as all but one of its master-eligible nodes are -healthy. - -There are situations in which Elasticsearch might tolerate the loss of multiple -nodes, but this is not guaranteed under all sequences of failures. If this -setting is set to `false` then departed nodes must be removed from the voting -configuration manually, using the -<>, to achieve -the desired level of resilience. - -No matter how it is configured, Elasticsearch will not suffer from a "split-brain" inconsistency. -The `cluster.auto_shrink_voting_configuration` setting affects only its availability in the -event of the failure of some of its nodes, and the administrative tasks that -must be performed as nodes join and leave the cluster. - -[float] -==== Even numbers of master-eligible nodes - -There should normally be an odd number of master-eligible nodes in a cluster. -If there is an even number, Elasticsearch leaves one of them out of the voting -configuration to ensure that it has an odd size. This omission does not decrease -the failure-tolerance of the cluster. 
In fact, improves it slightly: if the -cluster suffers from a network partition that divides it into two equally-sized -halves then one of the halves will contain a majority of the voting -configuration and will be able to keep operating. If all of the master-eligible -nodes' votes were counted, neither side would contain a strict majority of the -nodes and so the cluster would not be able to make any progress. - -For instance if there are four master-eligible nodes in the cluster and the -voting configuration contained all of them, any quorum-based decision would -require votes from at least three of them. This situation means that the cluster -can tolerate the loss of only a single master-eligible node. If this cluster -were split into two equal halves, neither half would contain three -master-eligible nodes and the cluster would not be able to make any progress. -If the voting configuration contains only three of the four master-eligible -nodes, however, the cluster is still only fully tolerant to the loss of one -node, but quorum-based decisions require votes from two of the three voting -nodes. In the event of an even split, one half will contain two of the three -voting nodes so that half will remain available. diff --git a/docs/reference/modules/discovery/voting.asciidoc b/docs/reference/modules/discovery/voting.asciidoc new file mode 100644 index 0000000000000..7c6ea0c1cc985 --- /dev/null +++ b/docs/reference/modules/discovery/voting.asciidoc @@ -0,0 +1,140 @@ +[[modules-discovery-voting]] +=== Voting configurations + +Each {es} cluster has a _voting configuration_, which is the set of +<> whose responses are counted when making +decisions such as electing a new master or committing a new cluster state. +Decisions are made only after a majority (more than half) of the nodes in the +voting configuration respond. + +Usually the voting configuration is the same as the set of all the +master-eligible nodes that are currently in the cluster. However, there are some +situations in which they may be different. + +IMPORTANT: To ensure the cluster remains available, you **must not stop half or +more of the nodes in the voting configuration at the same time**. As long as more +than half of the voting nodes are available, the cluster can work normally. For +example, if there are three or four master-eligible nodes, the cluster +can tolerate one unavailable node. If there are two or fewer master-eligible +nodes, they must all remain available. + +After a node joins or leaves the cluster, {es} reacts by automatically making +corresponding changes to the voting configuration in order to ensure that the +cluster is as resilient as possible. It is important to wait for this adjustment +to complete before you remove more nodes from the cluster. For more information, +see <>. + +The current voting configuration is stored in the cluster state so you can +inspect its current contents as follows: + +[source,js] +-------------------------------------------------- +GET /_cluster/state?filter_path=metadata.cluster_coordination.last_committed_config +-------------------------------------------------- +// CONSOLE + +NOTE: The current voting configuration is not necessarily the same as the set of +all available master-eligible nodes in the cluster. Altering the voting +configuration involves taking a vote, so it takes some time to adjust the +configuration as nodes join or leave the cluster. 
Also, there are situations +where the most resilient configuration includes unavailable nodes or does not +include some available nodes. In these situations, the voting configuration +differs from the set of available master-eligible nodes in the cluster. + +Larger voting configurations are usually more resilient, so Elasticsearch +normally prefers to add master-eligible nodes to the voting configuration after +they join the cluster. Similarly, if a node in the voting configuration +leaves the cluster and there is another master-eligible node in the cluster that +is not in the voting configuration then it is preferable to swap these two nodes +over. The size of the voting configuration is thus unchanged but its +resilience increases. + +It is not so straightforward to automatically remove nodes from the voting +configuration after they have left the cluster. Different strategies have +different benefits and drawbacks, so the right choice depends on how the cluster +will be used. You can control whether the voting configuration automatically +shrinks by using the +<>. + +NOTE: If `cluster.auto_shrink_voting_configuration` is set to `true` (which is +the default and recommended value) and there are at least three master-eligible +nodes in the cluster, Elasticsearch remains capable of processing cluster state +updates as long as all but one of its master-eligible nodes are healthy. + +There are situations in which Elasticsearch might tolerate the loss of multiple +nodes, but this is not guaranteed under all sequences of failures. If the +`cluster.auto_shrink_voting_configuration` setting is `false`, you must remove +departed nodes from the voting configuration manually. Use the +<> to achieve the desired level +of resilience. + +No matter how it is configured, Elasticsearch will not suffer from a +"split-brain" inconsistency. The `cluster.auto_shrink_voting_configuration` +setting affects only its availability in the event of the failure of some of its +nodes and the administrative tasks that must be performed as nodes join and +leave the cluster. + +[float] +==== Even numbers of master-eligible nodes + +There should normally be an odd number of master-eligible nodes in a cluster. +If there is an even number, Elasticsearch leaves one of them out of the voting +configuration to ensure that it has an odd size. This omission does not decrease +the failure-tolerance of the cluster. In fact, it improves it slightly: if the +cluster suffers from a network partition that divides it into two equally-sized +halves then one of the halves will contain a majority of the voting +configuration and will be able to keep operating. If all of the votes from +master-eligible nodes were counted, neither side would contain a strict majority +of the nodes and so the cluster would not be able to make any progress. + +For instance if there are four master-eligible nodes in the cluster and the +voting configuration contained all of them, any quorum-based decision would +require votes from at least three of them. This situation means that the cluster +can tolerate the loss of only a single master-eligible node. If this cluster +were split into two equal halves, neither half would contain three +master-eligible nodes and the cluster would not be able to make any progress. +If the voting configuration contains only three of the four master-eligible +nodes, however, the cluster is still only fully tolerant to the loss of one +node, but quorum-based decisions require votes from two of the three voting +nodes.
In the event of an even split, one half will contain two of the three +voting nodes so that half will remain available. + +[float] +==== Setting the initial voting configuration + +When a brand-new cluster starts up for the first time, it must elect its first +master node. To do this election, it needs to know the set of master-eligible +nodes whose votes should count. This initial voting configuration is known as +the _bootstrap configuration_ and is set in the +<>. + +It is important that the bootstrap configuration identifies exactly which nodes +should vote in the first election. It is not sufficient to configure each node +with an expectation of how many nodes there should be in the cluster. It is also +important to note that the bootstrap configuration must come from outside the +cluster: there is no safe way for the cluster to determine the bootstrap +configuration correctly on its own. + +If the bootstrap configuration is not set correctly, when you start a brand-new +cluster there is a risk that you will accidentally form two separate clusters +instead of one. This situation can lead to data loss: you might start using both +clusters before you notice that anything has gone wrong and it is impossible to +merge them together later. + +NOTE: To illustrate the problem with configuring each node to expect a certain +cluster size, imagine starting up a three-node cluster in which each node knows +that it is going to be part of a three-node cluster. A majority of three nodes +is two, so normally the first two nodes to discover each other form a cluster +and the third node joins them a short time later. However, imagine that four +nodes were erroneously started instead of three. In this case, there are enough +nodes to form two separate clusters. Of course if each node is started manually +then it's unlikely that too many nodes are started. If you're using an automated +orchestrator, however, it's certainly possible to get into this situation-- +particularly if the orchestrator is not resilient to failures such as network +partitions. + +The initial quorum is only required the very first time a whole cluster starts +up. New nodes joining an established cluster can safely obtain all the +information they need from the elected master. Nodes that have previously been +part of a cluster will have stored to disk all the information that is required +when they restart. diff --git a/docs/reference/modules/indices/recovery.asciidoc b/docs/reference/modules/indices/recovery.asciidoc index 84352cfe6898e..d9e034941f80e 100644 --- a/docs/reference/modules/indices/recovery.asciidoc +++ b/docs/reference/modules/indices/recovery.asciidoc @@ -1,10 +1,24 @@ [[recovery]] === Indices Recovery -The following _expert_ setting can be set to manage the recovery policy. +<> is the process used to build a new copy of a +shard on a node by copying data from the primary. {es} uses this peer recovery +process to rebuild shard copies that were lost if a node has failed, and uses +the same process when migrating a shard copy between nodes to rebalance the +cluster or to honor any changes to the <>. + +The following _expert_ setting can be set to manage the resources consumed by +peer recoveries: `indices.recovery.max_bytes_per_sec`:: - Defaults to `40mb`. + Limits the total inbound and outbound peer recovery traffic on each node. 
+ Since this limit applies on each node, but there may be many nodes + performing peer recoveries concurrently, the total amount of peer recovery + traffic within a cluster may be much higher than this limit. If you set + this limit too high then there is a risk that ongoing peer recoveries will + consume an excess of bandwidth (or other resources) which could destabilize + the cluster. Defaults to `40mb`. This setting can be dynamically updated on a live cluster with the -<> API: +<> API. diff --git a/docs/reference/modules/ml-node.asciidoc b/docs/reference/modules/ml-node.asciidoc index 9e4413e3a0c7e..5a907adfbbf3a 100644 --- a/docs/reference/modules/ml-node.asciidoc +++ b/docs/reference/modules/ml-node.asciidoc @@ -9,10 +9,9 @@ If {xpack} is installed, there is an additional node type: <>:: A node that has `xpack.ml.enabled` and `node.ml` set to `true`, which is the -default behavior when {xpack} is installed. If you want to use {xpackml} -features, there must be at least one {ml} node in your cluster. For more -information about {xpackml} features, -see {xpack-ref}/xpack-ml.html[Machine Learning in the Elastic Stack]. +default behavior when {xpack} is installed. If you want to use {ml-features}, there must be at least one {ml} node in your cluster. For more +information about {ml-features}, +see {stack-ov}/xpack-ml.html[Machine learning in the {stack}]. IMPORTANT: Do not use the `node.ml` setting unless {xpack} is installed. Otherwise, the node fails to start. @@ -88,11 +87,11 @@ node.ml: false <5> [[ml-node]] === [xpack]#Machine learning node# -The {xpackml} features provide {ml} nodes, which run jobs and handle {ml} API +The {ml-features} provide {ml} nodes, which run jobs and handle {ml} API requests. If `xpack.ml.enabled` is set to true and `node.ml` is set to `false`, the node can service API requests but it cannot run jobs. -If you want to use {xpackml} features in your cluster, you must enable {ml} +If you want to use {ml-features} in your cluster, you must enable {ml} (set `xpack.ml.enabled` to `true`) on all master-eligible nodes. Do not use these settings if you do not have {xpack} installed. diff --git a/docs/reference/modules/transport.asciidoc b/docs/reference/modules/transport.asciidoc index a80f4a3b11b57..180a8190868f0 100644 --- a/docs/reference/modules/transport.asciidoc +++ b/docs/reference/modules/transport.asciidoc @@ -106,7 +106,7 @@ keepalives cannot be configured. ==== Transport Compression [float] -===== Request Compresssion +===== Request Compression By default, the `transport.compress` setting is `false` and network-level request compression is disabled between nodes in the cluster. This default diff --git a/docs/reference/monitoring/collecting-monitoring-data.asciidoc b/docs/reference/monitoring/collecting-monitoring-data.asciidoc index 432b5f4d01f4c..8ab0443edb167 100644 --- a/docs/reference/monitoring/collecting-monitoring-data.asciidoc +++ b/docs/reference/monitoring/collecting-monitoring-data.asciidoc @@ -88,9 +88,11 @@ example: xpack.monitoring.collection.indices: logstash-*, index1, test2 ---------------------------------- -You can prepend `+` or `-` to explicitly include or exclude index names or +You can prepend `-` to explicitly exclude index names or patterns. For example, to include all indices that start with `test` except -`test3`, you could specify `+test*,-test3`. +`test3`, you could specify `test*,-test3`. To include system indices such as +.security and .kibana, add `.*` to the list of included names. +For example, `.*,test*,-test3`. -- ..
Optional: Specify how often to collect monitoring data. The default value for diff --git a/docs/reference/query-dsl/feature-query.asciidoc b/docs/reference/query-dsl/feature-query.asciidoc index 387278a432af6..5d1683854375b 100644 --- a/docs/reference/query-dsl/feature-query.asciidoc +++ b/docs/reference/query-dsl/feature-query.asciidoc @@ -213,7 +213,7 @@ exponent. Scores are computed as `S^exp^ / (S^exp^ + pivot^exp^)`. Like for the and scores are in +(0, 1)+. `exponent` must be positive, but is typically in +[0.5, 1]+. A good value should -be computed via traning. If you don't have the opportunity to do so, we recommend +be computed via training. If you don't have the opportunity to do so, we recommend that you stick to the `saturation` function instead. [source,js] diff --git a/docs/reference/query-dsl/geo-shape-query.asciidoc b/docs/reference/query-dsl/geo-shape-query.asciidoc index f796881d520c6..059d0db14b51a 100644 --- a/docs/reference/query-dsl/geo-shape-query.asciidoc +++ b/docs/reference/query-dsl/geo-shape-query.asciidoc @@ -81,7 +81,7 @@ GET /example/_search ==== Pre-Indexed Shape The Query also supports using a shape which has already been indexed in -another index and/or index type. This is particularly useful for when +another index. This is particularly useful when you have a pre-defined list of shapes which are useful to your application and you want to reference this using a logical name (for example 'New Zealand') rather than having to provide their coordinates @@ -90,7 +90,6 @@ each time. In this situation it is only necessary to provide: * `id` - The ID of the document containing the pre-indexed shape. * `index` - Name of the index where the pre-indexed shape is. Defaults to 'shapes'. -* `type` - Index type where the pre-indexed shape is. * `path` - The field specified as path containing the pre-indexed shape. Defaults to 'shape'. * `routing` - The routing of the shape document if required. @@ -130,7 +129,6 @@ GET /example/_search "location": { "indexed_shape": { "index": "shapes", - "type": "_doc", "id": "deu", "path": "location" } diff --git a/docs/reference/query-dsl/ids-query.asciidoc b/docs/reference/query-dsl/ids-query.asciidoc index 55adcb8f94cf8..8798a2fb093f8 100644 --- a/docs/reference/query-dsl/ids-query.asciidoc +++ b/docs/reference/query-dsl/ids-query.asciidoc @@ -10,13 +10,9 @@ GET /_search { "query": { "ids" : { - "type" : "_doc", "values" : ["1", "4", "100"] } } } -------------------------------------------------- // CONSOLE - -The `type` is optional and can be omitted, and can also accept an array -of values. If no type is specified, all types defined in the index mapping are tried.
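With `type` gone, an `ids` query is identified purely by document IDs and composes with other clauses like any other query. As a point of reference, here is a minimal sketch of the typeless form inside a `bool` filter; the index name `my-index` is a placeholder:

[source,js]
--------------------------------------------------
GET /my-index/_search
{
  "query": {
    "bool": {
      "filter": {
        "ids": { <1>
          "values": ["1", "4", "100"]
        }
      }
    }
  }
}
--------------------------------------------------
<1> Matches documents by `_id` across whatever indices the request targets, here just `my-index`.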
diff --git a/docs/reference/query-dsl/mlt-query.asciidoc b/docs/reference/query-dsl/mlt-query.asciidoc
index 19035d96ae04d..64a2a6052df71 100644
--- a/docs/reference/query-dsl/mlt-query.asciidoc
+++ b/docs/reference/query-dsl/mlt-query.asciidoc
@@ -42,12 +42,10 @@ GET /_search
         "like" : [
         {
             "_index" : "imdb",
-            "_type" : "movies",
             "_id" : "1"
         },
         {
             "_index" : "imdb",
-            "_type" : "movies",
             "_id" : "2"
         },
         "and potentially some more text here as well"
@@ -74,7 +72,6 @@ GET /_search
         "like" : [
         {
             "_index" : "marvel",
-            "_type" : "quotes",
             "doc" : {
                 "name": {
                     "first": "Ben",
@@ -85,7 +82,6 @@ GET /_search
         },
         {
             "_index" : "marvel",
-            "_type" : "quotes",
             "_id" : "2"
         }
         ],
diff --git a/docs/reference/query-dsl/terms-query.asciidoc b/docs/reference/query-dsl/terms-query.asciidoc
index c0e94900d7d82..db4597fbea504 100644
--- a/docs/reference/query-dsl/terms-query.asciidoc
+++ b/docs/reference/query-dsl/terms-query.asciidoc
@@ -36,9 +36,6 @@ The terms lookup mechanism supports the following options:
 `index`::
     The index to fetch the term values from.
 
-`type`::
-    The type to fetch the term values from.
-
 `id`::
     The id of the document to fetch the term values from.
 
@@ -93,7 +90,6 @@ GET /tweets/_search
         "terms" : {
             "user" : {
                 "index" : "users",
-                "type" : "_doc",
                 "id" : "2",
                 "path" : "followers"
             }
diff --git a/docs/reference/release-notes/7.0.0-alpha2.asciidoc b/docs/reference/release-notes/7.0.0-alpha2.asciidoc
index 7f66d21408224..b07088d6cfed6 100644
--- a/docs/reference/release-notes/7.0.0-alpha2.asciidoc
+++ b/docs/reference/release-notes/7.0.0-alpha2.asciidoc
@@ -18,7 +18,7 @@ Index APIs::
 * Always enforce cluster-wide shard limit {pull}34892[#34892] (issues: {issue}20705[#20705], {issue}34021[#34021])
 
 Ranking::
-* Forbid negative scores in functon_score query {pull}35709[#35709] (issue: {issue}33309[#33309])
+* Forbid negative scores in function_score query {pull}35709[#35709] (issue: {issue}33309[#33309])
 
 Scripting::
 * Delete deprecated getValues from ScriptDocValues {pull}36183[#36183] (issue: {issue}22919[#22919])
@@ -26,7 +26,7 @@ Scripting::
 Search::
 * Remove the deprecated _termvector endpoint. {pull}36131[#36131] (issues: {issue}36098[#36098], {issue}8484[#8484])
 * Remove deprecated Graph endpoints {pull}35956[#35956]
-* Validate metdata on `_msearch` {pull}35938[#35938] (issue: {issue}35869[#35869])
+* Validate metadata on `_msearch` {pull}35938[#35938] (issue: {issue}35869[#35869])
 * Make hits.total an object in the search response {pull}35849[#35849] (issue: {issue}33028[#33028])
 * Remove the distinction between query and filter context in QueryBuilders {pull}35354[#35354] (issue: {issue}35293[#35293])
 * Throw a parsing exception when boost is set in span_or query (#28390) {pull}34112[#34112] (issue: {issue}28390[#28390])
@@ -544,7 +544,7 @@ Search::
 * Add a More Like This query routing requirement check (#29678) {pull}33974[#33974]
 
 Security::
-* Remove license state listeners on closables {pull}36308[#36308] (issues: {issue}33328[#33328], {issue}35627[#35627], {issue}35628[#35628])
+* Remove license state listeners on closeables {pull}36308[#36308] (issues: {issue}33328[#33328], {issue}35627[#35627], {issue}35628[#35628])
 
 Snapshot/Restore::
 * Upgrade GCS Dependencies to 1.55.0 {pull}36634[#36634] (issues: {issue}35229[#35229], {issue}35459[#35459])
diff --git a/docs/reference/rollup/overview.asciidoc b/docs/reference/rollup/overview.asciidoc
index b2570f647e72b..90c5e20a850c1 100644
--- a/docs/reference/rollup/overview.asciidoc
+++ b/docs/reference/rollup/overview.asciidoc
@@ -6,7 +6,7 @@ experimental[]
 
 Time-based data (documents that are predominantly identified by their timestamp) often have associated retention policies
-to manage data growth. For example, your system may be generating 500,000 documents every second. That will generate
+to manage data growth. For example, your system may be generating 500 documents every second. That will generate
 43 million documents per day, and nearly 16 billion documents a year.
 
 While your analysts and data scientists may wish you stored that data indefinitely for analysis, time is never-ending and
diff --git a/docs/reference/search/multi-search.asciidoc b/docs/reference/search/multi-search.asciidoc
index 50c5a942277d2..9e3bff3c0c063 100644
--- a/docs/reference/search/multi-search.asciidoc
+++ b/docs/reference/search/multi-search.asciidoc
@@ -5,7 +5,7 @@
 The multi search API allows to execute several search requests within
 the same API. The endpoint for it is `_msearch`.
 
 The format of the request is similar to the bulk API format and makes
-use of the newline delimited JSON (NDJSON) format. the structure is as
+use of the newline delimited JSON (NDJSON) format. The structure is as
 follows (the structure is specifically optimized to reduce parsing if a
 specific search ends up redirected to another node):
diff --git a/docs/reference/search/rank-eval.asciidoc b/docs/reference/search/rank-eval.asciidoc
index 81c464b71d575..c549b5e7a689b 100644
--- a/docs/reference/search/rank-eval.asciidoc
+++ b/docs/reference/search/rank-eval.asciidoc
@@ -126,7 +126,7 @@ GET /my_index/_rank_eval
 
 <1> the template id
 <2> the template definition to use
-<3> a reference to a previously defined temlate
+<3> a reference to a previously defined template
 <4> the parameters to use to fill the template
 
 [float]
diff --git a/docs/reference/search/request-body.asciidoc b/docs/reference/search/request-body.asciidoc
index 7145b40c43e3d..9970c4cc6223f 100644
--- a/docs/reference/search/request-body.asciidoc
+++ b/docs/reference/search/request-body.asciidoc
@@ -189,6 +189,8 @@ include::request/from-size.asciidoc[]
 
 include::request/sort.asciidoc[]
 
+include::request/track-total-hits.asciidoc[]
+
 include::request/source-filtering.asciidoc[]
 
 include::request/stored-fields.asciidoc[]
diff --git a/docs/reference/search/request/track-total-hits.asciidoc b/docs/reference/search/request/track-total-hits.asciidoc
new file mode 100644
index 0000000000000..bdad4dbde918e
--- /dev/null
+++ b/docs/reference/search/request/track-total-hits.asciidoc
@@ -0,0 +1,176 @@
+[[search-request-track-total-hits]]
+=== Track total hits
+
+Generally the total hit count can't be computed accurately without visiting all
+matches, which is costly for queries that match lots of documents. The
+`track_total_hits` parameter allows you to control how the total number of hits
+should be tracked. When set to `true` the search response will always track the
+number of hits that match the query accurately (e.g. `total.relation` will always
+be equal to `"eq"` when `track_total_hits` is set to `true`).
+
+[source,js]
+--------------------------------------------------
+GET twitter/_search
+{
+    "track_total_hits": true,
+    "query": {
+        "match" : {
+            "message" : "Elasticsearch"
+        }
+    }
+}
+--------------------------------------------------
+// TEST[setup:twitter]
+// CONSOLE
+
+\... returns:
+
+[source,js]
+--------------------------------------------------
+{
+    "_shards": ...
+    "timed_out": false,
+    "took": 100,
+    "hits": {
+        "max_score": 1.0,
+        "total" : {
+            "value": 2048, <1>
+            "relation": "eq" <2>
+        },
+        "hits": ...
+    }
+}
+--------------------------------------------------
+// TESTRESPONSE[s/"_shards": \.\.\./"_shards": "$body._shards",/]
+// TESTRESPONSE[s/"took": 100/"took": $body.took/]
+// TESTRESPONSE[s/"max_score": 1\.0/"max_score": $body.hits.max_score/]
+// TESTRESPONSE[s/"value": 2048/"value": $body.hits.total.value/]
+// TESTRESPONSE[s/"hits": \.\.\./"hits": "$body.hits.hits"/]
+
+<1> The total number of hits that match the query.
+<2> The count is accurate (e.g. `"eq"` means equals).
+
+If you don't need to track the total number of hits you can improve query times
+by setting this option to `false`. In that case the search can efficiently skip
+non-competitive hits because it doesn't need to count all matches:
+
+[source,js]
+--------------------------------------------------
+GET twitter/_search
+{
+    "track_total_hits": false,
+    "query": {
+        "match" : {
+            "message" : "Elasticsearch"
+        }
+    }
+}
+--------------------------------------------------
+// CONSOLE
+// TEST[continued]
+
+\... returns:
+
+[source,js]
+--------------------------------------------------
+{
+    "_shards": ...
+    "timed_out": false,
+    "took": 10,
+    "hits" : { <1>
+        "max_score": 1.0,
+        "hits": ...
+    }
+}
+--------------------------------------------------
+// TESTRESPONSE[s/"_shards": \.\.\./"_shards": "$body._shards",/]
+// TESTRESPONSE[s/"took": 10/"took": $body.took/]
+// TESTRESPONSE[s/"max_score": 1\.0/"max_score": $body.hits.max_score/]
+// TESTRESPONSE[s/"hits": \.\.\./"hits": "$body.hits.hits"/]
+
+<1> The total number of hits is unknown.
+
+Given that it is often enough to have a lower bound of the number of hits,
+such as "there are at least 1000 hits", it is also possible to set
+`track_total_hits` as an integer that represents the number of hits to count
+accurately. The search can efficiently skip non-competitive documents as soon
+as it has collected at least `track_total_hits` documents. This is a good
+trade-off to speed up searches if you don't need the accurate number of hits
+after a certain threshold.
+
+
+For instance the following query will track the total hit count accurately
+for up to 100 documents that match the query:
+
+[source,js]
+--------------------------------------------------
+GET twitter/_search
+{
+    "track_total_hits": 100,
+    "query": {
+        "match" : {
+            "message" : "Elasticsearch"
+        }
+    }
+}
+--------------------------------------------------
+// CONSOLE
+// TEST[continued]
+
+The `hits.total.relation` in the response will indicate if the
+value returned in `hits.total.value` is accurate (`eq`) or a lower
+bound of the total (`gte`).
+
+For instance the following response:
+
+[source,js]
+--------------------------------------------------
+{
+    "_shards": ...
+    "timed_out": false,
+    "took": 30,
+    "hits" : {
+        "max_score": 1.0,
+        "total" : {
+            "value": 42, <1>
+            "relation": "eq" <2>
+        },
+        "hits": ...
+    }
+}
+--------------------------------------------------
+// TESTRESPONSE[s/"_shards": \.\.\./"_shards": "$body._shards",/]
+// TESTRESPONSE[s/"took": 30/"took": $body.took/]
+// TESTRESPONSE[s/"max_score": 1\.0/"max_score": $body.hits.max_score/]
+// TESTRESPONSE[s/"value": 42/"value": $body.hits.total.value/]
+// TESTRESPONSE[s/"hits": \.\.\./"hits": "$body.hits.hits"/]
+
+<1> 42 documents match the query
+<2> and the count is accurate (`"eq"`)
+
+\... indicates that the number of hits returned in the `total`
+is accurate.
+
+If the total number of hits that match the query is greater than the
+value set in `track_total_hits`, the total hits in the response
+will indicate that the returned value is a lower bound:
+
+[source,js]
+--------------------------------------------------
+{
+    "_shards": ...
+    "hits" : {
+        "max_score": 1.0,
+        "total" : {
+            "value": 100, <1>
+            "relation": "gte" <2>
+        },
+        "hits": ...
+    }
+}
+--------------------------------------------------
+// TESTRESPONSE
+// TEST[skip:response is already tested in the previous snippet]
+
+<1> There are at least 100 documents that match the query
+<2> This is a lower bound (`gte`).
\ No newline at end of file
diff --git a/docs/reference/search/uri-request.asciidoc b/docs/reference/search/uri-request.asciidoc
index 320e65bf3ee0b..87e1da907fb7d 100644
--- a/docs/reference/search/uri-request.asciidoc
+++ b/docs/reference/search/uri-request.asciidoc
@@ -101,10 +101,12 @@ is important).
 |`track_scores` |When sorting, set to `true` in order to still track
 scores and return them as part of each hit.
 
-|`track_total_hits` |Set to `false` in order to disable the tracking
+|`track_total_hits` |Defaults to true. Set to `false` in order to disable the tracking
 of the total number of hits that match the query.
-(see <> for more details).
-Defaults to true.
+It also accepts an integer which in this case represents the number of
+hits to count accurately.
+(See the <> documentation
+for more details).
 
 |`timeout` |A search timeout, bounding the search request to be executed
 within the specified time value and bail with the hits accumulated up to
diff --git a/docs/reference/settings/ml-settings.asciidoc b/docs/reference/settings/ml-settings.asciidoc
index 5d4f9519006ae..113f264a31331 100644
--- a/docs/reference/settings/ml-settings.asciidoc
+++ b/docs/reference/settings/ml-settings.asciidoc
@@ -77,6 +77,9 @@ opening spend more time in the `opening` state. Defaults to `2`.
 These settings are for advanced use cases; the default values are generally
 sufficient:
 
+`xpack.ml.enable_config_migration` (<>)::
+Reserved.
+
 `xpack.ml.max_anomaly_records` (<>)::
 The maximum number of records that are output per bucket. The default value is
 `500`.
diff --git a/docs/reference/settings/monitoring-settings.asciidoc b/docs/reference/settings/monitoring-settings.asciidoc
index 56bfaf5d4f863..59a766d4dd0ca 100644
--- a/docs/reference/settings/monitoring-settings.asciidoc
+++ b/docs/reference/settings/monitoring-settings.asciidoc
@@ -76,9 +76,10 @@ Sets the timeout for collecting the cluster statistics. Defaults to `10s`.
 Controls which indices Monitoring collects data from. Defaults to all indices.
 Specify the index names as a comma-separated list, for example
 `test1,test2,test3`. Names can include wildcards, for
-example `test*`. You can explicitly include or exclude indices by prepending
-`+` to include the index, or `-` to exclude the index. For example, to include all indices that
-start with `test` except `test3`, you could specify `+test*,-test3`.
+example `test*`. You can explicitly exclude indices by prepending `-`. For example, `test*,-test3` will
+monitor all indices that start with `test` except for `test3`. System indices like .security* or .kibana*
+always start with a `.`, and generally should be monitored. Consider adding `.*` to the list of indices to
+ensure monitoring of system indices. For example: `.*,test*,-test3`
 
 `xpack.monitoring.collection.index.stats.timeout`::
diff --git a/docs/reference/settings/notification-settings.asciidoc b/docs/reference/settings/notification-settings.asciidoc
index 25cecfc63627e..e098f22716876 100644
--- a/docs/reference/settings/notification-settings.asciidoc
+++ b/docs/reference/settings/notification-settings.asciidoc
@@ -64,6 +64,14 @@ request is aborted.
 Specifies the maximum size an HTTP response is allowed to have, defaults to
 `10mb`, the maximum configurable value is `50mb`.
 
+`xpack.http.whitelist`::
+A list of URLs that the internal HTTP client is allowed to connect to. This
+client is used in the HTTP input, the webhook, the slack, pagerduty, hipchat
+and jira actions. This setting can be updated dynamically. It defaults to `*`
+allowing everything. Note: If you configure this setting and you are using one
+of the slack/pagerduty/hipchat actions, you have to ensure that the
+corresponding endpoints are whitelisted as well.
+
 [[ssl-notification-settings]]
 :ssl-prefix: xpack.http
 :component: {watcher}
diff --git a/docs/reference/setup/bootstrap-checks.asciidoc b/docs/reference/setup/bootstrap-checks.asciidoc
index 34b39546324d1..d9b540d39ceb3 100644
--- a/docs/reference/setup/bootstrap-checks.asciidoc
+++ b/docs/reference/setup/bootstrap-checks.asciidoc
@@ -157,9 +157,9 @@ and is enforced on Linux only.
 To pass the maximum map count check, you must configure `vm.max_map_count`
 via `sysctl` to be at least `262144`.
 
 Alternatively, the maximum map count check is only needed if you are using
-`mmapfs` as the <> for your indices. If you
-<> the use of `mmapfs` then this bootstrap check will
-not be enforced.
+`mmapfs` or `hybridfs` as the <> for your
+indices. If you <> the use of `mmap` then this
+bootstrap check will not be enforced.
 
 === Client JVM check
diff --git a/docs/reference/setup/setup-xclient.asciidoc b/docs/reference/setup/setup-xclient.asciidoc
index 819e3de98f4bd..9da482af8493a 100644
--- a/docs/reference/setup/setup-xclient.asciidoc
+++ b/docs/reference/setup/setup-xclient.asciidoc
@@ -11,7 +11,7 @@ cluster where {xpack} is installed, then you must download and configure the
 . Add the {xpack} transport JAR file to your *CLASSPATH*. You can download the {xpack}
 distribution and extract the JAR file manually or you can get it from the
-https://artifacts.elastic.co/maven/org/elasticsearch/client/x-pack-transport/{version}/x-pack-transport-{version}.jar[Elasticsearc Maven repository].
+https://artifacts.elastic.co/maven/org/elasticsearch/client/x-pack-transport/{version}/x-pack-transport-{version}.jar[Elasticsearch Maven repository].
 As with any dependency, you will also need its transitive dependencies. Refer to the
 https://artifacts.elastic.co/maven/org/elasticsearch/client/x-pack-transport/{version}/x-pack-transport-{version}.pom[X-Pack POM file for your version]
 when downloading for offline usage.
diff --git a/docs/reference/sql/endpoints/client-apps/dbvis.asciidoc b/docs/reference/sql/endpoints/client-apps/dbvis.asciidoc
index 3e8092babe8cd..779a27a06c752 100644
--- a/docs/reference/sql/endpoints/client-apps/dbvis.asciidoc
+++ b/docs/reference/sql/endpoints/client-apps/dbvis.asciidoc
@@ -21,7 +21,7 @@ Add the {es} JDBC driver to DbVisualizer through *Tools* > *Driver Manager*:
 image:images/sql/client-apps/dbvis-1-driver-manager.png[]
 
 Create a new driver entry through *Driver* > *Create Driver* entry and add the JDBC driver in the files panel
-through the buttons on the right. Once specify, the driver class and its version should be automatically picked up - one can force the refresh through the *Find driver in liste locations* button, the second from the bottom on the right hand side:
+through the buttons on the right. Once specified, the driver class and its version should be automatically picked up - one can force the refresh through the *Find driver in listed locations* button, the second from the bottom on the right hand side:
 
 image:images/sql/client-apps/dbvis-2-driver.png[]
diff --git a/docs/reference/sql/endpoints/client-apps/excel.asciidoc b/docs/reference/sql/endpoints/client-apps/excel.asciidoc
new file mode 100644
index 0000000000000..6f7c98e04d5b5
--- /dev/null
+++ b/docs/reference/sql/endpoints/client-apps/excel.asciidoc
@@ -0,0 +1,63 @@
+[role="xpack"]
+[testenv="platinum"]
+[[sql-client-apps-excel]]
+=== Microsoft Excel
+
+experimental[]
+
+[quote, https://www.techopedia.com/definition/5430/microsoft-excel]
+____
+https://products.office.com/en/excel[Microsoft Excel] is a software program [...] that allows users to organize, format and calculate data
+with formulas using a spreadsheet system.
+____
+
+==== Prerequisites
+
+* Microsoft Office 2016 or higher
+* {es-sql} <>
+* A preconfigured User or System DSN (see <> section on how to configure a DSN).
+
+==== Load data into a spreadsheet
+
+First, you'll need to choose ODBC as the source to load data from. To do so, click on the _Data_ tab, then the _New Query_ button; in the
+drop-down menu expand _From Other Sources_, then choose _From ODBC_:
+
+[[apps_excel_fromodbc]]
+.ODBC as data source
+image:images/sql/odbc/apps_excel_fromodbc.png[]
+
+This will open a new window with a drop-down menu populated with the DSNs that Excel found on the system. Choose a DSN configured to
+connect to your {es} instance and press the _OK_ button:
+
+[[apps_excel_dsn]]
+.Choose a DSN
+image:images/sql/odbc/apps_excel_dsn.png[]
+
+This will lead to a new window, allowing the user to input the connection credentials.
+
+A username might be required by Excel even if the {es} instance has no security enabled. Providing a bogus username with no password in
+this case will not hinder the connectivity. Note however that Excel will cache these credentials (so in case you do have security enabled,
+you won't be prompted for the credentials a second time).
+
+Fill in the username and the password and press
+_Connect_.
+
+[[apps_excel_cred]]
+.Provide connection credentials
+image:images/sql/odbc/apps_excel_cred.png[]
+
+Once connected, Excel will read {es}'s catalog and offer the user a choice of tables (indices) to load data from. Clicking on one of the
+tables will load a preview of the data within:
+
+[[apps_excel_picktable]]
+.Pick table to load
+image:images/sql/odbc/apps_excel_picktable.png[]
+
+Now click the _Load_ button, which will have Excel load all the data from the table into a spreadsheet:
+
+[[apps_excel_loaded]]
+.Data loaded in spreadsheet
+image:images/sql/odbc/apps_excel_loaded.png[]
+
+
+// vim: set noet fenc=utf-8 ff=dos sts=0 sw=4 ts=4 tw=138 columns=140
diff --git a/docs/reference/sql/endpoints/client-apps/index.asciidoc b/docs/reference/sql/endpoints/client-apps/index.asciidoc
index c06e5899d575c..80932a0a27242 100644
--- a/docs/reference/sql/endpoints/client-apps/index.asciidoc
+++ b/docs/reference/sql/endpoints/client-apps/index.asciidoc
@@ -5,19 +5,33 @@
 
 beta[]
 
-Thanks to its <> interface, a broad range of third-party applications can use {es}'s SQL capabilities.
+Thanks to its <> and <> interfaces, a broad range of third-party applications can use {es}'s SQL capabilities.
 This section lists, in alphabetical order, a number of them and their respective configuration - the list however is
 by no means comprehensive (feel free to https://www.elastic.co/blog/art-of-pull-request[submit a PR] to improve it):
 as long as the app can use the {es-sql} driver, it can use {es-sql}.
 
 * <>
 * <>
+* <>
+* <>
+* <>
+* <>
+* <>
 * <>
 * <>
+* <>
 
 NOTE: Each application has its own requirements and license; these are outside the scope of this documentation
 which covers only the configuration aspect with {es-sql}.
 
+WARNING: The support for applications implementing the ODBC 2.x standard and prior is currently limited.
+
 include::dbeaver.asciidoc[]
 include::dbvis.asciidoc[]
+include::excel.asciidoc[]
+include::powerbi.asciidoc[]
+include::ps1.asciidoc[]
+include::microstrat.asciidoc[]
+include::qlik.asciidoc[]
 include::squirrel.asciidoc[]
+include::tableau.asciidoc[]
 include::workbench.asciidoc[]
diff --git a/docs/reference/sql/endpoints/client-apps/microstrat.asciidoc b/docs/reference/sql/endpoints/client-apps/microstrat.asciidoc
new file mode 100644
index 0000000000000..898d84afb14ac
--- /dev/null
+++ b/docs/reference/sql/endpoints/client-apps/microstrat.asciidoc
@@ -0,0 +1,97 @@
+[role="xpack"]
+[testenv="platinum"]
+[[sql-client-apps-microstrat]]
+=== MicroStrategy Desktop
+
+experimental[]
+
+[quote, https://www.microstrategy.com/us/resources/library/videos/new-microstrategy-desktop]
+____
+https://www.microstrategy.com/us/get-started/desktop[MicroStrategy Desktop] is a free data discovery tool that helps people bring data to
+life using powerful self-service analytics.
+____
+
+==== Prerequisites
+
+* MicroStrategy Desktop 11 or higher
+* {es-sql} <>
+* A preconfigured User or System DSN (see <> section on how to configure a DSN).
+
+==== Data loading
+
+To use the {product} to load data into MicroStrategy Desktop, perform the following steps in sequence.
+
+. Create a New Dossier
++
+Once the application is launched, you'll first need to create a _New Dossier_:
++
+[[apps_microstrat_newdossier]]
+image:images/sql/odbc/apps_microstrat_newdossier.png[]
++
+. New Data
++
+To import into the _New Dossier_ just opened, press the _New Data_ button in the _DATASETS_ column:
++
+[[apps_microstrat_newdata]]
+image:images/sql/odbc/apps_microstrat_newdata.png[]
++
+. Access data from Tables
++
+This opens a new window that allows you to choose the source to load data from. Click on the _Databases_ icon:
++
+[[apps_microstrat_databases]]
+image:images/sql/odbc/apps_microstrat_databases.png[]
++
+. New Data Source
++
+In the newly opened _Import from Table - Select_ window, click on the *+* button to the right of _DATA SOURCES_ item:
++
+[[apps_microstrat_newds]]
+image:images/sql/odbc/apps_microstrat_newds.png[]
++
+. Data Source
++
+In the _Data Source_ window, tick the radio button for _DSN Data Sources_. In the _DSN_ drop-down box, choose the name of the DSN that you
+have previously configured. For the _Version_, choose _Generic DBMS_.
++
+Input a user name and password in the provided fields.
+Note that the application requires them irrespective of the fact that they might already be part of the previously configured DSN; the
+new input will take precedence over those.
++
+Finally, give a name to the application-specific data source you're configuring:
++
+[[apps_microstrat_dsn]]
+image:images/sql/odbc/apps_microstrat_dsn.png[]
++
+. Select Import Options
++
+Choosing an import methodology follows. You can pick any of the options; we'll exemplify the _Select Tables_ option:
++
+[[apps_microstrat_tables]]
+image:images/sql/odbc/apps_microstrat_tables.png[]
++
+. Import from Table - Select
++
+The data source you've named two steps before is now listed in the _DATA SOURCES_ column. Clicking on its name triggers the
+application to query the {es} instance configured in the DSN and list the tables available within:
++
+[[apps_microstrat_loadtable]]
+image:images/sql/odbc/apps_microstrat_loadtable.png[]
++
+. Data Access Mode
++
+Choose a table to load data from and press the _Finish_ button. When doing so, the application offers a choice of loading methodologies.
+You can choose either one; we'll exemplify the _Connect Live_ way:
++
+[[apps_microstrat_live]]
+image:images/sql/odbc/apps_microstrat_live.png[]
++
+. Visualize the data
++
+From the _DATASETS_ column you can choose which table columns (or index fields) to visualize:
++
+[[apps_microstrat_visualize]]
+image:images/sql/odbc/apps_microstrat_visualize.png[]
+
+
+// vim: set noet fenc=utf-8 ff=dos sts=0 sw=4 ts=4 tw=138 columns=140
diff --git a/docs/reference/sql/endpoints/client-apps/powerbi.asciidoc b/docs/reference/sql/endpoints/client-apps/powerbi.asciidoc
new file mode 100644
index 0000000000000..f2ddb3b2e131b
--- /dev/null
+++ b/docs/reference/sql/endpoints/client-apps/powerbi.asciidoc
@@ -0,0 +1,55 @@
+[role="xpack"]
+[testenv="platinum"]
+[[sql-client-apps-powerbi]]
+=== Microsoft Power BI Desktop
+
+experimental[]
+
+[quote, https://powerbi.microsoft.com/en-us/what-is-power-bi/]
+____
+https://powerbi.microsoft.com/en-us/desktop/[Power BI] is a business analytics solution that lets you visualize your data and share
+insights across your organization, or embed them in your app or website.
+____
+
+==== Prerequisites
+
+* Microsoft Power BI Desktop 2.63 or higher
+* {es-sql} <>
+* A preconfigured User or System DSN (see <> section on how to configure a DSN).
+
+==== Data loading
+
+First, you'll need to choose ODBC as the source to load data from. Once launched, click on the _Get Data_ button (under _Home_ tab), then
+on the _More..._ button at the bottom of the list:
+
+[[apps_pbi_fromodbc1]]
+.Get Data / More...
+image:images/sql/odbc/apps_pbi_fromodbc1.png[]
+
+In the newly opened window, scroll to the bottom of the _All_ list and select the _ODBC_ entry, then click on the _Connect_ button:
+
+[[apps_pbi_fromodbc2]]
+.ODBC / Connect
+image:images/sql/odbc/apps_pbi_fromodbc2.png[]
+
+This will replace the current window with a new _From ODBC_ one, where you'll have to select a previously configured DSN:
+
+[[apps_pbi_dsn]]
+.Choose a DSN
+image:images/sql/odbc/apps_pbi_dsn.png[]
+
+Once connected, Power BI will read {es}'s catalog and offer the user a choice of tables (indices) to load data from. Clicking on one of the
+tables will load a preview of the data within:
+
+[[apps_pbi_picktable]]
+.Pick table to load
+image:images/sql/odbc/apps_pbi_picktable.png[]
+
+Now tick the chosen table and click on the _Load_ button. Power BI will load and analyze the data, populating a list with the available
+columns. These can then be used to build the desired visualisation:
+
+[[apps_pbi_loaded]]
+.Visualize the data
+image:images/sql/odbc/apps_pbi_loaded.png[]
+
+// vim: set noet fenc=utf-8 ff=dos sts=0 sw=4 ts=4 tw=138 columns=140
diff --git a/docs/reference/sql/endpoints/client-apps/ps1.asciidoc b/docs/reference/sql/endpoints/client-apps/ps1.asciidoc
new file mode 100644
index 0000000000000..2f43b0978ba27
--- /dev/null
+++ b/docs/reference/sql/endpoints/client-apps/ps1.asciidoc
@@ -0,0 +1,50 @@
+[role="xpack"]
+[testenv="platinum"]
+[[sql-client-apps-ps1]]
+=== Microsoft PowerShell
+
+experimental[]
+
+[quote, https://docs.microsoft.com/en-us/powershell/scripting/powershell-scripting]
+____
+https://docs.microsoft.com/en-us/powershell/[PowerShell] is a task-based command-line shell and scripting language built on .NET.
+____
+
+PowerShell is available on all recent Windows Desktop OSes. It also has embedded ODBC support, thus offering a quick and accessible way to
+connect to {es}.
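+
+For a quick sanity check before writing any scripts, you can list the DSNs that are visible to PowerShell. The following is a minimal
+sketch using the built-in `Get-OdbcDsn` cmdlet (available on Windows 8 / Windows Server 2012 and later); the driver-name filter is an
+assumption - match it against the driver name shown in your ODBC Data Source Administrator:
+
+["source","powershell",subs="attributes,callouts"]
+--------------------------------------------
+# list the ODBC DSNs registered on this machine and keep those backed by the Elasticsearch driver
+Get-OdbcDsn | Where-Object { $_.DriverName -like "*Elasticsearch*" }
+--------------------------------------------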
+
+==== Prerequisites
+
+* Microsoft PowerShell
+* {es-sql} <>
+* A preconfigured User or System DSN (see <> section on how to configure a DSN).
+
+==== Writing a script
+
+While putting the following instructions into a script file is not an absolute requirement, doing so will make it easier to extend and
+reuse. The following instructions exemplify how to execute a simple SELECT query from an existing index in your {es} instance, using a DSN
+configured in advance. Open a new file, `select.ps1`, and place the following instructions in it:
+
+["source","powershell",subs="attributes,callouts"]
+--------------------------------------------
+# connection string referencing the DSN configured in advance, plus the query to run
+$connectstring = "DSN=Local Elasticsearch;"
+$sql = "SELECT * FROM library"
+
+# open an ODBC connection through the DSN
+$conn = New-Object System.Data.Odbc.OdbcConnection($connectstring)
+$conn.open()
+# execute the query and load the results into a DataTable
+$cmd = New-Object system.Data.Odbc.OdbcCommand($sql,$conn)
+$da = New-Object system.Data.Odbc.OdbcDataAdapter($cmd)
+$dt = New-Object system.Data.datatable
+$null = $da.fill($dt)
+$conn.close()
+# output the results
+$dt
+--------------------------------------------
+
+Now open a PowerShell shell and simply execute the script:
+
+[[apps_excel_exed]]
+.Run SQL in PowerShell
+image:images/sql/odbc/apps_ps_exed.png[]
+
+
+// vim: set noet fenc=utf-8 ff=dos sts=0 sw=4 ts=4 tw=138 columns=140
diff --git a/docs/reference/sql/endpoints/client-apps/qlik.asciidoc b/docs/reference/sql/endpoints/client-apps/qlik.asciidoc
new file mode 100644
index 0000000000000..425914ad176bc
--- /dev/null
+++ b/docs/reference/sql/endpoints/client-apps/qlik.asciidoc
@@ -0,0 +1,85 @@
+[role="xpack"]
+[testenv="platinum"]
+[[sql-client-apps-qlik]]
+=== Qlik Sense Desktop
+
+experimental[]
+
+[quote, https://help.qlik.com/en-US/sense/February2018/Subsystems/Hub/Content/Introduction/at-a-glance.htm]
+____
+https://www.qlik.com/us/try-or-buy/download-qlik-sense[Qlik Sense Desktop] is a Windows application that gives individuals the opportunity
+to use Qlik Sense and create personalized, interactive data visualizations, reports, and dashboards from multiple data sources with
+drag-and-drop ease.
+____
+
+==== Prerequisites
+
+* Qlik Sense Desktop November 2018 or higher
+* {es-sql} <>
+* A preconfigured User or System DSN (see <> section on how to configure a DSN).
+
+==== Data loading
+
+To use the {product} to load data into Qlik Sense Desktop, perform the following steps in sequence.
+
+. Create new app
++
+Once the application is launched, you'll first need to click on the _Create new app_ button:
++
+[[apps_qlik_newapp]]
+image:images/sql/odbc/apps_qlik_newapp.png[]
++
+. Name app
++
+...then give it a name,
++
+[[apps_qlik_create]]
+image:images/sql/odbc/apps_qlik_create.png[]
++
+. Open app
++
+...and then open it:
++
+[[apps_qlik_open]]
+image:images/sql/odbc/apps_qlik_open.png[]
++
+. Add data to your app
++
+Start configuring the source to load data from in the newly created app:
++
+[[apps_qlik_adddata]]
+image:images/sql/odbc/apps_qlik_adddata.png[]
++
+. Load from ODBC
++
+You'll be given a choice of sources to select. Click on the _ODBC_ icon:
++
+[[apps_qlik_odbc]]
+image:images/sql/odbc/apps_qlik_odbc.png[]
++
+. Choose DSN
++
+In the _Create new connection (ODBC)_ dialog, click on the DSN name that you have previously configured for your {es} instance:
++
+[[apps_qlik_dsn]]
+image:images/sql/odbc/apps_qlik_dsn.png[]
++
+Provide a username and password in the respective fields, if authentication is enabled on your instance and if these are not already part
+of the DSN. Press the _Create_ button.
++
+. Select source table
++
+The application will now connect to the {es} instance and query the catalog information, presenting you with a list of tables that you can
+load data from:
++
+[[apps_qlik_selecttable]]
+image:images/sql/odbc/apps_qlik_selecttable.png[]
++
+. Visualize the data
++
+Press the _Add data_ button and customize your data visualization:
++
+[[apps_qlik_visualize]]
+image:images/sql/odbc/apps_qlik_visualize.png[]
+
+// vim: set noet fenc=utf-8 ff=dos sts=0 sw=4 ts=4 tw=138 columns=140
diff --git a/docs/reference/sql/endpoints/client-apps/tableau.asciidoc b/docs/reference/sql/endpoints/client-apps/tableau.asciidoc
new file mode 100644
index 0000000000000..c302f67ad977e
--- /dev/null
+++ b/docs/reference/sql/endpoints/client-apps/tableau.asciidoc
@@ -0,0 +1,49 @@
+[role="xpack"]
+[testenv="platinum"]
+[[sql-client-apps-tableau]]
+=== Tableau Desktop
+
+experimental[]
+
+[quote, https://www.tableau.com/products/what-is-tableau]
+____
+https://www.tableau.com/products/desktop[Tableau] is the most powerful, secure, and flexible end-to-end analytics platform
+for your data.
+____
+
+==== Prerequisites
+
+* Tableau 2018 or higher
+* {es-sql} <>
+* A preconfigured User or System DSN (see <> section on how to configure a DSN).
+
+==== Data loading
+
+First, you'll need to choose ODBC as the source to load data from. Once launched, click on the _More..._ menu item and in the expanded
+list of connectors, choose _Other Databases (ODBC)_:
+
+[[apps_tableau_fromodbc]]
+.ODBC as data source
+image:images/sql/odbc/apps_tableau_fromodbc.png[]
+
+In the new connection window that appears, select the previously configured DSN that will connect to the desired {es} instance. Press the
+_Connect_ button. In case credentials are needed, a new window - the driver's DSN editor - will be launched and you'll need to provide the
+_Username_ and _Password_ in the respective fields. (Note that these will not be stored as part of the DSN, but only remembered for the
+duration of the session).
+If the connection is successful, the _Connection Attributes_ section of the connection window is populated with the respective details;
+press the _Sign In_ button next:
+
+[[apps_tableau_connd]]
+.Authenticate and sign in
+image:images/sql/odbc/apps_tableau_connd.png[]
+
+In Tableau Desktop's main window, choose the name of the {es} instance as the _Database_, select one table that you'd like to load from
+the list (click on the magnifying glass icon to have them all shown), drag the table over the work area, then click the _Update Now_
+button to load a preview of the table:
+
+[[apps_tableau_loaded]]
+.Data loaded
+image:images/sql/odbc/apps_tableau_loaded.png[]
+
+
+// vim: set noet fenc=utf-8 ff=dos sts=0 sw=4 ts=4 tw=138 columns=140
diff --git a/docs/reference/sql/endpoints/index.asciidoc b/docs/reference/sql/endpoints/index.asciidoc
index 59c397f97aa46..924905a29283d 100644
--- a/docs/reference/sql/endpoints/index.asciidoc
+++ b/docs/reference/sql/endpoints/index.asciidoc
@@ -2,4 +2,5 @@ include::rest.asciidoc[]
 include::translate.asciidoc[]
 include::cli.asciidoc[]
 include::jdbc.asciidoc[]
+include::odbc.asciidoc[]
 include::client-apps/index.asciidoc[]
diff --git a/docs/reference/sql/endpoints/odbc.asciidoc b/docs/reference/sql/endpoints/odbc.asciidoc
new file mode 100644
index 0000000000000..f0d7886102679
--- /dev/null
+++ b/docs/reference/sql/endpoints/odbc.asciidoc
@@ -0,0 +1,27 @@
+:es: Elasticsearch
+:es-sql: {es} SQL
+:version: 6.5.0
+:product: {es-sql} ODBC Driver
+
+[role="xpack"]
+[testenv="platinum"]
+[[sql-odbc]]
+== SQL ODBC
+
+[[sql-odbc-overview]]
+[float]
+=== Overview
+
+experimental[]
+
+{product} is a feature-rich 3.80 ODBC driver for {es}.
+It is a core level driver, exposing all of the functionality accessible through {es}'s SQL ODBC API, converting ODBC calls into
+{es-sql}.
+
+* <>
+* <>
+
+include::odbc/installation.asciidoc[]
+include::odbc/configuration.asciidoc[]
+
+// vim: set noet fenc=utf-8 ff=dos sts=0 sw=4 ts=4 tw=138
diff --git a/docs/reference/sql/endpoints/odbc/configuration.asciidoc b/docs/reference/sql/endpoints/odbc/configuration.asciidoc
new file mode 100644
index 0000000000000..dbc6f7b87c729
--- /dev/null
+++ b/docs/reference/sql/endpoints/odbc/configuration.asciidoc
@@ -0,0 +1,268 @@
+[role="xpack"]
+[testenv="platinum"]
+[[sql-odbc-setup]]
+=== Configuration
+
+experimental[]
+
+Once the driver has been installed, in order for an application to be able to connect to {es} through ODBC, a set of configuration parameters must be provided to the driver. Depending on the application, there are generally three ways of providing these parameters:
+
+* through a connection string;
+* using a User DSN or System DSN;
+* through a File DSN.
+
+DSN (_data source name_) is a generic name given to the set of parameters an ODBC driver needs to connect to a database.
+
+We will refer to these parameters as _connection parameters_ or _DSN_ (despite some of these parameters configuring some other aspects of a driver's functions; e.g. logging, buffer sizes...).
+
+Using a DSN is the most widely used, simplest and safest way of performing the driver configuration. Constructing a connection string, on the other hand, is the crudest way and consequently the least common method.
+
+We will focus on DSN usage only.
+
+[[data-source-administrator]]
+==== 1. Launching ODBC Data Source Administrator
+
+For DSN management, ODBC provides the _ODBC Data Source Administrator_ application, readily installed on all recent desktop Windows operating systems.
+
+- The 32-bit version of the Odbcad32.exe file is located in the `%systemdrive%\Windows\SysWoW64` folder.
+- The 64-bit version of the Odbcad32.exe file is located in the `%systemdrive%\Windows\System32` folder.
+
+To launch it, open the search menu - _Win + S_ - and type "ODBC Data Sources (64-bit)" or "ODBC Data Sources (32-bit)" and press _Enter_:
+
+[[launch_administrator]]
+.Launching ODBC Data Source Administrator
+image:images/sql/odbc/launch_administrator.png[]
+
+Once launched, you can verify that the driver was installed correctly by clicking on the _Drivers_ tab of the ODBC Data Source Administrator and checking that _Elasticsearch Driver_ is present in the list of installed drivers.
+
+You should also see the version number of the installed driver.
+
+[[administrator_drivers]]
+.Drivers tab
+image:images/sql/odbc/administrator_drivers.png[]
+
+[[dsn-configuration]]
+==== 2. Configure a DSN
+The next step is to configure a DSN. You can choose between the following options mapped on the first three tabs of the Administrator application:
+
+* User DSN
++
+The connections configured under this tab are only available to the currently logged in user. Each of these DSNs is referred to by a chosen arbitrary name (typically a host or cluster name).
++
+The actual set of parameters making up the DSN is stored through the driver in the system registry. Thus, a user will later only need to provide an application with the DSN name in order to connect to the configured {es} instance.
++
+* System DSN
++
+Similar to a User DSN, except that the connections configured under this tab will be available to all the users configured on the system.
+* File DSN
++
+This tab contains functionality that allows one set of connection parameters to be written into a file, rather than the Registry.
++
+Such a file can then be shared among multiple systems and the user will need to specify the path to it, in order to have the application connect to the configured {es} instance.
+
+The configuration steps are similar for all the above points. Following is an example of configuring a System DSN.
+
+[float]
+===== 2.1 Launch {product} DSN Editor
+Click on the _System DSN_ tab, then on the _Add..._ button:
+
+[[system_add]]
+.Add a new DSN
+image:images/sql/odbc/administrator_system_add.png[]
+
+A new window will open, listing all available installed drivers. Click on _{es} Driver_, to highlight it, then on the _Finish_ button:
+
+[[launch_editor]]
+.Launch the DSN Editor
+image:images/sql/odbc/administrator_launch_editor.png[]
+
+This action closes the previously opened second window and opens a new one instead, the {product}'s DSN Editor:
+
+[[dsn_editor]]
+.{product} DSN Editor
+image:images/sql/odbc/dsn_editor_basic.png[]
+
+This new window has three tabs, each responsible for a set of configuration parameters, as follows.
+
+[float]
+===== 2.2 Connection parameters
+This tab allows configuration for the following items:
+
+* Name
++
+This is the name the DSN will be referred to by.
++
+NOTE: The characters available for this field are limited to the set permitted for a Registry key.
++
+Example: _localhost_
++
+* Description
++
+This field allows arbitrary text, generally used for short notes about the configured connection.
++
+Example: _Clear-text connection to the local [::1]:9200._
++
+* Hostname
++
+This field requires an IP address or a resolvable DNS name of the {es} instance that the driver will connect to.
++
+Example: _::1_
++
+* Port
++
+The port that the {es} instance listens on.
++
+NOTE: If left empty, the default *9200* port number will be used.
++
+* Username, Password
++
+If security is enabled, these fields will need to contain the credentials of the user configured to access the REST SQL endpoint.
+
+At a minimum, the _Name_ and _Hostname_ fields must be provisioned before the DSN can be saved.
+
+WARNING: Connection encryption is enabled by default. This will need to be changed if connecting to a SQL API endpoint with no cryptography enabled.
+
+[float]
+===== 2.3 Cryptography parameters
+One of the following SSL options can be chosen:
+
+* Disabled. All communications unencrypted.
++
+The communication between the driver and the {es} instance is performed over a clear-text connection.
++
+WARNING: This setting can expose the access credentials to a 3rd party intercepting the network traffic and is not recommended.
++
+* Enabled. Certificate not validated.
++
+The connection encryption is enabled, but the certificate of the server is not validated.
++
+This is currently the default setting.
++
+NOTE: This setting allows a 3rd party to act with ease as a man-in-the-middle and thus intercept all communications.
++
+* Enabled. Certificate is validated; hostname not validated.
++
+The connection encryption is enabled and the driver verifies that the server's certificate is valid, but it does *not* verify whether the
+certificate is deployed on the server it was meant for.
++
+NOTE: This setting allows a 3rd party that had access to the server's certificate to act as a man-in-the-middle and thus intercept all the
+communications.
++
+* Enabled. Certificate is validated; hostname validated.
++
+The connection encryption is enabled and the driver verifies both that the certificate is valid and that it is deployed on
+the server that the certificate was meant for.
++
+* Enabled. Certificate identity chain validated.
++
+This setting is equivalent to the previous one, with one additional check against the certificate's revocation. This offers the strongest
+security option and is the recommended setting for production deployments.
++
+* Certificate File
++
+In case the server uses a certificate that is not part of the PKI, for example using a self-signed certificate, you can configure the path to an X.509 certificate file that will be used by the driver to validate the server's offered certificate.
++
+The driver will only read the contents of the file just before a connection is attempted. See the <> section below on how to check the validity of the provided parameters.
++
+If using the file browser to locate the certificate - by pressing the _Browse..._ button - only files with _.pem_ and _.der_ extensions
+will be considered by default. Choose _All Files (\*.*)_ from the drop-down if your file ends with a different extension:
++
+[[dsn_editor_cert]]
+.Certificate file browser
+image:images/sql/odbc/dsn_editor_security_cert.png[]
+
+[float]
+===== 2.4 Logging parameters
+For troubleshooting purposes, the {product} offers functionality to log the API calls that an application makes; this is enabled in the Administrator application:
+
+[[administrator_tracing]]
+.Enable Application ODBC API logging
+image:images/sql/odbc/administrator_tracing.png[]
+
+However, this only logs the ODBC API calls made by the application into the _Driver Manager_ and not those made by the _Driver Manager_
+into the driver itself. To enable logging of the calls that the driver receives, as well as internal driver processing events, you can
+enable the driver's logging on the Editor's _Logging_ tab:
+
+* Enable Logging?
++
+Ticking this will enable the driver's logging. A logging directory is also mandatory when this option is enabled (see the next option).
+However, the specified logging directory will be saved in the DSN if provided, even if logging is disabled.
++
+* Log Directory
++
+Specify here the directory that the log files will be written into.
++
+NOTE: The driver will create *one log file per connection*, for those connections that generate logging messages.
++
+* Log Level
++
+Configure the verbosity of the logs.
++
+[[administrator_logging]]
+.Enable driver logging
+image:images/sql/odbc/dsn_editor_logging.png[]
++
+When authentication is enabled, the password will be redacted from the logs.
+
+NOTE: Debug-logging can quickly lead to the creation of many very large files and generate significant processing overhead. Only enable it
+if instructed to do so and preferably only when fetching low volumes of data.
+
+[float]
+[[connection_testing]]
+===== 2.5 Testing the connection
+Once the _Hostname_, the _Port_ (if different from the implicit default) and the SSL options are configured, you can test if the provided
+parameters are correct by pressing the _Test Connection_ button. This will instruct the driver to connect to the {es} instance and perform
+a simple SQL test query. (This will thus require a running {es} instance with the SQL plugin enabled.)
+
+[[dsn_editor_conntest]]
+.Connection testing
+image:images/sql/odbc/dsn_editor_conntest.png[]
+
+NOTE: When testing the connection, all the configured parameters are taken into account, including the logging configuration. This will
+allow early detection of potential file/directory access rights conflicts.
+
+See the <> section below for an alternative way of configuring the logging.
+
+[[available-dsn]]
+==== 3. DSN is available
+Once everything is in place, pressing the _Save_ button will store the configuration into the chosen destination (Registry or file).
+
+Before saving a DSN configuration, the provided file/directory paths are verified to be valid on the current system. The DSN editor
+will, however, not verify in any way the validity or reachability of the configured _Hostname_ : _Port_. See <>
+for an exhaustive check.
+
+If everything is correct, the name of the newly created DSN will be listed as available to use:
+
+[[system_added]]
+.Connection added
+image:images/sql/odbc/administrator_system_added.png[]
+
+[[alternative_logging]]
+==== Alternative logging configuration
+
+Due to the specification of the ODBC API, the driver will receive the configured DSN parameters - including the logging ones - only once a
+connection API is invoked (such as _SQLConnect_ or _SQLDriverConnect_). The _Driver Manager_ will however always make a set of API calls
+into the driver before attempting to establish a connection. To capture those calls as well, one needs to pass logging configuration
+parameters in an alternative way. The {product} will use an environment variable for this purpose.
+
+Configuring an environment variable is OS specific and not detailed in this guide. Whether the variable should be configured system-wide
+or user-specific depends on the way the ODBC-enabled application is being run and on whether logging should affect the current user only.
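+
+As an illustration only, a user-level variable could be set from PowerShell as sketched below; the directory and level are placeholder
+values, and the exact value syntax is described next:
+
+["source","powershell",subs="attributes,callouts"]
+--------------------------------------------
+# persist the driver logging variable for the current user (assumed path and level)
+[Environment]::SetEnvironmentVariable("ESODBC_LOG_DIR", "C:\Temp\esodbc?debug", "User")
+--------------------------------------------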
+
+The definition of the environment variable needs to be done as follows:
+
+* Name: _ESODBC_LOG_DIR_
+
+* Value: [path](?[level]), where:
++
+[path] is the path to the directory that the log files will be written into;
++
+[level] is optional and can take one of the following values: _debug_, _info_, _warn_, _error_; if not provided, _debug_ is assumed.
+
+[[env_var_logging]]
+.Logging environment variable
+image:images/sql/odbc/env_var_log.png[]
+
+NOTE: When enabling the logging through the environment variable, the driver will create *one log file per process*.
+
+Both ways of configuring the logging can coexist and both can use the same destination logging directory. However, one logging message
+will only be logged once, the connection logging taking precedence over the environment variable logging.
+
+// vim: set noet fenc=utf-8 ff=dos sts=0 sw=4 ts=4 tw=138
diff --git a/docs/reference/sql/endpoints/odbc/installation.asciidoc b/docs/reference/sql/endpoints/odbc/installation.asciidoc
new file mode 100644
index 0000000000000..e112e24bba78f
--- /dev/null
+++ b/docs/reference/sql/endpoints/odbc/installation.asciidoc
@@ -0,0 +1,163 @@
+[role="xpack"]
+[testenv="platinum"]
+[[sql-odbc-installation]]
+=== Driver installation
+
+experimental[]
+
+The {product} can be installed on Microsoft Windows using an MSI package. The installation process is simple and is composed of standard MSI wizard steps.
+
+[[prerequisites]]
+==== Installation Prerequisites
+
+Before you install the {product} you need to meet the following prerequisites:
+
+* Windows 10 64 bit _or_ Windows Server 2016 64 bit operating system
+* .NET Framework 4.0 full - https://www.microsoft.com/en-au/download/details.aspx?id=17718
+* Microsoft Visual C++ Redistributable for Visual Studio 2017 - https://support.microsoft.com/en-au/help/2977003/the-latest-supported-visual-c-downloads
+- The 64 bit driver requires the x64 redistributable (this also installs the components needed for the 32 bit driver)
+- The 32 bit driver requires the x86 redistributable
+* Elevated privileges (administrator) for the user performing the installation
+
+If you fail to meet any of the prerequisites, the installer will show an error message and abort the installation.
+
+NOTE: It is not possible to inline upgrade using the MSI. In order to upgrade, you will first have to uninstall the old driver and then install the new driver.
+
+[[download]]
+==== Download the `.msi` package(s)
+
+Download the `.msi` package for {product} {version} from:
+https://www.elastic.co/downloads/odbc-client
+
+There are two versions of the installer available:
+
+- *32 bit driver (x86)* for use with the Microsoft Office 2016 suite of applications; notably Microsoft Excel and Microsoft Access and other 32 bit based programs.
+- *64 bit driver (x64)* recommended for use with all other applications.
+
+Users should consider downloading and installing both the 32 and 64 bit drivers for maximum compatibility across applications installed on their system.
+
+[[installation-gui]]
+==== Installation using the graphical user interface (GUI)
+
+Double-click the downloaded `.msi` package to launch a GUI wizard that will guide you through the installation process.
+
+You will first be presented with a welcome screen:
+
+image::images/sql/odbc/installer_started.png[Installer Welcome Screen]
+
+Clicking *Next* will present the End User License Agreement. You will need to accept the license agreement in order to continue the installation.
+
+image::images/sql/odbc/installer_accept_license.png[Installer EULA Screen]
+
+The following screen allows you to customise the installation path for the Elasticsearch ODBC driver files.
+
+NOTE: The default installation path is of the format: *%ProgramFiles%\Elastic\ODBCDriver{backslash}{version}*
+
+image::images/sql/odbc/installer_choose_destination.png[Installer Driver Path]
+
+You are now ready to install the driver.
+
+NOTE: You will require elevated privileges (administrator) for installation.
+
+image::images/sql/odbc/installer_ready_install.png[Installer Begin]
+
+Assuming the installation takes place without error, you should see a progress screen, followed by the finish screen:
+
+image::images/sql/odbc/installer_installing.png[Installer Installing]
+
+On the finish screen you can launch the ODBC Data Source Administration screen by checking the dialog checkbox. This will automatically launch the configuration screen on close (either 32 bit or 64 bit) where you can configure a DSN.
+
+image::images/sql/odbc/installer_finish.png[Installer Complete]
+
+As with any MSI installation package, a log file for the installation process can be found within the `%TEMP%` directory, with a randomly generated name adhering to the format `MSI.LOG`.
+
+If you encounter an error during installation we would encourage you to open an issue at https://github.com/elastic/elasticsearch-sql-odbc/issues, attach your installation log file and provide additional details so we can investigate.
+
+[[installation-cmd]]
+==== Installation using the command line
+
+NOTE: The examples given below apply to installation of the 64 bit MSI package. To achieve the same result with the 32 bit MSI package you would instead use the filename suffix `windows-x86.msi`
+
+The `.msi` can also be installed via the command line. The simplest installation using the same defaults as the GUI is achieved by first navigating to the download directory, then running:
+
+["source","sh",subs="attributes,callouts"]
+--------------------------------------------
+msiexec.exe /i esodbc-{version}-windows-x86_64.msi /qn
+--------------------------------------------
+
+By default, `msiexec.exe` does not wait for the installation process to complete, since it runs in the Windows subsystem. To wait on the process to finish and ensure that `%ERRORLEVEL%` is set accordingly, it is recommended to use `start /wait` to create a process and wait for it to exit:
+
+["source","sh",subs="attributes,callouts"]
+--------------------------------------------
+start /wait msiexec.exe /i esodbc-{version}-windows-x86_64.msi /qn
+--------------------------------------------
+
+As with any MSI installation package, a log file for the installation process can be found within the `%TEMP%` directory, with a randomly generated name adhering to the format `MSI.LOG`. The path to a log file can be supplied using the `/l` command line argument
+
+["source","sh",subs="attributes,callouts"]
+--------------------------------------------
+start /wait msiexec.exe /i esodbc-{version}-windows-x86_64.msi /qn /l install.log
+--------------------------------------------
+
+Supported Windows Installer command line arguments can be viewed using:
+
+["source","sh",subs="attributes,callouts"]
+--------------------------------------------
+msiexec.exe /help
+--------------------------------------------
+ +[[odbc-msi-command-line-options]] +===== Command line options + +All settings exposed within the GUI are also available as command line arguments (referred to as _properties_ within Windows Installer documentation) that can be passed to `msiexec.exe`: + +[horizontal] +`INSTALLDIR`:: + + The installation directory. + Defaults to ++%ProgramFiles%\Elastic\ODBCDriver{backslash}{version}++. + +To pass a value, simply append the property name and value using the format `=""` to +the installation command. For example, to use a different installation directory to the default one: + +["source","sh",subs="attributes,callouts"] +-------------------------------------------- +start /wait msiexec.exe /i esodbc-{version}-windows-x86_64.msi /qn INSTALLDIR="c:\CustomDirectory" +-------------------------------------------- + +Consult the https://msdn.microsoft.com/en-us/library/windows/desktop/aa367988(v=vs.85).aspx[Windows Installer SDK Command-Line Options] +for additional rules related to values containing quotation marks. + +[[odbc-uninstall-msi-gui]] +===== Uninstall using Add/Remove Programs + +The `.msi` package handles uninstallation of all directories and files added as part of installation. + +WARNING: Uninstallation will remove **all** contents created as part of installation. + +An installed program can be uninstalled by pressing the Windows key and typing `add or remove programs` to open the system settings. + +Once opened, find the Elasticsearch ODBC Driver installation within the list of installed applications, click and choose `Uninstall`: + +[[odbc-msi-installer-uninstall]] +image::images/sql/odbc/uninstall.png[] + +[[odbc-uninstall-msi-command-line]] +===== Uninstall using the command line + +Uninstallation can also be performed from the command line by navigating to the directory +containing the `.msi` package and running: + +["source","sh",subs="attributes,callouts"] +-------------------------------------------- +start /wait msiexec.exe /x esodbc-{version}-windows-x86_64.msi /qn +-------------------------------------------- + +Similar to the install process, a path for a log file for the uninstallation process can be passed using the `/l` command line argument + +["source","sh",subs="attributes,callouts"] +-------------------------------------------- +start /wait msiexec.exe /x esodbc-{version}-windows-x86_64.msi /qn /l uninstall.log +-------------------------------------------- diff --git a/docs/reference/sql/functions/aggs.asciidoc b/docs/reference/sql/functions/aggs.asciidoc index d55da5a45a032..b23b4ebbc9af4 100644 --- a/docs/reference/sql/functions/aggs.asciidoc +++ b/docs/reference/sql/functions/aggs.asciidoc @@ -53,11 +53,42 @@ COUNT(expression<1>) Returns the total number (count) of input values. +In case of `COUNT(*)` or `COUNT()`, _all_ values are considered (including `null` or missing ones). + +In case of `COUNT()` `null` values are not considered. + + ["source","sql",subs="attributes,macros"] -------------------------------------------------- include-tagged::{sql-specs}/docs.csv-spec[aggCountStar] -------------------------------------------------- + +[[sql-functions-aggs-count-all]] +===== `COUNT(ALL)` + +.Synopsis: +[source, sql] +-------------------------------------------------- +COUNT(ALL field_name<1>) +-------------------------------------------------- + +*Input*: + +<1> a field name + +*Output*: numeric value + +.Description: + +Returns the total number (count) of all _non-null_ input values. `COUNT()` and `COUNT(ALL )` are equivalent. 
+ +["source","sql",subs="attributes,macros"] +-------------------------------------------------- +include-tagged::{sql-specs}/docs.csv-spec[aggCountAll] +-------------------------------------------------- + + [[sql-functions-aggs-count-distinct]] ===== `COUNT(DISTINCT)` @@ -75,7 +106,7 @@ COUNT(DISTINCT field_name<1>) .Description: -Returns the total number of _distinct_ values in input values. +Returns the total number of _distinct non-null_ values in input values. ["source","sql",subs="attributes,macros"] -------------------------------------------------- @@ -192,7 +223,7 @@ PERCENTILE(field_name<1>, numeric_exp<2>) *Input*: <1> a numeric field -<2> a numeric expression +<2> a numeric expression (must be a constant and not based on a field) *Output*: `double` numeric value @@ -218,7 +249,7 @@ PERCENTILE_RANK(field_name<1>, numeric_exp<2>) *Input*: <1> a numeric field -<2> a numeric expression +<2> a numeric expression (must be a constant and not based on a field) *Output*: `double` numeric value diff --git a/docs/reference/sql/index.asciidoc b/docs/reference/sql/index.asciidoc index 5c6b23f0091f4..821000b8ee213 100644 --- a/docs/reference/sql/index.asciidoc +++ b/docs/reference/sql/index.asciidoc @@ -36,7 +36,7 @@ indices and return results in tabular format. SQL and print tabular results. <>:: A JDBC driver for {es}. -{sql-odbc}[ODBC]:: +<>:: An ODBC driver for {es}. <>:: Documentation for configuring various SQL/BI tools with {es-sql}. diff --git a/docs/reference/sql/limitations.asciidoc b/docs/reference/sql/limitations.asciidoc index b66c246bc4278..33a0859a7fda1 100644 --- a/docs/reference/sql/limitations.asciidoc +++ b/docs/reference/sql/limitations.asciidoc @@ -69,3 +69,24 @@ a field is an array (has multiple values) or not, so without reading all the dat When doing aggregations (`GROUP BY`) {es-sql} relies on {es}'s `composite` aggregation for its support for paginating results. But this type of aggregation does come with a limitation: sorting can only be applied on the key used for the aggregation's buckets. This means that queries like `SELECT * FROM test GROUP BY age ORDER BY COUNT(*)` are not possible. + +[float] +=== Using a sub-select + +Using sub-selects (`SELECT X FROM (SELECT Y)`) is **supported to a small degree**: any sub-select that can be "flattened" into a single +`SELECT` is possible with {es-sql}. For example: + +["source","sql",subs="attributes,macros"] +-------------------------------------------------- +include-tagged::{sql-specs}/docs.csv-spec[limitationSubSelect] +-------------------------------------------------- + +The query above is possible because it is equivalent with: + +["source","sql",subs="attributes,macros"] +-------------------------------------------------- +include-tagged::{sql-specs}/docs.csv-spec[limitationSubSelectRewritten] +-------------------------------------------------- + +But, if the sub-select would include a `GROUP BY` or `HAVING` or the enclosing `SELECT` would be more complex than `SELECT X +FROM (SELECT ...) WHERE [simple_condition]`, this is currently **un-supported**. 
diff --git a/docs/reference/sql/overview.asciidoc b/docs/reference/sql/overview.asciidoc index 814742b5f78c9..c5b3f0f5399e8 100644 --- a/docs/reference/sql/overview.asciidoc +++ b/docs/reference/sql/overview.asciidoc @@ -30,7 +30,7 @@ No need for additional hardware, processes, runtimes or libraries to query {es}; Lightweight and efficient:: -{es-sql} does not abstract {es} and its search capabilities - on the contrary, it embraces and exposes SQL to allow proper full-text search, in real-time, in the same declarative, succint fashion. +{es-sql} does not abstract {es} and its search capabilities - on the contrary, it embraces and exposes SQL to allow proper full-text search, in real-time, in the same declarative, succinct fashion. diff --git a/docs/reference/upgrade/reindex_upgrade.asciidoc b/docs/reference/upgrade/reindex_upgrade.asciidoc index c13343b64c38a..0f1eb155e6433 100644 --- a/docs/reference/upgrade/reindex_upgrade.asciidoc +++ b/docs/reference/upgrade/reindex_upgrade.asciidoc @@ -48,7 +48,7 @@ pre-5.x indices forward to 6.x. Data in time-based indices generally becomes less useful as time passes and are deleted as they age past your retention period. -Unless you have an unusally long retention period, you can just +Unless you have an unusually long retention period, you can just wait to upgrade to 6.x until all of your pre-5.x indices have been deleted. diff --git a/docs/reference/upgrade/rolling_upgrade.asciidoc b/docs/reference/upgrade/rolling_upgrade.asciidoc index 86a21627a8901..dff3895ac4c1d 100644 --- a/docs/reference/upgrade/rolling_upgrade.asciidoc +++ b/docs/reference/upgrade/rolling_upgrade.asciidoc @@ -18,6 +18,12 @@ you can do a rolling upgrade you must encrypt the internode-communication with SSL/TLS, which requires a full cluster restart. For more information about this requirement and the associated bootstrap check, see <>. +WARNING: The format used for the internal indices used by Kibana and {xpack} +has changed in 6.x. When upgrading from 5.6 to 6.x, these internal indices have +to be {stack-ref}/upgrading-elastic-stack.html#upgrade-internal-indices[upgraded] +before the rolling upgrade procedure can start. Otherwise the upgraded node will +refuse to join the cluster. + To perform a rolling upgrade: . *Disable shard allocation*. diff --git a/docs/reference/upgrade/set-paths-tip.asciidoc b/docs/reference/upgrade/set-paths-tip.asciidoc index 2dd120767c268..adfe3e29dac3a 100644 --- a/docs/reference/upgrade/set-paths-tip.asciidoc +++ b/docs/reference/upgrade/set-paths-tip.asciidoc @@ -2,7 +2,7 @@ ================================================ When you extract the zip or tarball packages, the `elasticsearch-n.n.n` -directory contains the Elasticsearh `config`, `data`, `logs` and +directory contains the Elasticsearch `config`, `data`, `logs` and `plugins` directories. We recommend moving these directories out of the Elasticsearch directory diff --git a/docs/resiliency/index.asciidoc b/docs/resiliency/index.asciidoc index 8c14aca953512..27aa620a34fd2 100644 --- a/docs/resiliency/index.asciidoc +++ b/docs/resiliency/index.asciidoc @@ -607,7 +607,7 @@ Currently, the circuit breaker protects against loading too much field data by e Elasticsearch has moved from an object-based cache to a page-based cache recycler as described in issue {GIT}4557[#4557]. This change makes garbage collection easier by limiting fragmentation, since all pages have the same size and are recycled. 
It also allows managing the size of the cache not based on the number of objects it contains, but on the memory that it uses. -These pages are used for two main purposes: implementing higher level data structures such as hash tables that are used internally by aggregations to eg. map terms to counts, as well as reusing memory in the translog/transport layer as detailed in issue {GIT}5691[#5691]. +These pages are used for two main purposes: implementing higher level data structures such as hash tables that are used internally by aggregations to e.g. map terms to counts, as well as reusing memory in the translog/transport layer as detailed in issue {GIT}5691[#5691]. [float] === Dedicated Master Nodes Resiliency (STATUS: DONE, v1.0.0) diff --git a/docs/ruby/client.asciidoc b/docs/ruby/client.asciidoc index 0301e47d8bcdf..2037ae1a0b280 100644 --- a/docs/ruby/client.asciidoc +++ b/docs/ruby/client.asciidoc @@ -101,7 +101,7 @@ persistent ("keep-alive") HTTP connections. === Extensions The https://github.com/elastic/elasticsearch-ruby/tree/master/elasticsearch-extensions[`elasticsearch-extensions`] -Rubygem provides a number of extensions to the core client, such as an API to programatically launch +Rubygem provides a number of extensions to the core client, such as an API to programmatically launch Elasticsearch clusters (eg. for testing purposes), and more. Please see its diff --git a/docs/src/test/java/org/elasticsearch/smoketest/DocsClientYamlTestSuiteIT.java b/docs/src/test/java/org/elasticsearch/smoketest/DocsClientYamlTestSuiteIT.java index dbda453e5f9fb..b4a6c49754869 100644 --- a/docs/src/test/java/org/elasticsearch/smoketest/DocsClientYamlTestSuiteIT.java +++ b/docs/src/test/java/org/elasticsearch/smoketest/DocsClientYamlTestSuiteIT.java @@ -66,8 +66,8 @@ public static Iterable parameters() throws Exception { entries.addAll(ExecutableSection.DEFAULT_EXECUTABLE_CONTEXTS); entries.add(new NamedXContentRegistry.Entry(ExecutableSection.class, new ParseField("compare_analyzers"), CompareAnalyzers::parse)); - NamedXContentRegistry executeableSectionRegistry = new NamedXContentRegistry(entries); - return ESClientYamlSuiteTestCase.createParameters(executeableSectionRegistry); + NamedXContentRegistry executableSectionRegistry = new NamedXContentRegistry(entries); + return ESClientYamlSuiteTestCase.createParameters(executableSectionRegistry); } @Override diff --git a/gradle/wrapper/gradle-wrapper.jar b/gradle/wrapper/gradle-wrapper.jar index 457aad0d98108..87b738cbd0516 100644 Binary files a/gradle/wrapper/gradle-wrapper.jar and b/gradle/wrapper/gradle-wrapper.jar differ diff --git a/gradle/wrapper/gradle-wrapper.properties b/gradle/wrapper/gradle-wrapper.properties index 5333afa71b6f5..e73574cc2a21c 100644 --- a/gradle/wrapper/gradle-wrapper.properties +++ b/gradle/wrapper/gradle-wrapper.properties @@ -1,6 +1,6 @@ distributionBase=GRADLE_USER_HOME distributionPath=wrapper/dists -distributionUrl=https\://services.gradle.org/distributions/gradle-5.0-all.zip +distributionUrl=https\://services.gradle.org/distributions/gradle-5.1-all.zip zipStoreBase=GRADLE_USER_HOME zipStorePath=wrapper/dists -distributionSha256Sum=17847c8e12b2bcfce26a79f425f082c31d4ded822f99a66127eee2d96bf18216 +distributionSha256Sum=7b8a8b9cce0406733d2d3fab3874386c530657c73c3f4e9a3837d081e26060d8 diff --git a/libs/core/build.gradle b/libs/core/build.gradle index 50b1b88bc6136..1f218f546ffd8 100644 --- a/libs/core/build.gradle +++ b/libs/core/build.gradle @@ -105,7 +105,7 @@ if (isEclipse) { } } -thirdPartyAudit.excludes = [ 
+thirdPartyAudit.ignoreMissingClasses ( // from log4j 'org/osgi/framework/AdaptPermission', 'org/osgi/framework/AdminPermission', @@ -116,4 +116,4 @@ thirdPartyAudit.excludes = [ 'org/osgi/framework/SynchronousBundleListener', 'org/osgi/framework/wiring/BundleWire', 'org/osgi/framework/wiring/BundleWiring' -] +) diff --git a/libs/grok/build.gradle b/libs/grok/build.gradle index 37b494624eddb..9ca02df35aabe 100644 --- a/libs/grok/build.gradle +++ b/libs/grok/build.gradle @@ -48,9 +48,9 @@ if (isEclipse) { } } -thirdPartyAudit.excludes = [ +thirdPartyAudit.ignoreMissingClasses ( // joni has AsmCompilerSupport, but that isn't being used: 'org.objectweb.asm.ClassWriter', 'org.objectweb.asm.MethodVisitor', - 'org.objectweb.asm.Opcodes', -] + 'org.objectweb.asm.Opcodes' +) diff --git a/libs/nio/src/main/java/org/elasticsearch/nio/EventHandler.java b/libs/nio/src/main/java/org/elasticsearch/nio/EventHandler.java index 87a2489fdbc27..7eebfe24665a2 100644 --- a/libs/nio/src/main/java/org/elasticsearch/nio/EventHandler.java +++ b/libs/nio/src/main/java/org/elasticsearch/nio/EventHandler.java @@ -150,11 +150,11 @@ protected void writeException(SocketChannelContext context, Exception exception) } /** - * This method is called when a listener attached to a channel operation throws an exception. + * This method is called when a task or listener attached to a channel operation throws an exception. * * @param exception that occurred */ - protected void listenerException(Exception exception) { + protected void taskException(Exception exception) { exceptionHandler.accept(exception); } diff --git a/libs/nio/src/main/java/org/elasticsearch/nio/NioSelector.java b/libs/nio/src/main/java/org/elasticsearch/nio/NioSelector.java index 6820b6a07188f..1484ba2198f12 100644 --- a/libs/nio/src/main/java/org/elasticsearch/nio/NioSelector.java +++ b/libs/nio/src/main/java/org/elasticsearch/nio/NioSelector.java @@ -33,6 +33,7 @@ import java.util.concurrent.ConcurrentLinkedQueue; import java.util.concurrent.CountDownLatch; import java.util.concurrent.Future; +import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicReference; import java.util.concurrent.locks.ReentrantLock; @@ -54,6 +55,7 @@ public class NioSelector implements Closeable { private final Selector selector; private final ByteBuffer ioBuffer; + private final TaskScheduler taskScheduler = new TaskScheduler(); private final ReentrantLock runLock = new ReentrantLock(); private final CountDownLatch exitedLoop = new CountDownLatch(1); private final AtomicBoolean isClosed = new AtomicBoolean(false); @@ -67,7 +69,7 @@ public NioSelector(EventHandler eventHandler) throws IOException { public NioSelector(EventHandler eventHandler, Selector selector) { this.selector = selector; this.eventHandler = eventHandler; - this.ioBuffer = ByteBuffer.allocateDirect(1 << 16); + this.ioBuffer = ByteBuffer.allocateDirect(1 << 18); } /** @@ -81,6 +83,10 @@ public ByteBuffer getIoBuffer() { return ioBuffer; } + public TaskScheduler getTaskScheduler() { + return taskScheduler; + } + public Selector rawSelector() { return selector; } @@ -145,8 +151,16 @@ void singleLoop() { try { closePendingChannels(); preSelect(); - - int ready = selector.select(300); + long nanosUntilNextTask = taskScheduler.nanosUntilNextTask(System.nanoTime()); + int ready; + if (nanosUntilNextTask == 0) { + ready = selector.selectNow(); + } else { + long millisUntilNextTask = TimeUnit.NANOSECONDS.toMillis(nanosUntilNextTask); + // Only select 
until the next task needs to be run. Do not select with a value of 0 because
+                // that blocks without a timeout.
+                ready = selector.select(Math.min(300, Math.max(millisUntilNextTask, 1)));
+            }
             if (ready > 0) {
                 Set<SelectionKey> selectionKeys = selector.selectedKeys();
                 Iterator<SelectionKey> keyIterator = selectionKeys.iterator();
@@ -164,6 +178,8 @@ void singleLoop() {
                     }
                 }
             }
+
+            handleScheduledTasks(System.nanoTime());
         } catch (ClosedSelectorException e) {
             if (isOpen()) {
                 throw e;
@@ -245,6 +261,17 @@ void preSelect() {
         handleQueuedWrites();
     }
 
+    private void handleScheduledTasks(long nanoTime) {
+        Runnable task;
+        while ((task = taskScheduler.pollTask(nanoTime)) != null) {
+            try {
+                task.run();
+            } catch (Exception e) {
+                eventHandler.taskException(e);
+            }
+        }
+    }
+
     /**
      * Queues a write operation to be handled by the event loop. This can be called by any thread and is the
      * api available for non-selector threads to schedule writes.
@@ -267,8 +294,10 @@ public void queueChannelClose(NioChannel channel) {
         ChannelContext<?> context = channel.getContext();
         assert context.getSelector() == this : "Must schedule a channel for closure with its selector";
         channelsToClose.offer(context);
-        ensureSelectorOpenForEnqueuing(channelsToClose, context);
-        wakeup();
+        if (isOnCurrentThread() == false) {
+            ensureSelectorOpenForEnqueuing(channelsToClose, context);
+            wakeup();
+        }
     }
 
     /**
@@ -324,7 +353,7 @@ public void executeListener(BiConsumer<V, Exception> listener, V value) {
         try {
             listener.accept(value, null);
         } catch (Exception e) {
-            eventHandler.listenerException(e);
+            eventHandler.taskException(e);
         }
     }
 
@@ -340,7 +369,7 @@ public void executeFailedListener(BiConsumer<V, Exception> listener, Excepti
         try {
             listener.accept(null, exception);
         } catch (Exception e) {
-            eventHandler.listenerException(e);
+            eventHandler.taskException(e);
         }
     }
 
diff --git a/libs/nio/src/main/java/org/elasticsearch/nio/SocketChannelContext.java b/libs/nio/src/main/java/org/elasticsearch/nio/SocketChannelContext.java
index 864fe793fdf73..a43a799423f06 100644
--- a/libs/nio/src/main/java/org/elasticsearch/nio/SocketChannelContext.java
+++ b/libs/nio/src/main/java/org/elasticsearch/nio/SocketChannelContext.java
@@ -234,6 +234,9 @@ protected boolean closeNow() {
         return closeNow;
     }
 
+    protected void setCloseNow() {
+        closeNow = true;
+    }
 
     // When you read or write to a nio socket in java, the heap memory passed down must be copied to/from
     // direct memory. The JVM internally does some buffering of the direct memory, however we can save space
@@ -291,9 +294,14 @@ protected int readFromChannel(InboundChannelBuffer channelBuffer) throws IOExcep
         }
     }
 
+    // Currently we limit to 64KB. This is a trade-off which means more syscalls, in exchange for less
+    // copying.
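+    // (With this limit each individual write syscall copies at most 1 << 16 bytes, even though the
+    // selector's shared ioBuffer is allocated in NioSelector as 1 << 18, i.e. 256KB.)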
+    private final int WRITE_LIMIT = 1 << 16;
+
     protected int flushToChannel(ByteBuffer buffer) throws IOException {
         int initialPosition = buffer.position();
         ByteBuffer ioBuffer = getSelector().getIoBuffer();
+        ioBuffer.limit(Math.min(WRITE_LIMIT, ioBuffer.limit()));
         copyBytes(buffer, ioBuffer);
         ioBuffer.flip();
         int bytesWritten;
@@ -315,6 +323,7 @@ protected int flushToChannel(FlushOperation flushOperation) throws IOException {
         int totalBytesFlushed = 0;
         while (continueFlush) {
             ioBuffer.clear();
+            ioBuffer.limit(Math.min(WRITE_LIMIT, ioBuffer.limit()));
             int j = 0;
             ByteBuffer[] buffers = flushOperation.getBuffersToWrite();
             while (j < buffers.length && ioBuffer.remaining() > 0) {
diff --git a/libs/nio/src/main/java/org/elasticsearch/nio/TaskScheduler.java b/libs/nio/src/main/java/org/elasticsearch/nio/TaskScheduler.java
new file mode 100644
index 0000000000000..e197230147c8b
--- /dev/null
+++ b/libs/nio/src/main/java/org/elasticsearch/nio/TaskScheduler.java
@@ -0,0 +1,92 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.nio;
+
+import java.util.Comparator;
+import java.util.PriorityQueue;
+
+/**
+ * A basic priority queue backed timer service. The service is thread local and should only be used by a
+ * single nio selector event loop thread.
+ */
+public class TaskScheduler {
+
+    private final PriorityQueue<DelayedTask> tasks = new PriorityQueue<>(Comparator.comparingLong(DelayedTask::getDeadline));
+
+    /**
+     * Schedule a task at the defined relative nanotime. When {@link #pollTask(long)} is called with a
+     * relative nanotime after the scheduled time, the task will be returned. This method returns a
+     * {@link Runnable} that can be run to cancel the scheduled task.
+ * + * @param task to schedule + * @param relativeNanos defining when to execute the task + * @return runnable that will cancel the task + */ + public Runnable scheduleAtRelativeTime(Runnable task, long relativeNanos) { + DelayedTask delayedTask = new DelayedTask(relativeNanos, task); + tasks.offer(delayedTask); + return delayedTask; + } + + Runnable pollTask(long relativeNanos) { + DelayedTask task; + while ((task = tasks.peek()) != null) { + if (relativeNanos - task.deadline >= 0) { + tasks.remove(); + if (task.cancelled == false) { + return task.runnable; + } + } else { + return null; + } + } + return null; + } + + long nanosUntilNextTask(long relativeNanos) { + DelayedTask nextTask = tasks.peek(); + if (nextTask == null) { + return Long.MAX_VALUE; + } else { + return Math.max(nextTask.deadline - relativeNanos, 0); + } + } + + private static class DelayedTask implements Runnable { + + private final long deadline; + private final Runnable runnable; + private boolean cancelled = false; + + private DelayedTask(long deadline, Runnable runnable) { + this.deadline = deadline; + this.runnable = runnable; + } + + private long getDeadline() { + return deadline; + } + + @Override + public void run() { + cancelled = true; + } + } +} diff --git a/libs/nio/src/test/java/org/elasticsearch/nio/EventHandlerTests.java b/libs/nio/src/test/java/org/elasticsearch/nio/EventHandlerTests.java index 6e1e34ec1f572..f3ffab1baef67 100644 --- a/libs/nio/src/test/java/org/elasticsearch/nio/EventHandlerTests.java +++ b/libs/nio/src/test/java/org/elasticsearch/nio/EventHandlerTests.java @@ -245,7 +245,7 @@ public void testPostHandlingWillRemoveWriteIfNecessary() throws IOException { public void testListenerExceptionCallsGenericExceptionHandler() throws IOException { RuntimeException listenerException = new RuntimeException(); - handler.listenerException(listenerException); + handler.taskException(listenerException); verify(genericExceptionHandler).accept(listenerException); } diff --git a/libs/nio/src/test/java/org/elasticsearch/nio/NioSelectorTests.java b/libs/nio/src/test/java/org/elasticsearch/nio/NioSelectorTests.java index bd5f1c1eb346f..8cde769cca3a2 100644 --- a/libs/nio/src/test/java/org/elasticsearch/nio/NioSelectorTests.java +++ b/libs/nio/src/test/java/org/elasticsearch/nio/NioSelectorTests.java @@ -19,8 +19,10 @@ package org.elasticsearch.nio; +import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.test.ESTestCase; import org.junit.Before; +import org.mockito.ArgumentCaptor; import java.io.IOException; import java.nio.ByteBuffer; @@ -31,6 +33,8 @@ import java.nio.channels.Selector; import java.util.Collections; import java.util.HashSet; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.atomic.AtomicBoolean; import java.util.function.BiConsumer; import static org.mockito.Matchers.any; @@ -98,6 +102,39 @@ public void testQueueChannelForClosed() throws IOException { verify(eventHandler).handleClose(context); } + public void testNioDelayedTasksAreExecuted() throws IOException { + AtomicBoolean isRun = new AtomicBoolean(false); + long nanoTime = System.nanoTime() - 1; + selector.getTaskScheduler().scheduleAtRelativeTime(() -> isRun.set(true), nanoTime); + + assertFalse(isRun.get()); + selector.singleLoop(); + verify(rawSelector).selectNow(); + assertTrue(isRun.get()); + } + + public void testDefaultSelectorTimeoutIsUsedIfNoTaskSooner() throws IOException { + long delay = new TimeValue(15, TimeUnit.MINUTES).nanos(); + selector.getTaskScheduler().scheduleAtRelativeTime(() -> 
{}, System.nanoTime() + delay); + + selector.singleLoop(); + verify(rawSelector).select(300); + } + + public void testSelectorTimeoutWillBeReducedIfTaskSooner() throws Exception { + // As this is a timing based test, we must assertBusy in the very small chance that the loop is + // delayed for 50 milliseconds (causing a selectNow()) + assertBusy(() -> { + ArgumentCaptor captor = ArgumentCaptor.forClass(Long.class); + long delay = new TimeValue(50, TimeUnit.MILLISECONDS).nanos(); + selector.getTaskScheduler().scheduleAtRelativeTime(() -> {}, System.nanoTime() + delay); + selector.singleLoop(); + verify(rawSelector).select(captor.capture()); + assertTrue(captor.getValue() > 0); + assertTrue(captor.getValue() < 300); + }); + } + public void testSelectorClosedExceptionIsNotCaughtWhileRunning() throws IOException { boolean closedSelectorExceptionCaught = false; when(rawSelector.select(anyInt())).thenThrow(new ClosedSelectorException()); @@ -425,7 +462,7 @@ public void testExecuteListenerWillHandleException() throws Exception { selector.executeListener(listener, null); - verify(eventHandler).listenerException(exception); + verify(eventHandler).taskException(exception); } public void testExecuteFailedListenerWillHandleException() throws Exception { @@ -435,6 +472,6 @@ public void testExecuteFailedListenerWillHandleException() throws Exception { selector.executeFailedListener(listener, ioException); - verify(eventHandler).listenerException(exception); + verify(eventHandler).taskException(exception); } } diff --git a/libs/nio/src/test/java/org/elasticsearch/nio/TaskSchedulerTests.java b/libs/nio/src/test/java/org/elasticsearch/nio/TaskSchedulerTests.java new file mode 100644 index 0000000000000..4f5c074826b25 --- /dev/null +++ b/libs/nio/src/test/java/org/elasticsearch/nio/TaskSchedulerTests.java @@ -0,0 +1,104 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.nio; + +import org.elasticsearch.test.ESTestCase; + +import java.util.concurrent.TimeUnit; +import java.util.concurrent.atomic.AtomicBoolean; +import java.util.concurrent.locks.LockSupport; + +public class TaskSchedulerTests extends ESTestCase { + + private TaskScheduler scheduler = new TaskScheduler(); + + public void testScheduleTask() { + AtomicBoolean complete = new AtomicBoolean(false); + + long executeTime = System.nanoTime() + TimeUnit.MILLISECONDS.toNanos(10); + scheduler.scheduleAtRelativeTime(() -> complete.set(true), executeTime); + + while (true) { + long nanoTime = System.nanoTime(); + Runnable runnable = scheduler.pollTask(nanoTime); + if (nanoTime - executeTime >= 0) { + runnable.run(); + assertTrue(complete.get()); + break; + } else { + assertNull(runnable); + LockSupport.parkNanos(TimeUnit.MILLISECONDS.toNanos(1)); + } + } + } + + public void testPollScheduleTaskAtExactTime() { + long executeTime = System.nanoTime() + TimeUnit.MILLISECONDS.toNanos(10); + scheduler.scheduleAtRelativeTime(() -> {}, executeTime); + + assertNull(scheduler.pollTask(executeTime - 1)); + assertNotNull(scheduler.pollTask(executeTime)); + } + + public void testTaskOrdering() { + AtomicBoolean first = new AtomicBoolean(false); + AtomicBoolean second = new AtomicBoolean(false); + AtomicBoolean third = new AtomicBoolean(false); + long executeTime = System.nanoTime() + TimeUnit.MILLISECONDS.toNanos(10); + scheduler.scheduleAtRelativeTime(() -> third.set(true), executeTime + 2); + scheduler.scheduleAtRelativeTime(() -> first.set(true), executeTime); + scheduler.scheduleAtRelativeTime(() -> second.set(true), executeTime + 1); + + scheduler.pollTask(executeTime + 10).run(); + assertTrue(first.get()); + assertFalse(second.get()); + assertFalse(third.get()); + scheduler.pollTask(executeTime + 10).run(); + assertTrue(first.get()); + assertTrue(second.get()); + assertFalse(third.get()); + scheduler.pollTask(executeTime + 10).run(); + assertTrue(first.get()); + assertTrue(second.get()); + assertTrue(third.get()); + } + + public void testTaskCancel() { + AtomicBoolean first = new AtomicBoolean(false); + AtomicBoolean second = new AtomicBoolean(false); + long executeTime = System.nanoTime() + TimeUnit.MILLISECONDS.toNanos(10); + Runnable cancellable = scheduler.scheduleAtRelativeTime(() -> first.set(true), executeTime); + scheduler.scheduleAtRelativeTime(() -> second.set(true), executeTime + 1); + + cancellable.run(); + scheduler.pollTask(executeTime + 10).run(); + assertFalse(first.get()); + assertTrue(second.get()); + assertNull(scheduler.pollTask(executeTime + 10)); + } + + public void testNanosUntilNextTask() { + long nanoTime = System.nanoTime(); + long executeTime = nanoTime + TimeUnit.MILLISECONDS.toNanos(10); + scheduler.scheduleAtRelativeTime(() -> {}, executeTime); + assertEquals(TimeUnit.MILLISECONDS.toNanos(10), scheduler.nanosUntilNextTask(nanoTime)); + assertEquals(TimeUnit.MILLISECONDS.toNanos(5), scheduler.nanosUntilNextTask(nanoTime + TimeUnit.MILLISECONDS.toNanos(5))); + } +} diff --git a/libs/secure-sm/build.gradle b/libs/secure-sm/build.gradle index 3baf3513b1206..97b6652fc12a1 100644 --- a/libs/secure-sm/build.gradle +++ b/libs/secure-sm/build.gradle @@ -66,3 +66,12 @@ jarHell.enabled = false namingConventions { testClass = 'junit.framework.TestCase' } + +testingConventions { + naming.clear() + naming { + Tests { + baseClass 'junit.framework.TestCase' + } + } +} diff --git a/libs/x-content/build.gradle b/libs/x-content/build.gradle index 
0ec4e0d6ad312..af8d5f20d3af7 100644 --- a/libs/x-content/build.gradle +++ b/libs/x-content/build.gradle @@ -71,10 +71,10 @@ if (isEclipse) { } } -thirdPartyAudit.excludes = [ +thirdPartyAudit.ignoreMissingClasses ( // from com.fasterxml.jackson.dataformat.yaml.YAMLMapper (jackson-dataformat-yaml) 'com.fasterxml.jackson.databind.ObjectMapper', -] +) dependencyLicenses { mapping from: /jackson-.*/, to: 'jackson' diff --git a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/CommonAnalysisPlugin.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/CommonAnalysisPlugin.java index ad0e59c276779..0059f7460a873 100644 --- a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/CommonAnalysisPlugin.java +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/CommonAnalysisPlugin.java @@ -171,6 +171,8 @@ public List> getContexts() { public Map>> getAnalyzers() { Map>> analyzers = new TreeMap<>(); analyzers.put("fingerprint", FingerprintAnalyzerProvider::new); + + // TODO remove in 8.0 analyzers.put("standard_html_strip", StandardHtmlStripAnalyzerProvider::new); analyzers.put("pattern", PatternAnalyzerProvider::new); analyzers.put("snowball", SnowballAnalyzerProvider::new); @@ -320,6 +322,7 @@ public Map> getTokenizers() { @Override public List getPreBuiltAnalyzerProviderFactories() { List analyzers = new ArrayList<>(); + // TODO remove in 8.0 analyzers.add(new PreBuiltAnalyzerProviderFactory("standard_html_strip", CachingStrategy.ELASTICSEARCH, () -> new StandardHtmlStripAnalyzer(CharArraySet.EMPTY_SET))); analyzers.add(new PreBuiltAnalyzerProviderFactory("pattern", CachingStrategy.ELASTICSEARCH, diff --git a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/StandardHtmlStripAnalyzer.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/StandardHtmlStripAnalyzer.java index e2ee540fe3e70..a35a0ea2a4a0b 100644 --- a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/StandardHtmlStripAnalyzer.java +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/StandardHtmlStripAnalyzer.java @@ -37,7 +37,10 @@ public class StandardHtmlStripAnalyzer extends StopwordAnalyzerBase { public StandardHtmlStripAnalyzer() { super(EnglishAnalyzer.ENGLISH_STOP_WORDS_SET); } - + /** + * @deprecated in 6.5, can not create in 7.0, and we remove this in 8.0 + */ + @Deprecated StandardHtmlStripAnalyzer(CharArraySet stopwords) { super(stopwords); } diff --git a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/StandardHtmlStripAnalyzerProvider.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/StandardHtmlStripAnalyzerProvider.java index 89535b78962b0..5dd475cc5e408 100644 --- a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/StandardHtmlStripAnalyzerProvider.java +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/StandardHtmlStripAnalyzerProvider.java @@ -19,7 +19,10 @@ package org.elasticsearch.analysis.common; +import org.apache.logging.log4j.LogManager; import org.apache.lucene.analysis.CharArraySet; +import org.elasticsearch.Version; +import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.index.IndexSettings; @@ -28,14 +31,29 @@ public class StandardHtmlStripAnalyzerProvider extends AbstractIndexAnalyzerProvider { + private 
static final DeprecationLogger DEPRECATION_LOGGER = + new DeprecationLogger(LogManager.getLogger(StandardHtmlStripAnalyzerProvider.class)); + private final StandardHtmlStripAnalyzer analyzer; + /** + * @deprecated in 6.5, can not create in 7.0, and we remove this in 8.0 + */ + @Deprecated StandardHtmlStripAnalyzerProvider(IndexSettings indexSettings, Environment env, String name, Settings settings) { super(indexSettings, name, settings); final CharArraySet defaultStopwords = CharArraySet.EMPTY_SET; CharArraySet stopWords = Analysis.parseStopWords(env, settings, defaultStopwords); analyzer = new StandardHtmlStripAnalyzer(stopWords); analyzer.setVersion(version); + if (indexSettings.getIndexVersionCreated().onOrAfter(Version.V_7_0_0)) { + throw new IllegalArgumentException("[standard_html_strip] analyzer is not supported for new indices, " + + "use a custom analyzer using [standard] tokenizer and [html_strip] char_filter, plus [lowercase] filter"); + } else { + DEPRECATION_LOGGER.deprecatedAndMaybeLog("standard_html_strip_deprecation", + "Deprecated analyzer [standard_html_strip] used, " + + "replace it with a custom analyzer using [standard] tokenizer and [html_strip] char_filter, plus [lowercase] filter"); + } } @Override diff --git a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/CommonAnalysisPluginTests.java b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/CommonAnalysisPluginTests.java index b5dc23fbdb893..c52c78ffe27e3 100644 --- a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/CommonAnalysisPluginTests.java +++ b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/CommonAnalysisPluginTests.java @@ -19,6 +19,7 @@ package org.elasticsearch.analysis.common; +import org.apache.lucene.analysis.Analyzer; import org.apache.lucene.analysis.MockTokenizer; import org.apache.lucene.analysis.Tokenizer; import org.elasticsearch.Version; @@ -26,6 +27,8 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.index.IndexSettings; +import org.elasticsearch.index.analysis.IndexAnalyzers; +import org.elasticsearch.index.analysis.NamedAnalyzer; import org.elasticsearch.index.analysis.TokenFilterFactory; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.IndexSettingsModule; @@ -116,4 +119,47 @@ public void testEdgeNGramNoDeprecationWarningPre6_4() throws IOException { assertNotNull(tokenFilterFactory.create(tokenizer)); } } + + + /** + * Check that the deprecated analyzer name "standard_html_strip" throws exception for indices created since 7.0.0 + */ + public void testStandardHtmlStripAnalyzerDeprecationError() throws IOException { + Settings settings = Settings.builder().put(Environment.PATH_HOME_SETTING.getKey(), createTempDir()) + .put(IndexMetaData.SETTING_VERSION_CREATED, + VersionUtils.randomVersionBetween(random(), Version.V_7_0_0, Version.CURRENT)) + .put("index.analysis.analyzer.custom_analyzer.type", "standard_html_strip") + .putList("index.analysis.analyzer.custom_analyzer.stopwords", "a", "b") + .build(); + + IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("index", settings); + CommonAnalysisPlugin commonAnalysisPlugin = new CommonAnalysisPlugin(); + IllegalArgumentException ex = expectThrows(IllegalArgumentException.class, + () -> createTestAnalysis(idxSettings, settings, commonAnalysisPlugin)); + assertEquals("[standard_html_strip] analyzer is not supported for new indices, " + + "use a custom 
analyzer using [standard] tokenizer and [html_strip] char_filter, plus [lowercase] filter", ex.getMessage()); + } + + /** + * Check that the deprecated analyzer name "standard_html_strip" issues a deprecation warning for indices created since 6.5.0 until 7 + */ + public void testStandardHtmlStripAnalyzerDeprecationWarning() throws IOException { + Settings settings = Settings.builder().put(Environment.PATH_HOME_SETTING.getKey(), createTempDir()) + .put(IndexMetaData.SETTING_VERSION_CREATED, + VersionUtils.randomVersionBetween(random(), Version.V_6_0_0, + VersionUtils.getPreviousVersion(Version.V_7_0_0))) + .put("index.analysis.analyzer.custom_analyzer.type", "standard_html_strip") + .putList("index.analysis.analyzer.custom_analyzer.stopwords", "a", "b") + .build(); + + IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("index", settings); + try (CommonAnalysisPlugin commonAnalysisPlugin = new CommonAnalysisPlugin()) { + IndexAnalyzers analyzers = createTestAnalysis(idxSettings, settings, commonAnalysisPlugin).indexAnalyzers; + Analyzer analyzer = analyzers.get("custom_analyzer"); + assertNotNull(((NamedAnalyzer) analyzer).analyzer()); + assertWarnings( + "Deprecated analyzer [standard_html_strip] used, " + + "replace it with a custom analyzer using [standard] tokenizer and [html_strip] char_filter, plus [lowercase] filter"); + } + } } diff --git a/modules/analysis-common/src/test/resources/rest-api-spec/test/analysis-common/20_analyzers.yml b/modules/analysis-common/src/test/resources/rest-api-spec/test/analysis-common/20_analyzers.yml index fa8f6eef8b924..fe5b997974ad1 100644 --- a/modules/analysis-common/src/test/resources/rest-api-spec/test/analysis-common/20_analyzers.yml +++ b/modules/analysis-common/src/test/resources/rest-api-spec/test/analysis-common/20_analyzers.yml @@ -69,14 +69,15 @@ --- "standard_html_strip": + - skip: + version: " - 6.99.99" + reason: only starting from version 7.x this throws an error - do: + catch: /\[standard_html_strip\] analyzer is not supported for new indices, use a custom analyzer using \[standard\] tokenizer and \[html_strip\] char_filter, plus \[lowercase\] filter/ indices.analyze: body: text: analyzer: standard_html_strip - - length: { tokens: 2 } - - match: { tokens.0.token: bold } - - match: { tokens.1.token: italic } --- "pattern": diff --git a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/AppendProcessorFactoryTests.java b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/AppendProcessorFactoryTests.java index d51cb368e4317..4544140737612 100644 --- a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/AppendProcessorFactoryTests.java +++ b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/AppendProcessorFactoryTests.java @@ -95,7 +95,7 @@ public void testCreateNullValue() throws Exception { public void testInvalidMustacheTemplate() throws Exception { AppendProcessor.Factory factory = new AppendProcessor.Factory(TestTemplateService.instance(true)); Map config = new HashMap<>(); - config.put("field", "field1"); + config.put("field", "{{field1}}"); config.put("value", "value1"); String processorTag = randomAlphaOfLength(10); ElasticsearchException exception = expectThrows(ElasticsearchException.class, () -> factory.create(null, processorTag, config)); diff --git a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/FailProcessorFactoryTests.java b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/FailProcessorFactoryTests.java 
index 3c89778f0e825..78891e0a02a64 100644 --- a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/FailProcessorFactoryTests.java +++ b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/FailProcessorFactoryTests.java @@ -62,7 +62,7 @@ public void testCreateMissingMessageField() throws Exception { public void testInvalidMustacheTemplate() throws Exception { FailProcessor.Factory factory = new FailProcessor.Factory(TestTemplateService.instance(true)); Map config = new HashMap<>(); - config.put("message", "error"); + config.put("message", "{{error}}"); String processorTag = randomAlphaOfLength(10); ElasticsearchException exception = expectThrows(ElasticsearchException.class, () -> factory.create(null, processorTag, config)); assertThat(exception.getMessage(), equalTo("java.lang.RuntimeException: could not compile script")); diff --git a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/RemoveProcessorFactoryTests.java b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/RemoveProcessorFactoryTests.java index bebe780276208..61c1f73142798 100644 --- a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/RemoveProcessorFactoryTests.java +++ b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/RemoveProcessorFactoryTests.java @@ -75,7 +75,7 @@ public void testCreateMissingField() throws Exception { public void testInvalidMustacheTemplate() throws Exception { RemoveProcessor.Factory factory = new RemoveProcessor.Factory(TestTemplateService.instance(true)); Map config = new HashMap<>(); - config.put("field", "field1"); + config.put("field", "{{field1}}"); String processorTag = randomAlphaOfLength(10); ElasticsearchException exception = expectThrows(ElasticsearchException.class, () -> factory.create(null, processorTag, config)); assertThat(exception.getMessage(), equalTo("java.lang.RuntimeException: could not compile script")); diff --git a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/SetProcessorFactoryTests.java b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/SetProcessorFactoryTests.java index 9602f34f698f7..b3e183a8ab9ce 100644 --- a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/SetProcessorFactoryTests.java +++ b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/SetProcessorFactoryTests.java @@ -103,7 +103,7 @@ public void testCreateNullValue() throws Exception { public void testInvalidMustacheTemplate() throws Exception { SetProcessor.Factory factory = new SetProcessor.Factory(TestTemplateService.instance(true)); Map config = new HashMap<>(); - config.put("field", "field1"); + config.put("field", "{{field1}}"); config.put("value", "value1"); String processorTag = randomAlphaOfLength(10); ElasticsearchException exception = expectThrows(ElasticsearchException.class, () -> factory.create(null, processorTag, config)); diff --git a/modules/ingest-geoip/build.gradle b/modules/ingest-geoip/build.gradle index 02f8e465c304f..1f356792032b6 100644 --- a/modules/ingest-geoip/build.gradle +++ b/modules/ingest-geoip/build.gradle @@ -50,7 +50,7 @@ bundlePlugin { } } -thirdPartyAudit.excludes = [ +thirdPartyAudit.ignoreMissingClasses ( // geoip WebServiceClient needs apache http client, but we're not using WebServiceClient: 'org.apache.http.HttpEntity', 'org.apache.http.HttpHost', @@ -66,4 +66,4 @@ thirdPartyAudit.excludes = [ 'org.apache.http.impl.client.CloseableHttpClient', 'org.apache.http.impl.client.HttpClientBuilder', 
'org.apache.http.util.EntityUtils' -] +) diff --git a/modules/lang-expression/licenses/lucene-expressions-8.0.0-snapshot-774e9aefbc.jar.sha1 b/modules/lang-expression/licenses/lucene-expressions-8.0.0-snapshot-774e9aefbc.jar.sha1 deleted file mode 100644 index 2e1ae79a4f7fd..0000000000000 --- a/modules/lang-expression/licenses/lucene-expressions-8.0.0-snapshot-774e9aefbc.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -69279f27885c43662ca7216a6939dacbdf9b4795 \ No newline at end of file diff --git a/modules/lang-expression/licenses/lucene-expressions-8.0.0-snapshot-a1c6e642aa.jar.sha1 b/modules/lang-expression/licenses/lucene-expressions-8.0.0-snapshot-a1c6e642aa.jar.sha1 new file mode 100644 index 0000000000000..6f055c7ac5eda --- /dev/null +++ b/modules/lang-expression/licenses/lucene-expressions-8.0.0-snapshot-a1c6e642aa.jar.sha1 @@ -0,0 +1 @@ +825a0eb2b9ff58df36753971217cba80466d7132 \ No newline at end of file diff --git a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/RestMultiSearchTemplateAction.java b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/RestMultiSearchTemplateAction.java index 30543ea236a14..7773322357452 100644 --- a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/RestMultiSearchTemplateAction.java +++ b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/RestMultiSearchTemplateAction.java @@ -49,7 +49,7 @@ public class RestMultiSearchTemplateAction extends BaseRestHandler { static { final Set responseParams = new HashSet<>( - Arrays.asList(RestSearchAction.TYPED_KEYS_PARAM, RestSearchAction.TOTAL_HIT_AS_INT_PARAM) + Arrays.asList(RestSearchAction.TYPED_KEYS_PARAM, RestSearchAction.TOTAL_HITS_AS_INT_PARAM) ); RESPONSE_PARAMS = Collections.unmodifiableSet(responseParams); } @@ -103,6 +103,7 @@ public static MultiSearchTemplateRequest parseRequest(RestRequest restRequest, b } else { throw new IllegalArgumentException("Malformed search template"); } + RestSearchAction.checkRestTotalHits(restRequest, searchRequest); }); return multiRequest; } diff --git a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/RestSearchTemplateAction.java b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/RestSearchTemplateAction.java index 196147bb7308a..70a12f0c8bf56 100644 --- a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/RestSearchTemplateAction.java +++ b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/RestSearchTemplateAction.java @@ -43,7 +43,7 @@ public class RestSearchTemplateAction extends BaseRestHandler { private static final Set RESPONSE_PARAMS; static { - final Set responseParams = new HashSet<>(Arrays.asList(TYPED_KEYS_PARAM, RestSearchAction.TOTAL_HIT_AS_INT_PARAM)); + final Set responseParams = new HashSet<>(Arrays.asList(TYPED_KEYS_PARAM, RestSearchAction.TOTAL_HITS_AS_INT_PARAM)); RESPONSE_PARAMS = Collections.unmodifiableSet(responseParams); } @@ -77,6 +77,7 @@ public RestChannelConsumer prepareRequest(RestRequest request, NodeClient client searchTemplateRequest = SearchTemplateRequest.fromXContent(parser); } searchTemplateRequest.setRequest(searchRequest); + RestSearchAction.checkRestTotalHits(request, searchRequest); return channel -> client.execute(SearchTemplateAction.INSTANCE, searchTemplateRequest, new RestStatusToXContentListener<>(channel)); } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessExecuteAction.java 
b/modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessExecuteAction.java index 5b120a8eba732..d8815ef9026c4 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessExecuteAction.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessExecuteAction.java @@ -582,7 +582,7 @@ private static Response prepareRamIndex(Request request, String type = indexService.mapperService().documentMapper().type(); BytesReference document = request.contextSetup.document; XContentType xContentType = request.contextSetup.xContentType; - SourceToParse sourceToParse = SourceToParse.source(index, type, "_id", document, xContentType); + SourceToParse sourceToParse = new SourceToParse(index, type, "_id", document, xContentType); ParsedDocument parsedDocument = indexService.mapperService().documentMapper().parse(sourceToParse); indexWriter.addDocuments(parsedDocument.docs()); try (IndexReader indexReader = DirectoryReader.open(indexWriter)) { diff --git a/modules/lang-painless/src/main/resources/org/elasticsearch/painless/spi/org.elasticsearch.txt b/modules/lang-painless/src/main/resources/org/elasticsearch/painless/spi/org.elasticsearch.txt index 2b4946d1ca852..88cb4d73adf6f 100644 --- a/modules/lang-painless/src/main/resources/org/elasticsearch/painless/spi/org.elasticsearch.txt +++ b/modules/lang-painless/src/main/resources/org/elasticsearch/painless/spi/org.elasticsearch.txt @@ -87,6 +87,7 @@ class org.elasticsearch.script.JodaCompatibleZonedDateTime { int getNano() int getSecond() int getYear() + ZoneId getZone() ZonedDateTime minus(TemporalAmount) ZonedDateTime minus(long,TemporalUnit) ZonedDateTime minusYears(long) diff --git a/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/ScaledFloatFieldMapper.java b/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/ScaledFloatFieldMapper.java index 07ee5b5dc6243..d3719ec884fa1 100644 --- a/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/ScaledFloatFieldMapper.java +++ b/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/ScaledFloatFieldMapper.java @@ -62,6 +62,7 @@ import org.joda.time.DateTimeZone; import java.io.IOException; +import java.math.BigDecimal; import java.util.ArrayList; import java.util.Collections; import java.util.Iterator; @@ -227,8 +228,7 @@ public Query existsQuery(QueryShardContext context) { @Override public Query termQuery(Object value, QueryShardContext context) { failIfNotIndexed(); - double queryValue = parse(value); - long scaledValue = Math.round(queryValue * scalingFactor); + long scaledValue = Math.round(scale(value)); Query query = NumberFieldMapper.NumberType.LONG.termQuery(name(), scaledValue); if (boost() != 1f) { query = new BoostQuery(query, boost()); @@ -241,8 +241,7 @@ public Query termsQuery(List values, QueryShardContext context) { failIfNotIndexed(); List scaledValues = new ArrayList<>(values.size()); for (Object value : values) { - double queryValue = parse(value); - long scaledValue = Math.round(queryValue * scalingFactor); + long scaledValue = Math.round(scale(value)); scaledValues.add(scaledValue); } Query query = NumberFieldMapper.NumberType.LONG.termsQuery(name(), Collections.unmodifiableList(scaledValues)); @@ -257,7 +256,7 @@ public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower failIfNotIndexed(); Long lo = null; if (lowerTerm != null) { - double dValue = parse(lowerTerm) * scalingFactor; + double dValue = scale(lowerTerm); if (includeLower == false) { 
dValue = Math.nextUp(dValue); } @@ -265,7 +264,7 @@ public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower } Long hi = null; if (upperTerm != null) { - double dValue = parse(upperTerm) * scalingFactor; + double dValue = scale(upperTerm); if (includeUpper == false) { dValue = Math.nextDown(dValue); } @@ -326,6 +325,19 @@ public boolean equals(Object o) { public int hashCode() { return 31 * super.hashCode() + Double.hashCode(scalingFactor); } + + /** + * Parses input value and multiplies it with the scaling factor. + * Uses the round-trip of creating a {@link BigDecimal} from the stringified {@code double} + * input to ensure intuitively exact floating point operations. + * (e.g. for a scaling factor of 100, JVM behaviour results in {@code 79.99D * 100 ==> 7998.99..} compared to + * {@code scale(79.99) ==> 7999}) + * @param input Input value to parse floating point num from + * @return Scaled value + */ + private double scale(Object input) { + return new BigDecimal(Double.toString(parse(input))).multiply(BigDecimal.valueOf(scalingFactor)).doubleValue(); + } } private Explicit ignoreMalformed; diff --git a/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/DenseVectorFieldMapperTests.java b/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/DenseVectorFieldMapperTests.java index db0bf5d48d444..2239c99a310f5 100644 --- a/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/DenseVectorFieldMapperTests.java +++ b/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/DenseVectorFieldMapperTests.java @@ -58,7 +58,7 @@ public void testDefaults() throws Exception { assertEquals(mapping, mapper.mappingSource().toString()); float[] expectedArray = {-12.1f, 100.7f, -4}; - ParsedDocument doc1 = mapper.parse(SourceToParse.source("test-index", "_doc", "1", BytesReference + ParsedDocument doc1 = mapper.parse(new SourceToParse("test-index", "_doc", "1", BytesReference .bytes(XContentFactory.jsonBuilder() .startObject() .startArray("my-dense-vector").value(expectedArray[0]).value(expectedArray[1]).value(expectedArray[2]).endArray() diff --git a/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/FeatureFieldMapperTests.java b/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/FeatureFieldMapperTests.java index 8e400815bec77..73879bb0225b9 100644 --- a/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/FeatureFieldMapperTests.java +++ b/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/FeatureFieldMapperTests.java @@ -72,7 +72,7 @@ public void testDefaults() throws Exception { assertEquals(mapping, mapper.mappingSource().toString()); - ParsedDocument doc1 = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference + ParsedDocument doc1 = mapper.parse(new SourceToParse("test", "type", "1", BytesReference .bytes(XContentFactory.jsonBuilder() .startObject() .field("field", 10) @@ -84,7 +84,7 @@ public void testDefaults() throws Exception { assertThat(fields[0], Matchers.instanceOf(FeatureField.class)); FeatureField featureField1 = (FeatureField) fields[0]; - ParsedDocument doc2 = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference + ParsedDocument doc2 = mapper.parse(new SourceToParse("test", "type", "1", BytesReference .bytes(XContentFactory.jsonBuilder() .startObject() .field("field", 12) @@ -108,7 +108,7 @@ public void testNegativeScoreImpact() throws Exception { assertEquals(mapping, mapper.mappingSource().toString()); - 
ParsedDocument doc1 = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference + ParsedDocument doc1 = mapper.parse(new SourceToParse("test", "type", "1", BytesReference .bytes(XContentFactory.jsonBuilder() .startObject() .field("field", 10) @@ -120,7 +120,7 @@ public void testNegativeScoreImpact() throws Exception { assertThat(fields[0], Matchers.instanceOf(FeatureField.class)); FeatureField featureField1 = (FeatureField) fields[0]; - ParsedDocument doc2 = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference + ParsedDocument doc2 = mapper.parse(new SourceToParse("test", "type", "1", BytesReference .bytes(XContentFactory.jsonBuilder() .startObject() .field("field", 12) @@ -145,7 +145,7 @@ public void testRejectMultiValuedFields() throws MapperParsingException, IOExcep assertEquals(mapping, mapper.mappingSource().toString()); MapperParsingException e = expectThrows(MapperParsingException.class, - () -> mapper.parse(SourceToParse.source("test", "type", "1", BytesReference + () -> mapper.parse(new SourceToParse("test", "type", "1", BytesReference .bytes(XContentFactory.jsonBuilder() .startObject() .field("field", Arrays.asList(10, 20)) @@ -155,7 +155,7 @@ public void testRejectMultiValuedFields() throws MapperParsingException, IOExcep e.getCause().getMessage()); e = expectThrows(MapperParsingException.class, - () -> mapper.parse(SourceToParse.source("test", "type", "1", BytesReference + () -> mapper.parse(new SourceToParse("test", "type", "1", BytesReference .bytes(XContentFactory.jsonBuilder() .startObject() .startArray("foo") diff --git a/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/FeatureVectorFieldMapperTests.java b/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/FeatureVectorFieldMapperTests.java index fccb62b1a3439..852b6f6fc3c5c 100644 --- a/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/FeatureVectorFieldMapperTests.java +++ b/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/FeatureVectorFieldMapperTests.java @@ -61,7 +61,7 @@ public void testDefaults() throws Exception { assertEquals(mapping, mapper.mappingSource().toString()); - ParsedDocument doc1 = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference + ParsedDocument doc1 = mapper.parse(new SourceToParse("test", "type", "1", BytesReference .bytes(XContentFactory.jsonBuilder() .startObject() .startObject("field") @@ -95,7 +95,7 @@ public void testRejectMultiValuedFields() throws MapperParsingException, IOExcep assertEquals(mapping, mapper.mappingSource().toString()); MapperParsingException e = expectThrows(MapperParsingException.class, - () -> mapper.parse(SourceToParse.source("test", "type", "1", BytesReference + () -> mapper.parse(new SourceToParse("test", "type", "1", BytesReference .bytes(XContentFactory.jsonBuilder() .startObject() .startObject("field") @@ -107,7 +107,7 @@ public void testRejectMultiValuedFields() throws MapperParsingException, IOExcep "START_ARRAY", e.getCause().getMessage()); e = expectThrows(MapperParsingException.class, - () -> mapper.parse(SourceToParse.source("test", "type", "1", BytesReference + () -> mapper.parse(new SourceToParse("test", "type", "1", BytesReference .bytes(XContentFactory.jsonBuilder() .startObject() .startArray("foo") diff --git a/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/ScaledFloatFieldMapperTests.java b/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/ScaledFloatFieldMapperTests.java index 
67e0fad53ec49..366f9a2e5745b 100644 --- a/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/ScaledFloatFieldMapperTests.java +++ b/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/ScaledFloatFieldMapperTests.java @@ -65,7 +65,7 @@ public void testDefaults() throws Exception { assertEquals(mapping, mapper.mappingSource().toString()); - ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference + ParsedDocument doc = mapper.parse(new SourceToParse("test", "type", "1", BytesReference .bytes(XContentFactory.jsonBuilder() .startObject() .field("field", 123) @@ -115,7 +115,7 @@ public void testNotIndexed() throws Exception { assertEquals(mapping, mapper.mappingSource().toString()); - ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference + ParsedDocument doc = mapper.parse(new SourceToParse("test", "type", "1", BytesReference .bytes(XContentFactory.jsonBuilder() .startObject() .field("field", 123) @@ -139,7 +139,7 @@ public void testNoDocValues() throws Exception { assertEquals(mapping, mapper.mappingSource().toString()); - ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference + ParsedDocument doc = mapper.parse(new SourceToParse("test", "type", "1", BytesReference .bytes(XContentFactory.jsonBuilder() .startObject() .field("field", 123) @@ -163,7 +163,7 @@ public void testStore() throws Exception { assertEquals(mapping, mapper.mappingSource().toString()); - ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference + ParsedDocument doc = mapper.parse(new SourceToParse("test", "type", "1", BytesReference .bytes(XContentFactory.jsonBuilder() .startObject() .field("field", 123) @@ -192,7 +192,7 @@ public void testCoerce() throws Exception { assertEquals(mapping, mapper.mappingSource().toString()); - ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference + ParsedDocument doc = mapper.parse(new SourceToParse("test", "type", "1", BytesReference .bytes(XContentFactory.jsonBuilder() .startObject() .field("field", "123") @@ -216,7 +216,7 @@ public void testCoerce() throws Exception { assertEquals(mapping, mapper2.mappingSource().toString()); - ThrowingRunnable runnable = () -> mapper2.parse(SourceToParse.source("test", "type", "1", BytesReference + ThrowingRunnable runnable = () -> mapper2.parse(new SourceToParse("test", "type", "1", BytesReference .bytes(XContentFactory.jsonBuilder() .startObject() .field("field", "123") @@ -245,7 +245,7 @@ private void doTestIgnoreMalformed(String value, String exceptionMessageContains assertEquals(mapping, mapper.mappingSource().toString()); - ThrowingRunnable runnable = () -> mapper.parse(SourceToParse.source("test", "type", "1", BytesReference + ThrowingRunnable runnable = () -> mapper.parse(new SourceToParse("test", "type", "1", BytesReference .bytes(XContentFactory.jsonBuilder() .startObject() .field("field", value) @@ -261,7 +261,7 @@ private void doTestIgnoreMalformed(String value, String exceptionMessageContains DocumentMapper mapper2 = parser.parse("type", new CompressedXContent(mapping)); - ParsedDocument doc = mapper2.parse(SourceToParse.source("test", "type", "1", BytesReference + ParsedDocument doc = mapper2.parse(new SourceToParse("test", "type", "1", BytesReference .bytes(XContentFactory.jsonBuilder() .startObject() .field("field", value) @@ -286,7 +286,7 @@ public void testNullValue() throws IOException { DocumentMapper mapper = parser.parse("type", 
new CompressedXContent(mapping)); assertEquals(mapping, mapper.mappingSource().toString()); - ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference + ParsedDocument doc = mapper.parse(new SourceToParse("test", "type", "1", BytesReference .bytes(XContentFactory.jsonBuilder() .startObject() .nullField("field") @@ -308,7 +308,7 @@ public void testNullValue() throws IOException { mapper = parser.parse("type", new CompressedXContent(mapping)); assertEquals(mapping, mapper.mappingSource().toString()); - doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference + doc = mapper.parse(new SourceToParse("test", "type", "1", BytesReference .bytes(XContentFactory.jsonBuilder() .startObject() .nullField("field") diff --git a/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/ScaledFloatFieldTypeTests.java b/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/ScaledFloatFieldTypeTests.java index 1d88022b3e0e0..4389e809bfb2f 100644 --- a/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/ScaledFloatFieldTypeTests.java +++ b/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/ScaledFloatFieldTypeTests.java @@ -140,6 +140,8 @@ public void testRoundsUpperBoundCorrectly() { assertEquals("scaled_float:[-9223372036854775808 TO 10]", scaledFloatQ.toString()); scaledFloatQ = ft.rangeQuery(null, 0.105, true, true, null); assertEquals("scaled_float:[-9223372036854775808 TO 10]", scaledFloatQ.toString()); + scaledFloatQ = ft.rangeQuery(null, 79.99, true, true, null); + assertEquals("scaled_float:[-9223372036854775808 TO 7999]", scaledFloatQ.toString()); } public void testRoundsLowerBoundCorrectly() { diff --git a/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/SparseVectorFieldMapperTests.java b/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/SparseVectorFieldMapperTests.java index e805cf81bdf8c..06710e39592cc 100644 --- a/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/SparseVectorFieldMapperTests.java +++ b/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/SparseVectorFieldMapperTests.java @@ -65,7 +65,7 @@ protected Collection<Class<? extends Plugin>> getPlugins() { public void testDefaults() throws Exception { int[] indexedDims = {65535, 50, 2}; float[] indexedValues = {0.5f, 1800f, -34567.11f}; - ParsedDocument doc1 = mapper.parse(SourceToParse.source("test-index", "_doc", "1", BytesReference + ParsedDocument doc1 = mapper.parse(new SourceToParse("test-index", "_doc", "1", BytesReference .bytes(XContentFactory.jsonBuilder() .startObject() .startObject("my-sparse-vector") @@ -103,7 +103,7 @@ public void testDefaults() throws Exception { public void testErrors() { // 1. test for an error on negative dimension MapperParsingException e = expectThrows(MapperParsingException.class, () -> { - mapper.parse(SourceToParse.source("test-index", "_doc", "1", BytesReference + mapper.parse(new SourceToParse("test-index", "_doc", "1", BytesReference .bytes(XContentFactory.jsonBuilder() .startObject() .startObject("my-sparse-vector") @@ -118,7 +118,7 @@ public void testErrors() {
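Taken together, the mapper test hunks above and below are one mechanical migration: the static factory SourceToParse.source(...) is replaced by a public constructor, and routing moves from a .routing(...) setter to a trailing constructor argument. A minimal before/after sketch, assuming the usual mapper-test fixtures (a DocumentMapper named mapper; the index, type, id, and field values are illustrative):

    import org.elasticsearch.common.bytes.BytesReference;
    import org.elasticsearch.common.xcontent.XContentFactory;
    import org.elasticsearch.common.xcontent.XContentType;
    import org.elasticsearch.index.mapper.ParsedDocument;
    import org.elasticsearch.index.mapper.SourceToParse;

    // Shared document source for both variants.
    BytesReference source = BytesReference.bytes(XContentFactory.jsonBuilder()
        .startObject()
            .field("field", 123)
        .endObject());

    // Before this PR: static factory, routing applied through a setter.
    ParsedDocument before = mapper.parse(
        SourceToParse.source("test", "type", "1", source, XContentType.JSON).routing("1"));

    // After this PR: public constructor, routing as the final argument.
    ParsedDocument after = mapper.parse(
        new SourceToParse("test", "type", "1", source, XContentType.JSON, "1"));

// 2.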
test for an error on a dimension greater than MAX_DIMS_NUMBER e = expectThrows(MapperParsingException.class, () -> { - mapper.parse(SourceToParse.source("test-index", "_doc", "1", BytesReference + mapper.parse(new SourceToParse("test-index", "_doc", "1", BytesReference .bytes(XContentFactory.jsonBuilder() .startObject() .startObject("my-sparse-vector") @@ -133,7 +133,7 @@ public void testErrors() { // 3. test for an error on a wrong formatted dimension e = expectThrows(MapperParsingException.class, () -> { - mapper.parse(SourceToParse.source("test-index", "_doc", "1", BytesReference + mapper.parse(new SourceToParse("test-index", "_doc", "1", BytesReference .bytes(XContentFactory.jsonBuilder() .startObject() .startObject("my-sparse-vector") @@ -148,7 +148,7 @@ public void testErrors() { // 4. test for an error on a wrong format for the map of dims to values e = expectThrows(MapperParsingException.class, () -> { - mapper.parse(SourceToParse.source("test-index", "_doc", "1", BytesReference + mapper.parse(new SourceToParse("test-index", "_doc", "1", BytesReference .bytes(XContentFactory.jsonBuilder() .startObject() .startObject("my-sparse-vector") diff --git a/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/TokenCountFieldMapperTests.java b/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/TokenCountFieldMapperTests.java index 34ad1934a3e09..1965cbad06673 100644 --- a/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/TokenCountFieldMapperTests.java +++ b/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/TokenCountFieldMapperTests.java @@ -192,7 +192,7 @@ private SourceToParse createDocument(String fieldValue) throws Exception { .field("test", fieldValue) .endObject()); - return SourceToParse.source("test", "person", "1", request, XContentType.JSON); + return new SourceToParse("test", "person", "1", request, XContentType.JSON); } private ParseContext.Document parseDocument(DocumentMapper mapper, SourceToParse request) { diff --git a/modules/parent-join/src/test/java/org/elasticsearch/join/mapper/ParentJoinFieldMapperTests.java b/modules/parent-join/src/test/java/org/elasticsearch/join/mapper/ParentJoinFieldMapperTests.java index 97ca8900ea0e0..6653117c62afb 100644 --- a/modules/parent-join/src/test/java/org/elasticsearch/join/mapper/ParentJoinFieldMapperTests.java +++ b/modules/parent-join/src/test/java/org/elasticsearch/join/mapper/ParentJoinFieldMapperTests.java @@ -63,12 +63,12 @@ public void testSingleLevel() throws Exception { assertTrue(docMapper.mappers().getMapper("join_field") == ParentJoinFieldMapper.getMapper(service.mapperService())); // Doc without join - ParsedDocument doc = docMapper.parse(SourceToParse.source("test", "type", "0", + ParsedDocument doc = docMapper.parse(new SourceToParse("test", "type", "0", BytesReference.bytes(XContentFactory.jsonBuilder().startObject().endObject()), XContentType.JSON)); assertNull(doc.rootDoc().getBinaryValue("join_field")); // Doc parent - doc = docMapper.parse(SourceToParse.source("test", "type", "1", + doc = docMapper.parse(new SourceToParse("test", "type", "1", BytesReference.bytes(XContentFactory.jsonBuilder().startObject() .field("join_field", "parent") .endObject()), XContentType.JSON)); @@ -76,19 +76,19 @@ public void testSingleLevel() throws Exception { assertEquals("parent", doc.rootDoc().getBinaryValue("join_field").utf8ToString()); // Doc child - doc = docMapper.parse(SourceToParse.source("test", "type", "2", + doc = docMapper.parse(new SourceToParse("test", 
"type", "2", BytesReference.bytes(XContentFactory.jsonBuilder().startObject() .startObject("join_field") .field("name", "child") .field("parent", "1") .endObject() - .endObject()), XContentType.JSON).routing("1")); + .endObject()), XContentType.JSON, "1")); assertEquals("1", doc.rootDoc().getBinaryValue("join_field#parent").utf8ToString()); assertEquals("child", doc.rootDoc().getBinaryValue("join_field").utf8ToString()); - // Unkwnown join name + // Unknown join name MapperException exc = expectThrows(MapperParsingException.class, - () -> docMapper.parse(SourceToParse.source("test", "type", "1", + () -> docMapper.parse(new SourceToParse("test", "type", "1", BytesReference.bytes(XContentFactory.jsonBuilder().startObject() .field("join_field", "unknown") .endObject()), XContentType.JSON))); @@ -109,22 +109,22 @@ public void testParentIdSpecifiedAsNumber() throws Exception { IndexService service = createIndex("test"); DocumentMapper docMapper = service.mapperService().merge("type", new CompressedXContent(mapping), MapperService.MergeReason.MAPPING_UPDATE); - ParsedDocument doc = docMapper.parse(SourceToParse.source("test", "type", "2", + ParsedDocument doc = docMapper.parse(new SourceToParse("test", "type", "2", BytesReference.bytes(XContentFactory.jsonBuilder().startObject() .startObject("join_field") .field("name", "child") .field("parent", 1) .endObject() - .endObject()), XContentType.JSON).routing("1")); + .endObject()), XContentType.JSON, "1")); assertEquals("1", doc.rootDoc().getBinaryValue("join_field#parent").utf8ToString()); assertEquals("child", doc.rootDoc().getBinaryValue("join_field").utf8ToString()); - doc = docMapper.parse(SourceToParse.source("test", "type", "2", + doc = docMapper.parse(new SourceToParse("test", "type", "2", BytesReference.bytes(XContentFactory.jsonBuilder().startObject() .startObject("join_field") .field("name", "child") .field("parent", 1.0) .endObject() - .endObject()), XContentType.JSON).routing("1")); + .endObject()), XContentType.JSON, "1")); assertEquals("1.0", doc.rootDoc().getBinaryValue("join_field#parent").utf8ToString()); assertEquals("child", doc.rootDoc().getBinaryValue("join_field").utf8ToString()); } @@ -147,12 +147,12 @@ public void testMultipleLevels() throws Exception { assertTrue(docMapper.mappers().getMapper("join_field") == ParentJoinFieldMapper.getMapper(service.mapperService())); // Doc without join - ParsedDocument doc = docMapper.parse(SourceToParse.source("test", "type", "0", + ParsedDocument doc = docMapper.parse(new SourceToParse("test", "type", "0", BytesReference.bytes(XContentFactory.jsonBuilder().startObject().endObject()), XContentType.JSON)); assertNull(doc.rootDoc().getBinaryValue("join_field")); // Doc parent - doc = docMapper.parse(SourceToParse.source("test", "type", "1", + doc = docMapper.parse(new SourceToParse("test", "type", "1", BytesReference.bytes(XContentFactory.jsonBuilder() .startObject() .field("join_field", "parent") @@ -161,28 +161,28 @@ public void testMultipleLevels() throws Exception { assertEquals("parent", doc.rootDoc().getBinaryValue("join_field").utf8ToString()); // Doc child - doc = docMapper.parse(SourceToParse.source("test", "type", "2", + doc = docMapper.parse(new SourceToParse("test", "type", "2", BytesReference.bytes(XContentFactory.jsonBuilder().startObject() .startObject("join_field") .field("name", "child") .field("parent", "1") .endObject() - .endObject()), XContentType.JSON).routing("1")); + .endObject()), XContentType.JSON, "1")); assertEquals("1", 
doc.rootDoc().getBinaryValue("join_field#parent").utf8ToString()); assertEquals("2", doc.rootDoc().getBinaryValue("join_field#child").utf8ToString()); assertEquals("child", doc.rootDoc().getBinaryValue("join_field").utf8ToString()); // Doc child missing parent MapperException exc = expectThrows(MapperParsingException.class, - () -> docMapper.parse(SourceToParse.source("test", "type", "2", + () -> docMapper.parse(new SourceToParse("test", "type", "2", BytesReference.bytes(XContentFactory.jsonBuilder().startObject() .field("join_field", "child") - .endObject()), XContentType.JSON).routing("1"))); + .endObject()), XContentType.JSON, "1"))); assertThat(exc.getRootCause().getMessage(), containsString("[parent] is missing for join field [join_field]")); // Doc child missing routing exc = expectThrows(MapperParsingException.class, - () -> docMapper.parse(SourceToParse.source("test", "type", "2", + () -> docMapper.parse(new SourceToParse("test", "type", "2", BytesReference.bytes(XContentFactory.jsonBuilder().startObject() .startObject("join_field") .field("name", "child") @@ -192,19 +192,19 @@ public void testMultipleLevels() throws Exception { assertThat(exc.getRootCause().getMessage(), containsString("[routing] is missing for join field [join_field]")); // Doc grand_child - doc = docMapper.parse(SourceToParse.source("test", "type", "3", + doc = docMapper.parse(new SourceToParse("test", "type", "3", BytesReference.bytes(XContentFactory.jsonBuilder().startObject() .startObject("join_field") .field("name", "grand_child") .field("parent", "2") .endObject() - .endObject()), XContentType.JSON).routing("1")); + .endObject()), XContentType.JSON, "1")); assertEquals("2", doc.rootDoc().getBinaryValue("join_field#child").utf8ToString()); assertEquals("grand_child", doc.rootDoc().getBinaryValue("join_field").utf8ToString()); - // Unkwnown join name + // Unknown join name exc = expectThrows(MapperParsingException.class, - () -> docMapper.parse(SourceToParse.source("test", "type", "1", + () -> docMapper.parse(new SourceToParse("test", "type", "1", BytesReference.bytes(XContentFactory.jsonBuilder().startObject() .field("join_field", "unknown") .endObject()), XContentType.JSON))); diff --git a/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolateQueryBuilder.java b/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolateQueryBuilder.java index 3c0076ea18f9e..2cb37cb794dda 100644 --- a/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolateQueryBuilder.java +++ b/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolateQueryBuilder.java @@ -71,6 +71,7 @@ import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.ParseContext; import org.elasticsearch.index.mapper.ParsedDocument; +import org.elasticsearch.index.mapper.SourceToParse; import org.elasticsearch.index.query.AbstractQueryBuilder; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.QueryRewriteContext; @@ -89,7 +90,6 @@ import java.util.Objects; import java.util.function.Supplier; -import static org.elasticsearch.index.mapper.SourceToParse.source; import static org.elasticsearch.percolator.PercolatorFieldMapper.parseQuery; public class PercolateQueryBuilder extends AbstractQueryBuilder<PercolateQueryBuilder> { @@ -585,7 +585,7 @@ protected Query doToQuery(QueryShardContext context) throws IOException { } docMapper = mapperService.documentMapper(type); for (BytesReference document : documents) { -
docs.add(docMapper.parse(source(context.index().getName(), type, "_temp_id", document, documentXContentType))); + docs.add(docMapper.parse(new SourceToParse(context.index().getName(), type, "_temp_id", document, documentXContentType))); } FieldNameAnalyzer fieldNameAnalyzer = (FieldNameAnalyzer) docMapper.mappers().indexAnalyzer(); diff --git a/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolateQueryBuilderTests.java b/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolateQueryBuilderTests.java index d4fe0fe1dddd4..e4731919fa7d0 100644 --- a/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolateQueryBuilderTests.java +++ b/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolateQueryBuilderTests.java @@ -269,7 +269,7 @@ private static BytesReference randomSource(Set<String> usedFields) { } @Override - protected boolean isCachable(PercolateQueryBuilder queryBuilder) { + protected boolean isCacheable(PercolateQueryBuilder queryBuilder) { return false; } diff --git a/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorFieldMapperTests.java b/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorFieldMapperTests.java index 80524a2f862fe..f1747d1977561 100644 --- a/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorFieldMapperTests.java +++ b/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorFieldMapperTests.java @@ -479,7 +479,7 @@ public void testExtractTermsAndRanges_numberFields() throws Exception { public void testPercolatorFieldMapper() throws Exception { addQueryFieldMappings(); QueryBuilder queryBuilder = termQuery("field", "value"); - ParsedDocument doc = mapperService.documentMapper("doc").parse(SourceToParse.source("test", "doc", "1", + ParsedDocument doc = mapperService.documentMapper("doc").parse(new SourceToParse("test", "doc", "1", BytesReference.bytes(XContentFactory .jsonBuilder() .startObject() @@ -498,7 +498,7 @@ public void testPercolatorFieldMapper() throws Exception { // add an query for which we don't extract terms from queryBuilder = rangeQuery("field").from("a").to("z"); - doc = mapperService.documentMapper("doc").parse(SourceToParse.source("test", "doc", "1", BytesReference.bytes(XContentFactory + doc = mapperService.documentMapper("doc").parse(new SourceToParse("test", "doc", "1", BytesReference.bytes(XContentFactory .jsonBuilder() .startObject() .field(fieldName, queryBuilder) @@ -524,7 +524,7 @@ public void testStoringQueries() throws Exception { // (it can't use shard data for rewriting purposes, because percolator queries run on MemoryIndex) for (QueryBuilder query : queries) { - ParsedDocument doc = mapperService.documentMapper("doc").parse(SourceToParse.source("test", "doc", "1", + ParsedDocument doc = mapperService.documentMapper("doc").parse(new SourceToParse("test", "doc", "1", BytesReference.bytes(XContentFactory.jsonBuilder().startObject() .field(fieldName, query) .endObject()), @@ -537,8 +537,8 @@ public void testStoringQueries() throws Exception { public void testQueryWithRewrite() throws Exception { addQueryFieldMappings(); client().prepareIndex("remote", "doc", "1").setSource("field", "value").get(); - QueryBuilder queryBuilder = termsLookupQuery("field", new TermsLookup("remote", "doc", "1", "field")); - ParsedDocument doc = mapperService.documentMapper("doc").parse(SourceToParse.source("test", "doc", "1", + QueryBuilder queryBuilder = termsLookupQuery("field", new TermsLookup("remote", "1", "field"));
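One substantive (non-mechanical) change hides in the hunk above: the TermsLookup used by testQueryWithRewrite drops its type argument, going from ("remote", "doc", "1", "field") to ("remote", "1", "field"). A self-contained sketch of the typeless form, reusing the test's values (the class and method names are illustrative, not from this PR):

    import org.elasticsearch.index.query.QueryBuilder;
    import org.elasticsearch.indices.TermsLookup;
    import static org.elasticsearch.index.query.QueryBuilders.termsLookupQuery;

    class TypelessTermsLookupExample {
        // Terms are fetched from the "field" path of document "1" in index "remote";
        // with types on their way out, no type name is involved.
        QueryBuilder query() {
            return termsLookupQuery("field", new TermsLookup("remote", "1", "field"));
        }
    }

+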
ParsedDocument doc = mapperService.documentMapper("doc").parse(new SourceToParse("test", "doc", "1", BytesReference.bytes(XContentFactory .jsonBuilder() .startObject() @@ -559,7 +559,7 @@ public void testQueryWithRewrite() throws Exception { public void testPercolatorFieldMapperUnMappedField() throws Exception { addQueryFieldMappings(); MapperParsingException exception = expectThrows(MapperParsingException.class, () -> { - mapperService.documentMapper("doc").parse(SourceToParse.source("test", "doc", "1", BytesReference.bytes(XContentFactory + mapperService.documentMapper("doc").parse(new SourceToParse("test", "doc", "1", BytesReference.bytes(XContentFactory .jsonBuilder() .startObject() .field(fieldName, termQuery("unmapped_field", "value")) @@ -573,7 +573,7 @@ public void testPercolatorFieldMapperUnMappedField() throws Exception { public void testPercolatorFieldMapper_noQuery() throws Exception { addQueryFieldMappings(); - ParsedDocument doc = mapperService.documentMapper("doc").parse(SourceToParse.source("test", "doc", "1", BytesReference + ParsedDocument doc = mapperService.documentMapper("doc").parse(new SourceToParse("test", "doc", "1", BytesReference .bytes(XContentFactory .jsonBuilder() .startObject() @@ -582,7 +582,7 @@ public void testPercolatorFieldMapper_noQuery() throws Exception { assertThat(doc.rootDoc().getFields(fieldType.queryBuilderField.name()).length, equalTo(0)); try { - mapperService.documentMapper("doc").parse(SourceToParse.source("test", "doc", "1", BytesReference.bytes(XContentFactory + mapperService.documentMapper("doc").parse(new SourceToParse("test", "doc", "1", BytesReference.bytes(XContentFactory .jsonBuilder() .startObject() .nullField(fieldName) @@ -619,7 +619,7 @@ public void testMultiplePercolatorFields() throws Exception { mapperService.merge(typeName, new CompressedXContent(percolatorMapper), MapperService.MergeReason.MAPPING_UPDATE); QueryBuilder queryBuilder = matchQuery("field", "value"); - ParsedDocument doc = mapperService.documentMapper(typeName).parse(SourceToParse.source("test", typeName, "1", + ParsedDocument doc = mapperService.documentMapper(typeName).parse(new SourceToParse("test", typeName, "1", BytesReference.bytes(jsonBuilder().startObject() .field("query_field1", queryBuilder) .field("query_field2", queryBuilder) @@ -650,7 +650,7 @@ public void testNestedPercolatorField() throws Exception { mapperService.merge(typeName, new CompressedXContent(percolatorMapper), MapperService.MergeReason.MAPPING_UPDATE); QueryBuilder queryBuilder = matchQuery("field", "value"); - ParsedDocument doc = mapperService.documentMapper(typeName).parse(SourceToParse.source("test", typeName, "1", + ParsedDocument doc = mapperService.documentMapper(typeName).parse(new SourceToParse("test", typeName, "1", BytesReference.bytes(jsonBuilder().startObject().startObject("object_field") .field("query_field", queryBuilder) .endObject().endObject()), @@ -659,7 +659,7 @@ public void testNestedPercolatorField() throws Exception { BytesRef queryBuilderAsBytes = doc.rootDoc().getField("object_field.query_field.query_builder_field").binaryValue(); assertQueryBuilder(queryBuilderAsBytes, queryBuilder); - doc = mapperService.documentMapper(typeName).parse(SourceToParse.source("test", typeName, "1", + doc = mapperService.documentMapper(typeName).parse(new SourceToParse("test", typeName, "1", BytesReference.bytes(jsonBuilder().startObject() .startArray("object_field") .startObject().field("query_field", queryBuilder).endObject() @@ -671,7 +671,7 @@ public void 
testNestedPercolatorField() throws Exception { assertQueryBuilder(queryBuilderAsBytes, queryBuilder); MapperParsingException e = expectThrows(MapperParsingException.class, () -> { - mapperService.documentMapper(typeName).parse(SourceToParse.source("test", typeName, "1", + mapperService.documentMapper(typeName).parse(new SourceToParse("test", typeName, "1", BytesReference.bytes(jsonBuilder().startObject() .startArray("object_field") .startObject().field("query_field", queryBuilder).endObject() @@ -756,7 +756,7 @@ public void testImplicitlySetDefaultScriptLang() throws Exception { query.endObject(); query.endObject(); - ParsedDocument doc = mapperService.documentMapper("doc").parse(SourceToParse.source("test", "doc", "1", + ParsedDocument doc = mapperService.documentMapper("doc").parse(new SourceToParse("test", "doc", "1", BytesReference.bytes(XContentFactory.jsonBuilder().startObject() .rawField(fieldName, new BytesArray(Strings.toString(query)).streamInput(), query.contentType()) .endObject()), @@ -794,7 +794,7 @@ public void testImplicitlySetDefaultScriptLang() throws Exception { query.endObject(); query.endObject(); - doc = mapperService.documentMapper("doc").parse(SourceToParse.source("test", "doc", "1", + doc = mapperService.documentMapper("doc").parse(new SourceToParse("test", "doc", "1", BytesReference.bytes(XContentFactory.jsonBuilder().startObject() .rawField(fieldName, new BytesArray(Strings.toString(query)).streamInput(), query.contentType()) .endObject()), @@ -880,7 +880,7 @@ public void testDuplicatedClauses() throws Exception { QueryBuilder qb = boolQuery() .must(boolQuery().must(termQuery("field", "value1")).must(termQuery("field", "value2"))) .must(boolQuery().must(termQuery("field", "value2")).must(termQuery("field", "value3"))); - ParsedDocument doc = mapperService.documentMapper("doc").parse(SourceToParse.source("test", "doc", "1", + ParsedDocument doc = mapperService.documentMapper("doc").parse(new SourceToParse("test", "doc", "1", BytesReference.bytes(XContentFactory.jsonBuilder().startObject() .field(fieldName, qb) .endObject()), @@ -902,7 +902,7 @@ public void testDuplicatedClauses() throws Exception { .must(boolQuery().must(termQuery("field", "value2")).must(termQuery("field", "value3"))) .must(boolQuery().must(termQuery("field", "value3")).must(termQuery("field", "value4"))) .must(boolQuery().should(termQuery("field", "value4")).should(termQuery("field", "value5"))); - doc = mapperService.documentMapper("doc").parse(SourceToParse.source("test", "doc", "1", + doc = mapperService.documentMapper("doc").parse(new SourceToParse("test", "doc", "1", BytesReference.bytes(XContentFactory.jsonBuilder().startObject() .field(fieldName, qb) .endObject()), @@ -927,7 +927,7 @@ public void testDuplicatedClauses() throws Exception { .should(boolQuery().should(termQuery("field", "value2")).should(termQuery("field", "value3"))) .should(boolQuery().should(termQuery("field", "value3")).should(termQuery("field", "value4"))) .should(boolQuery().should(termQuery("field", "value4")).should(termQuery("field", "value5"))); - doc = mapperService.documentMapper("doc").parse(SourceToParse.source("test", "doc", "1", + doc = mapperService.documentMapper("doc").parse(new SourceToParse("test", "doc", "1", BytesReference.bytes(XContentFactory.jsonBuilder().startObject() .field(fieldName, qb) .endObject()), diff --git a/modules/reindex/build.gradle b/modules/reindex/build.gradle index 96b49e0a50c62..d10d8c34a0c81 100644 --- a/modules/reindex/build.gradle +++ b/modules/reindex/build.gradle @@ -62,14 
+62,14 @@ dependencies { testCompile project(path: ':modules:parent-join', configuration: 'runtime') } -thirdPartyAudit.excludes = [ +thirdPartyAudit.ignoreMissingClasses ( // Commons logging 'javax.servlet.ServletContextEvent', 'javax.servlet.ServletContextListener', 'org.apache.avalon.framework.logger.Logger', 'org.apache.log.Hierarchy', 'org.apache.log.Logger', -] +) // Support for testing reindex-from-remote against old Elaticsearch versions configurations { diff --git a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/RestReindexAction.java b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/RestReindexAction.java index 150853b3170a4..ae9ca0be7ca65 100644 --- a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/RestReindexAction.java +++ b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/RestReindexAction.java @@ -19,11 +19,13 @@ package org.elasticsearch.index.reindex; +import org.apache.logging.log4j.LogManager; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.xcontent.ObjectParser; @@ -56,6 +58,8 @@ */ public class RestReindexAction extends AbstractBaseReindexRestHandler<ReindexRequest> { static final ObjectParser<ReindexRequest, Void> PARSER = new ObjectParser<>("reindex"); + static final String TYPES_DEPRECATION_MESSAGE = "[types removal] Specifying types in reindex requests is deprecated."; + private static final DeprecationLogger deprecationLogger = new DeprecationLogger(LogManager.getLogger(RestReindexAction.class)); static { ObjectParser.Parser<ReindexRequest, Void> sourceParser = (parser, request, context) -> { @@ -67,6 +71,7 @@ public class RestReindexAction extends AbstractBaseReindexRestHandler<ReindexRequest> ObjectParser<IndexRequest, Void> destParser = new ObjectParser<>("dest"); destParser.declareString(IndexRequest::index, new ParseField("index")); - destParser.declareString(IndexRequest::type, new ParseField("type")); + destParser.declareString((request, type) -> { + deprecationLogger.deprecatedAndMaybeLog("reindex_with_types", TYPES_DEPRECATION_MESSAGE); + request.type(type); + }, new ParseField("type")); destParser.declareString(IndexRequest::routing, new ParseField("routing")); destParser.declareString(IndexRequest::opType, new ParseField("op_type")); destParser.declareString(IndexRequest::setPipeline, new ParseField("pipeline")); diff --git a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ManyDocumentsIT.java b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ManyDocumentsIT.java index 6aa1046492ccc..b86f28452cc96 100644 --- a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ManyDocumentsIT.java +++ b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ManyDocumentsIT.java @@ -44,7 +44,7 @@ public void setupTestIndex() throws IOException { bulk.append("{\"index\":{}}\n"); bulk.append("{\"test\":\"test\"}\n"); } - Request request = new Request("POST", "/test/test/_bulk"); + Request request = new Request("POST", "/test/_bulk"); request.addParameter("refresh", "true"); request.setJsonEntity(bulk.toString()); client().performRequest(request);
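Functionally, the dest-parser hunk above preserves the old behavior while deprecating it: a reindex body may still set a type on the destination, but doing so now emits a deprecation warning. A hypothetical end-to-end sketch using the low-level REST client; the host, index names, and header handling are illustrative and not part of this change:

    import org.apache.http.Header;
    import org.apache.http.HttpHost;
    import org.elasticsearch.client.Request;
    import org.elasticsearch.client.Response;
    import org.elasticsearch.client.RestClient;

    public class ReindexWithTypeExample {
        public static void main(String[] args) throws Exception {
            try (RestClient client = RestClient.builder(new HttpHost("localhost", 9200, "http")).build()) {
                Request reindex = new Request("POST", "/_reindex");
                // The "type" key under "dest" is what now trips the deprecation logger.
                reindex.setJsonEntity(
                    "{\"source\":{\"index\":\"src\"},\"dest\":{\"index\":\"dst\",\"type\":\"_doc\"}}");
                Response response = client.performRequest(reindex);
                for (Header header : response.getHeaders()) {
                    if ("Warning".equalsIgnoreCase(header.getName())) {
                        // Expect TYPES_DEPRECATION_MESSAGE to appear here.
                        System.out.println(header.getValue());
                    }
                }
            }
        }
    }

diff --git a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/RestReindexActionTests.java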
b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/RestReindexActionTests.java index 1c6b60b705a3a..f0aca38545b4c 100644 --- a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/RestReindexActionTests.java +++ b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/RestReindexActionTests.java @@ -26,19 +26,28 @@ import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.common.xcontent.json.JsonXContent; -import org.elasticsearch.rest.RestController; -import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.rest.RestRequest.Method; import org.elasticsearch.test.rest.FakeRestRequest; +import org.elasticsearch.test.rest.RestActionTestCase; +import org.junit.Before; import java.io.IOException; +import java.util.Arrays; import java.util.HashMap; import java.util.Map; import static java.util.Collections.singletonMap; import static org.elasticsearch.common.unit.TimeValue.timeValueSeconds; -import static org.mockito.Mockito.mock; -public class RestReindexActionTests extends ESTestCase { +public class RestReindexActionTests extends RestActionTestCase { + + private RestReindexAction action; + + @Before + public void setUpAction() { + action = new RestReindexAction(Settings.EMPTY, controller()); + } + public void testBuildRemoteInfoNoRemote() throws IOException { assertNull(RestReindexAction.buildRemoteInfo(new HashMap<>())); } @@ -160,8 +169,6 @@ public void testReindexFromRemoteRequestParsing() throws IOException { } public void testPipelineQueryParameterIsError() throws IOException { - RestReindexAction action = new RestReindexAction(Settings.EMPTY, mock(RestController.class)); - FakeRestRequest.Builder request = new FakeRestRequest.Builder(xContentRegistry()); try (XContentBuilder body = JsonXContent.contentBuilder().prettyPrint()) { body.startObject(); { @@ -185,14 +192,12 @@ public void testPipelineQueryParameterIsError() throws IOException { public void testSetScrollTimeout() throws IOException { { - RestReindexAction action = new RestReindexAction(Settings.EMPTY, mock(RestController.class)); FakeRestRequest.Builder requestBuilder = new FakeRestRequest.Builder(xContentRegistry()); requestBuilder.withContent(new BytesArray("{}"), XContentType.JSON); ReindexRequest request = action.buildRequest(requestBuilder.build()); assertEquals(AbstractBulkByScrollRequest.DEFAULT_SCROLL_TIMEOUT, request.getScrollTime()); } { - RestReindexAction action = new RestReindexAction(Settings.EMPTY, mock(RestController.class)); FakeRestRequest.Builder requestBuilder = new FakeRestRequest.Builder(xContentRegistry()); requestBuilder.withParams(singletonMap("scroll", "10m")); requestBuilder.withContent(new BytesArray("{}"), XContentType.JSON); @@ -210,4 +215,46 @@ private RemoteInfo buildRemoteInfoHostTestCase(String hostInRest) throws IOExcep return RestReindexAction.buildRemoteInfo(source); } + + /** + * test deprecation is logged if one or more types are used in source search request inside reindex + */ + public void testTypeInSource() throws IOException { + FakeRestRequest.Builder requestBuilder = new FakeRestRequest.Builder(xContentRegistry()) + .withMethod(Method.POST) + .withPath("/_reindex"); + XContentBuilder b = JsonXContent.contentBuilder().startObject(); + { + b.startObject("source"); + { + b.field("type", randomFrom(Arrays.asList("\"t1\"", "[\"t1\", \"t2\"]", "\"_doc\""))); + } + b.endObject(); + } + b.endObject(); + requestBuilder.withContent(new 
BytesArray(BytesReference.bytes(b).toBytesRef()), XContentType.JSON); + dispatchRequest(requestBuilder.build()); + assertWarnings(RestReindexAction.TYPES_DEPRECATION_MESSAGE); + } + + /** + * test deprecation is logged if a type is used in the destination index request inside reindex + */ + public void testTypeInDestination() throws IOException { + FakeRestRequest.Builder requestBuilder = new FakeRestRequest.Builder(xContentRegistry()) + .withMethod(Method.POST) + .withPath("/_reindex"); + XContentBuilder b = JsonXContent.contentBuilder().startObject(); + { + b.startObject("dest"); + { + b.field("type", (randomBoolean() ? "_doc" : randomAlphaOfLength(4))); + } + b.endObject(); + } + b.endObject(); + requestBuilder.withContent(new BytesArray(BytesReference.bytes(b).toBytesRef()), XContentType.JSON); + dispatchRequest(requestBuilder.build()); + assertWarnings(RestReindexAction.TYPES_DEPRECATION_MESSAGE); + } } diff --git a/modules/repository-url/src/main/java/org/elasticsearch/common/blobstore/url/URLBlobContainer.java b/modules/repository-url/src/main/java/org/elasticsearch/common/blobstore/url/URLBlobContainer.java index 7b72871f4f78d..91676037043e3 100644 --- a/modules/repository-url/src/main/java/org/elasticsearch/common/blobstore/url/URLBlobContainer.java +++ b/modules/repository-url/src/main/java/org/elasticsearch/common/blobstore/url/URLBlobContainer.java @@ -112,6 +112,11 @@ public void writeBlob(String blobName, InputStream inputStream, long blobSize, b throw new UnsupportedOperationException("URL repository doesn't support this operation"); } + @Override + public void writeBlobAtomic(String blobName, InputStream inputStream, long blobSize, boolean failIfAlreadyExists) throws IOException { + throw new UnsupportedOperationException("URL repository doesn't support this operation"); + } + @SuppressForbidden(reason = "We call connect in doPrivileged and provide SocketPermission") private static InputStream getInputStream(URL url) throws IOException { try { diff --git a/modules/transport-netty4/build.gradle b/modules/transport-netty4/build.gradle index 58eda4445c646..787a4b6e9ecff 100644 --- a/modules/transport-netty4/build.gradle +++ b/modules/transport-netty4/build.gradle @@ -63,7 +63,8 @@ integTestRunner { systemProperty 'es.set.netty.runtime.available.processors', 'false' } -thirdPartyAudit.excludes = [ +thirdPartyAudit { + ignoreMissingClasses ( // classes are missing // from io.netty.handler.codec.protobuf.ProtobufDecoder (netty) @@ -143,7 +144,14 @@ thirdPartyAudit.excludes = [ 'org.eclipse.jetty.alpn.ALPN$ServerProvider', 'org.eclipse.jetty.alpn.ALPN', - 'io.netty.handler.ssl.util.OpenJdkSelfSignedCertGenerator', + + 'org.conscrypt.AllocatedBuffer', + 'org.conscrypt.BufferAllocator', + 'org.conscrypt.Conscrypt', + 'org.conscrypt.HandshakeListener' + ) + + ignoreViolations ( 'io.netty.util.internal.PlatformDependent0', 'io.netty.util.internal.PlatformDependent0$1', 'io.netty.util.internal.PlatformDependent0$2', @@ -160,17 +168,14 @@ thirdPartyAudit.excludes = [ 'io.netty.util.internal.shaded.org.jctools.queues.MpscArrayQueueProducerLimitField', 'io.netty.util.internal.shaded.org.jctools.util.UnsafeAccess', 'io.netty.util.internal.shaded.org.jctools.util.UnsafeRefArrayAccess', - - 'org.conscrypt.AllocatedBuffer', - 'org.conscrypt.BufferAllocator', - 'org.conscrypt.Conscrypt', - 'org.conscrypt.HandshakeListener' -] + 'io.netty.handler.ssl.util.OpenJdkSelfSignedCertGenerator' + ) +} if (project.inFipsJvm == false) { // BouncyCastleFIPS provides this class, so the exclusion is invalid 
when running CI in // a FIPS JVM with BouncyCastleFIPS Provider - thirdPartyAudit.excludes += [ + thirdPartyAudit.ignoreMissingClasses ( 'org.bouncycastle.asn1.x500.X500Name' - ] + ) } diff --git a/plugins/analysis-icu/licenses/lucene-analyzers-icu-8.0.0-snapshot-774e9aefbc.jar.sha1 b/plugins/analysis-icu/licenses/lucene-analyzers-icu-8.0.0-snapshot-774e9aefbc.jar.sha1 deleted file mode 100644 index bb3b62e257cb8..0000000000000 --- a/plugins/analysis-icu/licenses/lucene-analyzers-icu-8.0.0-snapshot-774e9aefbc.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -143e925924dcc9cb8ad1b584727c2c3b6c9e5633 \ No newline at end of file diff --git a/plugins/analysis-icu/licenses/lucene-analyzers-icu-8.0.0-snapshot-a1c6e642aa.jar.sha1 b/plugins/analysis-icu/licenses/lucene-analyzers-icu-8.0.0-snapshot-a1c6e642aa.jar.sha1 new file mode 100644 index 0000000000000..4d7a031054064 --- /dev/null +++ b/plugins/analysis-icu/licenses/lucene-analyzers-icu-8.0.0-snapshot-a1c6e642aa.jar.sha1 @@ -0,0 +1 @@ +5b0beeaa95c28e5e0679d684d4e2e30e90cf53e7 \ No newline at end of file diff --git a/plugins/analysis-icu/src/test/java/org/elasticsearch/index/mapper/ICUCollationKeywordFieldMapperTests.java b/plugins/analysis-icu/src/test/java/org/elasticsearch/index/mapper/ICUCollationKeywordFieldMapperTests.java index f39ae886dc45b..103098d5a4620 100644 --- a/plugins/analysis-icu/src/test/java/org/elasticsearch/index/mapper/ICUCollationKeywordFieldMapperTests.java +++ b/plugins/analysis-icu/src/test/java/org/elasticsearch/index/mapper/ICUCollationKeywordFieldMapperTests.java @@ -72,7 +72,7 @@ public void testDefaults() throws Exception { assertEquals(mapping, mapper.mappingSource().toString()); - ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference + ParsedDocument doc = mapper.parse(new SourceToParse("test", "type", "1", BytesReference .bytes(XContentFactory.jsonBuilder() .startObject() .field("field", "1234") @@ -112,7 +112,7 @@ public void testNullValue() throws IOException { DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); assertEquals(mapping, mapper.mappingSource().toString()); - ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference + ParsedDocument doc = mapper.parse(new SourceToParse("test", "type", "1", BytesReference .bytes(XContentFactory.jsonBuilder() .startObject() .nullField("field") @@ -129,7 +129,7 @@ public void testNullValue() throws IOException { assertEquals(mapping, mapper.mappingSource().toString()); - doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference + doc = mapper.parse(new SourceToParse("test", "type", "1", BytesReference .bytes(XContentFactory.jsonBuilder() .startObject() .endObject()), @@ -138,7 +138,7 @@ public void testNullValue() throws IOException { IndexableField[] fields = doc.rootDoc().getFields("field"); assertEquals(0, fields.length); - doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference + doc = mapper.parse(new SourceToParse("test", "type", "1", BytesReference .bytes(XContentFactory.jsonBuilder() .startObject() .nullField("field") @@ -164,7 +164,7 @@ public void testEnableStore() throws IOException { assertEquals(mapping, mapper.mappingSource().toString()); - ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference + ParsedDocument doc = mapper.parse(new SourceToParse("test", "type", "1", BytesReference .bytes(XContentFactory.jsonBuilder() .startObject() .field("field", "1234") @@ -186,7 +186,7 @@ public void 
testDisableIndex() throws IOException { assertEquals(mapping, mapper.mappingSource().toString()); - ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference + ParsedDocument doc = mapper.parse(new SourceToParse("test", "type", "1", BytesReference .bytes(XContentFactory.jsonBuilder() .startObject() .field("field", "1234") @@ -209,7 +209,7 @@ public void testDisableDocValues() throws IOException { assertEquals(mapping, mapper.mappingSource().toString()); - ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference + ParsedDocument doc = mapper.parse(new SourceToParse("test", "type", "1", BytesReference .bytes(XContentFactory.jsonBuilder() .startObject() .field("field", "1234") @@ -230,7 +230,7 @@ public void testMultipleValues() throws IOException { assertEquals(mapping, mapper.mappingSource().toString()); - ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference + ParsedDocument doc = mapper.parse(new SourceToParse("test", "type", "1", BytesReference .bytes(XContentFactory.jsonBuilder() .startObject() .field("field", Arrays.asList("1234", "5678")) @@ -293,7 +293,7 @@ public void testIndexOptions() throws IOException { assertEquals(mapping, mapper.mappingSource().toString()); - ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference + ParsedDocument doc = mapper.parse(new SourceToParse("test", "type", "1", BytesReference .bytes(XContentFactory.jsonBuilder() .startObject() .field("field", "1234") @@ -326,7 +326,7 @@ public void testEnableNorms() throws IOException { assertEquals(mapping, mapper.mappingSource().toString()); - ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference + ParsedDocument doc = mapper.parse(new SourceToParse("test", "type", "1", BytesReference .bytes(XContentFactory.jsonBuilder() .startObject() .field("field", "1234") @@ -350,7 +350,7 @@ public void testCollator() throws IOException { assertEquals(mapping, mapper.mappingSource().toString()); - ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference + ParsedDocument doc = mapper.parse(new SourceToParse("test", "type", "1", BytesReference .bytes(XContentFactory.jsonBuilder() .startObject() .field("field", "I WİLL USE TURKİSH CASING") diff --git a/plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-8.0.0-snapshot-774e9aefbc.jar.sha1 b/plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-8.0.0-snapshot-774e9aefbc.jar.sha1 deleted file mode 100644 index 6619c51a126ce..0000000000000 --- a/plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-8.0.0-snapshot-774e9aefbc.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -53281a354724cf52babb0460e51d5b6ec99ecad4 \ No newline at end of file diff --git a/plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-8.0.0-snapshot-a1c6e642aa.jar.sha1 b/plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-8.0.0-snapshot-a1c6e642aa.jar.sha1 new file mode 100644 index 0000000000000..0c588e1607a0d --- /dev/null +++ b/plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-8.0.0-snapshot-a1c6e642aa.jar.sha1 @@ -0,0 +1 @@ +fff58bb761b71ded4bf1bfd41bad522df5c67f5c \ No newline at end of file diff --git a/plugins/analysis-nori/licenses/lucene-analyzers-nori-8.0.0-snapshot-774e9aefbc.jar.sha1 b/plugins/analysis-nori/licenses/lucene-analyzers-nori-8.0.0-snapshot-774e9aefbc.jar.sha1 deleted file mode 100644 index 1c3043e79f44b..0000000000000 ---
a/plugins/analysis-nori/licenses/lucene-analyzers-nori-8.0.0-snapshot-774e9aefbc.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -d12356cdbcf4ed17586fef5e6fd1a6ea068821b5 \ No newline at end of file diff --git a/plugins/analysis-nori/licenses/lucene-analyzers-nori-8.0.0-snapshot-a1c6e642aa.jar.sha1 b/plugins/analysis-nori/licenses/lucene-analyzers-nori-8.0.0-snapshot-a1c6e642aa.jar.sha1 new file mode 100644 index 0000000000000..78a9e33ed22e1 --- /dev/null +++ b/plugins/analysis-nori/licenses/lucene-analyzers-nori-8.0.0-snapshot-a1c6e642aa.jar.sha1 @@ -0,0 +1 @@ +167a2b06379c4a1233a866ea88df17bb481e7200 \ No newline at end of file diff --git a/plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-8.0.0-snapshot-774e9aefbc.jar.sha1 b/plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-8.0.0-snapshot-774e9aefbc.jar.sha1 deleted file mode 100644 index d1aaaa89ee9d7..0000000000000 --- a/plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-8.0.0-snapshot-774e9aefbc.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -b0f3b0409db20717a5229bc639f703eca97ebd4c \ No newline at end of file diff --git a/plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-8.0.0-snapshot-a1c6e642aa.jar.sha1 b/plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-8.0.0-snapshot-a1c6e642aa.jar.sha1 new file mode 100644 index 0000000000000..1ea10265dcfef --- /dev/null +++ b/plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-8.0.0-snapshot-a1c6e642aa.jar.sha1 @@ -0,0 +1 @@ +77a1ed8e4544c31b7c949c9f0ddb7bc2a53adfb9 \ No newline at end of file diff --git a/plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-8.0.0-snapshot-774e9aefbc.jar.sha1 b/plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-8.0.0-snapshot-774e9aefbc.jar.sha1 deleted file mode 100644 index 3c9b69836435e..0000000000000 --- a/plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-8.0.0-snapshot-774e9aefbc.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -79f18e781a83062919eb60e06a96184ffda4a0c3 \ No newline at end of file diff --git a/plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-8.0.0-snapshot-a1c6e642aa.jar.sha1 b/plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-8.0.0-snapshot-a1c6e642aa.jar.sha1 new file mode 100644 index 0000000000000..0e69798085eb1 --- /dev/null +++ b/plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-8.0.0-snapshot-a1c6e642aa.jar.sha1 @@ -0,0 +1 @@ +49b825fc84de3f993eb161d1a38fdeefa9b5511a \ No newline at end of file diff --git a/plugins/analysis-stempel/licenses/lucene-analyzers-stempel-8.0.0-snapshot-774e9aefbc.jar.sha1 b/plugins/analysis-stempel/licenses/lucene-analyzers-stempel-8.0.0-snapshot-774e9aefbc.jar.sha1 deleted file mode 100644 index f73173c28e332..0000000000000 --- a/plugins/analysis-stempel/licenses/lucene-analyzers-stempel-8.0.0-snapshot-774e9aefbc.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -3c78ca17dd641a3efe1bea980e5290159867b85d \ No newline at end of file diff --git a/plugins/analysis-stempel/licenses/lucene-analyzers-stempel-8.0.0-snapshot-a1c6e642aa.jar.sha1 b/plugins/analysis-stempel/licenses/lucene-analyzers-stempel-8.0.0-snapshot-a1c6e642aa.jar.sha1 new file mode 100644 index 0000000000000..85d83e7674670 --- /dev/null +++ b/plugins/analysis-stempel/licenses/lucene-analyzers-stempel-8.0.0-snapshot-a1c6e642aa.jar.sha1 @@ -0,0 +1 @@ +6c60a0b708cd1c61ec34191df1bcf99b9211c08f \ No newline at end of file diff --git a/plugins/analysis-ukrainian/build.gradle b/plugins/analysis-ukrainian/build.gradle index 61f67021a3dcf..1f97bced9192f 100644 --- 
a/plugins/analysis-ukrainian/build.gradle +++ b/plugins/analysis-ukrainian/build.gradle @@ -34,7 +34,7 @@ dependencyLicenses { mapping from: /morfologik-.*/, to: 'lucene' } -thirdPartyAudit.excludes = [ +thirdPartyAudit.ignoreMissingClasses ( // we don't use the morfologik-fsa polish stemmer 'morfologik.stemming.polish.PolishStemmer' -] +) diff --git a/plugins/analysis-ukrainian/licenses/lucene-analyzers-morfologik-8.0.0-snapshot-774e9aefbc.jar.sha1 b/plugins/analysis-ukrainian/licenses/lucene-analyzers-morfologik-8.0.0-snapshot-774e9aefbc.jar.sha1 deleted file mode 100644 index d54fe8faafbd2..0000000000000 --- a/plugins/analysis-ukrainian/licenses/lucene-analyzers-morfologik-8.0.0-snapshot-774e9aefbc.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -019b424ea61617788f460218fbdd9c2107a7ff5a \ No newline at end of file diff --git a/plugins/analysis-ukrainian/licenses/lucene-analyzers-morfologik-8.0.0-snapshot-a1c6e642aa.jar.sha1 b/plugins/analysis-ukrainian/licenses/lucene-analyzers-morfologik-8.0.0-snapshot-a1c6e642aa.jar.sha1 new file mode 100644 index 0000000000000..eab26671dcaa3 --- /dev/null +++ b/plugins/analysis-ukrainian/licenses/lucene-analyzers-morfologik-8.0.0-snapshot-a1c6e642aa.jar.sha1 @@ -0,0 +1 @@ +553be14e6c3bb82b7e70f76ce2d294e4fa26fc20 \ No newline at end of file diff --git a/plugins/discovery-azure-classic/build.gradle b/plugins/discovery-azure-classic/build.gradle index 3dae3d3642c54..6b6498a9a5560 100644 --- a/plugins/discovery-azure-classic/build.gradle +++ b/plugins/discovery-azure-classic/build.gradle @@ -91,8 +91,7 @@ dependencyLicenses { mapping from: /jaxb-.*/, to: 'jaxb' } -thirdPartyAudit.excludes = [ - // classes are missing +thirdPartyAudit.ignoreMissingClasses ( 'javax.servlet.ServletContextEvent', 'javax.servlet.ServletContextListener', 'org.apache.avalon.framework.logger.Logger', @@ -124,12 +123,12 @@ thirdPartyAudit.excludes = [ 'org.osgi.framework.BundleEvent', 'org.osgi.framework.SynchronousBundleListener', 'com.sun.xml.fastinfoset.stax.StAXDocumentParser', - 'com.sun.xml.fastinfoset.stax.StAXDocumentSerializer', -] + 'com.sun.xml.fastinfoset.stax.StAXDocumentSerializer' +) // jarhell with jdk (intentionally, because jaxb was removed from default modules in java 9) if (project.runtimeJavaVersion <= JavaVersion.VERSION_1_8) { - thirdPartyAudit.excludes += [ + thirdPartyAudit.ignoreJarHellWithJDK ( 'javax.xml.bind.Binder', 'javax.xml.bind.ContextFinder$1', 'javax.xml.bind.ContextFinder', @@ -231,10 +230,9 @@ if (project.runtimeJavaVersion <= JavaVersion.VERSION_1_8) { 'javax.xml.bind.util.JAXBSource', 'javax.xml.bind.util.Messages', 'javax.xml.bind.util.ValidationEventCollector' - ] + ) } else { - // jarhell with jdk (intentionally, because we still expect to run again JDK 8) - thirdPartyAudit.excludes += [ + thirdPartyAudit.ignoreMissingClasses ( 'javax.activation.ActivationDataFlavor', 'javax.activation.DataContentHandler', 'javax.activation.DataHandler', @@ -243,5 +241,5 @@ if (project.runtimeJavaVersion <= JavaVersion.VERSION_1_8) { 'javax.activation.FileTypeMap', 'javax.activation.MimeType', 'javax.activation.MimeTypeParseException', - ] + ) } diff --git a/plugins/discovery-ec2/build.gradle b/plugins/discovery-ec2/build.gradle index 96fe8bb3fded4..31a20f09f0b55 100644 --- a/plugins/discovery-ec2/build.gradle +++ b/plugins/discovery-ec2/build.gradle @@ -79,7 +79,7 @@ check { dependsOn 'qa:amazon-ec2:check' } -thirdPartyAudit.excludes = [ +thirdPartyAudit.ignoreMissingClasses ( // classes are missing 'com.amazonaws.jmespath.JmesPathEvaluationVisitor', 
'com.amazonaws.jmespath.JmesPathExpression', @@ -105,12 +105,12 @@ thirdPartyAudit.excludes = [ 'javax.servlet.ServletContextListener', 'org.apache.avalon.framework.logger.Logger', 'org.apache.log.Hierarchy', - 'org.apache.log.Logger', -] + 'org.apache.log.Logger' +) if (project.runtimeJavaVersion > JavaVersion.VERSION_1_8) { - thirdPartyAudit.excludes += [ + thirdPartyAudit.ignoreMissingClasses ( 'javax.xml.bind.DatatypeConverter', 'javax.xml.bind.JAXBContext' - ] + ) } diff --git a/plugins/discovery-gce/build.gradle b/plugins/discovery-gce/build.gradle index 4d02cff011932..11d4a7e25fe46 100644 --- a/plugins/discovery-gce/build.gradle +++ b/plugins/discovery-gce/build.gradle @@ -1,5 +1,3 @@ -import org.elasticsearch.gradle.LoggedExec - esplugin { description 'The Google Compute Engine (GCE) Discovery plugin allows to use GCE API for the unicast discovery mechanism.' classname 'org.elasticsearch.plugin.discovery.gce.GceDiscoveryPlugin' @@ -36,7 +34,7 @@ unitTest { systemProperty 'tests.artifact', project.name } -thirdPartyAudit.excludes = [ +thirdPartyAudit.ignoreMissingClasses ( // classes are missing 'com.google.common.base.Splitter', 'com.google.common.collect.Lists', 'javax.servlet.ServletContextEvent', 'javax.servlet.ServletContextListener', 'org.apache.avalon.framework.logger.Logger', 'org.apache.log.Hierarchy', - 'org.apache.log.Logger', -] + 'org.apache.log.Logger' +) diff --git a/plugins/examples/custom-settings/src/main/java/org/elasticsearch/example/customsettings/ExampleCustomSettingsConfig.java b/plugins/examples/custom-settings/src/main/java/org/elasticsearch/example/customsettings/ExampleCustomSettingsConfig.java index fafe3615f639d..ffc7b4366b587 100644 --- a/plugins/examples/custom-settings/src/main/java/org/elasticsearch/example/customsettings/ExampleCustomSettingsConfig.java +++ b/plugins/examples/custom-settings/src/main/java/org/elasticsearch/example/customsettings/ExampleCustomSettingsConfig.java @@ -49,7 +49,7 @@ public class ExampleCustomSettingsConfig { /** * A string setting that can be dynamically updated and that is validated by some logic */ - static final Setting<String> VALIDATED_SETTING = Setting.simpleString("custom.validated", (value, settings) -> { + static final Setting<String> VALIDATED_SETTING = Setting.simpleString("custom.validated", value -> { if (value != null && value.contains("forbidden")) { throw new IllegalArgumentException("Setting must not contain [forbidden]"); } diff --git a/plugins/examples/rest-handler/build.gradle b/plugins/examples/rest-handler/build.gradle index bae98c247d134..025e570bedea4 100644 --- a/plugins/examples/rest-handler/build.gradle +++ b/plugins/examples/rest-handler/build.gradle @@ -41,4 +41,10 @@ integTestCluster { } integTestRunner { systemProperty 'external.address', "${ -> exampleFixture.addressAndPort }" +} + +testingConventions.naming { + IT { + baseClass 'org.elasticsearch.test.ESTestCase' + } } \ No newline at end of file diff --git a/plugins/ingest-attachment/build.gradle b/plugins/ingest-attachment/build.gradle index 6ffda42899e98..f75d7884db5aa 100644 --- a/plugins/ingest-attachment/build.gradle +++ b/plugins/ingest-attachment/build.gradle @@ -85,2067 +85,8 @@ forbiddenPatterns { exclude '**/*.vsdx' } -thirdPartyAudit.excludes = [ - // classes are missing: some due to our whitelisting of parsers - 'com.coremedia.iso.IsoFile', - 'com.coremedia.iso.boxes.Box', - 'com.coremedia.iso.boxes.Container', - 'com.coremedia.iso.boxes.FileTypeBox', - 'com.coremedia.iso.boxes.MetaBox', - 'com.coremedia.iso.boxes.MovieBox', -
'com.coremedia.iso.boxes.MovieHeaderBox', - 'com.coremedia.iso.boxes.SampleTableBox', - 'com.coremedia.iso.boxes.TrackBox', - 'com.coremedia.iso.boxes.TrackHeaderBox', - 'com.coremedia.iso.boxes.UserDataBox', - 'com.coremedia.iso.boxes.apple.AppleItemListBox', - 'com.coremedia.iso.boxes.sampleentry.AudioSampleEntry', - 'com.drew.imaging.jpeg.JpegMetadataReader', - 'com.drew.imaging.tiff.TiffMetadataReader', - 'com.drew.imaging.webp.WebpMetadataReader', - 'com.drew.lang.ByteArrayReader', - 'com.drew.lang.GeoLocation', - 'com.drew.lang.Rational', - 'com.drew.metadata.Directory', - 'com.drew.metadata.Metadata', - 'com.drew.metadata.Tag', - 'com.drew.metadata.exif.ExifIFD0Directory', - 'com.drew.metadata.exif.ExifReader', - 'com.drew.metadata.exif.ExifSubIFDDirectory', - 'com.drew.metadata.exif.ExifThumbnailDirectory', - 'com.drew.metadata.exif.GpsDirectory', - 'com.drew.metadata.iptc.IptcDirectory', - 'com.drew.metadata.jpeg.JpegCommentDirectory', - 'com.drew.metadata.jpeg.JpegDirectory', - 'com.epam.parso.Column', - 'com.epam.parso.DataWriterUtil', - 'com.epam.parso.SasFileProperties', - 'com.epam.parso.SasFileReader', - 'com.epam.parso.impl.SasFileReaderImpl', - 'com.github.junrar.Archive', - 'com.github.junrar.impl.FileVolumeManager', - 'com.github.junrar.rarfile.FileHeader', - 'com.github.luben.zstd.ZstdInputStream', - 'com.github.luben.zstd.ZstdOutputStream', - 'com.github.openjson.JSONArray', - 'com.github.openjson.JSONObject', - 'com.google.common.reflect.TypeToken', - 'com.google.gson.Gson', - 'com.googlecode.mp4parser.DataSource', - 'com.googlecode.mp4parser.boxes.apple.AppleAlbumBox', - 'com.googlecode.mp4parser.boxes.apple.AppleArtist2Box', - 'com.googlecode.mp4parser.boxes.apple.AppleArtistBox', - 'com.googlecode.mp4parser.boxes.apple.AppleCommentBox', - 'com.googlecode.mp4parser.boxes.apple.AppleCompilationBox', - 'com.googlecode.mp4parser.boxes.apple.AppleDiskNumberBox', - 'com.googlecode.mp4parser.boxes.apple.AppleEncoderBox', - 'com.googlecode.mp4parser.boxes.apple.AppleGenreBox', - 'com.googlecode.mp4parser.boxes.apple.AppleNameBox', - 'com.googlecode.mp4parser.boxes.apple.AppleRecordingYear2Box', - 'com.googlecode.mp4parser.boxes.apple.AppleTrackAuthorBox', - 'com.googlecode.mp4parser.boxes.apple.AppleTrackNumberBox', - 'com.googlecode.mp4parser.boxes.apple.Utf8AppleDataBox', - 'com.googlecode.mp4parser.util.CastUtils', - 'com.graphbuilder.curve.ControlPath', - 'com.graphbuilder.curve.GroupIterator', - 'com.graphbuilder.curve.NURBSpline', - 'com.graphbuilder.curve.ShapeMultiPath', - 'com.graphbuilder.curve.ValueVector', - 'com.graphbuilder.geom.PointFactory', - 'com.healthmarketscience.jackcess.Column', - 'com.healthmarketscience.jackcess.CryptCodecProvider', - 'com.healthmarketscience.jackcess.Database', - 'com.healthmarketscience.jackcess.DatabaseBuilder', - 'com.healthmarketscience.jackcess.DataType', - 'com.healthmarketscience.jackcess.impl.ByteUtil', - 'com.healthmarketscience.jackcess.impl.CustomToStringStyle', - 'com.healthmarketscience.jackcess.impl.PageChannel', - 'com.healthmarketscience.jackcess.PropertyMap', - 'com.healthmarketscience.jackcess.PropertyMap$Property', - 'com.healthmarketscience.jackcess.query.Query', - 'com.healthmarketscience.jackcess.Row', - 'com.healthmarketscience.jackcess.RuntimeIOException', - 'com.healthmarketscience.jackcess.Table', - 'com.healthmarketscience.jackcess.util.LinkResolver', - 'com.healthmarketscience.jackcess.util.MemFileChannel', - 'com.healthmarketscience.jackcess.util.OleBlob', - 
'com.healthmarketscience.jackcess.util.OleBlob$Builder', - 'com.healthmarketscience.jackcess.util.OleBlob$CompoundContent', - 'com.healthmarketscience.jackcess.util.OleBlob$CompoundContent$Entry', - 'com.healthmarketscience.jackcess.util.OleBlob$Content', - 'com.healthmarketscience.jackcess.util.OleBlob$ContentType', - 'com.healthmarketscience.jackcess.util.OleBlob$EmbeddedContent', - 'com.healthmarketscience.jackcess.util.OleBlob$LinkContent', - 'com.healthmarketscience.jackcess.util.OleBlob$OtherContent', - 'com.healthmarketscience.jackcess.util.OleBlob$PackageContent', - 'com.healthmarketscience.jackcess.util.OleBlob$SimplePackageContent', - 'com.healthmarketscience.jackcess.util.TableIterableBuilder', - 'com.jmatio.io.MatFileHeader', - 'com.jmatio.io.MatFileReader', - 'com.jmatio.types.MLArray', - 'com.jmatio.types.MLStructure', - 'com.microsoft.schemas.compatibility.impl.AlternateContentDocumentImpl$AlternateContentImpl$1ChoiceList', - 'com.microsoft.schemas.office.excel.STCF', - 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$1Accel2List', - 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$1AccelList', - 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$1AnchorList', - 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$1AutoFillList', - 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$1AutoLineList', - 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$1AutoPictList', - 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$1AutoScaleList', - 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$1CFList', - 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$1CameraList', - 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$1CancelList', - 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$1CheckedList', - 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$1ColHiddenList', - 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$1ColoredList', - 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$1ColumnList', - 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$1DDEList', - 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$1DefaultList', - 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$1DefaultSizeList', - 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$1DisabledList', - 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$1DismissList', - 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$1DropLinesList', - 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$1DropStyleList', - 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$1DxList', - 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$1FirstButtonList', - 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$1FmlaGroupList', - 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$1FmlaLinkList', - 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$1FmlaMacroList', - 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$1FmlaPictList', - 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$1FmlaRangeList', - 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$1FmlaTxbxList', - 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$1HelpList', - 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$1HorizList', - 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$1IncList', - 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$1JustLastXList', - 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$1LCTList', - 
-  'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$1ListItemList',
-  'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$1LockTextList',
-  'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$1LockedList',
-  'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$1MapOCXList',
-  'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$1MaxList',
-  'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$1MinList',
-  'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$1MoveWithCellsList',
-  'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$1MultiLineList',
-  'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$1MultiSelList',
-  'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$1NoThreeD2List',
-  'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$1NoThreeDList',
-  'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$1PageList',
-  'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$1PrintObjectList',
-  'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$1RecalcAlwaysList',
-  'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$1RowHiddenList',
-  'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$1RowList',
-  'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$1ScriptExtendedList',
-  'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$1ScriptLanguageList',
-  'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$1ScriptLocationList',
-  'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$1ScriptTextList',
-  'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$1SecretEditList',
-  'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$1SelList',
-  'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$1SelTypeList',
-  'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$1SizeWithCellsList',
-  'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$1TextHAlignList',
-  'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$1TextVAlignList',
-  'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$1UIObjList',
-  'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$1VScrollList',
-  'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$1VTEditList',
-  'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$1ValList',
-  'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$1ValidIdsList',
-  'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$1WidthMinList',
-  'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$2Accel2List',
-  'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$2AccelList',
-  'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$2AnchorList',
-  'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$2AutoFillList',
-  'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$2AutoLineList',
-  'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$2AutoPictList',
-  'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$2AutoScaleList',
-  'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$2CFList',
-  'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$2CameraList',
-  'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$2CancelList',
-  'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$2CheckedList',
-  'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$2ColHiddenList',
-  'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$2ColoredList',
-  'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$2ColumnList',
-  'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$2DDEList',
-  'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$2DefaultList',
-  'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$2DefaultSizeList',
-  'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$2DisabledList',
-  'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$2DismissList',
-  'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$2DropLinesList',
-  'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$2DropStyleList',
-  'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$2DxList',
-  'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$2FirstButtonList',
-  'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$2FmlaGroupList',
-  'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$2FmlaLinkList',
-  'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$2FmlaMacroList',
-  'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$2FmlaPictList',
-  'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$2FmlaRangeList',
-  'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$2FmlaTxbxList',
-  'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$2HelpList',
-  'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$2HorizList',
-  'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$2IncList',
-  'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$2JustLastXList',
-  'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$2LCTList',
-  'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$2ListItemList',
-  'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$2LockTextList',
-  'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$2LockedList',
-  'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$2MapOCXList',
-  'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$2MaxList',
-  'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$2MinList',
-  'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$2MoveWithCellsList',
-  'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$2MultiLineList',
-  'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$2MultiSelList',
-  'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$2NoThreeD2List',
-  'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$2NoThreeDList',
-  'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$2PageList',
-  'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$2PrintObjectList',
-  'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$2RecalcAlwaysList',
-  'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$2RowHiddenList',
-  'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$2RowList',
-  'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$2ScriptExtendedList',
-  'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$2ScriptLanguageList',
-  'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$2ScriptLocationList',
-  'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$2ScriptTextList',
-  'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$2SecretEditList',
-  'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$2SelList',
-  'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$2SelTypeList',
-  'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$2SizeWithCellsList',
-  'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$2TextHAlignList',
-  'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$2TextVAlignList',
-  'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$2UIObjList',
-  'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$2VScrollList',
-  'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$2VTEditList',
-  'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$2ValList',
-  'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$2ValidIdsList',
-  'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$2VisibleList',
-  'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$2WidthMinList',
-  'com.microsoft.schemas.office.office.CTCallout',
-  'com.microsoft.schemas.office.office.CTClipPath',
-  'com.microsoft.schemas.office.office.CTComplex',
-  'com.microsoft.schemas.office.office.CTDiagram',
-  'com.microsoft.schemas.office.office.CTExtrusion',
-  'com.microsoft.schemas.office.office.CTFill',
-  'com.microsoft.schemas.office.office.CTInk',
-  'com.microsoft.schemas.office.office.CTRegroupTable',
-  'com.microsoft.schemas.office.office.CTRules',
-  'com.microsoft.schemas.office.office.CTSignatureLine',
-  'com.microsoft.schemas.office.office.CTSkew',
-  'com.microsoft.schemas.office.office.CTStrokeChild',
-  'com.microsoft.schemas.office.office.STBWMode',
-  'com.microsoft.schemas.office.office.STConnectorType',
-  'com.microsoft.schemas.office.office.STHrAlign',
-  'com.microsoft.schemas.office.office.STRelationshipId',
-  'com.microsoft.schemas.office.office.STTrueFalse',
-  'com.microsoft.schemas.office.office.STTrueFalseBlank',
-  'com.microsoft.schemas.office.powerpoint.CTEmpty',
-  'com.microsoft.schemas.office.powerpoint.CTRel',
-  'com.microsoft.schemas.office.visio.x2012.main.AttachedToolbarsType',
-  'com.microsoft.schemas.office.visio.x2012.main.ColorsType',
-  'com.microsoft.schemas.office.visio.x2012.main.CpType',
-  'com.microsoft.schemas.office.visio.x2012.main.CustomMenusFileType',
-  'com.microsoft.schemas.office.visio.x2012.main.CustomToolbarsFileType',
-  'com.microsoft.schemas.office.visio.x2012.main.DataType',
-  'com.microsoft.schemas.office.visio.x2012.main.DocumentSheetType',
-  'com.microsoft.schemas.office.visio.x2012.main.DynamicGridEnabledType',
-  'com.microsoft.schemas.office.visio.x2012.main.EventListType',
-  'com.microsoft.schemas.office.visio.x2012.main.FaceNamesType',
-  'com.microsoft.schemas.office.visio.x2012.main.FldType',
-  'com.microsoft.schemas.office.visio.x2012.main.ForeignDataType',
-  'com.microsoft.schemas.office.visio.x2012.main.GlueSettingsType',
-  'com.microsoft.schemas.office.visio.x2012.main.HeaderFooterType',
-  'com.microsoft.schemas.office.visio.x2012.main.IconType',
-  'com.microsoft.schemas.office.visio.x2012.main.MasterShortcutType',
-  'com.microsoft.schemas.office.visio.x2012.main.PpType',
-  'com.microsoft.schemas.office.visio.x2012.main.ProtectBkgndsType',
-  'com.microsoft.schemas.office.visio.x2012.main.ProtectMastersType',
-  'com.microsoft.schemas.office.visio.x2012.main.ProtectShapesType',
-  'com.microsoft.schemas.office.visio.x2012.main.ProtectStylesType',
-  'com.microsoft.schemas.office.visio.x2012.main.PublishSettingsType',
-  'com.microsoft.schemas.office.visio.x2012.main.RefByType',
-  'com.microsoft.schemas.office.visio.x2012.main.SnapAnglesType',
-  'com.microsoft.schemas.office.visio.x2012.main.SnapExtensionsType',
-  'com.microsoft.schemas.office.visio.x2012.main.SnapSettingsType',
-  'com.microsoft.schemas.office.visio.x2012.main.TpType',
-  'com.microsoft.schemas.office.visio.x2012.main.TriggerType',
-  'com.microsoft.schemas.office.visio.x2012.main.impl.CellTypeImpl$1RefByList',
-  'com.microsoft.schemas.office.visio.x2012.main.impl.ConnectsTypeImpl$1ConnectList',
-  'com.microsoft.schemas.office.visio.x2012.main.impl.MastersTypeImpl$1MasterList',
-  'com.microsoft.schemas.office.visio.x2012.main.impl.MastersTypeImpl$1MasterShortcutList',
-  'com.microsoft.schemas.office.visio.x2012.main.impl.PagesTypeImpl$1PageList',
-  'com.microsoft.schemas.office.visio.x2012.main.impl.RowTypeImpl$1CellList',
-  'com.microsoft.schemas.office.visio.x2012.main.impl.RowTypeImpl$1TriggerList',
-  'com.microsoft.schemas.office.visio.x2012.main.impl.SectionTypeImpl$1CellList',
-  'com.microsoft.schemas.office.visio.x2012.main.impl.SectionTypeImpl$1RowList',
-  'com.microsoft.schemas.office.visio.x2012.main.impl.SectionTypeImpl$1TriggerList',
-  'com.microsoft.schemas.office.visio.x2012.main.impl.ShapesTypeImpl$1ShapeList',
-  'com.microsoft.schemas.office.visio.x2012.main.impl.SheetTypeImpl$1CellList',
-  'com.microsoft.schemas.office.visio.x2012.main.impl.SheetTypeImpl$1SectionList',
-  'com.microsoft.schemas.office.visio.x2012.main.impl.SheetTypeImpl$1TriggerList',
-  'com.microsoft.schemas.office.visio.x2012.main.impl.StyleSheetsTypeImpl$1StyleSheetList',
-  'com.microsoft.schemas.office.visio.x2012.main.impl.TextTypeImpl$1CpList',
-  'com.microsoft.schemas.office.visio.x2012.main.impl.TextTypeImpl$1FldList',
-  'com.microsoft.schemas.office.visio.x2012.main.impl.TextTypeImpl$1PpList',
-  'com.microsoft.schemas.office.visio.x2012.main.impl.TextTypeImpl$1TpList',
-  'com.microsoft.schemas.office.word.CTAnchorLock',
-  'com.microsoft.schemas.office.word.CTBorder',
-  'com.microsoft.schemas.office.word.CTWrap',
-  'com.microsoft.schemas.office.x2006.digsig.STPositiveInteger',
-  'com.microsoft.schemas.office.x2006.digsig.STSignatureProviderUrl',
-  'com.microsoft.schemas.office.x2006.digsig.STSignatureText',
-  'com.microsoft.schemas.office.x2006.digsig.STSignatureType',
-  'com.microsoft.schemas.office.x2006.digsig.STUniqueIdentifierWithBraces',
-  'com.microsoft.schemas.office.x2006.digsig.STVersion',
-  'com.microsoft.schemas.vml.CTArc',
-  'com.microsoft.schemas.vml.CTCurve',
-  'com.microsoft.schemas.vml.CTImage',
-  'com.microsoft.schemas.vml.CTImageData',
-  'com.microsoft.schemas.vml.CTLine',
-  'com.microsoft.schemas.vml.CTOval',
-  'com.microsoft.schemas.vml.CTPolyLine',
-  'com.microsoft.schemas.vml.CTRect',
-  'com.microsoft.schemas.vml.CTRoundRect',
-  'com.microsoft.schemas.vml.STEditAs',
-  'com.microsoft.schemas.vml.STFillMethod',
-  'com.microsoft.schemas.vml.STFillType',
-  'com.microsoft.schemas.vml.STImageAspect',
-  'com.microsoft.schemas.vml.STShadowType',
-  'com.microsoft.schemas.vml.STStrokeArrowLength',
-  'com.microsoft.schemas.vml.STStrokeArrowType',
-  'com.microsoft.schemas.vml.STStrokeArrowWidth',
-  'com.microsoft.schemas.vml.STStrokeEndCap',
-  'com.microsoft.schemas.vml.STStrokeLineStyle',
-  'com.microsoft.schemas.vml.STTrueFalseBlank',
-  'com.microsoft.schemas.vml.impl.CTFormulasImpl$1FList',
-  'com.microsoft.schemas.vml.impl.CTGroupImpl$1AnchorlockList',
-  'com.microsoft.schemas.vml.impl.CTGroupImpl$1ArcList',
-  'com.microsoft.schemas.vml.impl.CTGroupImpl$1BorderbottomList',
-  'com.microsoft.schemas.vml.impl.CTGroupImpl$1BorderleftList',
-  'com.microsoft.schemas.vml.impl.CTGroupImpl$1BorderrightList',
-  'com.microsoft.schemas.vml.impl.CTGroupImpl$1BordertopList',
-  'com.microsoft.schemas.vml.impl.CTGroupImpl$1CalloutList',
-  'com.microsoft.schemas.vml.impl.CTGroupImpl$1ClientDataList',
-  'com.microsoft.schemas.vml.impl.CTGroupImpl$1ClippathList',
-  'com.microsoft.schemas.vml.impl.CTGroupImpl$1CurveList',
-  'com.microsoft.schemas.vml.impl.CTGroupImpl$1DiagramList',
-  'com.microsoft.schemas.vml.impl.CTGroupImpl$1ExtrusionList',
-  'com.microsoft.schemas.vml.impl.CTGroupImpl$1FillList',
-  'com.microsoft.schemas.vml.impl.CTGroupImpl$1FormulasList',
-  'com.microsoft.schemas.vml.impl.CTGroupImpl$1GroupList',
-  'com.microsoft.schemas.vml.impl.CTGroupImpl$1HandlesList',
-  'com.microsoft.schemas.vml.impl.CTGroupImpl$1ImageList',
-  'com.microsoft.schemas.vml.impl.CTGroupImpl$1ImagedataList',
-  'com.microsoft.schemas.vml.impl.CTGroupImpl$1LineList',
-  'com.microsoft.schemas.vml.impl.CTGroupImpl$1LockList',
-  'com.microsoft.schemas.vml.impl.CTGroupImpl$1OvalList',
-  'com.microsoft.schemas.vml.impl.CTGroupImpl$1PathList',
-  'com.microsoft.schemas.vml.impl.CTGroupImpl$1PolylineList',
-  'com.microsoft.schemas.vml.impl.CTGroupImpl$1RectList',
-  'com.microsoft.schemas.vml.impl.CTGroupImpl$1RoundrectList',
-  'com.microsoft.schemas.vml.impl.CTGroupImpl$1ShadowList',
-  'com.microsoft.schemas.vml.impl.CTGroupImpl$1ShapeList',
-  'com.microsoft.schemas.vml.impl.CTGroupImpl$1ShapetypeList',
-  'com.microsoft.schemas.vml.impl.CTGroupImpl$1SignaturelineList',
-  'com.microsoft.schemas.vml.impl.CTGroupImpl$1SkewList',
-  'com.microsoft.schemas.vml.impl.CTGroupImpl$1StrokeList',
-  'com.microsoft.schemas.vml.impl.CTGroupImpl$1TextboxList',
-  'com.microsoft.schemas.vml.impl.CTGroupImpl$1TextdataList',
-  'com.microsoft.schemas.vml.impl.CTGroupImpl$1TextpathList',
-  'com.microsoft.schemas.vml.impl.CTGroupImpl$1WrapList',
-  'com.microsoft.schemas.vml.impl.CTHandlesImpl$1HList',
-  'com.microsoft.schemas.vml.impl.CTShapeImpl$1AnchorlockList',
-  'com.microsoft.schemas.vml.impl.CTShapeImpl$1BorderbottomList',
-  'com.microsoft.schemas.vml.impl.CTShapeImpl$1BorderleftList',
-  'com.microsoft.schemas.vml.impl.CTShapeImpl$1BorderrightList',
-  'com.microsoft.schemas.vml.impl.CTShapeImpl$1BordertopList',
-  'com.microsoft.schemas.vml.impl.CTShapeImpl$1CalloutList',
-  'com.microsoft.schemas.vml.impl.CTShapeImpl$1ClippathList',
-  'com.microsoft.schemas.vml.impl.CTShapeImpl$1ExtrusionList',
-  'com.microsoft.schemas.vml.impl.CTShapeImpl$1FillList',
-  'com.microsoft.schemas.vml.impl.CTShapeImpl$1FormulasList',
-  'com.microsoft.schemas.vml.impl.CTShapeImpl$1HandlesList',
-  'com.microsoft.schemas.vml.impl.CTShapeImpl$1ImagedataList',
-  'com.microsoft.schemas.vml.impl.CTShapeImpl$1InkList',
-  'com.microsoft.schemas.vml.impl.CTShapeImpl$1IscommentList',
-  'com.microsoft.schemas.vml.impl.CTShapeImpl$1LockList',
-  'com.microsoft.schemas.vml.impl.CTShapeImpl$1PathList',
-  'com.microsoft.schemas.vml.impl.CTShapeImpl$1ShadowList',
-  'com.microsoft.schemas.vml.impl.CTShapeImpl$1SignaturelineList',
-  'com.microsoft.schemas.vml.impl.CTShapeImpl$1SkewList',
-  'com.microsoft.schemas.vml.impl.CTShapeImpl$1StrokeList',
-  'com.microsoft.schemas.vml.impl.CTShapeImpl$1TextboxList',
-  'com.microsoft.schemas.vml.impl.CTShapeImpl$1TextdataList',
-  'com.microsoft.schemas.vml.impl.CTShapeImpl$1TextpathList',
-  'com.microsoft.schemas.vml.impl.CTShapeImpl$1WrapList',
-  'com.microsoft.schemas.vml.impl.CTShapetypeImpl$1AnchorlockList',
-  'com.microsoft.schemas.vml.impl.CTShapetypeImpl$1BorderbottomList',
-  'com.microsoft.schemas.vml.impl.CTShapetypeImpl$1BorderleftList',
-  'com.microsoft.schemas.vml.impl.CTShapetypeImpl$1BorderrightList',
-  'com.microsoft.schemas.vml.impl.CTShapetypeImpl$1BordertopList',
-  'com.microsoft.schemas.vml.impl.CTShapetypeImpl$1CalloutList',
-  'com.microsoft.schemas.vml.impl.CTShapetypeImpl$1ClientDataList',
-  'com.microsoft.schemas.vml.impl.CTShapetypeImpl$1ClippathList',
-  'com.microsoft.schemas.vml.impl.CTShapetypeImpl$1ExtrusionList',
-  'com.microsoft.schemas.vml.impl.CTShapetypeImpl$1FillList',
-  'com.microsoft.schemas.vml.impl.CTShapetypeImpl$1FormulasList',
-  'com.microsoft.schemas.vml.impl.CTShapetypeImpl$1HandlesList',
-  'com.microsoft.schemas.vml.impl.CTShapetypeImpl$1ImagedataList',
-  'com.microsoft.schemas.vml.impl.CTShapetypeImpl$1LockList',
-  'com.microsoft.schemas.vml.impl.CTShapetypeImpl$1PathList',
-  'com.microsoft.schemas.vml.impl.CTShapetypeImpl$1ShadowList',
-  'com.microsoft.schemas.vml.impl.CTShapetypeImpl$1SignaturelineList',
-  'com.microsoft.schemas.vml.impl.CTShapetypeImpl$1SkewList',
-  'com.microsoft.schemas.vml.impl.CTShapetypeImpl$1StrokeList',
-  'com.microsoft.schemas.vml.impl.CTShapetypeImpl$1TextboxList',
-  'com.microsoft.schemas.vml.impl.CTShapetypeImpl$1TextdataList',
-  'com.microsoft.schemas.vml.impl.CTShapetypeImpl$1TextpathList',
-  'com.microsoft.schemas.vml.impl.CTShapetypeImpl$1WrapList',
-  'com.pff.PSTAttachment',
-  'com.pff.PSTFile',
-  'com.pff.PSTFolder',
-  'com.pff.PSTMessage',
-  'com.pff.PSTRecipient',
-  'com.rometools.rome.feed.synd.SyndContent',
-  'com.rometools.rome.feed.synd.SyndEntry',
-  'com.rometools.rome.feed.synd.SyndFeed',
-  'com.rometools.rome.io.SyndFeedInput',
-  'com.uwyn.jhighlight.renderer.Renderer',
-  'com.uwyn.jhighlight.renderer.XhtmlRendererFactory',
-  'de.l3s.boilerpipe.BoilerpipeExtractor',
-  'de.l3s.boilerpipe.document.TextBlock',
-  'de.l3s.boilerpipe.document.TextDocument',
-  'de.l3s.boilerpipe.extractors.DefaultExtractor',
-  'de.l3s.boilerpipe.sax.BoilerpipeHTMLContentHandler',
-  'javax.mail.BodyPart',
-  'javax.mail.Header',
-  'javax.mail.Message$RecipientType',
-  'javax.mail.MessagingException',
-  'javax.mail.Multipart',
-  'javax.mail.Part',
-  'javax.mail.Session',
-  'javax.mail.Transport',
-  'javax.mail.internet.ContentType',
-  'javax.mail.internet.InternetAddress',
-  'javax.mail.internet.InternetHeaders',
-  'javax.mail.internet.MimeBodyPart',
-  'javax.mail.internet.MimeMessage',
-  'javax.mail.internet.MimeMultipart',
-  'javax.mail.internet.MimePart',
-  'javax.mail.internet.SharedInputStream',
-  'javax.servlet.ServletContextEvent',
-  'javax.servlet.ServletContextListener',
-  'javax.ws.rs.core.Response',
-  'javax.ws.rs.core.UriBuilder',
-  'opennlp.tools.namefind.NameFinderME',
-  'opennlp.tools.namefind.TokenNameFinderModel',
-  'opennlp.tools.sentiment.SentimentME',
-  'opennlp.tools.sentiment.SentimentModel',
-  'opennlp.tools.util.Span',
-  'org.apache.avalon.framework.logger.Logger',
-  'org.apache.commons.csv.CSVFormat',
-  'org.apache.commons.csv.CSVParser',
-  'org.apache.commons.csv.CSVRecord',
-  'org.apache.commons.exec.CommandLine',
-  'org.apache.commons.exec.DefaultExecutor',
-  'org.apache.commons.exec.ExecuteWatchdog',
-  'org.apache.commons.exec.PumpStreamHandler',
-  'org.apache.commons.exec.environment.EnvironmentUtils',
-  'org.apache.commons.lang.builder.ToStringBuilder',
-  'org.apache.commons.lang.NotImplementedException',
-  'org.apache.commons.lang.StringUtils',
-  'org.apache.commons.lang.SystemUtils',
-  'org.apache.commons.math3.linear.Array2DRowRealMatrix',
-  'org.apache.commons.math3.linear.LUDecomposition',
-  'org.apache.commons.math3.linear.MatrixUtils',
-  'org.apache.commons.math3.linear.RealMatrix',
-  'org.apache.ctakes.typesystem.type.refsem.UmlsConcept',
-  'org.apache.ctakes.typesystem.type.textsem.IdentifiedAnnotation',
-  'org.apache.cxf.jaxrs.client.WebClient',
-  'org.apache.cxf.jaxrs.ext.multipart.Attachment',
-  'org.apache.cxf.jaxrs.ext.multipart.ContentDisposition',
-  'org.apache.cxf.jaxrs.ext.multipart.MultipartBody',
-  'org.apache.http.HttpEntity',
-  'org.apache.http.HttpResponse',
-  'org.apache.http.StatusLine',
-  'org.apache.http.client.HttpClient',
-  'org.apache.http.client.methods.HttpGet',
-  'org.apache.http.client.methods.HttpPost',
-  'org.apache.http.client.utils.URIBuilder',
-  'org.apache.http.entity.ByteArrayEntity',
-  'org.apache.http.impl.client.DefaultHttpClient',
-  'org.apache.jcp.xml.dsig.internal.dom.ApacheNodeSetData',
-  'org.apache.jcp.xml.dsig.internal.dom.DOMKeyInfo',
-  'org.apache.jcp.xml.dsig.internal.dom.DOMReference',
-  'org.apache.jcp.xml.dsig.internal.dom.DOMSignedInfo',
-  'org.apache.jcp.xml.dsig.internal.dom.DOMSubTreeData',
-  'org.apache.log.Hierarchy',
-  'org.apache.log.Logger',
-  'org.apache.pdfbox.tools.imageio.ImageIOUtil',
-  'org.apache.sis.internal.util.CheckedArrayList',
-  'org.apache.sis.internal.util.CheckedHashSet',
-  'org.apache.sis.metadata.iso.DefaultMetadata',
-  'org.apache.sis.metadata.iso.DefaultMetadataScope',
-  'org.apache.sis.metadata.iso.constraint.DefaultLegalConstraints',
-  'org.apache.sis.metadata.iso.extent.DefaultGeographicBoundingBox',
-  'org.apache.sis.metadata.iso.extent.DefaultGeographicDescription',
-  'org.apache.sis.metadata.iso.identification.DefaultDataIdentification',
-  'org.apache.sis.storage.DataStore',
-  'org.apache.sis.storage.DataStores',
-  'org.apache.sis.util.collection.CodeListSet',
-  'org.apache.tools.ant.BuildException',
-  'org.apache.tools.ant.FileScanner',
-  'org.apache.tools.ant.Project',
-  'org.apache.tools.ant.taskdefs.Jar',
-  'org.apache.tools.ant.taskdefs.Javac',
-  'org.apache.tools.ant.taskdefs.MatchingTask',
-  'org.apache.tools.ant.types.FileSet',
-  'org.apache.tools.ant.types.Path$PathElement',
-  'org.apache.tools.ant.types.Path',
-  'org.apache.tools.ant.types.Reference',
-  'org.apache.uima.UIMAFramework',
-  'org.apache.uima.analysis_engine.AnalysisEngine',
-  'org.apache.uima.cas.Type',
-  'org.apache.uima.cas.impl.XCASSerializer',
-  'org.apache.uima.cas.impl.XmiCasSerializer',
-  'org.apache.uima.cas.impl.XmiSerializationSharedData',
-  'org.apache.uima.fit.util.JCasUtil',
-  'org.apache.uima.jcas.JCas',
-  'org.apache.uima.jcas.cas.FSArray',
-  'org.apache.uima.util.XMLInputSource',
-  'org.apache.uima.util.XMLParser',
-  'org.apache.uima.util.XmlCasSerializer',
-  'org.apache.xml.security.Init',
-  'org.apache.xml.security.c14n.Canonicalizer',
-  'org.apache.xml.security.signature.XMLSignatureInput',
-  'org.apache.xml.security.utils.XMLUtils',
-  'org.brotli.dec.BrotliInputStream',
-  'org.etsi.uri.x01903.v13.CertifiedRolesListType',
-  'org.etsi.uri.x01903.v13.CounterSignatureType',
-  'org.etsi.uri.x01903.v13.DataObjectFormatType$Factory',
-  'org.etsi.uri.x01903.v13.DataObjectFormatType',
-  'org.etsi.uri.x01903.v13.IdentifierType',
-  'org.etsi.uri.x01903.v13.IncludeType',
-  'org.etsi.uri.x01903.v13.ObjectIdentifierType',
-  'org.etsi.uri.x01903.v13.OtherCertStatusRefsType',
-  'org.etsi.uri.x01903.v13.OtherCertStatusValuesType',
-  'org.etsi.uri.x01903.v13.ReferenceInfoType',
-  'org.etsi.uri.x01903.v13.SigPolicyQualifiersListType',
-  'org.etsi.uri.x01903.v13.SignaturePolicyIdType',
-  'org.etsi.uri.x01903.v13.SignatureProductionPlaceType',
-  'org.etsi.uri.x01903.v13.SignedDataObjectPropertiesType',
-  'org.etsi.uri.x01903.v13.UnsignedDataObjectPropertiesType',
-  'org.etsi.uri.x01903.v13.impl.ClaimedRolesListTypeImpl$1ClaimedRoleList',
-  'org.etsi.uri.x01903.v13.impl.CRLRefsTypeImpl$1CRLRefList',
-  'org.etsi.uri.x01903.v13.impl.CRLValuesTypeImpl$1EncapsulatedCRLValueList',
-  'org.etsi.uri.x01903.v13.impl.CertIDListTypeImpl$1CertList',
-  'org.etsi.uri.x01903.v13.impl.CertificateValuesTypeImpl$1EncapsulatedX509CertificateList',
-  'org.etsi.uri.x01903.v13.impl.CertificateValuesTypeImpl$1OtherCertificateList',
-  'org.etsi.uri.x01903.v13.impl.GenericTimeStampTypeImpl$1EncapsulatedTimeStampList',
-  'org.etsi.uri.x01903.v13.impl.GenericTimeStampTypeImpl$1IncludeList',
-  'org.etsi.uri.x01903.v13.impl.GenericTimeStampTypeImpl$1ReferenceInfoList',
-  'org.etsi.uri.x01903.v13.impl.GenericTimeStampTypeImpl$1XMLTimeStampList',
-  'org.etsi.uri.x01903.v13.impl.OCSPRefsTypeImpl$1OCSPRefList',
-  'org.etsi.uri.x01903.v13.impl.OCSPValuesTypeImpl$1EncapsulatedOCSPValueList',
-  'org.etsi.uri.x01903.v13.impl.UnsignedSignaturePropertiesTypeImpl$1ArchiveTimeStampList',
-  'org.etsi.uri.x01903.v13.impl.UnsignedSignaturePropertiesTypeImpl$1AttrAuthoritiesCertValuesList',
-  'org.etsi.uri.x01903.v13.impl.UnsignedSignaturePropertiesTypeImpl$1AttributeCertificateRefsList',
-  'org.etsi.uri.x01903.v13.impl.UnsignedSignaturePropertiesTypeImpl$1AttributeRevocationRefsList',
-  'org.etsi.uri.x01903.v13.impl.UnsignedSignaturePropertiesTypeImpl$1AttributeRevocationValuesList',
-  'org.etsi.uri.x01903.v13.impl.UnsignedSignaturePropertiesTypeImpl$1CertificateValuesList',
-  'org.etsi.uri.x01903.v13.impl.UnsignedSignaturePropertiesTypeImpl$1CompleteCertificateRefsList',
-  'org.etsi.uri.x01903.v13.impl.UnsignedSignaturePropertiesTypeImpl$1CompleteRevocationRefsList',
-  'org.etsi.uri.x01903.v13.impl.UnsignedSignaturePropertiesTypeImpl$1CounterSignatureList',
-  'org.etsi.uri.x01903.v13.impl.UnsignedSignaturePropertiesTypeImpl$1RefsOnlyTimeStampList',
-  'org.etsi.uri.x01903.v13.impl.UnsignedSignaturePropertiesTypeImpl$1RevocationValuesList',
-  'org.etsi.uri.x01903.v13.impl.UnsignedSignaturePropertiesTypeImpl$1SigAndRefsTimeStampList',
-  'org.etsi.uri.x01903.v13.impl.UnsignedSignaturePropertiesTypeImpl$1SignatureTimeStampList',
-  'org.etsi.uri.x01903.v14.ValidationDataType$Factory',
-  'org.etsi.uri.x01903.v14.ValidationDataType',
-  'org.json.simple.JSONArray',
-  'org.json.simple.JSONObject',
-  'org.json.simple.parser.JSONParser',
-  'org.objectweb.asm.AnnotationVisitor',
-  'org.objectweb.asm.Attribute',
-  'org.objectweb.asm.ClassReader',
-  'org.objectweb.asm.ClassVisitor',
-  'org.objectweb.asm.FieldVisitor',
-  'org.objectweb.asm.MethodVisitor',
-  'org.objectweb.asm.Type',
-  'org.opengis.metadata.Identifier',
-  'org.opengis.metadata.citation.Address',
-  'org.opengis.metadata.citation.Citation',
-  'org.opengis.metadata.citation.CitationDate',
-  'org.opengis.metadata.citation.Contact',
-  'org.opengis.metadata.citation.DateType',
-  'org.opengis.metadata.citation.OnLineFunction',
-  'org.opengis.metadata.citation.OnlineResource',
-  'org.opengis.metadata.citation.ResponsibleParty',
-  'org.opengis.metadata.citation.Role',
-  'org.opengis.metadata.constraint.Restriction',
-  'org.opengis.metadata.distribution.DigitalTransferOptions',
-  'org.opengis.metadata.distribution.Distribution',
-  'org.opengis.metadata.distribution.Distributor',
-  'org.opengis.metadata.distribution.Format',
-  'org.opengis.metadata.extent.Extent',
-  'org.opengis.metadata.identification.Identification',
-  'org.opengis.metadata.identification.KeywordType',
-  'org.opengis.metadata.identification.Keywords',
-  'org.opengis.metadata.identification.Progress',
-  'org.opengis.metadata.identification.TopicCategory',
-  'org.opengis.metadata.maintenance.ScopeCode',
-  'org.opengis.util.InternationalString',
-
-  // Missing openxml schema classes are explained by the fact we use the smaller jar:
-  // "The full jar of all of the schemas is ooxml-schemas-xx.jar, and it is currently around 15mb.
-  // The smaller poi-ooxml-schemas jar is only about 4mb.
-  // This latter jar file only contains the typically used parts though."
-  // http://poi.apache.org/faq.html#faq-N10025
-  'org.openxmlformats.schemas.drawingml.x2006.chart.CTArea3DChart',
-  'org.openxmlformats.schemas.drawingml.x2006.chart.CTAreaChart',
-  'org.openxmlformats.schemas.drawingml.x2006.chart.CTBar3DChart',
-  'org.openxmlformats.schemas.drawingml.x2006.chart.CTBubbleChart',
-  'org.openxmlformats.schemas.drawingml.x2006.chart.CTDispBlanksAs',
-  'org.openxmlformats.schemas.drawingml.x2006.chart.CTDispUnits',
-  'org.openxmlformats.schemas.drawingml.x2006.chart.CTDLbl',
-  'org.openxmlformats.schemas.drawingml.x2006.chart.CTDLblPos',
-  'org.openxmlformats.schemas.drawingml.x2006.chart.CTDoughnutChart',
-  'org.openxmlformats.schemas.drawingml.x2006.chart.CTDPt',
-  'org.openxmlformats.schemas.drawingml.x2006.chart.CTDTable',
-  'org.openxmlformats.schemas.drawingml.x2006.chart.CTErrBars',
-  'org.openxmlformats.schemas.drawingml.x2006.chart.CTExtension',
-  'org.openxmlformats.schemas.drawingml.x2006.chart.CTFirstSliceAng',
-  'org.openxmlformats.schemas.drawingml.x2006.chart.CTLblOffset',
-  'org.openxmlformats.schemas.drawingml.x2006.chart.CTLine3DChart',
-  'org.openxmlformats.schemas.drawingml.x2006.chart.CTMultiLvlStrRef',
-  'org.openxmlformats.schemas.drawingml.x2006.chart.CTOfPieChart',
-  'org.openxmlformats.schemas.drawingml.x2006.chart.CTOverlap',
-  'org.openxmlformats.schemas.drawingml.x2006.chart.CTPictureOptions',
-  'org.openxmlformats.schemas.drawingml.x2006.chart.CTPie3DChart',
-  'org.openxmlformats.schemas.drawingml.x2006.chart.CTPivotFmts',
-  'org.openxmlformats.schemas.drawingml.x2006.chart.CTPivotSource',
-  'org.openxmlformats.schemas.drawingml.x2006.chart.CTProtection',
-  'org.openxmlformats.schemas.drawingml.x2006.chart.CTShape',
-  'org.openxmlformats.schemas.drawingml.x2006.chart.CTSkip',
-  'org.openxmlformats.schemas.drawingml.x2006.chart.CTStockChart',
-  'org.openxmlformats.schemas.drawingml.x2006.chart.CTStyle',
-  'org.openxmlformats.schemas.drawingml.x2006.chart.CTSurface3DChart',
-  'org.openxmlformats.schemas.drawingml.x2006.chart.CTSurfaceChart',
-  'org.openxmlformats.schemas.drawingml.x2006.chart.CTTextLanguageID',
-  'org.openxmlformats.schemas.drawingml.x2006.chart.CTTimeUnit',
-  'org.openxmlformats.schemas.drawingml.x2006.chart.CTTrendline',
-  'org.openxmlformats.schemas.drawingml.x2006.chart.CTUpDownBars',
-  'org.openxmlformats.schemas.drawingml.x2006.chart.CTView3D',
-  'org.openxmlformats.schemas.drawingml.x2006.chart.STAxisUnit',
-  'org.openxmlformats.schemas.drawingml.x2006.chart.STMarkerSize',
-  'org.openxmlformats.schemas.drawingml.x2006.chart.impl.CTBarChartImpl$1AxIdList',
-  'org.openxmlformats.schemas.drawingml.x2006.chart.impl.CTBarChartImpl$1SerLinesList',
-  'org.openxmlformats.schemas.drawingml.x2006.chart.impl.CTBarSerImpl$1DPtList',
-  'org.openxmlformats.schemas.drawingml.x2006.chart.impl.CTBarSerImpl$1TrendlineList',
-  'org.openxmlformats.schemas.drawingml.x2006.chart.impl.CTDLblsImpl$1DLblList',
-  'org.openxmlformats.schemas.drawingml.x2006.chart.impl.CTExtensionListImpl$1ExtList',
-  'org.openxmlformats.schemas.drawingml.x2006.chart.impl.CTLegendImpl$1LegendEntryList',
-  'org.openxmlformats.schemas.drawingml.x2006.chart.impl.CTLineChartImpl$1AxIdList',
-  'org.openxmlformats.schemas.drawingml.x2006.chart.impl.CTLineSerImpl$1DPtList',
-  'org.openxmlformats.schemas.drawingml.x2006.chart.impl.CTLineSerImpl$1TrendlineList',
-  'org.openxmlformats.schemas.drawingml.x2006.chart.impl.CTNumDataImpl$1PtList',
-  'org.openxmlformats.schemas.drawingml.x2006.chart.impl.CTPieSerImpl$1DPtList',
-  'org.openxmlformats.schemas.drawingml.x2006.chart.impl.CTPlotAreaImpl$1Area3DChartList',
-  'org.openxmlformats.schemas.drawingml.x2006.chart.impl.CTPlotAreaImpl$1AreaChartList',
-  'org.openxmlformats.schemas.drawingml.x2006.chart.impl.CTPlotAreaImpl$1Bar3DChartList',
-  'org.openxmlformats.schemas.drawingml.x2006.chart.impl.CTPlotAreaImpl$1BarChartList',
-  'org.openxmlformats.schemas.drawingml.x2006.chart.impl.CTPlotAreaImpl$1BubbleChartList',
-  'org.openxmlformats.schemas.drawingml.x2006.chart.impl.CTPlotAreaImpl$1CatAxList',
-  'org.openxmlformats.schemas.drawingml.x2006.chart.impl.CTPlotAreaImpl$1DateAxList',
-  'org.openxmlformats.schemas.drawingml.x2006.chart.impl.CTPlotAreaImpl$1DoughnutChartList',
-  'org.openxmlformats.schemas.drawingml.x2006.chart.impl.CTPlotAreaImpl$1Line3DChartList',
-  'org.openxmlformats.schemas.drawingml.x2006.chart.impl.CTPlotAreaImpl$1LineChartList',
-  'org.openxmlformats.schemas.drawingml.x2006.chart.impl.CTPlotAreaImpl$1OfPieChartList',
-  'org.openxmlformats.schemas.drawingml.x2006.chart.impl.CTPlotAreaImpl$1Pie3DChartList',
-  'org.openxmlformats.schemas.drawingml.x2006.chart.impl.CTPlotAreaImpl$1PieChartList',
-  'org.openxmlformats.schemas.drawingml.x2006.chart.impl.CTPlotAreaImpl$1RadarChartList',
-  'org.openxmlformats.schemas.drawingml.x2006.chart.impl.CTPlotAreaImpl$1ScatterChartList',
-  'org.openxmlformats.schemas.drawingml.x2006.chart.impl.CTPlotAreaImpl$1SerAxList',
-  'org.openxmlformats.schemas.drawingml.x2006.chart.impl.CTPlotAreaImpl$1StockChartList',
-  'org.openxmlformats.schemas.drawingml.x2006.chart.impl.CTPlotAreaImpl$1Surface3DChartList',
-  'org.openxmlformats.schemas.drawingml.x2006.chart.impl.CTPlotAreaImpl$1SurfaceChartList',
-  'org.openxmlformats.schemas.drawingml.x2006.chart.impl.CTPlotAreaImpl$1ValAxList',
-  'org.openxmlformats.schemas.drawingml.x2006.chart.impl.CTRadarChartImpl$1AxIdList',
-  'org.openxmlformats.schemas.drawingml.x2006.chart.impl.CTRadarSerImpl$1DPtList',
-  'org.openxmlformats.schemas.drawingml.x2006.chart.impl.CTScatterChartImpl$1AxIdList',
-  'org.openxmlformats.schemas.drawingml.x2006.chart.impl.CTScatterSerImpl$1DPtList',
-  'org.openxmlformats.schemas.drawingml.x2006.chart.impl.CTScatterSerImpl$1ErrBarsList',
-  'org.openxmlformats.schemas.drawingml.x2006.chart.impl.CTScatterSerImpl$1TrendlineList',
-  'org.openxmlformats.schemas.drawingml.x2006.chart.impl.CTStrDataImpl$1PtList',
-  'org.openxmlformats.schemas.drawingml.x2006.chart.STPageSetupOrientation',
-  'org.openxmlformats.schemas.drawingml.x2006.main.CTAlphaBiLevelEffect',
-  'org.openxmlformats.schemas.drawingml.x2006.main.CTAlphaCeilingEffect',
-  'org.openxmlformats.schemas.drawingml.x2006.main.CTAlphaFloorEffect',
-  'org.openxmlformats.schemas.drawingml.x2006.main.CTAlphaInverseEffect',
-  'org.openxmlformats.schemas.drawingml.x2006.main.CTAlphaModulateEffect',
-  'org.openxmlformats.schemas.drawingml.x2006.main.CTAlphaOutsetEffect',
-  'org.openxmlformats.schemas.drawingml.x2006.main.CTAlphaReplaceEffect',
-  'org.openxmlformats.schemas.drawingml.x2006.main.CTAngle',
-  'org.openxmlformats.schemas.drawingml.x2006.main.CTAudioCD',
-  'org.openxmlformats.schemas.drawingml.x2006.main.CTAudioFile',
-  'org.openxmlformats.schemas.drawingml.x2006.main.CTBackdrop',
-  'org.openxmlformats.schemas.drawingml.x2006.main.CTBevel',
-  'org.openxmlformats.schemas.drawingml.x2006.main.CTBiLevelEffect',
-  'org.openxmlformats.schemas.drawingml.x2006.main.CTBlendEffect',
-  'org.openxmlformats.schemas.drawingml.x2006.main.CTBlurEffect',
-  'org.openxmlformats.schemas.drawingml.x2006.main.CTCamera',
-  'org.openxmlformats.schemas.drawingml.x2006.main.CTCell3D',
-  'org.openxmlformats.schemas.drawingml.x2006.main.CTColorChangeEffect',
-  'org.openxmlformats.schemas.drawingml.x2006.main.CTColorReplaceEffect',
-  'org.openxmlformats.schemas.drawingml.x2006.main.CTColorSchemeList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.CTComplementTransform',
-  'org.openxmlformats.schemas.drawingml.x2006.main.CTConnectorLocking',
-  'org.openxmlformats.schemas.drawingml.x2006.main.CTCustomColorList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.CTDuotoneEffect',
-  'org.openxmlformats.schemas.drawingml.x2006.main.CTEffectReference',
-  'org.openxmlformats.schemas.drawingml.x2006.main.CTEmbeddedWAVAudioFile',
-  'org.openxmlformats.schemas.drawingml.x2006.main.CTFillEffect',
-  'org.openxmlformats.schemas.drawingml.x2006.main.CTFillOverlayEffect',
-  'org.openxmlformats.schemas.drawingml.x2006.main.CTFlatText',
-  'org.openxmlformats.schemas.drawingml.x2006.main.CTGammaTransform',
-  'org.openxmlformats.schemas.drawingml.x2006.main.CTGlowEffect',
-  'org.openxmlformats.schemas.drawingml.x2006.main.CTGrayscaleEffect',
-  'org.openxmlformats.schemas.drawingml.x2006.main.CTGrayscaleTransform',
-  'org.openxmlformats.schemas.drawingml.x2006.main.CTGroupLocking',
-  'org.openxmlformats.schemas.drawingml.x2006.main.CTHSLEffect',
-  'org.openxmlformats.schemas.drawingml.x2006.main.CTInnerShadowEffect',
-  'org.openxmlformats.schemas.drawingml.x2006.main.CTInverseGammaTransform',
-  'org.openxmlformats.schemas.drawingml.x2006.main.CTInverseTransform',
-  'org.openxmlformats.schemas.drawingml.x2006.main.CTLightRig',
-  'org.openxmlformats.schemas.drawingml.x2006.main.CTLuminanceEffect',
-  'org.openxmlformats.schemas.drawingml.x2006.main.CTObjectStyleDefaults',
-  'org.openxmlformats.schemas.drawingml.x2006.main.CTPath2DArcTo',
-  'org.openxmlformats.schemas.drawingml.x2006.main.CTPositiveFixedAngle',
-  'org.openxmlformats.schemas.drawingml.x2006.main.CTPresetShadowEffect',
-  'org.openxmlformats.schemas.drawingml.x2006.main.CTPresetTextShape',
-  'org.openxmlformats.schemas.drawingml.x2006.main.CTQuickTimeFile',
-  'org.openxmlformats.schemas.drawingml.x2006.main.CTReflectionEffect',
-  'org.openxmlformats.schemas.drawingml.x2006.main.CTRelativeOffsetEffect',
-  'org.openxmlformats.schemas.drawingml.x2006.main.CTShapeLocking',
-  'org.openxmlformats.schemas.drawingml.x2006.main.CTSoftEdgesEffect',
-  'org.openxmlformats.schemas.drawingml.x2006.main.CTSupplementalFont',
-  'org.openxmlformats.schemas.drawingml.x2006.main.CTTableBackgroundStyle',
-  'org.openxmlformats.schemas.drawingml.x2006.main.CTTableCellBorderStyle',
-  'org.openxmlformats.schemas.drawingml.x2006.main.CTTextUnderlineFillFollowText',
-  'org.openxmlformats.schemas.drawingml.x2006.main.CTTextUnderlineFillGroupWrapper',
-  'org.openxmlformats.schemas.drawingml.x2006.main.CTTextUnderlineLineFollowText',
-  'org.openxmlformats.schemas.drawingml.x2006.main.CTTintEffect',
-  'org.openxmlformats.schemas.drawingml.x2006.main.CTTransformEffect',
-  'org.openxmlformats.schemas.drawingml.x2006.main.CTVideoFile',
-  'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTAdjustHandleListImpl$1AhPolarList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTAdjustHandleListImpl$1AhXYList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTBackgroundFillStyleListImpl$1BlipFillList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTBackgroundFillStyleListImpl$1GradFillList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTBackgroundFillStyleListImpl$1GrpFillList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTBackgroundFillStyleListImpl$1NoFillList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTBackgroundFillStyleListImpl$1PattFillList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTBackgroundFillStyleListImpl$1SolidFillList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTBlipImpl$1AlphaBiLevelList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTBlipImpl$1AlphaCeilingList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTBlipImpl$1AlphaFloorList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTBlipImpl$1AlphaInvList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTBlipImpl$1AlphaModFixList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTBlipImpl$1AlphaModList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTBlipImpl$1AlphaReplList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTBlipImpl$1BiLevelList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTBlipImpl$1BlurList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTBlipImpl$1ClrChangeList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTBlipImpl$1ClrReplList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTBlipImpl$1DuotoneList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTBlipImpl$1FillOverlayList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTBlipImpl$1GraysclList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTBlipImpl$1HslList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTBlipImpl$1LumList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTBlipImpl$1TintList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTConnectionSiteListImpl$1CxnList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTDashStopListImpl$1DsList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTEffectContainerImpl$1AlphaBiLevelList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTEffectContainerImpl$1AlphaCeilingList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTEffectContainerImpl$1AlphaFloorList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTEffectContainerImpl$1AlphaInvList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTEffectContainerImpl$1AlphaModFixList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTEffectContainerImpl$1AlphaModList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTEffectContainerImpl$1AlphaOutsetList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTEffectContainerImpl$1AlphaReplList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTEffectContainerImpl$1BiLevelList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTEffectContainerImpl$1BlendList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTEffectContainerImpl$1BlurList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTEffectContainerImpl$1ClrChangeList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTEffectContainerImpl$1ClrReplList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTEffectContainerImpl$1ContList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTEffectContainerImpl$1DuotoneList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTEffectContainerImpl$1EffectList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTEffectContainerImpl$1FillList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTEffectContainerImpl$1FillOverlayList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTEffectContainerImpl$1GlowList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTEffectContainerImpl$1GraysclList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTEffectContainerImpl$1HslList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTEffectContainerImpl$1InnerShdwList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTEffectContainerImpl$1LumList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTEffectContainerImpl$1OuterShdwList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTEffectContainerImpl$1PrstShdwList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTEffectContainerImpl$1ReflectionList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTEffectContainerImpl$1RelOffList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTEffectContainerImpl$1SoftEdgeList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTEffectContainerImpl$1TintList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTEffectContainerImpl$1XfrmList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTEffectStyleListImpl$1EffectStyleList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTFillStyleListImpl$1BlipFillList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTFillStyleListImpl$1GradFillList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTFillStyleListImpl$1GrpFillList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTFillStyleListImpl$1NoFillList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTFillStyleListImpl$1PattFillList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTFillStyleListImpl$1SolidFillList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTFontCollectionImpl$1FontList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTGeomGuideListImpl$1GdList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTGradientStopListImpl$1GsList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTHslColorImpl$1AlphaList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTHslColorImpl$1AlphaModList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTHslColorImpl$1AlphaOffList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTHslColorImpl$1BlueList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTHslColorImpl$1BlueModList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTHslColorImpl$1BlueOffList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTHslColorImpl$1CompList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTHslColorImpl$1GammaList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTHslColorImpl$1GrayList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTHslColorImpl$1GreenList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTHslColorImpl$1GreenModList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTHslColorImpl$1GreenOffList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTHslColorImpl$1HueList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTHslColorImpl$1HueModList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTHslColorImpl$1HueOffList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTHslColorImpl$1InvGammaList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTHslColorImpl$1InvList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTHslColorImpl$1LumList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTHslColorImpl$1LumModList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTHslColorImpl$1LumOffList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTHslColorImpl$1RedList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTHslColorImpl$1RedModList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTHslColorImpl$1RedOffList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTHslColorImpl$1SatList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTHslColorImpl$1SatModList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTHslColorImpl$1SatOffList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTHslColorImpl$1ShadeList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTHslColorImpl$1TintList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTLineStyleListImpl$1LnList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTOfficeArtExtensionListImpl$1ExtList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTPath2DCubicBezierToImpl$1PtList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTPath2DImpl$1ArcToList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTPath2DImpl$1CloseList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTPath2DImpl$1CubicBezToList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTPath2DImpl$1LnToList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTPath2DImpl$1MoveToList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTPath2DImpl$1QuadBezToList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTPath2DListImpl$1PathList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTPresetColorImpl$1AlphaList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTPresetColorImpl$1AlphaModList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTPresetColorImpl$1AlphaOffList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTPresetColorImpl$1BlueList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTPresetColorImpl$1BlueModList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTPresetColorImpl$1BlueOffList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTPresetColorImpl$1CompList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTPresetColorImpl$1GammaList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTPresetColorImpl$1GrayList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTPresetColorImpl$1GreenList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTPresetColorImpl$1GreenModList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTPresetColorImpl$1GreenOffList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTPresetColorImpl$1HueList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTPresetColorImpl$1HueModList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTPresetColorImpl$1HueOffList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTPresetColorImpl$1InvGammaList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTPresetColorImpl$1InvList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTPresetColorImpl$1LumList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTPresetColorImpl$1LumModList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTPresetColorImpl$1LumOffList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTPresetColorImpl$1RedList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTPresetColorImpl$1RedModList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTPresetColorImpl$1RedOffList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTPresetColorImpl$1SatList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTPresetColorImpl$1SatModList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTPresetColorImpl$1SatOffList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTPresetColorImpl$1ShadeList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTPresetColorImpl$1TintList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSchemeColorImpl$1AlphaList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSchemeColorImpl$1AlphaModList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSchemeColorImpl$1AlphaOffList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSchemeColorImpl$1BlueList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSchemeColorImpl$1BlueModList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSchemeColorImpl$1BlueOffList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSchemeColorImpl$1CompList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSchemeColorImpl$1GammaList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSchemeColorImpl$1GrayList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSchemeColorImpl$1GreenList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSchemeColorImpl$1GreenModList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSchemeColorImpl$1GreenOffList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSchemeColorImpl$1HueList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSchemeColorImpl$1HueModList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSchemeColorImpl$1HueOffList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSchemeColorImpl$1InvGammaList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSchemeColorImpl$1InvList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSchemeColorImpl$1LumList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSchemeColorImpl$1LumModList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSchemeColorImpl$1LumOffList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSchemeColorImpl$1RedList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSchemeColorImpl$1RedModList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSchemeColorImpl$1RedOffList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSchemeColorImpl$1SatList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSchemeColorImpl$1SatModList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSchemeColorImpl$1SatOffList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSchemeColorImpl$1ShadeList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSchemeColorImpl$1TintList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTScRgbColorImpl$1AlphaList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTScRgbColorImpl$1AlphaModList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTScRgbColorImpl$1AlphaOffList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTScRgbColorImpl$1BlueList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTScRgbColorImpl$1BlueModList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTScRgbColorImpl$1BlueOffList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTScRgbColorImpl$1CompList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTScRgbColorImpl$1GammaList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTScRgbColorImpl$1GrayList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTScRgbColorImpl$1GreenList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTScRgbColorImpl$1GreenModList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTScRgbColorImpl$1GreenOffList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTScRgbColorImpl$1HueList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTScRgbColorImpl$1HueModList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTScRgbColorImpl$1HueOffList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTScRgbColorImpl$1InvGammaList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTScRgbColorImpl$1InvList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTScRgbColorImpl$1LumList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTScRgbColorImpl$1LumModList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTScRgbColorImpl$1LumOffList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTScRgbColorImpl$1RedList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTScRgbColorImpl$1RedModList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTScRgbColorImpl$1RedOffList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTScRgbColorImpl$1SatList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTScRgbColorImpl$1SatModList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTScRgbColorImpl$1SatOffList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTScRgbColorImpl$1ShadeList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTScRgbColorImpl$1TintList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSRgbColorImpl$1AlphaList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSRgbColorImpl$1AlphaModList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSRgbColorImpl$1AlphaOffList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSRgbColorImpl$1BlueList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSRgbColorImpl$1BlueModList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSRgbColorImpl$1BlueOffList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSRgbColorImpl$1CompList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSRgbColorImpl$1GammaList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSRgbColorImpl$1GrayList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSRgbColorImpl$1GreenList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSRgbColorImpl$1GreenModList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSRgbColorImpl$1GreenOffList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSRgbColorImpl$1HueList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSRgbColorImpl$1HueModList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSRgbColorImpl$1HueOffList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSRgbColorImpl$1InvGammaList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSRgbColorImpl$1InvList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSRgbColorImpl$1LumList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSRgbColorImpl$1LumModList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSRgbColorImpl$1LumOffList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSRgbColorImpl$1RedList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSRgbColorImpl$1RedModList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSRgbColorImpl$1RedOffList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSRgbColorImpl$1SatList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSRgbColorImpl$1SatModList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSRgbColorImpl$1SatOffList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSRgbColorImpl$1ShadeList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSRgbColorImpl$1TintList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSystemColorImpl$1AlphaList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSystemColorImpl$1AlphaModList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSystemColorImpl$1AlphaOffList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSystemColorImpl$1BlueList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSystemColorImpl$1BlueModList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSystemColorImpl$1BlueOffList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSystemColorImpl$1CompList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSystemColorImpl$1GammaList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSystemColorImpl$1GrayList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSystemColorImpl$1GreenList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSystemColorImpl$1GreenModList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSystemColorImpl$1GreenOffList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSystemColorImpl$1HueList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSystemColorImpl$1HueModList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSystemColorImpl$1HueOffList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSystemColorImpl$1InvGammaList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSystemColorImpl$1InvList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSystemColorImpl$1LumList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSystemColorImpl$1LumModList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSystemColorImpl$1LumOffList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSystemColorImpl$1RedList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSystemColorImpl$1RedModList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSystemColorImpl$1RedOffList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSystemColorImpl$1SatList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSystemColorImpl$1SatModList',
-  'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSystemColorImpl$1SatOffList',
'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSystemColorImpl$1ShadeList', - 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSystemColorImpl$1TintList', - 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTTableGridImpl$1GridColList', - 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTTableRowImpl$1TcList', - 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTTextParagraphImpl$1BrList', - 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTTextParagraphImpl$1FldList', - 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTTextParagraphImpl$1RList', - 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTTextTabStopListImpl$1TabList', - 'org.openxmlformats.schemas.drawingml.x2006.main.STAdjAngle', - 'org.openxmlformats.schemas.drawingml.x2006.main.STBlipCompression', - 'org.openxmlformats.schemas.drawingml.x2006.main.STEffectContainerType', - 'org.openxmlformats.schemas.drawingml.x2006.main.STFixedAngle', - 'org.openxmlformats.schemas.drawingml.x2006.main.STPanose', - 'org.openxmlformats.schemas.drawingml.x2006.main.STPathFillMode', - 'org.openxmlformats.schemas.drawingml.x2006.main.STPresetMaterialType', - 'org.openxmlformats.schemas.drawingml.x2006.main.STShapeID', - 'org.openxmlformats.schemas.drawingml.x2006.main.STTextColumnCount', - 'org.openxmlformats.schemas.drawingml.x2006.main.STTextNonNegativePoint', - 'org.openxmlformats.schemas.drawingml.x2006.spreadsheetDrawing.impl.CTDrawingImpl$1AbsoluteAnchorList', - 'org.openxmlformats.schemas.drawingml.x2006.spreadsheetDrawing.impl.CTDrawingImpl$1OneCellAnchorList', - 'org.openxmlformats.schemas.drawingml.x2006.spreadsheetDrawing.impl.CTDrawingImpl$1TwoCellAnchorList', - 'org.openxmlformats.schemas.drawingml.x2006.spreadsheetDrawing.impl.CTGroupShapeImpl$1CxnSpList', - 'org.openxmlformats.schemas.drawingml.x2006.spreadsheetDrawing.impl.CTGroupShapeImpl$1GraphicFrameList', - 'org.openxmlformats.schemas.drawingml.x2006.spreadsheetDrawing.impl.CTGroupShapeImpl$1GrpSpList', - 'org.openxmlformats.schemas.drawingml.x2006.spreadsheetDrawing.impl.CTGroupShapeImpl$1PicList', - 'org.openxmlformats.schemas.drawingml.x2006.spreadsheetDrawing.impl.CTGroupShapeImpl$1SpList', - 'org.openxmlformats.schemas.drawingml.x2006.wordprocessingDrawing.CTEffectExtent', - 'org.openxmlformats.schemas.drawingml.x2006.wordprocessingDrawing.CTPosH', - 'org.openxmlformats.schemas.drawingml.x2006.wordprocessingDrawing.CTPosV', - 'org.openxmlformats.schemas.drawingml.x2006.wordprocessingDrawing.CTWrapNone', - 'org.openxmlformats.schemas.drawingml.x2006.wordprocessingDrawing.CTWrapSquare', - 'org.openxmlformats.schemas.drawingml.x2006.wordprocessingDrawing.CTWrapThrough', - 'org.openxmlformats.schemas.drawingml.x2006.wordprocessingDrawing.CTWrapTight', - 'org.openxmlformats.schemas.drawingml.x2006.wordprocessingDrawing.CTWrapTopBottom', - 'org.openxmlformats.schemas.officeDocument.x2006.docPropsVTypes.CTArray', - 'org.openxmlformats.schemas.officeDocument.x2006.docPropsVTypes.CTCf', - 'org.openxmlformats.schemas.officeDocument.x2006.docPropsVTypes.CTEmpty', - 'org.openxmlformats.schemas.officeDocument.x2006.docPropsVTypes.CTNull', - 'org.openxmlformats.schemas.officeDocument.x2006.docPropsVTypes.CTVstream', - 'org.openxmlformats.schemas.officeDocument.x2006.docPropsVTypes.STCy', - 'org.openxmlformats.schemas.officeDocument.x2006.docPropsVTypes.STError', - 'org.openxmlformats.schemas.officeDocument.x2006.docPropsVTypes.STVectorBaseType', - 
'org.openxmlformats.schemas.officeDocument.x2006.docPropsVTypes.impl.CTVectorImpl$1BoolList', - 'org.openxmlformats.schemas.officeDocument.x2006.docPropsVTypes.impl.CTVectorImpl$1BstrList', - 'org.openxmlformats.schemas.officeDocument.x2006.docPropsVTypes.impl.CTVectorImpl$1CfList', - 'org.openxmlformats.schemas.officeDocument.x2006.docPropsVTypes.impl.CTVectorImpl$1ClsidList', - 'org.openxmlformats.schemas.officeDocument.x2006.docPropsVTypes.impl.CTVectorImpl$1CyList', - 'org.openxmlformats.schemas.officeDocument.x2006.docPropsVTypes.impl.CTVectorImpl$1DateList', - 'org.openxmlformats.schemas.officeDocument.x2006.docPropsVTypes.impl.CTVectorImpl$1ErrorList', - 'org.openxmlformats.schemas.officeDocument.x2006.docPropsVTypes.impl.CTVectorImpl$1FiletimeList', - 'org.openxmlformats.schemas.officeDocument.x2006.docPropsVTypes.impl.CTVectorImpl$1I1List', - 'org.openxmlformats.schemas.officeDocument.x2006.docPropsVTypes.impl.CTVectorImpl$1I2List', - 'org.openxmlformats.schemas.officeDocument.x2006.docPropsVTypes.impl.CTVectorImpl$1I4List', - 'org.openxmlformats.schemas.officeDocument.x2006.docPropsVTypes.impl.CTVectorImpl$1I8List', - 'org.openxmlformats.schemas.officeDocument.x2006.docPropsVTypes.impl.CTVectorImpl$1LpstrList', - 'org.openxmlformats.schemas.officeDocument.x2006.docPropsVTypes.impl.CTVectorImpl$1LpwstrList', - 'org.openxmlformats.schemas.officeDocument.x2006.docPropsVTypes.impl.CTVectorImpl$1R4List', - 'org.openxmlformats.schemas.officeDocument.x2006.docPropsVTypes.impl.CTVectorImpl$1R8List', - 'org.openxmlformats.schemas.officeDocument.x2006.docPropsVTypes.impl.CTVectorImpl$1Ui1List', - 'org.openxmlformats.schemas.officeDocument.x2006.docPropsVTypes.impl.CTVectorImpl$1Ui2List', - 'org.openxmlformats.schemas.officeDocument.x2006.docPropsVTypes.impl.CTVectorImpl$1Ui4List', - 'org.openxmlformats.schemas.officeDocument.x2006.docPropsVTypes.impl.CTVectorImpl$1Ui8List', - 'org.openxmlformats.schemas.officeDocument.x2006.docPropsVTypes.impl.CTVectorImpl$1VariantList', - 'org.openxmlformats.schemas.officeDocument.x2006.docPropsVTypes.impl.CTVectorImpl$2BoolList', - 'org.openxmlformats.schemas.officeDocument.x2006.docPropsVTypes.impl.CTVectorImpl$2BstrList', - 'org.openxmlformats.schemas.officeDocument.x2006.docPropsVTypes.impl.CTVectorImpl$2ClsidList', - 'org.openxmlformats.schemas.officeDocument.x2006.docPropsVTypes.impl.CTVectorImpl$2CyList', - 'org.openxmlformats.schemas.officeDocument.x2006.docPropsVTypes.impl.CTVectorImpl$2DateList', - 'org.openxmlformats.schemas.officeDocument.x2006.docPropsVTypes.impl.CTVectorImpl$2ErrorList', - 'org.openxmlformats.schemas.officeDocument.x2006.docPropsVTypes.impl.CTVectorImpl$2FiletimeList', - 'org.openxmlformats.schemas.officeDocument.x2006.docPropsVTypes.impl.CTVectorImpl$2I1List', - 'org.openxmlformats.schemas.officeDocument.x2006.docPropsVTypes.impl.CTVectorImpl$2I2List', - 'org.openxmlformats.schemas.officeDocument.x2006.docPropsVTypes.impl.CTVectorImpl$2I4List', - 'org.openxmlformats.schemas.officeDocument.x2006.docPropsVTypes.impl.CTVectorImpl$2I8List', - 'org.openxmlformats.schemas.officeDocument.x2006.docPropsVTypes.impl.CTVectorImpl$2LpstrList', - 'org.openxmlformats.schemas.officeDocument.x2006.docPropsVTypes.impl.CTVectorImpl$2LpwstrList', - 'org.openxmlformats.schemas.officeDocument.x2006.docPropsVTypes.impl.CTVectorImpl$2R4List', - 'org.openxmlformats.schemas.officeDocument.x2006.docPropsVTypes.impl.CTVectorImpl$2R8List', - 'org.openxmlformats.schemas.officeDocument.x2006.docPropsVTypes.impl.CTVectorImpl$2Ui1List', - 
'org.openxmlformats.schemas.officeDocument.x2006.docPropsVTypes.impl.CTVectorImpl$2Ui2List', - 'org.openxmlformats.schemas.officeDocument.x2006.docPropsVTypes.impl.CTVectorImpl$2Ui4List', - 'org.openxmlformats.schemas.officeDocument.x2006.docPropsVTypes.impl.CTVectorImpl$2Ui8List', - 'org.openxmlformats.schemas.officeDocument.x2006.math.CTAcc', - 'org.openxmlformats.schemas.officeDocument.x2006.math.CTBar', - 'org.openxmlformats.schemas.officeDocument.x2006.math.CTBorderBox', - 'org.openxmlformats.schemas.officeDocument.x2006.math.CTBox', - 'org.openxmlformats.schemas.officeDocument.x2006.math.CTD', - 'org.openxmlformats.schemas.officeDocument.x2006.math.CTEqArr', - 'org.openxmlformats.schemas.officeDocument.x2006.math.CTF', - 'org.openxmlformats.schemas.officeDocument.x2006.math.CTFunc', - 'org.openxmlformats.schemas.officeDocument.x2006.math.CTGroupChr', - 'org.openxmlformats.schemas.officeDocument.x2006.math.CTLimLow', - 'org.openxmlformats.schemas.officeDocument.x2006.math.CTLimUpp', - 'org.openxmlformats.schemas.officeDocument.x2006.math.CTM', - 'org.openxmlformats.schemas.officeDocument.x2006.math.CTMathPr', - 'org.openxmlformats.schemas.officeDocument.x2006.math.CTNary', - 'org.openxmlformats.schemas.officeDocument.x2006.math.CTOMath', - 'org.openxmlformats.schemas.officeDocument.x2006.math.CTOMathPara', - 'org.openxmlformats.schemas.officeDocument.x2006.math.CTPhant', - 'org.openxmlformats.schemas.officeDocument.x2006.math.CTR', - 'org.openxmlformats.schemas.officeDocument.x2006.math.CTRad', - 'org.openxmlformats.schemas.officeDocument.x2006.math.CTSPre', - 'org.openxmlformats.schemas.officeDocument.x2006.math.CTSSub', - 'org.openxmlformats.schemas.officeDocument.x2006.math.CTSSubSup', - 'org.openxmlformats.schemas.officeDocument.x2006.math.CTSSup', - 'org.openxmlformats.schemas.presentationml.x2006.main.CTControlList', - 'org.openxmlformats.schemas.presentationml.x2006.main.CTCustomShowList', - 'org.openxmlformats.schemas.presentationml.x2006.main.CTCustomerData', - 'org.openxmlformats.schemas.presentationml.x2006.main.CTEmbeddedFontList', - 'org.openxmlformats.schemas.presentationml.x2006.main.CTExtensionList', - 'org.openxmlformats.schemas.presentationml.x2006.main.CTExtensionListModify', - 'org.openxmlformats.schemas.presentationml.x2006.main.CTHandoutMasterIdList', - 'org.openxmlformats.schemas.presentationml.x2006.main.CTKinsoku', - 'org.openxmlformats.schemas.presentationml.x2006.main.CTModifyVerifier', - 'org.openxmlformats.schemas.presentationml.x2006.main.CTOleObjectEmbed', - 'org.openxmlformats.schemas.presentationml.x2006.main.CTOleObjectLink', - 'org.openxmlformats.schemas.presentationml.x2006.main.CTPhotoAlbum', - 'org.openxmlformats.schemas.presentationml.x2006.main.CTSlideLayoutIdList', - 'org.openxmlformats.schemas.presentationml.x2006.main.CTSlideTiming', - 'org.openxmlformats.schemas.presentationml.x2006.main.CTSlideTransition', - 'org.openxmlformats.schemas.presentationml.x2006.main.CTSmartTags', - 'org.openxmlformats.schemas.presentationml.x2006.main.STBookmarkIdSeed', - 'org.openxmlformats.schemas.presentationml.x2006.main.STDirection', - 'org.openxmlformats.schemas.presentationml.x2006.main.STIndex', - 'org.openxmlformats.schemas.presentationml.x2006.main.STSlideSizeType', - 'org.openxmlformats.schemas.presentationml.x2006.main.impl.CTCommentAuthorListImpl$1CmAuthorList', - 'org.openxmlformats.schemas.presentationml.x2006.main.impl.CTCommentListImpl$1CmList', - 'org.openxmlformats.schemas.presentationml.x2006.main.impl.CTCustomerDataListImpl$1CustDataList', - 
'org.openxmlformats.schemas.schemaLibrary.x2006.main.CTSchemaLibrary', - 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTAutoSortScope', - 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTBoolean', - 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTCacheHierarchies', - 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTCalculatedItems', - 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTCalculatedMembers', - 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTCellStyles', - 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTCellWatches', - 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTChartFormats', - 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTChartsheetPr', - 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTChartsheetProtection', - 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTChartsheetViews', - 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTColHierarchiesUsage', - 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTColItems', - 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTConditionalFormats', - 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTConsolidation', - 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTControls', - 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTCsPageSetup', - 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTCustomChartsheetViews', - 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTCustomProperties', - 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTCustomSheetViews', - 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTCustomWorkbookViews', - 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTDataBinding', - 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTDataConsolidate', - 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTDateTime', - 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTDdeLink', - 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTDimensions', - 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTError', - 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTExtensionList', - 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTExternalSheetDataSet', - 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTFieldGroup', - 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTFileRecoveryPr', - 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTFileSharing', - 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTFileVersion', - 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTFilterColumn', - 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTFormats', - 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTFunctionGroups', - 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTGradientFill', - 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTMRUColors', - 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTMeasureDimensionMaps', - 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTMeasureGroups', - 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTMissing', - 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTNumber', - 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTOleLink', - 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTOleSize', - 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTPCDKPIs', - 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTPivotFilters', - 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTPivotHierarchies', - 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTPivotSelection', - 
'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTProtectedRanges', - 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTRecord', - 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTRowHierarchiesUsage', - 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTRowItems', - 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTScenarios', - 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTSheetBackgroundPicture', - 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTSmartTagPr', - 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTSmartTagTypes', - 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTSmartTags', - 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTSortState', - 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTString', - 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTTableFormula', - 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTTupleCache', - 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTWebPublishItems', - 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTWebPublishObjects', - 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTWebPublishing', - 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTX', - 'org.openxmlformats.schemas.spreadsheetml.x2006.main.STCellSpans', - 'org.openxmlformats.schemas.spreadsheetml.x2006.main.STDataValidationImeMode', - 'org.openxmlformats.schemas.spreadsheetml.x2006.main.STDvAspect', - 'org.openxmlformats.schemas.spreadsheetml.x2006.main.STFieldSortType', - 'org.openxmlformats.schemas.spreadsheetml.x2006.main.STGuid', - 'org.openxmlformats.schemas.spreadsheetml.x2006.main.STObjects', - 'org.openxmlformats.schemas.spreadsheetml.x2006.main.STOleUpdate', - 'org.openxmlformats.schemas.spreadsheetml.x2006.main.STPhoneticAlignment', - 'org.openxmlformats.schemas.spreadsheetml.x2006.main.STPhoneticType', - 'org.openxmlformats.schemas.spreadsheetml.x2006.main.STPrintError', - 'org.openxmlformats.schemas.spreadsheetml.x2006.main.STRefMode', - 'org.openxmlformats.schemas.spreadsheetml.x2006.main.STSheetViewType', - 'org.openxmlformats.schemas.spreadsheetml.x2006.main.STShowDataAs', - 'org.openxmlformats.schemas.spreadsheetml.x2006.main.STTableType', - 'org.openxmlformats.schemas.spreadsheetml.x2006.main.STTimePeriod', - 'org.openxmlformats.schemas.spreadsheetml.x2006.main.STTotalsRowFunction', - 'org.openxmlformats.schemas.spreadsheetml.x2006.main.STUpdateLinks', - 'org.openxmlformats.schemas.spreadsheetml.x2006.main.STVisibility', - 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTAuthorsImpl$1AuthorList', - 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTAuthorsImpl$2AuthorList', - 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTAutoFilterImpl$1FilterColumnList', - 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTBookViewsImpl$1WorkbookViewList', - 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTBordersImpl$1BorderList', - 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTCacheFieldImpl$1MpMapList', - 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTCacheFieldsImpl$1CacheFieldList', - 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTCellStyleXfsImpl$1XfList', - 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTCellXfsImpl$1XfList', - 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTCfRuleImpl$1FormulaList', - 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTCfRuleImpl$2FormulaList', - 
'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTColFieldsImpl$1FieldList', - 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTColorScaleImpl$1CfvoList', - 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTColorScaleImpl$1ColorList', - 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTCommentListImpl$1CommentList', - 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTConditionalFormattingImpl$1CfRuleList', - 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTDataBarImpl$1CfvoList', - 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTDataValidationsImpl$1DataValidationList', - 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTDxfsImpl$1DxfList', - 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTExternalDefinedNamesImpl$1DefinedNameList', - 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTExternalReferencesImpl$1ExternalReferenceList', - 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTExternalSheetNamesImpl$1SheetNameList', - 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTFillsImpl$1FillList', - 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTFontImpl$1BList', - 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTFontImpl$1CharsetList', - 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTFontImpl$1ColorList', - 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTFontImpl$1CondenseList', - 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTFontImpl$1ExtendList', - 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTFontImpl$1FamilyList', - 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTFontImpl$1IList', - 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTFontImpl$1NameList', - 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTFontImpl$1OutlineList', - 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTFontImpl$1SchemeList', - 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTFontImpl$1ShadowList', - 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTFontImpl$1StrikeList', - 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTFontImpl$1SzList', - 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTFontImpl$1UList', - 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTFontImpl$1VertAlignList', - 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTFontsImpl$1FontList', - 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTHyperlinksImpl$1HyperlinkList', - 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTIconSetImpl$1CfvoList', - 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTItemsImpl$1ItemList', - 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTMapInfoImpl$1MapList', - 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTMapInfoImpl$1SchemaList', - 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTMergeCellsImpl$1MergeCellList', - 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTNumFmtsImpl$1NumFmtList', - 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTOleObjectsImpl$1OleObjectList', - 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTPageBreakImpl$1BrkList', - 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTPageFieldsImpl$1PageFieldList', - 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTPivotCacheRecordsImpl$1RList', - 
'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTPivotCachesImpl$1PivotCacheList', - 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTPivotFieldsImpl$1PivotFieldList', - 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTRPrEltImpl$1BList', - 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTRPrEltImpl$1CharsetList', - 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTRPrEltImpl$1ColorList', - 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTRPrEltImpl$1CondenseList', - 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTRPrEltImpl$1ExtendList', - 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTRPrEltImpl$1FamilyList', - 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTRPrEltImpl$1IList', - 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTRPrEltImpl$1OutlineList', - 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTRPrEltImpl$1RFontList', - 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTRPrEltImpl$1SchemeList', - 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTRPrEltImpl$1ShadowList', - 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTRPrEltImpl$1StrikeList', - 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTRPrEltImpl$1SzList', - 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTRPrEltImpl$1UList', - 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTRPrEltImpl$1VertAlignList', - 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTRowFieldsImpl$1FieldList', - 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTRowImpl$1CList', - 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTRstImpl$1RList', - 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTSharedItemsImpl$1BList', - 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTSharedItemsImpl$1DList', - 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTSharedItemsImpl$1EList', - 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTSharedItemsImpl$1MList', - 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTSharedItemsImpl$1NList', - 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTSharedItemsImpl$1SList', - 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTSheetViewImpl$1PivotSelectionList', - 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTSheetViewImpl$1SelectionList', - 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTSheetViewsImpl$1SheetViewList', - 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTSheetsImpl$1SheetList', - 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTSingleXmlCellsImpl$1SingleXmlCellList', - 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTSstImpl$1SiList', - 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTTablePartsImpl$1TablePartList', - 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTTableStylesImpl$1TableStyleList', - 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTWorkbookImpl$1FileRecoveryPrList', - 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTWorksheetImpl$1ColsList', - 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTWorksheetImpl$1ConditionalFormattingList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTAltChunk', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTAttr', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTCaptions', - 
'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTCellMergeTrackChange', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTCharacterSpacing', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTCnf', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTColorSchemeMapping', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTColumns', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTCompat', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTControl', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTCustomXmlBlock', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTCustomXmlCell', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTCustomXmlRow', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTCustomXmlRun', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTDataBinding', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTDocGrid', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTDocRsids', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTDocType', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTDocVars', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTEastAsianLayout', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTEdnDocProps', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTEdnProps', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTFFDDList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTFFHelpText', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTFFName', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTFFStatusText', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTFFTextInput', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTFitText', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTFramePr', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTFtnDocProps', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTFtnProps', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTKinsoku', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTLevelSuffix', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTLineNumber', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTLock', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTLongHexNumber', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTLvlLegacy', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTMacroName', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTMailMerge', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTMultiLevelType', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTNumPicBullet', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTPPrChange', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTPageBorders', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTPageMar', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTPageNumber', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTPageSz', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTPaperSource', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTParaRPrChange', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTPerm', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTPermStart', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTPlaceholder', - 
'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTProof', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTRPrChange', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTReadingModeInkLockDown', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTSaveThroughXslt', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTSdtComboBox', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTSdtDate', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTSdtDropDownList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTSdtRow', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTSdtText', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTSectPrChange', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTSectType', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTShapeDefaults', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTShortHexNumber', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTSmartTagType', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTTblGridChange', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTTblOverlap', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTTblPPr', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTTblPrChange', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTTblPrExChange', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTTblStylePr', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTTcMar', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTTcPrChange', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTTextDirection', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTTextEffect', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTTextboxTightWrap', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTTrPrChange', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTTrackChangeNumbering', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTTrackChangesView', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTTwipsMeasure', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTView', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTWriteProtection', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTWritingStyle', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.STDateTime', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.STDisplacedByCustomXml', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.STHint', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.STPTabAlignment', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.STPTabLeader', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.STPTabRelativeTo', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.STProofErr', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.STRubyAlign', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.STShortHexNumber', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.STUcharHexNumber', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.STZoom', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTAbstractNumImpl$1LvlList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTBodyImpl$1AltChunkList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTBodyImpl$1BookmarkEndList', - 
'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTBodyImpl$1BookmarkStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTBodyImpl$1CommentRangeEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTBodyImpl$1CommentRangeStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTBodyImpl$1CustomXmlDelRangeEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTBodyImpl$1CustomXmlDelRangeStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTBodyImpl$1CustomXmlInsRangeEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTBodyImpl$1CustomXmlInsRangeStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTBodyImpl$1CustomXmlList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTBodyImpl$1CustomXmlMoveFromRangeEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTBodyImpl$1CustomXmlMoveFromRangeStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTBodyImpl$1CustomXmlMoveToRangeEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTBodyImpl$1CustomXmlMoveToRangeStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTBodyImpl$1DelList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTBodyImpl$1InsList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTBodyImpl$1MoveFromList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTBodyImpl$1MoveFromRangeEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTBodyImpl$1MoveFromRangeStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTBodyImpl$1MoveToList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTBodyImpl$1MoveToRangeEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTBodyImpl$1MoveToRangeStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTBodyImpl$1OMathList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTBodyImpl$1OMathParaList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTBodyImpl$1PList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTBodyImpl$1PermEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTBodyImpl$1PermStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTBodyImpl$1ProofErrList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTBodyImpl$1SdtList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTBodyImpl$1TblList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTCommentImpl$1AltChunkList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTCommentImpl$1BookmarkEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTCommentImpl$1BookmarkStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTCommentImpl$1CommentRangeEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTCommentImpl$1CommentRangeStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTCommentImpl$1CustomXmlDelRangeEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTCommentImpl$1CustomXmlDelRangeStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTCommentImpl$1CustomXmlInsRangeEndList', - 
'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTCommentImpl$1CustomXmlInsRangeStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTCommentImpl$1CustomXmlList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTCommentImpl$1CustomXmlMoveFromRangeEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTCommentImpl$1CustomXmlMoveFromRangeStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTCommentImpl$1CustomXmlMoveToRangeEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTCommentImpl$1CustomXmlMoveToRangeStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTCommentImpl$1DelList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTCommentImpl$1InsList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTCommentImpl$1MoveFromList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTCommentImpl$1MoveFromRangeEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTCommentImpl$1MoveFromRangeStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTCommentImpl$1MoveToList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTCommentImpl$1MoveToRangeEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTCommentImpl$1MoveToRangeStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTCommentImpl$1OMathList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTCommentImpl$1OMathParaList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTCommentImpl$1PList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTCommentImpl$1PermEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTCommentImpl$1PermStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTCommentImpl$1ProofErrList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTCommentImpl$1SdtList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTCommentImpl$1TblList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTCommentsImpl$1CommentList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTDrawingImpl$1AnchorList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTDrawingImpl$1InlineList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTFFDataImpl$1CalcOnExitList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTFFDataImpl$1DdListList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTFFDataImpl$1EnabledList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTFFDataImpl$1EntryMacroList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTFFDataImpl$1ExitMacroList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTFFDataImpl$1HelpTextList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTFFDataImpl$1NameList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTFFDataImpl$1StatusTextList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTFFDataImpl$1TextInputList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTFtnEdnImpl$1AltChunkList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTFtnEdnImpl$1BookmarkEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTFtnEdnImpl$1BookmarkStartList', - 
'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTFtnEdnImpl$1CommentRangeEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTFtnEdnImpl$1CommentRangeStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTFtnEdnImpl$1CustomXmlDelRangeEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTFtnEdnImpl$1CustomXmlDelRangeStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTFtnEdnImpl$1CustomXmlInsRangeEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTFtnEdnImpl$1CustomXmlInsRangeStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTFtnEdnImpl$1CustomXmlList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTFtnEdnImpl$1CustomXmlMoveFromRangeEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTFtnEdnImpl$1CustomXmlMoveFromRangeStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTFtnEdnImpl$1CustomXmlMoveToRangeEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTFtnEdnImpl$1CustomXmlMoveToRangeStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTFtnEdnImpl$1DelList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTFtnEdnImpl$1InsList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTFtnEdnImpl$1MoveFromList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTFtnEdnImpl$1MoveFromRangeEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTFtnEdnImpl$1MoveFromRangeStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTFtnEdnImpl$1MoveToList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTFtnEdnImpl$1MoveToRangeEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTFtnEdnImpl$1MoveToRangeStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTFtnEdnImpl$1OMathList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTFtnEdnImpl$1OMathParaList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTFtnEdnImpl$1PList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTFtnEdnImpl$1PermEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTFtnEdnImpl$1PermStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTFtnEdnImpl$1ProofErrList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTFtnEdnImpl$1SdtList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTFtnEdnImpl$1TblList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHdrFtrImpl$1AltChunkList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHdrFtrImpl$1BookmarkEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHdrFtrImpl$1BookmarkStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHdrFtrImpl$1CommentRangeEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHdrFtrImpl$1CommentRangeStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHdrFtrImpl$1CustomXmlDelRangeEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHdrFtrImpl$1CustomXmlDelRangeStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHdrFtrImpl$1CustomXmlInsRangeEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHdrFtrImpl$1CustomXmlInsRangeStartList', - 
'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHdrFtrImpl$1CustomXmlList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHdrFtrImpl$1CustomXmlMoveFromRangeEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHdrFtrImpl$1CustomXmlMoveFromRangeStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHdrFtrImpl$1CustomXmlMoveToRangeEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHdrFtrImpl$1CustomXmlMoveToRangeStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHdrFtrImpl$1DelList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHdrFtrImpl$1InsList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHdrFtrImpl$1MoveFromList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHdrFtrImpl$1MoveFromRangeEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHdrFtrImpl$1MoveFromRangeStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHdrFtrImpl$1MoveToList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHdrFtrImpl$1MoveToRangeEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHdrFtrImpl$1MoveToRangeStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHdrFtrImpl$1OMathList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHdrFtrImpl$1OMathParaList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHdrFtrImpl$1PList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHdrFtrImpl$1PermEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHdrFtrImpl$1PermStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHdrFtrImpl$1ProofErrList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHdrFtrImpl$1SdtList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHdrFtrImpl$1TblList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHyperlinkImpl$1BookmarkEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHyperlinkImpl$1BookmarkStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHyperlinkImpl$1CommentRangeEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHyperlinkImpl$1CommentRangeStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHyperlinkImpl$1CustomXmlDelRangeEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHyperlinkImpl$1CustomXmlDelRangeStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHyperlinkImpl$1CustomXmlInsRangeEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHyperlinkImpl$1CustomXmlInsRangeStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHyperlinkImpl$1CustomXmlList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHyperlinkImpl$1CustomXmlMoveFromRangeEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHyperlinkImpl$1CustomXmlMoveFromRangeStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHyperlinkImpl$1CustomXmlMoveToRangeEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHyperlinkImpl$1CustomXmlMoveToRangeStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHyperlinkImpl$1DelList', - 
'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHyperlinkImpl$1FldSimpleList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHyperlinkImpl$1HyperlinkList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHyperlinkImpl$1InsList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHyperlinkImpl$1MoveFromList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHyperlinkImpl$1MoveFromRangeEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHyperlinkImpl$1MoveFromRangeStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHyperlinkImpl$1MoveToList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHyperlinkImpl$1MoveToRangeEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHyperlinkImpl$1MoveToRangeStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHyperlinkImpl$1OMathList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHyperlinkImpl$1OMathParaList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHyperlinkImpl$1PermEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHyperlinkImpl$1PermStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHyperlinkImpl$1ProofErrList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHyperlinkImpl$1RList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHyperlinkImpl$1SdtList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHyperlinkImpl$1SmartTagList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHyperlinkImpl$1SubDocList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTLatentStylesImpl$1LsdExceptionList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTNumImpl$1LvlOverrideList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTNumberingImpl$1AbstractNumList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTNumberingImpl$1NumList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTNumberingImpl$1NumPicBulletList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTPImpl$1BookmarkEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTPImpl$1CommentRangeEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTPImpl$1CommentRangeStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTPImpl$1CustomXmlDelRangeEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTPImpl$1CustomXmlDelRangeStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTPImpl$1CustomXmlInsRangeEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTPImpl$1CustomXmlInsRangeStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTPImpl$1CustomXmlList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTPImpl$1CustomXmlMoveFromRangeEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTPImpl$1CustomXmlMoveFromRangeStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTPImpl$1CustomXmlMoveToRangeEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTPImpl$1CustomXmlMoveToRangeStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTPImpl$1DelList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTPImpl$1FldSimpleList', 
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTPImpl$1HyperlinkList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTPImpl$1InsList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTPImpl$1MoveFromList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTPImpl$1MoveFromRangeEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTPImpl$1MoveFromRangeStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTPImpl$1MoveToList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTPImpl$1MoveToRangeEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTPImpl$1MoveToRangeStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTPImpl$1OMathList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTPImpl$1OMathParaList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTPImpl$1PermEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTPImpl$1PermStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTPImpl$1ProofErrList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTPImpl$1RList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTPImpl$1SdtList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTPImpl$1SmartTagList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTPImpl$1SubDocList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRImpl$1AnnotationRefList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRImpl$1BrList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRImpl$1CommentReferenceList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRImpl$1ContinuationSeparatorList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRImpl$1CrList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRImpl$1DayLongList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRImpl$1DayShortList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRImpl$1DelInstrTextList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRImpl$1DelTextList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRImpl$1DrawingList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRImpl$1FldCharList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRImpl$1InstrTextList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRImpl$1LastRenderedPageBreakList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRImpl$1MonthLongList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRImpl$1MonthShortList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRImpl$1NoBreakHyphenList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRImpl$1ObjectList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRImpl$1PgNumList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRImpl$1PictList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRImpl$1PtabList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRImpl$1RubyList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRImpl$1SeparatorList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRImpl$1SoftHyphenList', - 
'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRImpl$1SymList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRImpl$1TList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRImpl$1TabList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRImpl$1YearLongList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRImpl$1YearShortList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRowImpl$1BookmarkEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRowImpl$1BookmarkStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRowImpl$1CommentRangeEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRowImpl$1CommentRangeStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRowImpl$1CustomXmlDelRangeEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRowImpl$1CustomXmlDelRangeStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRowImpl$1CustomXmlInsRangeEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRowImpl$1CustomXmlInsRangeStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRowImpl$1CustomXmlList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRowImpl$1CustomXmlMoveFromRangeEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRowImpl$1CustomXmlMoveFromRangeStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRowImpl$1CustomXmlMoveToRangeEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRowImpl$1CustomXmlMoveToRangeStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRowImpl$1DelList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRowImpl$1InsList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRowImpl$1MoveFromList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRowImpl$1MoveFromRangeEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRowImpl$1MoveFromRangeStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRowImpl$1MoveToList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRowImpl$1MoveToRangeEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRowImpl$1MoveToRangeStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRowImpl$1OMathList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRowImpl$1OMathParaList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRowImpl$1PermEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRowImpl$1PermStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRowImpl$1ProofErrList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRowImpl$1SdtList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRubyContentImpl$1BookmarkEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRubyContentImpl$1BookmarkStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRubyContentImpl$1CommentRangeEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRubyContentImpl$1CommentRangeStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRubyContentImpl$1CustomXmlDelRangeEndList', - 
'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRubyContentImpl$1CustomXmlDelRangeStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRubyContentImpl$1CustomXmlInsRangeEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRubyContentImpl$1CustomXmlInsRangeStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRubyContentImpl$1CustomXmlMoveFromRangeEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRubyContentImpl$1CustomXmlMoveFromRangeStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRubyContentImpl$1CustomXmlMoveToRangeEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRubyContentImpl$1CustomXmlMoveToRangeStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRubyContentImpl$1DelList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRubyContentImpl$1InsList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRubyContentImpl$1MoveFromList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRubyContentImpl$1MoveFromRangeEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRubyContentImpl$1MoveFromRangeStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRubyContentImpl$1MoveToList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRubyContentImpl$1MoveToRangeEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRubyContentImpl$1MoveToRangeStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRubyContentImpl$1OMathList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRubyContentImpl$1OMathParaList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRubyContentImpl$1PermEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRubyContentImpl$1PermStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRubyContentImpl$1ProofErrList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRubyContentImpl$1RList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1AccList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1BarList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1BookmarkEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1BookmarkStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1BorderBoxList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1BoxList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1CommentRangeEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1CommentRangeStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1CustomXmlDelRangeEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1CustomXmlDelRangeStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1CustomXmlInsRangeEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1CustomXmlInsRangeStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1CustomXmlList', - 
'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1CustomXmlMoveFromRangeEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1CustomXmlMoveFromRangeStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1CustomXmlMoveToRangeEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1CustomXmlMoveToRangeStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1DList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1DelList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1EqArrList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1FList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1FuncList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1GroupChrList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1InsList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1LimLowList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1LimUppList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1MList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1MoveFromList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1MoveFromRangeEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1MoveFromRangeStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1MoveToList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1MoveToRangeEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1MoveToRangeStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1NaryList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1OMathList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1OMathParaList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1PermEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1PermStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1PhantList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1ProofErrList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1R2List', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1RList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1RadList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1SPreList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1SSubList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1SSubSupList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1SSupList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1SdtList', - 
'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1SmartTagList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentBlockImpl$1BookmarkEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentBlockImpl$1BookmarkStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentBlockImpl$1CommentRangeEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentBlockImpl$1CommentRangeStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentBlockImpl$1CustomXmlDelRangeEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentBlockImpl$1CustomXmlDelRangeStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentBlockImpl$1CustomXmlInsRangeEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentBlockImpl$1CustomXmlInsRangeStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentBlockImpl$1CustomXmlList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentBlockImpl$1CustomXmlMoveFromRangeEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentBlockImpl$1CustomXmlMoveFromRangeStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentBlockImpl$1CustomXmlMoveToRangeEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentBlockImpl$1CustomXmlMoveToRangeStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentBlockImpl$1DelList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentBlockImpl$1InsList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentBlockImpl$1MoveFromList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentBlockImpl$1MoveFromRangeEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentBlockImpl$1MoveFromRangeStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentBlockImpl$1MoveToList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentBlockImpl$1MoveToRangeEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentBlockImpl$1MoveToRangeStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentBlockImpl$1OMathList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentBlockImpl$1OMathParaList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentBlockImpl$1PList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentBlockImpl$1PermEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentBlockImpl$1PermStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentBlockImpl$1ProofErrList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentBlockImpl$1SdtList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentBlockImpl$1TblList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentCellImpl$1BookmarkEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentCellImpl$1BookmarkStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentCellImpl$1CommentRangeEndList', - 
'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentCellImpl$1CommentRangeStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentCellImpl$1CustomXmlDelRangeEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentCellImpl$1CustomXmlDelRangeStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentCellImpl$1CustomXmlInsRangeEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentCellImpl$1CustomXmlInsRangeStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentCellImpl$1CustomXmlList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentCellImpl$1CustomXmlMoveFromRangeEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentCellImpl$1CustomXmlMoveFromRangeStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentCellImpl$1CustomXmlMoveToRangeEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentCellImpl$1CustomXmlMoveToRangeStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentCellImpl$1DelList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentCellImpl$1InsList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentCellImpl$1MoveFromList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentCellImpl$1MoveFromRangeEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentCellImpl$1MoveFromRangeStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentCellImpl$1MoveToList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentCellImpl$1MoveToRangeEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentCellImpl$1MoveToRangeStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentCellImpl$1OMathList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentCellImpl$1OMathParaList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentCellImpl$1PermEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentCellImpl$1PermStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentCellImpl$1ProofErrList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentCellImpl$1SdtList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentCellImpl$1TcList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentRunImpl$1BookmarkEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentRunImpl$1BookmarkStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentRunImpl$1CommentRangeEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentRunImpl$1CommentRangeStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentRunImpl$1CustomXmlDelRangeEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentRunImpl$1CustomXmlDelRangeStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentRunImpl$1CustomXmlInsRangeEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentRunImpl$1CustomXmlInsRangeStartList', - 
'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentRunImpl$1CustomXmlList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentRunImpl$1CustomXmlMoveFromRangeEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentRunImpl$1CustomXmlMoveFromRangeStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentRunImpl$1CustomXmlMoveToRangeEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentRunImpl$1CustomXmlMoveToRangeStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentRunImpl$1DelList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentRunImpl$1FldSimpleList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentRunImpl$1HyperlinkList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentRunImpl$1InsList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentRunImpl$1MoveFromList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentRunImpl$1MoveFromRangeEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentRunImpl$1MoveFromRangeStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentRunImpl$1MoveToList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentRunImpl$1MoveToRangeEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentRunImpl$1MoveToRangeStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentRunImpl$1OMathList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentRunImpl$1OMathParaList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentRunImpl$1PermEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentRunImpl$1PermStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentRunImpl$1ProofErrList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentRunImpl$1RList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentRunImpl$1SdtList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentRunImpl$1SmartTagList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentRunImpl$1SubDocList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtEndPrImpl$1RPrList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtPrImpl$1AliasList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtPrImpl$1BibliographyList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtPrImpl$1CitationList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtPrImpl$1ComboBoxList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtPrImpl$1DataBindingList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtPrImpl$1DateList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtPrImpl$1DocPartListList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtPrImpl$1DocPartObjList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtPrImpl$1DropDownListList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtPrImpl$1EquationList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtPrImpl$1GroupList', - 
'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtPrImpl$1IdList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtPrImpl$1LockList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtPrImpl$1PictureList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtPrImpl$1PlaceholderList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtPrImpl$1RPrList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtPrImpl$1RichTextList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtPrImpl$1ShowingPlcHdrList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtPrImpl$1TagList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtPrImpl$1TemporaryList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtPrImpl$1TextList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSectPrImpl$1FooterReferenceList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSectPrImpl$1HeaderReferenceList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSettingsImpl$1ActiveWritingStyleList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSettingsImpl$1AttachedSchemaList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSettingsImpl$1SmartTagTypeList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSimpleFieldImpl$1BookmarkEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSimpleFieldImpl$1BookmarkStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSimpleFieldImpl$1CommentRangeEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSimpleFieldImpl$1CommentRangeStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSimpleFieldImpl$1CustomXmlDelRangeEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSimpleFieldImpl$1CustomXmlDelRangeStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSimpleFieldImpl$1CustomXmlInsRangeEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSimpleFieldImpl$1CustomXmlInsRangeStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSimpleFieldImpl$1CustomXmlList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSimpleFieldImpl$1CustomXmlMoveFromRangeEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSimpleFieldImpl$1CustomXmlMoveFromRangeStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSimpleFieldImpl$1CustomXmlMoveToRangeEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSimpleFieldImpl$1CustomXmlMoveToRangeStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSimpleFieldImpl$1DelList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSimpleFieldImpl$1FldSimpleList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSimpleFieldImpl$1HyperlinkList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSimpleFieldImpl$1InsList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSimpleFieldImpl$1MoveFromList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSimpleFieldImpl$1MoveFromRangeEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSimpleFieldImpl$1MoveFromRangeStartList', - 
'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSimpleFieldImpl$1MoveToList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSimpleFieldImpl$1MoveToRangeEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSimpleFieldImpl$1MoveToRangeStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSimpleFieldImpl$1OMathList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSimpleFieldImpl$1OMathParaList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSimpleFieldImpl$1PermEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSimpleFieldImpl$1PermStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSimpleFieldImpl$1ProofErrList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSimpleFieldImpl$1RList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSimpleFieldImpl$1SdtList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSimpleFieldImpl$1SmartTagList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSimpleFieldImpl$1SubDocList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSmartTagPrImpl$1AttrList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSmartTagRunImpl$1BookmarkEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSmartTagRunImpl$1BookmarkStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSmartTagRunImpl$1CommentRangeEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSmartTagRunImpl$1CommentRangeStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSmartTagRunImpl$1CustomXmlDelRangeEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSmartTagRunImpl$1CustomXmlDelRangeStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSmartTagRunImpl$1CustomXmlInsRangeEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSmartTagRunImpl$1CustomXmlInsRangeStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSmartTagRunImpl$1CustomXmlList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSmartTagRunImpl$1CustomXmlMoveFromRangeEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSmartTagRunImpl$1CustomXmlMoveFromRangeStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSmartTagRunImpl$1CustomXmlMoveToRangeEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSmartTagRunImpl$1CustomXmlMoveToRangeStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSmartTagRunImpl$1DelList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSmartTagRunImpl$1FldSimpleList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSmartTagRunImpl$1HyperlinkList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSmartTagRunImpl$1InsList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSmartTagRunImpl$1MoveFromList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSmartTagRunImpl$1MoveFromRangeEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSmartTagRunImpl$1MoveFromRangeStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSmartTagRunImpl$1MoveToList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSmartTagRunImpl$1MoveToRangeEndList', - 
'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSmartTagRunImpl$1MoveToRangeStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSmartTagRunImpl$1OMathList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSmartTagRunImpl$1OMathParaList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSmartTagRunImpl$1PermEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSmartTagRunImpl$1PermStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSmartTagRunImpl$1ProofErrList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSmartTagRunImpl$1RList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSmartTagRunImpl$1SdtList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSmartTagRunImpl$1SmartTagList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSmartTagRunImpl$1SubDocList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTStyleImpl$1TblStylePrList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTStylesImpl$1StyleList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTabsImpl$1TabList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTblGridBaseImpl$1GridColList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTblImpl$1BookmarkEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTblImpl$1BookmarkStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTblImpl$1CommentRangeEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTblImpl$1CommentRangeStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTblImpl$1CustomXmlDelRangeEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTblImpl$1CustomXmlDelRangeStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTblImpl$1CustomXmlInsRangeEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTblImpl$1CustomXmlInsRangeStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTblImpl$1CustomXmlList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTblImpl$1CustomXmlMoveFromRangeEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTblImpl$1CustomXmlMoveFromRangeStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTblImpl$1CustomXmlMoveToRangeEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTblImpl$1CustomXmlMoveToRangeStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTblImpl$1DelList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTblImpl$1InsList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTblImpl$1MoveFromList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTblImpl$1MoveFromRangeEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTblImpl$1MoveFromRangeStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTblImpl$1MoveToList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTblImpl$1MoveToRangeEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTblImpl$1MoveToRangeStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTblImpl$1OMathList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTblImpl$1OMathParaList', - 
'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTblImpl$1PermEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTblImpl$1PermStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTblImpl$1ProofErrList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTblImpl$1SdtList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTcImpl$1AltChunkList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTcImpl$1BookmarkEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTcImpl$1BookmarkStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTcImpl$1CommentRangeEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTcImpl$1CommentRangeStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTcImpl$1CustomXmlDelRangeEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTcImpl$1CustomXmlDelRangeStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTcImpl$1CustomXmlInsRangeEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTcImpl$1CustomXmlInsRangeStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTcImpl$1CustomXmlList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTcImpl$1CustomXmlMoveFromRangeEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTcImpl$1CustomXmlMoveFromRangeStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTcImpl$1CustomXmlMoveToRangeEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTcImpl$1CustomXmlMoveToRangeStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTcImpl$1DelList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTcImpl$1InsList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTcImpl$1MoveFromList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTcImpl$1MoveFromRangeEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTcImpl$1MoveFromRangeStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTcImpl$1MoveToList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTcImpl$1MoveToRangeEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTcImpl$1MoveToRangeStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTcImpl$1OMathList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTcImpl$1OMathParaList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTcImpl$1PermEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTcImpl$1PermStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTcImpl$1ProofErrList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTcImpl$1SdtList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTcImpl$1TblList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTrPrBaseImpl$1CantSplitList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTrPrBaseImpl$1CnfStyleList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTrPrBaseImpl$1DivIdList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTrPrBaseImpl$1GridAfterList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTrPrBaseImpl$1GridBeforeList', - 
'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTrPrBaseImpl$1HiddenList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTrPrBaseImpl$1JcList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTrPrBaseImpl$1TblCellSpacingList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTrPrBaseImpl$1TblHeaderList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTrPrBaseImpl$1TrHeightList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTrPrBaseImpl$1WAfterList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTrPrBaseImpl$1WBeforeList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTxbxContentImpl$1AltChunkList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTxbxContentImpl$1BookmarkEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTxbxContentImpl$1BookmarkStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTxbxContentImpl$1CommentRangeEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTxbxContentImpl$1CommentRangeStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTxbxContentImpl$1CustomXmlDelRangeEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTxbxContentImpl$1CustomXmlDelRangeStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTxbxContentImpl$1CustomXmlInsRangeEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTxbxContentImpl$1CustomXmlInsRangeStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTxbxContentImpl$1CustomXmlList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTxbxContentImpl$1CustomXmlMoveFromRangeEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTxbxContentImpl$1CustomXmlMoveFromRangeStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTxbxContentImpl$1CustomXmlMoveToRangeEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTxbxContentImpl$1CustomXmlMoveToRangeStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTxbxContentImpl$1DelList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTxbxContentImpl$1InsList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTxbxContentImpl$1MoveFromList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTxbxContentImpl$1MoveFromRangeEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTxbxContentImpl$1MoveFromRangeStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTxbxContentImpl$1MoveToList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTxbxContentImpl$1MoveToRangeEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTxbxContentImpl$1MoveToRangeStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTxbxContentImpl$1OMathList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTxbxContentImpl$1OMathParaList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTxbxContentImpl$1PList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTxbxContentImpl$1PermEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTxbxContentImpl$1PermStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTxbxContentImpl$1ProofErrList', - 
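Every entry in this removal (the list continues below before it closes) is part of one mechanical migration: the flat thirdPartyAudit.excludes list, which mixed unrelated reasons for ignoring a class, is replaced by purpose-specific calls on the task, so the build records why each class is excluded. A minimal before/after sketch in the Gradle Groovy DSL, reusing two entries that appear verbatim in the repository-azure block later in this diff (the grouping, not the particular names, is the point):

    // Before: one flat list, whatever the reason a class is excluded.
    thirdPartyAudit.excludes = [
        'org.slf4j.Logger',                  // optional dependency, simply absent
        'com.google.common.cache.Striped64'  // genuine violation: uses sun.misc.Unsafe
    ]

    // After: the reason is encoded in the method that lists the class.
    thirdPartyAudit {
        ignoreMissingClasses (
            'org.slf4j.Logger'
        )
        ignoreViolations (
            'com.google.common.cache.Striped64'
        )
    }

A third form, ignoreJarHellWithJDK, appears in the repository-s3 block near the end of this diff, covering the JAXB classes that clash with the JDK's own copies on Java 8.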
'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTxbxContentImpl$1SdtList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTxbxContentImpl$1TblList', - 'org.osgi.framework.BundleActivator', - 'org.osgi.framework.BundleContext', - 'org.osgi.framework.ServiceReference', - 'org.osgi.framework.ServiceRegistration', - 'org.osgi.util.tracker.ServiceTracker', - 'org.osgi.util.tracker.ServiceTrackerCustomizer', - 'org.slf4j.impl.StaticLoggerBinder', - 'org.slf4j.impl.StaticMDCBinder', - 'org.slf4j.impl.StaticMarkerBinder', - 'org.sqlite.SQLiteConfig', - 'org.w3.x2000.x09.xmldsig.KeyInfoType', - 'org.w3.x2000.x09.xmldsig.SignatureMethodType', - 'org.w3.x2000.x09.xmldsig.SignatureValueType', - 'org.w3.x2000.x09.xmldsig.TransformsType', - 'org.w3.x2000.x09.xmldsig.impl.SignatureTypeImpl$1ObjectList', - 'org.w3.x2000.x09.xmldsig.impl.SignedInfoTypeImpl$1ReferenceList', - 'org.w3.x2000.x09.xmldsig.impl.TransformTypeImpl$1XPathList', - 'org.w3.x2000.x09.xmldsig.impl.TransformTypeImpl$2XPathList', - 'ucar.ma2.DataType', - 'ucar.nc2.Attribute', - 'ucar.nc2.Dimension', - 'ucar.nc2.Group', - 'ucar.nc2.NetcdfFile', - 'ucar.nc2.Variable', - 'ucar.nc2.dataset.NetcdfDataset' -] - -if (project.runtimeJavaVersion == JavaVersion.VERSION_1_8) { - thirdPartyAudit.excludes += [ - // TODO: Why is this needed ? - 'com.sun.javadoc.ClassDoc', - 'com.sun.javadoc.Doc', - 'com.sun.javadoc.Doclet', - 'com.sun.javadoc.ExecutableMemberDoc', - 'com.sun.javadoc.FieldDoc', - 'com.sun.javadoc.MethodDoc', - 'com.sun.javadoc.PackageDoc', - 'com.sun.javadoc.Parameter', - 'com.sun.javadoc.ProgramElementDoc', - 'com.sun.javadoc.RootDoc', - 'com.sun.javadoc.SourcePosition', - 'com.sun.javadoc.Tag', - 'com.sun.javadoc.Type', - 'com.sun.tools.javadoc.Main' - ] -} - -if (project.runtimeJavaVersion > JavaVersion.VERSION_1_8) { - thirdPartyAudit.excludes += [ - 'javax.activation.ActivationDataFlavor', - 'javax.activation.CommandMap', - 'javax.activation.DataContentHandler', - 'javax.activation.DataHandler', - 'javax.activation.DataSource', - 'javax.activation.FileDataSource', - 'javax.activation.MailcapCommandMap', - 'javax.xml.bind.DatatypeConverter', - 'javax.xml.bind.JAXBContext', - 'javax.xml.bind.JAXBElement', - 'javax.xml.bind.Unmarshaller' - ] +thirdPartyAudit{ + ignoreMissingClasses() } if (project.inFipsJvm) { diff --git a/plugins/mapper-annotated-text/src/test/java/org/elasticsearch/index/mapper/annotatedtext/AnnotatedTextFieldMapperTests.java b/plugins/mapper-annotated-text/src/test/java/org/elasticsearch/index/mapper/annotatedtext/AnnotatedTextFieldMapperTests.java index 4156fd206722d..aa0dbe07c8305 100644 --- a/plugins/mapper-annotated-text/src/test/java/org/elasticsearch/index/mapper/annotatedtext/AnnotatedTextFieldMapperTests.java +++ b/plugins/mapper-annotated-text/src/test/java/org/elasticsearch/index/mapper/annotatedtext/AnnotatedTextFieldMapperTests.java @@ -116,7 +116,7 @@ public void testAnnotationInjection() throws IOException { // Use example of typed and untyped annotations String annotatedText = "He paid [Stormy Daniels](Stephanie+Clifford&Payee) hush money"; - SourceToParse sourceToParse = SourceToParse.source("test", "type", "1", BytesReference + SourceToParse sourceToParse = new SourceToParse("test", "type", "1", BytesReference .bytes(XContentFactory.jsonBuilder() .startObject() .field("field", annotatedText) @@ -171,7 +171,7 @@ public void testToleranceForBadAnnotationMarkup() throws IOException { new CompressedXContent(mapping), MergeReason.MAPPING_UPDATE); String annotatedText = 
"foo [bar](MissingEndBracket baz"; - SourceToParse sourceToParse = SourceToParse.source("test", "type", "1", BytesReference + SourceToParse sourceToParse = new SourceToParse("test", "type", "1", BytesReference .bytes(XContentFactory.jsonBuilder() .startObject() .field("field", annotatedText) @@ -260,7 +260,7 @@ public void testDefaults() throws IOException { assertEquals(mapping, mapper.mappingSource().toString()); - ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference + ParsedDocument doc = mapper.parse(new SourceToParse("test", "type", "1", BytesReference .bytes(XContentFactory.jsonBuilder() .startObject() .field("field", "1234") @@ -292,7 +292,7 @@ public void testEnableStore() throws IOException { assertEquals(mapping, mapper.mappingSource().toString()); - ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference + ParsedDocument doc = mapper.parse(new SourceToParse("test", "type", "1", BytesReference .bytes(XContentFactory.jsonBuilder() .startObject() .field("field", "1234") @@ -316,7 +316,7 @@ public void testDisableNorms() throws IOException { assertEquals(mapping, mapper.mappingSource().toString()); - ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference + ParsedDocument doc = mapper.parse(new SourceToParse("test", "type", "1", BytesReference .bytes(XContentFactory.jsonBuilder() .startObject() .field("field", "1234") @@ -347,7 +347,7 @@ public void testIndexOptions() throws IOException { for (String option : supportedOptions.keySet()) { jsonDoc.field(option, "1234"); } - ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference.bytes(jsonDoc.endObject()), + ParsedDocument doc = mapper.parse(new SourceToParse("test", "type", "1", BytesReference.bytes(jsonDoc.endObject()), XContentType.JSON)); for (Map.Entry entry : supportedOptions.entrySet()) { @@ -369,7 +369,7 @@ public void testDefaultPositionIncrementGap() throws IOException { assertEquals(mapping, mapper.mappingSource().toString()); - SourceToParse sourceToParse = SourceToParse.source("test", "type", "1", BytesReference + SourceToParse sourceToParse = new SourceToParse("test", "type", "1", BytesReference .bytes(XContentFactory.jsonBuilder() .startObject() .array("field", new String[] {"a", "b"}) @@ -411,7 +411,7 @@ public void testPositionIncrementGap() throws IOException { assertEquals(mapping, mapper.mappingSource().toString()); - SourceToParse sourceToParse = SourceToParse.source("test", "type", "1", BytesReference + SourceToParse sourceToParse = new SourceToParse("test", "type", "1", BytesReference .bytes(XContentFactory.jsonBuilder() .startObject() .array("field", new String[]{"a", "b"}) @@ -570,7 +570,7 @@ public void testTermVectors() throws IOException { DocumentMapper defaultMapper = parser.parse("type", new CompressedXContent(mapping)); - ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1", BytesReference + ParsedDocument doc = defaultMapper.parse(new SourceToParse("test", "type", "1", BytesReference .bytes(XContentFactory.jsonBuilder() .startObject() .field("field1", "1234") diff --git a/plugins/mapper-murmur3/src/test/java/org/elasticsearch/index/mapper/murmur3/Murmur3FieldMapperTests.java b/plugins/mapper-murmur3/src/test/java/org/elasticsearch/index/mapper/murmur3/Murmur3FieldMapperTests.java index 1df613ab3f9b3..a26e2a4872841 100644 --- a/plugins/mapper-murmur3/src/test/java/org/elasticsearch/index/mapper/murmur3/Murmur3FieldMapperTests.java +++ 
b/plugins/mapper-murmur3/src/test/java/org/elasticsearch/index/mapper/murmur3/Murmur3FieldMapperTests.java @@ -78,7 +78,7 @@ public void testDefaults() throws Exception { .field("type", "murmur3") .endObject().endObject().endObject().endObject()); DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); - ParsedDocument parsedDoc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference.bytes(XContentFactory.jsonBuilder() + ParsedDocument parsedDoc = mapper.parse(new SourceToParse("test", "type", "1", BytesReference.bytes(XContentFactory.jsonBuilder() .startObject() .field("field", "value") .endObject()), diff --git a/plugins/mapper-size/src/test/java/org/elasticsearch/index/mapper/size/SizeMappingTests.java b/plugins/mapper-size/src/test/java/org/elasticsearch/index/mapper/size/SizeMappingTests.java index dc14373026430..58d9b696f6a4e 100644 --- a/plugins/mapper-size/src/test/java/org/elasticsearch/index/mapper/size/SizeMappingTests.java +++ b/plugins/mapper-size/src/test/java/org/elasticsearch/index/mapper/size/SizeMappingTests.java @@ -57,7 +57,7 @@ public void testSizeEnabled() throws Exception { .startObject() .field("field", "value") .endObject()); - ParsedDocument doc = docMapper.parse(SourceToParse.source("test", "type", "1", source, XContentType.JSON)); + ParsedDocument doc = docMapper.parse(new SourceToParse("test", "type", "1", source, XContentType.JSON)); boolean stored = false; boolean points = false; @@ -78,7 +78,7 @@ public void testSizeDisabled() throws Exception { .startObject() .field("field", "value") .endObject()); - ParsedDocument doc = docMapper.parse(SourceToParse.source("test", "type", "1", source, XContentType.JSON)); + ParsedDocument doc = docMapper.parse(new SourceToParse("test", "type", "1", source, XContentType.JSON)); assertThat(doc.rootDoc().getField("_size"), nullValue()); } @@ -92,7 +92,7 @@ public void testSizeNotSet() throws Exception { .startObject() .field("field", "value") .endObject()); - ParsedDocument doc = docMapper.parse(SourceToParse.source("test", "type", "1", source, XContentType.JSON)); + ParsedDocument doc = docMapper.parse(new SourceToParse("test", "type", "1", source, XContentType.JSON)); assertThat(doc.rootDoc().getField("_size"), nullValue()); } diff --git a/plugins/repository-azure/build.gradle b/plugins/repository-azure/build.gradle index 13ec8031c0fd8..27597e94976fa 100644 --- a/plugins/repository-azure/build.gradle +++ b/plugins/repository-azure/build.gradle @@ -37,23 +37,28 @@ dependencyLicenses { mapping from: /stax-.*/, to: 'stax' } -thirdPartyAudit.excludes = [ - // Optional and not enabled by Elasticsearch - 'org.slf4j.Logger', - 'org.slf4j.LoggerFactory', - // uses internal java api: sun.misc.Unsafe - 'com.google.common.cache.Striped64', - 'com.google.common.cache.Striped64$1', - 'com.google.common.cache.Striped64$Cell', - 'com.google.common.hash.LittleEndianByteArray$UnsafeByteArray$1', - 'com.google.common.hash.LittleEndianByteArray$UnsafeByteArray$2', - 'com.google.common.hash.LittleEndianByteArray$UnsafeByteArray$3', - 'com.google.common.util.concurrent.AbstractFuture$UnsafeAtomicHelper', - 'com.google.common.util.concurrent.AbstractFuture$UnsafeAtomicHelper$1', - 'com.google.common.hash.LittleEndianByteArray$UnsafeByteArray', - 'com.google.common.primitives.UnsignedBytes$LexicographicalComparatorHolder$UnsafeComparator', - 'com.google.common.primitives.UnsignedBytes$LexicographicalComparatorHolder$UnsafeComparator$1', -] +thirdPartyAudit { + ignoreMissingClasses ( + // Optional and not 
enabled by Elasticsearch + 'org.slf4j.Logger', + 'org.slf4j.LoggerFactory' + ) + + ignoreViolations ( + // uses internal java api: sun.misc.Unsafe + 'com.google.common.cache.Striped64', + 'com.google.common.cache.Striped64$1', + 'com.google.common.cache.Striped64$Cell', + 'com.google.common.hash.LittleEndianByteArray$UnsafeByteArray$1', + 'com.google.common.hash.LittleEndianByteArray$UnsafeByteArray$2', + 'com.google.common.hash.LittleEndianByteArray$UnsafeByteArray$3', + 'com.google.common.util.concurrent.AbstractFuture$UnsafeAtomicHelper', + 'com.google.common.util.concurrent.AbstractFuture$UnsafeAtomicHelper$1', + 'com.google.common.hash.LittleEndianByteArray$UnsafeByteArray', + 'com.google.common.primitives.UnsignedBytes$LexicographicalComparatorHolder$UnsafeComparator', + 'com.google.common.primitives.UnsignedBytes$LexicographicalComparatorHolder$UnsafeComparator$1' + ) +} check { // also execute the QA tests when testing the plugin diff --git a/plugins/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureBlobContainer.java b/plugins/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureBlobContainer.java index b4322119dafa7..002907a0a7eb1 100644 --- a/plugins/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureBlobContainer.java +++ b/plugins/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureBlobContainer.java @@ -96,6 +96,11 @@ public void writeBlob(String blobName, InputStream inputStream, long blobSize, b } } + @Override + public void writeBlobAtomic(String blobName, InputStream inputStream, long blobSize, boolean failIfAlreadyExists) throws IOException { + writeBlob(blobName, inputStream, blobSize, failIfAlreadyExists); + } + @Override public void deleteBlob(String blobName) throws IOException { logger.trace("deleteBlob({})", blobName); diff --git a/plugins/repository-gcs/build.gradle b/plugins/repository-gcs/build.gradle index b86e5fb81f731..e7c4498633145 100644 --- a/plugins/repository-gcs/build.gradle +++ b/plugins/repository-gcs/build.gradle @@ -68,7 +68,8 @@ dependencyLicenses { mapping from: /proto-google.*/, to: 'proto-google' } -thirdPartyAudit.excludes = [ +thirdPartyAudit { + ignoreViolations ( // uses internal java api: sun.misc.Unsafe 'com.google.protobuf.UnsafeUtil', 'com.google.protobuf.UnsafeUtil$1', @@ -85,7 +86,9 @@ thirdPartyAudit.excludes = [ 'com.google.common.hash.LittleEndianByteArray$UnsafeByteArray', 'com.google.common.primitives.UnsignedBytes$LexicographicalComparatorHolder$UnsafeComparator', 'com.google.common.primitives.UnsignedBytes$LexicographicalComparatorHolder$UnsafeComparator$1', - // classes are missing + ) + + ignoreMissingClasses ( 'com.google.appengine.api.datastore.Blob', 'com.google.appengine.api.datastore.DatastoreService', 'com.google.appengine.api.datastore.DatastoreServiceFactory', @@ -112,7 +115,8 @@ thirdPartyAudit.excludes = [ // commons-logging provided dependencies 'javax.servlet.ServletContextEvent', 'javax.servlet.ServletContextListener' -] + ) +} check { // also execute the QA tests when testing the plugin diff --git a/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobContainer.java b/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobContainer.java index f445aa412e1f5..8ad9b453a9092 100644 --- a/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobContainer.java +++ 
b/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobContainer.java @@ -68,6 +68,11 @@ public void writeBlob(String blobName, InputStream inputStream, long blobSize, b blobStore.writeBlob(buildKey(blobName), inputStream, blobSize, failIfAlreadyExists); } + @Override + public void writeBlobAtomic(String blobName, InputStream inputStream, long blobSize, boolean failIfAlreadyExists) throws IOException { + writeBlob(blobName, inputStream, blobSize, failIfAlreadyExists); + } + @Override public void deleteBlob(String blobName) throws IOException { blobStore.deleteBlob(buildKey(blobName)); diff --git a/plugins/repository-hdfs/build.gradle b/plugins/repository-hdfs/build.gradle index 0f18c6af2d923..ac22fa389c614 100644 --- a/plugins/repository-hdfs/build.gradle +++ b/plugins/repository-hdfs/build.gradle @@ -304,304 +304,28 @@ if (secureFixtureSupported) { testingConventions.enabled = false } -thirdPartyAudit.excludes = [ - // classes are missing, because we added hadoop jars one by one until tests pass. - 'com.google.gson.stream.JsonReader', - 'com.google.gson.stream.JsonWriter', - 'com.jcraft.jsch.ChannelExec', - 'com.jcraft.jsch.ChannelSftp', - 'com.jcraft.jsch.ChannelSftp$LsEntry', - 'com.jcraft.jsch.JSch', - 'com.jcraft.jsch.Logger', - 'com.jcraft.jsch.Session', - 'com.jcraft.jsch.SftpATTRS', - 'com.sun.jersey.api.ParamException', - 'com.sun.jersey.api.core.HttpContext', - 'com.sun.jersey.core.spi.component.ComponentContext', - 'com.sun.jersey.core.spi.component.ComponentScope', - 'com.sun.jersey.server.impl.inject.AbstractHttpContextInjectable', - 'com.sun.jersey.spi.container.ContainerRequest', - 'com.sun.jersey.spi.container.ContainerRequestFilter', - 'com.sun.jersey.spi.container.ContainerResponseFilter', - 'com.sun.jersey.spi.container.ResourceFilter', - 'com.sun.jersey.spi.container.servlet.ServletContainer', - 'com.sun.jersey.spi.inject.Injectable', - 'com.sun.jersey.spi.inject.InjectableProvider', - 'io.netty.bootstrap.Bootstrap', - 'io.netty.bootstrap.ChannelFactory', - 'io.netty.bootstrap.ServerBootstrap', - 'io.netty.buffer.ByteBuf', - 'io.netty.buffer.Unpooled', - 'io.netty.channel.Channel', - 'io.netty.channel.ChannelFuture', - 'io.netty.channel.ChannelFutureListener', - 'io.netty.channel.ChannelHandler', - 'io.netty.channel.ChannelHandlerContext', - 'io.netty.channel.ChannelInboundHandlerAdapter', - 'io.netty.channel.ChannelInitializer', - 'io.netty.channel.ChannelOption', - 'io.netty.channel.ChannelPipeline', - 'io.netty.channel.EventLoopGroup', - 'io.netty.channel.SimpleChannelInboundHandler', - 'io.netty.channel.group.ChannelGroup', - 'io.netty.channel.group.ChannelGroupFuture', - 'io.netty.channel.group.DefaultChannelGroup', - 'io.netty.channel.nio.NioEventLoopGroup', - 'io.netty.channel.socket.SocketChannel', - 'io.netty.channel.socket.nio.NioServerSocketChannel', - 'io.netty.channel.socket.nio.NioSocketChannel', - 'io.netty.handler.codec.http.DefaultFullHttpRequest', - 'io.netty.handler.codec.http.DefaultFullHttpResponse', - 'io.netty.handler.codec.http.DefaultHttpResponse', - 'io.netty.handler.codec.http.HttpContent', - 'io.netty.handler.codec.http.HttpHeaders', - 'io.netty.handler.codec.http.HttpMethod', - 'io.netty.handler.codec.http.HttpRequest', - 'io.netty.handler.codec.http.HttpRequestDecoder', - 'io.netty.handler.codec.http.HttpRequestEncoder', - 'io.netty.handler.codec.http.HttpResponseEncoder', - 'io.netty.handler.codec.http.HttpResponseStatus', - 'io.netty.handler.codec.http.HttpVersion', - 
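The mapper-test hunks further above (annotated-text, murmur3, size) carry a second mechanical migration riding along in this diff: the SourceToParse.source(...) static factory becomes a plain public constructor with an identical argument list. Side by side, using the arguments from the SizeMappingTests hunk:

    // Before: static factory method.
    ParsedDocument doc = docMapper.parse(
        SourceToParse.source("test", "type", "1", source, XContentType.JSON));

    // After: direct construction, same arguments in the same order.
    ParsedDocument doc = docMapper.parse(
        new SourceToParse("test", "type", "1", source, XContentType.JSON));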
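The writeBlobAtomic overrides added above to AzureBlobContainer and GoogleCloudStorageBlobContainer are deliberately thin, presumably because both object stores only publish a blob once its upload has completed, so a plain write already looks atomic to readers. The shared shape, as it appears in both hunks (only the comment is added here):

    @Override
    public void writeBlobAtomic(String blobName, InputStream inputStream, long blobSize,
                                boolean failIfAlreadyExists) throws IOException {
        // No temp-name-and-rename dance: the store exposes the blob all or nothing.
        writeBlob(blobName, inputStream, blobSize, failIfAlreadyExists);
    }

HDFS cannot take this shortcut; see the HdfsBlobContainer hunk further down.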
'io.netty.handler.codec.http.QueryStringDecoder', - 'io.netty.handler.codec.string.StringEncoder', - 'io.netty.handler.ssl.SslHandler', - 'io.netty.handler.stream.ChunkedStream', - 'io.netty.handler.stream.ChunkedWriteHandler', - 'io.netty.util.concurrent.GlobalEventExecutor', - 'io.netty.util.ReferenceCountUtil', - 'javax.ws.rs.core.Context', - 'javax.ws.rs.core.MediaType', - 'javax.ws.rs.core.MultivaluedMap', - 'javax.ws.rs.core.Response$ResponseBuilder', - 'javax.ws.rs.core.Response$Status', - 'javax.ws.rs.core.Response', - 'javax.ws.rs.core.StreamingOutput', - 'javax.ws.rs.core.UriBuilder', - 'javax.ws.rs.ext.ExceptionMapper', - 'jdiff.JDiff', - 'org.apache.avalon.framework.logger.Logger', - 'org.apache.avro.Schema', - 'org.apache.avro.file.DataFileReader', - 'org.apache.avro.file.FileReader', - 'org.apache.avro.file.SeekableInput', - 'org.apache.avro.generic.GenericDatumReader', - 'org.apache.avro.generic.GenericDatumWriter', - 'org.apache.avro.io.BinaryDecoder', - 'org.apache.avro.io.BinaryEncoder', - 'org.apache.avro.io.DatumReader', - 'org.apache.avro.io.DatumWriter', - 'org.apache.avro.io.DecoderFactory', - 'org.apache.avro.io.EncoderFactory', - 'org.apache.avro.io.JsonEncoder', - 'org.apache.avro.reflect.ReflectData', - 'org.apache.avro.reflect.ReflectDatumReader', - 'org.apache.avro.reflect.ReflectDatumWriter', - 'org.apache.avro.specific.SpecificDatumReader', - 'org.apache.avro.specific.SpecificDatumWriter', - 'org.apache.avro.specific.SpecificRecord', - 'org.apache.commons.beanutils.BeanUtils', - 'org.apache.commons.beanutils.DynaBean', - 'org.apache.commons.beanutils.DynaClass', - 'org.apache.commons.beanutils.DynaProperty', - 'org.apache.commons.beanutils.PropertyUtils', - 'org.apache.commons.compress.archivers.tar.TarArchiveEntry', - 'org.apache.commons.compress.archivers.tar.TarArchiveInputStream', - 'org.apache.commons.daemon.Daemon', - 'org.apache.commons.daemon.DaemonContext', - 'org.apache.commons.digester.AbstractObjectCreationFactory', - 'org.apache.commons.digester.CallMethodRule', - 'org.apache.commons.digester.Digester', - 'org.apache.commons.digester.ObjectCreationFactory', - 'org.apache.commons.digester.substitution.MultiVariableExpander', - 'org.apache.commons.digester.substitution.VariableSubstitutor', - 'org.apache.commons.digester.xmlrules.DigesterLoader', - 'org.apache.commons.jxpath.JXPathContext', - 'org.apache.commons.jxpath.ri.JXPathContextReferenceImpl', - 'org.apache.commons.jxpath.ri.QName', - 'org.apache.commons.jxpath.ri.compiler.NodeNameTest', - 'org.apache.commons.jxpath.ri.compiler.NodeTest', - 'org.apache.commons.jxpath.ri.compiler.NodeTypeTest', - 'org.apache.commons.jxpath.ri.model.NodeIterator', - 'org.apache.commons.jxpath.ri.model.NodePointer', - 'org.apache.commons.jxpath.ri.model.NodePointerFactory', - 'org.apache.commons.math3.util.ArithmeticUtils', - 'org.apache.commons.net.ftp.FTPClient', - 'org.apache.commons.net.ftp.FTPFile', - 'org.apache.commons.net.ftp.FTPReply', - 'org.apache.commons.net.util.SubnetUtils$SubnetInfo', - 'org.apache.commons.net.util.SubnetUtils', - 'org.apache.curator.ensemble.fixed.FixedEnsembleProvider', - 'org.apache.curator.framework.CuratorFramework', - 'org.apache.curator.framework.CuratorFrameworkFactory$Builder', - 'org.apache.curator.framework.CuratorFrameworkFactory', - 'org.apache.curator.framework.api.ACLBackgroundPathAndBytesable', - 'org.apache.curator.framework.api.ACLProvider', - 'org.apache.curator.framework.api.BackgroundPathAndBytesable', - 'org.apache.curator.framework.api.ChildrenDeletable', - 
'org.apache.curator.framework.api.CreateBuilder', - 'org.apache.curator.framework.api.DeleteBuilder', - 'org.apache.curator.framework.api.ExistsBuilder', - 'org.apache.curator.framework.api.GetChildrenBuilder', - 'org.apache.curator.framework.api.GetDataBuilder', - 'org.apache.curator.framework.api.ProtectACLCreateModePathAndBytesable', - 'org.apache.curator.framework.api.SetDataBuilder', - 'org.apache.curator.framework.api.WatchPathable', - 'org.apache.curator.framework.imps.DefaultACLProvider', - 'org.apache.curator.framework.listen.ListenerContainer', - 'org.apache.curator.framework.recipes.cache.ChildData', - 'org.apache.curator.framework.recipes.cache.PathChildrenCache$StartMode', - 'org.apache.curator.framework.recipes.cache.PathChildrenCache', - 'org.apache.curator.framework.recipes.cache.PathChildrenCacheEvent$Type', - 'org.apache.curator.framework.recipes.cache.PathChildrenCacheEvent', - 'org.apache.curator.framework.recipes.cache.PathChildrenCacheListener', - 'org.apache.curator.framework.recipes.locks.Reaper$Mode', - 'org.apache.curator.framework.recipes.locks.Reaper', - 'org.apache.curator.framework.recipes.shared.SharedCount', - 'org.apache.curator.framework.recipes.shared.VersionedValue', - 'org.apache.curator.retry.ExponentialBackoffRetry', - 'org.apache.curator.retry.RetryNTimes', - 'org.apache.curator.utils.CloseableScheduledExecutorService', - 'org.apache.curator.utils.CloseableUtils', - 'org.apache.curator.utils.EnsurePath', - 'org.apache.curator.utils.PathUtils', - 'org.apache.curator.utils.ThreadUtils', - 'org.apache.curator.utils.ZKPaths', - 'org.apache.directory.shared.kerberos.components.EncryptionKey', - 'org.apache.directory.server.kerberos.shared.keytab.Keytab', - 'org.apache.directory.server.kerberos.shared.keytab.KeytabEntry', - 'org.apache.http.NameValuePair', - 'org.apache.http.client.utils.URIBuilder', - 'org.apache.http.client.utils.URLEncodedUtils', - 'org.apache.log.Hierarchy', - 'org.apache.log.Logger', - 'org.apache.tools.ant.BuildException', - 'org.apache.tools.ant.DirectoryScanner', - 'org.apache.tools.ant.Task', - 'org.apache.tools.ant.taskdefs.Execute', - 'org.apache.tools.ant.types.FileSet', - 'org.apache.xml.serialize.OutputFormat', - 'org.apache.xml.serialize.XMLSerializer', - 'org.apache.zookeeper.AsyncCallback$StatCallback', - 'org.apache.zookeeper.AsyncCallback$StringCallback', - 'org.apache.zookeeper.CreateMode', - 'org.apache.zookeeper.KeeperException$Code', - 'org.apache.zookeeper.KeeperException', - 'org.apache.zookeeper.WatchedEvent', - 'org.apache.zookeeper.Watcher$Event$EventType', - 'org.apache.zookeeper.Watcher$Event$KeeperState', - 'org.apache.zookeeper.Watcher', - 'org.apache.zookeeper.ZKUtil', - 'org.apache.zookeeper.ZooDefs$Ids', - 'org.apache.zookeeper.ZooKeeper', - 'org.apache.zookeeper.data.ACL', - 'org.apache.zookeeper.data.Id', - 'org.apache.zookeeper.data.Stat', - 'org.codehaus.jackson.JsonEncoding', - 'org.codehaus.jackson.JsonFactory', - 'org.codehaus.jackson.JsonGenerator', - 'org.codehaus.jackson.JsonGenerator$Feature', - 'org.codehaus.jackson.map.MappingJsonFactory', - 'org.codehaus.jackson.map.ObjectMapper', - 'org.codehaus.jackson.map.ObjectReader', - 'org.codehaus.jackson.map.ObjectWriter', - 'org.codehaus.jackson.node.ContainerNode', - 'org.codehaus.jackson.util.MinimalPrettyPrinter', - 'org.fusesource.leveldbjni.JniDBFactory', - 'org.iq80.leveldb.DB', - 'org.iq80.leveldb.Options', - 'org.iq80.leveldb.WriteBatch', - 'org.mortbay.jetty.Connector', - 'org.mortbay.jetty.Handler', - 
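Unlike the azure and gcs lists, this repository-hdfs removal (still running below) is not ported entry by entry: at the close of the hunk, the hundreds of missing-class entries collapse into a single no-argument ignoreMissingClasses() call, which as used here waves through every unresolvable class for the module, leaving only the genuine JDK-internal violations spelled out. In sketch form, with one representative violation kept:

    thirdPartyAudit {
        // No arguments: do not fail on any class the audit cannot resolve.
        ignoreMissingClasses()
        ignoreViolations (
            'org.apache.hadoop.io.nativeio.NativeIO'  // uses sun.misc.Unsafe
        )
    }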
'org.mortbay.jetty.InclusiveByteRange', - 'org.mortbay.jetty.MimeTypes', - 'org.mortbay.jetty.NCSARequestLog', - 'org.mortbay.jetty.RequestLog', - 'org.mortbay.jetty.Server', - 'org.mortbay.jetty.handler.ContextHandler$SContext', - 'org.mortbay.jetty.handler.ContextHandler', - 'org.mortbay.jetty.handler.ContextHandlerCollection', - 'org.mortbay.jetty.handler.HandlerCollection', - 'org.mortbay.jetty.handler.RequestLogHandler', - 'org.mortbay.jetty.nio.SelectChannelConnector', - 'org.mortbay.jetty.security.SslSelectChannelConnector', - 'org.mortbay.jetty.security.SslSocketConnector', - 'org.mortbay.jetty.servlet.AbstractSessionManager', - 'org.mortbay.jetty.servlet.Context', - 'org.mortbay.jetty.servlet.DefaultServlet', - 'org.mortbay.jetty.servlet.FilterHolder', - 'org.mortbay.jetty.servlet.FilterMapping', - 'org.mortbay.jetty.servlet.ServletHandler', - 'org.mortbay.jetty.servlet.ServletHolder', - 'org.mortbay.jetty.servlet.SessionHandler', - 'org.mortbay.jetty.webapp.WebAppContext', - 'org.mortbay.thread.QueuedThreadPool', - 'org.mortbay.util.MultiException', - 'org.mortbay.util.ajax.JSON$Convertible', - 'org.mortbay.util.ajax.JSON$Output', - 'org.mortbay.util.ajax.JSON', - 'org.znerd.xmlenc.XMLOutputter', - - // internal java api: sun.net.dns.ResolverConfiguration - // internal java api: sun.net.util.IPAddressUtil - 'org.apache.hadoop.security.SecurityUtil$QualifiedHostResolver', - - // internal java api: sun.misc.Unsafe - 'com.google.common.primitives.UnsignedBytes$LexicographicalComparatorHolder$UnsafeComparator', - 'com.google.common.primitives.UnsignedBytes$LexicographicalComparatorHolder$UnsafeComparator$1', - 'org.apache.hadoop.io.FastByteComparisons$LexicographicalComparerHolder$UnsafeComparer', - 'org.apache.hadoop.io.FastByteComparisons$LexicographicalComparerHolder$UnsafeComparer$1', - 'org.apache.hadoop.io.nativeio.NativeIO', - 'org.apache.hadoop.hdfs.shortcircuit.ShortCircuitShm', - 'org.apache.hadoop.hdfs.shortcircuit.ShortCircuitShm$Slot', - - // internal java api: sun.nio.ch.DirectBuffer - // internal java api: sun.misc.Cleaner - 'org.apache.hadoop.io.nativeio.NativeIO$POSIX', - 'org.apache.hadoop.crypto.CryptoStreamUtils', - - // internal java api: sun.misc.SignalHandler - 'org.apache.hadoop.util.SignalLogger$Handler', - - // we are not pulling in slf4j-ext, this is okay, Log4j will fallback gracefully - 'org.slf4j.ext.EventData', - - 'org.apache.log4j.AsyncAppender', - 'org.apache.log4j.helpers.ISO8601DateFormat', - 'org.apache.log4j.spi.ThrowableInformation', - - // New optional dependencies in 2.8 - 'com.nimbusds.jose.JWSObject$State', - 'com.nimbusds.jose.crypto.RSASSAVerifier', - 'com.nimbusds.jwt.ReadOnlyJWTClaimsSet', - 'com.nimbusds.jwt.SignedJWT', - 'com.squareup.okhttp.Call', - 'com.squareup.okhttp.MediaType', - 'com.squareup.okhttp.OkHttpClient', - 'com.squareup.okhttp.Request$Builder', - 'com.squareup.okhttp.RequestBody', - 'com.squareup.okhttp.Response', - 'com.squareup.okhttp.ResponseBody' -] - -if (project.runtimeJavaVersion > JavaVersion.VERSION_1_8) { - thirdPartyAudit.excludes += ['javax.xml.bind.annotation.adapters.HexBinaryAdapter'] -} - -if (project.runtimeJavaVersion == JavaVersion.VERSION_1_8) { - thirdPartyAudit.excludes += [ - // TODO: Why is this needed ? 
- 'com.sun.javadoc.AnnotationDesc', - 'com.sun.javadoc.AnnotationTypeDoc', - 'com.sun.javadoc.ClassDoc', - 'com.sun.javadoc.ConstructorDoc', - 'com.sun.javadoc.Doc', - 'com.sun.javadoc.DocErrorReporter', - 'com.sun.javadoc.FieldDoc', - 'com.sun.javadoc.LanguageVersion', - 'com.sun.javadoc.MethodDoc', - 'com.sun.javadoc.PackageDoc', - 'com.sun.javadoc.ProgramElementDoc', - 'com.sun.javadoc.RootDoc', - 'com.sun.tools.doclets.standard.Standard' - ] +thirdPartyAudit { + ignoreMissingClasses() + ignoreViolations ( + // internal java api: sun.net.dns.ResolverConfiguration + // internal java api: sun.net.util.IPAddressUtil + 'org.apache.hadoop.security.SecurityUtil$QualifiedHostResolver', + + // internal java api: sun.misc.Unsafe + 'com.google.common.primitives.UnsignedBytes$LexicographicalComparatorHolder$UnsafeComparator', + 'com.google.common.primitives.UnsignedBytes$LexicographicalComparatorHolder$UnsafeComparator$1', + 'org.apache.hadoop.io.FastByteComparisons$LexicographicalComparerHolder$UnsafeComparer', + 'org.apache.hadoop.io.FastByteComparisons$LexicographicalComparerHolder$UnsafeComparer$1', + 'org.apache.hadoop.io.nativeio.NativeIO', + 'org.apache.hadoop.hdfs.shortcircuit.ShortCircuitShm', + 'org.apache.hadoop.hdfs.shortcircuit.ShortCircuitShm$Slot', + + // internal java api: sun.nio.ch.DirectBuffer + // internal java api: sun.misc.Cleaner + 'org.apache.hadoop.io.nativeio.NativeIO$POSIX', + 'org.apache.hadoop.crypto.CryptoStreamUtils', + + // internal java api: sun.misc.SignalHandler + 'org.apache.hadoop.util.SignalLogger$Handler' + ) } diff --git a/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsBlobContainer.java b/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsBlobContainer.java index 580d033354e58..e9b45a9b52e70 100644 --- a/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsBlobContainer.java +++ b/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsBlobContainer.java @@ -21,11 +21,13 @@ import org.apache.hadoop.fs.CreateFlag; import org.apache.hadoop.fs.FSDataOutputStream; import org.apache.hadoop.fs.FileStatus; +import org.apache.hadoop.fs.Options; import org.apache.hadoop.fs.Options.CreateOpts; import org.apache.hadoop.fs.Path; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.blobstore.BlobMetaData; import org.elasticsearch.common.blobstore.BlobPath; +import org.elasticsearch.common.blobstore.fs.FsBlobContainer; import org.elasticsearch.common.blobstore.support.AbstractBlobContainer; import org.elasticsearch.common.blobstore.support.PlainBlobMetaData; import org.elasticsearch.repositories.hdfs.HdfsBlobStore.Operation; @@ -95,19 +97,13 @@ public void writeBlob(String blobName, InputStream inputStream, long blobSize, b store.execute((Operation<Void>) fileContext -> { Path blob = new Path(path, blobName); // we pass CREATE, which means it fails if a blob already exists. - EnumSet<CreateFlag> flags = failIfAlreadyExists ? EnumSet.of(CreateFlag.CREATE, CreateFlag.SYNC_BLOCK) : - EnumSet.of(CreateFlag.CREATE, CreateFlag.OVERWRITE, CreateFlag.SYNC_BLOCK); - CreateOpts[] opts = {CreateOpts.bufferSize(bufferSize)}; - try (FSDataOutputStream stream = fileContext.create(blob, flags, opts)) { + EnumSet<CreateFlag> flags = failIfAlreadyExists ?
EnumSet.of(CreateFlag.CREATE, CreateFlag.SYNC_BLOCK) + : EnumSet.of(CreateFlag.CREATE, CreateFlag.OVERWRITE, CreateFlag.SYNC_BLOCK); + try (FSDataOutputStream stream = fileContext.create(blob, flags, CreateOpts.bufferSize(bufferSize))) { int bytesRead; byte[] buffer = new byte[bufferSize]; while ((bytesRead = inputStream.read(buffer)) != -1) { stream.write(buffer, 0, bytesRead); - // For safety we also hsync each write as well, because of its docs: - // SYNC_BLOCK - to force closed blocks to the disk device - // "In addition Syncable.hsync() should be called after each write, - // if true synchronous behavior is required" - stream.hsync(); } } catch (org.apache.hadoop.fs.FileAlreadyExistsException faee) { throw new FileAlreadyExistsException(blob.toString(), null, faee.getMessage()); @@ -116,6 +112,29 @@ public void writeBlob(String blobName, InputStream inputStream, long blobSize, b }); } + @Override + public void writeBlobAtomic(String blobName, InputStream inputStream, long blobSize, boolean failIfAlreadyExists) throws IOException { + final String tempBlob = FsBlobContainer.tempBlobName(blobName); + store.execute((Operation<Void>) fileContext -> { + final Path tempBlobPath = new Path(path, tempBlob); + try (FSDataOutputStream stream = fileContext.create( + tempBlobPath, EnumSet.of(CreateFlag.CREATE, CreateFlag.SYNC_BLOCK), CreateOpts.bufferSize(bufferSize))) { + int bytesRead; + byte[] buffer = new byte[bufferSize]; + while ((bytesRead = inputStream.read(buffer)) != -1) { + stream.write(buffer, 0, bytesRead); + } + } + final Path blob = new Path(path, blobName); + try { + fileContext.rename(tempBlobPath, blob, failIfAlreadyExists ? Options.Rename.NONE : Options.Rename.OVERWRITE); + } catch (org.apache.hadoop.fs.FileAlreadyExistsException faee) { + throw new FileAlreadyExistsException(blob.toString(), null, faee.getMessage()); + } + return null; + }); + } + @Override public Map<String, BlobMetaData> listBlobsByPrefix(@Nullable final String prefix) throws IOException { FileStatus[] files = store.execute(fileContext -> (fileContext.util().listStatus(path, diff --git a/plugins/repository-s3/build.gradle b/plugins/repository-s3/build.gradle index a584ec2767b41..58dddb142c0dd 100644 --- a/plugins/repository-s3/build.gradle +++ b/plugins/repository-s3/build.gradle @@ -436,7 +436,7 @@ if (useFixture) { project.check.dependsOn(integTestECS) } -thirdPartyAudit.excludes = [ +thirdPartyAudit.ignoreMissingClasses ( // classes are missing 'javax.servlet.ServletContextEvent', 'javax.servlet.ServletContextListener', @@ -451,12 +451,12 @@ 'software.amazon.ion.system.IonBinaryWriterBuilder', 'software.amazon.ion.system.IonSystemBuilder', 'software.amazon.ion.system.IonTextWriterBuilder', - 'software.amazon.ion.system.IonWriterBuilder', -] + 'software.amazon.ion.system.IonWriterBuilder' +) // jarhell with jdk (intentionally, because jaxb was removed from default modules in java 9) if (project.runtimeJavaVersion <= JavaVersion.VERSION_1_8) { - thirdPartyAudit.excludes += [ + thirdPartyAudit.ignoreJarHellWithJDK ( 'javax.xml.bind.Binder', 'javax.xml.bind.ContextFinder$1', 'javax.xml.bind.ContextFinder', @@ -558,9 +558,9 @@ 'javax.xml.bind.util.JAXBSource', 'javax.xml.bind.util.Messages', 'javax.xml.bind.util.ValidationEventCollector' - ] + ) } else { - thirdPartyAudit.excludes += ['javax.activation.DataHandler'] + thirdPartyAudit.ignoreMissingClasses 'javax.activation.DataHandler' } // AWS SDK is exposing some deprecated methods which we call
using a delegate: diff --git a/plugins/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3BlobContainer.java b/plugins/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3BlobContainer.java index b7cc2b89605d3..fc3f80b5b32a2 100644 --- a/plugins/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3BlobContainer.java +++ b/plugins/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3BlobContainer.java @@ -105,6 +105,11 @@ public void writeBlob(String blobName, InputStream inputStream, long blobSize, b }); } + @Override + public void writeBlobAtomic(String blobName, InputStream inputStream, long blobSize, boolean failIfAlreadyExists) throws IOException { + writeBlob(blobName, inputStream, blobSize, failIfAlreadyExists); + } + @Override public void deleteBlob(String blobName) throws IOException { if (blobExists(blobName) == false) { diff --git a/plugins/transport-nio/build.gradle b/plugins/transport-nio/build.gradle index c671eaac3f037..1bb0ca841bf6b 100644 --- a/plugins/transport-nio/build.gradle +++ b/plugins/transport-nio/build.gradle @@ -42,9 +42,8 @@ dependencyLicenses { mapping from: /netty-.*/, to: 'netty' } -thirdPartyAudit.excludes = [ - // classes are missing - +thirdPartyAudit { + ignoreMissingClasses ( // from io.netty.handler.codec.protobuf.ProtobufDecoder (netty) 'com.google.protobuf.ExtensionRegistry', 'com.google.protobuf.MessageLite$Builder', @@ -55,12 +54,6 @@ thirdPartyAudit.excludes = [ 'org.apache.commons.logging.Log', 'org.apache.commons.logging.LogFactory', - // from io.netty.handler.ssl.OpenSslEngine (netty) - 'io.netty.internal.tcnative.Buffer', - 'io.netty.internal.tcnative.Library', - 'io.netty.internal.tcnative.SSL', - 'io.netty.internal.tcnative.SSLContext', - // from io.netty.handler.ssl.util.BouncyCastleSelfSignedCertGenerator (netty) 'org.bouncycastle.cert.X509v3CertificateBuilder', 'org.bouncycastle.cert.jcajce.JcaX509CertificateConverter', @@ -114,15 +107,28 @@ thirdPartyAudit.excludes = [ 'net.jpountz.lz4.LZ4FastDecompressor', 'net.jpountz.xxhash.StreamingXXHash32', 'net.jpountz.xxhash.XXHashFactory', + 'org.eclipse.jetty.alpn.ALPN$ClientProvider', + 'org.eclipse.jetty.alpn.ALPN$ServerProvider', + 'org.eclipse.jetty.alpn.ALPN', + + 'org.conscrypt.AllocatedBuffer', + 'org.conscrypt.BufferAllocator', + 'org.conscrypt.Conscrypt', + 'org.conscrypt.HandshakeListener', + + // from io.netty.handler.ssl.OpenSslEngine (netty) + 'io.netty.internal.tcnative.Buffer', + 'io.netty.internal.tcnative.Library', + 'io.netty.internal.tcnative.SSL', + 'io.netty.internal.tcnative.SSLContext', 'io.netty.internal.tcnative.CertificateCallback', 'io.netty.internal.tcnative.CertificateVerifier', 'io.netty.internal.tcnative.SessionTicketKey', 'io.netty.internal.tcnative.SniHostNameMatcher', - 'org.eclipse.jetty.alpn.ALPN$ClientProvider', - 'org.eclipse.jetty.alpn.ALPN$ServerProvider', - 'org.eclipse.jetty.alpn.ALPN', + ) + + ignoreViolations ( - 'io.netty.handler.ssl.util.OpenJdkSelfSignedCertGenerator', 'io.netty.util.internal.PlatformDependent0', 'io.netty.util.internal.PlatformDependent0$1', 'io.netty.util.internal.PlatformDependent0$2', @@ -140,15 +146,13 @@ thirdPartyAudit.excludes = [ 'io.netty.util.internal.shaded.org.jctools.util.UnsafeAccess', 'io.netty.util.internal.shaded.org.jctools.util.UnsafeRefArrayAccess', - 'org.conscrypt.AllocatedBuffer', - 'org.conscrypt.BufferAllocator', - 'org.conscrypt.Conscrypt', - 'org.conscrypt.HandshakeListener' -] + 'io.netty.handler.ssl.util.OpenJdkSelfSignedCertGenerator' + ) +} if 
(project.inFipsJvm == false) { // BouncyCastleFIPS provides this class, so the exclusion is invalid when running CI in // a FIPS JVM with BouncyCastleFIPS Provider - thirdPartyAudit.excludes += [ + thirdPartyAudit.ignoreMissingClasses ( 'org.bouncycastle.asn1.x500.X500Name' - ] -} \ No newline at end of file + ) +} diff --git a/qa/evil-tests/build.gradle b/qa/evil-tests/build.gradle index 738b95271be3e..62614ca36cda6 100644 --- a/qa/evil-tests/build.gradle +++ b/qa/evil-tests/build.gradle @@ -35,14 +35,17 @@ unitTest { systemProperty 'tests.security.manager', 'false' } -thirdPartyAudit.excludes = [ - // uses internal java api: sun.misc.Unsafe - 'com.google.common.cache.Striped64', - 'com.google.common.cache.Striped64$1', - 'com.google.common.cache.Striped64$Cell', - 'com.google.common.primitives.UnsignedBytes$LexicographicalComparatorHolder$UnsafeComparator', - 'com.google.common.primitives.UnsignedBytes$LexicographicalComparatorHolder$UnsafeComparator$1', +thirdPartyAudit { + ignoreMissingClasses ( + 'com.ibm.icu.lang.UCharacter' + ) - // missing class - 'com.ibm.icu.lang.UCharacter', -] + ignoreViolations ( + // uses internal java api: sun.misc.Unsafe + 'com.google.common.cache.Striped64', + 'com.google.common.cache.Striped64$1', + 'com.google.common.cache.Striped64$Cell', + 'com.google.common.primitives.UnsignedBytes$LexicographicalComparatorHolder$UnsafeComparator', + 'com.google.common.primitives.UnsignedBytes$LexicographicalComparatorHolder$UnsafeComparator$1' + ) +} diff --git a/qa/evil-tests/src/test/java/org/elasticsearch/common/logging/EvilLoggerTests.java b/qa/evil-tests/src/test/java/org/elasticsearch/common/logging/EvilLoggerTests.java index 8438c002c2a4e..e32447c47b092 100644 --- a/qa/evil-tests/src/test/java/org/elasticsearch/common/logging/EvilLoggerTests.java +++ b/qa/evil-tests/src/test/java/org/elasticsearch/common/logging/EvilLoggerTests.java @@ -126,7 +126,7 @@ public void testDeprecationLogger() throws IOException, UserException { assertLogLine( deprecationEvents.get(i), Level.WARN, - "org.elasticsearch.common.logging.DeprecationLogger.deprecated", + "org.elasticsearch.common.logging.DeprecationLogger\\$2\\.run", "This is a deprecation message"); } } @@ -200,7 +200,7 @@ public void testConcurrentDeprecationLogger() throws IOException, UserException, assertLogLine( deprecationEvents.get(i), Level.WARN, - "org.elasticsearch.common.logging.DeprecationLogger.deprecated", + "org.elasticsearch.common.logging.DeprecationLogger\\$2\\.run", "This is a maybe logged deprecation message" + i); } @@ -242,13 +242,13 @@ public void testDeprecationLoggerMaybeLog() throws IOException, UserException { assertLogLine( deprecationEvents.get(0), Level.WARN, - "org.elasticsearch.common.logging.DeprecationLogger.deprecated", + "org.elasticsearch.common.logging.DeprecationLogger\\$2\\.run", "This is a maybe logged deprecation message"); for (int k = 0; k < 128; k++) { assertLogLine( deprecationEvents.get(1 + k), Level.WARN, - "org.elasticsearch.common.logging.DeprecationLogger.deprecated", + "org.elasticsearch.common.logging.DeprecationLogger\\$2\\.run", "This is a maybe logged deprecation message" + k); } } @@ -276,7 +276,7 @@ public void testDeprecatedSettings() throws IOException, UserException { assertLogLine( deprecationEvents.get(0), Level.WARN, - "org.elasticsearch.common.logging.DeprecationLogger.deprecated", + "org.elasticsearch.common.logging.DeprecationLogger\\$2\\.run", "\\[deprecated.foo\\] setting was deprecated in Elasticsearch and will be removed in a future release! 
" + "See the breaking changes documentation for the next major version."); } diff --git a/qa/full-cluster-restart/build.gradle b/qa/full-cluster-restart/build.gradle index 4a5f59b6a7463..39bec5ac0b3df 100644 --- a/qa/full-cluster-restart/build.gradle +++ b/qa/full-cluster-restart/build.gradle @@ -19,7 +19,6 @@ import org.elasticsearch.gradle.Version -import org.elasticsearch.gradle.VersionCollection import org.elasticsearch.gradle.test.RestIntegTestTask apply plugin: 'elasticsearch.standalone-test' diff --git a/qa/full-cluster-restart/src/test/java/org/elasticsearch/upgrades/FullClusterRestartIT.java b/qa/full-cluster-restart/src/test/java/org/elasticsearch/upgrades/FullClusterRestartIT.java index 7519cade0f29b..e8bd2d95697a8 100644 --- a/qa/full-cluster-restart/src/test/java/org/elasticsearch/upgrades/FullClusterRestartIT.java +++ b/qa/full-cluster-restart/src/test/java/org/elasticsearch/upgrades/FullClusterRestartIT.java @@ -33,6 +33,7 @@ import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.json.JsonXContent; import org.elasticsearch.common.xcontent.support.XContentMapValues; +import org.elasticsearch.rest.action.document.RestBulkAction; import org.elasticsearch.rest.action.document.RestGetAction; import org.elasticsearch.rest.action.document.RestUpdateAction; import org.elasticsearch.rest.action.search.RestExplainAction; @@ -495,6 +496,7 @@ public void testRollover() throws IOException { Request bulkRequest = new Request("POST", "/" + index + "_write/doc/_bulk"); bulkRequest.setJsonEntity(bulk.toString()); bulkRequest.addParameter("refresh", ""); + bulkRequest.setOptions(expectWarnings(RestBulkAction.TYPES_DEPRECATION_MESSAGE)); assertThat(EntityUtils.toString(client().performRequest(bulkRequest).getEntity()), containsString("\"errors\":false")); if (isRunningAgainstOldCluster()) { @@ -1070,6 +1072,7 @@ private void checkSnapshot(String snapshotName, int count, Version tookOnVersion Request writeToRestoredRequest = new Request("POST", "/restored_" + index + "/doc/_bulk"); writeToRestoredRequest.addParameter("refresh", "true"); writeToRestoredRequest.setJsonEntity(bulk.toString()); + writeToRestoredRequest.setOptions(expectWarnings(RestBulkAction.TYPES_DEPRECATION_MESSAGE)); assertThat(EntityUtils.toString(client().performRequest(writeToRestoredRequest).getEntity()), containsString("\"errors\":false")); // And count to make sure the add worked diff --git a/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/IndexingIT.java b/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/IndexingIT.java index 1d8872da4f57c..c80218c50ebe9 100644 --- a/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/IndexingIT.java +++ b/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/IndexingIT.java @@ -22,6 +22,7 @@ import org.elasticsearch.common.Booleans; import org.elasticsearch.common.Strings; import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.rest.action.document.RestBulkAction; import org.elasticsearch.Version; import org.elasticsearch.client.Request; import org.elasticsearch.client.Response; @@ -30,7 +31,7 @@ import java.nio.charset.StandardCharsets; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; -import static org.elasticsearch.rest.action.search.RestSearchAction.TOTAL_HIT_AS_INT_PARAM; +import static org.elasticsearch.rest.action.search.RestSearchAction.TOTAL_HITS_AS_INT_PARAM; import static org.hamcrest.Matchers.equalTo; /** @@ -152,13 +153,14 @@ private void 
bulk(String index, String valueSuffix, int count) throws IOExceptio } Request bulk = new Request("POST", "/_bulk"); bulk.addParameter("refresh", "true"); + bulk.setOptions(expectWarnings(RestBulkAction.TYPES_DEPRECATION_MESSAGE)); bulk.setJsonEntity(b.toString()); client().performRequest(bulk); } private void assertCount(String index, int count) throws IOException { Request searchTestIndexRequest = new Request("POST", "/" + index + "/_search"); - searchTestIndexRequest.addParameter(TOTAL_HIT_AS_INT_PARAM, "true"); + searchTestIndexRequest.addParameter(TOTAL_HITS_AS_INT_PARAM, "true"); searchTestIndexRequest.addParameter("filter_path", "hits.total"); Response searchTestIndexResponse = client().performRequest(searchTestIndexRequest); assertEquals("{\"hits\":{\"total\":" + count + "}}", diff --git a/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/XPackIT.java b/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/XPackIT.java index cecdb0a6a2d2f..1704deada1762 100644 --- a/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/XPackIT.java +++ b/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/XPackIT.java @@ -21,6 +21,7 @@ import org.apache.http.util.EntityUtils; import org.junit.Before; import org.elasticsearch.client.Request; +import org.elasticsearch.rest.action.document.RestBulkAction; import java.io.IOException; @@ -59,6 +60,7 @@ public void testBasicFeature() throws IOException { + "{\"index\":{}}\n" + "{\"f\": \"2\"}\n"); bulk.addParameter("refresh", "true"); + bulk.setOptions(expectWarnings(RestBulkAction.TYPES_DEPRECATION_MESSAGE)); client().performRequest(bulk); Request sql = new Request("POST", "/_sql"); diff --git a/qa/smoke-test-client/build.gradle b/qa/smoke-test-client/build.gradle index a575a131d8759..637913083b48c 100644 --- a/qa/smoke-test-client/build.gradle +++ b/qa/smoke-test-client/build.gradle @@ -40,3 +40,13 @@ singleNodeIntegTestCluster { integTestCluster.dependsOn(singleNodeIntegTestRunner, 'singleNodeIntegTestCluster#stop') check.dependsOn(integTest) + + +testingConventions { + naming.clear() + naming { + IT { + baseClass 'org.elasticsearch.smoketest.ESSmokeClientTestCase' + } + } +} \ No newline at end of file diff --git a/qa/smoke-test-ingest-with-all-dependencies/build.gradle b/qa/smoke-test-ingest-with-all-dependencies/build.gradle index b2295f535ba38..9267f90cd7e0a 100644 --- a/qa/smoke-test-ingest-with-all-dependencies/build.gradle +++ b/qa/smoke-test-ingest-with-all-dependencies/build.gradle @@ -27,3 +27,11 @@ dependencies { testCompile project(path: ':modules:lang-painless', configuration: 'runtime') testCompile project(path: ':modules:reindex', configuration: 'runtime') } + +testingConventions { + naming { + IT { + baseClass 'org.elasticsearch.ingest.AbstractScriptTestCase' + } + } +} diff --git a/qa/vagrant/build.gradle b/qa/vagrant/build.gradle index 1f83f431dc3eb..d1ded46d2b684 100644 --- a/qa/vagrant/build.gradle +++ b/qa/vagrant/build.gradle @@ -77,7 +77,7 @@ tasks.unitTest.enabled = false tasks.dependencyLicenses.enabled = false tasks.dependenciesInfo.enabled = false -tasks.thirdPartyAudit.excludes = [ +tasks.thirdPartyAudit.ignoreMissingClasses ( // commons-logging optional dependencies 'org.apache.avalon.framework.logger.Logger', 'org.apache.log.Hierarchy', @@ -89,4 +89,4 @@ tasks.thirdPartyAudit.excludes = [ // commons-logging provided dependencies 'javax.servlet.ServletContextEvent', 'javax.servlet.ServletContextListener' -] +) diff --git a/qa/verify-version-constants/build.gradle 
b/qa/verify-version-constants/build.gradle index 3160117f7a212..a4cfa26875546 100644 --- a/qa/verify-version-constants/build.gradle +++ b/qa/verify-version-constants/build.gradle @@ -17,7 +17,6 @@ * under the License. */ -import java.util.Locale import org.elasticsearch.gradle.Version import org.elasticsearch.gradle.VersionProperties import org.elasticsearch.gradle.test.RestIntegTestTask diff --git a/qa/wildfly/build.gradle b/qa/wildfly/build.gradle index 0ed4f8c067911..fce27e6ab8a36 100644 --- a/qa/wildfly/build.gradle +++ b/qa/wildfly/build.gradle @@ -217,3 +217,14 @@ dependencyLicenses.enabled = false dependenciesInfo.enabled = false thirdPartyAudit.enabled = false + + +testingConventions { + naming.clear() + // We only have one "special" integration test here to connect to wildfly + naming { + IT { + baseClass 'org.apache.lucene.util.LuceneTestCase' + } + } +} diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.create.json b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.create.json index 2ff9d3f68d9d9..d4a16e576e1b9 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.create.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.create.json @@ -14,7 +14,7 @@ }, "params": { "include_type_name": { - "type" : "string", + "type" : "boolean", "description" : "Whether a type should be expected in the body of the mappings." }, "wait_for_active_shards": { diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.get.json b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.get.json index 6474b8acf5298..76e6ed00feb26 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.get.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.get.json @@ -13,6 +13,10 @@ } }, "params":{ + "include_type_name": { + "type" : "boolean", + "description" : "Whether to add the type name to the response (default: false)" + }, "local":{ "type":"boolean", "description":"Return local information, do not retrieve the state from master node (default: false)" diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.get_field_mapping.json b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.get_field_mapping.json index 3d5a629eff08e..3ce610153b5e6 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.get_field_mapping.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.get_field_mapping.json @@ -21,6 +21,10 @@ } }, "params": { + "include_type_name": { + "type" : "boolean", + "description" : "Whether a type should be returned in the body of the mappings." 
+ }, "include_defaults": { "type" : "boolean", "description" : "Whether the default mapping values should be returned as well" diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.get_mapping.json b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.get_mapping.json index 9bfb9c76abf82..ccec2ddffdd0c 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.get_mapping.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.get_mapping.json @@ -17,7 +17,7 @@ }, "params": { "include_type_name": { - "type" : "string", + "type" : "boolean", "description" : "Whether to add the type name to the response" }, "ignore_unavailable": { diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.get_template.json b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.get_template.json index e3a97ee5c012a..e2aae3b7444aa 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.get_template.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.get_template.json @@ -16,6 +16,10 @@ } }, "params": { + "include_type_name": { + "type" : "boolean", + "description" : "Whether a type should be returned in the body of the mappings." + }, "flat_settings": { "type": "boolean", "description": "Return settings in flat format (default: false)" diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.put_mapping.json b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.put_mapping.json index 4efb615329639..cc55ffccdd1ef 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.put_mapping.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.put_mapping.json @@ -17,7 +17,7 @@ }, "params": { "include_type_name": { - "type" : "string", + "type" : "boolean", "description" : "Whether a type should be expected in the body of the mappings." }, "timeout": { diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.put_template.json b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.put_template.json index 5bcb2f8a24346..65aa9506ff9f1 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.put_template.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.put_template.json @@ -13,6 +13,10 @@ } }, "params": { + "include_type_name": { + "type" : "boolean", + "description" : "Whether a type should be returned in the body of the mappings." 
+ }, "order": { "type" : "number", "description" : "The order for this template when merging multiple matching ones (higher numbers are merged later, overriding the lower numbers)" diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get/10_basic.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get/10_basic.yml index 6301087f48902..ef028dcdf67e8 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get/10_basic.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get/10_basic.yml @@ -52,6 +52,35 @@ setup: - is_true: test_index.settings - is_true: test_index.mappings +--- +"Test include_type_name": + - skip: + version: " - 6.99.99" + reason: the include_type_name parameter is not backported to pre 7.0 versions yet + + - do: + indices.get: + include_type_name: true + index: test_index + + - is_true: test_index.mappings + - is_true: test_index.mappings.type_1 + + - do: + indices.get: + include_type_name: false + index: test_index + + - is_true: test_index.mappings + - is_false: test_index.mappings.type_1 + + - do: + indices.get: + index: test_index + + - is_true: test_index.mappings + - is_false: test_index.mappings.type_1 + --- "Get index infos should work for wildcards": diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_field_mapping/10_basic.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_field_mapping/10_basic.yml index 6cf0a0b7cf26c..b77c56d34160c 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_field_mapping/10_basic.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_field_mapping/10_basic.yml @@ -1,54 +1,48 @@ --- setup: + - skip: + version: " - 6.99.99" + reason: include_type_name is not supported before 7.0.0 - do: indices.create: + include_type_name: false index: test_index body: mappings: - test_type: - properties: - text: - type: text + properties: + text: + type: text --- -"Get field mapping with no index and type": +"Get field mapping with no index": - do: indices.get_field_mapping: + include_type_name: false fields: text - - match: {test_index.mappings.test_type.text.mapping.text.type: text} + - match: {test_index.mappings.text.mapping.text.type: text} --- "Get field mapping by index only": - do: indices.get_field_mapping: + include_type_name: false index: test_index fields: text - - match: {test_index.mappings.test_type.text.mapping.text.type: text} + - match: {test_index.mappings.text.mapping.text.type: text} --- -"Get field mapping by type & field": +"Get field mapping by field, with another field that doesn't exist": - do: indices.get_field_mapping: + include_type_name: false index: test_index - type: test_type - fields: text - - - match: {test_index.mappings.test_type.text.mapping.text.type: text} - ---- -"Get field mapping by type & field, with another field that doesn't exist": - - - do: - indices.get_field_mapping: - index: test_index - type: test_type fields: [ text , text1 ] - - match: {test_index.mappings.test_type.text.mapping.text.type: text} + - match: {test_index.mappings.text.mapping.text.type: text} - is_false: test_index.mappings.test_type.text1 --- @@ -56,21 +50,10 @@ setup: - do: indices.get_field_mapping: + include_type_name: false index: test_index - type: test_type fields: text include_defaults: true - - match: {test_index.mappings.test_type.text.mapping.text.type: text} - - match: {test_index.mappings.test_type.text.mapping.text.analyzer: default} - ---- -"Get field mapping should 
work without index specifying type and fields": - - - do: - indices.get_field_mapping: - type: test_type - fields: text - - - match: {test_index.mappings.test_type.text.mapping.text.type: text} - + - match: {test_index.mappings.text.mapping.text.type: text} + - match: {test_index.mappings.text.mapping.text.analyzer: default} diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_field_mapping/11_basic_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_field_mapping/11_basic_with_types.yml new file mode 100644 index 0000000000000..6cf0a0b7cf26c --- /dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_field_mapping/11_basic_with_types.yml @@ -0,0 +1,76 @@ +--- +setup: + - do: + indices.create: + index: test_index + body: + mappings: + test_type: + properties: + text: + type: text + +--- +"Get field mapping with no index and type": + + - do: + indices.get_field_mapping: + fields: text + + - match: {test_index.mappings.test_type.text.mapping.text.type: text} + +--- +"Get field mapping by index only": + - do: + indices.get_field_mapping: + index: test_index + fields: text + + - match: {test_index.mappings.test_type.text.mapping.text.type: text} + +--- +"Get field mapping by type & field": + + - do: + indices.get_field_mapping: + index: test_index + type: test_type + fields: text + + - match: {test_index.mappings.test_type.text.mapping.text.type: text} + +--- +"Get field mapping by type & field, with another field that doesn't exist": + + - do: + indices.get_field_mapping: + index: test_index + type: test_type + fields: [ text , text1 ] + + - match: {test_index.mappings.test_type.text.mapping.text.type: text} + - is_false: test_index.mappings.test_type.text1 + +--- +"Get field mapping with include_defaults": + + - do: + indices.get_field_mapping: + index: test_index + type: test_type + fields: text + include_defaults: true + + - match: {test_index.mappings.test_type.text.mapping.text.type: text} + - match: {test_index.mappings.test_type.text.mapping.text.analyzer: default} + +--- +"Get field mapping should work without index specifying type and fields": + + - do: + indices.get_field_mapping: + type: test_type + fields: text + + - match: {test_index.mappings.test_type.text.mapping.text.type: text} + diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_field_mapping/20_missing_field.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_field_mapping/20_missing_field.yml index 9b8c3efbce81a..61f1f409d2939 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_field_mapping/20_missing_field.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_field_mapping/20_missing_field.yml @@ -1,21 +1,22 @@ --- "Return empty object if field doesn't exist, but type and index do": - + - skip: + version: " - 6.99.99" + reason: types are required in requests before 7.0.0 - do: indices.create: + include_type_name: false index: test_index body: - mappings: - test_type: - properties: - text: - type: text - analyzer: whitespace + mappings: + properties: + text: + type: text + analyzer: whitespace - do: indices.get_field_mapping: index: test_index - type: test_type fields: not_existent - - - match: { '': {}} + + - match: { 'test_index.mappings': {}} diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_field_mapping/21_missing_field_with_types.yml 
b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_field_mapping/21_missing_field_with_types.yml new file mode 100644 index 0000000000000..c760561f09282 --- /dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_field_mapping/21_missing_field_with_types.yml @@ -0,0 +1,21 @@ +--- +"Return empty object if field doesn't exist, but type and index do": + + - do: + indices.create: + index: test_index + body: + mappings: + test_type: + properties: + text: + type: text + analyzer: whitespace + + - do: + indices.get_field_mapping: + index: test_index + type: test_type + fields: not_existent + + - match: { '': {}} diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_field_mapping/40_missing_index.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_field_mapping/40_missing_index.yml index 7da516e116c3d..7c7b07b587849 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_field_mapping/40_missing_index.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_field_mapping/40_missing_index.yml @@ -5,7 +5,6 @@ catch: missing indices.get_field_mapping: index: test_index - type: type fields: field diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_field_mapping/50_field_wildcards.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_field_mapping/50_field_wildcards.yml index 9d62ab6101fc2..3ffecdcc72618 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_field_mapping/50_field_wildcards.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_field_mapping/50_field_wildcards.yml @@ -1,135 +1,142 @@ --- setup: + - skip: + version: " - 6.99.99" + reason: types are required in requests before 7.0.0 - do: indices.create: + include_type_name: false index: test_index body: mappings: - test_type: - properties: - t1: - type: text - t2: - type: text - obj: - properties: - t1: - type: text - i_t1: - type: text - i_t3: - type: text + properties: + t1: + type: text + t2: + type: text + obj: + properties: + t1: + type: text + i_t1: + type: text + i_t3: + type: text - do: indices.create: + include_type_name: false index: test_index_2 body: mappings: - test_type_2: - properties: - t1: - type: text - t2: - type: text - obj: - properties: - t1: - type: text - i_t1: - type: text - i_t3: - type: text + properties: + t1: + type: text + t2: + type: text + obj: + properties: + t1: + type: text + i_t1: + type: text + i_t3: + type: text --- "Get field mapping with * for fields": - do: indices.get_field_mapping: + include_type_name: false fields: "*" - - match: {test_index.mappings.test_type.t1.full_name: t1 } - - match: {test_index.mappings.test_type.t2.full_name: t2 } - - match: {test_index.mappings.test_type.obj\.t1.full_name: obj.t1 } - - match: {test_index.mappings.test_type.obj\.i_t1.full_name: obj.i_t1 } - - match: {test_index.mappings.test_type.obj\.i_t3.full_name: obj.i_t3 } + - match: {test_index.mappings.t1.full_name: t1 } + - match: {test_index.mappings.t2.full_name: t2 } + - match: {test_index.mappings.obj\.t1.full_name: obj.t1 } + - match: {test_index.mappings.obj\.i_t1.full_name: obj.i_t1 } + - match: {test_index.mappings.obj\.i_t3.full_name: obj.i_t3 } --- "Get field mapping with t* for fields": - do: indices.get_field_mapping: + include_type_name: false index: test_index fields: "t*" - - match: {test_index.mappings.test_type.t1.full_name: t1 } - - match: {test_index.mappings.test_type.t2.full_name: t2 } - - 
length: {test_index.mappings.test_type: 2} + - match: {test_index.mappings.t1.full_name: t1 } + - match: {test_index.mappings.t2.full_name: t2 } + - length: {test_index.mappings: 2} --- "Get field mapping with *t1 for fields": - do: indices.get_field_mapping: + include_type_name: false index: test_index fields: "*t1" - - match: {test_index.mappings.test_type.t1.full_name: t1 } - - match: {test_index.mappings.test_type.obj\.t1.full_name: obj.t1 } - - match: {test_index.mappings.test_type.obj\.i_t1.full_name: obj.i_t1 } - - length: {test_index.mappings.test_type: 3} + - match: {test_index.mappings.t1.full_name: t1 } + - match: {test_index.mappings.obj\.t1.full_name: obj.t1 } + - match: {test_index.mappings.obj\.i_t1.full_name: obj.i_t1 } + - length: {test_index.mappings: 3} --- "Get field mapping with wildcarded relative names": - do: indices.get_field_mapping: + include_type_name: false index: test_index fields: "obj.i_*" - - match: {test_index.mappings.test_type.obj\.i_t1.full_name: obj.i_t1 } - - match: {test_index.mappings.test_type.obj\.i_t3.full_name: obj.i_t3 } - - length: {test_index.mappings.test_type: 2} + - match: {test_index.mappings.obj\.i_t1.full_name: obj.i_t1 } + - match: {test_index.mappings.obj\.i_t3.full_name: obj.i_t3 } + - length: {test_index.mappings: 2} --- -"Get field mapping should work using '_all' for indices and types": +"Get field mapping should work using '_all' for index": - do: indices.get_field_mapping: + include_type_name: false index: _all - type: _all fields: "t*" - - match: {test_index.mappings.test_type.t1.full_name: t1 } - - match: {test_index.mappings.test_type.t2.full_name: t2 } - - length: {test_index.mappings.test_type: 2} - - match: {test_index_2.mappings.test_type_2.t1.full_name: t1 } - - match: {test_index_2.mappings.test_type_2.t2.full_name: t2 } - - length: {test_index_2.mappings.test_type_2: 2} + - match: {test_index.mappings.t1.full_name: t1 } + - match: {test_index.mappings.t2.full_name: t2 } + - length: {test_index.mappings: 2} + - match: {test_index_2.mappings.t1.full_name: t1 } + - match: {test_index_2.mappings.t2.full_name: t2 } + - length: {test_index_2.mappings: 2} --- -"Get field mapping should work using '*' for indices and types": +"Get field mapping should work using '*' for index": - do: indices.get_field_mapping: + include_type_name: false index: '*' - type: '*' fields: "t*" - - match: {test_index.mappings.test_type.t1.full_name: t1 } - - match: {test_index.mappings.test_type.t2.full_name: t2 } - - length: {test_index.mappings.test_type: 2} - - match: {test_index_2.mappings.test_type_2.t1.full_name: t1 } - - match: {test_index_2.mappings.test_type_2.t2.full_name: t2 } - - length: {test_index_2.mappings.test_type_2: 2} + - match: {test_index.mappings.t1.full_name: t1 } + - match: {test_index.mappings.t2.full_name: t2 } + - length: {test_index.mappings: 2} + - match: {test_index_2.mappings.t1.full_name: t1 } + - match: {test_index_2.mappings.t2.full_name: t2 } + - length: {test_index_2.mappings: 2} --- -"Get field mapping should work using comma_separated values for indices and types": +"Get field mapping should work using comma_separated values for indices": - do: indices.get_field_mapping: + include_type_name: false index: 'test_index,test_index_2' - type: 'test_type,test_type_2' fields: "t*" - - match: {test_index.mappings.test_type.t1.full_name: t1 } - - match: {test_index.mappings.test_type.t2.full_name: t2 } - - length: {test_index.mappings.test_type: 2} - - match: {test_index_2.mappings.test_type_2.t1.full_name: t1 } - - 
match: {test_index_2.mappings.test_type_2.t2.full_name: t2 } - - length: {test_index_2.mappings.test_type_2: 2} + - match: {test_index.mappings.t1.full_name: t1 } + - match: {test_index.mappings.t2.full_name: t2 } + - length: {test_index.mappings: 2} + - match: {test_index_2.mappings.t1.full_name: t1 } + - match: {test_index_2.mappings.t2.full_name: t2 } + - length: {test_index_2.mappings: 2} diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_field_mapping/51_field_wildcards_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_field_mapping/51_field_wildcards_with_types.yml new file mode 100644 index 0000000000000..9d62ab6101fc2 --- /dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_field_mapping/51_field_wildcards_with_types.yml @@ -0,0 +1,135 @@ +--- +setup: + - do: + indices.create: + index: test_index + body: + mappings: + test_type: + properties: + t1: + type: text + t2: + type: text + obj: + properties: + t1: + type: text + i_t1: + type: text + i_t3: + type: text + + - do: + indices.create: + index: test_index_2 + body: + mappings: + test_type_2: + properties: + t1: + type: text + t2: + type: text + obj: + properties: + t1: + type: text + i_t1: + type: text + i_t3: + type: text + +--- +"Get field mapping with * for fields": + + - do: + indices.get_field_mapping: + fields: "*" + + - match: {test_index.mappings.test_type.t1.full_name: t1 } + - match: {test_index.mappings.test_type.t2.full_name: t2 } + - match: {test_index.mappings.test_type.obj\.t1.full_name: obj.t1 } + - match: {test_index.mappings.test_type.obj\.i_t1.full_name: obj.i_t1 } + - match: {test_index.mappings.test_type.obj\.i_t3.full_name: obj.i_t3 } + +--- +"Get field mapping with t* for fields": + + - do: + indices.get_field_mapping: + index: test_index + fields: "t*" + + - match: {test_index.mappings.test_type.t1.full_name: t1 } + - match: {test_index.mappings.test_type.t2.full_name: t2 } + - length: {test_index.mappings.test_type: 2} + +--- +"Get field mapping with *t1 for fields": + + - do: + indices.get_field_mapping: + index: test_index + fields: "*t1" + - match: {test_index.mappings.test_type.t1.full_name: t1 } + - match: {test_index.mappings.test_type.obj\.t1.full_name: obj.t1 } + - match: {test_index.mappings.test_type.obj\.i_t1.full_name: obj.i_t1 } + - length: {test_index.mappings.test_type: 3} + +--- +"Get field mapping with wildcarded relative names": + + - do: + indices.get_field_mapping: + index: test_index + fields: "obj.i_*" + - match: {test_index.mappings.test_type.obj\.i_t1.full_name: obj.i_t1 } + - match: {test_index.mappings.test_type.obj\.i_t3.full_name: obj.i_t3 } + - length: {test_index.mappings.test_type: 2} + +--- +"Get field mapping should work using '_all' for indices and types": + + - do: + indices.get_field_mapping: + index: _all + type: _all + fields: "t*" + - match: {test_index.mappings.test_type.t1.full_name: t1 } + - match: {test_index.mappings.test_type.t2.full_name: t2 } + - length: {test_index.mappings.test_type: 2} + - match: {test_index_2.mappings.test_type_2.t1.full_name: t1 } + - match: {test_index_2.mappings.test_type_2.t2.full_name: t2 } + - length: {test_index_2.mappings.test_type_2: 2} + +--- +"Get field mapping should work using '*' for indices and types": + + - do: + indices.get_field_mapping: + index: '*' + type: '*' + fields: "t*" + - match: {test_index.mappings.test_type.t1.full_name: t1 } + - match: {test_index.mappings.test_type.t2.full_name: t2 } + - length: 
{test_index.mappings.test_type: 2} + - match: {test_index_2.mappings.test_type_2.t1.full_name: t1 } + - match: {test_index_2.mappings.test_type_2.t2.full_name: t2 } + - length: {test_index_2.mappings.test_type_2: 2} + +--- +"Get field mapping should work using comma_separated values for indices and types": + + - do: + indices.get_field_mapping: + index: 'test_index,test_index_2' + type: 'test_type,test_type_2' + fields: "t*" + - match: {test_index.mappings.test_type.t1.full_name: t1 } + - match: {test_index.mappings.test_type.t2.full_name: t2 } + - length: {test_index.mappings.test_type: 2} + - match: {test_index_2.mappings.test_type_2.t1.full_name: t1 } + - match: {test_index_2.mappings.test_type_2.t2.full_name: t2 } + - length: {test_index_2.mappings.test_type_2: 2} + diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_field_mapping/60_mix_typeless_typeful.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_field_mapping/60_mix_typeless_typeful.yml new file mode 100644 index 0000000000000..d7ea620bb6c58 --- /dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_field_mapping/60_mix_typeless_typeful.yml @@ -0,0 +1,24 @@ +--- +"GET mapping with typeless API on an index that has types": + + - skip: + version: " - 6.99.99" + reason: include_type_name was introduced in 7.0.0 + + - do: + indices.create: # not using include_type_name: false on purpose + index: index + body: + mappings: + not_doc: + properties: + foo: + type: "keyword" + + - do: + indices.get_field_mapping: + include_type_name: false + index: index + fields: foo + + - match: { index.mappings.foo.mapping.foo.type: "keyword" } diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_template/10_basic.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_template/10_basic.yml index a03a10c1a5a89..8fe244e0c6323 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_template/10_basic.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_template/10_basic.yml @@ -1,4 +1,7 @@ setup: + - skip: + version: " - 6.99.99" + reason: include_type_name is not supported before 7.0.0 - do: indices.put_template: name: test @@ -7,16 +10,44 @@ setup: settings: number_of_shards: 1 number_of_replicas: 0 + mappings: + _doc: + properties: + field: + type: keyword --- "Get template": - do: indices.get_template: + include_type_name: false name: test - match: {test.index_patterns: ["test-*"]} - match: {test.settings: {index: {number_of_shards: '1', number_of_replicas: '0'}}} + - match: {test.mappings: {properties: {field: {type: keyword}}}} + +--- +"Get template with no mappings": + + - do: + indices.put_template: + name: test_no_mappings + body: + index_patterns: test-* + settings: + number_of_shards: 1 + number_of_replicas: 0 + + - do: + indices.get_template: + include_type_name: false + name: test_no_mappings + + - match: {test_no_mappings.index_patterns: ["test-*"]} + - match: {test_no_mappings.settings: {index: {number_of_shards: '1', number_of_replicas: '0'}}} + - match: {test_no_mappings.mappings: {}} --- "Get all templates": diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_template/11_basic_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_template/11_basic_with_types.yml new file mode 100644 index 0000000000000..c15f5dc6de4f1 --- /dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_template/11_basic_with_types.yml @@ 
-0,0 +1,45 @@ +setup: + - do: + indices.put_template: + name: test + body: + index_patterns: test-* + settings: + number_of_shards: 1 + number_of_replicas: 0 + mappings: + _doc: + properties: + field: + type: keyword + +--- +"Get template": + + - do: + indices.get_template: + name: test + + - match: {test.index_patterns: ["test-*"]} + - match: {test.settings: {index: {number_of_shards: '1', number_of_replicas: '0'}}} + - match: {test.mappings: {_doc: {properties: {field: {type: keyword}}}}} + +--- +"Get template with no mappings": + + - do: + indices.put_template: + name: test_no_mappings + body: + index_patterns: test-* + settings: + number_of_shards: 1 + number_of_replicas: 0 + + - do: + indices.get_template: + name: test_no_mappings + + - match: {test_no_mappings.index_patterns: ["test-*"]} + - match: {test_no_mappings.settings: {index: {number_of_shards: '1', number_of_replicas: '0'}}} + - match: {test_no_mappings.mappings: {}} diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.put_template/10_basic.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.put_template/10_basic.yml index b4e66c23c605b..8637b3e6d1864 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.put_template/10_basic.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.put_template/10_basic.yml @@ -1,42 +1,87 @@ --- "Put template": + - skip: + version: " - 6.99.99" + reason: include_type_name is not supported before 7.0.0 - do: indices.put_template: + include_type_name: false name: test body: index_patterns: test-* settings: number_of_shards: 1 number_of_replicas: 0 + mappings: + properties: + field: + type: keyword - do: indices.get_template: + include_type_name: false name: test flat_settings: true - match: {test.index_patterns: ["test-*"]} - match: {test.settings: {index.number_of_shards: '1', index.number_of_replicas: '0'}} + - match: {test.mappings: {properties: {field: {type: keyword}}}} --- "Put multiple template": + - skip: + version: " - 6.99.99" + reason: include_type_name was introduced in 7.0.0 - do: indices.put_template: + include_type_name: false name: test body: index_patterns: [test-*, test2-*] settings: number_of_shards: 1 number_of_replicas: 0 + mappings: + properties: + field: + type: text - do: indices.get_template: + include_type_name: false name: test flat_settings: true - match: {test.index_patterns: ["test-*", "test2-*"]} - match: {test.settings: {index.number_of_shards: '1', index.number_of_replicas: '0'}} + - match: {test.mappings: {properties: {field: {type: text}}}} + +--- +"Put template with empty mappings": + - skip: + version: " - 6.99.99" + reason: include_type_name was introduced in 7.0.0 + + - do: + indices.put_template: + include_type_name: false + name: test + body: + index_patterns: test-* + settings: + number_of_shards: 1 + number_of_replicas: 0 + mappings: {} + + - do: + indices.get_template: + include_type_name: false + name: test + flat_settings: true + + - match: {test.mappings: {}} --- "Put template with aliases": diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.put_template/11_basic_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.put_template/11_basic_with_types.yml new file mode 100644 index 0000000000000..1e14a9d3895a7 --- /dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.put_template/11_basic_with_types.yml @@ -0,0 +1,68 @@ +--- +"Put template": + - do: + indices.put_template: + name: test + body: + index_patterns: test-* + 
settings: + number_of_shards: 1 + number_of_replicas: 0 + mappings: + _doc: + properties: + field: + type: keyword + + - do: + indices.get_template: + name: test + flat_settings: true + + - match: {test.index_patterns: ["test-*"]} + - match: {test.settings: {index.number_of_shards: '1', index.number_of_replicas: '0'}} + - match: {test.mappings: {_doc: {properties: {field: {type: keyword}}}}} + +--- +"Put multiple template": + - do: + indices.put_template: + name: test + body: + index_patterns: [test-*, test2-*] + settings: + number_of_shards: 1 + number_of_replicas: 0 + mappings: + _doc: + properties: + field: + type: text + + - do: + indices.get_template: + name: test + flat_settings: true + + - match: {test.index_patterns: ["test-*", "test2-*"]} + - match: {test.settings: {index.number_of_shards: '1', index.number_of_replicas: '0'}} + - match: {test.mappings: {_doc: {properties: {field: {type: text}}}}} + +--- +"Put template with empty mappings": + - do: + indices.put_template: + name: test + body: + index_patterns: test-* + settings: + number_of_shards: 1 + number_of_replicas: 0 + mappings: {} + + - do: + indices.get_template: + name: test + flat_settings: true + + - match: {test.mappings: {}} diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/msearch/10_basic.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/msearch/10_basic.yml index 0d178a389be44..f9fe244529f28 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/msearch/10_basic.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/msearch/10_basic.yml @@ -115,11 +115,45 @@ setup: - query: match: {foo: foo} - - match: { responses.0.hits.total.value: 2 } + - match: { responses.0.hits.total.value: 2 } - match: { responses.0.hits.total.relation: eq } - - match: { responses.1.hits.total.value: 1 } + - match: { responses.1.hits.total.value: 1 } - match: { responses.1.hits.total.relation: eq } - - match: { responses.2.hits.total.value: 1 } + - match: { responses.2.hits.total.value: 1 } - match: { responses.2.hits.total.relation: eq } + - do: + msearch: + body: + - index: index_* + - { query: { match: {foo: foo}}, track_total_hits: 1 } + - index: index_2 + - query: + match_all: {} + - index: index_1 + - query: + match: {foo: foo} + + - match: { responses.0.hits.total.value: 1 } + - match: { responses.0.hits.total.relation: gte } + - match: { responses.1.hits.total.value: 1 } + - match: { responses.1.hits.total.relation: eq } + - match: { responses.2.hits.total.value: 1 } + - match: { responses.2.hits.total.relation: eq } + + - do: + catch: /\[rest_total_hits_as_int\] cannot be used if the tracking of total hits is not accurate, got 10/ + msearch: + rest_total_hits_as_int: true + body: + - index: index_* + - { query: { match_all: {}}, track_total_hits: 10} + - index: index_2 + - query: + match_all: {} + - index: index_1 + - query: + match: {foo: foo} + + diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/230_composite.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/230_composite.yml index 26c530551a9e1..325bdf8f18e22 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/230_composite.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/230_composite.yml @@ -335,10 +335,11 @@ setup: --- "Composite aggregation and array size": - skip: - version: " - 6.3.99" - reason: starting in 6.4 the composite sources do not allocate arrays eagerly. 
+ version: " - 6.99.99" + reason: starting in 7.0 the composite aggregation throws an execption if the provided size is greater than search.max_buckets. - do: + catch: /.*Trying to create too many buckets.*/ search: rest_total_hits_as_int: true index: test @@ -356,8 +357,3 @@ setup: } } ] - - - match: {hits.total: 6} - - length: { aggregations.test.buckets: 2 } - - length: { aggregations.test.after_key: 1 } - - match: { aggregations.test.after_key.keyword: "foo" } diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/search/170_terms_query.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/search/170_terms_query.yml index 515dcfe463069..3966a6a182a62 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/search/170_terms_query.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/search/170_terms_query.yml @@ -48,7 +48,7 @@ search: rest_total_hits_as_int: true index: test_index - body: {"query" : {"terms" : {"user" : {"index" : "test_index", "type" : "test_type", "id" : "u1", "path" : "followers"}}}} + body: {"query" : {"terms" : {"user" : {"index" : "test_index", "id" : "u1", "path" : "followers"}}}} - match: { hits.total: 2 } - do: @@ -56,4 +56,4 @@ search: rest_total_hits_as_int: true index: test_index - body: {"query" : {"terms" : {"user" : {"index" : "test_index", "type" : "test_type", "id" : "u2", "path" : "followers"}}}} + body: {"query" : {"terms" : {"user" : {"index" : "test_index", "id" : "u2", "path" : "followers"}}}} diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/search/171_terms_query_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/search/171_terms_query_with_types.yml new file mode 100644 index 0000000000000..515dcfe463069 --- /dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/search/171_terms_query_with_types.yml @@ -0,0 +1,59 @@ +--- +"Terms Query with No.of terms exceeding index.max_terms_count should FAIL": + - skip: + version: " - 6.99.99" + reason: index.max_terms_count setting has been added in 7.0.0 + - do: + indices.create: + index: test_index + body: + settings: + number_of_shards: 1 + index.max_terms_count: 2 + mappings: + test_type: + properties: + user: + type: keyword + followers: + type: keyword + - do: + bulk: + refresh: true + body: + - '{"index": {"_index": "test_index", "_type": "test_type", "_id": "u1"}}' + - '{"user": "u1", "followers": ["u2", "u3"]}' + - '{"index": {"_index": "test_index", "_type": "test_type", "_id": "u2"}}' + - '{"user": "u2", "followers": ["u1", "u3", "u4"]}' + - '{"index": {"_index": "test_index", "_type": "test_type", "_id": "u3"}}' + - '{"user": "u3", "followers": ["u1"]}' + - '{"index": {"_index": "test_index", "_type": "test_type", "_id": "u4"}}' + - '{"user": "u4", "followers": ["u3"]}' + + - do: + search: + rest_total_hits_as_int: true + index: test_index + body: {"query" : {"terms" : {"user" : ["u1", "u2"]}}} + - match: { hits.total: 2 } + + - do: + catch: bad_request + search: + rest_total_hits_as_int: true + index: test_index + body: {"query" : {"terms" : {"user" : ["u1", "u2", "u3"]}}} + + - do: + search: + rest_total_hits_as_int: true + index: test_index + body: {"query" : {"terms" : {"user" : {"index" : "test_index", "type" : "test_type", "id" : "u1", "path" : "followers"}}}} + - match: { hits.total: 2 } + + - do: + catch: bad_request + search: + rest_total_hits_as_int: true + index: test_index + body: {"query" : {"terms" : {"user" : {"index" : "test_index", "type" : "test_type", "id" : "u2", "path" : "followers"}}}} diff --git 
a/rest-api-spec/src/main/resources/rest-api-spec/test/search/190_index_prefix_search.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/search/190_index_prefix_search.yml index 7b89dd620a9bb..0f2d48af289c5 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/search/190_index_prefix_search.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/search/190_index_prefix_search.yml @@ -13,15 +13,15 @@ setup: text: type: text index_prefixes: - min_chars: 1 - max_chars: 10 + min_chars: 2 + max_chars: 5 - do: index: index: test type: test id: 1 - body: { text: some short words and a stupendously long one } + body: { text: some short words with a stupendously long one } - do: indices.refresh: @@ -63,10 +63,11 @@ setup: rest_total_hits_as_int: true index: test body: + explain: true query: query_string: default_field: text - query: s* + query: a* boost: 2 - match: {hits.total: 1} diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/search/220_total_hits_object.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/search/220_total_hits_object.yml index f16609808690a..a9c37c00b929a 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/search/220_total_hits_object.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/search/220_total_hits_object.yml @@ -2,9 +2,11 @@ setup: - do: indices.create: index: test_2 + - do: indices.create: index: test_1 + - do: index: index: test_1 @@ -14,10 +16,45 @@ setup: - do: index: - index: test_2 - type: test - id: 42 - body: { foo: bar } + index: test_1 + type: test + id: 3 + body: { foo: baz } + + - do: + index: + index: test_1 + type: test + id: 2 + body: { foo: bar } + + - do: + index: + index: test_1 + type: test + id: 4 + body: { foo: bar } + + - do: + index: + index: test_2 + type: test + id: 42 + body: { foo: bar } + + - do: + index: + index: test_2 + type: test + id: 24 + body: { foo: baz } + + - do: + index: + index: test_2 + type: test + id: 36 + body: { foo: bar } - do: indices.refresh: @@ -28,6 +65,7 @@ setup: - skip: version: " - 6.99.99" reason: hits.total is rendered as an object in 7.0.0 + - do: search: index: _all @@ -36,7 +74,7 @@ setup: match: foo: bar - - match: {hits.total.value: 2} + - match: {hits.total.value: 5} - match: {hits.total.relation: eq} - do: @@ -47,7 +85,7 @@ setup: match: foo: bar - - match: {hits.total.value: 1} + - match: {hits.total.value: 3} - match: {hits.total.relation: eq} - do: @@ -61,6 +99,54 @@ setup: - is_false: hits.total + - do: + search: + track_total_hits: 4 + body: + query: + match: + foo: bar + + - match: {hits.total.value: 4} + - match: {hits.total.relation: gte} + + + - do: + search: + size: 3 + track_total_hits: 4 + body: + query: + match: + foo: bar + + - match: {hits.total.value: 4} + - match: {hits.total.relation: gte} + + - do: + catch: /\[rest_total_hits_as_int\] cannot be used if the tracking of total hits is not accurate, got 100/ + search: + rest_total_hits_as_int: true + index: test_2 + track_total_hits: 100 + body: + query: + match: + foo: bar + + - do: + catch: /\[track_total_hits\] parameter must be positive or equals to -1, got -2/ + search: + rest_total_hits_as_int: true + index: test_2 + track_total_hits: -2 + body: + query: + match: + foo: bar + +--- +"track_total_hits with rest_total_hits_as_int": - do: search: track_total_hits: false @@ -82,6 +168,6 @@ setup: match: foo: bar - - match: {hits.total: 1} + - match: {hits.total: 2} diff --git a/server/build.gradle b/server/build.gradle index c3a8958f3d8a8..a3197acde4ad8 100644 --- 
a/server/build.gradle +++ b/server/build.gradle @@ -161,6 +161,19 @@ forbiddenPatterns { exclude '**/*.st' } +testingConventions { + naming.clear() + naming { + Tests { + baseClass "org.apache.lucene.util.LuceneTestCase" + } + IT { + baseClass "org.elasticsearch.test.ESIntegTestCase" + baseClass "org.elasticsearch.test.ESSingleNodeTestCase" + } + } +} + task generateModulesList { List modules = project(':modules').subprojects.collect { it.name } modules.add('x-pack') @@ -192,9 +205,7 @@ processResources { dependsOn generateModulesList, generatePluginsList } -thirdPartyAudit.excludes = [ - // classes are missing! - +thirdPartyAudit.ignoreMissingClasses ( // from com.fasterxml.jackson.dataformat.yaml.YAMLMapper (jackson-dataformat-yaml) 'com.fasterxml.jackson.databind.ObjectMapper', @@ -301,11 +312,11 @@ thirdPartyAudit.excludes = [ 'com.google.common.geometry.S2Projections', 'com.google.common.geometry.S2Point', 'com.google.common.geometry.S2$Metric', - 'com.google.common.geometry.S2LatLng', -] + 'com.google.common.geometry.S2LatLng' +) if (project.runtimeJavaVersion > JavaVersion.VERSION_1_8) { - thirdPartyAudit.excludes += ['javax.xml.bind.DatatypeConverter'] + thirdPartyAudit.ignoreMissingClasses 'javax.xml.bind.DatatypeConverter' } dependencyLicenses { diff --git a/server/licenses/lucene-analyzers-common-8.0.0-snapshot-774e9aefbc.jar.sha1 b/server/licenses/lucene-analyzers-common-8.0.0-snapshot-774e9aefbc.jar.sha1 deleted file mode 100644 index b471f9fe13926..0000000000000 --- a/server/licenses/lucene-analyzers-common-8.0.0-snapshot-774e9aefbc.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -729c6a031e3849874028020301e1f45a05d5a0bb \ No newline at end of file diff --git a/server/licenses/lucene-analyzers-common-8.0.0-snapshot-a1c6e642aa.jar.sha1 b/server/licenses/lucene-analyzers-common-8.0.0-snapshot-a1c6e642aa.jar.sha1 new file mode 100644 index 0000000000000..be6bfec6e5563 --- /dev/null +++ b/server/licenses/lucene-analyzers-common-8.0.0-snapshot-a1c6e642aa.jar.sha1 @@ -0,0 +1 @@ +66e4d1a3f91be88d903b1c75e71c8b15a7dc4f90 \ No newline at end of file diff --git a/server/licenses/lucene-backward-codecs-8.0.0-snapshot-774e9aefbc.jar.sha1 b/server/licenses/lucene-backward-codecs-8.0.0-snapshot-774e9aefbc.jar.sha1 deleted file mode 100644 index 5c6c5cc229d6d..0000000000000 --- a/server/licenses/lucene-backward-codecs-8.0.0-snapshot-774e9aefbc.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -5f831dea7c0bafd6306653144388a8ecd1186158 \ No newline at end of file diff --git a/server/licenses/lucene-backward-codecs-8.0.0-snapshot-a1c6e642aa.jar.sha1 b/server/licenses/lucene-backward-codecs-8.0.0-snapshot-a1c6e642aa.jar.sha1 new file mode 100644 index 0000000000000..357ce92760e39 --- /dev/null +++ b/server/licenses/lucene-backward-codecs-8.0.0-snapshot-a1c6e642aa.jar.sha1 @@ -0,0 +1 @@ +069cfb0c693d365cbf332973ab796ba33646f867 \ No newline at end of file diff --git a/server/licenses/lucene-core-8.0.0-snapshot-774e9aefbc.jar.sha1 b/server/licenses/lucene-core-8.0.0-snapshot-774e9aefbc.jar.sha1 deleted file mode 100644 index 2708b818d44cb..0000000000000 --- a/server/licenses/lucene-core-8.0.0-snapshot-774e9aefbc.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -49b3ac44b6749a7ebf0c2e41a81e7910133d2fcc \ No newline at end of file diff --git a/server/licenses/lucene-core-8.0.0-snapshot-a1c6e642aa.jar.sha1 b/server/licenses/lucene-core-8.0.0-snapshot-a1c6e642aa.jar.sha1 new file mode 100644 index 0000000000000..efbb9ada534a5 --- /dev/null +++ b/server/licenses/lucene-core-8.0.0-snapshot-a1c6e642aa.jar.sha1 @@ -0,0 +1 @@ 
+ad32720fe677becb93a26692338b63754613aa50 \ No newline at end of file diff --git a/server/licenses/lucene-grouping-8.0.0-snapshot-774e9aefbc.jar.sha1 b/server/licenses/lucene-grouping-8.0.0-snapshot-774e9aefbc.jar.sha1 deleted file mode 100644 index 2c7a4912deaea..0000000000000 --- a/server/licenses/lucene-grouping-8.0.0-snapshot-774e9aefbc.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -0396dff0af03463e784b86fd1a24008e2f07daa2 \ No newline at end of file diff --git a/server/licenses/lucene-grouping-8.0.0-snapshot-a1c6e642aa.jar.sha1 b/server/licenses/lucene-grouping-8.0.0-snapshot-a1c6e642aa.jar.sha1 new file mode 100644 index 0000000000000..7f35ad20b8230 --- /dev/null +++ b/server/licenses/lucene-grouping-8.0.0-snapshot-a1c6e642aa.jar.sha1 @@ -0,0 +1 @@ +3f9499ffc5e956f7a113308198e74e80b7df290b \ No newline at end of file diff --git a/server/licenses/lucene-highlighter-8.0.0-snapshot-774e9aefbc.jar.sha1 b/server/licenses/lucene-highlighter-8.0.0-snapshot-774e9aefbc.jar.sha1 deleted file mode 100644 index 165e6dadbf594..0000000000000 --- a/server/licenses/lucene-highlighter-8.0.0-snapshot-774e9aefbc.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -d552b941fef2a64ab4c9b2509906950257f92262 \ No newline at end of file diff --git a/server/licenses/lucene-highlighter-8.0.0-snapshot-a1c6e642aa.jar.sha1 b/server/licenses/lucene-highlighter-8.0.0-snapshot-a1c6e642aa.jar.sha1 new file mode 100644 index 0000000000000..566114f72948f --- /dev/null +++ b/server/licenses/lucene-highlighter-8.0.0-snapshot-a1c6e642aa.jar.sha1 @@ -0,0 +1 @@ +162cac09191f267039cdb73ccc7face5ef54ba8b \ No newline at end of file diff --git a/server/licenses/lucene-join-8.0.0-snapshot-774e9aefbc.jar.sha1 b/server/licenses/lucene-join-8.0.0-snapshot-774e9aefbc.jar.sha1 deleted file mode 100644 index d80ff31f14578..0000000000000 --- a/server/licenses/lucene-join-8.0.0-snapshot-774e9aefbc.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -192e9374124c14c7cd594a6f87aed61806e6e402 \ No newline at end of file diff --git a/server/licenses/lucene-join-8.0.0-snapshot-a1c6e642aa.jar.sha1 b/server/licenses/lucene-join-8.0.0-snapshot-a1c6e642aa.jar.sha1 new file mode 100644 index 0000000000000..baa09c01bed13 --- /dev/null +++ b/server/licenses/lucene-join-8.0.0-snapshot-a1c6e642aa.jar.sha1 @@ -0,0 +1 @@ +106cbbc96feb7413b17539f8a0ae2e7692550f44 \ No newline at end of file diff --git a/server/licenses/lucene-memory-8.0.0-snapshot-774e9aefbc.jar.sha1 b/server/licenses/lucene-memory-8.0.0-snapshot-774e9aefbc.jar.sha1 deleted file mode 100644 index 179dd72165f87..0000000000000 --- a/server/licenses/lucene-memory-8.0.0-snapshot-774e9aefbc.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -b9345c0321a3f4c7aa69ecfaf15cdee74180e409 \ No newline at end of file diff --git a/server/licenses/lucene-memory-8.0.0-snapshot-a1c6e642aa.jar.sha1 b/server/licenses/lucene-memory-8.0.0-snapshot-a1c6e642aa.jar.sha1 new file mode 100644 index 0000000000000..ba3d8ec94d0ac --- /dev/null +++ b/server/licenses/lucene-memory-8.0.0-snapshot-a1c6e642aa.jar.sha1 @@ -0,0 +1 @@ +37336dec582ce1569e944d1d8a5181c2eb2aec25 \ No newline at end of file diff --git a/server/licenses/lucene-misc-8.0.0-snapshot-774e9aefbc.jar.sha1 b/server/licenses/lucene-misc-8.0.0-snapshot-774e9aefbc.jar.sha1 deleted file mode 100644 index abfd221dcdf44..0000000000000 --- a/server/licenses/lucene-misc-8.0.0-snapshot-774e9aefbc.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -ace540746369ded8b2f354d35002f5ccf6a58aab \ No newline at end of file diff --git a/server/licenses/lucene-misc-8.0.0-snapshot-a1c6e642aa.jar.sha1 
b/server/licenses/lucene-misc-8.0.0-snapshot-a1c6e642aa.jar.sha1 new file mode 100644 index 0000000000000..764fffadffe99 --- /dev/null +++ b/server/licenses/lucene-misc-8.0.0-snapshot-a1c6e642aa.jar.sha1 @@ -0,0 +1 @@ +c18617a95c109160d0dacb58a1e268014f7f5862 \ No newline at end of file diff --git a/server/licenses/lucene-queries-8.0.0-snapshot-774e9aefbc.jar.sha1 b/server/licenses/lucene-queries-8.0.0-snapshot-774e9aefbc.jar.sha1 deleted file mode 100644 index 7acef8ed7f817..0000000000000 --- a/server/licenses/lucene-queries-8.0.0-snapshot-774e9aefbc.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -baaf3082d703c0e89ea5f131de878b586d302e34 \ No newline at end of file diff --git a/server/licenses/lucene-queries-8.0.0-snapshot-a1c6e642aa.jar.sha1 b/server/licenses/lucene-queries-8.0.0-snapshot-a1c6e642aa.jar.sha1 new file mode 100644 index 0000000000000..e50a454a0ebd9 --- /dev/null +++ b/server/licenses/lucene-queries-8.0.0-snapshot-a1c6e642aa.jar.sha1 @@ -0,0 +1 @@ +ab86036efd74fc41730e20dd2d3de977297878e0 \ No newline at end of file diff --git a/server/licenses/lucene-queryparser-8.0.0-snapshot-774e9aefbc.jar.sha1 b/server/licenses/lucene-queryparser-8.0.0-snapshot-774e9aefbc.jar.sha1 deleted file mode 100644 index 9e7b927a563a5..0000000000000 --- a/server/licenses/lucene-queryparser-8.0.0-snapshot-774e9aefbc.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -799b3ffee1401fba8874b0a8ce1ab203c98d9708 \ No newline at end of file diff --git a/server/licenses/lucene-queryparser-8.0.0-snapshot-a1c6e642aa.jar.sha1 b/server/licenses/lucene-queryparser-8.0.0-snapshot-a1c6e642aa.jar.sha1 new file mode 100644 index 0000000000000..8b2d290d647ed --- /dev/null +++ b/server/licenses/lucene-queryparser-8.0.0-snapshot-a1c6e642aa.jar.sha1 @@ -0,0 +1 @@ +02f3f472494f250da6fe7199de6c2f2ef5972774 \ No newline at end of file diff --git a/server/licenses/lucene-sandbox-8.0.0-snapshot-774e9aefbc.jar.sha1 b/server/licenses/lucene-sandbox-8.0.0-snapshot-774e9aefbc.jar.sha1 deleted file mode 100644 index e8bb1cbe8caea..0000000000000 --- a/server/licenses/lucene-sandbox-8.0.0-snapshot-774e9aefbc.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -a6293e2d890e4b1be455524466474a3eaac8be9a \ No newline at end of file diff --git a/server/licenses/lucene-sandbox-8.0.0-snapshot-a1c6e642aa.jar.sha1 b/server/licenses/lucene-sandbox-8.0.0-snapshot-a1c6e642aa.jar.sha1 new file mode 100644 index 0000000000000..2466df430847f --- /dev/null +++ b/server/licenses/lucene-sandbox-8.0.0-snapshot-a1c6e642aa.jar.sha1 @@ -0,0 +1 @@ +da0a248a2bb69499715411b682d45adaea5ab499 \ No newline at end of file diff --git a/server/licenses/lucene-spatial-8.0.0-snapshot-774e9aefbc.jar.sha1 b/server/licenses/lucene-spatial-8.0.0-snapshot-774e9aefbc.jar.sha1 deleted file mode 100644 index f646f423a8d83..0000000000000 --- a/server/licenses/lucene-spatial-8.0.0-snapshot-774e9aefbc.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -77490eb78316afdec9d0889868997777caf820c0 \ No newline at end of file diff --git a/server/licenses/lucene-spatial-8.0.0-snapshot-a1c6e642aa.jar.sha1 b/server/licenses/lucene-spatial-8.0.0-snapshot-a1c6e642aa.jar.sha1 new file mode 100644 index 0000000000000..c8b8762f25a92 --- /dev/null +++ b/server/licenses/lucene-spatial-8.0.0-snapshot-a1c6e642aa.jar.sha1 @@ -0,0 +1 @@ +8ed0aad4c4214d0fe3571dfa2d09c936a91cf3c7 \ No newline at end of file diff --git a/server/licenses/lucene-spatial-extras-8.0.0-snapshot-774e9aefbc.jar.sha1 b/server/licenses/lucene-spatial-extras-8.0.0-snapshot-774e9aefbc.jar.sha1 deleted file mode 100644 index da0905ca36b70..0000000000000 --- 
a/server/licenses/lucene-spatial-extras-8.0.0-snapshot-774e9aefbc.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -4bb18049052ef738702ff5c0b294a5986971ed59 \ No newline at end of file diff --git a/server/licenses/lucene-spatial-extras-8.0.0-snapshot-a1c6e642aa.jar.sha1 b/server/licenses/lucene-spatial-extras-8.0.0-snapshot-a1c6e642aa.jar.sha1 new file mode 100644 index 0000000000000..17b2f3ef4a33c --- /dev/null +++ b/server/licenses/lucene-spatial-extras-8.0.0-snapshot-a1c6e642aa.jar.sha1 @@ -0,0 +1 @@ +9105a1c73feeb836ca4244367f02d6a8d7e3cc27 \ No newline at end of file diff --git a/server/licenses/lucene-spatial3d-8.0.0-snapshot-774e9aefbc.jar.sha1 b/server/licenses/lucene-spatial3d-8.0.0-snapshot-774e9aefbc.jar.sha1 deleted file mode 100644 index 60f7aaf2b53e6..0000000000000 --- a/server/licenses/lucene-spatial3d-8.0.0-snapshot-774e9aefbc.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -936aa91d3b48bd559ee9d25303934f06aa95c1f7 \ No newline at end of file diff --git a/server/licenses/lucene-spatial3d-8.0.0-snapshot-a1c6e642aa.jar.sha1 b/server/licenses/lucene-spatial3d-8.0.0-snapshot-a1c6e642aa.jar.sha1 new file mode 100644 index 0000000000000..62d3f24344e33 --- /dev/null +++ b/server/licenses/lucene-spatial3d-8.0.0-snapshot-a1c6e642aa.jar.sha1 @@ -0,0 +1 @@ +6ebc95227d4415cc6d345c1dd3759e1f348e0ca4 \ No newline at end of file diff --git a/server/licenses/lucene-suggest-8.0.0-snapshot-774e9aefbc.jar.sha1 b/server/licenses/lucene-suggest-8.0.0-snapshot-774e9aefbc.jar.sha1 deleted file mode 100644 index 1ea6540ff82f8..0000000000000 --- a/server/licenses/lucene-suggest-8.0.0-snapshot-774e9aefbc.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -268747a9cbdebb492268aca4558968f9d46d37a9 \ No newline at end of file diff --git a/server/licenses/lucene-suggest-8.0.0-snapshot-a1c6e642aa.jar.sha1 b/server/licenses/lucene-suggest-8.0.0-snapshot-a1c6e642aa.jar.sha1 new file mode 100644 index 0000000000000..8471aff350b83 --- /dev/null +++ b/server/licenses/lucene-suggest-8.0.0-snapshot-a1c6e642aa.jar.sha1 @@ -0,0 +1 @@ +c4cf1b911521f962c9bd2d28efb519bf9e1b88f4 \ No newline at end of file diff --git a/server/src/main/java/org/apache/lucene/document/XLatLonShape.java b/server/src/main/java/org/apache/lucene/document/XLatLonShape.java deleted file mode 100644 index 87ebb6a753fd3..0000000000000 --- a/server/src/main/java/org/apache/lucene/document/XLatLonShape.java +++ /dev/null @@ -1,372 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.apache.lucene.document; - -import org.apache.lucene.geo.GeoUtils; -import org.apache.lucene.geo.Line; -import org.apache.lucene.geo.Polygon; -import org.apache.lucene.geo.XTessellator; -import org.apache.lucene.geo.XTessellator.Triangle; -import org.apache.lucene.index.PointValues; -import org.apache.lucene.search.Query; -import org.apache.lucene.util.BytesRef; -import org.apache.lucene.util.NumericUtils; - -import java.util.ArrayList; -import java.util.List; - -import static org.apache.lucene.geo.GeoEncodingUtils.encodeLatitude; -import static org.apache.lucene.geo.GeoEncodingUtils.encodeLongitude; - -/** - * An indexed shape utility class. - *

- * {@link Polygon}'s are decomposed into a triangular mesh using the {@link XTessellator} utility class
- * Each {@link Triangle} is encoded and indexed as a multi-value field.
- * <p>
- * Finding all shapes that intersect a range (e.g., bounding box) at search time is efficient.
- * <p>
- * This class defines static factory methods for common operations:
- * <ul>
- *   <li>{@link #createIndexableFields(String, Polygon)} for matching polygons that intersect a bounding box.
- *   <li>{@link #newBoxQuery newBoxQuery()} for matching polygons that intersect a bounding box.
- * </ul>
- - * <b>WARNING</b>: Like {@link LatLonPoint}, vertex values are indexed with some loss of precision from the - * original {@code double} values (4.190951585769653E-8 for the latitude component - * and 8.381903171539307E-8 for longitude). - * @see PointValues - * @see LatLonDocValuesField - * - */ -public class XLatLonShape { - public static final int BYTES = LatLonPoint.BYTES; - - protected static final FieldType TYPE = new FieldType(); - static { - TYPE.setDimensions(7, 4, BYTES); - TYPE.freeze(); - } - - // no instance: - private XLatLonShape() { - } - - /** create indexable fields for polygon geometry */ - public static Field[] createIndexableFields(String fieldName, Polygon polygon) { - // the lionshare of the indexing is done by the tessellator - List<Triangle> tessellation = XTessellator.tessellate(polygon); - List<Field> fields = new ArrayList<>(); - for (Triangle t : tessellation) { - fields.add(new LatLonTriangle(fieldName, t)); - } - return fields.toArray(new Field[fields.size()]); - } - - /** create indexable fields for line geometry */ - public static Field[] createIndexableFields(String fieldName, Line line) { - int numPoints = line.numPoints(); - Field[] fields = new Field[numPoints - 1]; - // create "flat" triangles - for (int i = 0, j = 1; j < numPoints; ++i, ++j) { - fields[i] = new LatLonTriangle(fieldName, line.getLat(i), line.getLon(i), line.getLat(j), line.getLon(j), - line.getLat(i), line.getLon(i)); - } - return fields; - } - - /** create indexable fields for point geometry */ - public static Field[] createIndexableFields(String fieldName, double lat, double lon) { - return new Field[] {new LatLonTriangle(fieldName, lat, lon, lat, lon, lat, lon)}; - } - - /** create a query to find all polygons that intersect a defined bounding box - **/ - public static Query newBoxQuery(String field, QueryRelation queryRelation, - double minLatitude, double maxLatitude, double minLongitude, double maxLongitude) { - return new XLatLonShapeBoundingBoxQuery(field, queryRelation, minLatitude, maxLatitude, minLongitude, maxLongitude); - } - - /** create a query to find all polygons that intersect a provided linestring (or array of linestrings) - * note: does not support dateline crossing - **/ - public static Query newLineQuery(String field, QueryRelation queryRelation, Line... lines) { - return new XLatLonShapeLineQuery(field, queryRelation, lines); - } - - /** create a query to find all polygons that intersect a provided polygon (or array of polygons) - * note: does not support dateline crossing - **/ - public static Query newPolygonQuery(String field, QueryRelation queryRelation, Polygon... 
polygons) { - return new XLatLonShapePolygonQuery(field, queryRelation, polygons); - } - - /** polygons are decomposed into tessellated triangles using {@link XTessellator} - * these triangles are encoded and inserted as separate indexed POINT fields - */ - private static class LatLonTriangle extends Field { - - LatLonTriangle(String name, double aLat, double aLon, double bLat, double bLon, double cLat, double cLon) { - super(name, TYPE); - setTriangleValue(encodeLongitude(aLon), encodeLatitude(aLat), encodeLongitude(bLon), encodeLatitude(bLat), - encodeLongitude(cLon), encodeLatitude(cLat)); - } - - LatLonTriangle(String name, Triangle t) { - super(name, TYPE); - setTriangleValue(t.getEncodedX(0), t.getEncodedY(0), t.getEncodedX(1), t.getEncodedY(1), - t.getEncodedX(2), t.getEncodedY(2)); - } - - - public void setTriangleValue(int aX, int aY, int bX, int bY, int cX, int cY) { - final byte[] bytes; - - if (fieldsData == null) { - bytes = new byte[7 * BYTES]; - fieldsData = new BytesRef(bytes); - } else { - bytes = ((BytesRef) fieldsData).bytes; - } - encodeTriangle(bytes, aY, aX, bY, bX, cY, cX); - } - } - - /** Query Relation Types **/ - public enum QueryRelation { - INTERSECTS, WITHIN, DISJOINT - } - - private static final int MINY_MINX_MAXY_MAXX_Y_X = 0; - private static final int MINY_MINX_Y_X_MAXY_MAXX = 1; - private static final int MAXY_MINX_Y_X_MINY_MAXX = 2; - private static final int MAXY_MINX_MINY_MAXX_Y_X = 3; - private static final int Y_MINX_MINY_X_MAXY_MAXX = 4; - private static final int Y_MINX_MINY_MAXX_MAXY_X = 5; - private static final int MAXY_MINX_MINY_X_Y_MAXX = 6; - private static final int MINY_MINX_Y_MAXX_MAXY_X = 7; - - /** - * A triangle is encoded using 6 points and an extra point with encoded information in three bits of how to reconstruct it. - * Triangles are encoded with CCW orientation and might be rotated to limit the number of possible reconstructions to 2^3. - * Reconstruction always happens from west to east. 
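
In short, each triangle is persisted as seven ints: its bounding box (minY, minX, maxY, maxX), the one vertex not recoverable from the box (y, x), and the 3-bit layout code. The sketch below illustrates the two primitives this relies on; it is a minimal illustration, and the class and method names are hypothetical, not Lucene APIs:

```java
// Minimal sketch of the two primitives behind the encoding described above;
// class and method names are invented for illustration.
final class TriangleCodeSketch {

    // Signed-area orientation test: a positive value means the vertices
    // a -> b -> c run counter-clockwise (the orientation the encoder enforces).
    static boolean isCCW(long ax, long ay, long bx, long by, long cx, long cy) {
        return (bx - ax) * (cy - ay) - (by - ay) * (cx - ax) > 0;
    }

    // Keep only the low three bits of the seventh encoded int: eight possible
    // layouts, matching the (((1 << 3) - 1) & bits) expression in decodeTriangle.
    static int layoutCode(int bits) {
        return bits & 0b111;
    }

    public static void main(String[] args) {
        System.out.println(isCCW(0, 0, 10, 0, 0, 10)); // true: CCW triangle
        System.out.println(layoutCode(0xFF));          // 7: only low 3 bits survive
    }
}
```

Because the first four encoded ints are the bounding box, an internal BKD node's min/max packed values can be compared against the query (see relateRangeBBoxToQuery below) without decoding any individual triangle.
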
- */ - public static void encodeTriangle(byte[] bytes, int aLat, int aLon, int bLat, int bLon, int cLat, int cLon) { - assert bytes.length == 7 * BYTES; - int aX; - int bX; - int cX; - int aY; - int bY; - int cY; - //change orientation if CW - if (GeoUtils.orient(aLon, aLat, bLon, bLat, cLon, cLat) == -1) { - aX = cLon; - bX = bLon; - cX = aLon; - aY = cLat; - bY = bLat; - cY = aLat; - } else { - aX = aLon; - bX = bLon; - cX = cLon; - aY = aLat; - bY = bLat; - cY = cLat; - } - //rotate edges and place minX at the beginning - if (bX < aX || cX < aX) { - if (bX < cX) { - int tempX = aX; - int tempY = aY; - aX = bX; - aY = bY; - bX = cX; - bY = cY; - cX = tempX; - cY = tempY; - } else if (cX < aX) { - int tempX = aX; - int tempY = aY; - aX = cX; - aY = cY; - cX = bX; - cY = bY; - bX = tempX; - bY = tempY; - } - } else if (aX == bX && aX == cX) { - //degenerated case, all points with same longitude - //we need to prevent that aX is in the middle (not part of the MBS) - if (bY < aY || cY < aY) { - if (bY < cY) { - int tempX = aX; - int tempY = aY; - aX = bX; - aY = bY; - bX = cX; - bY = cY; - cX = tempX; - cY = tempY; - } else if (cY < aY) { - int tempX = aX; - int tempY = aY; - aX = cX; - aY = cY; - cX = bX; - cY = bY; - bX = tempX; - bY = tempY; - } - } - } - - int minX = aX; - int minY = StrictMath.min(aY, StrictMath.min(bY, cY)); - int maxX = StrictMath.max(aX, StrictMath.max(bX, cX)); - int maxY = StrictMath.max(aY, StrictMath.max(bY, cY)); - - int bits, x, y; - if (minY == aY) { - if (maxY == bY && maxX == bX) { - y = cY; - x = cX; - bits = MINY_MINX_MAXY_MAXX_Y_X; - } else if (maxY == cY && maxX == cX) { - y = bY; - x = bX; - bits = MINY_MINX_Y_X_MAXY_MAXX; - } else { - y = bY; - x = cX; - bits = MINY_MINX_Y_MAXX_MAXY_X; - } - } else if (maxY == aY) { - if (minY == bY && maxX == bX) { - y = cY; - x = cX; - bits = MAXY_MINX_MINY_MAXX_Y_X; - } else if (minY == cY && maxX == cX) { - y = bY; - x = bX; - bits = MAXY_MINX_Y_X_MINY_MAXX; - } else { - y = cY; - x = bX; - bits = MAXY_MINX_MINY_X_Y_MAXX; - } - } else if (maxX == bX && minY == bY) { - y = aY; - x = cX; - bits = Y_MINX_MINY_MAXX_MAXY_X; - } else if (maxX == cX && maxY == cY) { - y = aY; - x = bX; - bits = Y_MINX_MINY_X_MAXY_MAXX; - } else { - throw new IllegalArgumentException("Could not encode the provided triangle"); - } - NumericUtils.intToSortableBytes(minY, bytes, 0); - NumericUtils.intToSortableBytes(minX, bytes, BYTES); - NumericUtils.intToSortableBytes(maxY, bytes, 2 * BYTES); - NumericUtils.intToSortableBytes(maxX, bytes, 3 * BYTES); - NumericUtils.intToSortableBytes(y, bytes, 4 * BYTES); - NumericUtils.intToSortableBytes(x, bytes, 5 * BYTES); - NumericUtils.intToSortableBytes(bits, bytes, 6 * BYTES); - } - - /** - * Decode a triangle encoded by {@link XLatLonShape#encodeTriangle(byte[], int, int, int, int, int, int)}. 
- */ - public static void decodeTriangle(byte[] t, int[] triangle) { - assert triangle.length == 6; - int bits = NumericUtils.sortableBytesToInt(t, 6 * XLatLonShape.BYTES); - //extract the first three bits - int tCode = (((1 << 3) - 1) & (bits >> 0)); - switch (tCode) { - case MINY_MINX_MAXY_MAXX_Y_X: - triangle[0] = NumericUtils.sortableBytesToInt(t, 0 * XLatLonShape.BYTES); - triangle[1] = NumericUtils.sortableBytesToInt(t, 1 * XLatLonShape.BYTES); - triangle[2] = NumericUtils.sortableBytesToInt(t, 2 * XLatLonShape.BYTES); - triangle[3] = NumericUtils.sortableBytesToInt(t, 3 * XLatLonShape.BYTES); - triangle[4] = NumericUtils.sortableBytesToInt(t, 4 * XLatLonShape.BYTES); - triangle[5] = NumericUtils.sortableBytesToInt(t, 5 * XLatLonShape.BYTES); - break; - case MINY_MINX_Y_X_MAXY_MAXX: - triangle[0] = NumericUtils.sortableBytesToInt(t, 0 * XLatLonShape.BYTES); - triangle[1] = NumericUtils.sortableBytesToInt(t, 1 * XLatLonShape.BYTES); - triangle[2] = NumericUtils.sortableBytesToInt(t, 4 * XLatLonShape.BYTES); - triangle[3] = NumericUtils.sortableBytesToInt(t, 5 * XLatLonShape.BYTES); - triangle[4] = NumericUtils.sortableBytesToInt(t, 2 * XLatLonShape.BYTES); - triangle[5] = NumericUtils.sortableBytesToInt(t, 3 * XLatLonShape.BYTES); - break; - case MAXY_MINX_Y_X_MINY_MAXX: - triangle[0] = NumericUtils.sortableBytesToInt(t, 2 * XLatLonShape.BYTES); - triangle[1] = NumericUtils.sortableBytesToInt(t, 1 * XLatLonShape.BYTES); - triangle[2] = NumericUtils.sortableBytesToInt(t, 4 * XLatLonShape.BYTES); - triangle[3] = NumericUtils.sortableBytesToInt(t, 5 * XLatLonShape.BYTES); - triangle[4] = NumericUtils.sortableBytesToInt(t, 0 * XLatLonShape.BYTES); - triangle[5] = NumericUtils.sortableBytesToInt(t, 3 * XLatLonShape.BYTES); - break; - case MAXY_MINX_MINY_MAXX_Y_X: - triangle[0] = NumericUtils.sortableBytesToInt(t, 2 * XLatLonShape.BYTES); - triangle[1] = NumericUtils.sortableBytesToInt(t, 1 * XLatLonShape.BYTES); - triangle[2] = NumericUtils.sortableBytesToInt(t, 0 * XLatLonShape.BYTES); - triangle[3] = NumericUtils.sortableBytesToInt(t, 3 * XLatLonShape.BYTES); - triangle[4] = NumericUtils.sortableBytesToInt(t, 4 * XLatLonShape.BYTES); - triangle[5] = NumericUtils.sortableBytesToInt(t, 5 * XLatLonShape.BYTES); - break; - case Y_MINX_MINY_X_MAXY_MAXX: - triangle[0] = NumericUtils.sortableBytesToInt(t, 4 * XLatLonShape.BYTES); - triangle[1] = NumericUtils.sortableBytesToInt(t, 1 * XLatLonShape.BYTES); - triangle[2] = NumericUtils.sortableBytesToInt(t, 0 * XLatLonShape.BYTES); - triangle[3] = NumericUtils.sortableBytesToInt(t, 5 * XLatLonShape.BYTES); - triangle[4] = NumericUtils.sortableBytesToInt(t, 2 * XLatLonShape.BYTES); - triangle[5] = NumericUtils.sortableBytesToInt(t, 3 * XLatLonShape.BYTES); - break; - case Y_MINX_MINY_MAXX_MAXY_X: - triangle[0] = NumericUtils.sortableBytesToInt(t, 4 * XLatLonShape.BYTES); - triangle[1] = NumericUtils.sortableBytesToInt(t, 1 * XLatLonShape.BYTES); - triangle[2] = NumericUtils.sortableBytesToInt(t, 0 * XLatLonShape.BYTES); - triangle[3] = NumericUtils.sortableBytesToInt(t, 3 * XLatLonShape.BYTES); - triangle[4] = NumericUtils.sortableBytesToInt(t, 2 * XLatLonShape.BYTES); - triangle[5] = NumericUtils.sortableBytesToInt(t, 5 * XLatLonShape.BYTES); - break; - case MAXY_MINX_MINY_X_Y_MAXX: - triangle[0] = NumericUtils.sortableBytesToInt(t, 2 * XLatLonShape.BYTES); - triangle[1] = NumericUtils.sortableBytesToInt(t, 1 * XLatLonShape.BYTES); - triangle[2] = NumericUtils.sortableBytesToInt(t, 0 * XLatLonShape.BYTES); - triangle[3] = 
NumericUtils.sortableBytesToInt(t, 5 * XLatLonShape.BYTES); - triangle[4] = NumericUtils.sortableBytesToInt(t, 4 * XLatLonShape.BYTES); - triangle[5] = NumericUtils.sortableBytesToInt(t, 3 * XLatLonShape.BYTES); - break; - case MINY_MINX_Y_MAXX_MAXY_X: - triangle[0] = NumericUtils.sortableBytesToInt(t, 0 * XLatLonShape.BYTES); - triangle[1] = NumericUtils.sortableBytesToInt(t, 1 * XLatLonShape.BYTES); - triangle[2] = NumericUtils.sortableBytesToInt(t, 4 * XLatLonShape.BYTES); - triangle[3] = NumericUtils.sortableBytesToInt(t, 3 * XLatLonShape.BYTES); - triangle[4] = NumericUtils.sortableBytesToInt(t, 2 * XLatLonShape.BYTES); - triangle[5] = NumericUtils.sortableBytesToInt(t, 5 * XLatLonShape.BYTES); - break; - default: - throw new IllegalArgumentException("Could not decode the provided triangle"); - } - //Points of the decoded triangle must be co-planar or CCW oriented - assert GeoUtils.orient(triangle[1], triangle[0], triangle[3], triangle[2], triangle[5], triangle[4]) >= 0; - } -} diff --git a/server/src/main/java/org/apache/lucene/document/XLatLonShapeBoundingBoxQuery.java b/server/src/main/java/org/apache/lucene/document/XLatLonShapeBoundingBoxQuery.java deleted file mode 100644 index 8e30302225fc5..0000000000000 --- a/server/src/main/java/org/apache/lucene/document/XLatLonShapeBoundingBoxQuery.java +++ /dev/null @@ -1,95 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.apache.lucene.document; - -import org.apache.lucene.geo.Rectangle; -import org.apache.lucene.geo.XRectangle2D; -import org.apache.lucene.index.PointValues.Relation; - -/** - * Finds all previously indexed shapes that intersect the specified bounding box. - * - *
<p>
The field must be indexed using - * {@link XLatLonShape#createIndexableFields} added per document. - * - **/ -final class XLatLonShapeBoundingBoxQuery extends XLatLonShapeQuery { - final XRectangle2D rectangle2D; - - XLatLonShapeBoundingBoxQuery(String field, XLatLonShape.QueryRelation queryRelation, - double minLat, double maxLat, double minLon, double maxLon) { - super(field, queryRelation); - Rectangle rectangle = new Rectangle(minLat, maxLat, minLon, maxLon); - this.rectangle2D = XRectangle2D.create(rectangle); - } - - @Override - protected Relation relateRangeBBoxToQuery(int minXOffset, int minYOffset, byte[] minTriangle, - int maxXOffset, int maxYOffset, byte[] maxTriangle) { - return rectangle2D.relateRangeBBox(minXOffset, minYOffset, minTriangle, maxXOffset, maxYOffset, maxTriangle); - } - - /** returns true if the query matches the encoded triangle */ - @Override - protected boolean queryMatches(byte[] t, int[] scratchTriangle) { - // decode indexed triangle - XLatLonShape.decodeTriangle(t, scratchTriangle); - - int aY = scratchTriangle[0]; - int aX = scratchTriangle[1]; - int bY = scratchTriangle[2]; - int bX = scratchTriangle[3]; - int cY = scratchTriangle[4]; - int cX = scratchTriangle[5]; - - if (queryRelation == XLatLonShape.QueryRelation.WITHIN) { - return rectangle2D.containsTriangle(aX, aY, bX, bY, cX, cY); - } - return rectangle2D.intersectsTriangle(aX, aY, bX, bY, cX, cY); - } - - @Override - public boolean equals(Object o) { - return sameClassAs(o) && equalsTo(getClass().cast(o)); - } - - @Override - protected boolean equalsTo(Object o) { - return super.equalsTo(o) && rectangle2D.equals(((XLatLonShapeBoundingBoxQuery)o).rectangle2D); - } - - @Override - public int hashCode() { - int hash = super.hashCode(); - hash = 31 * hash + rectangle2D.hashCode(); - return hash; - } - - @Override - public String toString(String field) { - final StringBuilder sb = new StringBuilder(); - sb.append(getClass().getSimpleName()); - sb.append(':'); - if (this.field.equals(field) == false) { - sb.append(" field="); - sb.append(this.field); - sb.append(':'); - } - sb.append(rectangle2D.toString()); - return sb.toString(); - } -} diff --git a/server/src/main/java/org/apache/lucene/document/XLatLonShapeLineQuery.java b/server/src/main/java/org/apache/lucene/document/XLatLonShapeLineQuery.java deleted file mode 100644 index 4bbb077303ed9..0000000000000 --- a/server/src/main/java/org/apache/lucene/document/XLatLonShapeLineQuery.java +++ /dev/null @@ -1,133 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.apache.lucene.document; - -import org.apache.lucene.document.XLatLonShape.QueryRelation; -import org.apache.lucene.geo.GeoEncodingUtils; -import org.apache.lucene.geo.Line; -import org.apache.lucene.geo.Line2D; -import org.apache.lucene.index.PointValues.Relation; -import org.apache.lucene.util.NumericUtils; - -import java.util.Arrays; - -/** - * Finds all previously indexed shapes that intersect the specified arbitrary {@code Line}. - *

- * Note:
- * <ul>
- *   <li>{@code QueryRelation.WITHIN} queries are not yet supported</li>
- *   <li>Dateline crossing is not yet supported</li>
- * </ul>
- * <p>
- * todo:
- * <ul>
- *   <li>Add distance support for buffered queries</li>
- * </ul>
- * <p>
The field must be indexed using - * {@link XLatLonShape#createIndexableFields} added per document. - * - **/ -final class XLatLonShapeLineQuery extends XLatLonShapeQuery { - final Line[] lines; - private final Line2D line2D; - - XLatLonShapeLineQuery(String field, QueryRelation queryRelation, Line... lines) { - super(field, queryRelation); - /** line queries do not support within relations, only intersects and disjoint */ - if (queryRelation == QueryRelation.WITHIN) { - throw new IllegalArgumentException("LatLonShapeLineQuery does not support " + QueryRelation.WITHIN + " queries"); - } - - if (lines == null) { - throw new IllegalArgumentException("lines must not be null"); - } - if (lines.length == 0) { - throw new IllegalArgumentException("lines must not be empty"); - } - for (int i = 0; i < lines.length; ++i) { - if (lines[i] == null) { - throw new IllegalArgumentException("line[" + i + "] must not be null"); - } else if (lines[i].minLon > lines[i].maxLon) { - throw new IllegalArgumentException("LatLonShapeLineQuery does not currently support querying across dateline."); - } - } - this.lines = lines.clone(); - this.line2D = Line2D.create(lines); - } - - @Override - protected Relation relateRangeBBoxToQuery(int minXOffset, int minYOffset, byte[] minTriangle, - int maxXOffset, int maxYOffset, byte[] maxTriangle) { - double minLat = GeoEncodingUtils.decodeLatitude(NumericUtils.sortableBytesToInt(minTriangle, minYOffset)); - double minLon = GeoEncodingUtils.decodeLongitude(NumericUtils.sortableBytesToInt(minTriangle, minXOffset)); - double maxLat = GeoEncodingUtils.decodeLatitude(NumericUtils.sortableBytesToInt(maxTriangle, maxYOffset)); - double maxLon = GeoEncodingUtils.decodeLongitude(NumericUtils.sortableBytesToInt(maxTriangle, maxXOffset)); - - // check internal node against query - return line2D.relate(minLat, maxLat, minLon, maxLon); - } - - @Override - protected boolean queryMatches(byte[] t, int[] scratchTriangle) { - XLatLonShape.decodeTriangle(t, scratchTriangle); - - double alat = GeoEncodingUtils.decodeLatitude(scratchTriangle[0]); - double alon = GeoEncodingUtils.decodeLongitude(scratchTriangle[1]); - double blat = GeoEncodingUtils.decodeLatitude(scratchTriangle[2]); - double blon = GeoEncodingUtils.decodeLongitude(scratchTriangle[3]); - double clat = GeoEncodingUtils.decodeLatitude(scratchTriangle[4]); - double clon = GeoEncodingUtils.decodeLongitude(scratchTriangle[5]); - - if (queryRelation == XLatLonShape.QueryRelation.WITHIN) { - return line2D.relateTriangle(alon, alat, blon, blat, clon, clat) == Relation.CELL_INSIDE_QUERY; - } - // INTERSECTS - return line2D.relateTriangle(alon, alat, blon, blat, clon, clat) != Relation.CELL_OUTSIDE_QUERY; - } - - @Override - public String toString(String field) { - final StringBuilder sb = new StringBuilder(); - sb.append(getClass().getSimpleName()); - sb.append(':'); - if (this.field.equals(field) == false) { - sb.append(" field="); - sb.append(this.field); - sb.append(':'); - } - sb.append("Line(" + lines[0].toGeoJSON() + ")"); - return sb.toString(); - } - - @Override - public boolean equals(Object o) { - return super.equals(o); - } - - @Override - protected boolean equalsTo(Object o) { - return super.equalsTo(o) && Arrays.equals(lines, ((XLatLonShapeLineQuery)o).lines); - } - - @Override - public int hashCode() { - int hash = super.hashCode(); - hash = 31 * hash + Arrays.hashCode(lines); - return hash; - } -} diff --git a/server/src/main/java/org/apache/lucene/document/XLatLonShapePolygonQuery.java 
b/server/src/main/java/org/apache/lucene/document/XLatLonShapePolygonQuery.java deleted file mode 100644 index 5b67d8c0bc9d1..0000000000000 --- a/server/src/main/java/org/apache/lucene/document/XLatLonShapePolygonQuery.java +++ /dev/null @@ -1,122 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.apache.lucene.document; - -import org.apache.lucene.document.XLatLonShape.QueryRelation; -import org.apache.lucene.geo.GeoEncodingUtils; -import org.apache.lucene.geo.Polygon; -import org.apache.lucene.geo.Polygon2D; -import org.apache.lucene.index.PointValues.Relation; -import org.apache.lucene.util.NumericUtils; - -import java.util.Arrays; - -/** - * Finds all previously indexed shapes that intersect the specified arbitrary. - * - *
<p>
The field must be indexed using - * {@link XLatLonShape#createIndexableFields} added per document. - * - **/ -final class XLatLonShapePolygonQuery extends XLatLonShapeQuery { - final Polygon[] polygons; - private final Polygon2D poly2D; - - /** - * Creates a query that matches all indexed shapes to the provided polygons - */ - XLatLonShapePolygonQuery(String field, QueryRelation queryRelation, Polygon... polygons) { - super(field, queryRelation); - if (polygons == null) { - throw new IllegalArgumentException("polygons must not be null"); - } - if (polygons.length == 0) { - throw new IllegalArgumentException("polygons must not be empty"); - } - for (int i = 0; i < polygons.length; i++) { - if (polygons[i] == null) { - throw new IllegalArgumentException("polygon[" + i + "] must not be null"); - } else if (polygons[i].minLon > polygons[i].maxLon) { - throw new IllegalArgumentException("LatLonShapePolygonQuery does not currently support querying across dateline."); - } - } - this.polygons = polygons.clone(); - this.poly2D = Polygon2D.create(polygons); - } - - @Override - protected Relation relateRangeBBoxToQuery(int minXOffset, int minYOffset, byte[] minTriangle, - int maxXOffset, int maxYOffset, byte[] maxTriangle) { - - double minLat = GeoEncodingUtils.decodeLatitude(NumericUtils.sortableBytesToInt(minTriangle, minYOffset)); - double minLon = GeoEncodingUtils.decodeLongitude(NumericUtils.sortableBytesToInt(minTriangle, minXOffset)); - double maxLat = GeoEncodingUtils.decodeLatitude(NumericUtils.sortableBytesToInt(maxTriangle, maxYOffset)); - double maxLon = GeoEncodingUtils.decodeLongitude(NumericUtils.sortableBytesToInt(maxTriangle, maxXOffset)); - - // check internal node against query - return poly2D.relate(minLat, maxLat, minLon, maxLon); - } - - @Override - protected boolean queryMatches(byte[] t, int[] scratchTriangle) { - XLatLonShape.decodeTriangle(t, scratchTriangle); - - double alat = GeoEncodingUtils.decodeLatitude(scratchTriangle[0]); - double alon = GeoEncodingUtils.decodeLongitude(scratchTriangle[1]); - double blat = GeoEncodingUtils.decodeLatitude(scratchTriangle[2]); - double blon = GeoEncodingUtils.decodeLongitude(scratchTriangle[3]); - double clat = GeoEncodingUtils.decodeLatitude(scratchTriangle[4]); - double clon = GeoEncodingUtils.decodeLongitude(scratchTriangle[5]); - - if (queryRelation == QueryRelation.WITHIN) { - return poly2D.relateTriangle(alon, alat, blon, blat, clon, clat) == Relation.CELL_INSIDE_QUERY; - } - // INTERSECTS - return poly2D.relateTriangle(alon, alat, blon, blat, clon, clat) != Relation.CELL_OUTSIDE_QUERY; - } - - @Override - public String toString(String field) { - final StringBuilder sb = new StringBuilder(); - sb.append(getClass().getSimpleName()); - sb.append(':'); - if (this.field.equals(field) == false) { - sb.append(" field="); - sb.append(this.field); - sb.append(':'); - } - sb.append("Polygon(" + polygons[0].toGeoJSON() + ")"); - return sb.toString(); - } - - @Override - public boolean equals(Object o) { - return super.equals(o); - } - - @Override - protected boolean equalsTo(Object o) { - return super.equalsTo(o) && Arrays.equals(polygons, ((XLatLonShapePolygonQuery)o).polygons); - } - - @Override - public int hashCode() { - int hash = super.hashCode(); - hash = 31 * hash + Arrays.hashCode(polygons); - return hash; - } -} diff --git a/server/src/main/java/org/apache/lucene/document/XLatLonShapeQuery.java b/server/src/main/java/org/apache/lucene/document/XLatLonShapeQuery.java deleted file mode 100644 index f4c67872cdc51..0000000000000 --- 
a/server/src/main/java/org/apache/lucene/document/XLatLonShapeQuery.java +++ /dev/null @@ -1,363 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.apache.lucene.document; - -import org.apache.lucene.document.XLatLonShape.QueryRelation; -import org.apache.lucene.index.FieldInfo; -import org.apache.lucene.index.LeafReader; -import org.apache.lucene.index.LeafReaderContext; -import org.apache.lucene.index.PointValues; -import org.apache.lucene.index.PointValues.IntersectVisitor; -import org.apache.lucene.index.PointValues.Relation; -import org.apache.lucene.search.ConstantScoreScorer; -import org.apache.lucene.search.ConstantScoreWeight; -import org.apache.lucene.search.DocIdSetIterator; -import org.apache.lucene.search.IndexSearcher; -import org.apache.lucene.search.Query; -import org.apache.lucene.search.ScoreMode; -import org.apache.lucene.search.Scorer; -import org.apache.lucene.search.ScorerSupplier; -import org.apache.lucene.search.Weight; -import org.apache.lucene.util.BitSetIterator; -import org.apache.lucene.util.DocIdSetBuilder; -import org.apache.lucene.util.FixedBitSet; - -import java.io.IOException; -import java.util.Objects; - -/** - * Base LatLonShape Query class providing common query logic for - * {@link XLatLonShapeBoundingBoxQuery} and {@link XLatLonShapePolygonQuery} - * - * Note: this class implements the majority of the INTERSECTS, WITHIN, DISJOINT relation logic - * - **/ -abstract class XLatLonShapeQuery extends Query { - /** field name */ - final String field; - /** query relation - * disjoint: {@code CELL_OUTSIDE_QUERY} - * intersects: {@code CELL_CROSSES_QUERY}, - * within: {@code CELL_WITHIN_QUERY} */ - final XLatLonShape.QueryRelation queryRelation; - - protected XLatLonShapeQuery(String field, final QueryRelation queryType) { - if (field == null) { - throw new IllegalArgumentException("field must not be null"); - } - this.field = field; - this.queryRelation = queryType; - } - - /** - * relates an internal node (bounding box of a range of triangles) to the target query - * Note: logic is specific to query type - * see {@link XLatLonShapeBoundingBoxQuery#relateRangeToQuery} and {@link XLatLonShapePolygonQuery#relateRangeToQuery} - */ - protected abstract Relation relateRangeBBoxToQuery(int minXOffset, int minYOffset, byte[] minTriangle, - int maxXOffset, int maxYOffset, byte[] maxTriangle); - - /** returns true if the provided triangle matches the query */ - protected abstract boolean queryMatches(byte[] triangle, int[] scratchTriangle); - - /** relates a range of triangles (internal node) to the query */ - protected Relation relateRangeToQuery(byte[] minTriangle, byte[] maxTriangle) { - // compute bounding box of internal node - Relation r = relateRangeBBoxToQuery(XLatLonShape.BYTES, 0, 
minTriangle, 3 * XLatLonShape.BYTES, - 2 * XLatLonShape.BYTES, maxTriangle); - if (queryRelation == QueryRelation.DISJOINT) { - return transposeRelation(r); - } - return r; - } - - @Override - public final Weight createWeight(IndexSearcher searcher, ScoreMode scoreMode, float boost) throws IOException { - - return new ConstantScoreWeight(this, boost) { - - /** create a visitor that adds documents that match the query using a sparse bitset. (Used by INTERSECT) */ - protected IntersectVisitor getSparseIntersectVisitor(DocIdSetBuilder result) { - return new IntersectVisitor() { - final int[] scratchTriangle = new int[6]; - DocIdSetBuilder.BulkAdder adder; - - @Override - public void grow(int count) { - adder = result.grow(count); - } - - @Override - public void visit(int docID) throws IOException { - adder.add(docID); - } - - @Override - public void visit(int docID, byte[] t) throws IOException { - if (queryMatches(t, scratchTriangle)) { - adder.add(docID); - } - } - - @Override - public Relation compare(byte[] minTriangle, byte[] maxTriangle) { - return relateRangeToQuery(minTriangle, maxTriangle); - } - }; - } - - /** create a visitor that adds documents that match the query using a dense bitset. (Used by WITHIN, DISJOINT) */ - protected IntersectVisitor getDenseIntersectVisitor(FixedBitSet intersect, FixedBitSet disjoint) { - return new IntersectVisitor() { - final int[] scratchTriangle = new int[6]; - @Override - public void visit(int docID) throws IOException { - if (queryRelation == QueryRelation.DISJOINT) { - // if DISJOINT query set the doc in the disjoint bitset - disjoint.set(docID); - } else { - // for INTERSECT, and WITHIN queries we set the intersect bitset - intersect.set(docID); - } - } - - @Override - public void visit(int docID, byte[] t) throws IOException { - if (queryMatches(t, scratchTriangle)) { - intersect.set(docID); - } else { - disjoint.set(docID); - } - } - - @Override - public Relation compare(byte[] minTriangle, byte[] maxTriangle) { - return relateRangeToQuery(minTriangle, maxTriangle); - } - }; - } - - /** get a scorer supplier for INTERSECT queries */ - protected ScorerSupplier getIntersectScorerSupplier(LeafReader reader, PointValues values, Weight weight, - ScoreMode scoreMode) throws IOException { - DocIdSetBuilder result = new DocIdSetBuilder(reader.maxDoc(), values, field); - IntersectVisitor visitor = getSparseIntersectVisitor(result); - return new RelationScorerSupplier(values, visitor) { - @Override - public Scorer get(long leadCost) throws IOException { - return getIntersectsScorer(XLatLonShapeQuery.this, reader, weight, result, score(), scoreMode); - } - }; - } - - /** get a scorer supplier for all other queries (DISJOINT, WITHIN) */ - protected ScorerSupplier getScorerSupplier(LeafReader reader, PointValues values, Weight weight, - ScoreMode scoreMode) throws IOException { - if (queryRelation == QueryRelation.INTERSECTS) { - return getIntersectScorerSupplier(reader, values, weight, scoreMode); - } - - FixedBitSet intersect = new FixedBitSet(reader.maxDoc()); - FixedBitSet disjoint = new FixedBitSet(reader.maxDoc()); - IntersectVisitor visitor = getDenseIntersectVisitor(intersect, disjoint); - return new RelationScorerSupplier(values, visitor) { - @Override - public Scorer get(long leadCost) throws IOException { - return getScorer(XLatLonShapeQuery.this, weight, intersect, disjoint, score(), scoreMode); - } - }; - } - - @Override - public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOException { - LeafReader reader = 
context.reader(); - PointValues values = reader.getPointValues(field); - if (values == null) { - // No docs in this segment had any points fields - return null; - } - FieldInfo fieldInfo = reader.getFieldInfos().fieldInfo(field); - if (fieldInfo == null) { - // No docs in this segment indexed this field at all - return null; - } - - boolean allDocsMatch = true; - if (values.getDocCount() != reader.maxDoc() || - relateRangeToQuery(values.getMinPackedValue(), values.getMaxPackedValue()) != Relation.CELL_INSIDE_QUERY) { - allDocsMatch = false; - } - - final Weight weight = this; - if (allDocsMatch) { - return new ScorerSupplier() { - @Override - public Scorer get(long leadCost) throws IOException { - return new ConstantScoreScorer(weight, score(), scoreMode, DocIdSetIterator.all(reader.maxDoc())); - } - - @Override - public long cost() { - return reader.maxDoc(); - } - }; - } else { - return getScorerSupplier(reader, values, weight, scoreMode); - } - } - - @Override - public Scorer scorer(LeafReaderContext context) throws IOException { - ScorerSupplier scorerSupplier = scorerSupplier(context); - if (scorerSupplier == null) { - return null; - } - return scorerSupplier.get(Long.MAX_VALUE); - } - - @Override - public boolean isCacheable(LeafReaderContext ctx) { - return true; - } - }; - } - - /** returns the field name */ - public String getField() { - return field; - } - - /** returns the query relation */ - public QueryRelation getQueryRelation() { - return queryRelation; - } - - @Override - public int hashCode() { - int hash = classHash(); - hash = 31 * hash + field.hashCode(); - hash = 31 * hash + queryRelation.hashCode(); - return hash; - } - - @Override - public boolean equals(Object o) { - return sameClassAs(o) && equalsTo(o); - } - - protected boolean equalsTo(Object o) { - return Objects.equals(field, ((XLatLonShapeQuery)o).field) && this.queryRelation == ((XLatLonShapeQuery)o).queryRelation; - } - - /** transpose the relation; INSIDE becomes OUTSIDE, OUTSIDE becomes INSIDE, CROSSES remains unchanged */ - private static Relation transposeRelation(Relation r) { - if (r == Relation.CELL_INSIDE_QUERY) { - return Relation.CELL_OUTSIDE_QUERY; - } else if (r == Relation.CELL_OUTSIDE_QUERY) { - return Relation.CELL_INSIDE_QUERY; - } - return Relation.CELL_CROSSES_QUERY; - } - - /** utility class for implementing constant score logic specific to INTERSECT, WITHIN, and DISJOINT */ - private abstract static class RelationScorerSupplier extends ScorerSupplier { - PointValues values; - IntersectVisitor visitor; - long cost = -1; - - RelationScorerSupplier(PointValues values, IntersectVisitor visitor) { - this.values = values; - this.visitor = visitor; - } - - /** create a visitor that clears documents that do NOT match the polygon query; used with INTERSECTS */ - private IntersectVisitor getInverseIntersectVisitor(XLatLonShapeQuery query, FixedBitSet result, int[] cost) { - return new IntersectVisitor() { - int[] scratchTriangle = new int[6]; - @Override - public void visit(int docID) { - result.clear(docID); - cost[0]--; - } - - @Override - public void visit(int docID, byte[] packedTriangle) { - if (query.queryMatches(packedTriangle, scratchTriangle) == false) { - result.clear(docID); - cost[0]--; - } - } - - @Override - public Relation compare(byte[] minPackedValue, byte[] maxPackedValue) { - return transposeRelation(query.relateRangeToQuery(minPackedValue, maxPackedValue)); - } - }; - } - - /** returns a Scorer for INTERSECT queries that uses a sparse bitset */ - protected Scorer 
getIntersectsScorer(XLatLonShapeQuery query, LeafReader reader, Weight weight, - DocIdSetBuilder docIdSetBuilder, final float boost, - ScoreMode scoreMode) throws IOException { - if (values.getDocCount() == reader.maxDoc() - && values.getDocCount() == values.size() - && cost() > reader.maxDoc() / 2) { - // If all docs have exactly one value and the cost is greater - // than half the leaf size then maybe we can make things faster - // by computing the set of documents that do NOT match the query - final FixedBitSet result = new FixedBitSet(reader.maxDoc()); - result.set(0, reader.maxDoc()); - int[] cost = new int[]{reader.maxDoc()}; - values.intersect(getInverseIntersectVisitor(query, result, cost)); - final DocIdSetIterator iterator = new BitSetIterator(result, cost[0]); - return new ConstantScoreScorer(weight, boost, scoreMode, iterator); - } - - values.intersect(visitor); - DocIdSetIterator iterator = docIdSetBuilder.build().iterator(); - return new ConstantScoreScorer(weight, boost, scoreMode, iterator); - } - - /** returns a Scorer for all other (non INTERSECT) queries */ - protected Scorer getScorer(XLatLonShapeQuery query, Weight weight, - FixedBitSet intersect, FixedBitSet disjoint, final float boost, - ScoreMode scoreMode) throws IOException { - values.intersect(visitor); - DocIdSetIterator iterator; - if (query.queryRelation == QueryRelation.DISJOINT) { - disjoint.andNot(intersect); - iterator = new BitSetIterator(disjoint, cost()); - } else if (query.queryRelation == QueryRelation.WITHIN) { - intersect.andNot(disjoint); - iterator = new BitSetIterator(intersect, cost()); - } else { - iterator = new BitSetIterator(intersect, cost()); - } - return new ConstantScoreScorer(weight, boost, scoreMode, iterator); - } - - @Override - public long cost() { - if (cost == -1) { - // Computing the cost may be expensive, so only do it if necessary - cost = values.estimatePointCount(visitor); - assert cost >= 0; - } - return cost; - } - } -} diff --git a/server/src/main/java/org/apache/lucene/geo/XRectangle2D.java b/server/src/main/java/org/apache/lucene/geo/XRectangle2D.java deleted file mode 100644 index 0267ba29b86fc..0000000000000 --- a/server/src/main/java/org/apache/lucene/geo/XRectangle2D.java +++ /dev/null @@ -1,316 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
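
A side note on the DISJOINT/WITHIN bookkeeping in the deleted XLatLonShapeQuery above: the dense visitor marks `intersect` for every document with at least one matching triangle and `disjoint` for every document with at least one non-matching triangle, and getScorer then combines the two. A minimal standalone sketch, using java.util.BitSet in place of Lucene's FixedBitSet, with hypothetical names:

```java
import java.util.BitSet;

// Sketch of the bitset combination in getScorer: `intersect` marks docs with
// at least one matching triangle, `disjoint` docs with at least one
// non-matching triangle. Plain java.util.BitSet stands in for FixedBitSet.
final class RelationCombineSketch {

    /** WITHIN: docs whose triangles all matched (marked matching, never failing). */
    static BitSet within(BitSet intersect, BitSet disjoint) {
        BitSet result = (BitSet) intersect.clone();
        result.andNot(disjoint); // drop docs that also had a non-matching triangle
        return result;
    }

    /** DISJOINT: docs whose triangles all failed (marked failing, never matching). */
    static BitSet disjointOnly(BitSet intersect, BitSet disjoint) {
        BitSet result = (BitSet) disjoint.clone();
        result.andNot(intersect); // drop docs that also had a matching triangle
        return result;
    }

    public static void main(String[] args) {
        BitSet intersect = new BitSet(); intersect.set(1); intersect.set(2);
        BitSet disjoint  = new BitSet(); disjoint.set(2);  disjoint.set(3);
        System.out.println(within(intersect, disjoint));       // {1}
        System.out.println(disjointOnly(intersect, disjoint)); // {3}
    }
}
```
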
- */ - -package org.apache.lucene.geo; - -import org.apache.lucene.document.XLatLonShape; -import org.apache.lucene.index.PointValues; -import org.apache.lucene.util.FutureArrays; -import org.apache.lucene.util.NumericUtils; - -import java.util.Arrays; - -import static org.apache.lucene.document.XLatLonShape.BYTES; -import static org.apache.lucene.geo.GeoEncodingUtils.MAX_LON_ENCODED; -import static org.apache.lucene.geo.GeoEncodingUtils.MIN_LON_ENCODED; -import static org.apache.lucene.geo.GeoEncodingUtils.decodeLatitude; -import static org.apache.lucene.geo.GeoEncodingUtils.decodeLongitude; -import static org.apache.lucene.geo.GeoEncodingUtils.encodeLatitude; -import static org.apache.lucene.geo.GeoEncodingUtils.encodeLatitudeCeil; -import static org.apache.lucene.geo.GeoEncodingUtils.encodeLongitude; -import static org.apache.lucene.geo.GeoEncodingUtils.encodeLongitudeCeil; -import static org.apache.lucene.geo.GeoUtils.orient; - -/** - * 2D rectangle implementation containing spatial logic. - * - */ -public class XRectangle2D { - final byte[] bbox; - final byte[] west; - final int minX; - final int maxX; - final int minY; - final int maxY; - - private XRectangle2D(double minLat, double maxLat, double minLon, double maxLon) { - this.bbox = new byte[4 * BYTES]; - int minXenc = encodeLongitudeCeil(minLon); - int maxXenc = encodeLongitude(maxLon); - int minYenc = encodeLatitudeCeil(minLat); - int maxYenc = encodeLatitude(maxLat); - if (minYenc > maxYenc) { - minYenc = maxYenc; - } - this.minY = minYenc; - this.maxY = maxYenc; - - if (minLon > maxLon == true) { - // crossing dateline is split into east/west boxes - this.west = new byte[4 * BYTES]; - this.minX = minXenc; - this.maxX = maxXenc; - encode(MIN_LON_ENCODED, this.maxX, this.minY, this.maxY, this.west); - encode(this.minX, MAX_LON_ENCODED, this.minY, this.maxY, this.bbox); - } else { - // encodeLongitudeCeil may cause minX to be > maxX iff - // the delta between the longitude < the encoding resolution - if (minXenc > maxXenc) { - minXenc = maxXenc; - } - this.west = null; - this.minX = minXenc; - this.maxX = maxXenc; - encode(this.minX, this.maxX, this.minY, this.maxY, bbox); - } - } - - /** Builds a XRectangle2D from rectangle */ - public static XRectangle2D create(Rectangle rectangle) { - return new XRectangle2D(rectangle.minLat, rectangle.maxLat, rectangle.minLon, rectangle.maxLon); - } - - public boolean crossesDateline() { - return minX > maxX; - } - - /** Checks if the rectangle contains the provided point **/ - public boolean queryContainsPoint(int x, int y) { - if (this.crossesDateline() == true) { - return bboxContainsPoint(x, y, MIN_LON_ENCODED, this.maxX, this.minY, this.maxY) - || bboxContainsPoint(x, y, this.minX, MAX_LON_ENCODED, this.minY, this.maxY); - } - return bboxContainsPoint(x, y, this.minX, this.maxX, this.minY, this.maxY); - } - - /** compare this to a provided rangle bounding box **/ - public PointValues.Relation relateRangeBBox(int minXOffset, int minYOffset, byte[] minTriangle, - int maxXOffset, int maxYOffset, byte[] maxTriangle) { - PointValues.Relation eastRelation = compareBBoxToRangeBBox(this.bbox, minXOffset, minYOffset, minTriangle, - maxXOffset, maxYOffset, maxTriangle); - if (this.crossesDateline() && eastRelation == PointValues.Relation.CELL_OUTSIDE_QUERY) { - return compareBBoxToRangeBBox(this.west, minXOffset, minYOffset, minTriangle, maxXOffset, maxYOffset, maxTriangle); - } - return eastRelation; - } - - /** Checks if the rectangle intersects the provided triangle **/ - public boolean 
intersectsTriangle(int aX, int aY, int bX, int bY, int cX, int cY) { - // 1. query contains any triangle points - if (queryContainsPoint(aX, aY) || queryContainsPoint(bX, bY) || queryContainsPoint(cX, cY)) { - return true; - } - - // compute bounding box of triangle - int tMinX = StrictMath.min(StrictMath.min(aX, bX), cX); - int tMaxX = StrictMath.max(StrictMath.max(aX, bX), cX); - int tMinY = StrictMath.min(StrictMath.min(aY, bY), cY); - int tMaxY = StrictMath.max(StrictMath.max(aY, bY), cY); - - // 2. check bounding boxes are disjoint - if (this.crossesDateline() == true) { - if (boxesAreDisjoint(tMinX, tMaxX, tMinY, tMaxY, MIN_LON_ENCODED, this.maxX, this.minY, this.maxY) - && boxesAreDisjoint(tMinX, tMaxX, tMinY, tMaxY, this.minX, MAX_LON_ENCODED, this.minY, this.maxY)) { - return false; - } - } else if (tMaxX < minX || tMinX > maxX || tMinY > maxY || tMaxY < minY) { - return false; - } - - // 3. check triangle contains any query points - if (XTessellator.pointInTriangle(minX, minY, aX, aY, bX, bY, cX, cY)) { - return true; - } else if (XTessellator.pointInTriangle(maxX, minY, aX, aY, bX, bY, cX, cY)) { - return true; - } else if (XTessellator.pointInTriangle(maxX, maxY, aX, aY, bX, bY, cX, cY)) { - return true; - } else if (XTessellator.pointInTriangle(minX, maxY, aX, aY, bX, bY, cX, cY)) { - return true; - } - - // 4. last ditch effort: check crossings - if (queryIntersects(aX, aY, bX, bY, cX, cY)) { - return true; - } - return false; - } - - /** Checks if the rectangle contains the provided triangle **/ - public boolean containsTriangle(int ax, int ay, int bx, int by, int cx, int cy) { - if (this.crossesDateline() == true) { - return bboxContainsTriangle(ax, ay, bx, by, cx, cy, MIN_LON_ENCODED, this.maxX, this.minY, this.maxY) - || bboxContainsTriangle(ax, ay, bx, by, cx, cy, this.minX, MAX_LON_ENCODED, this.minY, this.maxY); - } - return bboxContainsTriangle(ax, ay, bx, by, cx, cy, minX, maxX, minY, maxY); - } - - /** static utility method to compare a bbox with a range of triangles (just the bbox of the triangle collection) */ - private static PointValues.Relation compareBBoxToRangeBBox(final byte[] bbox, - int minXOffset, int minYOffset, byte[] minTriangle, - int maxXOffset, int maxYOffset, byte[] maxTriangle) { - // check bounding box (DISJOINT) - if (FutureArrays.compareUnsigned(minTriangle, minXOffset, minXOffset + BYTES, bbox, 3 * BYTES, 4 * BYTES) > 0 || - FutureArrays.compareUnsigned(maxTriangle, maxXOffset, maxXOffset + BYTES, bbox, BYTES, 2 * BYTES) < 0 || - FutureArrays.compareUnsigned(minTriangle, minYOffset, minYOffset + BYTES, bbox, 2 * BYTES, 3 * BYTES) > 0 || - FutureArrays.compareUnsigned(maxTriangle, maxYOffset, maxYOffset + BYTES, bbox, 0, BYTES) < 0) { - return PointValues.Relation.CELL_OUTSIDE_QUERY; - } - - if (FutureArrays.compareUnsigned(minTriangle, minXOffset, minXOffset + BYTES, bbox, BYTES, 2 * BYTES) >= 0 && - FutureArrays.compareUnsigned(maxTriangle, maxXOffset, maxXOffset + BYTES, bbox, 3 * BYTES, 4 * BYTES) <= 0 && - FutureArrays.compareUnsigned(minTriangle, minYOffset, minYOffset + BYTES, bbox, 0, BYTES) >= 0 && - FutureArrays.compareUnsigned(maxTriangle, maxYOffset, maxYOffset + BYTES, bbox, 2 * BYTES, 3 * BYTES) <= 0) { - return PointValues.Relation.CELL_INSIDE_QUERY; - } - return PointValues.Relation.CELL_CROSSES_QUERY; - } - - /** - * encodes a bounding box into the provided byte array - */ - private static void encode(final int minX, final int maxX, final int minY, final int maxY, byte[] b) { - if (b == null) { - b = new byte[4 * 
XLatLonShape.BYTES]; - } - NumericUtils.intToSortableBytes(minY, b, 0); - NumericUtils.intToSortableBytes(minX, b, BYTES); - NumericUtils.intToSortableBytes(maxY, b, 2 * BYTES); - NumericUtils.intToSortableBytes(maxX, b, 3 * BYTES); - } - - /** returns true if the query intersects the provided triangle (in encoded space) */ - private boolean queryIntersects(int ax, int ay, int bx, int by, int cx, int cy) { - // check each edge of the triangle against the query - if (edgeIntersectsQuery(ax, ay, bx, by) || - edgeIntersectsQuery(bx, by, cx, cy) || - edgeIntersectsQuery(cx, cy, ax, ay)) { - return true; - } - return false; - } - - /** returns true if the edge (defined by (ax, ay) (bx, by)) intersects the query */ - private boolean edgeIntersectsQuery(int ax, int ay, int bx, int by) { - if (this.crossesDateline() == true) { - return edgeIntersectsBox(ax, ay, bx, by, MIN_LON_ENCODED, this.maxX, this.minY, this.maxY) - || edgeIntersectsBox(ax, ay, bx, by, this.minX, MAX_LON_ENCODED, this.minY, this.maxY); - } - return edgeIntersectsBox(ax, ay, bx, by, this.minX, this.maxX, this.minY, this.maxY); - } - - /** static utility method to check if a bounding box contains a point */ - private static boolean bboxContainsPoint(int x, int y, int minX, int maxX, int minY, int maxY) { - return (x < minX || x > maxX || y < minY || y > maxY) == false; - } - - /** static utility method to check if a bounding box contains a triangle */ - private static boolean bboxContainsTriangle(int ax, int ay, int bx, int by, int cx, int cy, - int minX, int maxX, int minY, int maxY) { - return bboxContainsPoint(ax, ay, minX, maxX, minY, maxY) - && bboxContainsPoint(bx, by, minX, maxX, minY, maxY) - && bboxContainsPoint(cx, cy, minX, maxX, minY, maxY); - } - - /** returns true if the edge (defined by (ax, ay) (bx, by)) intersects the query */ - private static boolean edgeIntersectsBox(int ax, int ay, int bx, int by, - int minX, int maxX, int minY, int maxY) { - // shortcut: if edge is a point (occurs w/ Line shapes); simply check bbox w/ point - if (ax == bx && ay == by) { - return Rectangle.containsPoint(ay, ax, minY, maxY, minX, maxX); - } - - // shortcut: check if either of the end points fall inside the box - if (bboxContainsPoint(ax, ay, minX, maxX, minY, maxY) - || bboxContainsPoint(bx, by, minX, maxX, minY, maxY)) { - return true; - } - - // shortcut: check bboxes of edges are disjoint - if (boxesAreDisjoint(Math.min(ax, bx), Math.max(ax, bx), Math.min(ay, by), Math.max(ay, by), - minX, maxX, minY, maxY)) { - return false; - } - - // shortcut: edge is a point - if (ax == bx && ay == by) { - return false; - } - - // top - if (orient(ax, ay, bx, by, minX, maxY) * orient(ax, ay, bx, by, maxX, maxY) <= 0 && - orient(minX, maxY, maxX, maxY, ax, ay) * orient(minX, maxY, maxX, maxY, bx, by) <= 0) { - return true; - } - - // right - if (orient(ax, ay, bx, by, maxX, maxY) * orient(ax, ay, bx, by, maxX, minY) <= 0 && - orient(maxX, maxY, maxX, minY, ax, ay) * orient(maxX, maxY, maxX, minY, bx, by) <= 0) { - return true; - } - - // bottom - if (orient(ax, ay, bx, by, maxX, minY) * orient(ax, ay, bx, by, minX, minY) <= 0 && - orient(maxX, minY, minX, minY, ax, ay) * orient(maxX, minY, minX, minY, bx, by) <= 0) { - return true; - } - - // left - if (orient(ax, ay, bx, by, minX, minY) * orient(ax, ay, bx, by, minX, maxY) <= 0 && - orient(minX, minY, minX, maxY, ax, ay) * orient(minX, minY, minX, maxY, bx, by) <= 0) { - return true; - } - return false; - } - - /** utility method to check if two boxes are disjoint */ - private static 
boolean boxesAreDisjoint(final int aMinX, final int aMaxX, final int aMinY, final int aMaxY, - final int bMinX, final int bMaxX, final int bMinY, final int bMaxY) { - return (aMaxX < bMinX || aMinX > bMaxX || aMaxY < bMinY || aMinY > bMaxY); - } - - @Override - public boolean equals(Object o) { - return Arrays.equals(bbox, ((XRectangle2D)o).bbox) - && Arrays.equals(west, ((XRectangle2D)o).west); - } - - @Override - public int hashCode() { - int hash = super.hashCode(); - hash = 31 * hash + Arrays.hashCode(bbox); - hash = 31 * hash + Arrays.hashCode(west); - return hash; - } - - @Override - public String toString() { - final StringBuilder sb = new StringBuilder(); - sb.append("Rectangle(lat="); - sb.append(decodeLatitude(minY)); - sb.append(" TO "); - sb.append(decodeLatitude(maxY)); - sb.append(" lon="); - sb.append(decodeLongitude(minX)); - sb.append(" TO "); - sb.append(decodeLongitude(maxX)); - if (maxX < minX) { - sb.append(" [crosses dateline!]"); - } - sb.append(")"); - return sb.toString(); - } -} diff --git a/server/src/main/java/org/apache/lucene/geo/XTessellator.java b/server/src/main/java/org/apache/lucene/geo/XTessellator.java deleted file mode 100644 index 48091439ba98f..0000000000000 --- a/server/src/main/java/org/apache/lucene/geo/XTessellator.java +++ /dev/null @@ -1,888 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.apache.lucene.geo; - -import java.util.ArrayList; -import java.util.List; - -import org.apache.lucene.geo.GeoUtils.WindingOrder; -import org.apache.lucene.util.BitUtil; - -import static org.apache.lucene.geo.GeoEncodingUtils.encodeLatitude; -import static org.apache.lucene.geo.GeoEncodingUtils.encodeLongitude; -import static org.apache.lucene.geo.GeoUtils.orient; - -/** - * Computes a triangular mesh tessellation for a given polygon. - *

- * This is inspired by mapbox's earcut algorithm (https://github.com/mapbox/earcut)
- * which is a modification to FIST (https://www.cosy.sbg.ac.at/~held/projects/triang/triang.html)
- * written by Martin Held, and ear clipping (https://www.geometrictools.com/Documentation/TriangulationByEarClipping.pdf)
- * written by David Eberly.
- *
- * Notes:
- * <ul>
- *   <li>Requires valid polygons:
- *     <ul>
- *       <li>No self intersections</li>
- *       <li>Holes may only touch at one vertex</li>
- *       <li>Polygon must have an area (e.g., no "line" boxes)</li>
- *       <li>Sensitive to overflow (e.g., subatomic values such as E-200 can cause unexpected behavior)</li>
- *     </ul>
- *   </li>
- * </ul>
- *
- * The code is a modified version of the javascript implementation provided by MapBox
- * under the following license:
- *
- * ISC License
- *
- * Copyright (c) 2016, Mapbox
- *
- * Permission to use, copy, modify, and/or distribute this software for any purpose
- * with or without fee is hereby granted, provided that the above copyright notice
- * and this permission notice appear in all copies.

- * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH' - * REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND - * FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, - * INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS - * OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER - * TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF - * THIS SOFTWARE. - * - */ -public final class XTessellator { - // this is a dumb heuristic to control whether we cut over to sorted morton values - private static final int VERTEX_THRESHOLD = 80; - - /** state of the tessellated split - avoids recursion */ - private enum State { - INIT, CURE, SPLIT - } - - // No Instance: - private XTessellator() {} - - /** Produces an array of vertices representing the triangulated result set of the Points array */ - public static List tessellate(final Polygon polygon) { - // Attempt to establish a doubly-linked list of the provided shell points (should be CCW, but this will correct); - // then filter instances of intersections. - Node outerNode = createDoublyLinkedList(polygon, 0, WindingOrder.CW); - // If an outer node hasn't been detected, the shape is malformed. (must comply with OGC SFA specification) - if(outerNode == null) { - throw new IllegalArgumentException("Malformed shape detected in XTessellator!"); - } - - // Determine if the specified list of points contains holes - if (polygon.numHoles() > 0) { - // Eliminate the hole triangulation. - outerNode = eliminateHoles(polygon, outerNode); - } - - // If the shape crosses VERTEX_THRESHOLD, use z-order curve hashing: - final boolean mortonOptimized; - { - int threshold = VERTEX_THRESHOLD - polygon.numPoints(); - for (int i = 0; threshold >= 0 && i < polygon.numHoles(); ++i) { - threshold -= polygon.getHole(i).numPoints(); - } - - // Link polygon nodes in Z-Order - mortonOptimized = threshold < 0; - if (mortonOptimized == true) { - sortByMorton(outerNode); - } - } - // Calculate the tessellation using the doubly LinkedList. - List result = earcutLinkedList(outerNode, new ArrayList<>(), State.INIT, mortonOptimized); - if (result.size() == 0) { - throw new IllegalArgumentException("Unable to Tessellate shape [" + polygon + "]. Possible malformed shape detected."); - } - - return result; - } - - /** Creates a circular doubly linked list using polygon points. The order is governed by the specified winding order */ - private static Node createDoublyLinkedList(final Polygon polygon, int startIndex, final WindingOrder windingOrder) { - Node lastNode = null; - // Link points into the circular doubly-linked list in the specified winding order - if (windingOrder == polygon.getWindingOrder()) { - for (int i = 0; i < polygon.numPoints(); ++i) { - lastNode = insertNode(polygon, startIndex++, i, lastNode); - } - } else { - for (int i = polygon.numPoints() - 1; i >= 0; --i) { - lastNode = insertNode(polygon, startIndex++, i, lastNode); - } - } - // if first and last node are the same then remove the end node and set lastNode to the start - if (lastNode != null && isVertexEquals(lastNode, lastNode.next)) { - removeNode(lastNode); - lastNode = lastNode.next; - } - - // Return the last node in the Doubly-Linked List - return filterPoints(lastNode, null); - } - - /** Links every hole into the outer loop, producing a single-ring polygon without holes. 
**/ - private static Node eliminateHoles(final Polygon polygon, Node outerNode) { - // Define a list to hole a reference to each filtered hole list. - final List holeList = new ArrayList<>(); - // Iterate through each array of hole vertices. - Polygon[] holes = polygon.getHoles(); - int nodeIndex = polygon.numPoints(); - for(int i = 0; i < polygon.numHoles(); ++i) { - // create the doubly-linked hole list - Node list = createDoublyLinkedList(holes[i], nodeIndex, WindingOrder.CCW); - if (list == list.next) { - list.isSteiner = true; - } - // Determine if the resulting hole polygon was successful. - if(list != null) { - // Add the leftmost vertex of the hole. - holeList.add(fetchLeftmost(list)); - } - nodeIndex += holes[i].numPoints(); - } - - // Sort the hole vertices by x coordinate - holeList.sort((Node pNodeA, Node pNodeB) -> - pNodeA.getX() < pNodeB.getX() ? -1 : pNodeA.getX() == pNodeB.getX() ? 0 : 1); - - // Process holes from left to right. - for(int i = 0; i < holeList.size(); ++i) { - // Eliminate hole triangles from the result set - final Node holeNode = holeList.get(i); - eliminateHole(holeNode, outerNode); - // Filter the new polygon. - outerNode = filterPoints(outerNode, outerNode.next); - } - // Return a pointer to the list. - return outerNode; - } - - /** Finds a bridge between vertices that connects a hole with an outer ring, and links it */ - private static void eliminateHole(final Node holeNode, Node outerNode) { - // Attempt to find a logical bridge between the HoleNode and OuterNode. - outerNode = fetchHoleBridge(holeNode, outerNode); - // Determine whether a hole bridge could be fetched. - if(outerNode != null) { - // Split the resulting polygon. - Node node = splitPolygon(outerNode, holeNode); - // Filter the split nodes. - filterPoints(node, node.next); - } - } - - /** - * David Eberly's algorithm for finding a bridge between a hole and outer polygon - * - * see: http://www.geometrictools.com/Documentation/TriangulationByEarClipping.pdf - **/ - private static Node fetchHoleBridge(final Node holeNode, final Node outerNode) { - Node p = outerNode; - double qx = Double.NEGATIVE_INFINITY; - final double hx = holeNode.getX(); - final double hy = holeNode.getY(); - Node connection = null; - // 1. find a segment intersected by a ray from the hole's leftmost point to the left; - // segment's endpoint with lesser x will be potential connection point - { - do { - if (hy <= p.getY() && hy >= p.next.getY() && p.next.getY() != p.getY()) { - final double x = p.getX() + (hy - p.getY()) * (p.next.getX() - p.getX()) / (p.next.getY() - p.getY()); - if (x <= hx && x > qx) { - qx = x; - if (x == hx) { - if (hy == p.getY()) return p; - if (hy == p.next.getY()) return p.next; - } - connection = p.getX() < p.next.getX() ? p : p.next; - } - } - p = p.next; - } while (p != outerNode); - } - - if (connection == null) { - return null; - } else if (hx == qx) { - return connection.previous; - } - - // 2. look for points inside the triangle of hole point, segment intersection, and endpoint - // its a valid connection iff there are no points found; - // otherwise choose the point of the minimum angle with the ray as the connection point - Node stop = connection; - final double mx = connection.getX(); - final double my = connection.getY(); - double tanMin = Double.POSITIVE_INFINITY; - double tan; - p = connection.next; - { - while (p != stop) { - if (hx >= p.getX() && p.getX() >= mx && hx != p.getX() - && pointInEar(p.getX(), p.getY(), hy < my ? hx : qx, hy, mx, my, hy < my ? 
qx : hx, hy)) { - tan = Math.abs(hy - p.getY()) / (hx - p.getX()); // tangential - if ((tan < tanMin || (tan == tanMin && p.getX() > connection.getX())) && isLocallyInside(p, holeNode)) { - connection = p; - tanMin = tan; - } - } - p = p.next; - } - } - - return connection; - } - - /** Finds the left-most hole of a polygon ring. **/ - private static Node fetchLeftmost(final Node start) { - Node node = start; - Node leftMost = start; - do { - // Determine if the current node possesses a lesser X position. - if (node.getX() < leftMost.getX()) { - // Maintain a reference to this Node. - leftMost = node; - } - // Progress the search to the next node in the doubly-linked list. - node = node.next; - } while (node != start); - - // Return the node with the smallest X value. - return leftMost; - } - - /** Main ear slicing loop which triangulates the vertices of a polygon, provided as a doubly-linked list. **/ - private static List earcutLinkedList(Node currEar, final List tessellation, - State state, final boolean mortonOptimized) { - earcut : do { - if (currEar == null || currEar.previous == currEar.next) { - return tessellation; - } - - Node stop = currEar; - Node prevNode; - Node nextNode; - - // Iteratively slice ears - do { - prevNode = currEar.previous; - nextNode = currEar.next; - // Determine whether the current triangle must be cut off. - final boolean isReflex = area(prevNode.getX(), prevNode.getY(), currEar.getX(), currEar.getY(), - nextNode.getX(), nextNode.getY()) >= 0; - if (isReflex == false && isEar(currEar, mortonOptimized) == true) { - // Return the triangulated data - tessellation.add(new Triangle(prevNode, currEar, nextNode)); - // Remove the ear node. - removeNode(currEar); - - // Skipping to the next node leaves fewer slither triangles. - currEar = nextNode.next; - stop = nextNode.next; - continue; - } - currEar = nextNode; - - // If the whole polygon has been iterated over and no more ears can be found. - if (currEar == stop) { - switch (state) { - case INIT: - // try filtering points and slicing again - currEar = filterPoints(currEar, null); - state = State.CURE; - continue earcut; - case CURE: - // if this didn't work, try curing all small self-intersections locally - currEar = cureLocalIntersections(currEar, tessellation); - state = State.SPLIT; - continue earcut; - case SPLIT: - // as a last resort, try splitting the remaining polygon into two - if (splitEarcut(currEar, tessellation, mortonOptimized) == false) { - //we could not process all points. Tessellation failed - tessellation.clear(); - } - break; - } - break; - } - } while (currEar.previous != currEar.next); - break; - } while (true); - // Return the calculated tessellation - return tessellation; - } - - /** Determines whether a polygon node forms a valid ear with adjacent nodes. 
**/ - private static boolean isEar(final Node ear, final boolean mortonOptimized) { - if (mortonOptimized == true) { - return mortonIsEar(ear); - } - - // make sure there aren't other points inside the potential ear - Node node = ear.next.next; - while (node != ear.previous) { - if (pointInEar(node.getX(), node.getY(), ear.previous.getX(), ear.previous.getY(), ear.getX(), ear.getY(), - ear.next.getX(), ear.next.getY()) - && area(node.previous.getX(), node.previous.getY(), node.getX(), node.getY(), - node.next.getX(), node.next.getY()) >= 0) { - return false; - } - node = node.next; - } - return true; - } - - /** Uses morton code for speed to determine whether or a polygon node forms a valid ear w/ adjacent nodes */ - private static boolean mortonIsEar(final Node ear) { - // triangle bbox (flip the bits so negative encoded values are < positive encoded values) - int minTX = StrictMath.min(StrictMath.min(ear.previous.x, ear.x), ear.next.x) ^ 0x80000000; - int minTY = StrictMath.min(StrictMath.min(ear.previous.y, ear.y), ear.next.y) ^ 0x80000000; - int maxTX = StrictMath.max(StrictMath.max(ear.previous.x, ear.x), ear.next.x) ^ 0x80000000; - int maxTY = StrictMath.max(StrictMath.max(ear.previous.y, ear.y), ear.next.y) ^ 0x80000000; - - // z-order range for the current triangle bbox; - long minZ = BitUtil.interleave(minTX, minTY); - long maxZ = BitUtil.interleave(maxTX, maxTY); - - // now make sure we don't have other points inside the potential ear; - - // look for points inside the triangle in both directions - Node p = ear.previousZ; - Node n = ear.nextZ; - while (p != null && Long.compareUnsigned(p.morton, minZ) >= 0 - && n != null && Long.compareUnsigned(n.morton, maxZ) <= 0) { - if (p.idx != ear.previous.idx && p.idx != ear.next.idx && - pointInEar(p.getX(), p.getY(), ear.previous.getX(), ear.previous.getY(), ear.getX(), ear.getY(), - ear.next.getX(), ear.next.getY()) && - area(p.previous.getX(), p.previous.getY(), p.getX(), p.getY(), p.next.getX(), p.next.getY()) >= 0) return false; - p = p.previousZ; - - if (n.idx != ear.previous.idx && n.idx != ear.next.idx && - pointInEar(n.getX(), n.getY(), ear.previous.getX(), ear.previous.getY(), ear.getX(), ear.getY(), - ear.next.getX(), ear.next.getY()) && - area(n.previous.getX(), n.previous.getY(), n.getX(), n.getY(), n.next.getX(), n.next.getY()) >= 0) return false; - n = n.nextZ; - } - - // first look for points inside the triangle in decreasing z-order - while (p != null && Long.compareUnsigned(p.morton, minZ) >= 0) { - if (p.idx != ear.previous.idx && p.idx != ear.next.idx - && pointInEar(p.getX(), p.getY(), ear.previous.getX(), ear.previous.getY(), ear.getX(), ear.getY(), - ear.next.getX(), ear.next.getY()) - && area(p.previous.getX(), p.previous.getY(), p.getX(), p.getY(), p.next.getX(), p.next.getY()) >= 0) { - return false; - } - p = p.previousZ; - } - // then look for points in increasing z-order - while (n != null && - Long.compareUnsigned(n.morton, maxZ) <= 0) { - if (n.idx != ear.previous.idx && n.idx != ear.next.idx - && pointInEar(n.getX(), n.getY(), ear.previous.getX(), ear.previous.getY(), ear.getX(), ear.getY(), - ear.next.getX(), ear.next.getY()) - && area(n.previous.getX(), n.previous.getY(), n.getX(), n.getY(), n.next.getX(), n.next.getY()) >= 0) { - return false; - } - n = n.nextZ; - } - return true; - } - - /** Iterate through all polygon nodes and remove small local self-intersections **/ - private static Node cureLocalIntersections(Node startNode, final List tessellation) { - Node node = startNode; - Node nextNode; - do 
{ - nextNode = node.next; - Node a = node.previous; - Node b = nextNode.next; - - // a self-intersection where edge (v[i-1],v[i]) intersects (v[i+1],v[i+2]) - if (isVertexEquals(a, b) == false - && isIntersectingPolygon(a, a.getX(), a.getY(), b.getX(), b.getY()) == false - && linesIntersect(a.getX(), a.getY(), node.getX(), node.getY(), nextNode.getX(), nextNode.getY(), b.getX(), b.getY()) - && isLocallyInside(a, b) && isLocallyInside(b, a)) { - // Return the triangulated vertices to the tessellation - tessellation.add(new Triangle(a, node, b)); - - // remove two nodes involved - removeNode(node); - removeNode(node.next); - node = startNode = b; - } - node = node.next; - } while (node != startNode); - - return node; - } - - /** Attempt to split a polygon and independently triangulate each side. Return true if the polygon was splitted **/ - private static boolean splitEarcut(final Node start, final List tessellation, final boolean mortonIndexed) { - // Search for a valid diagonal that divides the polygon into two. - Node searchNode = start; - Node nextNode; - do { - nextNode = searchNode.next; - Node diagonal = nextNode.next; - while (diagonal != searchNode.previous) { - if(isValidDiagonal(searchNode, diagonal)) { - // Split the polygon into two at the point of the diagonal - Node splitNode = splitPolygon(searchNode, diagonal); - // Filter the resulting polygon. - searchNode = filterPoints(searchNode, searchNode.next); - splitNode = filterPoints(splitNode, splitNode.next); - // Attempt to earcut both of the resulting polygons - if (mortonIndexed) { - sortByMortonWithReset(searchNode); - sortByMortonWithReset(splitNode); - } - earcutLinkedList(searchNode, tessellation, State.INIT, mortonIndexed); - earcutLinkedList(splitNode, tessellation, State.INIT, mortonIndexed); - // Finish the iterative search - return true; - } - diagonal = diagonal.next; - } - searchNode = searchNode.next; - } while (searchNode != start); - return false; - } - - /** Links two polygon vertices using a bridge. **/ - private static Node splitPolygon(final Node a, final Node b) { - final Node a2 = new Node(a); - final Node b2 = new Node(b); - final Node an = a.next; - final Node bp = b.previous; - - a.next = b; - a.nextZ = b; - b.previous = a; - b.previousZ = a; - a2.next = an; - a2.nextZ = an; - an.previous = a2; - an.previousZ = a2; - b2.next = a2; - b2.nextZ = a2; - a2.previous = b2; - a2.previousZ = b2; - bp.next = b2; - bp.nextZ = b2; - - return b2; - } - - /** Determines whether a diagonal between two polygon nodes lies within a polygon interior. - * (This determines the validity of the ray.) 
**/ - private static boolean isValidDiagonal(final Node a, final Node b) { - return a.next.idx != b.idx && a.previous.idx != b.idx - && isIntersectingPolygon(a, a.getX(), a.getY(), b.getX(), b.getY()) == false - && isLocallyInside(a, b) && isLocallyInside(b, a) - && middleInsert(a, a.getX(), a.getY(), b.getX(), b.getY()); - } - - private static boolean isLocallyInside(final Node a, final Node b) { - // if a is cw - if (area(a.previous.getX(), a.previous.getY(), a.getX(), a.getY(), a.next.getX(), a.next.getY()) < 0) { - return area(a.getX(), a.getY(), b.getX(), b.getY(), a.next.getX(), a.next.getY()) >= 0 - && area(a.getX(), a.getY(), a.previous.getX(), a.previous.getY(), b.getX(), b.getY()) >= 0; - } - // ccw - return area(a.getX(), a.getY(), b.getX(), b.getY(), a.previous.getX(), a.previous.getY()) < 0 - || area(a.getX(), a.getY(), a.next.getX(), a.next.getY(), b.getX(), b.getY()) < 0; - } - - /** Determine whether the middle point of a polygon diagonal is contained within the polygon */ - private static boolean middleInsert(final Node start, final double x0, final double y0, - final double x1, final double y1) { - Node node = start; - Node nextNode; - boolean lIsInside = false; - final double lDx = (x0 + x1) / 2.0f; - final double lDy = (y0 + y1) / 2.0f; - do { - nextNode = node.next; - if (node.getY() > lDy != nextNode.getY() > lDy && - lDx < (nextNode.getX() - node.getX()) * (lDy - node.getY()) / (nextNode.getY() - node.getY()) + node.getX()) { - lIsInside = !lIsInside; - } - node = node.next; - } while (node != start); - return lIsInside; - } - - /** Determines if the diagonal of a polygon is intersecting with any polygon elements. **/ - private static boolean isIntersectingPolygon(final Node start, final double x0, final double y0, - final double x1, final double y1) { - Node node = start; - Node nextNode; - do { - nextNode = node.next; - if(isVertexEquals(node, x0, y0) == false && isVertexEquals(node, x1, y1) == false) { - if (linesIntersect(node.getX(), node.getY(), nextNode.getX(), nextNode.getY(), x0, y0, x1, y1)) { - return true; - } - } - node = nextNode; - } while (node != start); - - return false; - } - - /** Determines whether two line segments intersect. **/ - public static boolean linesIntersect(final double aX0, final double aY0, final double aX1, final double aY1, - final double bX0, final double bY0, final double bX1, final double bY1) { - return (area(aX0, aY0, aX1, aY1, bX0, bY0) > 0) != (area(aX0, aY0, aX1, aY1, bX1, bY1) > 0) - && (area(bX0, bY0, bX1, bY1, aX0, aY0) > 0) != (area(bX0, bY0, bX1, bY1, aX1, aY1) > 0); - } - - /** Interlinks polygon nodes in Z-Order. It reset the values on the z values**/ - private static void sortByMortonWithReset(Node start) { - Node next = start; - do { - next.previousZ = next.previous; - next.nextZ = next.next; - next = next.next; - } while (next != start); - sortByMorton(start); - } - - /** Interlinks polygon nodes in Z-Order. **/ - private static void sortByMorton(Node start) { - start.previousZ.nextZ = null; - start.previousZ = null; - // Sort the generated ring using Z ordering. 
- tathamSort(start); - } - - /** - * Simon Tatham's doubly-linked list O(n log n) mergesort - * see: http://www.chiark.greenend.org.uk/~sgtatham/algorithms/listsort.html - **/ - private static void tathamSort(Node list) { - Node p, q, e, tail; - int i, numMerges, pSize, qSize; - int inSize = 1; - - if (list == null) { - return; - } - - do { - p = list; - list = null; - tail = null; - // count number of merges in this pass - numMerges = 0; - - while(p != null) { - ++numMerges; - // step 'insize' places along from p - q = p; - for (i = 0, pSize = 0; i < inSize && q != null; ++i, ++pSize, q = q.nextZ); - // if q hasn't fallen off end, we have two lists to merge - qSize = inSize; - - // now we have two lists; merge - while (pSize > 0 || (qSize > 0 && q != null)) { - if (pSize != 0 && (qSize == 0 || q == null || Long.compareUnsigned(p.morton, q.morton) <= 0)) { - e = p; - p = p.nextZ; - --pSize; - } else { - e = q; - q = q.nextZ; - --qSize; - } - - if (tail != null) { - tail.nextZ = e; - } else { - list = e; - } - // maintain reverse pointers - e.previousZ = tail; - tail = e; - } - // now p has stepped 'insize' places along, and q has too - p = q; - } - - tail.nextZ = null; - inSize *= 2; - } while (numMerges > 1); - } - - /** Eliminate colinear/duplicate points from the doubly linked list */ - private static Node filterPoints(final Node start, Node end) { - if (start == null) { - return start; - } - - if(end == null) { - end = start; - } - - Node node = start; - Node nextNode; - Node prevNode; - boolean continueIteration; - - do { - continueIteration = false; - nextNode = node.next; - prevNode = node.previous; - if (node.isSteiner == false && isVertexEquals(node, nextNode) - || area(prevNode.getX(), prevNode.getY(), node.getX(), node.getY(), nextNode.getX(), nextNode.getY()) == 0) { - // Remove the node - removeNode(node); - node = end = prevNode; - - if (node == nextNode) { - break; - } - continueIteration = true; - } else { - node = nextNode; - } - } while (continueIteration || node != end); - return end; - } - - /** Creates a node and optionally links it with a previous node in a circular doubly-linked list */ - private static Node insertNode(final Polygon polygon, int index, int vertexIndex, final Node lastNode) { - final Node node = new Node(polygon, index, vertexIndex); - if(lastNode == null) { - node.previous = node; - node.previousZ = node; - node.next = node; - node.nextZ = node; - } else { - node.next = lastNode.next; - node.nextZ = lastNode.next; - node.previous = lastNode; - node.previousZ = lastNode; - lastNode.next.previous = node; - lastNode.nextZ.previousZ = node; - lastNode.next = node; - lastNode.nextZ = node; - } - return node; - } - - /** Removes a node from the doubly linked list */ - private static void removeNode(Node node) { - node.next.previous = node.previous; - node.previous.next = node.next; - - if (node.previousZ != null) { - node.previousZ.nextZ = node.nextZ; - } - if (node.nextZ != null) { - node.nextZ.previousZ = node.previousZ; - } - } - - /** Determines if two point vertices are equal. **/ - private static boolean isVertexEquals(final Node a, final Node b) { - return isVertexEquals(a, b.getX(), b.getY()); - } - - /** Determines if two point vertices are equal. 
**/ - private static boolean isVertexEquals(final Node a, final double x, final double y) { - return a.getX() == x && a.getY() == y; - } - - /** Compute signed area of triangle */ - private static double area(final double aX, final double aY, final double bX, final double bY, - final double cX, final double cY) { - return (bY - aY) * (cX - bX) - (bX - aX) * (cY - bY); - } - - /** Compute whether point is in a candidate ear */ - private static boolean pointInEar(final double x, final double y, final double ax, final double ay, - final double bx, final double by, final double cx, final double cy) { - return (cx - x) * (ay - y) - (ax - x) * (cy - y) >= 0 && - (ax - x) * (by - y) - (bx - x) * (ay - y) >= 0 && - (bx - x) * (cy - y) - (cx - x) * (by - y) >= 0; - } - - /** compute whether the given x, y point is in a triangle; uses the winding order method */ - public static boolean pointInTriangle (double x, double y, double ax, double ay, double bx, double by, double cx, double cy) { - int a = orient(x, y, ax, ay, bx, by); - int b = orient(x, y, bx, by, cx, cy); - if (a == 0 || b == 0 || a < 0 == b < 0) { - int c = orient(x, y, cx, cy, ax, ay); - return c == 0 || (c < 0 == (b < 0 || a < 0)); - } - return false; - } - - /** Brute force compute if a point is in the polygon by traversing entire triangulation - * todo: speed this up using either binary tree or prefix coding (filtering by bounding box of triangle) - **/ - public static boolean pointInPolygon(final List tessellation, double lat, double lon) { - // each triangle - for (int i = 0; i < tessellation.size(); ++i) { - if (tessellation.get(i).containsPoint(lat, lon)) { - return true; - } - } - return false; - } - - /** Circular Doubly-linked list used for polygon coordinates */ - protected static class Node { - // node index in the linked list - private final int idx; - // vertex index in the polygon - private final int vrtxIdx; - // reference to the polygon for lat/lon values - private final Polygon polygon; - // encoded x value - private final int x; - // encoded y value - private final int y; - // morton code for sorting - private final long morton; - - // previous node - private Node previous; - // next node - private Node next; - // previous z node - private Node previousZ; - // next z node - private Node nextZ; - // triangle center - private boolean isSteiner = false; - - protected Node(final Polygon polygon, final int index, final int vertexIndex) { - this.idx = index; - this.vrtxIdx = vertexIndex; - this.polygon = polygon; - this.y = encodeLatitude(polygon.getPolyLat(vrtxIdx)); - this.x = encodeLongitude(polygon.getPolyLon(vrtxIdx)); - this.morton = BitUtil.interleave(x ^ 0x80000000, y ^ 0x80000000); - this.previous = null; - this.next = null; - this.previousZ = null; - this.nextZ = null; - } - - /** simple deep copy constructor */ - protected Node(Node other) { - this.idx = other.idx; - this.vrtxIdx = other.vrtxIdx; - this.polygon = other.polygon; - this.morton = other.morton; - this.x = other.x; - this.y = other.y; - this.previous = other.previous; - this.next = other.next; - this.previousZ = other.previousZ; - this.nextZ = other.nextZ; - this.isSteiner = other.isSteiner; - } - - /** get the x value */ - public final double getX() { - return polygon.getPolyLon(vrtxIdx); - } - - /** get the y value */ - public final double getY() { - return polygon.getPolyLat(vrtxIdx); - } - - /** get the longitude value */ - public final double getLon() { - return polygon.getPolyLon(vrtxIdx); - } - - /** get the latitude value */ - public final 
double getLat() { - return polygon.getPolyLat(vrtxIdx); - } - - @Override - public String toString() { - StringBuilder builder = new StringBuilder(); - if (this.previous == null) - builder.append("||-"); - else - builder.append(this.previous.idx + " <- "); - builder.append(this.idx); - if (this.next == null) - builder.append(" -||"); - else - builder.append(" -> " + this.next.idx); - return builder.toString(); - } - } - - /** Triangle in the tessellated mesh */ - public static final class Triangle { - Node[] vertex; - - protected Triangle(Node a, Node b, Node c) { - this.vertex = new Node[] {a, b, c}; - } - - /** get quantized x value for the given vertex */ - public int getEncodedX(int vertex) { - return this.vertex[vertex].x; - } - - /** get quantized y value for the given vertex */ - public int getEncodedY(int vertex) { - return this.vertex[vertex].y; - } - - /** get latitude value for the given vertex */ - public double getLat(int vertex) { - return this.vertex[vertex].getLat(); - } - - /** get longitude value for the given vertex */ - public double getLon(int vertex) { - return this.vertex[vertex].getLon(); - } - - /** utility method to compute whether the point is in the triangle */ - protected boolean containsPoint(double lat, double lon) { - return pointInTriangle(lon, lat, - vertex[0].getLon(), vertex[0].getLat(), - vertex[1].getLon(), vertex[1].getLat(), - vertex[2].getLon(), vertex[2].getLat()); - } - - /** pretty print the triangle vertices */ - public String toString() { - String result = vertex[0].x + ", " + vertex[0].y + " " + - vertex[1].x + ", " + vertex[1].y + " " + - vertex[2].x + ", " + vertex[2].y; - return result; - } - } -} diff --git a/server/src/main/java/org/elasticsearch/action/ActionListener.java b/server/src/main/java/org/elasticsearch/action/ActionListener.java index f639f139b55fc..07bc519346f7c 100644 --- a/server/src/main/java/org/elasticsearch/action/ActionListener.java +++ b/server/src/main/java/org/elasticsearch/action/ActionListener.java @@ -136,4 +136,48 @@ static void onFailure(Iterable> listeners, E } ExceptionsHelper.maybeThrowRuntimeAndSuppress(exceptionList); } + + /** + * Wraps a given listener and returns a new listener which executes the provided {@code runAfter} + * callback when the listener is notified via either {@code #onResponse} or {@code #onFailure}. + */ + static ActionListener runAfter(ActionListener delegate, Runnable runAfter) { + return new ActionListener() { + @Override + public void onResponse(Response response) { + try { + delegate.onResponse(response); + } finally { + runAfter.run(); + } + } + + @Override + public void onFailure(Exception e) { + try { + delegate.onFailure(e); + } finally { + runAfter.run(); + } + } + }; + } + + /** + * Wraps a given listener and returns a new listener which makes sure {@link #onResponse(Object)} + * and {@link #onFailure(Exception)} of the provided listener will be called at most once. 
+ */ + static ActionListener notifyOnce(ActionListener delegate) { + return new NotifyOnceListener() { + @Override + protected void innerOnResponse(Response response) { + delegate.onResponse(response); + } + + @Override + protected void innerOnFailure(Exception e) { + delegate.onFailure(e); + } + }; + } } diff --git a/server/src/main/java/org/elasticsearch/action/DocWriteRequest.java b/server/src/main/java/org/elasticsearch/action/DocWriteRequest.java index e2d01aad230bd..fdf62e951a517 100644 --- a/server/src/main/java/org/elasticsearch/action/DocWriteRequest.java +++ b/server/src/main/java/org/elasticsearch/action/DocWriteRequest.java @@ -60,6 +60,15 @@ public interface DocWriteRequest extends IndicesRequest { */ String type(); + /** + * Set the default type supplied to a bulk + * request if this individual request's type is null + * or empty + * @return the Request + */ + T defaultTypeIfNull(String defaultType); + + /** * Get the id of the document for this request * @return the id diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/bootstrap/GetDiscoveredNodesRequest.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/bootstrap/GetDiscoveredNodesRequest.java index 28611869475d1..f91a4de5263be 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/bootstrap/GetDiscoveredNodesRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/bootstrap/GetDiscoveredNodesRequest.java @@ -35,8 +35,6 @@ */ public class GetDiscoveredNodesRequest extends ActionRequest { - private int waitForNodes = 1; - @Nullable // if the request should wait indefinitely private TimeValue timeout = TimeValue.timeValueSeconds(30); @@ -47,35 +45,10 @@ public GetDiscoveredNodesRequest() { public GetDiscoveredNodesRequest(StreamInput in) throws IOException { super(in); - waitForNodes = in.readInt(); timeout = in.readOptionalTimeValue(); requiredNodes = in.readList(StreamInput::readString); } - /** - * Sometimes it is useful only to receive a successful response after discovering a certain number of master-eligible nodes. This - * parameter controls this behaviour. - * - * @param waitForNodes the minimum number of nodes to have discovered before this request will receive a successful response. Must - * be at least 1, because we always discover the local node. - */ - public void setWaitForNodes(int waitForNodes) { - if (waitForNodes < 1) { - throw new IllegalArgumentException("always finds at least one node, waiting for [" + waitForNodes + "] is not allowed"); - } - this.waitForNodes = waitForNodes; - } - - /** - * Sometimes it is useful only to receive a successful response after discovering a certain number of master-eligible nodes. This - * parameter controls this behaviour. - * - * @return the minimum number of nodes to have discovered before this request will receive a successful response. - */ - public int getWaitForNodes() { - return waitForNodes; - } - /** * Sometimes it is useful to wait until enough nodes have been discovered, rather than failing immediately. This parameter controls how * long to wait, and defaults to 30s. 
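Side note on the two ActionListener helpers added above: they compose naturally. A minimal usage sketch (hypothetical caller, not part of this change) that releases a resource on both completion paths and is safe against a racy double notification:

import org.elasticsearch.action.ActionListener;
import org.elasticsearch.common.lease.Releasable;

class ListenerCompositionExample {
    /** Wraps {@code original} so it is notified at most once and always releases {@code permit}. */
    static <T> ActionListener<T> guarded(ActionListener<T> original, Releasable permit) {
        // runAfter invokes the callback after either onResponse or onFailure;
        // notifyOnce guarantees the delegate sees at most one notification.
        return ActionListener.notifyOnce(ActionListener.runAfter(original, permit::close));
    }
}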
@@ -133,7 +106,6 @@ public void readFrom(StreamInput in) throws IOException { @Override public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); - out.writeInt(waitForNodes); out.writeOptionalTimeValue(timeout); out.writeStringList(requiredNodes); } @@ -141,8 +113,7 @@ public void writeTo(StreamOutput out) throws IOException { @Override public String toString() { return "GetDiscoveredNodesRequest{" + - "waitForNodes=" + waitForNodes + - ", timeout=" + timeout + + "timeout=" + timeout + ", requiredNodes=" + requiredNodes + "}"; } } diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/bootstrap/TransportGetDiscoveredNodesAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/bootstrap/TransportGetDiscoveredNodesAction.java index c88454b63559c..6f6336c3bd5f3 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/bootstrap/TransportGetDiscoveredNodesAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/bootstrap/TransportGetDiscoveredNodesAction.java @@ -152,10 +152,6 @@ private static boolean matchesRequirement(DiscoveryNode discoveryNode, String re } private static boolean checkWaitRequirements(GetDiscoveredNodesRequest request, Set nodes) { - if (nodes.size() < request.getWaitForNodes()) { - return false; - } - List requirements = request.getRequiredNodes(); final Set selectedNodes = new HashSet<>(); for (final String requirement : requirements) { diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/reroute/TransportClusterRerouteAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/reroute/TransportClusterRerouteAction.java index 061ec41039b43..d5cc35b2205ac 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/reroute/TransportClusterRerouteAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/reroute/TransportClusterRerouteAction.java @@ -21,7 +21,12 @@ import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.message.ParameterizedMessage; +import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.ActionListenerResponseHandler; +import org.elasticsearch.action.admin.indices.shards.IndicesShardStoresAction; +import org.elasticsearch.action.admin.indices.shards.IndicesShardStoresRequest; +import org.elasticsearch.action.admin.indices.shards.IndicesShardStoresResponse; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.master.TransportMasterNodeAction; import org.elasticsearch.cluster.AckedClusterStateUpdateTask; @@ -29,14 +34,26 @@ import org.elasticsearch.cluster.block.ClusterBlockException; import org.elasticsearch.cluster.block.ClusterBlockLevel; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; +import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.routing.allocation.AllocationService; import org.elasticsearch.cluster.routing.allocation.RoutingExplanations; +import org.elasticsearch.cluster.routing.allocation.command.AbstractAllocateAllocationCommand; +import org.elasticsearch.cluster.routing.allocation.command.AllocateStalePrimaryAllocationCommand; +import org.elasticsearch.cluster.routing.allocation.command.AllocationCommand; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.Priority; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.collect.ImmutableOpenIntMap; 
+import org.elasticsearch.common.collect.ImmutableOpenMap; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + public class TransportClusterRerouteAction extends TransportMasterNodeAction { private final AllocationService allocationService; @@ -69,18 +86,71 @@ protected ClusterRerouteResponse newResponse() { @Override protected void masterOperation(final ClusterRerouteRequest request, final ClusterState state, final ActionListener listener) { - ActionListener logWrapper = ActionListener.wrap( - response -> { - if (request.dryRun() == false) { - response.getExplanations().getYesDecisionMessages().forEach(logger::info); - } - listener.onResponse(response); - }, - listener::onFailure - ); - - clusterService.submitStateUpdateTask("cluster_reroute (api)", new ClusterRerouteResponseAckedClusterStateUpdateTask(logger, - allocationService, request, logWrapper)); + Map> stalePrimaryAllocations = new HashMap<>(); + for (AllocationCommand command : request.getCommands().commands()) { + if (command instanceof AllocateStalePrimaryAllocationCommand) { + final AllocateStalePrimaryAllocationCommand cmd = (AllocateStalePrimaryAllocationCommand) command; + stalePrimaryAllocations.computeIfAbsent(cmd.index(), k -> new ArrayList<>()).add(cmd); + } + } + if (stalePrimaryAllocations.isEmpty()) { + submitStateUpdate(request, listener); + } else { + verifyThenSubmitUpdate(request, listener, stalePrimaryAllocations); + } + } + + private void verifyThenSubmitUpdate(ClusterRerouteRequest request, ActionListener listener, + Map> stalePrimaryAllocations) { + transportService.sendRequest(transportService.getLocalNode(), IndicesShardStoresAction.NAME, + new IndicesShardStoresRequest().indices(stalePrimaryAllocations.keySet().toArray(Strings.EMPTY_ARRAY)), + new ActionListenerResponseHandler<>( + ActionListener.wrap( + response -> { + ImmutableOpenMap>> status = + response.getStoreStatuses(); + Exception e = null; + for (Map.Entry> entry : stalePrimaryAllocations.entrySet()) { + final String index = entry.getKey(); + final ImmutableOpenIntMap> indexStatus = status.get(index); + assert indexStatus != null; + for (AbstractAllocateAllocationCommand command : entry.getValue()) { + final List shardStatus = + indexStatus.get(command.shardId()); + if (shardStatus == null || shardStatus.isEmpty()) { + e = ExceptionsHelper.useOrSuppress(e, new IllegalArgumentException( + "No data for shard [" + command.shardId() + "] of index [" + index + "] found on any node") + ); + } else if (shardStatus.stream().noneMatch(storeStatus -> { + final DiscoveryNode node = storeStatus.getNode(); + final String nodeInCommand = command.node(); + return nodeInCommand.equals(node.getName()) || nodeInCommand.equals(node.getId()); + })) { + e = ExceptionsHelper.useOrSuppress(e, new IllegalArgumentException( + "No data for shard [" + command.shardId() + "] of index [" + index + "] found on node [" + + command.node() + ']')); + } + } + } + if (e == null) { + submitStateUpdate(request, listener); + } else { + listener.onFailure(e); + } + }, listener::onFailure + ), IndicesShardStoresResponse::new)); + } + + private void submitStateUpdate(final ClusterRerouteRequest request, final ActionListener listener) { + clusterService.submitStateUpdateTask("cluster_reroute (api)", + new ClusterRerouteResponseAckedClusterStateUpdateTask(logger, allocationService, request, 
+ ActionListener.wrap( + response -> { + if (request.dryRun() == false) { + response.getExplanations().getYesDecisionMessages().forEach(logger::info); + } + listener.onResponse(response); + }, listener::onFailure))); } static class ClusterRerouteResponseAckedClusterStateUpdateTask extends AckedClusterStateUpdateTask { diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotIndexShardStatus.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotIndexShardStatus.java index 834e238e4a0d3..5159f334250a6 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotIndexShardStatus.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotIndexShardStatus.java @@ -189,7 +189,7 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws stage = SnapshotIndexShardStage.valueOf(rawStage); } catch (IllegalArgumentException iae) { throw new ElasticsearchParseException( - "failed to parse snapshot index shard status [{}][{}], unknonwn stage [{}]", + "failed to parse snapshot index shard status [{}][{}], unknown stage [{}]", shard.getIndex().getName(), shard.getId(), rawStage); } return new SnapshotIndexShardStatus(shard, stage, stats, nodeId, failure); diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/close/CloseIndexClusterStateUpdateRequest.java b/server/src/main/java/org/elasticsearch/action/admin/indices/close/CloseIndexClusterStateUpdateRequest.java index ba5cc2ab00eaa..bb0f98ac07b7e 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/close/CloseIndexClusterStateUpdateRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/close/CloseIndexClusterStateUpdateRequest.java @@ -25,7 +25,13 @@ */ public class CloseIndexClusterStateUpdateRequest extends IndicesClusterStateUpdateRequest { - CloseIndexClusterStateUpdateRequest() { + private final long taskId; + public CloseIndexClusterStateUpdateRequest(final long taskId) { + this.taskId = taskId; + } + + public long taskId() { + return taskId; } } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/close/TransportCloseIndexAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/close/TransportCloseIndexAction.java index fb5fdf536a248..bb3db084b0c53 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/close/TransportCloseIndexAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/close/TransportCloseIndexAction.java @@ -26,7 +26,6 @@ import org.elasticsearch.action.support.master.AcknowledgedResponse; import org.elasticsearch.action.support.master.TransportMasterNodeAction; import org.elasticsearch.cluster.ClusterState; -import org.elasticsearch.cluster.ack.ClusterStateUpdateResponse; import org.elasticsearch.cluster.block.ClusterBlockException; import org.elasticsearch.cluster.block.ClusterBlockLevel; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; @@ -100,24 +99,32 @@ protected ClusterBlockException checkBlock(CloseIndexRequest request, ClusterSta @Override protected void masterOperation(final CloseIndexRequest request, final ClusterState state, final ActionListener listener) { + throw new UnsupportedOperationException("The task parameter is required"); + } + + @Override + protected void masterOperation(final Task task, final CloseIndexRequest request, final ClusterState state, + final ActionListener listener) 
throws Exception { final Index[] concreteIndices = indexNameExpressionResolver.concreteIndices(state, request); if (concreteIndices == null || concreteIndices.length == 0) { listener.onResponse(new AcknowledgedResponse(true)); return; } - CloseIndexClusterStateUpdateRequest updateRequest = new CloseIndexClusterStateUpdateRequest() - .ackTimeout(request.timeout()).masterNodeTimeout(request.masterNodeTimeout()) - .indices(concreteIndices); - indexStateService.closeIndices(updateRequest, new ActionListener() { + final CloseIndexClusterStateUpdateRequest closeRequest = new CloseIndexClusterStateUpdateRequest(task.getId()) + .ackTimeout(request.timeout()) + .masterNodeTimeout(request.masterNodeTimeout()) + .indices(concreteIndices); + + indexStateService.closeIndices(closeRequest, new ActionListener() { @Override - public void onResponse(ClusterStateUpdateResponse response) { - listener.onResponse(new AcknowledgedResponse(response.isAcknowledged())); + public void onResponse(final AcknowledgedResponse response) { + listener.onResponse(response); } @Override - public void onFailure(Exception t) { + public void onFailure(final Exception t) { logger.debug(() -> new ParameterizedMessage("failed to close indices [{}]", (Object) concreteIndices), t); listener.onFailure(t); } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/close/TransportVerifyShardBeforeCloseAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/close/TransportVerifyShardBeforeCloseAction.java new file mode 100644 index 0000000000000..f08f6ea7dffa2 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/close/TransportVerifyShardBeforeCloseAction.java @@ -0,0 +1,176 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +package org.elasticsearch.action.admin.indices.close; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.admin.indices.flush.FlushRequest; +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.replication.ReplicationOperation; +import org.elasticsearch.action.support.replication.ReplicationRequest; +import org.elasticsearch.action.support.replication.ReplicationResponse; +import org.elasticsearch.action.support.replication.TransportReplicationAction; +import org.elasticsearch.cluster.action.shard.ShardStateAction; +import org.elasticsearch.cluster.block.ClusterBlock; +import org.elasticsearch.cluster.block.ClusterBlocks; +import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.lease.Releasable; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.index.shard.IndexShard; +import org.elasticsearch.index.shard.ShardId; +import org.elasticsearch.indices.IndicesService; +import org.elasticsearch.tasks.TaskId; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.TransportService; + +import java.io.IOException; +import java.util.Objects; +import java.util.function.Consumer; + +public class TransportVerifyShardBeforeCloseAction extends TransportReplicationAction< + TransportVerifyShardBeforeCloseAction.ShardRequest, TransportVerifyShardBeforeCloseAction.ShardRequest, ReplicationResponse> { + + public static final String NAME = CloseIndexAction.NAME + "[s]"; + + @Inject + public TransportVerifyShardBeforeCloseAction(final Settings settings, final TransportService transportService, + final ClusterService clusterService, final IndicesService indicesService, + final ThreadPool threadPool, final ShardStateAction stateAction, + final ActionFilters actionFilters, final IndexNameExpressionResolver resolver) { + super(settings, NAME, transportService, clusterService, indicesService, threadPool, stateAction, actionFilters, resolver, + ShardRequest::new, ShardRequest::new, ThreadPool.Names.MANAGEMENT); + } + + @Override + protected ReplicationResponse newResponseInstance() { + return new ReplicationResponse(); + } + + @Override + protected void acquirePrimaryOperationPermit(final IndexShard primary, + final ShardRequest request, + final ActionListener onAcquired) { + primary.acquireAllPrimaryOperationsPermits(onAcquired, request.timeout()); + } + + @Override + protected void acquireReplicaOperationPermit(final IndexShard replica, + final ShardRequest request, + final ActionListener onAcquired, + final long primaryTerm, + final long globalCheckpoint, + final long maxSeqNoOfUpdateOrDeletes) { + replica.acquireAllReplicaOperationsPermits(primaryTerm, globalCheckpoint, maxSeqNoOfUpdateOrDeletes, onAcquired, request.timeout()); + } + + @Override + protected PrimaryResult shardOperationOnPrimary(final ShardRequest shardRequest, + final IndexShard primary) throws Exception { + executeShardOperation(shardRequest, primary); + return new PrimaryResult<>(shardRequest, new ReplicationResponse()); + } + + @Override + protected ReplicaResult shardOperationOnReplica(final ShardRequest shardRequest, final IndexShard replica) throws Exception { + executeShardOperation(shardRequest, replica); + return new ReplicaResult(); + } + 
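Editor's note on the permit model this new action leans on: `acquireAllPrimaryOperationsPermits` / `acquireAllReplicaOperationsPermits` hold every operation permit of the shard, so the verification can only run once no other permit-holding operation is in flight. Below is a minimal stand-alone sketch of that idea using a plain `java.util.concurrent.Semaphore` rather than the real `IndexShard` machinery; all names are illustrative, not ES API.

```java
import java.util.concurrent.Semaphore;

// Toy model of operation permits: each regular operation holds one permit,
// while verify-before-close drains all of them, waiting for in-flight
// operations to finish and blocking new ones in the meantime.
public class PermitDrainSketch {
    private static final int MAX_PERMITS = Integer.MAX_VALUE;
    private final Semaphore permits = new Semaphore(MAX_PERMITS);

    /** A regular operation (e.g. indexing) takes a single permit. */
    public AutoCloseable acquireOperationPermit() throws InterruptedException {
        permits.acquire();
        return permits::release;
    }

    /** The verification runs only while *all* permits are held. */
    public void verifyBeforeClose(Runnable verification) throws InterruptedException {
        permits.acquire(MAX_PERMITS); // blocks until every operation permit is back
        try {
            verification.run(); // e.g. check the cluster block, flush, compare checkpoints
        } finally {
            permits.release(MAX_PERMITS);
        }
    }
}
```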
+ private void executeShardOperation(final ShardRequest request, final IndexShard indexShard) { + final ShardId shardId = indexShard.shardId(); + if (indexShard.getActiveOperationsCount() != 0) { + throw new IllegalStateException("On-going operations in progress while checking index shard " + shardId + " before closing"); + } + + final ClusterBlocks clusterBlocks = clusterService.state().blocks(); + if (clusterBlocks.hasIndexBlock(shardId.getIndexName(), request.clusterBlock()) == false) { + throw new IllegalStateException("Index shard " + shardId + " must be blocked by " + request.clusterBlock() + " before closing"); + } + + final long maxSeqNo = indexShard.seqNoStats().getMaxSeqNo(); + if (indexShard.getGlobalCheckpoint() != maxSeqNo) { + throw new IllegalStateException("Global checkpoint [" + indexShard.getGlobalCheckpoint() + + "] mismatches maximum sequence number [" + maxSeqNo + "] on index shard " + shardId); + } + indexShard.flush(new FlushRequest()); + logger.debug("{} shard is ready for closing", shardId); + } + + @Override + protected ReplicationOperation.Replicas newReplicasProxy(final long primaryTerm) { + return new VerifyShardBeforeCloseActionReplicasProxy(primaryTerm); + } + + /** + * A {@link ReplicasProxy} that marks as stale the shards that are unavailable during the verification + * and the flush of the shard. This is done to ensure that such shards won't later be promoted to primary + * or reopened in an unverified state with potentially unflushed translog operations. + */ + class VerifyShardBeforeCloseActionReplicasProxy extends ReplicasProxy { + + VerifyShardBeforeCloseActionReplicasProxy(final long primaryTerm) { + super(primaryTerm); + } + + @Override + public void markShardCopyAsStaleIfNeeded(final ShardId shardId, final String allocationId, final Runnable onSuccess, + final Consumer onPrimaryDemoted, final Consumer onIgnoredFailure) { + shardStateAction.remoteShardFailed(shardId, allocationId, primaryTerm, true, "mark copy as stale", null, + createShardActionListener(onSuccess, onPrimaryDemoted, onIgnoredFailure)); + } + } + + public static class ShardRequest extends ReplicationRequest { + + private ClusterBlock clusterBlock; + + ShardRequest() { + } + + public ShardRequest(final ShardId shardId, final ClusterBlock clusterBlock, final TaskId parentTaskId) { + super(shardId); + this.clusterBlock = Objects.requireNonNull(clusterBlock); + setParentTask(parentTaskId); + } + + @Override + public String toString() { + return "verify shard " + shardId + " before close with block " + clusterBlock; + } + + @Override + public void readFrom(final StreamInput in) throws IOException { + super.readFrom(in); + clusterBlock = ClusterBlock.readClusterBlock(in); + } + + @Override + public void writeTo(final StreamOutput out) throws IOException { + super.writeTo(out); + clusterBlock.writeTo(out); + } + + public ClusterBlock clusterBlock() { + return clusterBlock; + } + } +} diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/get/GetIndexResponse.java b/server/src/main/java/org/elasticsearch/action/admin/indices/get/GetIndexResponse.java index e2b72077b7f21..c7e64143f9c9a 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/get/GetIndexResponse.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/get/GetIndexResponse.java @@ -20,6 +20,7 @@ package org.elasticsearch.action.admin.indices.get; import com.carrotsearch.hppc.cursors.ObjectObjectCursor; + import org.apache.lucene.util.CollectionUtil; import
org.elasticsearch.Version; import org.elasticsearch.action.ActionResponse; @@ -34,6 +35,7 @@ import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentParser.Token; +import org.elasticsearch.index.mapper.MapperService; import java.io.IOException; import java.util.ArrayList; @@ -41,9 +43,11 @@ import java.util.Collections; import java.util.Comparator; import java.util.List; +import java.util.Map; import java.util.Objects; import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken; +import static org.elasticsearch.rest.BaseRestHandler.INCLUDE_TYPE_NAME_PARAMETER; /** * A response for a get index action. @@ -249,15 +253,32 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws } builder.endObject(); - builder.startObject("mappings"); ImmutableOpenMap indexMappings = mappings.get(index); - if (indexMappings != null) { + boolean includeTypeName = params.paramAsBoolean(INCLUDE_TYPE_NAME_PARAMETER, false); + if (includeTypeName) { + builder.startObject("mappings"); + if (indexMappings != null) { + for (final ObjectObjectCursor typeEntry : indexMappings) { + builder.field(typeEntry.key); + builder.map(typeEntry.value.sourceAsMap()); + } + } + builder.endObject(); + } else { + MappingMetaData mappings = null; for (final ObjectObjectCursor typeEntry : indexMappings) { - builder.field(typeEntry.key); - builder.map(typeEntry.value.sourceAsMap()); + if (typeEntry.key.equals(MapperService.DEFAULT_MAPPING) == false) { + assert mappings == null; + mappings = typeEntry.value; + } + } + if (mappings == null) { + // no mappings yet + builder.startObject("mappings").endObject(); + } else { + builder.field("mappings", mappings.sourceAsMap()); } } - builder.endObject(); builder.startObject("settings"); Settings indexSettings = settings.get(index); @@ -292,16 +313,9 @@ private static List parseAliases(XContentParser parser) throws IO private static ImmutableOpenMap parseMappings(XContentParser parser) throws IOException { ImmutableOpenMap.Builder indexMappings = ImmutableOpenMap.builder(); - // We start at START_OBJECT since parseIndexEntry ensures that - while (parser.nextToken() != Token.END_OBJECT) { - ensureExpectedToken(Token.FIELD_NAME, parser.currentToken(), parser::getTokenLocation); - parser.nextToken(); - if (parser.currentToken() == Token.START_OBJECT) { - String mappingType = parser.currentName(); - indexMappings.put(mappingType, new MappingMetaData(mappingType, parser.map())); - } else if (parser.currentToken() == Token.START_ARRAY) { - parser.skipChildren(); - } + Map map = parser.map(); + if (map.isEmpty() == false) { + indexMappings.put(MapperService.SINGLE_MAPPING_NAME, new MappingMetaData(MapperService.SINGLE_MAPPING_NAME, map)); } return indexMappings.build(); } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/get/GetFieldMappingsResponse.java b/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/get/GetFieldMappingsResponse.java index 44a66f497c846..2c07ebc68d0ef 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/get/GetFieldMappingsResponse.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/get/GetFieldMappingsResponse.java @@ -34,6 +34,8 @@ import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.index.mapper.Mapper; +import 
org.elasticsearch.index.mapper.MapperService; +import org.elasticsearch.rest.BaseRestHandler; import java.io.IOException; import java.io.InputStream; @@ -112,19 +114,32 @@ public FieldMappingMetaData fieldMappings(String index, String type, String fiel @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + boolean includeTypeName = params.paramAsBoolean(BaseRestHandler.INCLUDE_TYPE_NAME_PARAMETER, true); + builder.startObject(); for (Map.Entry>> indexEntry : mappings.entrySet()) { builder.startObject(indexEntry.getKey()); builder.startObject(MAPPINGS.getPreferredName()); - for (Map.Entry> typeEntry : indexEntry.getValue().entrySet()) { - builder.startObject(typeEntry.getKey()); - for (Map.Entry fieldEntry : typeEntry.getValue().entrySet()) { - builder.startObject(fieldEntry.getKey()); - fieldEntry.getValue().toXContent(builder, params); + + if (includeTypeName == false) { + Map mappings = null; + for (Map.Entry> typeEntry : indexEntry.getValue().entrySet()) { + if (typeEntry.getKey().equals(MapperService.DEFAULT_MAPPING) == false) { + assert mappings == null; + mappings = typeEntry.getValue(); + } + } + if (mappings != null) { + addFieldMappingsToBuilder(builder, params, mappings); + } + } else { + for (Map.Entry> typeEntry : indexEntry.getValue().entrySet()) { + builder.startObject(typeEntry.getKey()); + addFieldMappingsToBuilder(builder, params, typeEntry.getValue()); builder.endObject(); } - builder.endObject(); } + builder.endObject(); builder.endObject(); } @@ -132,6 +147,16 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws return builder; } + private void addFieldMappingsToBuilder(XContentBuilder builder, + Params params, + Map mappings) throws IOException { + for (Map.Entry fieldEntry : mappings.entrySet()) { + builder.startObject(fieldEntry.getKey()); + fieldEntry.getValue().toXContent(builder, params); + builder.endObject(); + } + } + public static GetFieldMappingsResponse fromXContent(XContentParser parser) throws IOException { ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.nextToken(), parser::getTokenLocation); diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/shards/IndicesShardStoresResponse.java b/server/src/main/java/org/elasticsearch/action/admin/indices/shards/IndicesShardStoresResponse.java index d87de21bc48d8..ed348539d3562 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/shards/IndicesShardStoresResponse.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/shards/IndicesShardStoresResponse.java @@ -283,7 +283,11 @@ public IndicesShardStoresResponse(ImmutableOpenMap>>of(), Collections.emptyList()); + this(ImmutableOpenMap.of(), Collections.emptyList()); + } + + public IndicesShardStoresResponse(StreamInput in) throws IOException { + readFrom(in); } /** diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/template/put/PutIndexTemplateRequestBuilder.java b/server/src/main/java/org/elasticsearch/action/admin/indices/template/put/PutIndexTemplateRequestBuilder.java index c10df6aafa557..d4d1e9f299e69 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/template/put/PutIndexTemplateRequestBuilder.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/template/put/PutIndexTemplateRequestBuilder.java @@ -27,7 +27,6 @@ import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentType; -import java.util.Collections; 
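To make the `include_type_name` branches in the `GetIndexResponse` and `GetFieldMappingsResponse` changes above concrete, here is a stand-alone sketch of the two response shapes they switch between. It uses plain Java maps and a hypothetical demo class, not the ES `XContentBuilder` machinery.

```java
import java.util.Map;

// Demo of the two mapping shapes controlled by include_type_name.
public class MappingShapeDemo {
    public static void main(String[] args) {
        Map<String, Object> fields =
            Map.of("properties", Map.of("title", Map.of("type", "text")));

        // include_type_name=true: mappings stay keyed by the (single) type name.
        Map<String, Object> typed = Map.of("mappings", Map.of("_doc", fields));

        // include_type_name=false: the type level is elided entirely.
        Map<String, Object> typeless = Map.of("mappings", fields);

        System.out.println(typed);    // {mappings={_doc={properties={title={type=text}}}}}
        System.out.println(typeless); // {mappings={properties={title={type=text}}}}
    }
}
```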
import java.util.List; import java.util.Map; @@ -42,16 +41,6 @@ public PutIndexTemplateRequestBuilder(ElasticsearchClient client, PutIndexTempla super(client, action, new PutIndexTemplateRequest(name)); } - /** - * Sets the match expression that will be used to match on indices created. - * - * @deprecated Replaced by {@link #setPatterns(List)} - */ - @Deprecated - public PutIndexTemplateRequestBuilder setTemplate(String indexPattern) { - return setPatterns(Collections.singletonList(indexPattern)); - } - /** * Sets the match expression that will be used to match on indices created. */ diff --git a/server/src/main/java/org/elasticsearch/action/bulk/BulkRequest.java b/server/src/main/java/org/elasticsearch/action/bulk/BulkRequest.java index 01b9c22b4d107..292d75db5cff0 100644 --- a/server/src/main/java/org/elasticsearch/action/bulk/BulkRequest.java +++ b/server/src/main/java/org/elasticsearch/action/bulk/BulkRequest.java @@ -19,6 +19,7 @@ package org.elasticsearch.action.bulk; +import org.apache.logging.log4j.LogManager; import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.CompositeIndicesRequest; @@ -36,6 +37,7 @@ import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.lucene.uid.Versions; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.xcontent.LoggingDeprecationHandler; @@ -45,6 +47,8 @@ import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.index.VersionType; import org.elasticsearch.index.seqno.SequenceNumbers; +import org.elasticsearch.rest.action.document.RestBulkAction; +import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.search.fetch.subphase.FetchSourceContext; import java.io.IOException; @@ -68,7 +72,7 @@ public class BulkRequest extends ActionRequest implements CompositeIndicesRequest, WriteRequest { private static final int REQUEST_OVERHEAD = 50; - + private static final DeprecationLogger deprecationLogger = new DeprecationLogger(LogManager.getLogger(BulkRequest.class)); private static final ParseField INDEX = new ParseField("_index"); private static final ParseField TYPE = new ParseField("_type"); private static final ParseField ID = new ParseField("_id"); @@ -104,6 +108,14 @@ public class BulkRequest extends ActionRequest implements CompositeIndicesReques public BulkRequest() { } + public BulkRequest(@Nullable String globalIndex) { + this.globalIndex = globalIndex; + } + + /** + * @deprecated Types are in the process of being removed. 
Use {@link #BulkRequest(String)} instead + */ + @Deprecated public BulkRequest(@Nullable String globalIndex, @Nullable String globalType) { this.globalIndex = globalIndex; this.globalType = globalType; @@ -280,28 +292,71 @@ public BulkRequest add(byte[] data, int from, int length, XContentType xContentT /** * Adds framed data in binary format + * @deprecated use {@link #add(byte[], int, int, String, XContentType)} instead */ + @Deprecated public BulkRequest add(byte[] data, int from, int length, @Nullable String defaultIndex, @Nullable String defaultType, XContentType xContentType) throws IOException { return add(new BytesArray(data, from, length), defaultIndex, defaultType, xContentType); } + /** * Adds framed data in binary format */ + public BulkRequest add(byte[] data, int from, int length, @Nullable String defaultIndex, + XContentType xContentType) throws IOException { + return add(new BytesArray(data, from, length), defaultIndex, MapperService.SINGLE_MAPPING_NAME, xContentType); + } + + /** + * Adds framed data in binary format + * @deprecated use {@link #add(BytesReference, String, XContentType)} instead + */ + @Deprecated public BulkRequest add(BytesReference data, @Nullable String defaultIndex, @Nullable String defaultType, XContentType xContentType) throws IOException { return add(data, defaultIndex, defaultType, null, null, null, null, true, xContentType); } + + /** + * Adds framed data in binary format + */ + public BulkRequest add(BytesReference data, @Nullable String defaultIndex, + XContentType xContentType) throws IOException { + return add(data, defaultIndex, MapperService.SINGLE_MAPPING_NAME, null, null, null, null, true, xContentType); + } /** * Adds framed data in binary format + * @deprecated use {@link #add(BytesReference, String, boolean, XContentType)} instead */ + @Deprecated public BulkRequest add(BytesReference data, @Nullable String defaultIndex, @Nullable String defaultType, boolean allowExplicitIndex, XContentType xContentType) throws IOException { return add(data, defaultIndex, defaultType, null, null, null, null, allowExplicitIndex, xContentType); } + + /** + * Adds framed data in binary format + */ + public BulkRequest add(BytesReference data, @Nullable String defaultIndex, boolean allowExplicitIndex, + XContentType xContentType) throws IOException { + return add(data, defaultIndex, MapperService.SINGLE_MAPPING_NAME, null, null, null, null, allowExplicitIndex, xContentType); + } + + public BulkRequest add(BytesReference data, @Nullable String defaultIndex, + @Nullable String defaultRouting, @Nullable FetchSourceContext defaultFetchSourceContext, + @Nullable String defaultPipeline, @Nullable Object payload, boolean allowExplicitIndex, + XContentType xContentType) throws IOException { + return add(data, defaultIndex, MapperService.SINGLE_MAPPING_NAME, defaultRouting, defaultFetchSourceContext, + defaultPipeline, payload, allowExplicitIndex, xContentType); + } + /** + * @deprecated use {@link #add(BytesReference, String, String, FetchSourceContext, String, Object, boolean, XContentType)} instead + */ + @Deprecated public BulkRequest add(BytesReference data, @Nullable String defaultIndex, @Nullable String defaultType, @Nullable String defaultRouting, @Nullable FetchSourceContext defaultFetchSourceContext, @Nullable String defaultPipeline, @Nullable Object payload, boolean allowExplicitIndex, @@ -371,7 +426,8 @@ public BulkRequest add(BytesReference data, @Nullable String defaultIndex, @Null throw new IllegalArgumentException("explicit index in bulk
is not allowed"); } index = parser.text(); - } else if (TYPE.match(currentFieldName, parser.getDeprecationHandler())) { + } else if (TYPE.match(currentFieldName, parser.getDeprecationHandler())) { + deprecationLogger.deprecatedAndMaybeLog("bulk_with_types", RestBulkAction.TYPES_DEPRECATION_MESSAGE); type = parser.text(); } else if (ID.match(currentFieldName, parser.getDeprecationHandler())) { id = parser.text(); @@ -625,7 +681,9 @@ public String getDescription() { private void applyGlobalMandatoryParameters(DocWriteRequest request) { request.index(valueOrDefault(request.index(), globalIndex)); - request.type(valueOrDefault(request.type(), globalType)); + if (Strings.isNullOrEmpty(globalType) == false && MapperService.SINGLE_MAPPING_NAME.equals(globalType) == false) { + request.defaultTypeIfNull(globalType); + } } private static String valueOrDefault(String value, String globalDefault) { diff --git a/server/src/main/java/org/elasticsearch/action/bulk/BulkRequestBuilder.java b/server/src/main/java/org/elasticsearch/action/bulk/BulkRequestBuilder.java index fc91f4f907ee2..34837d0e696db 100644 --- a/server/src/main/java/org/elasticsearch/action/bulk/BulkRequestBuilder.java +++ b/server/src/main/java/org/elasticsearch/action/bulk/BulkRequestBuilder.java @@ -33,6 +33,7 @@ import org.elasticsearch.common.Nullable; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.index.mapper.MapperService; /** * A bulk request holds an ordered {@link IndexRequest}s and {@link DeleteRequest}s and allows to executes @@ -41,10 +42,18 @@ public class BulkRequestBuilder extends ActionRequestBuilder implements WriteRequestBuilder { + /** + * @deprecated use {@link #BulkRequestBuilder(ElasticsearchClient, BulkAction, String)} instead + */ + @Deprecated public BulkRequestBuilder(ElasticsearchClient client, BulkAction action, @Nullable String globalIndex, @Nullable String globalType) { super(client, action, new BulkRequest(globalIndex, globalType)); } + public BulkRequestBuilder(ElasticsearchClient client, BulkAction action, @Nullable String globalIndex) { + super(client, action, new BulkRequest(globalIndex)); + } + public BulkRequestBuilder(ElasticsearchClient client, BulkAction action) { super(client, action, new BulkRequest()); } @@ -104,19 +113,30 @@ public BulkRequestBuilder add(UpdateRequestBuilder request) { * Adds a framed data in binary format */ public BulkRequestBuilder add(byte[] data, int from, int length, XContentType xContentType) throws Exception { - request.add(data, from, length, null, null, xContentType); + request.add(data, from, length, null, xContentType); return this; } /** * Adds a framed data in binary format + * @deprecated use {@link #add(byte[], int, int, String, XContentType)} instead */ + @Deprecated public BulkRequestBuilder add(byte[] data, int from, int length, @Nullable String defaultIndex, @Nullable String defaultType, XContentType xContentType) throws Exception { request.add(data, from, length, defaultIndex, defaultType, xContentType); return this; } + /** + * Adds a framed data in binary format + */ + public BulkRequestBuilder add(byte[] data, int from, int length, @Nullable String defaultIndex, + XContentType xContentType) throws Exception { + request.add(data, from, length, defaultIndex, MapperService.SINGLE_MAPPING_NAME, xContentType); + return this; + } + /** * Sets the number of shard copies that must be active before proceeding with the write. 
* See {@link ReplicationRequest#waitForActiveShards(ActiveShardCount)} for details. diff --git a/server/src/main/java/org/elasticsearch/action/bulk/TransportBulkAction.java b/server/src/main/java/org/elasticsearch/action/bulk/TransportBulkAction.java index a89d162979f5f..7979b633efebe 100644 --- a/server/src/main/java/org/elasticsearch/action/bulk/TransportBulkAction.java +++ b/server/src/main/java/org/elasticsearch/action/bulk/TransportBulkAction.java @@ -78,6 +78,7 @@ import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicInteger; import java.util.function.LongSupplier; +import java.util.function.Supplier; import java.util.stream.Collectors; import static java.util.Collections.emptyMap; @@ -113,7 +114,7 @@ public TransportBulkAction(ThreadPool threadPool, TransportService transportServ TransportShardBulkAction shardBulkAction, NodeClient client, ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver, AutoCreateIndex autoCreateIndex, LongSupplier relativeTimeProvider) { - super(BulkAction.NAME, transportService, actionFilters, BulkRequest::new); + super(BulkAction.NAME, transportService, actionFilters, (Supplier) BulkRequest::new); Objects.requireNonNull(relativeTimeProvider); this.threadPool = threadPool; this.clusterService = clusterService; diff --git a/server/src/main/java/org/elasticsearch/action/bulk/TransportShardBulkAction.java b/server/src/main/java/org/elasticsearch/action/bulk/TransportShardBulkAction.java index e04da71749f09..d56cd5c8cbce3 100644 --- a/server/src/main/java/org/elasticsearch/action/bulk/TransportShardBulkAction.java +++ b/server/src/main/java/org/elasticsearch/action/bulk/TransportShardBulkAction.java @@ -420,10 +420,8 @@ private static Engine.Result performOpOnReplica(DocWriteResponse primaryResponse case INDEX: final IndexRequest indexRequest = (IndexRequest) docWriteRequest; final ShardId shardId = replica.shardId(); - final SourceToParse sourceToParse = - SourceToParse.source(shardId.getIndexName(), - indexRequest.type(), indexRequest.id(), indexRequest.source(), indexRequest.getContentType()) - .routing(indexRequest.routing()); + final SourceToParse sourceToParse = new SourceToParse(shardId.getIndexName(), indexRequest.type(), indexRequest.id(), + indexRequest.source(), indexRequest.getContentType(), indexRequest.routing()); result = replica.applyIndexOperationOnReplica(primaryResponse.getSeqNo(), primaryResponse.getVersion(), indexRequest.getAutoGeneratedTimestamp(), indexRequest.isRetry(), sourceToParse); break; @@ -457,8 +455,7 @@ private static void executeIndexRequestOnPrimary(BulkPrimaryExecutionContext con final IndexRequest request = context.getRequestToExecute(); final IndexShard primary = context.getPrimary(); final SourceToParse sourceToParse = - SourceToParse.source(request.index(), request.type(), request.id(), request.source(), request.getContentType()) - .routing(request.routing()); + new SourceToParse(request.index(), request.type(), request.id(), request.source(), request.getContentType(), request.routing()); executeOnPrimaryWhileHandlingMappingUpdates(context, () -> primary.applyIndexOperationOnPrimary(request.version(), request.versionType(), sourceToParse, diff --git a/server/src/main/java/org/elasticsearch/action/delete/DeleteRequest.java b/server/src/main/java/org/elasticsearch/action/delete/DeleteRequest.java index 7fb19bbf9eb93..8d2967fd28ba4 100644 --- a/server/src/main/java/org/elasticsearch/action/delete/DeleteRequest.java +++ 
b/server/src/main/java/org/elasticsearch/action/delete/DeleteRequest.java @@ -53,7 +53,8 @@ public class DeleteRequest extends ReplicatedWriteRequest implements DocWriteRequest, CompositeIndicesRequest { - private String type = MapperService.SINGLE_MAPPING_NAME; + // Set to null initially so we can know to override in bulk requests that have a default type. + private String type; private String id; @Nullable private String routing; @@ -103,7 +104,7 @@ public DeleteRequest(String index, String id) { @Override public ActionRequestValidationException validate() { ActionRequestValidationException validationException = super.validate(); - if (Strings.isEmpty(type)) { + if (Strings.isEmpty(type())) { validationException = addValidationError("type is missing", validationException); } if (Strings.isEmpty(id)) { @@ -142,6 +143,9 @@ public ActionRequestValidationException validate() { @Deprecated @Override public String type() { + if (type == null) { + return MapperService.SINGLE_MAPPING_NAME; + } return type; } @@ -156,6 +160,22 @@ public DeleteRequest type(String type) { this.type = type; return this; } + + /** + * Set the default type supplied to a bulk + * request if this individual request's type is null + * or empty + * + * @deprecated Types are in the process of being removed. + */ + @Deprecated + @Override + public DeleteRequest defaultTypeIfNull(String defaultType) { + if (Strings.isNullOrEmpty(type)) { + type = defaultType; + } + return this; + } /** * The id of the document to delete. @@ -295,7 +315,9 @@ public void readFrom(StreamInput in) throws IOException { @Override public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); - out.writeString(type); + // A 7.x request allows null types but if deserialized in a 6.x node will cause nullpointer exceptions. + // So we use the type accessor method here to make the type non-null (will default it to "_doc"). + out.writeString(type()); out.writeString(id); out.writeOptionalString(routing()); if (out.getVersion().before(Version.V_7_0_0)) { @@ -316,7 +338,7 @@ public void writeTo(StreamOutput out) throws IOException { @Override public String toString() { - return "delete {[" + index + "][" + type + "][" + id + "]}"; + return "delete {[" + index + "][" + type() + "][" + id + "]}"; } /** diff --git a/server/src/main/java/org/elasticsearch/action/index/IndexRequest.java b/server/src/main/java/org/elasticsearch/action/index/IndexRequest.java index 195f6863b3d0c..a9aac3025de1e 100644 --- a/server/src/main/java/org/elasticsearch/action/index/IndexRequest.java +++ b/server/src/main/java/org/elasticsearch/action/index/IndexRequest.java @@ -31,6 +31,7 @@ import org.elasticsearch.cluster.metadata.MappingMetaData; import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.common.Nullable; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.UUIDs; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; @@ -82,7 +83,8 @@ public class IndexRequest extends ReplicatedWriteRequest implement */ static final int MAX_SOURCE_LENGTH_IN_TOSTRING = 2048; - private String type = MapperService.SINGLE_MAPPING_NAME; + // Set to null initially so we can know to override in bulk requests that have a default type. 
+ private String type; private String id; @Nullable private String routing; @@ -152,12 +154,12 @@ public IndexRequest(String index, String type, String id) { @Override public ActionRequestValidationException validate() { ActionRequestValidationException validationException = super.validate(); - if (type == null) { - validationException = addValidationError("type is missing", validationException); - } if (source == null) { validationException = addValidationError("source is missing", validationException); } + if (Strings.isEmpty(type())) { + validationException = addValidationError("type is missing", validationException); + } if (contentType == null) { validationException = addValidationError("content type is missing", validationException); } @@ -239,6 +241,9 @@ public XContentType getContentType() { @Deprecated @Override public String type() { + if (type == null) { + return MapperService.SINGLE_MAPPING_NAME; + } return type; } @@ -253,6 +258,20 @@ public IndexRequest type(String type) { return this; } + /** + * Set the default type supplied to a bulk + * request if this individual request's type is null + * or empty + * @deprecated Types are in the process of being removed. + */ + @Deprecated + @Override + public IndexRequest defaultTypeIfNull(String defaultType) { + if (Strings.isNullOrEmpty(type)) { + type = defaultType; + } + return this; + } /** * The id of the indexed document. If not set, will be automatically generated. */ @@ -563,7 +582,7 @@ public void process(Version indexCreatedVersion, @Nullable MappingMetaData mappi if (mappingMd != null) { // might as well check for routing here if (mappingMd.routing().required() && routing == null) { - throw new RoutingMissingException(concreteIndex, type, id); + throw new RoutingMissingException(concreteIndex, type(), id); } } @@ -629,7 +648,9 @@ public void readFrom(StreamInput in) throws IOException { @Override public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); - out.writeOptionalString(type); + // A 7.x request allows null types but if deserialized in a 6.x node will cause nullpointer exceptions. + // So we use the type accessor method here to make the type non-null (will default it to "_doc"). 
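The `type` nullability dance in `DeleteRequest` and `IndexRequest` above is subtle enough to merit a reduced sketch (hypothetical class, mirroring the logic rather than the real API): the field stays `null` until something sets it, the getter defaults reads to `_doc`, and `defaultTypeIfNull` lets a bulk-level global type fill the gap without clobbering an explicit per-request type. Serialization goes through the getter so an older 6.x node never sees a `null` type on the wire.

```java
// Reduced model of the type-defaulting logic above.
class TypedRequestSketch {
    private String type; // intentionally null until set

    String type() {
        return type == null ? "_doc" : type; // reads default to _doc
    }

    TypedRequestSketch type(String type) {
        this.type = type; // an explicit type always wins
        return this;
    }

    TypedRequestSketch defaultTypeIfNull(String defaultType) {
        if (type == null || type.isEmpty()) {
            type = defaultType; // only fills in when nothing was set
        }
        return this;
    }

    void writeTo(java.io.DataOutput out) throws java.io.IOException {
        out.writeUTF(type()); // never null, safe for older nodes
    }
}
```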
+ out.writeOptionalString(type()); out.writeOptionalString(id); out.writeOptionalString(routing); if (out.getVersion().before(Version.V_7_0_0)) { @@ -679,7 +700,7 @@ public String toString() { } catch (Exception e) { // ignore } - return "index {[" + index + "][" + type + "][" + id + "], source[" + sSource + "]}"; + return "index {[" + index + "][" + type() + "][" + id + "], source[" + sSource + "]}"; } diff --git a/server/src/main/java/org/elasticsearch/action/search/AbstractSearchAsyncAction.java b/server/src/main/java/org/elasticsearch/action/search/AbstractSearchAsyncAction.java index 27293e8e50f8d..f031dfa581064 100644 --- a/server/src/main/java/org/elasticsearch/action/search/AbstractSearchAsyncAction.java +++ b/server/src/main/java/org/elasticsearch/action/search/AbstractSearchAsyncAction.java @@ -34,6 +34,7 @@ import org.elasticsearch.search.SearchShardTarget; import org.elasticsearch.search.internal.AliasFilter; import org.elasticsearch.search.internal.InternalSearchResponse; +import org.elasticsearch.search.internal.SearchContext; import org.elasticsearch.search.internal.ShardSearchTransportRequest; import org.elasticsearch.transport.Transport; @@ -113,7 +114,11 @@ public final void start() { if (getNumShards() == 0) { //no search shards to search on, bail with empty response //(it happens with search across _all with no indices around and consistent with broadcast operations) - listener.onResponse(new SearchResponse(InternalSearchResponse.empty(), null, 0, 0, 0, buildTookInMillis(), + + boolean withTotalHits = request.source() != null ? + // total hits is null in the response if the tracking of total hits is disabled + request.source().trackTotalHitsUpTo() != SearchContext.TRACK_TOTAL_HITS_DISABLED : true; + listener.onResponse(new SearchResponse(InternalSearchResponse.empty(withTotalHits), null, 0, 0, 0, buildTookInMillis(), ShardSearchFailure.EMPTY_ARRAY, clusters)); return; } @@ -166,7 +171,6 @@ private void executePhase(SearchPhase phase) { } } - private ShardSearchFailure[] buildShardFailures() { AtomicArray shardFailures = this.shardFailures.get(); if (shardFailures == null) { @@ -318,8 +322,8 @@ public final void onFailure(Exception e) { listener.onFailure(e); } + @Override public final ShardSearchTransportRequest buildShardSearchRequest(SearchShardIterator shardIt) { - String clusterAlias = shardIt.getClusterAlias(); AliasFilter filter = aliasFilter.get(shardIt.shardId().getIndex().getUUID()); assert filter != null; float indexBoost = concreteIndexBoosts.getOrDefault(shardIt.shardId().getIndex().getUUID(), DEFAULT_INDEX_BOOST); @@ -327,7 +331,7 @@ public final ShardSearchTransportRequest buildShardSearchRequest(SearchShardIter final String[] routings = indexRoutings.getOrDefault(indexName, Collections.emptySet()) .toArray(new String[0]); return new ShardSearchTransportRequest(shardIt.getOriginalIndices(), request, shardIt.shardId(), getNumShards(), - filter, indexBoost, timeProvider.getAbsoluteStartMillis(), clusterAlias, routings); + filter, indexBoost, timeProvider.getAbsoluteStartMillis(), shardIt.getClusterAlias(), routings); } /** diff --git a/server/src/main/java/org/elasticsearch/action/search/InitialSearchPhase.java b/server/src/main/java/org/elasticsearch/action/search/InitialSearchPhase.java index 4e0db4644786e..65f613521c0a8 100644 --- a/server/src/main/java/org/elasticsearch/action/search/InitialSearchPhase.java +++ b/server/src/main/java/org/elasticsearch/action/search/InitialSearchPhase.java @@ -90,8 +90,7 @@ private void onShardFailure(final int 
shardIndex, @Nullable ShardRouting shard, final SearchShardIterator shardIt, Exception e) { // we always add the shard failure for a specific shard instance // we do make sure to clean it on a successful response from a shard - SearchShardTarget shardTarget = new SearchShardTarget(nodeId, shardIt.shardId(), shardIt.getClusterAlias(), - shardIt.getOriginalIndices()); + SearchShardTarget shardTarget = shardIt.newSearchShardTarget(nodeId); onShardFailure(shardIndex, shardTarget, e); if (totalOps.incrementAndGet() == expectedTotalOps) { @@ -161,14 +160,6 @@ public final void run() { } } - private void maybeFork(final Thread thread, final Runnable runnable) { - if (thread == Thread.currentThread()) { - fork(runnable); - } else { - runnable.run(); - } - } - private void fork(final Runnable runnable) { executor.execute(new AbstractRunnable() { @Override @@ -233,10 +224,18 @@ private synchronized Runnable tryQueue(Runnable runnable) { } private void executeNext(PendingExecutions pendingExecutions, Thread originalThread) { + executeNext(pendingExecutions == null ? null : pendingExecutions::finishAndRunNext, originalThread); + } + + protected void executeNext(Runnable runnable, Thread originalThread) { if (throttleConcurrentRequests) { - maybeFork(originalThread, pendingExecutions::finishAndRunNext); + if (originalThread == Thread.currentThread()) { + fork(runnable); + } else { + runnable.run(); + } } else { - assert pendingExecutions == null; + assert runnable == null; } } @@ -257,28 +256,26 @@ private void performPhaseOnShard(final int shardIndex, final SearchShardIterator Runnable r = () -> { final Thread thread = Thread.currentThread(); try { - executePhaseOnShard(shardIt, shard, new SearchActionListener(new SearchShardTarget(shard.currentNodeId(), - shardIt.shardId(), shardIt.getClusterAlias(), shardIt.getOriginalIndices()), shardIndex) { - @Override - public void innerOnResponse(FirstResult result) { - try { - onShardResult(result, shardIt); - } finally { - executeNext(pendingExecutions, thread); + executePhaseOnShard(shardIt, shard, + new SearchActionListener(shardIt.newSearchShardTarget(shard.currentNodeId()), shardIndex) { + @Override + public void innerOnResponse(FirstResult result) { + try { + onShardResult(result, shardIt); + } finally { + executeNext(pendingExecutions, thread); + } } - } - @Override - public void onFailure(Exception t) { - try { - onShardFailure(shardIndex, shard, shard.currentNodeId(), shardIt, t); - } finally { - executeNext(pendingExecutions, thread); + @Override + public void onFailure(Exception t) { + try { + onShardFailure(shardIndex, shard, shard.currentNodeId(), shardIt, t); + } finally { + executeNext(pendingExecutions, thread); + } } - } - }); - - + }); } catch (final Exception e) { try { /* diff --git a/server/src/main/java/org/elasticsearch/action/search/ScrollIdForNode.java b/server/src/main/java/org/elasticsearch/action/search/ScrollIdForNode.java index fc6585054ddf9..18b61516897d1 100644 --- a/server/src/main/java/org/elasticsearch/action/search/ScrollIdForNode.java +++ b/server/src/main/java/org/elasticsearch/action/search/ScrollIdForNode.java @@ -36,6 +36,7 @@ public String getNode() { return node; } + @Nullable public String getClusterAlias() { return clusterAlias; } diff --git a/server/src/main/java/org/elasticsearch/action/search/SearchPhaseController.java b/server/src/main/java/org/elasticsearch/action/search/SearchPhaseController.java index 82f7760c1abdc..418d95b2077a9 100644 --- 
a/server/src/main/java/org/elasticsearch/action/search/SearchPhaseController.java +++ b/server/src/main/java/org/elasticsearch/action/search/SearchPhaseController.java @@ -48,6 +48,7 @@ import org.elasticsearch.search.dfs.DfsSearchResult; import org.elasticsearch.search.fetch.FetchSearchResult; import org.elasticsearch.search.internal.InternalSearchResponse; +import org.elasticsearch.search.internal.SearchContext; import org.elasticsearch.search.profile.ProfileShardResult; import org.elasticsearch.search.profile.SearchProfileShardResults; import org.elasticsearch.search.query.QuerySearchResult; @@ -407,8 +408,8 @@ private SearchHits getHits(ReducedQueryPhase reducedQueryPhase, boolean ignoreFr * Reduces the given query results and consumes all aggregations and profile results. * @param queryResults a list of non-null query shard results */ - public ReducedQueryPhase reducedScrollQueryPhase(Collection queryResults) { - return reducedQueryPhase(queryResults, true, true); + ReducedQueryPhase reducedScrollQueryPhase(Collection queryResults) { + return reducedQueryPhase(queryResults, true, SearchContext.TRACK_TOTAL_HITS_ACCURATE, true); } /** @@ -416,8 +417,9 @@ public ReducedQueryPhase reducedScrollQueryPhase(Collection queryResults, - boolean isScrollRequest, boolean trackTotalHits) { - return reducedQueryPhase(queryResults, null, new ArrayList<>(), new TopDocsStats(trackTotalHits), 0, isScrollRequest); + boolean isScrollRequest, int trackTotalHitsUpTo, boolean performFinalReduce) { + return reducedQueryPhase(queryResults, null, new ArrayList<>(), new TopDocsStats(trackTotalHitsUpTo), + 0, isScrollRequest, performFinalReduce); } /** @@ -433,7 +435,8 @@ public ReducedQueryPhase reducedQueryPhase(Collection queryResults, List bufferedAggs, List bufferedTopDocs, - TopDocsStats topDocsStats, int numReducePhases, boolean isScrollRequest) { + TopDocsStats topDocsStats, int numReducePhases, boolean isScrollRequest, + boolean performFinalReduce) { assert numReducePhases >= 0 : "num reduce phases must be >= 0 but was: " + numReducePhases; numReducePhases++; // increment for this phase boolean timedOut = false; @@ -499,7 +502,7 @@ private ReducedQueryPhase reducedQueryPhase(Collection= 2 if there is more than one expected result"); @@ -644,6 +648,8 @@ private QueryPhaseResultConsumer(SearchPhaseController controller, int expectedR this.hasTopDocs = hasTopDocs; this.hasAggs = hasAggs; this.bufferSize = bufferSize; + this.topDocsStats = new TopDocsStats(trackTotalHitsUpTo); + this.performFinalReduce = performFinalReduce; } @Override @@ -693,7 +699,7 @@ private synchronized List getRemainingTopDocs() { @Override public ReducedQueryPhase reduce() { return controller.reducedQueryPhase(results.asList(), getRemainingAggs(), getRemainingTopDocs(), topDocsStats, - numReducePhases, false); + numReducePhases, false, performFinalReduce); } /** @@ -714,46 +720,65 @@ InitialSearchPhase.ArraySearchPhaseResults newSearchPhaseResu boolean isScrollRequest = request.scroll() != null; final boolean hasAggs = source != null && source.aggregations() != null; final boolean hasTopDocs = source == null || source.size() != 0; - final boolean trackTotalHits = source == null || source.trackTotalHits(); + final int trackTotalHitsUpTo = source == null ? 
SearchContext.DEFAULT_TRACK_TOTAL_HITS_UP_TO : source.trackTotalHitsUpTo(); + final boolean finalReduce = request.getLocalClusterAlias() == null; if (isScrollRequest == false && (hasAggs || hasTopDocs)) { // no incremental reduce if scroll is used - we only hit a single shard or sometimes more... if (request.getBatchedReduceSize() < numShards) { // only use this if there are aggs and if there are more shards than we should reduce at once - return new QueryPhaseResultConsumer(this, numShards, request.getBatchedReduceSize(), hasTopDocs, hasAggs); + return new QueryPhaseResultConsumer(this, numShards, request.getBatchedReduceSize(), hasTopDocs, hasAggs, + trackTotalHitsUpTo, finalReduce); } } return new InitialSearchPhase.ArraySearchPhaseResults(numShards) { @Override ReducedQueryPhase reduce() { - return reducedQueryPhase(results.asList(), isScrollRequest, trackTotalHits); + return reducedQueryPhase(results.asList(), isScrollRequest, trackTotalHitsUpTo, finalReduce); } }; } static final class TopDocsStats { - final boolean trackTotalHits; + final int trackTotalHitsUpTo; private long totalHits; private TotalHits.Relation totalHitsRelation; long fetchHits; float maxScore = Float.NEGATIVE_INFINITY; TopDocsStats() { - this(true); + this(SearchContext.TRACK_TOTAL_HITS_ACCURATE); } - TopDocsStats(boolean trackTotalHits) { - this.trackTotalHits = trackTotalHits; + TopDocsStats(int trackTotalHitsUpTo) { + this.trackTotalHitsUpTo = trackTotalHitsUpTo; this.totalHits = 0; - this.totalHitsRelation = trackTotalHits ? Relation.EQUAL_TO : Relation.GREATER_THAN_OR_EQUAL_TO; + this.totalHitsRelation = Relation.EQUAL_TO; } TotalHits getTotalHits() { - return trackTotalHits ? new TotalHits(totalHits, totalHitsRelation) : null; + if (trackTotalHitsUpTo == SearchContext.TRACK_TOTAL_HITS_DISABLED) { + return null; + } else if (trackTotalHitsUpTo == SearchContext.TRACK_TOTAL_HITS_ACCURATE) { + assert totalHitsRelation == Relation.EQUAL_TO; + return new TotalHits(totalHits, totalHitsRelation); + } else { + if (totalHits < trackTotalHitsUpTo) { + return new TotalHits(totalHits, totalHitsRelation); + } else { + /** + * The user requested to count the total hits up to trackTotalHitsUpTo + * so we return this lower bound when the total hits is greater than this value. + * This can happen when multiple shards are merged since the limit to track total hits + * is applied per shard. 
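The three-way decision `TopDocsStats.getTotalHits()` makes above can be summarized in a few lines. Here is a self-contained sketch with the `SearchContext` sentinel values inlined; to the best of my reading, `TRACK_TOTAL_HITS_DISABLED` is `-1` and `TRACK_TOTAL_HITS_ACCURATE` is `Integer.MAX_VALUE`, but treat those constants as an assumption.

```java
// Sketch of the total-hits reporting decision: disabled, exact, or lower bound.
final class TotalHitsSketch {
    static final int TRACK_TOTAL_HITS_DISABLED = -1;                 // assumed sentinel
    static final int TRACK_TOTAL_HITS_ACCURATE = Integer.MAX_VALUE;  // assumed sentinel

    static String describe(long totalHits, int trackTotalHitsUpTo) {
        if (trackTotalHitsUpTo == TRACK_TOTAL_HITS_DISABLED) {
            return null; // total hits omitted from the response
        }
        if (trackTotalHitsUpTo == TRACK_TOTAL_HITS_ACCURATE || totalHits < trackTotalHitsUpTo) {
            return totalHits + " (relation=eq)"; // exact count
        }
        // the per-shard limit was crossed somewhere, so only a lower bound is safe
        return trackTotalHitsUpTo + " (relation=gte)";
    }

    public static void main(String[] args) {
        System.out.println(describe(42, TRACK_TOTAL_HITS_ACCURATE)); // 42 (relation=eq)
        System.out.println(describe(42_000, 10_000));                // 10000 (relation=gte)
        System.out.println(describe(42, TRACK_TOTAL_HITS_DISABLED)); // null
    }
}
```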
+ */ + return new TotalHits(trackTotalHitsUpTo, Relation.GREATER_THAN_OR_EQUAL_TO); + } + } } void add(TopDocsAndMaxScore topDocs) { - if (trackTotalHits) { + if (trackTotalHitsUpTo != SearchContext.TRACK_TOTAL_HITS_DISABLED) { totalHits += topDocs.topDocs.totalHits.value; if (topDocs.topDocs.totalHits.relation == Relation.GREATER_THAN_OR_EQUAL_TO) { totalHitsRelation = TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO; diff --git a/server/src/main/java/org/elasticsearch/action/search/SearchRequest.java b/server/src/main/java/org/elasticsearch/action/search/SearchRequest.java index 68968c071f43c..69b090fb89a5a 100644 --- a/server/src/main/java/org/elasticsearch/action/search/SearchRequest.java +++ b/server/src/main/java/org/elasticsearch/action/search/SearchRequest.java @@ -62,6 +62,11 @@ public final class SearchRequest extends ActionRequest implements IndicesRequest public static final int DEFAULT_PRE_FILTER_SHARD_SIZE = 128; public static final int DEFAULT_BATCHED_REDUCE_SIZE = 512; + private static final long DEFAULT_ABSOLUTE_START_MILLIS = -1; + + private final String localClusterAlias; + private final long absoluteStartMillis; + private SearchType searchType = SearchType.DEFAULT; private String[] indices = Strings.EMPTY_ARRAY; @@ -92,6 +97,8 @@ public final class SearchRequest extends ActionRequest implements IndicesRequest private IndicesOptions indicesOptions = DEFAULT_INDICES_OPTIONS; public SearchRequest() { + this.localClusterAlias = null; + this.absoluteStartMillis = DEFAULT_ABSOLUTE_START_MILLIS; } /** @@ -111,6 +118,8 @@ public SearchRequest(SearchRequest searchRequest) { this.searchType = searchRequest.searchType; this.source = searchRequest.source; this.types = searchRequest.types; + this.localClusterAlias = searchRequest.localClusterAlias; + this.absoluteStartMillis = searchRequest.absoluteStartMillis; } /** @@ -125,6 +134,7 @@ public SearchRequest(String... indices) { * Constructs a new search request against the provided indices with the given search source. */ public SearchRequest(String[] indices, SearchSourceBuilder source) { + this(); if (source == null) { throw new IllegalArgumentException("source must not be null"); } @@ -132,6 +142,20 @@ public SearchRequest(String[] indices, SearchSourceBuilder source) { this.source = source; } + /** + * Creates a new search request by providing the alias of the cluster where it will be executed, as well as the current time in + * milliseconds from the epoch time. Used when a {@link SearchRequest} is created and executed as part of a cross-cluster search + * request performing local reduction on each cluster. The coordinating CCS node provides the alias to prefix index names with in + * the returned search results, and the current time to be used on the remote clusters to ensure that the same value is used. + */ + SearchRequest(String localClusterAlias, long absoluteStartMillis) { + this.localClusterAlias = Objects.requireNonNull(localClusterAlias, "cluster alias must not be null"); + if (absoluteStartMillis < 0) { + throw new IllegalArgumentException("absoluteStartMillis must not be negative but was [" + absoluteStartMillis + "]"); + } + this.absoluteStartMillis = absoluteStartMillis; + } + /** * Constructs a new search request from reading the specified stream. 
* @@ -141,10 +165,7 @@ public SearchRequest(String[] indices, SearchSourceBuilder source) { public SearchRequest(StreamInput in) throws IOException { super(in); searchType = SearchType.fromId(in.readByte()); - indices = new String[in.readVInt()]; - for (int i = 0; i < indices.length; i++) { - indices[i] = in.readString(); - } + indices = in.readStringArray(); routing = in.readOptionalString(); preference = in.readOptionalString(); scroll = in.readOptionalWriteable(Scroll::new); @@ -158,16 +179,24 @@ public SearchRequest(StreamInput in) throws IOException { if (in.getVersion().onOrAfter(Version.V_6_3_0)) { allowPartialSearchResults = in.readOptionalBoolean(); } + if (in.getVersion().onOrAfter(Version.V_6_7_0)) { + localClusterAlias = in.readOptionalString(); + if (localClusterAlias != null) { + absoluteStartMillis = in.readVLong(); + } else { + absoluteStartMillis = DEFAULT_ABSOLUTE_START_MILLIS; + } + } else { + localClusterAlias = null; + absoluteStartMillis = DEFAULT_ABSOLUTE_START_MILLIS; + } } @Override public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); out.writeByte(searchType.id()); - out.writeVInt(indices.length); - for (String index : indices) { - out.writeString(index); - } + out.writeStringArray(indices); out.writeOptionalString(routing); out.writeOptionalString(preference); out.writeOptionalWriteable(scroll); @@ -181,6 +210,12 @@ public void writeTo(StreamOutput out) throws IOException { if (out.getVersion().onOrAfter(Version.V_6_3_0)) { out.writeOptionalBoolean(allowPartialSearchResults); } + if (out.getVersion().onOrAfter(Version.V_6_7_0)) { + out.writeOptionalString(localClusterAlias); + if (localClusterAlias != null) { + out.writeVLong(absoluteStartMillis); + } + } } @Override @@ -209,6 +244,27 @@ public ActionRequestValidationException validate() { return validationException; } + /** + * Returns the alias of the cluster that this search request is being executed on. A non-null value indicates that this search request + * is being executed as part of a locally reduced cross-cluster search request. The cluster alias is used to prefix index names + * returned as part of search hits with the alias of the cluster they came from. + */ + @Nullable + String getLocalClusterAlias() { + return localClusterAlias; + } + + /** + * Returns the current time in milliseconds from the time epoch, to be used for the execution of this search request. Used to + * ensure that the same value, determined by the coordinating node, is used on all nodes involved in the execution of the search + * request. When created through {@link #SearchRequest(String, long)}, this method returns the provided current time, otherwise + * it will return {@link System#currentTimeMillis()}. + * + */ + long getOrCreateAbsoluteStartMillis() { + return absoluteStartMillis == DEFAULT_ABSOLUTE_START_MILLIS ? System.currentTimeMillis() : absoluteStartMillis; + } + /** * Sets the indices the search will be executed on. */ @@ -401,7 +457,6 @@ public Boolean allowPartialSearchResults() { return this.allowPartialSearchResults; } - /** * Sets the number of shard results that should be reduced at once on the coordinating node. This value should be used as a protection * mechanism to reduce the memory overhead per search request if the potential number of shards in the request can be large. @@ -464,13 +519,6 @@ public int getPreFilterShardSize() { return preFilterShardSize; } - /** - * Returns true iff the maxConcurrentShardRequest is set. 
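Why carry `absoluteStartMillis` around at all? Anything clock-dependent (date math, relative ranges) must resolve identically on every cluster that participates in a locally reduced cross-cluster search, so the coordinating node pins the clock once and ships it. A minimal sketch of that idiom, as a hypothetical class; `-1` is the "not pinned" sentinel, as in the change above.

```java
// Pinned-clock idiom: either read the wall clock lazily (purely local request)
// or reuse the timestamp the CCS coordinating node chose.
final class PinnedClockSketch {
    static final long NOT_PINNED = -1;
    private final long pinnedMillis;

    PinnedClockSketch(long pinnedMillis) {
        this.pinnedMillis = pinnedMillis;
    }

    long nowMillis() {
        return pinnedMillis == NOT_PINNED ? System.currentTimeMillis() : pinnedMillis;
    }

    public static void main(String[] args) {
        System.out.println(new PinnedClockSketch(NOT_PINNED).nowMillis());          // local clock
        System.out.println(new PinnedClockSketch(1_546_300_800_000L).nowMillis());  // pinned value
    }
}
```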
- */ - boolean isMaxConcurrentShardRequestsSet() { - return maxConcurrentShardRequests != 0; - } - /** * @return true if the request only has suggest */ @@ -504,7 +552,7 @@ public String getDescription() { } @Override - public void readFrom(StreamInput in) throws IOException { + public void readFrom(StreamInput in) { throw new UnsupportedOperationException("usage of Streamable is to be replaced by Writeable"); } @@ -529,14 +577,16 @@ public boolean equals(Object o) { Objects.equals(maxConcurrentShardRequests, that.maxConcurrentShardRequests) && Objects.equals(preFilterShardSize, that.preFilterShardSize) && Objects.equals(indicesOptions, that.indicesOptions) && - Objects.equals(allowPartialSearchResults, that.allowPartialSearchResults); + Objects.equals(allowPartialSearchResults, that.allowPartialSearchResults) && + Objects.equals(localClusterAlias, that.localClusterAlias) && + absoluteStartMillis == that.absoluteStartMillis; } @Override public int hashCode() { return Objects.hash(searchType, Arrays.hashCode(indices), routing, preference, source, requestCache, scroll, Arrays.hashCode(types), indicesOptions, batchedReduceSize, maxConcurrentShardRequests, preFilterShardSize, - allowPartialSearchResults); + allowPartialSearchResults, localClusterAlias, absoluteStartMillis); } @Override @@ -554,6 +604,8 @@ public String toString() { ", batchedReduceSize=" + batchedReduceSize + ", preFilterShardSize=" + preFilterShardSize + ", allowPartialSearchResults=" + allowPartialSearchResults + + ", localClusterAlias=" + localClusterAlias + + ", getOrCreateAbsoluteStartMillis=" + absoluteStartMillis + ", source=" + source + '}'; } } diff --git a/server/src/main/java/org/elasticsearch/action/search/SearchRequestBuilder.java b/server/src/main/java/org/elasticsearch/action/search/SearchRequestBuilder.java index 732debf2a1305..e3d63cac05069 100644 --- a/server/src/main/java/org/elasticsearch/action/search/SearchRequestBuilder.java +++ b/server/src/main/java/org/elasticsearch/action/search/SearchRequestBuilder.java @@ -376,6 +376,14 @@ public SearchRequestBuilder setTrackTotalHits(boolean trackTotalHits) { return this; } + /** + * Sets the threshold up to which the total hit count for the query is tracked accurately; past it, only a lower bound is reported. + */ + public SearchRequestBuilder setTrackTotalHitsUpTo(int trackTotalHitsUpTo) { + sourceBuilder().trackTotalHitsUpTo(trackTotalHitsUpTo); + return this; + } + /** * Adds stored fields to load and return (note, it must be stored) as part of the search request. * To disable the stored fields entirely (source and metadata fields) use {@code storedField("_none_")}. diff --git a/server/src/main/java/org/elasticsearch/action/search/SearchShardIterator.java b/server/src/main/java/org/elasticsearch/action/search/SearchShardIterator.java index c36d2b7908f78..be3b5d7a9c2b3 100644 --- a/server/src/main/java/org/elasticsearch/action/search/SearchShardIterator.java +++ b/server/src/main/java/org/elasticsearch/action/search/SearchShardIterator.java @@ -22,28 +22,34 @@ import org.elasticsearch.action.OriginalIndices; import org.elasticsearch.cluster.routing.PlainShardIterator; import org.elasticsearch.cluster.routing.ShardRouting; +import org.elasticsearch.common.Nullable; import org.elasticsearch.index.shard.ShardId; +import org.elasticsearch.search.SearchShardTarget; import java.util.List; /** * Extension of {@link PlainShardIterator} used in the search api, which also holds the {@link OriginalIndices} - * of the search request.
Useful especially with cross cluster search, as each cluster has its own set of original indices. + * of the search request (useful especially with cross-cluster search, as each cluster has its own set of original indices) as well as + * the cluster alias. + * @see OriginalIndices */ public final class SearchShardIterator extends PlainShardIterator { private final OriginalIndices originalIndices; - private String clusterAlias; + private final String clusterAlias; private boolean skip = false; /** * Creates a {@link PlainShardIterator} instance that iterates over a subset of the given shards * for the given shardId. * + * @param clusterAlias the alias of the cluster where the shard is located + * @param shardId shard id of the group + * @param shards shards to iterate + * @param originalIndices the indices that the search request originally related to (before any rewriting happened) */ - public SearchShardIterator(String clusterAlias, ShardId shardId, List shards, OriginalIndices originalIndices) { + public SearchShardIterator(@Nullable String clusterAlias, ShardId shardId, List shards, OriginalIndices originalIndices) { super(shardId, shards); this.originalIndices = originalIndices; this.clusterAlias = clusterAlias; @@ -56,10 +62,22 @@ public OriginalIndices getOriginalIndices() { return originalIndices; } + /** + * Returns the alias of the cluster where the shard is located. + */ + @Nullable public String getClusterAlias() { return clusterAlias; } + /** + * Creates a new shard target from this iterator, pointing at the node identified by the provided identifier. + * @see SearchShardTarget + */ + SearchShardTarget newSearchShardTarget(String nodeId) { + return new SearchShardTarget(nodeId, shardId(), clusterAlias, originalIndices); + } + /** * Reset the iterator and mark it as skippable * @see #skip() diff --git a/server/src/main/java/org/elasticsearch/action/search/SearchTransportService.java b/server/src/main/java/org/elasticsearch/action/search/SearchTransportService.java index 54d7aee8f0d62..577ce4f6b7aec 100644 --- a/server/src/main/java/org/elasticsearch/action/search/SearchTransportService.java +++ b/server/src/main/java/org/elasticsearch/action/search/SearchTransportService.java @@ -26,6 +26,7 @@ import org.elasticsearch.action.support.HandledTransportAction.ChannelActionListener; import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.cluster.node.DiscoveryNode; +import org.elasticsearch.common.Nullable; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; @@ -401,11 +402,11 @@ public void onFailure(Exception e) { /** * Returns a connection to the given node on the provided cluster. If the cluster alias is null the node will be resolved * against the local cluster. - * @param clusterAlias the cluster alias the node should be resolve against + * @param clusterAlias the cluster alias the node should be resolved against * @param node the node to resolve * @return a connection to the given node belonging to the cluster with the provided alias. 
*/ - Transport.Connection getConnection(String clusterAlias, DiscoveryNode node) { + Transport.Connection getConnection(@Nullable String clusterAlias, DiscoveryNode node) { if (clusterAlias == null) { return transportService.getConnection(node); } else { diff --git a/server/src/main/java/org/elasticsearch/action/search/SearchType.java b/server/src/main/java/org/elasticsearch/action/search/SearchType.java index 910bc3d676a05..1a382fa9caf27 100644 --- a/server/src/main/java/org/elasticsearch/action/search/SearchType.java +++ b/server/src/main/java/org/elasticsearch/action/search/SearchType.java @@ -36,14 +36,9 @@ public enum SearchType { * document content. The returned number of hits is exactly as specified in size, since they are the only ones that * are fetched. This is very handy when the index has a lot of shards (not replicas, shard id groups). */ - QUERY_THEN_FETCH((byte) 1), + QUERY_THEN_FETCH((byte) 1); // 2 used to be DFS_QUERY_AND_FETCH - - /** - * Only used for pre 5.3 request where this type is still needed - */ - @Deprecated - QUERY_AND_FETCH((byte) 3); + // 3 used to be QUERY_AND_FETCH /** * The default search type ({@link #QUERY_THEN_FETCH}). @@ -99,5 +94,5 @@ public static SearchType fromString(String searchType) { throw new IllegalArgumentException("No search type for [" + searchType + "]"); } } - + } diff --git a/server/src/main/java/org/elasticsearch/action/search/ShardSearchFailure.java b/server/src/main/java/org/elasticsearch/action/search/ShardSearchFailure.java index ddfadfa57e31e..451ceda70fdc4 100644 --- a/server/src/main/java/org/elasticsearch/action/search/ShardSearchFailure.java +++ b/server/src/main/java/org/elasticsearch/action/search/ShardSearchFailure.java @@ -98,8 +98,8 @@ public static ShardSearchFailure readShardSearchFailure(StreamInput in) throws I @Override public void readFrom(StreamInput in) throws IOException { - if (in.readBoolean()) { - shardTarget = new SearchShardTarget(in); + shardTarget = in.readOptionalWriteable(SearchShardTarget::new); + if (shardTarget != null) { index = shardTarget.getFullyQualifiedIndexName(); shardId = shardTarget.getShardId().getId(); } @@ -110,12 +110,7 @@ public void readFrom(StreamInput in) throws IOException { @Override public void writeTo(StreamOutput out) throws IOException { - if (shardTarget == null) { - out.writeBoolean(false); - } else { - out.writeBoolean(true); - shardTarget.writeTo(out); - } + out.writeOptionalWriteable(shardTarget); out.writeString(reason); RestStatus.writeTo(out, status); out.writeException(cause); @@ -175,7 +170,7 @@ public static ShardSearchFailure fromXContent(XContentParser parser) throws IOEx SearchShardTarget searchShardTarget = null; if (nodeId != null) { searchShardTarget = new SearchShardTarget(nodeId, - new ShardId(new Index(indexName, IndexMetaData.INDEX_UUID_NA_VALUE), shardId), clusterAlias, OriginalIndices.NONE); + new ShardId(new Index(indexName, IndexMetaData.INDEX_UUID_NA_VALUE), shardId), clusterAlias, OriginalIndices.NONE); } return new ShardSearchFailure(exception, searchShardTarget); } diff --git a/server/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java b/server/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java index 4c1a953965fcd..88e2764982cb4 100644 --- a/server/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java +++ b/server/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java @@ -34,6 +34,7 @@ import org.elasticsearch.cluster.routing.GroupShardsIterator; import 
org.elasticsearch.cluster.routing.ShardIterator; import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.Nullable; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.settings.Setting; @@ -41,6 +42,7 @@ import org.elasticsearch.index.Index; import org.elasticsearch.index.query.Rewriteable; import org.elasticsearch.index.shard.ShardId; +import org.elasticsearch.search.SearchPhaseResult; import org.elasticsearch.search.SearchService; import org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.search.internal.AliasFilter; @@ -60,6 +62,7 @@ import java.util.Set; import java.util.concurrent.Executor; import java.util.function.BiFunction; +import java.util.function.Function; import java.util.function.LongSupplier; import static org.elasticsearch.action.search.SearchType.QUERY_THEN_FETCH; @@ -178,10 +181,9 @@ long getRelativeCurrentNanos() { @Override protected void doExecute(Task task, SearchRequest searchRequest, ActionListener listener) { - final long absoluteStartMillis = System.currentTimeMillis(); final long relativeStartNanos = System.nanoTime(); final SearchTimeProvider timeProvider = - new SearchTimeProvider(absoluteStartMillis, relativeStartNanos, System::nanoTime); + new SearchTimeProvider(searchRequest.getOrCreateAbsoluteStartMillis(), relativeStartNanos, System::nanoTime); ActionListener rewriteListener = ActionListener.wrap(source -> { if (source != searchRequest.source()) { // only set it if it changed - we don't allow null values to be set but it might already be null and we want to catch @@ -311,7 +313,7 @@ private void executeSearch(SearchTask task, SearchTimeProvider timeProvider, Sea GroupShardsIterator localShardsIterator = clusterService.operationRouting().searchShards(clusterState, concreteIndices, routingMap, searchRequest.preference(), searchService.getResponseCollectorService(), nodeSearchCounts); GroupShardsIterator shardIterators = mergeShardsIterators(localShardsIterator, localIndices, - remoteShardIterators); + searchRequest.getLocalClusterAlias(), remoteShardIterators); failIfOverShardCountLimit(clusterService, shardIterators.size()); @@ -338,19 +340,37 @@ private void executeSearch(SearchTask task, SearchTimeProvider timeProvider, Sea } final DiscoveryNodes nodes = clusterState.nodes(); - BiFunction connectionLookup = (clusterName, nodeId) -> { - final DiscoveryNode discoveryNode = clusterName == null ? 
nodes.get(nodeId) : remoteConnections.apply(clusterName, nodeId); + BiFunction connectionLookup = buildConnectionLookup(searchRequest.getLocalClusterAlias(), + nodes::get, remoteConnections, searchTransportService::getConnection); + boolean preFilterSearchShards = shouldPreFilterSearchShards(searchRequest, shardIterators); + searchAsyncAction(task, searchRequest, shardIterators, timeProvider, connectionLookup, clusterState.version(), + Collections.unmodifiableMap(aliasFilter), concreteIndexBoosts, routingMap, listener, preFilterSearchShards, clusters).start(); + } + + static BiFunction buildConnectionLookup(String requestClusterAlias, + Function localNodes, + BiFunction remoteNodes, + BiFunction nodeToConnection) { + return (clusterAlias, nodeId) -> { + final DiscoveryNode discoveryNode; + final boolean remoteCluster; + if (clusterAlias == null || requestClusterAlias != null) { + assert requestClusterAlias == null || requestClusterAlias.equals(clusterAlias); + discoveryNode = localNodes.apply(nodeId); + remoteCluster = false; + } else { + discoveryNode = remoteNodes.apply(clusterAlias, nodeId); + remoteCluster = true; + } if (discoveryNode == null) { throw new IllegalStateException("no node found for id: " + nodeId); } - return searchTransportService.getConnection(clusterName, discoveryNode); + return nodeToConnection.apply(remoteCluster ? clusterAlias : null, discoveryNode); }; - boolean preFilterSearchShards = shouldPreFilterSearchShards(searchRequest, shardIterators); - searchAsyncAction(task, searchRequest, shardIterators, timeProvider, connectionLookup, clusterState.version(), - Collections.unmodifiableMap(aliasFilter), concreteIndexBoosts, routingMap, listener, preFilterSearchShards, clusters).start(); } - private boolean shouldPreFilterSearchShards(SearchRequest searchRequest, GroupShardsIterator shardIterators) { + private static boolean shouldPreFilterSearchShards(SearchRequest searchRequest, + GroupShardsIterator shardIterators) { SearchSourceBuilder source = searchRequest.source(); return searchRequest.searchType() == QUERY_THEN_FETCH && // we can't do this for DFS, as it needs to fan out to all shards all the time SearchService.canRewriteToMatchNone(source) && @@ -359,10 +379,11 @@ private boolean shouldPreFilterSearchShards(SearchRequest searchRequest, GroupSh static GroupShardsIterator mergeShardsIterators(GroupShardsIterator localShardsIterator, OriginalIndices localIndices, + @Nullable String localClusterAlias, List remoteShardIterators) { List shards = new ArrayList<>(remoteShardIterators); for (ShardIterator shardIterator : localShardsIterator) { - shards.add(new SearchShardIterator(null, shardIterator.shardId(), shardIterator.getShardRoutings(), localIndices)); + shards.add(new SearchShardIterator(localClusterAlias, shardIterator.shardId(), shardIterator.getShardRoutings(), localIndices)); } return new GroupShardsIterator<>(shards); } @@ -393,14 +414,13 @@ public void run() { }; }, clusters); } else { - AbstractSearchAsyncAction searchAsyncAction; + AbstractSearchAsyncAction searchAsyncAction; switch (searchRequest.searchType()) { case DFS_QUERY_THEN_FETCH: searchAsyncAction = new SearchDfsQueryThenFetchAsyncAction(logger, searchTransportService, connectionLookup, aliasFilter, concreteIndexBoosts, indexRoutings, searchPhaseController, executor, searchRequest, listener, shardIterators, timeProvider, clusterStateVersion, task, clusters); break; - case QUERY_AND_FETCH: case QUERY_THEN_FETCH: searchAsyncAction = new SearchQueryThenFetchAsyncAction(logger, searchTransportService, 
connectionLookup, aliasFilter, concreteIndexBoosts, indexRoutings, searchPhaseController, executor, searchRequest, listener, diff --git a/server/src/main/java/org/elasticsearch/action/search/TransportSearchHelper.java b/server/src/main/java/org/elasticsearch/action/search/TransportSearchHelper.java index 7a0bb63478c76..c848e227af4c0 100644 --- a/server/src/main/java/org/elasticsearch/action/search/TransportSearchHelper.java +++ b/server/src/main/java/org/elasticsearch/action/search/TransportSearchHelper.java @@ -44,8 +44,8 @@ static String buildScrollId(AtomicArray searchPhase out.writeLong(searchPhaseResult.getRequestId()); SearchShardTarget searchShardTarget = searchPhaseResult.getSearchShardTarget(); if (searchShardTarget.getClusterAlias() != null) { - out.writeString(RemoteClusterAware.buildRemoteIndexName(searchShardTarget.getClusterAlias(), - searchShardTarget.getNodeId())); + out.writeString( + RemoteClusterAware.buildRemoteIndexName(searchShardTarget.getClusterAlias(), searchShardTarget.getNodeId())); } else { out.writeString(searchShardTarget.getNodeId()); } diff --git a/server/src/main/java/org/elasticsearch/action/update/UpdateRequest.java b/server/src/main/java/org/elasticsearch/action/update/UpdateRequest.java index cd0b5629b7600..a7805b4cbdbad 100644 --- a/server/src/main/java/org/elasticsearch/action/update/UpdateRequest.java +++ b/server/src/main/java/org/elasticsearch/action/update/UpdateRequest.java @@ -91,7 +91,8 @@ public class UpdateRequest extends InstanceShardOperationRequest ObjectParser.ValueType.OBJECT_ARRAY_BOOLEAN_OR_STRING); } - private String type = MapperService.SINGLE_MAPPING_NAME; + // Set to null initially so we can know to override in bulk requests that have a default type. + private String type; private String id; @Nullable private String routing; @@ -146,7 +147,7 @@ public ActionRequestValidationException validate() { if(upsertRequest != null && upsertRequest.version() != Versions.MATCH_ANY) { validationException = addValidationError("can't provide version in upsert request", validationException); } - if (Strings.isEmpty(type)) { + if (Strings.isEmpty(type())) { validationException = addValidationError("type is missing", validationException); } if (Strings.isEmpty(id)) { @@ -189,6 +190,9 @@ public ActionRequestValidationException validate() { @Deprecated @Override public String type() { + if (type == null) { + return MapperService.SINGLE_MAPPING_NAME; + } return type; } @@ -203,6 +207,21 @@ public UpdateRequest type(String type) { return this; } + /** + * Sets the default type supplied to a bulk + * request if this individual request's type is null + * or empty. + * @deprecated Types are in the process of being removed. + */ + @Deprecated + @Override + public UpdateRequest defaultTypeIfNull(String defaultType) { + if (Strings.isNullOrEmpty(type)) { + type = defaultType; + } + return this; + } + /** * The id of the indexed document. */ @@ -800,7 +819,9 @@ public void readFrom(StreamInput in) throws IOException { public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); waitForActiveShards.writeTo(out); - out.writeString(type); + // A 7.x request allows null types, but if deserialized on a 6.x node it will cause NullPointerExceptions. + // So we use the type accessor method here to make the type non-null (will default it to "_doc"). 
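+ // Editorial sketch (not part of the original change) illustrating the defaulting behaviour wired up above:
+ //   UpdateRequest r = new UpdateRequest();  // the internal type field starts out null
+ //   r.defaultTypeIfNull("my_type");         // a bulk-level default fills it in, so type() -> "my_type"
+ //   // with no default applied, type() falls back to MapperService.SINGLE_MAPPING_NAME, i.e. "_doc"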
+ out.writeString(type()); out.writeString(id); out.writeOptionalString(routing); if (out.getVersion().before(Version.V_7_0_0)) { @@ -887,7 +908,7 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws public String toString() { StringBuilder res = new StringBuilder() .append("update {[").append(index) - .append("][").append(type) + .append("][").append(type()) .append("][").append(id).append("]"); res.append(", doc_as_upsert[").append(docAsUpsert).append("]"); if (doc != null) { diff --git a/server/src/main/java/org/elasticsearch/bootstrap/BootstrapChecks.java b/server/src/main/java/org/elasticsearch/bootstrap/BootstrapChecks.java index eb53cbaef70ba..e77568105db7d 100644 --- a/server/src/main/java/org/elasticsearch/bootstrap/BootstrapChecks.java +++ b/server/src/main/java/org/elasticsearch/bootstrap/BootstrapChecks.java @@ -407,8 +407,8 @@ static class MaxMapCountCheck implements BootstrapCheck { @Override public BootstrapCheckResult check(final BootstrapContext context) { - // we only enforce the check if mmapfs is an allowed store type - if (IndexModule.NODE_STORE_ALLOW_MMAPFS.get(context.settings())) { + // we only enforce the check if a store is allowed to use mmap at all + if (IndexModule.NODE_STORE_ALLOW_MMAP.get(context.settings())) { if (getMaxMapCount() != -1 && getMaxMapCount() < LIMIT) { final String message = String.format( Locale.ROOT, diff --git a/server/src/main/java/org/elasticsearch/cluster/ClusterState.java b/server/src/main/java/org/elasticsearch/cluster/ClusterState.java index 14493e276ed48..8dd7291410ecc 100644 --- a/server/src/main/java/org/elasticsearch/cluster/ClusterState.java +++ b/server/src/main/java/org/elasticsearch/cluster/ClusterState.java @@ -69,6 +69,8 @@ import java.util.Optional; import java.util.Set; +import static org.elasticsearch.cluster.coordination.Coordinator.ZEN1_BWC_TERM; + /** * Represents the current state of the cluster. *

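Editorial aside, not part of the patch: the hunk below adds getVersionOrMetaDataVersion(), which the PreVoteCollector change later in this diff uses as its freshness key when comparing cluster states across a Zen1-to-Zen2 rolling upgrade. A minimal sketch of that comparison, where the helper name peerIsFresher is hypothetical:

    // Returns true if a peer's last-accepted state is fresher than our local state,
    // comparing terms first and falling back to the upgrade-safe version key.
    static boolean peerIsFresher(long peerTerm, long peerVersion, ClusterState local) {
        return peerTerm > local.term()
            || (peerTerm == local.term() && peerVersion > local.getVersionOrMetaDataVersion());
    }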
@@ -210,6 +212,12 @@ public long getVersion() { return version(); } + public long getVersionOrMetaDataVersion() { + // When following a Zen1 master, the cluster state version is not guaranteed to increase, so instead it is preferable to use the + // metadata version to determine the freshest node. However, when following a Zen2 master, the cluster state version should be used. + return term() == ZEN1_BWC_TERM ? metaData().version() : version(); + } + /** * This stateUUID is automatically generated for each version of cluster state. It is used to make sure that * we are applying diffs to the right previous state. diff --git a/server/src/main/java/org/elasticsearch/cluster/block/ClusterBlock.java b/server/src/main/java/org/elasticsearch/cluster/block/ClusterBlock.java index fafd397722025..5713462b9212f 100644 --- a/server/src/main/java/org/elasticsearch/cluster/block/ClusterBlock.java +++ b/server/src/main/java/org/elasticsearch/cluster/block/ClusterBlock.java @@ -19,6 +19,8 @@ package org.elasticsearch.cluster.block; +import org.elasticsearch.Version; +import org.elasticsearch.common.Nullable; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Streamable; @@ -30,29 +32,31 @@ import java.util.ArrayList; import java.util.EnumSet; import java.util.Locale; +import java.util.Objects; public class ClusterBlock implements Streamable, ToXContentFragment { private int id; - + private @Nullable String uuid; private String description; - private EnumSet levels; - private boolean retryable; - private boolean disableStatePersistence = false; - private boolean allowReleaseResources; - private RestStatus status; - ClusterBlock() { + private ClusterBlock() { + } + + public ClusterBlock(int id, String description, boolean retryable, boolean disableStatePersistence, + boolean allowReleaseResources, RestStatus status, EnumSet levels) { + this(id, null, description, retryable, disableStatePersistence, allowReleaseResources, status, levels); } - public ClusterBlock(int id, String description, boolean retryable, boolean disableStatePersistence, boolean allowReleaseResources, - RestStatus status, EnumSet levels) { + public ClusterBlock(int id, String uuid, String description, boolean retryable, boolean disableStatePersistence, + boolean allowReleaseResources, RestStatus status, EnumSet levels) { this.id = id; + this.uuid = uuid; this.description = description; this.retryable = retryable; this.disableStatePersistence = disableStatePersistence; @@ -65,6 +69,10 @@ public int id() { return this.id; } + public String uuid() { + return uuid; + } + public String description() { return this.description; } @@ -104,6 +112,9 @@ public boolean disableStatePersistence() { @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(Integer.toString(id)); + if (uuid != null) { + builder.field("uuid", uuid); + } builder.field("description", description); builder.field("retryable", retryable); if (disableStatePersistence) { @@ -127,6 +138,11 @@ public static ClusterBlock readClusterBlock(StreamInput in) throws IOException { @Override public void readFrom(StreamInput in) throws IOException { id = in.readVInt(); + if (in.getVersion().onOrAfter(Version.V_7_0_0)) { + uuid = in.readOptionalString(); + } else { + uuid = null; + } description = in.readString(); final int len = in.readVInt(); ArrayList levels = new ArrayList<>(len); @@ -143,6 +159,9 @@ public void 
readFrom(StreamInput in) throws IOException { @Override public void writeTo(StreamOutput out) throws IOException { out.writeVInt(id); + if (out.getVersion().onOrAfter(Version.V_7_0_0)) { + out.writeOptionalString(uuid); + } out.writeString(description); out.writeVInt(levels.size()); for (ClusterBlockLevel level : levels) { @@ -157,7 +176,11 @@ public void writeTo(StreamOutput out) throws IOException { @Override public String toString() { StringBuilder sb = new StringBuilder(); - sb.append(id).append(",").append(description).append(", blocks "); + sb.append(id).append(","); + if (uuid != null) { + sb.append(uuid).append(','); + } + sb.append(description).append(", blocks "); String delimiter = ""; for (ClusterBlockLevel level : levels) { sb.append(delimiter).append(level.name()); @@ -168,19 +191,19 @@ public String toString() { @Override public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - - ClusterBlock that = (ClusterBlock) o; - - if (id != that.id) return false; - - return true; + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + final ClusterBlock that = (ClusterBlock) o; + return id == that.id && Objects.equals(uuid, that.uuid); } @Override public int hashCode() { - return id; + return Objects.hash(id, uuid); } public boolean isAllowReleaseResources() { diff --git a/server/src/main/java/org/elasticsearch/cluster/block/ClusterBlocks.java b/server/src/main/java/org/elasticsearch/cluster/block/ClusterBlocks.java index 6343c2f72746d..0de7bce115943 100644 --- a/server/src/main/java/org/elasticsearch/cluster/block/ClusterBlocks.java +++ b/server/src/main/java/org/elasticsearch/cluster/block/ClusterBlocks.java @@ -20,11 +20,11 @@ package org.elasticsearch.cluster.block; import com.carrotsearch.hppc.cursors.ObjectObjectCursor; - import org.elasticsearch.cluster.AbstractDiffable; import org.elasticsearch.cluster.Diff; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.MetaDataIndexStateService; +import org.elasticsearch.common.Nullable; import org.elasticsearch.common.collect.ImmutableOpenMap; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -119,7 +119,7 @@ public boolean hasGlobalBlock(ClusterBlock block) { return global.contains(block); } - public boolean hasGlobalBlock(int blockId) { + public boolean hasGlobalBlockWithId(final int blockId) { for (ClusterBlock clusterBlock : global) { if (clusterBlock.id() == blockId) { return true; @@ -128,14 +128,14 @@ public boolean hasGlobalBlock(int blockId) { return false; } - public boolean hasGlobalBlock(ClusterBlockLevel level) { + public boolean hasGlobalBlockWithLevel(ClusterBlockLevel level) { return global(level).size() > 0; } /** * Is there a global block with the provided status? 
*/ - public boolean hasGlobalBlock(RestStatus status) { + public boolean hasGlobalBlockWithStatus(final RestStatus status) { for (ClusterBlock clusterBlock : global) { if (clusterBlock.status().equals(status)) { return true; @@ -148,6 +148,31 @@ public boolean hasIndexBlock(String index, ClusterBlock block) { return indicesBlocks.containsKey(index) && indicesBlocks.get(index).contains(block); } + public boolean hasIndexBlockWithId(String index, int blockId) { + final Set clusterBlocks = indicesBlocks.get(index); + if (clusterBlocks != null) { + for (ClusterBlock clusterBlock : clusterBlocks) { + if (clusterBlock.id() == blockId) { + return true; + } + } + } + return false; + } + + @Nullable + public ClusterBlock getIndexBlockWithId(final String index, final int blockId) { + final Set clusterBlocks = indicesBlocks.get(index); + if (clusterBlocks != null) { + for (ClusterBlock clusterBlock : clusterBlocks) { + if (clusterBlock.id() == blockId) { + return clusterBlock; + } + } + } + return null; + } + public void globalBlockedRaiseException(ClusterBlockLevel level) throws ClusterBlockException { ClusterBlockException blockException = globalBlockedException(level); if (blockException != null) { @@ -404,6 +429,18 @@ public Builder removeIndexBlock(String index, ClusterBlock block) { return this; } + public Builder removeIndexBlockWithId(String index, int blockId) { + final Set indexBlocks = indices.get(index); + if (indexBlocks == null) { + return this; + } + indexBlocks.removeIf(block -> block.id() == blockId); + if (indexBlocks.isEmpty()) { + indices.remove(index); + } + return this; + } + public ClusterBlocks build() { // We copy the block sets here in case of the builder is modified after build is called ImmutableOpenMap.Builder> indicesBuilder = ImmutableOpenMap.builder(indices.size()); diff --git a/server/src/main/java/org/elasticsearch/cluster/coordination/ClusterBootstrapService.java b/server/src/main/java/org/elasticsearch/cluster/coordination/ClusterBootstrapService.java index dfb9f06c854b0..fc3f4493104fc 100644 --- a/server/src/main/java/org/elasticsearch/cluster/coordination/ClusterBootstrapService.java +++ b/server/src/main/java/org/elasticsearch/cluster/coordination/ClusterBootstrapService.java @@ -54,11 +54,6 @@ public class ClusterBootstrapService { private static final Logger logger = LogManager.getLogger(ClusterBootstrapService.class); - // The number of master-eligible nodes which, if discovered, can be used to bootstrap the cluster. This setting is unsafe in the event - // that more master nodes are started than expected. 
- public static final Setting INITIAL_MASTER_NODE_COUNT_SETTING = - Setting.intSetting("cluster.unsafe_initial_master_node_count", 0, 0, Property.NodeScope); - public static final Setting> INITIAL_MASTER_NODES_SETTING = Setting.listSetting("cluster.initial_master_nodes", Collections.emptyList(), Function.identity(), Property.NodeScope); @@ -66,7 +61,6 @@ public class ClusterBootstrapService { Setting.timeSetting("discovery.unconfigured_bootstrap_timeout", TimeValue.timeValueSeconds(3), TimeValue.timeValueMillis(1), Property.NodeScope); - private final int initialMasterNodeCount; private final List initialMasterNodes; @Nullable private final TimeValue unconfiguredBootstrapTimeout; @@ -74,15 +68,14 @@ public class ClusterBootstrapService { private volatile boolean running; public ClusterBootstrapService(Settings settings, TransportService transportService) { - initialMasterNodeCount = INITIAL_MASTER_NODE_COUNT_SETTING.get(settings); initialMasterNodes = INITIAL_MASTER_NODES_SETTING.get(settings); unconfiguredBootstrapTimeout = discoveryIsConfigured(settings) ? null : UNCONFIGURED_BOOTSTRAP_TIMEOUT_SETTING.get(settings); this.transportService = transportService; } public static boolean discoveryIsConfigured(Settings settings) { - return Stream.of(DISCOVERY_HOSTS_PROVIDER_SETTING, DISCOVERY_ZEN_PING_UNICAST_HOSTS_SETTING, - INITIAL_MASTER_NODE_COUNT_SETTING, INITIAL_MASTER_NODES_SETTING).anyMatch(s -> s.exists(settings)); + return Stream.of(DISCOVERY_HOSTS_PROVIDER_SETTING, DISCOVERY_ZEN_PING_UNICAST_HOSTS_SETTING, INITIAL_MASTER_NODES_SETTING) + .anyMatch(s -> s.exists(settings)); } public void start() { @@ -144,17 +137,14 @@ public String toString() { }); } - } else if (initialMasterNodeCount > 0 || initialMasterNodes.isEmpty() == false) { - logger.debug("unsafely waiting for discovery of [{}] master-eligible nodes", initialMasterNodeCount); + } else if (initialMasterNodes.isEmpty() == false) { + logger.debug("waiting for discovery of master-eligible nodes matching {}", initialMasterNodes); final ThreadContext threadContext = transportService.getThreadPool().getThreadContext(); try (ThreadContext.StoredContext ignore = threadContext.stashContext()) { threadContext.markAsSystemContext(); final GetDiscoveredNodesRequest request = new GetDiscoveredNodesRequest(); - if (initialMasterNodeCount > 0) { - request.setWaitForNodes(initialMasterNodeCount); - } request.setRequiredNodes(initialMasterNodes); request.setTimeout(null); logger.trace("sending {}", request); @@ -162,7 +152,6 @@ public String toString() { new TransportResponseHandler() { @Override public void handleResponse(GetDiscoveredNodesResponse response) { - assert response.getNodes().size() >= initialMasterNodeCount; assert response.getNodes().stream().allMatch(DiscoveryNode::isMasterNode); logger.debug("discovered {}, starting to bootstrap", response.getNodes()); awaitBootstrap(response.getBootstrapConfiguration()); diff --git a/server/src/main/java/org/elasticsearch/cluster/coordination/ClusterFormationFailureHelper.java b/server/src/main/java/org/elasticsearch/cluster/coordination/ClusterFormationFailureHelper.java index dc6de08f74c6b..6338b49f5d14f 100644 --- a/server/src/main/java/org/elasticsearch/cluster/coordination/ClusterFormationFailureHelper.java +++ b/server/src/main/java/org/elasticsearch/cluster/coordination/ClusterFormationFailureHelper.java @@ -42,7 +42,6 @@ import java.util.stream.StreamSupport; import static org.elasticsearch.cluster.coordination.ClusterBootstrapService.INITIAL_MASTER_NODES_SETTING; -import static 
org.elasticsearch.cluster.coordination.ClusterBootstrapService.INITIAL_MASTER_NODE_COUNT_SETTING; public class ClusterFormationFailureHelper { private static final Logger logger = LogManager.getLogger(ClusterFormationFailureHelper.class); @@ -148,23 +147,13 @@ String getDescription() { final String bootstrappingDescription; - if (INITIAL_MASTER_NODE_COUNT_SETTING.get(Settings.EMPTY).equals(INITIAL_MASTER_NODE_COUNT_SETTING.get(settings)) - && INITIAL_MASTER_NODES_SETTING.get(Settings.EMPTY).equals(INITIAL_MASTER_NODES_SETTING.get(settings))) { + if (INITIAL_MASTER_NODES_SETTING.get(Settings.EMPTY).equals(INITIAL_MASTER_NODES_SETTING.get(settings))) { bootstrappingDescription = "[" + INITIAL_MASTER_NODES_SETTING.getKey() + "] is empty on this node"; - } else if (INITIAL_MASTER_NODES_SETTING.get(Settings.EMPTY).equals(INITIAL_MASTER_NODES_SETTING.get(settings))) { - bootstrappingDescription = String.format(Locale.ROOT, - "this node must discover at least [%d] master-eligible nodes to bootstrap a cluster", - INITIAL_MASTER_NODE_COUNT_SETTING.get(settings)); - } else if (INITIAL_MASTER_NODE_COUNT_SETTING.get(settings) <= INITIAL_MASTER_NODES_SETTING.get(settings).size()) { + } else { // TODO update this when we can bootstrap on only a quorum of the initial nodes bootstrappingDescription = String.format(Locale.ROOT, "this node must discover master-eligible nodes %s to bootstrap a cluster", INITIAL_MASTER_NODES_SETTING.get(settings)); - } else { - // TODO update this when we can bootstrap on only a quorum of the initial nodes - bootstrappingDescription = String.format(Locale.ROOT, - "this node must discover at least [%d] master-eligible nodes, including %s, to bootstrap a cluster", - INITIAL_MASTER_NODE_COUNT_SETTING.get(settings), INITIAL_MASTER_NODES_SETTING.get(settings)); } return String.format(Locale.ROOT, diff --git a/server/src/main/java/org/elasticsearch/cluster/coordination/CoordinationState.java b/server/src/main/java/org/elasticsearch/cluster/coordination/CoordinationState.java index 4cea05726b0ba..fc511870b332e 100644 --- a/server/src/main/java/org/elasticsearch/cluster/coordination/CoordinationState.java +++ b/server/src/main/java/org/elasticsearch/cluster/coordination/CoordinationState.java @@ -86,6 +86,10 @@ public long getLastAcceptedVersion() { return getLastAcceptedState().version(); } + private long getLastAcceptedVersionOrMetaDataVersion() { + return getLastAcceptedState().getVersionOrMetaDataVersion(); + } + public VotingConfiguration getLastCommittedConfiguration() { return getLastAcceptedState().getLastCommittedConfiguration(); } @@ -126,27 +130,29 @@ public boolean joinVotesHaveQuorumFor(VotingConfiguration votingConfiguration) { /** * Used to bootstrap a cluster by injecting the initial state and configuration. * - * @param initialState The initial state to use. Must have term 0, version 1, and non-empty configurations. + * @param initialState The initial state to use. Must have term 0, version equal to the last-accepted version, and non-empty + * configurations. * @throws CoordinationStateRejectedException if the arguments were incompatible with the current state of this object. 
*/ public void setInitialState(ClusterState initialState) { - final long lastAcceptedVersion = getLastAcceptedVersion(); - if (lastAcceptedVersion != 0) { - logger.debug("setInitialState: rejecting since last-accepted version {} > 0", lastAcceptedVersion); - throw new CoordinationStateRejectedException("initial state already set: last-accepted version now " + lastAcceptedVersion); + + final VotingConfiguration lastAcceptedConfiguration = getLastAcceptedConfiguration(); + if (lastAcceptedConfiguration.isEmpty() == false) { + logger.debug("setInitialState: rejecting since last-accepted configuration is nonempty: {}", lastAcceptedConfiguration); + throw new CoordinationStateRejectedException( + "initial state already set: last-accepted configuration now " + lastAcceptedConfiguration); } assert getLastAcceptedTerm() == 0 : getLastAcceptedTerm(); - assert getLastAcceptedConfiguration().isEmpty() : getLastAcceptedConfiguration(); assert getLastCommittedConfiguration().isEmpty() : getLastCommittedConfiguration(); - assert lastPublishedVersion == 0 : lastAcceptedVersion; + assert lastPublishedVersion == 0 : lastPublishedVersion; assert lastPublishedConfiguration.isEmpty() : lastPublishedConfiguration; assert electionWon == false; assert joinVotes.isEmpty() : joinVotes; assert publishVotes.isEmpty() : publishVotes; - assert initialState.term() == 0 : initialState; - assert initialState.version() == 1 : initialState; + assert initialState.term() == 0 : initialState + " should have term 0"; + assert initialState.version() == getLastAcceptedVersion() : initialState + " should have version " + getLastAcceptedVersion(); assert initialState.getLastAcceptedConfiguration().isEmpty() == false; assert initialState.getLastCommittedConfiguration().isEmpty() == false; @@ -191,7 +197,8 @@ public Join handleStartJoin(StartJoinRequest startJoinRequest) { joinVotes = new VoteCollection(); publishVotes = new VoteCollection(); - return new Join(localNode, startJoinRequest.getSourceNode(), getCurrentTerm(), getLastAcceptedTerm(), getLastAcceptedVersion()); + return new Join(localNode, startJoinRequest.getSourceNode(), getCurrentTerm(), getLastAcceptedTerm(), + getLastAcceptedVersionOrMetaDataVersion()); } /** @@ -224,20 +231,22 @@ public boolean handleJoin(Join join) { " of join higher than current last accepted term " + lastAcceptedTerm); } - if (join.getLastAcceptedTerm() == lastAcceptedTerm && join.getLastAcceptedVersion() > getLastAcceptedVersion()) { - logger.debug("handleJoin: ignored join as joiner has a better last accepted version (expected: <=[{}], actual: [{}])", - getLastAcceptedVersion(), join.getLastAcceptedVersion()); + if (join.getLastAcceptedTerm() == lastAcceptedTerm && join.getLastAcceptedVersion() > getLastAcceptedVersionOrMetaDataVersion()) { + logger.debug( + "handleJoin: ignored join as joiner has a better last accepted version (expected: <=[{}], actual: [{}]) in term {}", + getLastAcceptedVersionOrMetaDataVersion(), join.getLastAcceptedVersion(), lastAcceptedTerm); throw new CoordinationStateRejectedException("incoming last accepted version " + join.getLastAcceptedVersion() + - " of join higher than current last accepted version " + getLastAcceptedVersion()); + " of join higher than current last accepted version " + getLastAcceptedVersionOrMetaDataVersion() + + " in term " + lastAcceptedTerm); } - if (getLastAcceptedVersion() == 0) { + if (getLastAcceptedConfiguration().isEmpty()) { // We do not check for an election won on setting the initial configuration, so it would be possible to end up in 
a state where // we have enough join votes to have won the election immediately on setting the initial configuration. It'd be quite // complicated to restore all the appropriate invariants when setting the initial configuration (it's not just electionWon) // so instead we just reject join votes received prior to receiving the initial configuration. - logger.debug("handleJoin: ignored join because initial configuration not set"); - throw new CoordinationStateRejectedException("initial configuration not set"); + logger.debug("handleJoin: rejecting join since this node has not received its initial configuration yet"); + throw new CoordinationStateRejectedException("rejecting join since this node has not received its initial configuration yet"); } boolean added = joinVotes.addVote(join.getSourceNode()); diff --git a/server/src/main/java/org/elasticsearch/cluster/coordination/Coordinator.java b/server/src/main/java/org/elasticsearch/cluster/coordination/Coordinator.java index 01b2fae997749..233423a391c19 100644 --- a/server/src/main/java/org/elasticsearch/cluster/coordination/Coordinator.java +++ b/server/src/main/java/org/elasticsearch/cluster/coordination/Coordinator.java @@ -28,7 +28,6 @@ import org.elasticsearch.cluster.ClusterChangedEvent; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.ClusterState; -import org.elasticsearch.cluster.ClusterState.Builder; import org.elasticsearch.cluster.ClusterStateTaskConfig; import org.elasticsearch.cluster.ClusterStateUpdateTask; import org.elasticsearch.cluster.block.ClusterBlocks; @@ -57,6 +56,8 @@ import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.common.util.concurrent.ListenableFuture; +import org.elasticsearch.common.xcontent.XContentHelper; +import org.elasticsearch.common.xcontent.json.JsonXContent; import org.elasticsearch.discovery.Discovery; import org.elasticsearch.discovery.DiscoverySettings; import org.elasticsearch.discovery.DiscoveryStats; @@ -70,12 +71,14 @@ import org.elasticsearch.transport.TransportService; import java.util.ArrayList; +import java.util.Collection; import java.util.HashSet; import java.util.List; import java.util.Optional; import java.util.Random; import java.util.Set; import java.util.concurrent.atomic.AtomicBoolean; +import java.util.function.BiConsumer; import java.util.function.Supplier; import java.util.stream.Collectors; import java.util.stream.StreamSupport; @@ -103,10 +106,10 @@ public class Coordinator extends AbstractLifecycleComponent implements Discovery private final NodeRemovalClusterStateTaskExecutor nodeRemovalExecutor; private final Supplier persistedStateSupplier; private final DiscoverySettings discoverySettings; - // TODO: the following two fields are package-private as some tests require access to them + // TODO: the following field is package-private as some tests require access to it // These tests can be rewritten to use public methods once Coordinator is more feature-complete final Object mutex = new Object(); - final SetOnce coordinationState = new SetOnce<>(); // initialized on start-up (see doStart) + private final SetOnce coordinationState = new SetOnce<>(); // initialized on start-up (see doStart) private volatile ClusterState applierState; // the state that should be exposed to the cluster state applier private final PeerFinder peerFinder; @@ -118,6 +121,7 @@ public class Coordinator extends AbstractLifecycleComponent implements Discovery private final LeaderChecker 
leaderChecker; private final FollowersChecker followersChecker; private final ClusterApplier clusterApplier; + private final Collection> onJoinValidators; @Nullable private Releasable electionScheduler; @Nullable @@ -140,13 +144,14 @@ public class Coordinator extends AbstractLifecycleComponent implements Discovery public Coordinator(String nodeName, Settings settings, ClusterSettings clusterSettings, TransportService transportService, NamedWriteableRegistry namedWriteableRegistry, AllocationService allocationService, MasterService masterService, Supplier persistedStateSupplier, UnicastHostsProvider unicastHostsProvider, - ClusterApplier clusterApplier, Random random) { + ClusterApplier clusterApplier, Collection> onJoinValidators, Random random) { super(settings); this.settings = settings; this.transportService = transportService; this.masterService = masterService; + this.onJoinValidators = JoinTaskExecutor.addBuiltInJoinValidators(onJoinValidators); this.joinHelper = new JoinHelper(settings, allocationService, masterService, transportService, - this::getCurrentTerm, this::handleJoinRequest, this::joinLeaderInTerm); + this::getCurrentTerm, this::handleJoinRequest, this::joinLeaderInTerm, this.onJoinValidators); this.persistedStateSupplier = persistedStateSupplier; this.discoverySettings = new DiscoverySettings(settings, clusterSettings); this.lastKnownLeader = Optional.empty(); @@ -168,7 +173,7 @@ public Coordinator(String nodeName, Settings settings, ClusterSettings clusterSe this.reconfigurator = new Reconfigurator(settings, clusterSettings); this.clusterBootstrapService = new ClusterBootstrapService(settings, transportService); this.discoveryUpgradeService = new DiscoveryUpgradeService(settings, clusterSettings, transportService, - this::isInitialConfigurationSet, joinHelper, peerFinder::getFoundPeers, this::unsafelySetConfigurationForUpgrade); + this::isInitialConfigurationSet, joinHelper, peerFinder::getFoundPeers, this::setInitialConfiguration); this.lagDetector = new LagDetector(settings, transportService.getThreadPool(), n -> removeNode(n, "lagging"), transportService::getLocalNode); this.clusterFormationFailureHelper = new ClusterFormationFailureHelper(settings, this::getClusterFormationState, @@ -210,7 +215,6 @@ private void removeNode(DiscoveryNode discoveryNode, String reason) { void onFollowerCheckRequest(FollowerCheckRequest followerCheckRequest) { synchronized (mutex) { - final long previousTerm = getCurrentTerm(); ensureTermAtLeast(followerCheckRequest.getSender(), followerCheckRequest.getTerm()); if (getCurrentTerm() != followerCheckRequest.getTerm()) { @@ -219,7 +223,11 @@ void onFollowerCheckRequest(FollowerCheckRequest followerCheckRequest) { + getCurrentTerm() + "], rejecting " + followerCheckRequest); } - if (previousTerm != getCurrentTerm()) { + // check if node has accepted a state in this term already. If not, this node has never committed a cluster state in this + // term and therefore never removed the NO_MASTER_BLOCK for this term. This logic ensures that we quickly turn a node + // into follower, even before receiving the first cluster state update, but also don't have to deal with the situation + // where we would possibly have to remove the NO_MASTER_BLOCK from the applierState when turning a candidate back to follower. 
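+ // Editorial worked example, not part of the original change: if this node last accepted a cluster
+ // state in term 3 and a follower check arrives for term 5, then after ensureTermAtLeast the
+ // comparison below reads 3 < 5, so the node becomes a follower immediately instead of waiting
+ // for the first cluster state published in the new term.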
+ if (getLastAcceptedState().term() < getCurrentTerm()) { becomeFollower("onFollowerCheckRequest", followerCheckRequest.getSender()); } } @@ -275,6 +283,11 @@ PublishWithJoinResponse handlePublishRequest(PublishRequest publishRequest) { + lastKnownLeader + ", rejecting"); } + if (publishRequest.getAcceptedState().term() > coordinationState.get().getLastAcceptedState().term()) { + // only do join validation if we have not accepted state from this master yet + onJoinValidators.forEach(a -> a.accept(getLocalNode(), publishRequest.getAcceptedState())); + } + ensureTermAtLeast(sourceNode, publishRequest.getAcceptedState().term()); final PublishResponse publishResponse = coordinationState.get().handlePublishRequest(publishRequest); @@ -387,6 +400,41 @@ private void handleJoinRequest(JoinRequest joinRequest, JoinHelper.JoinCallback logger.trace("handleJoinRequest: as {}, handling {}", mode, joinRequest); transportService.connectToNode(joinRequest.getSourceNode()); + final ClusterState stateForJoinValidation = getStateForMasterService(); + + if (stateForJoinValidation.nodes().isLocalNodeElectedMaster()) { + onJoinValidators.forEach(a -> a.accept(joinRequest.getSourceNode(), stateForJoinValidation)); + if (stateForJoinValidation.getBlocks().hasGlobalBlock(STATE_NOT_RECOVERED_BLOCK) == false) { + // we do this in a couple of places including the cluster update thread. This one here is really just best effort + // to ensure we fail as fast as possible. + JoinTaskExecutor.ensureMajorVersionBarrier(joinRequest.getSourceNode().getVersion(), + stateForJoinValidation.getNodes().getMinNodeVersion()); + } + + // validate the join on the joining node, will throw a failure if it fails the validation + joinHelper.sendValidateJoinRequest(joinRequest.getSourceNode(), stateForJoinValidation, new ActionListener() { + @Override + public void onResponse(Empty empty) { + try { + processJoinRequest(joinRequest, joinCallback); + } catch (Exception e) { + joinCallback.onFailure(e); + } + } + + @Override + public void onFailure(Exception e) { + logger.warn(() -> new ParameterizedMessage("failed to validate incoming join request from node [{}]", + joinRequest.getSourceNode()), e); + joinCallback.onFailure(new IllegalStateException("failure when sending a validation request to node", e)); + } + }); + } else { + processJoinRequest(joinRequest, joinCallback); + } + } + + private void processJoinRequest(JoinRequest joinRequest, JoinHelper.JoinCallback joinCallback) { final Optional optionalJoin = joinRequest.getOptionalJoin(); synchronized (mutex) { final CoordinationState coordState = coordinationState.get(); @@ -494,7 +542,7 @@ void becomeFollower(String method, DiscoveryNode leaderNode) { private PreVoteResponse getPreVoteResponse() { return new PreVoteResponse(getCurrentTerm(), coordinationState.get().getLastAcceptedTerm(), - coordinationState.get().getLastAcceptedVersion()); + coordinationState.get().getLastAcceptedState().getVersionOrMetaDataVersion()); } // package-visible for testing @@ -512,7 +560,7 @@ Mode getMode() { } // visible for testing - public DiscoveryNode getLocalNode() { + DiscoveryNode getLocalNode() { return transportService.getLocalNode(); } @@ -573,7 +621,7 @@ public void invariant() { assert peerFinder.getCurrentTerm() == getCurrentTerm(); assert followersChecker.getFastResponseState().term == getCurrentTerm() : followersChecker.getFastResponseState(); assert followersChecker.getFastResponseState().mode == getMode() : followersChecker.getFastResponseState(); - assert 
(applierState.nodes().getMasterNodeId() == null) == applierState.blocks().hasGlobalBlock(NO_MASTER_BLOCK_ID); + assert (applierState.nodes().getMasterNodeId() == null) == applierState.blocks().hasGlobalBlockWithId(NO_MASTER_BLOCK_ID); assert preVoteCollector.getPreVoteResponse().equals(getPreVoteResponse()) : preVoteCollector + " vs " + getPreVoteResponse(); @@ -592,7 +640,8 @@ public void invariant() { assert prevotingRound == null : prevotingRound; assert becomingMaster || getStateForMasterService().nodes().getMasterNodeId() != null : getStateForMasterService(); assert leaderChecker.leader() == null : leaderChecker.leader(); - assert applierState.nodes().getMasterNodeId() == null || getLocalNode().equals(applierState.nodes().getMasterNode()); + assert getLocalNode().equals(applierState.nodes().getMasterNode()) || + (applierState.nodes().getMasterNodeId() == null && applierState.term() < getCurrentTerm()); assert preVoteCollector.getLeader() == getLocalNode() : preVoteCollector; assert clusterFormationFailureHelper.isRunning() == false; @@ -620,7 +669,6 @@ public void invariant() { coordinationState.get().getLastAcceptedConfiguration().equals(coordinationState.get().getLastCommittedConfiguration()) : coordinationState.get().getLastAcceptedConfiguration() + " != " + coordinationState.get().getLastCommittedConfiguration(); - } else if (mode == Mode.FOLLOWER) { assert coordinationState.get().electionWon() == false : getLocalNode() + " is FOLLOWER so electionWon() should be false"; assert lastKnownLeader.isPresent() && (lastKnownLeader.get().equals(getLocalNode()) == false); @@ -632,6 +680,9 @@ public void invariant() { assert leaderChecker.currentNodeIsMaster() == false; assert lastKnownLeader.equals(Optional.of(leaderChecker.leader())); assert followersChecker.getKnownFollowers().isEmpty(); + assert lastKnownLeader.get().equals(applierState.nodes().getMasterNode()) || + (applierState.nodes().getMasterNodeId() == null && + (applierState.term() < getCurrentTerm() || applierState.version() < getLastAcceptedState().version())); assert currentPublication.map(Publication::isCommitted).orElse(true); assert preVoteCollector.getLeader().equals(lastKnownLeader.get()) : preVoteCollector; assert clusterFormationFailureHelper.isRunning() == false; @@ -699,7 +750,6 @@ public boolean setInitialConfiguration(final VotingConfiguration votingConfigura } logger.info("setting initial configuration to {}", votingConfiguration); - final Builder builder = masterService.incrementVersion(currentState); final CoordinationMetaData coordinationMetaData = CoordinationMetaData.builder(currentState.coordinationMetaData()) .lastAcceptedConfiguration(votingConfiguration) .lastCommittedConfiguration(votingConfiguration) @@ -709,57 +759,14 @@ public boolean setInitialConfiguration(final VotingConfiguration votingConfigura // automatically generate a UID for the metadata if we need to metaDataBuilder.generateClusterUuidIfNeeded(); // TODO generate UUID in bootstrapping tool? 
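// Editorial note, not part of the original change: generateClusterUuidIfNeeded() assigns a fresh
// random UUID only when none has been set yet, so setting the initial configuration on metadata
// that already carries a cluster UUID preserves that identity.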
metaDataBuilder.coordinationMetaData(coordinationMetaData); - builder.metaData(metaDataBuilder); - coordinationState.get().setInitialState(builder.build()); + + coordinationState.get().setInitialState(ClusterState.builder(currentState).metaData(metaDataBuilder).build()); preVoteCollector.update(getPreVoteResponse(), null); // pick up the change to last-accepted version startElectionScheduler(); return true; } } - private void unsafelySetConfigurationForUpgrade(VotingConfiguration votingConfiguration) { - assert Version.CURRENT.major == Version.V_6_6_0.major + 1 : "remove this method once unsafe upgrades are no longer needed"; - synchronized (mutex) { - if (mode != Mode.CANDIDATE) { - throw new IllegalStateException("Cannot overwrite configuration in mode " + mode); - } - - if (isInitialConfigurationSet()) { - throw new IllegalStateException("Cannot overwrite configuration: configuration is already set to " - + getLastAcceptedState().getLastAcceptedConfiguration()); - } - - if (lastKnownLeader.map(Coordinator::isZen1Node).orElse(false) == false) { - throw new IllegalStateException("Cannot upgrade from last-known leader: " + lastKnownLeader); - } - - if (getCurrentTerm() != ZEN1_BWC_TERM) { - throw new IllegalStateException("Cannot upgrade, term is " + getCurrentTerm()); - } - - logger.info("automatically bootstrapping during rolling upgrade, using initial configuration {}", votingConfiguration); - - final ClusterState currentState = getStateForMasterService(); - final Builder builder = masterService.incrementVersion(currentState); - builder.metaData(MetaData.builder(currentState.metaData()).coordinationMetaData( - CoordinationMetaData.builder(currentState.metaData().coordinationMetaData()) - .term(1) - .lastAcceptedConfiguration(votingConfiguration) - .lastCommittedConfiguration(votingConfiguration) - .build())); - final ClusterState newClusterState = builder.build(); - - coordinationState.get().handleStartJoin(new StartJoinRequest(getLocalNode(), newClusterState.term())); - coordinationState.get().handlePublishRequest(new PublishRequest(newClusterState)); - - followersChecker.clearCurrentNodes(); - followersChecker.updateFastResponseState(getCurrentTerm(), mode); - - peerFinder.deactivate(getLocalNode()); - peerFinder.activate(newClusterState.nodes()); - } - } - // Package-private for testing ClusterState improveConfiguration(ClusterState clusterState) { assert Thread.holdsLock(mutex) : "Coordinator mutex not held"; @@ -878,7 +885,7 @@ ClusterState getStateForMasterService() { private ClusterState clusterStateWithNoMasterBlock(ClusterState clusterState) { if (clusterState.nodes().getMasterNodeId() != null) { // remove block if it already exists before adding new one - assert clusterState.blocks().hasGlobalBlock(NO_MASTER_BLOCK_ID) == false : + assert clusterState.blocks().hasGlobalBlockWithId(NO_MASTER_BLOCK_ID) == false : "NO_MASTER_BLOCK should only be added by Coordinator"; final ClusterBlocks clusterBlocks = ClusterBlocks.builder().blocks(clusterState.blocks()).addGlobalBlock( discoverySettings.getNoMasterBlock()).build(); @@ -911,12 +918,7 @@ public void publish(ClusterChangedEvent clusterChangedEvent, ActionListener ActionListener wrapWithMutex(ActionListener listener) { return new ActionListener() { @Override diff --git a/server/src/main/java/org/elasticsearch/cluster/coordination/JoinHelper.java b/server/src/main/java/org/elasticsearch/cluster/coordination/JoinHelper.java index 0154919de74ff..8c41d7b2eaa52 100644 --- 
a/server/src/main/java/org/elasticsearch/cluster/coordination/JoinHelper.java +++ b/server/src/main/java/org/elasticsearch/cluster/coordination/JoinHelper.java @@ -21,6 +21,7 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.message.ParameterizedMessage; +import org.elasticsearch.action.ActionListener; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ClusterStateTaskConfig; import org.elasticsearch.cluster.ClusterStateTaskListener; @@ -40,15 +41,18 @@ import org.elasticsearch.discovery.zen.ZenDiscovery; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.threadpool.ThreadPool.Names; +import org.elasticsearch.transport.EmptyTransportResponseHandler; import org.elasticsearch.transport.TransportChannel; import org.elasticsearch.transport.TransportException; import org.elasticsearch.transport.TransportRequest; import org.elasticsearch.transport.TransportRequestOptions; +import org.elasticsearch.transport.TransportResponse; import org.elasticsearch.transport.TransportResponse.Empty; import org.elasticsearch.transport.TransportResponseHandler; import org.elasticsearch.transport.TransportService; import java.io.IOException; +import java.util.Collection; import java.util.HashMap; import java.util.LinkedHashMap; import java.util.List; @@ -64,6 +68,7 @@ public class JoinHelper { private static final Logger logger = LogManager.getLogger(JoinHelper.class); public static final String JOIN_ACTION_NAME = "internal:cluster/coordination/join"; + public static final String VALIDATE_JOIN_ACTION_NAME = "internal:cluster/coordination/join/validate"; public static final String START_JOIN_ACTION_NAME = "internal:cluster/coordination/start_join"; // the timeout for each join attempt @@ -80,7 +85,8 @@ public class JoinHelper { public JoinHelper(Settings settings, AllocationService allocationService, MasterService masterService, TransportService transportService, LongSupplier currentTermSupplier, - BiConsumer joinHandler, Function joinLeaderInTerm) { + BiConsumer joinHandler, Function joinLeaderInTerm, + Collection> joinValidators) { this.masterService = masterService; this.transportService = transportService; this.joinTimeout = JOIN_TIMEOUT_SETTING.get(settings); @@ -123,9 +129,19 @@ public ClusterTasksResult execute(ClusterState currentSta channel.sendResponse(Empty.INSTANCE); }); + transportService.registerRequestHandler(VALIDATE_JOIN_ACTION_NAME, + MembershipAction.ValidateJoinRequest::new, ThreadPool.Names.GENERIC, + (request, channel, task) -> { + joinValidators.forEach(action -> action.accept(transportService.getLocalNode(), request.getState())); + channel.sendResponse(Empty.INSTANCE); + }); + transportService.registerRequestHandler(MembershipAction.DISCOVERY_JOIN_VALIDATE_ACTION_NAME, - () -> new MembershipAction.ValidateJoinRequest(), ThreadPool.Names.GENERIC, - (request, channel, task) -> channel.sendResponse(Empty.INSTANCE)); // TODO: implement join validation + MembershipAction.ValidateJoinRequest::new, ThreadPool.Names.GENERIC, + (request, channel, task) -> { + joinValidators.forEach(action -> action.accept(transportService.getLocalNode(), request.getState())); + channel.sendResponse(Empty.INSTANCE); + }); transportService.registerRequestHandler( ZenDiscovery.DISCOVERY_REJOIN_ACTION_NAME, ZenDiscovery.RejoinClusterRequest::new, ThreadPool.Names.SAME, @@ -244,6 +260,29 @@ public String executor() { }); } + public void sendValidateJoinRequest(DiscoveryNode node, ClusterState state, 
ActionListener listener) { + final String actionName; + if (Coordinator.isZen1Node(node)) { + actionName = MembershipAction.DISCOVERY_JOIN_VALIDATE_ACTION_NAME; + } else { + actionName = VALIDATE_JOIN_ACTION_NAME; + } + transportService.sendRequest(node, actionName, + new MembershipAction.ValidateJoinRequest(state), + TransportRequestOptions.builder().withTimeout(joinTimeout).build(), + new EmptyTransportResponseHandler(ThreadPool.Names.GENERIC) { + @Override + public void handleResponse(TransportResponse.Empty response) { + listener.onResponse(response); + } + + @Override + public void handleException(TransportException exp) { + listener.onFailure(exp); + } + }); + } + public interface JoinCallback { void onSuccess(); diff --git a/server/src/main/java/org/elasticsearch/cluster/coordination/JoinTaskExecutor.java b/server/src/main/java/org/elasticsearch/cluster/coordination/JoinTaskExecutor.java index 9544cf15a0c4e..c4c76d8a8fe74 100644 --- a/server/src/main/java/org/elasticsearch/cluster/coordination/JoinTaskExecutor.java +++ b/server/src/main/java/org/elasticsearch/cluster/coordination/JoinTaskExecutor.java @@ -31,7 +31,11 @@ import org.elasticsearch.cluster.routing.allocation.AllocationService; import org.elasticsearch.discovery.DiscoverySettings; +import java.util.ArrayList; +import java.util.Collection; +import java.util.Collections; import java.util.List; +import java.util.function.BiConsumer; import static org.elasticsearch.gateway.GatewayService.STATE_NOT_RECOVERED_BLOCK; @@ -259,4 +263,15 @@ public static void ensureMajorVersionBarrier(Version joiningNodeVersion, Version "All nodes in the cluster are of a higher major [" + clusterMajor + "]."); } } + + public static Collection> addBuiltInJoinValidators( + Collection> onJoinValidators) { + final Collection> validators = new ArrayList<>(); + validators.add((node, state) -> { + ensureNodesCompatibility(node.getVersion(), state.getNodes()); + ensureIndexCompatibility(node.getVersion(), state.getMetaData()); + }); + validators.addAll(onJoinValidators); + return Collections.unmodifiableCollection(validators); + } } diff --git a/server/src/main/java/org/elasticsearch/cluster/coordination/PreVoteCollector.java b/server/src/main/java/org/elasticsearch/cluster/coordination/PreVoteCollector.java index 6137f5a6d0f77..bb15f08c58e13 100644 --- a/server/src/main/java/org/elasticsearch/cluster/coordination/PreVoteCollector.java +++ b/server/src/main/java/org/elasticsearch/cluster/coordination/PreVoteCollector.java @@ -182,7 +182,7 @@ private void handlePreVoteResponse(final PreVoteResponse response, final Discove if (response.getLastAcceptedTerm() > clusterState.term() || (response.getLastAcceptedTerm() == clusterState.term() - && response.getLastAcceptedVersion() > clusterState.version())) { + && response.getLastAcceptedVersion() > clusterState.getVersionOrMetaDataVersion())) { logger.debug("{} ignoring {} from {} as it is fresher", this, response, sender); return; } diff --git a/server/src/main/java/org/elasticsearch/cluster/coordination/PublicationTransportHandler.java b/server/src/main/java/org/elasticsearch/cluster/coordination/PublicationTransportHandler.java index 87ce488345db0..b0b91cd0980f2 100644 --- a/server/src/main/java/org/elasticsearch/cluster/coordination/PublicationTransportHandler.java +++ b/server/src/main/java/org/elasticsearch/cluster/coordination/PublicationTransportHandler.java @@ -389,7 +389,13 @@ private PublishWithJoinResponse handleIncomingPublishRequest(BytesTransportReque in.setVersion(request.version()); // If true we 
received full cluster state - otherwise diffs if (in.readBoolean()) { - final ClusterState incomingState = ClusterState.readFrom(in, transportService.getLocalNode()); + final ClusterState incomingState; + try { + incomingState = ClusterState.readFrom(in, transportService.getLocalNode()); + } catch (Exception e){ + logger.warn("unexpected error while deserializing an incoming cluster state", e); + throw e; + } fullClusterStateReceivedCount.incrementAndGet(); logger.debug("received full cluster state version [{}] with size [{}]", incomingState.version(), request.bytes().length()); @@ -400,10 +406,20 @@ private PublishWithJoinResponse handleIncomingPublishRequest(BytesTransportReque final ClusterState lastSeen = lastSeenClusterState.get(); if (lastSeen == null) { logger.debug("received diff for but don't have any local cluster state - requesting full state"); + incompatibleClusterStateDiffReceivedCount.incrementAndGet(); throw new IncompatibleClusterStateVersionException("have no local cluster state"); } else { - Diff diff = ClusterState.readDiffFrom(in, lastSeen.nodes().getLocalNode()); - final ClusterState incomingState = diff.apply(lastSeen); // might throw IncompatibleClusterStateVersionException + final ClusterState incomingState; + try { + Diff diff = ClusterState.readDiffFrom(in, lastSeen.nodes().getLocalNode()); + incomingState = diff.apply(lastSeen); // might throw IncompatibleClusterStateVersionException + } catch (IncompatibleClusterStateVersionException e) { + incompatibleClusterStateDiffReceivedCount.incrementAndGet(); + throw e; + } catch (Exception e){ + logger.warn("unexpected error while deserializing an incoming cluster state", e); + throw e; + } compatibleClusterStateDiffReceivedCount.incrementAndGet(); logger.debug("received diff cluster state version [{}] with uuid [{}], diff size [{}]", incomingState.version(), incomingState.stateUUID(), request.bytes().length()); @@ -412,12 +428,6 @@ private PublishWithJoinResponse handleIncomingPublishRequest(BytesTransportReque return response; } } - } catch (IncompatibleClusterStateVersionException e) { - incompatibleClusterStateDiffReceivedCount.incrementAndGet(); - throw e; - } catch (Exception e) { - logger.warn("unexpected error while deserializing an incoming cluster state", e); - throw e; } finally { IOUtils.close(in); } diff --git a/server/src/main/java/org/elasticsearch/cluster/health/ClusterStateHealth.java b/server/src/main/java/org/elasticsearch/cluster/health/ClusterStateHealth.java index 6d36f4dec1d26..ad1561e4f0eda 100644 --- a/server/src/main/java/org/elasticsearch/cluster/health/ClusterStateHealth.java +++ b/server/src/main/java/org/elasticsearch/cluster/health/ClusterStateHealth.java @@ -100,7 +100,7 @@ public ClusterStateHealth(final ClusterState clusterState, final String[] concre } } - if (clusterState.blocks().hasGlobalBlock(RestStatus.SERVICE_UNAVAILABLE)) { + if (clusterState.blocks().hasGlobalBlockWithStatus(RestStatus.SERVICE_UNAVAILABLE)) { computeStatus = ClusterHealthStatus.RED; } diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/IndexMetaData.java b/server/src/main/java/org/elasticsearch/cluster/metadata/IndexMetaData.java index 06d5cd6f85deb..73e3c6b67eccf 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/IndexMetaData.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/IndexMetaData.java @@ -157,6 +157,10 @@ static Setting buildNumberOfShardsSetting() { public static final Setting INDEX_NUMBER_OF_ROUTING_SHARDS_SETTING = 
Setting.intSetting("index.number_of_routing_shards", INDEX_NUMBER_OF_SHARDS_SETTING, 1, new Setting.Validator() { + @Override + public void validate(Integer value) { + } + @Override public void validate(Integer numRoutingShards, Map, Integer> settings) { Integer numShards = settings.get(INDEX_NUMBER_OF_SHARDS_SETTING); @@ -223,14 +227,14 @@ public Iterator> settings() { public static final String INDEX_ROUTING_INCLUDE_GROUP_PREFIX = "index.routing.allocation.include"; public static final String INDEX_ROUTING_EXCLUDE_GROUP_PREFIX = "index.routing.allocation.exclude"; public static final Setting.AffixSetting INDEX_ROUTING_REQUIRE_GROUP_SETTING = - Setting.prefixKeySetting(INDEX_ROUTING_REQUIRE_GROUP_PREFIX + ".", (key) -> - Setting.simpleString(key, (value, map) -> IP_VALIDATOR.accept(key, value), Property.Dynamic, Property.IndexScope)); + Setting.prefixKeySetting(INDEX_ROUTING_REQUIRE_GROUP_PREFIX + ".", key -> + Setting.simpleString(key, value -> IP_VALIDATOR.accept(key, value), Property.Dynamic, Property.IndexScope)); public static final Setting.AffixSetting INDEX_ROUTING_INCLUDE_GROUP_SETTING = - Setting.prefixKeySetting(INDEX_ROUTING_INCLUDE_GROUP_PREFIX + ".", (key) -> - Setting.simpleString(key, (value, map) -> IP_VALIDATOR.accept(key, value), Property.Dynamic, Property.IndexScope)); + Setting.prefixKeySetting(INDEX_ROUTING_INCLUDE_GROUP_PREFIX + ".", key -> + Setting.simpleString(key, value -> IP_VALIDATOR.accept(key, value), Property.Dynamic, Property.IndexScope)); public static final Setting.AffixSetting INDEX_ROUTING_EXCLUDE_GROUP_SETTING = - Setting.prefixKeySetting(INDEX_ROUTING_EXCLUDE_GROUP_PREFIX + ".", (key) -> - Setting.simpleString(key, (value, map) -> IP_VALIDATOR.accept(key, value), Property.Dynamic, Property.IndexScope)); + Setting.prefixKeySetting(INDEX_ROUTING_EXCLUDE_GROUP_PREFIX + ".", key -> + Setting.simpleString(key, value -> IP_VALIDATOR.accept(key, value), Property.Dynamic, Property.IndexScope)); public static final Setting.AffixSetting INDEX_ROUTING_INITIAL_RECOVERY_GROUP_SETTING = Setting.prefixKeySetting("index.routing.allocation.initial_recovery.", key -> Setting.simpleString(key)); // this is only setable internally not a registered setting!! 
@@ -1111,7 +1115,7 @@ public IndexMetaData build() { } int numberOfReplicas = maybeNumberOfReplicas; if (numberOfReplicas < 0) { - throw new IllegalArgumentException("must specify non-negative number of shards for index [" + index + "]"); + throw new IllegalArgumentException("must specify non-negative number of replicas for index [" + index + "]"); } int routingPartitionSize = INDEX_ROUTING_PARTITION_SIZE_SETTING.get(settings); diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/IndexTemplateMetaData.java b/server/src/main/java/org/elasticsearch/cluster/metadata/IndexTemplateMetaData.java index f9ef5786afdca..4055af3e2f460 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/IndexTemplateMetaData.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/IndexTemplateMetaData.java @@ -20,7 +20,6 @@ import com.carrotsearch.hppc.cursors.ObjectCursor; import com.carrotsearch.hppc.cursors.ObjectObjectCursor; - import org.apache.logging.log4j.LogManager; import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.Version; @@ -42,6 +41,8 @@ import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.index.mapper.MapperService; +import org.elasticsearch.rest.BaseRestHandler; import java.io.IOException; import java.util.ArrayList; @@ -342,6 +343,8 @@ public static void toXContent(IndexTemplateMetaData indexTemplateMetaData, XCont public static void toInnerXContent(IndexTemplateMetaData indexTemplateMetaData, XContentBuilder builder, ToXContent.Params params) throws IOException { + boolean includeTypeName = params.paramAsBoolean(BaseRestHandler.INCLUDE_TYPE_NAME_PARAMETER, true); + builder.field("order", indexTemplateMetaData.order()); if (indexTemplateMetaData.version() != null) { builder.field("version", indexTemplateMetaData.version()); @@ -353,18 +356,35 @@ public static void toInnerXContent(IndexTemplateMetaData indexTemplateMetaData, builder.endObject(); if (params.paramAsBoolean("reduce_mappings", false)) { - builder.startObject("mappings"); - for (ObjectObjectCursor cursor : indexTemplateMetaData.mappings()) { - byte[] mappingSource = cursor.value.uncompressed(); - Map mapping = XContentHelper.convertToMap(new BytesArray(mappingSource), true).v2(); - if (mapping.size() == 1 && mapping.containsKey(cursor.key)) { - // the type name is the root value, reduce it - mapping = (Map) mapping.get(cursor.key); + // The parameter include_type_name is only ever used in the REST API, where reduce_mappings is + // always set to true. We therefore only check for include_type_name in this branch. 
+ if (includeTypeName == false) { + Map documentMapping = null; + for (ObjectObjectCursor cursor : indexTemplateMetaData.mappings()) { + if (!cursor.key.equals(MapperService.DEFAULT_MAPPING)) { + assert documentMapping == null; + byte[] mappingSource = cursor.value.uncompressed(); + Map mapping = XContentHelper.convertToMap(new BytesArray(mappingSource), true).v2(); + documentMapping = reduceMapping(cursor.key, mapping); + } + } + + if (documentMapping != null) { + builder.field("mappings", documentMapping); + } else { + builder.startObject("mappings").endObject(); + } + } else { + builder.startObject("mappings"); + for (ObjectObjectCursor cursor : indexTemplateMetaData.mappings()) { + byte[] mappingSource = cursor.value.uncompressed(); + Map mapping = XContentHelper.convertToMap(new BytesArray(mappingSource), true).v2(); + mapping = reduceMapping(cursor.key, mapping); + builder.field(cursor.key); + builder.map(mapping); } - builder.field(cursor.key); - builder.map(mapping); + builder.endObject(); } - builder.endObject(); } else { builder.startArray("mappings"); for (ObjectObjectCursor cursor : indexTemplateMetaData.mappings()) { @@ -381,6 +401,16 @@ public static void toInnerXContent(IndexTemplateMetaData indexTemplateMetaData, builder.endObject(); } + @SuppressWarnings("unchecked") + private static Map reduceMapping(String type, Map mapping) { + if (mapping.size() == 1 && mapping.containsKey(type)) { + // the type name is the root value, reduce it + return (Map) mapping.get(type); + } else { + return mapping; + } + } + public static IndexTemplateMetaData fromXContent(XContentParser parser, String templateName) throws IOException { Builder builder = new Builder(templateName); diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/MetaDataIndexStateService.java b/server/src/main/java/org/elasticsearch/cluster/metadata/MetaDataIndexStateService.java index a36a11f805785..aa4434a0a74c6 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/MetaDataIndexStateService.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/MetaDataIndexStateService.java @@ -19,40 +19,67 @@ package org.elasticsearch.cluster.metadata; +import com.carrotsearch.hppc.cursors.IntObjectCursor; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; +import org.apache.logging.log4j.message.ParameterizedMessage; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.Version; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.NotifyOnceListener; import org.elasticsearch.action.admin.indices.close.CloseIndexClusterStateUpdateRequest; +import org.elasticsearch.action.admin.indices.close.TransportVerifyShardBeforeCloseAction; import org.elasticsearch.action.admin.indices.open.OpenIndexClusterStateUpdateRequest; import org.elasticsearch.action.support.ActiveShardsObserver; +import org.elasticsearch.action.support.master.AcknowledgedResponse; +import org.elasticsearch.action.support.replication.ReplicationResponse; import org.elasticsearch.cluster.AckedClusterStateUpdateTask; import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.ClusterStateUpdateTask; import org.elasticsearch.cluster.ack.ClusterStateUpdateResponse; import org.elasticsearch.cluster.ack.OpenIndexClusterStateUpdateResponse; import org.elasticsearch.cluster.block.ClusterBlock; import org.elasticsearch.cluster.block.ClusterBlockLevel; import org.elasticsearch.cluster.block.ClusterBlocks; +import 
org.elasticsearch.cluster.routing.IndexRoutingTable; +import org.elasticsearch.cluster.routing.IndexShardRoutingTable; import org.elasticsearch.cluster.routing.RoutingTable; import org.elasticsearch.cluster.routing.allocation.AllocationService; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.Priority; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.UUIDs; import org.elasticsearch.common.ValidationException; +import org.elasticsearch.common.collect.ImmutableOpenIntMap; import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.common.util.concurrent.AbstractRunnable; +import org.elasticsearch.common.util.concurrent.AtomicArray; +import org.elasticsearch.common.util.concurrent.ConcurrentCollections; +import org.elasticsearch.common.util.concurrent.CountDown; import org.elasticsearch.index.Index; +import org.elasticsearch.index.IndexNotFoundException; +import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.indices.IndicesService; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.snapshots.RestoreService; import org.elasticsearch.snapshots.SnapshotsService; +import org.elasticsearch.tasks.TaskId; import org.elasticsearch.threadpool.ThreadPool; import java.util.ArrayList; import java.util.Arrays; +import java.util.EnumSet; +import java.util.HashMap; import java.util.HashSet; import java.util.List; +import java.util.Map; import java.util.Optional; import java.util.Set; +import java.util.function.Consumer; +import java.util.stream.Collectors; + +import static java.util.Collections.unmodifiableMap; /** * Service responsible for submitting open/close index requests @@ -60,54 +87,130 @@ public class MetaDataIndexStateService { private static final Logger logger = LogManager.getLogger(MetaDataIndexStateService.class); + public static final int INDEX_CLOSED_BLOCK_ID = 4; public static final ClusterBlock INDEX_CLOSED_BLOCK = new ClusterBlock(4, "index closed", false, false, false, RestStatus.FORBIDDEN, ClusterBlockLevel.READ_WRITE); private final ClusterService clusterService; - private final AllocationService allocationService; - private final MetaDataIndexUpgradeService metaDataIndexUpgradeService; private final IndicesService indicesService; + private final ThreadPool threadPool; + private final TransportVerifyShardBeforeCloseAction transportVerifyShardBeforeCloseAction; private final ActiveShardsObserver activeShardsObserver; @Inject public MetaDataIndexStateService(ClusterService clusterService, AllocationService allocationService, MetaDataIndexUpgradeService metaDataIndexUpgradeService, - IndicesService indicesService, ThreadPool threadPool) { + IndicesService indicesService, ThreadPool threadPool, + TransportVerifyShardBeforeCloseAction transportVerifyShardBeforeCloseAction) { this.indicesService = indicesService; this.clusterService = clusterService; this.allocationService = allocationService; + this.threadPool = threadPool; + this.transportVerifyShardBeforeCloseAction = transportVerifyShardBeforeCloseAction; this.metaDataIndexUpgradeService = metaDataIndexUpgradeService; this.activeShardsObserver = new ActiveShardsObserver(clusterService, threadPool); } - public void closeIndices(final CloseIndexClusterStateUpdateRequest request, final ActionListener listener) { - if (request.indices() == null || request.indices().length == 0) { + /** + * Closes one or more indices. 
+ * + * Closing indices is a 3 steps process: it first adds a write block to every indices to close, then waits for the operations on shards + * to be terminated and finally closes the indices by moving their state to CLOSE. + */ + public void closeIndices(final CloseIndexClusterStateUpdateRequest request, final ActionListener listener) { + final Index[] concreteIndices = request.indices(); + if (concreteIndices == null || concreteIndices.length == 0) { throw new IllegalArgumentException("Index name is required"); } - final String indicesAsString = Arrays.toString(request.indices()); - clusterService.submitStateUpdateTask("close-indices " + indicesAsString, - new AckedClusterStateUpdateTask(Priority.URGENT, request, listener) { - @Override - protected ClusterStateUpdateResponse newResponse(boolean acknowledged) { - return new ClusterStateUpdateResponse(acknowledged); - } + clusterService.submitStateUpdateTask("add-block-index-to-close " + Arrays.toString(concreteIndices), + new ClusterStateUpdateTask(Priority.URGENT) { + + private final Map blockedIndices = new HashMap<>(); + + @Override + public ClusterState execute(final ClusterState currentState) { + return addIndexClosedBlocks(concreteIndices, blockedIndices, currentState); + } - @Override - public ClusterState execute(ClusterState currentState) { - return closeIndices(currentState, request.indices(), indicesAsString); + @Override + public void clusterStateProcessed(final String source, final ClusterState oldState, final ClusterState newState) { + if (oldState == newState) { + assert blockedIndices.isEmpty() : "List of blocked indices is not empty but cluster state wasn't changed"; + listener.onResponse(new AcknowledgedResponse(true)); + } else { + assert blockedIndices.isEmpty() == false : "List of blocked indices is empty but cluster state was changed"; + threadPool.executor(ThreadPool.Names.MANAGEMENT) + .execute(new WaitForClosedBlocksApplied(blockedIndices, request, + ActionListener.wrap(results -> + clusterService.submitStateUpdateTask("close-indices", new ClusterStateUpdateTask(Priority.URGENT) { + + boolean acknowledged = true; + + @Override + public ClusterState execute(final ClusterState currentState) throws Exception { + final ClusterState updatedState = closeRoutingTable(currentState, blockedIndices, results); + for (Map.Entry result : results.entrySet()) { + IndexMetaData updatedMetaData = updatedState.metaData().index(result.getKey()); + if (updatedMetaData != null && updatedMetaData.getState() != IndexMetaData.State.CLOSE) { + acknowledged = false; + break; + } + } + return allocationService.reroute(updatedState, "indices closed"); + } + + @Override + public void onFailure(final String source, final Exception e) { + listener.onFailure(e); + } + + @Override + public void clusterStateProcessed(final String source, + final ClusterState oldState, final ClusterState newState) { + listener.onResponse(new AcknowledgedResponse(acknowledged)); + } + }), + listener::onFailure) + ) + ); + } + } + + @Override + public void onFailure(final String source, final Exception e) { + listener.onFailure(e); + } + + @Override + public TimeValue timeout() { + return request.masterNodeTimeout(); + } } - }); + ); } - public ClusterState closeIndices(ClusterState currentState, final Index[] indices, String indicesAsString) { - Set indicesToClose = new HashSet<>(); + /** + * Step 1 - Start closing indices by adding a write block + * + * This step builds the list of indices to close (the ones explicitly requested that are not in CLOSE state) and 
adds a unique cluster + * block (or reuses an existing one) to every index to close in the cluster state. After the cluster state is published, the shards + * should start to reject writing operations and we can proceed with step 2. + */ + static ClusterState addIndexClosedBlocks(final Index[] indices, final Map blockedIndices, + final ClusterState currentState) { + final MetaData.Builder metadata = MetaData.builder(currentState.metaData()); + + final Set indicesToClose = new HashSet<>(); for (Index index : indices) { - final IndexMetaData indexMetaData = currentState.metaData().getIndexSafe(index); + final IndexMetaData indexMetaData = metadata.getSafe(index); if (indexMetaData.getState() != IndexMetaData.State.CLOSE) { indicesToClose.add(indexMetaData); + } else { + logger.debug("index {} is already closed, ignoring", index); + assert currentState.blocks().hasIndexBlock(index.getName(), INDEX_CLOSED_BLOCK); } } @@ -119,28 +222,204 @@ public ClusterState closeIndices(ClusterState currentState, final Index[] indice RestoreService.checkIndexClosing(currentState, indicesToClose); // Check if index closing conflicts with any running snapshots SnapshotsService.checkIndexClosing(currentState, indicesToClose); - logger.info("closing indices [{}]", indicesAsString); - - MetaData.Builder mdBuilder = MetaData.builder(currentState.metaData()); - ClusterBlocks.Builder blocksBuilder = ClusterBlocks.builder() - .blocks(currentState.blocks()); - for (IndexMetaData openIndexMetadata : indicesToClose) { - final String indexName = openIndexMetadata.getIndex().getName(); - mdBuilder.put(IndexMetaData.builder(openIndexMetadata).state(IndexMetaData.State.CLOSE)); - blocksBuilder.addIndexBlock(indexName, INDEX_CLOSED_BLOCK); + + // If the cluster is in a mixed version that does not support the shard close action, + // we use the previous way to close indices and directly close them without sanity checks + final boolean useDirectClose = currentState.nodes().getMinNodeVersion().before(Version.V_7_0_0); + + final ClusterBlocks.Builder blocks = ClusterBlocks.builder().blocks(currentState.blocks()); + final RoutingTable.Builder routingTable = RoutingTable.builder(currentState.routingTable()); + + for (IndexMetaData indexToClose : indicesToClose) { + final Index index = indexToClose.getIndex(); + + ClusterBlock indexBlock = null; + final Set clusterBlocks = currentState.blocks().indices().get(index.getName()); + if (clusterBlocks != null) { + for (ClusterBlock clusterBlock : clusterBlocks) { + if (clusterBlock.id() == INDEX_CLOSED_BLOCK_ID) { + // Reuse the existing index closed block + indexBlock = clusterBlock; + break; + } + } + } + if (useDirectClose) { + logger.debug("closing index {} directly", index); + metadata.put(IndexMetaData.builder(indexToClose).state(IndexMetaData.State.CLOSE)); // increment version? 
+ blocks.removeIndexBlockWithId(index.getName(), INDEX_CLOSED_BLOCK_ID); + routingTable.remove(index.getName()); + indexBlock = INDEX_CLOSED_BLOCK; + } else { + if (indexBlock == null) { + // Create a new index closed block + indexBlock = createIndexClosingBlock(); + } + assert Strings.hasLength(indexBlock.uuid()) : "Closing block should have a UUID"; + } + blocks.addIndexBlock(index.getName(), indexBlock); + blockedIndices.put(index, indexBlock); } - ClusterState updatedState = ClusterState.builder(currentState).metaData(mdBuilder).blocks(blocksBuilder).build(); + logger.info(() -> new ParameterizedMessage("closing indices {}", + blockedIndices.keySet().stream().map(Object::toString).collect(Collectors.joining(",")))); + return ClusterState.builder(currentState).blocks(blocks).metaData(metadata).routingTable(routingTable.build()).build(); + } - RoutingTable.Builder rtBuilder = RoutingTable.builder(currentState.routingTable()); - for (IndexMetaData index : indicesToClose) { - rtBuilder.remove(index.getIndex().getName()); + /** + * Step 2 - Wait for indices to be ready for closing + *
<p>
+ * This step iterates over the indices previously blocked and sends a {@link TransportVerifyShardBeforeCloseAction} to each shard. If + * this action succeed then the shard is considered to be ready for closing. When all shards of a given index are ready for closing, + * the index is considered ready to be closed. + */ + class WaitForClosedBlocksApplied extends AbstractRunnable { + + private final Map blockedIndices; + private final CloseIndexClusterStateUpdateRequest request; + private final ActionListener> listener; + + private WaitForClosedBlocksApplied(final Map blockedIndices, + final CloseIndexClusterStateUpdateRequest request, + final ActionListener> listener) { + if (blockedIndices == null || blockedIndices.isEmpty()) { + throw new IllegalArgumentException("Cannot wait for closed blocks to be applied, list of blocked indices is empty or null"); + } + this.blockedIndices = blockedIndices; + this.request = request; + this.listener = listener; + } + + @Override + public void onFailure(final Exception e) { + listener.onFailure(e); } - //no explicit wait for other nodes needed as we use AckedClusterStateUpdateTask - return allocationService.reroute( - ClusterState.builder(updatedState).routingTable(rtBuilder.build()).build(), - "indices closed [" + indicesAsString + "]"); + @Override + protected void doRun() throws Exception { + final Map results = ConcurrentCollections.newConcurrentMap(); + final CountDown countDown = new CountDown(blockedIndices.size()); + final ClusterState state = clusterService.state(); + blockedIndices.forEach((index, block) -> { + waitForShardsReadyForClosing(index, block, state, response -> { + results.put(index, response); + if (countDown.countDown()) { + listener.onResponse(unmodifiableMap(results)); + } + }); + }); + } + + private void waitForShardsReadyForClosing(final Index index, final ClusterBlock closingBlock, + final ClusterState state, final Consumer onResponse) { + final IndexMetaData indexMetaData = state.metaData().index(index); + if (indexMetaData == null) { + logger.debug("index {} has been blocked before closing and is now deleted, ignoring", index); + onResponse.accept(new AcknowledgedResponse(true)); + return; + } + final IndexRoutingTable indexRoutingTable = state.routingTable().index(index); + if (indexRoutingTable == null || indexMetaData.getState() == IndexMetaData.State.CLOSE) { + assert state.blocks().hasIndexBlock(index.getName(), INDEX_CLOSED_BLOCK); + logger.debug("index {} has been blocked before closing and is already closed, ignoring", index); + onResponse.accept(new AcknowledgedResponse(true)); + return; + } + + final ImmutableOpenIntMap shards = indexRoutingTable.getShards(); + final AtomicArray results = new AtomicArray<>(shards.size()); + final CountDown countDown = new CountDown(shards.size()); + + for (IntObjectCursor shard : shards) { + final IndexShardRoutingTable shardRoutingTable = shard.value; + final ShardId shardId = shardRoutingTable.shardId(); + sendVerifyShardBeforeCloseRequest(shardRoutingTable, closingBlock, new NotifyOnceListener() { + @Override + public void innerOnResponse(final ReplicationResponse replicationResponse) { + ReplicationResponse.ShardInfo shardInfo = replicationResponse.getShardInfo(); + results.setOnce(shardId.id(), new AcknowledgedResponse(shardInfo.getFailed() == 0)); + processIfFinished(); + } + + @Override + public void innerOnFailure(final Exception e) { + results.setOnce(shardId.id(), new AcknowledgedResponse(false)); + processIfFinished(); + } + + private void processIfFinished() { + if 
(countDown.countDown()) { + final boolean acknowledged = results.asList().stream().allMatch(AcknowledgedResponse::isAcknowledged); + onResponse.accept(new AcknowledgedResponse(acknowledged)); + } + } + }); + } + } + + private void sendVerifyShardBeforeCloseRequest(final IndexShardRoutingTable shardRoutingTable, + final ClusterBlock closingBlock, + final ActionListener listener) { + final ShardId shardId = shardRoutingTable.shardId(); + if (shardRoutingTable.primaryShard().unassigned()) { + logger.debug("primary shard {} is unassigned, ignoring", shardId); + final ReplicationResponse response = new ReplicationResponse(); + response.setShardInfo(new ReplicationResponse.ShardInfo(shardRoutingTable.size(), shardRoutingTable.size())); + listener.onResponse(response); + return; + } + final TaskId parentTaskId = new TaskId(clusterService.localNode().getId(), request.taskId()); + final TransportVerifyShardBeforeCloseAction.ShardRequest shardRequest = + new TransportVerifyShardBeforeCloseAction.ShardRequest(shardId, closingBlock, parentTaskId); + if (request.ackTimeout() != null) { + shardRequest.timeout(request.ackTimeout()); + } + transportVerifyShardBeforeCloseAction.execute(shardRequest, listener); + } + } + + /** + * Step 3 - Move index states from OPEN to CLOSE in cluster state for indices that are ready for closing. + */ + static ClusterState closeRoutingTable(final ClusterState currentState, + final Map blockedIndices, + final Map results) { + final MetaData.Builder metadata = MetaData.builder(currentState.metaData()); + final ClusterBlocks.Builder blocks = ClusterBlocks.builder().blocks(currentState.blocks()); + final RoutingTable.Builder routingTable = RoutingTable.builder(currentState.routingTable()); + + final Set closedIndices = new HashSet<>(); + for (Map.Entry result : results.entrySet()) { + final Index index = result.getKey(); + final boolean acknowledged = result.getValue().isAcknowledged(); + try { + if (acknowledged == false) { + logger.debug("verification of shards before closing {} failed", index); + continue; + } + final IndexMetaData indexMetaData = metadata.getSafe(index); + if (indexMetaData.getState() == IndexMetaData.State.CLOSE) { + logger.debug("verification of shards before closing {} succeeded but index is already closed", index); + assert currentState.blocks().hasIndexBlock(index.getName(), INDEX_CLOSED_BLOCK); + continue; + } + final ClusterBlock closingBlock = blockedIndices.get(index); + if (currentState.blocks().hasIndexBlock(index.getName(), closingBlock) == false) { + logger.debug("verification of shards before closing {} succeeded but block has been removed in the meantime", index); + continue; + } + + logger.debug("closing index {} succeeded", index); + blocks.removeIndexBlockWithId(index.getName(), INDEX_CLOSED_BLOCK_ID).addIndexBlock(index.getName(), INDEX_CLOSED_BLOCK); + metadata.put(IndexMetaData.builder(indexMetaData).state(IndexMetaData.State.CLOSE)); + routingTable.remove(index.getName()); + closedIndices.add(index.getName()); + } catch (final IndexNotFoundException e) { + logger.debug("index {} has been deleted since it was blocked before closing, ignoring", index); + } + } + logger.info("completed closing of indices {}", closedIndices); + return ClusterState.builder(currentState).blocks(blocks).metaData(metadata).routingTable(routingTable.build()).build(); } public void openIndex(final OpenIndexClusterStateUpdateRequest request, @@ -170,64 +449,73 @@ private void onlyOpenIndex(final OpenIndexClusterStateUpdateRequest request, final String 
indicesAsString = Arrays.toString(request.indices()); clusterService.submitStateUpdateTask("open-indices " + indicesAsString, - new AckedClusterStateUpdateTask(Priority.URGENT, request, listener) { - @Override - protected ClusterStateUpdateResponse newResponse(boolean acknowledged) { - return new ClusterStateUpdateResponse(acknowledged); - } - - @Override - public ClusterState execute(ClusterState currentState) { - List indicesToOpen = new ArrayList<>(); - for (Index index : request.indices()) { - final IndexMetaData indexMetaData = currentState.metaData().getIndexSafe(index); - if (indexMetaData.getState() != IndexMetaData.State.OPEN) { - indicesToOpen.add(indexMetaData); - } + new AckedClusterStateUpdateTask(Priority.URGENT, request, listener) { + @Override + protected ClusterStateUpdateResponse newResponse(boolean acknowledged) { + return new ClusterStateUpdateResponse(acknowledged); } - validateShardLimit(currentState, request.indices()); - - if (indicesToOpen.isEmpty()) { - return currentState; + @Override + public ClusterState execute(final ClusterState currentState) { + final ClusterState updatedState = openIndices(request.indices(), currentState); + //no explicit wait for other nodes needed as we use AckedClusterStateUpdateTask + return allocationService.reroute(updatedState, "indices opened [" + indicesAsString + "]"); } + } + ); + } - logger.info("opening indices [{}]", indicesAsString); - - MetaData.Builder mdBuilder = MetaData.builder(currentState.metaData()); - ClusterBlocks.Builder blocksBuilder = ClusterBlocks.builder() - .blocks(currentState.blocks()); - final Version minIndexCompatibilityVersion = currentState.getNodes().getMaxNodeVersion() - .minimumIndexCompatibilityVersion(); - for (IndexMetaData closedMetaData : indicesToOpen) { - final String indexName = closedMetaData.getIndex().getName(); - IndexMetaData indexMetaData = IndexMetaData.builder(closedMetaData).state(IndexMetaData.State.OPEN).build(); - // The index might be closed because we couldn't import it due to old incompatible version - // We need to check that this index can be upgraded to the current version - indexMetaData = metaDataIndexUpgradeService.upgradeIndexMetaData(indexMetaData, minIndexCompatibilityVersion); - try { - indicesService.verifyIndexMetadata(indexMetaData, indexMetaData); - } catch (Exception e) { - throw new ElasticsearchException("Failed to verify index " + indexMetaData.getIndex(), e); - } + ClusterState openIndices(final Index[] indices, final ClusterState currentState) { + final List indicesToOpen = new ArrayList<>(); + for (Index index : indices) { + final IndexMetaData indexMetaData = currentState.metaData().getIndexSafe(index); + if (indexMetaData.getState() != IndexMetaData.State.OPEN) { + indicesToOpen.add(indexMetaData); + } else if (currentState.blocks().hasIndexBlockWithId(index.getName(), INDEX_CLOSED_BLOCK_ID)) { + indicesToOpen.add(indexMetaData); + } + } - mdBuilder.put(indexMetaData, true); - blocksBuilder.removeIndexBlock(indexName, INDEX_CLOSED_BLOCK); + validateShardLimit(currentState, indices); + if (indicesToOpen.isEmpty()) { + return currentState; + } + + logger.info(() -> new ParameterizedMessage("opening indices [{}]", + String.join(",", indicesToOpen.stream().map(i -> (CharSequence) i.getIndex().toString())::iterator))); + + final MetaData.Builder metadata = MetaData.builder(currentState.metaData()); + final ClusterBlocks.Builder blocks = ClusterBlocks.builder().blocks(currentState.blocks()); + final Version minIndexCompatibilityVersion = 
currentState.getNodes().getMaxNodeVersion().minimumIndexCompatibilityVersion(); + + for (IndexMetaData indexMetaData : indicesToOpen) { + final Index index = indexMetaData.getIndex(); + if (indexMetaData.getState() != IndexMetaData.State.OPEN) { + IndexMetaData updatedIndexMetaData = IndexMetaData.builder(indexMetaData).state(IndexMetaData.State.OPEN).build(); + // The index might be closed because we couldn't import it due to old incompatible version + // We need to check that this index can be upgraded to the current version + updatedIndexMetaData = metaDataIndexUpgradeService.upgradeIndexMetaData(updatedIndexMetaData, minIndexCompatibilityVersion); + try { + indicesService.verifyIndexMetadata(updatedIndexMetaData, updatedIndexMetaData); + } catch (Exception e) { + throw new ElasticsearchException("Failed to verify index " + index, e); } + metadata.put(updatedIndexMetaData, true); + } - ClusterState updatedState = ClusterState.builder(currentState).metaData(mdBuilder).blocks(blocksBuilder).build(); + // Always removes index closed blocks (note: this can fail on-going close index actions) + blocks.removeIndexBlockWithId(index.getName(), INDEX_CLOSED_BLOCK_ID); + } - RoutingTable.Builder rtBuilder = RoutingTable.builder(updatedState.routingTable()); - for (IndexMetaData index : indicesToOpen) { - rtBuilder.addAsFromCloseToOpen(updatedState.metaData().getIndexSafe(index.getIndex())); - } + ClusterState updatedState = ClusterState.builder(currentState).metaData(metadata).blocks(blocks).build(); - //no explicit wait for other nodes needed as we use AckedClusterStateUpdateTask - return allocationService.reroute( - ClusterState.builder(updatedState).routingTable(rtBuilder.build()).build(), - "indices opened [" + indicesAsString + "]"); + final RoutingTable.Builder routingTable = RoutingTable.builder(updatedState.routingTable()); + for (IndexMetaData previousIndexMetaData : indicesToOpen) { + if (previousIndexMetaData.getState() != IndexMetaData.State.OPEN) { + routingTable.addAsFromCloseToOpen(updatedState.metaData().getIndexSafe(previousIndexMetaData.getIndex())); } - }); + } + return ClusterState.builder(updatedState).routingTable(routingTable.build()).build(); } /** @@ -250,7 +538,6 @@ static void validateShardLimit(ClusterState currentState, Index[] indices) { ex.addValidationError(error.get()); throw ex; } - } private static int getTotalShardCount(ClusterState state, Index index) { @@ -258,4 +545,14 @@ private static int getTotalShardCount(ClusterState state, Index index) { return indexMetaData.getNumberOfShards() * (1 + indexMetaData.getNumberOfReplicas()); } + /** + * @return Generates a {@link ClusterBlock} that blocks read and write operations on soon-to-be-closed indices. The + * cluster block is generated with the id value equals to {@link #INDEX_CLOSED_BLOCK_ID} and a unique UUID. + */ + public static ClusterBlock createIndexClosingBlock() { + return new ClusterBlock(INDEX_CLOSED_BLOCK_ID, UUIDs.randomBase64UUID(), "index preparing to close. 
Reopen the index to allow " + + "writes again or retry closing the index to fully close the index.", false, false, false, RestStatus.FORBIDDEN, + EnumSet.of(ClusterBlockLevel.WRITE)); + } + } diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/DiskThresholdSettings.java b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/DiskThresholdSettings.java index ccd64827b32f6..b8d234e9f1086 100644 --- a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/DiskThresholdSettings.java +++ b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/DiskThresholdSettings.java @@ -93,6 +93,10 @@ public DiskThresholdSettings(Settings settings, ClusterSettings clusterSettings) static final class LowDiskWatermarkValidator implements Setting.Validator<String> { + @Override + public void validate(String value) { + } + @Override public void validate(String value, Map<Setting<String>, String> settings) { final String highWatermarkRaw = settings.get(CLUSTER_ROUTING_ALLOCATION_HIGH_DISK_WATERMARK_SETTING); @@ -112,6 +116,10 @@ public Iterator<Setting<String>> settings() { static final class HighDiskWatermarkValidator implements Setting.Validator<String> { + @Override + public void validate(String value) { + } + @Override public void validate(String value, Map<Setting<String>, String> settings) { final String lowWatermarkRaw = settings.get(CLUSTER_ROUTING_ALLOCATION_LOW_DISK_WATERMARK_SETTING); @@ -131,6 +139,10 @@ public Iterator<Setting<String>> settings() { static final class FloodStageValidator implements Setting.Validator<String> { + @Override + public void validate(String value) { + } + @Override public void validate(String value, Map<Setting<String>, String> settings) { final String lowWatermarkRaw = settings.get(CLUSTER_ROUTING_ALLOCATION_LOW_DISK_WATERMARK_SETTING); diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/FilterAllocationDecider.java b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/FilterAllocationDecider.java index 053d696f6768c..7d24d46318585 100644 --- a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/FilterAllocationDecider.java +++ b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/FilterAllocationDecider.java @@ -72,14 +72,14 @@ public class FilterAllocationDecider extends AllocationDecider { private static final String CLUSTER_ROUTING_INCLUDE_GROUP_PREFIX = "cluster.routing.allocation.include"; private static final String CLUSTER_ROUTING_EXCLUDE_GROUP_PREFIX = "cluster.routing.allocation.exclude"; public static final Setting.AffixSetting<String> CLUSTER_ROUTING_REQUIRE_GROUP_SETTING = - Setting.prefixKeySetting(CLUSTER_ROUTING_REQUIRE_GROUP_PREFIX + ".", (key) -> - Setting.simpleString(key, (value, map) -> IP_VALIDATOR.accept(key, value), Property.Dynamic, Property.NodeScope)); + Setting.prefixKeySetting(CLUSTER_ROUTING_REQUIRE_GROUP_PREFIX + ".", key -> + Setting.simpleString(key, value -> IP_VALIDATOR.accept(key, value), Property.Dynamic, Property.NodeScope)); public static final Setting.AffixSetting<String> CLUSTER_ROUTING_INCLUDE_GROUP_SETTING = - Setting.prefixKeySetting(CLUSTER_ROUTING_INCLUDE_GROUP_PREFIX + ".", (key) -> - Setting.simpleString(key, (value, map) -> IP_VALIDATOR.accept(key, value), Property.Dynamic, Property.NodeScope)); + Setting.prefixKeySetting(CLUSTER_ROUTING_INCLUDE_GROUP_PREFIX + ".", key -> + Setting.simpleString(key, value -> IP_VALIDATOR.accept(key, value), Property.Dynamic, Property.NodeScope)); public static final Setting.AffixSetting<String> CLUSTER_ROUTING_EXCLUDE_GROUP_SETTING = -
Setting.prefixKeySetting(CLUSTER_ROUTING_EXCLUDE_GROUP_PREFIX + ".", (key) -> - Setting.simpleString(key, (value, map) -> IP_VALIDATOR.accept(key, value), Property.Dynamic, Property.NodeScope)); + Setting.prefixKeySetting(CLUSTER_ROUTING_EXCLUDE_GROUP_PREFIX + ".", key -> + Setting.simpleString(key, value -> IP_VALIDATOR.accept(key, value), Property.Dynamic, Property.NodeScope)); /** * The set of {@link RecoverySource.Type} values for which the diff --git a/server/src/main/java/org/elasticsearch/cluster/service/ClusterApplierService.java b/server/src/main/java/org/elasticsearch/cluster/service/ClusterApplierService.java index c331a9a137e0f..5bd441419d136 100644 --- a/server/src/main/java/org/elasticsearch/cluster/service/ClusterApplierService.java +++ b/server/src/main/java/org/elasticsearch/cluster/service/ClusterApplierService.java @@ -89,7 +89,7 @@ public class ClusterApplierService extends AbstractLifecycleComponent implements private final Collection clusterStateListeners = new CopyOnWriteArrayList<>(); private final Collection timeoutClusterStateListeners = - Collections.newSetFromMap(new ConcurrentHashMap()); + Collections.newSetFromMap(new ConcurrentHashMap<>()); private final LocalNodeMasterListeners localNodeMasterListeners; @@ -134,11 +134,15 @@ protected synchronized void doStart() { Objects.requireNonNull(nodeConnectionsService, "please set the node connection service before starting"); Objects.requireNonNull(state.get(), "please set initial state before starting"); addListener(localNodeMasterListeners); - threadPoolExecutor = EsExecutors.newSinglePrioritizing( - nodeName + "/" + CLUSTER_UPDATE_THREAD_NAME, - daemonThreadFactory(nodeName, CLUSTER_UPDATE_THREAD_NAME), - threadPool.getThreadContext(), - threadPool.scheduler()); + threadPoolExecutor = createThreadPoolExecutor(); + } + + protected PrioritizedEsThreadPoolExecutor createThreadPoolExecutor() { + return EsExecutors.newSinglePrioritizing( + nodeName + "/" + CLUSTER_UPDATE_THREAD_NAME, + daemonThreadFactory(nodeName, CLUSTER_UPDATE_THREAD_NAME), + threadPool.getThreadContext(), + threadPool.scheduler()); } class UpdateTask extends SourcePrioritizedRunnable implements Function { diff --git a/server/src/main/java/org/elasticsearch/cluster/service/ClusterService.java b/server/src/main/java/org/elasticsearch/cluster/service/ClusterService.java index f16f9ecaf207c..12d45c4fb88f4 100644 --- a/server/src/main/java/org/elasticsearch/cluster/service/ClusterService.java +++ b/server/src/main/java/org/elasticsearch/cluster/service/ClusterService.java @@ -72,10 +72,16 @@ public class ClusterService extends AbstractLifecycleComponent { private final String nodeName; public ClusterService(Settings settings, ClusterSettings clusterSettings, ThreadPool threadPool) { + this(settings, clusterSettings, new MasterService(Node.NODE_NAME_SETTING.get(settings), settings, threadPool), + new ClusterApplierService(Node.NODE_NAME_SETTING.get(settings), settings, clusterSettings, threadPool)); + } + + public ClusterService(Settings settings, ClusterSettings clusterSettings, MasterService masterService, + ClusterApplierService clusterApplierService) { super(settings); this.settings = settings; this.nodeName = Node.NODE_NAME_SETTING.get(settings); - this.masterService = new MasterService(nodeName, settings, threadPool); + this.masterService = masterService; this.operationRouting = new OperationRouting(settings, clusterSettings); this.clusterSettings = clusterSettings; this.clusterName = ClusterName.CLUSTER_NAME_SETTING.get(settings); @@ -83,7 +89,7 
@@ public ClusterService(Settings settings, ClusterSettings clusterSettings, Thread this::setSlowTaskLoggingThreshold); // Add a no-op update consumer so changes are logged this.clusterSettings.addAffixUpdateConsumer(USER_DEFINED_META_DATA, (first, second) -> {}, (first, second) -> {}); - this.clusterApplierService = new ClusterApplierService(nodeName, settings, clusterSettings, threadPool); + this.clusterApplierService = clusterApplierService; } private void setSlowTaskLoggingThreshold(TimeValue slowTaskLoggingThreshold) { diff --git a/server/src/main/java/org/elasticsearch/common/Numbers.java b/server/src/main/java/org/elasticsearch/common/Numbers.java index 7561175f3fe35..27c1dd18e97b8 100644 --- a/server/src/main/java/org/elasticsearch/common/Numbers.java +++ b/server/src/main/java/org/elasticsearch/common/Numbers.java @@ -33,48 +33,6 @@ public final class Numbers { private static final BigInteger MIN_LONG_VALUE = BigInteger.valueOf(Long.MIN_VALUE); private Numbers() { - - } - - /** - * Converts a byte array to an short. - * - * @param arr The byte array to convert to an short - * @return The int converted - */ - public static short bytesToShort(byte[] arr) { - return (short) (((arr[0] & 0xff) << 8) | (arr[1] & 0xff)); - } - - public static short bytesToShort(BytesRef bytes) { - return (short) (((bytes.bytes[bytes.offset] & 0xff) << 8) | (bytes.bytes[bytes.offset + 1] & 0xff)); - } - - /** - * Converts a byte array to an int. - * - * @param arr The byte array to convert to an int - * @return The int converted - */ - public static int bytesToInt(byte[] arr) { - return (arr[0] << 24) | ((arr[1] & 0xff) << 16) | ((arr[2] & 0xff) << 8) | (arr[3] & 0xff); - } - - public static int bytesToInt(BytesRef bytes) { - return (bytes.bytes[bytes.offset] << 24) | ((bytes.bytes[bytes.offset + 1] & 0xff) << 16) | - ((bytes.bytes[bytes.offset + 2] & 0xff) << 8) | (bytes.bytes[bytes.offset + 3] & 0xff); - } - - /** - * Converts a byte array to a long. - * - * @param arr The byte array to convert to a long - * @return The long converter - */ - public static long bytesToLong(byte[] arr) { - int high = (arr[0] << 24) | ((arr[1] & 0xff) << 16) | ((arr[2] & 0xff) << 8) | (arr[3] & 0xff); - int low = (arr[4] << 24) | ((arr[5] & 0xff) << 16) | ((arr[6] & 0xff) << 8) | (arr[7] & 0xff); - return (((long) high) << 32) | (low & 0x0ffffffffL); } public static long bytesToLong(BytesRef bytes) { @@ -85,40 +43,6 @@ public static long bytesToLong(BytesRef bytes) { return (((long) high) << 32) | (low & 0x0ffffffffL); } - /** - * Converts a byte array to float. - * - * @param arr The byte array to convert to a float - * @return The float converted - */ - public static float bytesToFloat(byte[] arr) { - return Float.intBitsToFloat(bytesToInt(arr)); - } - - public static float bytesToFloat(BytesRef bytes) { - return Float.intBitsToFloat(bytesToInt(bytes)); - } - - /** - * Converts a byte array to double. - * - * @param arr The byte array to convert to a double - * @return The double converted - */ - public static double bytesToDouble(byte[] arr) { - return Double.longBitsToDouble(bytesToLong(arr)); - } - - public static double bytesToDouble(BytesRef bytes) { - return Double.longBitsToDouble(bytesToLong(bytes)); - } - - /** - * Converts an int to a byte array. 
- * - * @param val The int to convert to a byte array - * @return The byte array converted - */ public static byte[] intToBytes(int val) { byte[] arr = new byte[4]; arr[0] = (byte) (val >>> 24); @@ -160,16 +84,6 @@ public static byte[] longToBytes(long val) { return arr; } - /** - * Converts a float to a byte array. - * - * @param val The float to convert to a byte array - * @return The byte array converted - */ - public static byte[] floatToBytes(float val) { - return intToBytes(Float.floatToRawIntBits(val)); - } - /** * Converts a double to a byte array. * diff --git a/server/src/main/java/org/elasticsearch/common/blobstore/BlobContainer.java b/server/src/main/java/org/elasticsearch/common/blobstore/BlobContainer.java index 2ecce44b55c1e..ab3971c32838b 100644 --- a/server/src/main/java/org/elasticsearch/common/blobstore/BlobContainer.java +++ b/server/src/main/java/org/elasticsearch/common/blobstore/BlobContainer.java @@ -78,9 +78,7 @@ public interface BlobContainer { /** * Reads blob content from the input stream and writes it to the container in a new blob with the given name, - * using an atomic write operation if the implementation supports it. When the BlobContainer implementation - * does not provide a specific implementation of writeBlobAtomic(String, InputStream, long), then - * the {@link #writeBlob(String, InputStream, long, boolean)} method is used. + * using an atomic write operation if the implementation supports it. * * This method assumes the container does not already contain a blob of the same blobName. If a blob by the * same name already exists, the operation will fail and an {@link IOException} will be thrown. @@ -97,11 +95,7 @@ public interface BlobContainer { * @throws FileAlreadyExistsException if failIfAlreadyExists is true and a blob by the same name already exists * @throws IOException if the input stream could not be read, or the target blob could not be written to. */ - default void writeBlobAtomic(final String blobName, final InputStream inputStream, final long blobSize, boolean failIfAlreadyExists) - throws IOException { - writeBlob(blobName, inputStream, blobSize, failIfAlreadyExists); - } - + void writeBlobAtomic(String blobName, InputStream inputStream, long blobSize, boolean failIfAlreadyExists) throws IOException; /** * Deletes a blob with giving name, if the blob exists. If the blob does not exist, * this method throws a NoSuchFileException. 
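Note: the BlobContainer change above removes the default writeBlobAtomic implementation that silently fell back to a plain, non-atomic writeBlob, so every implementation now has to state its atomicity behaviour explicitly. The usual pattern for a filesystem-backed container is to stream to a temporary file and publish it with an atomic rename. The sketch below illustrates that pattern under assumed names; it is not the repository's FsBlobContainer:

```java
import java.io.IOException;
import java.io.InputStream;
import java.nio.file.FileAlreadyExistsException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.StandardCopyOption;

final class AtomicWriteSketch {
    /** Streams the blob to a temp file, then publishes it with an atomic rename. */
    static void writeBlobAtomic(Path dir, String blobName, InputStream in,
                                boolean failIfAlreadyExists) throws IOException {
        final Path target = dir.resolve(blobName);
        if (failIfAlreadyExists && Files.exists(target)) {
            // best-effort pre-check; a real implementation also has to cope with
            // the target appearing between this check and the rename below
            throw new FileAlreadyExistsException(target.toString());
        }
        final Path tmp = Files.createTempFile(dir, "pending-", ".tmp");
        try {
            Files.copy(in, tmp, StandardCopyOption.REPLACE_EXISTING);
            // a rename within a single directory is atomic on POSIX file systems,
            // so readers see either no blob or the complete blob, never a partial one
            Files.move(tmp, target, StandardCopyOption.ATOMIC_MOVE);
        } finally {
            Files.deleteIfExists(tmp); // no-op if the move succeeded
        }
    }
}
```

With the default gone, a store that cannot provide atomicity can no longer inherit misleading behaviour by accident.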
diff --git a/server/src/main/java/org/elasticsearch/common/geo/ShapeRelation.java b/server/src/main/java/org/elasticsearch/common/geo/ShapeRelation.java index f0eba78690a6e..e2e177c8f0fd2 100644 --- a/server/src/main/java/org/elasticsearch/common/geo/ShapeRelation.java +++ b/server/src/main/java/org/elasticsearch/common/geo/ShapeRelation.java @@ -19,7 +19,7 @@ package org.elasticsearch.common.geo; -import org.apache.lucene.document.XLatLonShape.QueryRelation; +import org.apache.lucene.document.LatLonShape.QueryRelation; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; diff --git a/server/src/main/java/org/elasticsearch/common/geo/builders/PolygonBuilder.java b/server/src/main/java/org/elasticsearch/common/geo/builders/PolygonBuilder.java index 8f7876d2ba9f2..ac19642949c86 100644 --- a/server/src/main/java/org/elasticsearch/common/geo/builders/PolygonBuilder.java +++ b/server/src/main/java/org/elasticsearch/common/geo/builders/PolygonBuilder.java @@ -342,6 +342,8 @@ protected static org.apache.lucene.geo.Polygon polygonLucene(Coordinate[][] poly holes = new org.apache.lucene.geo.Polygon[polygon.length - 1]; for (int i = 0; i < holes.length; ++i) { Coordinate[] coords = polygon[i+1]; + //We do not have holes on the dateline as they get eliminated + //when breaking the polygon around it. double[] x = new double[coords.length]; double[] y = new double[coords.length]; for (int c = 0; c < coords.length; ++c) { @@ -357,7 +359,9 @@ protected static org.apache.lucene.geo.Polygon polygonLucene(Coordinate[][] poly double[] x = new double[shell.length]; double[] y = new double[shell.length]; for (int i = 0; i < shell.length; ++i) { - x[i] = normalizeLon(shell[i].x); + //Lucene Tessellator treats different +180 and -180 and we should keep the sign. + //normalizeLon method excludes -180. + x[i] = Math.abs(shell[i].x) > 180 ? normalizeLon(shell[i].x) : shell[i].x; y[i] = normalizeLat(shell[i].y); } diff --git a/server/src/main/java/org/elasticsearch/common/logging/DeprecationLogger.java b/server/src/main/java/org/elasticsearch/common/logging/DeprecationLogger.java index d1ac53fff3b99..e8f06a43a5c13 100644 --- a/server/src/main/java/org/elasticsearch/common/logging/DeprecationLogger.java +++ b/server/src/main/java/org/elasticsearch/common/logging/DeprecationLogger.java @@ -27,6 +27,8 @@ import org.elasticsearch.common.util.concurrent.ThreadContext; import java.nio.charset.Charset; +import java.security.AccessController; +import java.security.PrivilegedAction; import java.time.ZoneId; import java.time.ZonedDateTime; import java.time.format.DateTimeFormatter; @@ -298,7 +300,7 @@ void deprecated(final Set threadContexts, final String message, f deprecated(threadContexts, message, true, params); } - @SuppressLoggerChecks(reason = "safely delegates to logger") + void deprecated(final Set threadContexts, final String message, final boolean log, final Object... 
params) { final Iterator iterator = threadContexts.iterator(); @@ -318,7 +320,14 @@ void deprecated(final Set threadContexts, final String message, f } if (log) { - logger.warn(message, params); + AccessController.doPrivileged(new PrivilegedAction() { + @SuppressLoggerChecks(reason = "safely delegates to logger") + @Override + public Void run() { + logger.warn(message, params); + return null; + } + }); } } diff --git a/server/src/main/java/org/elasticsearch/common/settings/AbstractScopedSettings.java b/server/src/main/java/org/elasticsearch/common/settings/AbstractScopedSettings.java index 752a9d5aba1eb..b49f0f8225016 100644 --- a/server/src/main/java/org/elasticsearch/common/settings/AbstractScopedSettings.java +++ b/server/src/main/java/org/elasticsearch/common/settings/AbstractScopedSettings.java @@ -723,7 +723,7 @@ private boolean updateSettings(Settings toApply, Settings.Builder target, Settin } else if (get(key) == null) { throw new IllegalArgumentException(type + " setting [" + key + "], not recognized"); } else if (isDelete == false && canUpdate.test(key)) { - validate(key, toApply, false); // we might not have a full picture here do to a dependency validation + get(key).validateWithoutDependencies(toApply); // we might not have a full picture here do to a dependency validation settingsBuilder.copy(key, toApply); updates.copy(key, toApply); changed |= toApply.get(key).equals(target.get(key)) == false; diff --git a/server/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java b/server/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java index 1e9736bccc81b..9b2388aa4b73c 100644 --- a/server/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java +++ b/server/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java @@ -284,7 +284,7 @@ public void apply(Settings value, Settings current, Settings previous) { HierarchyCircuitBreakerService.REQUEST_CIRCUIT_BREAKER_OVERHEAD_SETTING, HierarchyCircuitBreakerService.ACCOUNTING_CIRCUIT_BREAKER_LIMIT_SETTING, HierarchyCircuitBreakerService.ACCOUNTING_CIRCUIT_BREAKER_OVERHEAD_SETTING, - IndexModule.NODE_STORE_ALLOW_MMAPFS, + IndexModule.NODE_STORE_ALLOW_MMAP, ClusterService.CLUSTER_SERVICE_SLOW_TASK_LOGGING_THRESHOLD_SETTING, ClusterService.USER_DEFINED_META_DATA, SearchService.DEFAULT_SEARCH_TIMEOUT_SETTING, @@ -485,7 +485,6 @@ public void apply(Settings value, Settings current, Settings previous) { Reconfigurator.CLUSTER_AUTO_SHRINK_VOTING_CONFIGURATION, TransportAddVotingConfigExclusionsAction.MAXIMUM_VOTING_CONFIG_EXCLUSIONS_SETTING, ClusterBootstrapService.INITIAL_MASTER_NODES_SETTING, - ClusterBootstrapService.INITIAL_MASTER_NODE_COUNT_SETTING, ClusterBootstrapService.UNCONFIGURED_BOOTSTRAP_TIMEOUT_SETTING, LagDetector.CLUSTER_FOLLOWER_LAG_TIMEOUT_SETTING, DiscoveryUpgradeService.BWC_PING_TIMEOUT_SETTING, diff --git a/server/src/main/java/org/elasticsearch/common/settings/IndexScopedSettings.java b/server/src/main/java/org/elasticsearch/common/settings/IndexScopedSettings.java index c4e0257c95395..1d2e54ae86d7c 100644 --- a/server/src/main/java/org/elasticsearch/common/settings/IndexScopedSettings.java +++ b/server/src/main/java/org/elasticsearch/common/settings/IndexScopedSettings.java @@ -133,6 +133,7 @@ public final class IndexScopedSettings extends AbstractScopedSettings { IndexSettings.INDEX_GC_DELETES_SETTING, IndexSettings.INDEX_SOFT_DELETES_SETTING, IndexSettings.INDEX_SOFT_DELETES_RETENTION_OPERATIONS_SETTING, + IndexSettings.INDEX_SOFT_DELETES_RETENTION_LEASE_SETTING, 
IndicesRequestCache.INDEX_CACHE_REQUEST_ENABLED_SETTING, UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING, EnableAllocationDecider.INDEX_ROUTING_REBALANCE_ENABLE_SETTING,
diff --git a/server/src/main/java/org/elasticsearch/common/settings/Setting.java b/server/src/main/java/org/elasticsearch/common/settings/Setting.java index 127f06da1a44d..9c3762f857e4a 100644 --- a/server/src/main/java/org/elasticsearch/common/settings/Setting.java +++ b/server/src/main/java/org/elasticsearch/common/settings/Setting.java @@ -186,7 +186,7 @@ private void checkPropertyRequiresIndexScope(final EnumSet properties, * @param properties properties for this setting like scope, filtering... */ public Setting(Key key, Function<Settings, String> defaultValue, Function<String, T> parser, Property... properties) { - this(key, defaultValue, parser, (v, s) -> {}, properties); + this(key, defaultValue, parser, v -> {}, properties); } /** @@ -246,7 +246,7 @@ public Setting(String key, Function defaultValue, Function fallbackSetting, Function parser, Property... properties) { - this(key, fallbackSetting, fallbackSetting::getRaw, parser, (v, m) -> {}, properties); + this(key, fallbackSetting, fallbackSetting::getRaw, parser, v -> {}, properties); } /** @@ -354,6 +354,14 @@ boolean hasComplexMatcher() { return isGroupSetting(); } + /** + * Validates the current setting value in isolation, without its dependencies, using {@link Setting.Validator#validate(Object)}. + * @param settings a settings object, used if this setting's default value depends on another setting + */ + void validateWithoutDependencies(Settings settings) { + validator.validate(get(settings, false)); + } + /** * Returns the default value string representation for this setting. * @param settings a settings object for settings that has a default value depending on another setting if available */ @@ -414,6 +422,7 @@ private T get(Settings settings, boolean validate) { } else { map = Collections.emptyMap(); } + validator.validate(parsed); validator.validate(parsed, map); } return parsed; } @@ -805,8 +814,10 @@ public Map getAsMap(Settings settings) { } /** - * Represents a validator for a setting. The {@link #validate(Object, Map)} method is invoked with the value of this setting and a map - * from the settings specified by {@link #settings()}} to their values. All these values come from the same {@link Settings} instance. + * Represents a validator for a setting. The {@link #validate(Object)} method is invoked early in the update setting process with the + * value of this setting for a fail-fast validation. Later on, the {@link #validate(Object, Map)} method is invoked with the value of + * this setting and a map from the settings specified by {@link #settings()} to their values. All these values come from the same + * {@link Settings} instance. * * @param <T> the type of the {@link Setting} */ @@ -814,17 +825,28 @@ public Map getAsMap(Settings settings) { public interface Validator<T> { /** - * The validation routine for this validator. + * Validate this setting's value in isolation. + * + * @param value the value of this setting + */ + void validate(T value); + + /** + * Validate this setting against its dependencies, specified by {@link #settings()}. The default implementation does nothing, + * accepting any value as valid as long as it passes the validation in {@link #validate(Object)}.
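For illustration, a minimal sketch of the split contract this hunk introduces: validate(Object) runs first as the isolated, fail-fast check, while the now-default validate(Object, Map) runs once the settings named by settings() are available. The demo.min/demo.max settings and the surrounding class are hypothetical, assuming the Setting API as changed above:

```java
import java.util.Collections;
import java.util.Iterator;
import java.util.Map;

import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Setting.Property;

public class RangeValidatorSketch {
    // Hypothetical dependency setting, for illustration only.
    static final Setting<Integer> MIN = Setting.intSetting("demo.min", 0, Property.NodeScope);

    static final Setting.Validator<Integer> MAX_VALIDATOR = new Setting.Validator<Integer>() {
        @Override
        public void validate(Integer value) {
            // fail-fast check, invoked early and in isolation
            if (value < 0) {
                throw new IllegalArgumentException("demo.max must be non-negative, got [" + value + "]");
            }
        }

        @Override
        public void validate(Integer value, Map<Setting<Integer>, Integer> settings) {
            // cross-settings check, invoked later with the values of settings()
            Integer min = settings.get(MIN);
            if (min != null && value < min) {
                throw new IllegalArgumentException("demo.max [" + value + "] must be >= demo.min [" + min + "]");
            }
        }

        @Override
        public Iterator<Setting<Integer>> settings() {
            return Collections.<Setting<Integer>>singletonList(MIN).iterator();
        }
    };
}
```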
* * @param value the value of this setting * @param settings a map from the settings specified by {@link #settings()}} to their values */ - void validate(T value, Map<Setting<T>, T> settings); + default void validate(T value, Map<Setting<T>, T> settings) { + } /** - * The settings needed by this validator. + * The settings on which the validity of this setting depends. The values of the specified settings are passed to + * {@link #validate(Object, Map)}. By default this returns an empty iterator, indicating that this setting does not depend on any + * other settings. * - * @return the settings needed to validate; these can be used for cross-settings validation + * @return the settings on which the validity of this setting depends. */ default Iterator<Setting<T>> settings() { return Collections.emptyIterator(); } @@ -1021,8 +1043,8 @@ public static Setting simpleString(String key, Property... properties) { return new Setting<>(key, s -> "", Function.identity(), properties); } - public static Setting<String> simpleString(String key, Function<String, String> parser, Property... properties) { - return new Setting<>(key, s -> "", parser, properties); + public static Setting<String> simpleString(String key, Validator<String> validator, Property... properties) { + return new Setting<>(new SimpleKey(key), null, s -> "", Function.identity(), validator, properties); } public static Setting<String> simpleString(String key, Setting<String> fallback, Property... properties) { @@ -1037,10 +1059,6 @@ public static Setting simpleString( return new Setting<>(key, fallback, parser, properties); } - public static Setting<String> simpleString(String key, Validator<String> validator, Property... properties) { - return new Setting<>(new SimpleKey(key), null, s -> "", Function.identity(), validator, properties); - } - /** * Creates a new Setting instance with a String value * @@ -1279,9 +1297,9 @@ private ListSetting( super( new ListKey(key), fallbackSetting, - (s) -> Setting.arrayToParsableString(defaultStringValue.apply(s)), + s -> Setting.arrayToParsableString(defaultStringValue.apply(s)), parser, - (v,s) -> {}, + v -> {}, properties); this.defaultStringValue = defaultStringValue; } @@ -1339,7 +1357,7 @@ public static Setting timeSetting( fallbackSetting, fallbackSetting::getRaw, minTimeValueParser(key, minValue), - (v, s) -> {}, + v -> {}, properties); }
diff --git a/server/src/main/java/org/elasticsearch/common/settings/Settings.java b/server/src/main/java/org/elasticsearch/common/settings/Settings.java index 06bec217acf7f..ac43a1800b40f 100644 --- a/server/src/main/java/org/elasticsearch/common/settings/Settings.java +++ b/server/src/main/java/org/elasticsearch/common/settings/Settings.java @@ -850,8 +850,8 @@ public Builder put(String key, Path path) { * @param timeValue The setting timeValue * @return The builder */ - public Builder put(String key, TimeValue timeValue) { - return put(key, timeValue.toString()); + public Builder put(final String key, final TimeValue timeValue) { + return put(key, timeValue.getStringRep()); } /** @@ -861,8 +861,8 @@ public Builder put(String key, TimeValue timeValue) { * @param byteSizeValue The setting value * @return The builder */ - public Builder put(String key, ByteSizeValue byteSizeValue) { - return put(key, byteSizeValue.toString()); + public Builder put(final String key, final ByteSizeValue byteSizeValue) { + return put(key, byteSizeValue.getStringRep()); } /** @@ -1019,8 +1019,8 @@ public Builder put(String setting, double value) { * @param value The time value * @return The builder */ - public Builder put(String setting, long value, TimeUnit timeUnit) { - put(setting,
timeUnit.toMillis(value) + "ms"); + public Builder put(final String setting, final long value, final TimeUnit timeUnit) { + put(setting, new TimeValue(value, timeUnit)); return this; } diff --git a/server/src/main/java/org/elasticsearch/common/time/DateFormatter.java b/server/src/main/java/org/elasticsearch/common/time/DateFormatter.java index 49c5e7626072b..e89317ad288c0 100644 --- a/server/src/main/java/org/elasticsearch/common/time/DateFormatter.java +++ b/server/src/main/java/org/elasticsearch/common/time/DateFormatter.java @@ -145,7 +145,7 @@ static DateFormatter forPattern(String input) { if (formatters.size() == 1) { return formatters.get(0); } - return new DateFormatters.MergedDateFormatter(input, formatters); + return DateFormatters.merge(input, formatters); } } diff --git a/server/src/main/java/org/elasticsearch/common/time/DateFormatters.java b/server/src/main/java/org/elasticsearch/common/time/DateFormatters.java index 0f1234dde0214..d3bf5eb2a641c 100644 --- a/server/src/main/java/org/elasticsearch/common/time/DateFormatters.java +++ b/server/src/main/java/org/elasticsearch/common/time/DateFormatters.java @@ -19,7 +19,6 @@ package org.elasticsearch.common.time; -import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.common.Strings; import java.time.DateTimeException; @@ -31,18 +30,17 @@ import java.time.ZonedDateTime; import java.time.format.DateTimeFormatter; import java.time.format.DateTimeFormatterBuilder; -import java.time.format.DateTimeParseException; import java.time.format.ResolverStyle; import java.time.format.SignStyle; import java.time.temporal.ChronoField; import java.time.temporal.IsoFields; import java.time.temporal.TemporalAccessor; import java.time.temporal.TemporalAdjusters; +import java.time.temporal.TemporalQueries; import java.time.temporal.WeekFields; -import java.util.Collections; +import java.util.ArrayList; import java.util.List; import java.util.Locale; -import java.util.stream.Collectors; import static java.time.temporal.ChronoField.DAY_OF_MONTH; import static java.time.temporal.ChronoField.DAY_OF_WEEK; @@ -76,21 +74,17 @@ public class DateFormatters { .appendValue(SECOND_OF_MINUTE, 2, 2, SignStyle.NOT_NEGATIVE) .toFormatter(Locale.ROOT); - private static final DateTimeFormatter STRICT_DATE_OPTIONAL_TIME_FORMATTER_1 = new DateTimeFormatterBuilder() + private static final DateTimeFormatter STRICT_DATE_OPTIONAL_TIME_PRINTER = new DateTimeFormatterBuilder() .append(STRICT_YEAR_MONTH_DAY_FORMATTER) - .optionalStart() .appendLiteral('T') .append(STRICT_HOUR_MINUTE_SECOND_FORMATTER) - .optionalStart() - .appendFraction(MILLI_OF_SECOND, 3, 3, true) - .optionalEnd() + .appendFraction(NANO_OF_SECOND, 3, 9, true) .optionalStart() .appendZoneOrOffsetId() .optionalEnd() - .optionalEnd() .toFormatter(Locale.ROOT); - private static final DateTimeFormatter STRICT_DATE_OPTIONAL_TIME_FORMATTER_2 = new DateTimeFormatterBuilder() + private static final DateTimeFormatter STRICT_DATE_OPTIONAL_TIME_FORMATTER = new DateTimeFormatterBuilder() .append(STRICT_YEAR_MONTH_DAY_FORMATTER) .optionalStart() .appendLiteral('T') @@ -99,7 +93,10 @@ public class DateFormatters { .appendFraction(MILLI_OF_SECOND, 3, 3, true) .optionalEnd() .optionalStart() - .appendOffset("+HHmm", "Z") + .appendZoneOrOffsetId() + .optionalEnd() + .optionalStart() + .append(TIME_ZONE_FORMATTER_NO_COLON) .optionalEnd() .optionalEnd() .toFormatter(Locale.ROOT); @@ -108,10 +105,9 @@ public class DateFormatters { * Returns a generic ISO datetime parser where the date is mandatory and the 
time is optional. */ private static final DateFormatter STRICT_DATE_OPTIONAL_TIME = - new JavaDateFormatter("strict_date_optional_time", STRICT_DATE_OPTIONAL_TIME_FORMATTER_1, - STRICT_DATE_OPTIONAL_TIME_FORMATTER_1, STRICT_DATE_OPTIONAL_TIME_FORMATTER_2); + new JavaDateFormatter("strict_date_optional_time", STRICT_DATE_OPTIONAL_TIME_PRINTER, STRICT_DATE_OPTIONAL_TIME_FORMATTER); - private static final DateTimeFormatter STRICT_DATE_OPTIONAL_TIME_FORMATTER_WITH_NANOS_1 = new DateTimeFormatterBuilder() + private static final DateTimeFormatter STRICT_DATE_OPTIONAL_TIME_FORMATTER_WITH_NANOS = new DateTimeFormatterBuilder() .append(STRICT_YEAR_MONTH_DAY_FORMATTER) .optionalStart() .appendLiteral('T') @@ -122,19 +118,8 @@ public class DateFormatters { .optionalStart() .appendZoneOrOffsetId() .optionalEnd() - .optionalEnd() - .toFormatter(Locale.ROOT); - - private static final DateTimeFormatter STRICT_DATE_OPTIONAL_TIME_FORMATTER_WITH_NANOS_2 = new DateTimeFormatterBuilder() - .append(STRICT_YEAR_MONTH_DAY_FORMATTER) - .optionalStart() - .appendLiteral('T') - .append(STRICT_HOUR_MINUTE_SECOND_FORMATTER) .optionalStart() - .appendFraction(NANO_OF_SECOND, 3, 9, true) - .optionalEnd() - .optionalStart() - .appendOffset("+HHmm", "Z") + .append(TIME_ZONE_FORMATTER_NO_COLON) .optionalEnd() .optionalEnd() .toFormatter(Locale.ROOT); @@ -143,8 +128,7 @@ public class DateFormatters { * Returns a generic ISO datetime parser where the date is mandatory and the time is optional with nanosecond resolution. */ private static final DateFormatter STRICT_DATE_OPTIONAL_TIME_NANOS = new JavaDateFormatter("strict_date_optional_time_nanos", - STRICT_DATE_OPTIONAL_TIME_FORMATTER_WITH_NANOS_1, - STRICT_DATE_OPTIONAL_TIME_FORMATTER_WITH_NANOS_1, STRICT_DATE_OPTIONAL_TIME_FORMATTER_WITH_NANOS_2); + STRICT_DATE_OPTIONAL_TIME_PRINTER, STRICT_DATE_OPTIONAL_TIME_FORMATTER_WITH_NANOS); ///////////////////////////////////////// // @@ -279,6 +263,8 @@ public class DateFormatters { * digit year and three digit dayOfYear (yyyyDDD'T'HHmmss.SSSZ). 
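This patch repeatedly collapses several alternative parsers into a single DateTimeFormatter by chaining appendOptional calls (see the JavaDateFormatter change later in this diff). A self-contained java.time sketch of that technique, with a class name of my own choosing:

```java
import java.time.format.DateTimeFormatter;
import java.time.format.DateTimeFormatterBuilder;
import java.util.Locale;

public class MergedParserSketch {
    public static void main(String[] args) {
        // Each appendOptional alternative is tried in order and rolled back on
        // failure, so one formatter can accept several layouts.
        DateTimeFormatter parser = new DateTimeFormatterBuilder()
            .appendOptional(DateTimeFormatter.ISO_LOCAL_DATE_TIME)
            .appendOptional(DateTimeFormatter.ISO_LOCAL_DATE)
            .toFormatter(Locale.ROOT);

        System.out.println(parser.parse("2018-12-24T10:15:30")); // matches the first alternative
        System.out.println(parser.parse("2018-12-24"));          // falls through to the second
    }
}
```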
*/ private static final DateFormatter BASIC_ORDINAL_DATE_TIME = new JavaDateFormatter("basic_ordinal_date_time", + new DateTimeFormatterBuilder().appendPattern("yyyyDDD").append(BASIC_T_TIME_PRINTER) + .appendZoneOrOffsetId().toFormatter(Locale.ROOT), new DateTimeFormatterBuilder().appendPattern("yyyyDDD").append(BASIC_T_TIME_PRINTER) .appendZoneOrOffsetId().toFormatter(Locale.ROOT), new DateTimeFormatterBuilder().appendPattern("yyyyDDD").append(BASIC_T_TIME_FORMATTER) @@ -346,10 +332,28 @@ public class DateFormatters { */ private static final DateFormatter STRICT_BASIC_WEEK_DATE_TIME_NO_MILLIS = new JavaDateFormatter("strict_basic_week_date_no_millis", new DateTimeFormatterBuilder() - .append(STRICT_BASIC_WEEK_DATE_PRINTER).append(DateTimeFormatter.ofPattern("'T'HHmmssX", Locale.ROOT)) + .append(STRICT_BASIC_WEEK_DATE_PRINTER) + .appendLiteral("T") + .appendValue(HOUR_OF_DAY, 2, 2, SignStyle.NOT_NEGATIVE) + .appendValue(MINUTE_OF_HOUR, 2, 2, SignStyle.NOT_NEGATIVE) + .appendValue(SECOND_OF_MINUTE, 2, 2, SignStyle.NOT_NEGATIVE) + .appendZoneOrOffsetId() .toFormatter(Locale.ROOT), new DateTimeFormatterBuilder() - .append(STRICT_BASIC_WEEK_DATE_FORMATTER).append(DateTimeFormatter.ofPattern("'T'HHmmssX", Locale.ROOT)) + .append(STRICT_BASIC_WEEK_DATE_PRINTER) + .appendLiteral("T") + .appendValue(HOUR_OF_DAY, 2, 2, SignStyle.NOT_NEGATIVE) + .appendValue(MINUTE_OF_HOUR, 2, 2, SignStyle.NOT_NEGATIVE) + .appendValue(SECOND_OF_MINUTE, 2, 2, SignStyle.NOT_NEGATIVE) + .appendZoneOrOffsetId() + .toFormatter(Locale.ROOT), + new DateTimeFormatterBuilder() + .append(STRICT_BASIC_WEEK_DATE_PRINTER) + .appendLiteral("T") + .appendValue(HOUR_OF_DAY, 2, 2, SignStyle.NOT_NEGATIVE) + .appendValue(MINUTE_OF_HOUR, 2, 2, SignStyle.NOT_NEGATIVE) + .appendValue(SECOND_OF_MINUTE, 2, 2, SignStyle.NOT_NEGATIVE) + .append(TIME_ZONE_FORMATTER_NO_COLON) .toFormatter(Locale.ROOT) ); @@ -363,9 +367,23 @@ public class DateFormatters { .append(DateTimeFormatter.ofPattern("'T'HHmmss.SSSX", Locale.ROOT)) .toFormatter(Locale.ROOT), new DateTimeFormatterBuilder() - .append(STRICT_BASIC_WEEK_DATE_FORMATTER) - .append(DateTimeFormatter.ofPattern("'T'HHmmss.SSSX", Locale.ROOT)) - .toFormatter(Locale.ROOT) + .append(STRICT_BASIC_WEEK_DATE_FORMATTER) + .appendLiteral("T") + .appendValue(HOUR_OF_DAY, 2, 2, SignStyle.NOT_NEGATIVE) + .appendValue(MINUTE_OF_HOUR, 2, 2, SignStyle.NOT_NEGATIVE) + .appendValue(SECOND_OF_MINUTE, 2, 2, SignStyle.NOT_NEGATIVE) + .appendFraction(MILLI_OF_SECOND, 3, 3, true) + .appendZoneOrOffsetId() + .toFormatter(Locale.ROOT), + new DateTimeFormatterBuilder() + .append(STRICT_BASIC_WEEK_DATE_FORMATTER) + .appendLiteral("T") + .appendValue(HOUR_OF_DAY, 2, 2, SignStyle.NOT_NEGATIVE) + .appendValue(MINUTE_OF_HOUR, 2, 2, SignStyle.NOT_NEGATIVE) + .appendValue(SECOND_OF_MINUTE, 2, 2, SignStyle.NOT_NEGATIVE) + .appendFraction(MILLI_OF_SECOND, 3, 3, true) + .append(TIME_ZONE_FORMATTER_NO_COLON) + .toFormatter(Locale.ROOT) ); /* @@ -447,6 +465,8 @@ public class DateFormatters { * using a four digit year and three digit dayOfYear (yyyy-DDD'T'HH:mm:ssZZ). 
*/ private static final DateFormatter STRICT_ORDINAL_DATE_TIME_NO_MILLIS = new JavaDateFormatter("strict_ordinal_date_time_no_millis", + new DateTimeFormatterBuilder().append(STRICT_ORDINAL_DATE_TIME_NO_MILLIS_BASE) + .appendZoneOrOffsetId().toFormatter(Locale.ROOT), new DateTimeFormatterBuilder().append(STRICT_ORDINAL_DATE_TIME_NO_MILLIS_BASE) .appendZoneOrOffsetId().toFormatter(Locale.ROOT), new DateTimeFormatterBuilder().append(STRICT_ORDINAL_DATE_TIME_NO_MILLIS_BASE) @@ -551,6 +571,8 @@ public class DateFormatters { * digit year and three digit dayOfYear (yyyy-DDD'T'HH:mm:ss.SSSZZ). */ private static final DateFormatter STRICT_ORDINAL_DATE_TIME = new JavaDateFormatter("strict_ordinal_date_time", + new DateTimeFormatterBuilder().append(STRICT_ORDINAL_DATE_TIME_FORMATTER_BASE) + .appendZoneOrOffsetId().toFormatter(Locale.ROOT), new DateTimeFormatterBuilder().append(STRICT_ORDINAL_DATE_TIME_FORMATTER_BASE) .appendZoneOrOffsetId().toFormatter(Locale.ROOT), new DateTimeFormatterBuilder().append(STRICT_ORDINAL_DATE_TIME_FORMATTER_BASE) @@ -782,7 +804,7 @@ public class DateFormatters { * yyyy-MM-dd'T'HH:mm:ss.SSSZ */ private static final DateFormatter DATE_OPTIONAL_TIME = new JavaDateFormatter("date_optional_time", - STRICT_DATE_OPTIONAL_TIME_FORMATTER_1, + STRICT_DATE_OPTIONAL_TIME_PRINTER, new DateTimeFormatterBuilder() .append(DATE_FORMATTER) .optionalStart() @@ -800,26 +822,6 @@ public class DateFormatters { .appendFraction(MILLI_OF_SECOND, 1, 3, true) .optionalEnd() .optionalStart().appendZoneOrOffsetId().optionalEnd() - .optionalEnd() - .optionalEnd() - .optionalEnd() - .toFormatter(Locale.ROOT), - new DateTimeFormatterBuilder() - .append(DATE_FORMATTER) - .optionalStart() - .appendLiteral('T') - .optionalStart() - .appendValue(HOUR_OF_DAY, 1, 2, SignStyle.NOT_NEGATIVE) - .optionalStart() - .appendLiteral(':') - .appendValue(MINUTE_OF_HOUR, 1, 2, SignStyle.NOT_NEGATIVE) - .optionalStart() - .appendLiteral(':') - .appendValue(SECOND_OF_MINUTE, 1, 2, SignStyle.NOT_NEGATIVE) - .optionalEnd() - .optionalStart() - .appendFraction(MILLI_OF_SECOND, 1, 3, true) - .optionalEnd() .optionalStart().appendOffset("+HHmm", "Z").optionalEnd() .optionalEnd() .optionalEnd() @@ -970,7 +972,7 @@ public class DateFormatters { * (yyyy-MM-dd'T'HH:mm:ss.SSSZZ). 
*/ private static final DateFormatter DATE_TIME = new JavaDateFormatter("date_time", - STRICT_DATE_OPTIONAL_TIME_FORMATTER_1, + STRICT_DATE_OPTIONAL_TIME_PRINTER, new DateTimeFormatterBuilder().append(DATE_TIME_FORMATTER).appendZoneOrOffsetId().toFormatter(Locale.ROOT), new DateTimeFormatterBuilder().append(DATE_TIME_FORMATTER).append(TIME_ZONE_FORMATTER_NO_COLON).toFormatter(Locale.ROOT) ); @@ -1366,9 +1368,9 @@ public static DateFormatter forPattern(String input) { } else if ("yearMonthDay".equals(input) || "year_month_day".equals(input)) { return YEAR_MONTH_DAY; } else if ("epoch_second".equals(input)) { - return EpochSecondsDateFormatter.INSTANCE; + return EpochTime.SECONDS_FORMATTER; } else if ("epoch_millis".equals(input)) { - return EpochMillisDateFormatter.INSTANCE; + return EpochTime.MILLIS_FORMATTER; // strict date formats here, must be at least 4 digits for year and two for months and two for day } else if ("strictBasicWeekDate".equals(input) || "strict_basic_week_date".equals(input)) { return STRICT_BASIC_WEEK_DATE; @@ -1447,90 +1449,22 @@ public static DateFormatter forPattern(String input) { } } - static class MergedDateFormatter implements DateFormatter { - - private final String pattern; - // package private for tests - final List formatters; - private final List dateMathParsers; - - MergedDateFormatter(String pattern, List formatters) { - assert formatters.size() > 0; - this.pattern = pattern; - this.formatters = Collections.unmodifiableList(formatters); - this.dateMathParsers = formatters.stream().map(DateFormatter::toDateMathParser).collect(Collectors.toList()); - } - - @Override - public TemporalAccessor parse(String input) { - IllegalArgumentException failure = null; - for (DateFormatter formatter : formatters) { - try { - return formatter.parse(input); - // TODO: remove DateTimeParseException when JavaDateFormatter throws IAE - } catch (IllegalArgumentException | DateTimeParseException e) { - if (failure == null) { - // wrap so the entire multi format is in the message - failure = new IllegalArgumentException("failed to parse date field [" + input + "] with format [" + pattern + "]", - e); - } else { - failure.addSuppressed(e); - } - } + static JavaDateFormatter merge(String pattern, List formatters) { + assert formatters.size() > 0; + + List dateTimeFormatters = new ArrayList<>(formatters.size()); + DateTimeFormatter printer = null; + for (DateFormatter formatter : formatters) { + assert formatter instanceof JavaDateFormatter; + JavaDateFormatter javaDateFormatter = (JavaDateFormatter) formatter; + DateTimeFormatter dateTimeFormatter = javaDateFormatter.getParser(); + if (printer == null) { + printer = javaDateFormatter.getPrinter(); } - throw failure; - } - - @Override - public DateFormatter withZone(ZoneId zoneId) { - return new MergedDateFormatter(pattern, formatters.stream().map(f -> f.withZone(zoneId)).collect(Collectors.toList())); - } - - @Override - public DateFormatter withLocale(Locale locale) { - return new MergedDateFormatter(pattern, formatters.stream().map(f -> f.withLocale(locale)).collect(Collectors.toList())); - } - - @Override - public String format(TemporalAccessor accessor) { - return formatters.get(0).format(accessor); - } - - @Override - public String pattern() { - return pattern; - } - - @Override - public Locale locale() { - return formatters.get(0).locale(); + dateTimeFormatters.add(dateTimeFormatter); } - @Override - public ZoneId zone() { - return formatters.get(0).zone(); - } - - @Override - public DateMathParser toDateMathParser() { - 
return (text, now, roundUp, tz) -> { - ElasticsearchParseException failure = null; - for (DateMathParser parser : dateMathParsers) { - try { - return parser.parse(text, now, roundUp, tz); - } catch (ElasticsearchParseException e) { - if (failure == null) { - // wrap so the entire multi format is in the message - failure = new ElasticsearchParseException("failed to parse date field [" + text + "] with format [" - + pattern + "]", e); - } else { - failure.addSuppressed(e); - } - } - } - throw failure; - }; - } + return new JavaDateFormatter(pattern, printer, dateTimeFormatters.toArray(new DateTimeFormatter[0])); } private static final ZonedDateTime EPOCH_ZONED_DATE_TIME = Instant.EPOCH.atZone(ZoneOffset.UTC); @@ -1627,6 +1561,11 @@ public static ZonedDateTime toZonedDateTime(TemporalAccessor accessor, ZonedDate result = result.with(ChronoField.NANO_OF_SECOND, accessor.getLong(ChronoField.NANO_OF_SECOND)); } + ZoneId zoneOffset = accessor.query(TemporalQueries.zone()); + if (zoneOffset != null) { + result = result.withZoneSameLocal(zoneOffset); + } + return result; } } diff --git a/server/src/main/java/org/elasticsearch/common/time/EpochMillisDateFormatter.java b/server/src/main/java/org/elasticsearch/common/time/EpochMillisDateFormatter.java deleted file mode 100644 index b7276e4fd1466..0000000000000 --- a/server/src/main/java/org/elasticsearch/common/time/EpochMillisDateFormatter.java +++ /dev/null @@ -1,119 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.common.time; - -import java.math.BigDecimal; -import java.time.Instant; -import java.time.ZoneId; -import java.time.ZoneOffset; -import java.time.format.DateTimeParseException; -import java.time.temporal.TemporalAccessor; -import java.util.Locale; -import java.util.regex.Pattern; - -/** - * This is a special formatter to parse the milliseconds since the epoch. - * There is no way using a native java time date formatter to resemble - * the required behaviour to parse negative milliseconds as well. 
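The new branch in toZonedDateTime queries the parsed accessor for a zone before applying one: TemporalQueries.zone() returns null when the input carried no zone or offset, hence the null guard around withZoneSameLocal. A quick demonstration in plain java.time:

```java
import java.time.ZoneId;
import java.time.format.DateTimeFormatter;
import java.time.temporal.TemporalAccessor;
import java.time.temporal.TemporalQueries;

public class ZoneQueryDemo {
    public static void main(String[] args) {
        TemporalAccessor withZone = DateTimeFormatter.ISO_DATE_TIME.parse("2018-12-24T10:15:30+01:00");
        TemporalAccessor withoutZone = DateTimeFormatter.ISO_LOCAL_DATE_TIME.parse("2018-12-24T10:15:30");

        // zone() yields the parsed zone or offset, or null if none was present
        ZoneId zone1 = withZone.query(TemporalQueries.zone());
        ZoneId zone2 = withoutZone.query(TemporalQueries.zone());
        System.out.println(zone1 + " / " + zone2); // prints: +01:00 / null
    }
}
```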
- * - * This implementation simply tries to convert the input to a long and uses - * this as the milliseconds since the epoch without involving any other - * java time code - */ -class EpochMillisDateFormatter implements DateFormatter { - - private static final Pattern SPLIT_BY_DOT_PATTERN = Pattern.compile("\\."); - static final DateFormatter INSTANCE = new EpochMillisDateFormatter(); - static final DateMathParser DATE_MATH_INSTANCE = new JavaDateMathParser(INSTANCE, INSTANCE); - - private EpochMillisDateFormatter() { - } - - @Override - public TemporalAccessor parse(String input) { - try { - if (input.contains(".")) { - String[] inputs = SPLIT_BY_DOT_PATTERN.split(input, 2); - Long milliSeconds = Long.valueOf(inputs[0]); - if (inputs[1].length() == 0) { - // this is BWC compatible to joda time, nothing after the dot is allowed - return Instant.ofEpochMilli(milliSeconds).atZone(ZoneOffset.UTC); - } - // scientific notation it is! - if (inputs[1].contains("e")) { - return Instant.ofEpochMilli(Double.valueOf(input).longValue()).atZone(ZoneOffset.UTC); - } - - if (inputs[1].length() > 6) { - throw new DateTimeParseException("too much granularity after dot [" + input + "]", input, 0); - } - Long nanos = new BigDecimal(inputs[1]).movePointRight(6 - inputs[1].length()).longValueExact(); - if (milliSeconds < 0) { - nanos = nanos * -1; - } - return Instant.ofEpochMilli(milliSeconds).plusNanos(nanos).atZone(ZoneOffset.UTC); - } else { - return Instant.ofEpochMilli(Long.valueOf(input)).atZone(ZoneOffset.UTC); - } - } catch (NumberFormatException e) { - throw new DateTimeParseException("invalid number [" + input + "]", input, 0, e); - } - } - @Override - public DateFormatter withZone(ZoneId zoneId) { - if (ZoneOffset.UTC.equals(zoneId) == false) { - throw new IllegalArgumentException(pattern() + " date formatter can only be in zone offset UTC"); - } - return INSTANCE; - } - - @Override - public DateFormatter withLocale(Locale locale) { - if (Locale.ROOT.equals(locale) == false) { - throw new IllegalArgumentException(pattern() + " date formatter can only be in locale ROOT"); - } - return this; - } - - @Override - public String format(TemporalAccessor accessor) { - return String.valueOf(Instant.from(accessor).toEpochMilli()); - } - - @Override - public String pattern() { - return "epoch_millis"; - } - - @Override - public Locale locale() { - return Locale.ROOT; - } - - @Override - public ZoneId zone() { - return ZoneOffset.UTC; - } - - @Override - public DateMathParser toDateMathParser() { - return DATE_MATH_INSTANCE; - } -} diff --git a/server/src/main/java/org/elasticsearch/common/time/EpochSecondsDateFormatter.java b/server/src/main/java/org/elasticsearch/common/time/EpochSecondsDateFormatter.java deleted file mode 100644 index 3d33d083ec6af..0000000000000 --- a/server/src/main/java/org/elasticsearch/common/time/EpochSecondsDateFormatter.java +++ /dev/null @@ -1,113 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.common.time; - -import java.math.BigDecimal; -import java.time.Instant; -import java.time.ZoneId; -import java.time.ZoneOffset; -import java.time.format.DateTimeParseException; -import java.time.temporal.TemporalAccessor; -import java.util.Locale; -import java.util.regex.Pattern; - -public class EpochSecondsDateFormatter implements DateFormatter { - - public static DateFormatter INSTANCE = new EpochSecondsDateFormatter(); - static final DateMathParser DATE_MATH_INSTANCE = new JavaDateMathParser(INSTANCE, INSTANCE); - private static final Pattern SPLIT_BY_DOT_PATTERN = Pattern.compile("\\."); - - private EpochSecondsDateFormatter() {} - - @Override - public TemporalAccessor parse(String input) { - try { - if (input.contains(".")) { - String[] inputs = SPLIT_BY_DOT_PATTERN.split(input, 2); - Long seconds = Long.valueOf(inputs[0]); - if (inputs[1].length() == 0) { - // this is BWC compatible to joda time, nothing after the dot is allowed - return Instant.ofEpochSecond(seconds, 0).atZone(ZoneOffset.UTC); - } - // scientific notation it is! - if (inputs[1].contains("e")) { - return Instant.ofEpochSecond(Double.valueOf(input).longValue()).atZone(ZoneOffset.UTC); - } - if (inputs[1].length() > 9) { - throw new DateTimeParseException("too much granularity after dot [" + input + "]", input, 0); - } - Long nanos = new BigDecimal(inputs[1]).movePointRight(9 - inputs[1].length()).longValueExact(); - if (seconds < 0) { - nanos = nanos * -1; - } - return Instant.ofEpochSecond(seconds, nanos).atZone(ZoneOffset.UTC); - } else { - return Instant.ofEpochSecond(Long.valueOf(input)).atZone(ZoneOffset.UTC); - } - } catch (NumberFormatException e) { - throw new DateTimeParseException("invalid number [" + input + "]", input, 0, e); - } - } - - @Override - public String format(TemporalAccessor accessor) { - Instant instant = Instant.from(accessor); - if (instant.getNano() != 0) { - return String.valueOf(instant.getEpochSecond()) + "." 
+ String.valueOf(instant.getNano()).replaceAll("0*$", ""); - } - return String.valueOf(instant.getEpochSecond()); - } - - @Override - public String pattern() { - return "epoch_second"; - } - - @Override - public Locale locale() { - return Locale.ROOT; - } - - @Override - public ZoneId zone() { - return ZoneOffset.UTC; - } - - @Override - public DateMathParser toDateMathParser() { - return DATE_MATH_INSTANCE; - } - - @Override - public DateFormatter withZone(ZoneId zoneId) { - if (zoneId.equals(ZoneOffset.UTC) == false) { - throw new IllegalArgumentException(pattern() + " date formatter can only be in zone offset UTC"); - } - return this; - } - - @Override - public DateFormatter withLocale(Locale locale) { - if (Locale.ROOT.equals(locale) == false) { - throw new IllegalArgumentException(pattern() + " date formatter can only be in locale ROOT"); - } - return this; - } -} diff --git a/server/src/main/java/org/elasticsearch/common/time/EpochTime.java b/server/src/main/java/org/elasticsearch/common/time/EpochTime.java new file mode 100644 index 0000000000000..518957cd2eb9d --- /dev/null +++ b/server/src/main/java/org/elasticsearch/common/time/EpochTime.java @@ -0,0 +1,219 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.common.time; + +import java.time.format.DateTimeFormatter; +import java.time.format.DateTimeFormatterBuilder; +import java.time.format.ResolverStyle; +import java.time.format.SignStyle; +import java.time.temporal.ChronoField; +import java.time.temporal.ChronoUnit; +import java.time.temporal.Temporal; +import java.time.temporal.TemporalAccessor; +import java.time.temporal.TemporalField; +import java.time.temporal.TemporalUnit; +import java.time.temporal.ValueRange; +import java.util.Locale; +import java.util.Map; + +/** + * This class provides {@link DateTimeFormatter}s capable of parsing epoch seconds and milliseconds. + *
+ * <p>
+ * The seconds formatter is provided by {@link #SECONDS_FORMATTER}. + * The milliseconds formatter is provided by {@link #MILLIS_FORMATTER}. + *
+ * <p>
+ * Both formatters support fractional time, up to nanosecond precision. Values must be positive numbers. + */ +class EpochTime { + + private static final ValueRange LONG_POSITIVE_RANGE = ValueRange.of(0, Long.MAX_VALUE); + + private static final EpochField SECONDS = new EpochField(ChronoUnit.SECONDS, ChronoUnit.FOREVER, LONG_POSITIVE_RANGE) { + @Override + public boolean isSupportedBy(TemporalAccessor temporal) { + return temporal.isSupported(ChronoField.INSTANT_SECONDS); + } + @Override + public long getFrom(TemporalAccessor temporal) { + return temporal.getLong(ChronoField.INSTANT_SECONDS); + } + @Override + public TemporalAccessor resolve(Map fieldValues, + TemporalAccessor partialTemporal, ResolverStyle resolverStyle) { + long seconds = fieldValues.remove(this); + fieldValues.put(ChronoField.INSTANT_SECONDS, seconds); + Long nanos = fieldValues.remove(NANOS_OF_SECOND); + if (nanos != null) { + fieldValues.put(ChronoField.NANO_OF_SECOND, nanos); + } + return null; + } + }; + + private static final EpochField NANOS_OF_SECOND = new EpochField(ChronoUnit.NANOS, ChronoUnit.SECONDS, ValueRange.of(0, 999_999_999)) { + @Override + public boolean isSupportedBy(TemporalAccessor temporal) { + return temporal.isSupported(ChronoField.NANO_OF_SECOND) && temporal.getLong(ChronoField.NANO_OF_SECOND) != 0; + } + @Override + public long getFrom(TemporalAccessor temporal) { + return temporal.getLong(ChronoField.NANO_OF_SECOND); + } + }; + + private static final EpochField MILLIS = new EpochField(ChronoUnit.MILLIS, ChronoUnit.FOREVER, LONG_POSITIVE_RANGE) { + @Override + public boolean isSupportedBy(TemporalAccessor temporal) { + return temporal.isSupported(ChronoField.INSTANT_SECONDS) && temporal.isSupported(ChronoField.MILLI_OF_SECOND); + } + @Override + public long getFrom(TemporalAccessor temporal) { + return temporal.getLong(ChronoField.INSTANT_SECONDS) * 1_000 + temporal.getLong(ChronoField.MILLI_OF_SECOND); + } + @Override + public TemporalAccessor resolve(Map fieldValues, + TemporalAccessor partialTemporal, ResolverStyle resolverStyle) { + long secondsAndMillis = fieldValues.remove(this); + long seconds = secondsAndMillis / 1_000; + long nanos = secondsAndMillis % 1000 * 1_000_000; + Long nanosOfMilli = fieldValues.remove(NANOS_OF_MILLI); + if (nanosOfMilli != null) { + nanos += nanosOfMilli; + } + fieldValues.put(ChronoField.INSTANT_SECONDS, seconds); + fieldValues.put(ChronoField.NANO_OF_SECOND, nanos); + return null; + } + }; + + private static final EpochField NANOS_OF_MILLI = new EpochField(ChronoUnit.NANOS, ChronoUnit.MILLIS, ValueRange.of(0, 999_999)) { + @Override + public boolean isSupportedBy(TemporalAccessor temporal) { + return temporal.isSupported(ChronoField.NANO_OF_SECOND) && temporal.getLong(ChronoField.NANO_OF_SECOND) % 1_000_000 != 0; + } + @Override + public long getFrom(TemporalAccessor temporal) { + return temporal.getLong(ChronoField.NANO_OF_SECOND); + } + }; + + // this supports seconds without any fraction + private static final DateTimeFormatter SECONDS_FORMATTER1 = new DateTimeFormatterBuilder() + .appendValue(SECONDS, 1, 19, SignStyle.NORMAL) + .toFormatter(Locale.ROOT); + + // this supports seconds ending in dot + private static final DateTimeFormatter SECONDS_FORMATTER2 = new DateTimeFormatterBuilder() + .append(SECONDS_FORMATTER1) + .appendLiteral('.') + .toFormatter(Locale.ROOT); + + // this supports seconds with a fraction and is also used for printing + private static final DateTimeFormatter SECONDS_FORMATTER3 = new DateTimeFormatterBuilder() + 
.append(SECONDS_FORMATTER1) + .optionalStart() // optional is used so isSupported will be called when printing + .appendFraction(NANOS_OF_SECOND, 1, 9, true) + .optionalEnd() + .toFormatter(Locale.ROOT); + + // this supports milliseconds without any fraction + private static final DateTimeFormatter MILLISECONDS_FORMATTER1 = new DateTimeFormatterBuilder() + .appendValue(MILLIS, 1, 19, SignStyle.NORMAL) + .toFormatter(Locale.ROOT); + + // this supports milliseconds ending in dot + private static final DateTimeFormatter MILLISECONDS_FORMATTER2 = new DateTimeFormatterBuilder() + .append(MILLISECONDS_FORMATTER1) + .appendLiteral('.') + .toFormatter(Locale.ROOT); + + // this supports milliseconds with a fraction and is also used for printing + private static final DateTimeFormatter MILLISECONDS_FORMATTER3 = new DateTimeFormatterBuilder() + .append(MILLISECONDS_FORMATTER1) + .optionalStart() // optional is used so isSupported will be called when printing + .appendFraction(NANOS_OF_MILLI, 1, 6, true) + .optionalEnd() + .toFormatter(Locale.ROOT); + + static final DateFormatter SECONDS_FORMATTER = new JavaDateFormatter("epoch_second", SECONDS_FORMATTER3, + SECONDS_FORMATTER1, SECONDS_FORMATTER2, SECONDS_FORMATTER3); + + static final DateFormatter MILLIS_FORMATTER = new JavaDateFormatter("epoch_millis", MILLISECONDS_FORMATTER3, + MILLISECONDS_FORMATTER1, MILLISECONDS_FORMATTER2, MILLISECONDS_FORMATTER3); + + private abstract static class EpochField implements TemporalField { + + private final TemporalUnit baseUnit; + private final TemporalUnit rangeUnit; + private final ValueRange range; + + private EpochField(TemporalUnit baseUnit, TemporalUnit rangeUnit, ValueRange range) { + this.baseUnit = baseUnit; + this.rangeUnit = rangeUnit; + this.range = range; + } + + @Override + public String getDisplayName(Locale locale) { + return toString(); + } + + @Override + public String toString() { + return "Epoch" + baseUnit.toString() + (rangeUnit != ChronoUnit.FOREVER ? 
"Of" + rangeUnit.toString() : ""); + } + + @Override + public TemporalUnit getBaseUnit() { + return baseUnit; + } + + @Override + public TemporalUnit getRangeUnit() { + return rangeUnit; + } + + @Override + public ValueRange range() { + return range; + } + + @Override + public boolean isDateBased() { + return false; + } + + @Override + public boolean isTimeBased() { + return true; + } + + @Override + public ValueRange rangeRefinedBy(TemporalAccessor temporal) { + return range(); + } + + @SuppressWarnings("unchecked") + @Override + public R adjustInto(R temporal, long newValue) { + return (R) temporal.with(this, newValue); + } + } +} diff --git a/server/src/main/java/org/elasticsearch/common/time/JavaDateFormatter.java b/server/src/main/java/org/elasticsearch/common/time/JavaDateFormatter.java index 68e2cfd4fe317..0fce14b764ef1 100644 --- a/server/src/main/java/org/elasticsearch/common/time/JavaDateFormatter.java +++ b/server/src/main/java/org/elasticsearch/common/time/JavaDateFormatter.java @@ -19,10 +19,11 @@ package org.elasticsearch.common.time; +import org.elasticsearch.common.Strings; + import java.time.ZoneId; import java.time.format.DateTimeFormatter; import java.time.format.DateTimeFormatterBuilder; -import java.time.format.DateTimeParseException; import java.time.temporal.ChronoField; import java.time.temporal.TemporalAccessor; import java.time.temporal.TemporalField; @@ -47,7 +48,7 @@ class JavaDateFormatter implements DateFormatter { private final String format; private final DateTimeFormatter printer; - private final DateTimeFormatter[] parsers; + private final DateTimeFormatter parser; JavaDateFormatter(String format, DateTimeFormatter printer, DateTimeFormatter... parsers) { if (printer == null) { @@ -62,61 +63,54 @@ class JavaDateFormatter implements DateFormatter { throw new IllegalArgumentException("formatters must have the same locale"); } if (parsers.length == 0) { - this.parsers = new DateTimeFormatter[]{printer}; + this.parser = printer; + } else if (parsers.length == 1) { + this.parser = parsers[0]; } else { - this.parsers = parsers; + DateTimeFormatterBuilder builder = new DateTimeFormatterBuilder(); + for (DateTimeFormatter parser : parsers) { + builder.appendOptional(parser); + } + this.parser = builder.toFormatter(Locale.ROOT); } this.format = format; this.printer = printer; } + DateTimeFormatter getParser() { + return parser; + } + + DateTimeFormatter getPrinter() { + return printer; + } + @Override public TemporalAccessor parse(String input) { - DateTimeParseException failure = null; - for (int i = 0; i < parsers.length; i++) { - try { - return parsers[i].parse(input); - } catch (DateTimeParseException e) { - if (failure == null) { - failure = e; - } else { - failure.addSuppressed(e); - } - } + if (Strings.isNullOrEmpty(input)) { + throw new IllegalArgumentException("cannot parse empty date"); } - - // ensure that all parsers exceptions are returned instead of only the last one - throw failure; + return parser.parse(input); } @Override public DateFormatter withZone(ZoneId zoneId) { // shortcurt to not create new objects unnecessarily - if (zoneId.equals(parsers[0].getZone())) { + if (zoneId.equals(parser.getZone())) { return this; } - final DateTimeFormatter[] parsersWithZone = new DateTimeFormatter[parsers.length]; - for (int i = 0; i < parsers.length; i++) { - parsersWithZone[i] = parsers[i].withZone(zoneId); - } - - return new JavaDateFormatter(format, printer.withZone(zoneId), parsersWithZone); + return new JavaDateFormatter(format, printer.withZone(zoneId), 
parser.withZone(zoneId)); } @Override public DateFormatter withLocale(Locale locale) { // shortcurt to not create new objects unnecessarily - if (locale.equals(parsers[0].getLocale())) { + if (locale.equals(parser.getLocale())) { return this; } - final DateTimeFormatter[] parsersWithZone = new DateTimeFormatter[parsers.length]; - for (int i = 0; i < parsers.length; i++) { - parsersWithZone[i] = parsers[i].withLocale(locale); - } - - return new JavaDateFormatter(format, printer.withLocale(locale), parsersWithZone); + return new JavaDateFormatter(format, printer.withLocale(locale), parser.withLocale(locale)); } @Override @@ -132,17 +126,7 @@ public String pattern() { JavaDateFormatter parseDefaulting(Map fields) { final DateTimeFormatterBuilder parseDefaultingBuilder = new DateTimeFormatterBuilder().append(printer); fields.forEach(parseDefaultingBuilder::parseDefaulting); - if (parsers.length == 1 && parsers[0].equals(printer)) { - return new JavaDateFormatter(format, parseDefaultingBuilder.toFormatter(Locale.ROOT)); - } else { - final DateTimeFormatter[] parsersWithDefaulting = new DateTimeFormatter[parsers.length]; - for (int i = 0; i < parsers.length; i++) { - DateTimeFormatterBuilder builder = new DateTimeFormatterBuilder().append(parsers[i]); - fields.forEach(builder::parseDefaulting); - parsersWithDefaulting[i] = builder.toFormatter(Locale.ROOT); - } - return new JavaDateFormatter(format, parseDefaultingBuilder.toFormatter(Locale.ROOT), parsersWithDefaulting); - } + return new JavaDateFormatter(format, parseDefaultingBuilder.toFormatter(Locale.ROOT)); } @Override diff --git a/server/src/main/java/org/elasticsearch/common/util/CancellableThreads.java b/server/src/main/java/org/elasticsearch/common/util/CancellableThreads.java index c2f55b8d9b939..3037c7a73c0d5 100644 --- a/server/src/main/java/org/elasticsearch/common/util/CancellableThreads.java +++ b/server/src/main/java/org/elasticsearch/common/util/CancellableThreads.java @@ -29,7 +29,7 @@ /** * A utility class for multi threaded operation that needs to be cancellable via interrupts. Every cancellable operation should be - * executed via {@link #execute(Interruptable)}, which will capture the executing thread and make sure it is interrupted in the case + * executed via {@link #execute(Interruptible)}, which will capture the executing thread and make sure it is interrupted in the case * of cancellation. * * Cancellation policy: This class does not support external interruption via Thread#interrupt(). Always use #cancel() instead. @@ -77,33 +77,33 @@ private synchronized boolean add() { } /** - * run the Interruptable, capturing the executing thread. Concurrent calls to {@link #cancel(String)} will interrupt this thread + * run the Interruptible, capturing the executing thread. Concurrent calls to {@link #cancel(String)} will interrupt this thread * causing the call to prematurely return. * - * @param interruptable code to run + * @param interruptible code to run */ - public void execute(Interruptable interruptable) { + public void execute(Interruptible interruptible) { try { - executeIO(interruptable); + executeIO(interruptible); } catch (IOException e) { - assert false : "the passed interruptable can not result in an IOException"; + assert false : "the passed interruptible can not result in an IOException"; throw new RuntimeException("unexpected IO exception", e); } } /** - * run the Interruptable, capturing the executing thread. 
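The Interruptable-to-Interruptible rename below is purely cosmetic, but the class's capture-and-interrupt pattern is worth seeing in miniature. The following is a stripped-down sketch of the idea, not the Elasticsearch class itself: the executing thread is captured so a concurrent cancel() can interrupt it.

```java
import java.util.concurrent.atomic.AtomicReference;

public class CancellableSketch {
    interface Interruptible {                 // mirrors the renamed interface
        void run() throws InterruptedException;
    }

    private final AtomicReference<Thread> current = new AtomicReference<>();

    public void execute(Interruptible interruptible) {
        current.set(Thread.currentThread());  // capture so cancel() can reach us
        try {
            interruptible.run();
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt(); // cancellation lands here as an interrupt
        } finally {
            current.set(null);
        }
    }

    public void cancel() {
        Thread thread = current.get();
        if (thread != null) {
            thread.interrupt();
        }
    }

    public static void main(String[] args) throws Exception {
        CancellableSketch sketch = new CancellableSketch();
        Thread worker = new Thread(() -> sketch.execute(() -> Thread.sleep(10_000)));
        worker.start();
        Thread.sleep(100);
        sketch.cancel();                      // interrupts the captured worker thread
        worker.join();
        System.out.println("worker cancelled");
    }
}
```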
Concurrent calls to {@link #cancel(String)} will interrupt this thread + * run the Interruptible, capturing the executing thread. Concurrent calls to {@link #cancel(String)} will interrupt this thread * causing the call to prematurely return. * - * @param interruptable code to run + * @param interruptible code to run */ - public void executeIO(IOInterruptable interruptable) throws IOException { + public void executeIO(IOInterruptible interruptible) throws IOException { boolean wasInterrupted = add(); boolean cancelledByExternalInterrupt = false; RuntimeException runtimeException = null; IOException ioException = null; try { - interruptable.run(); + interruptible.run(); } catch (InterruptedException | ThreadInterruptedException e) { // ignore, this interrupt has been triggered by us in #cancel()... assert cancelled : "Interruption via Thread#interrupt() is unsupported. Use CancellableThreads#cancel() instead"; @@ -167,11 +167,11 @@ public synchronized void cancel(String reason) { } - public interface Interruptable extends IOInterruptable { + public interface Interruptible extends IOInterruptible { void run() throws InterruptedException; } - public interface IOInterruptable { + public interface IOInterruptible { void run() throws IOException, InterruptedException; } diff --git a/server/src/main/java/org/elasticsearch/common/util/iterable/Iterables.java b/server/src/main/java/org/elasticsearch/common/util/iterable/Iterables.java index 3783de95585cd..0574895af656b 100644 --- a/server/src/main/java/org/elasticsearch/common/util/iterable/Iterables.java +++ b/server/src/main/java/org/elasticsearch/common/util/iterable/Iterables.java @@ -21,7 +21,6 @@ import java.util.ArrayList; import java.util.Arrays; -import java.util.Collection; import java.util.Iterator; import java.util.List; import java.util.Objects; @@ -29,8 +28,6 @@ import java.util.stream.StreamSupport; public class Iterables { - public Iterables() { - } public static Iterable concat(Iterable... inputs) { Objects.requireNonNull(inputs); @@ -80,45 +77,6 @@ public Iterator iterator() { } } - public static boolean allElementsAreEqual(Iterable left, Iterable right) { - Objects.requireNonNull(left); - Objects.requireNonNull(right); - if (left instanceof Collection && right instanceof Collection) { - Collection collection1 = (Collection) left; - Collection collection2 = (Collection) right; - if (collection1.size() != collection2.size()) { - return false; - } - } - - Iterator leftIt = left.iterator(); - Iterator rightIt = right.iterator(); - - while (true) { - if (leftIt.hasNext()) { - if (!rightIt.hasNext()) { - return false; - } - - Object o1 = leftIt.next(); - Object o2 = rightIt.next(); - if (Objects.equals(o1, o2)) { - continue; - } - - return false; - } - - return !rightIt.hasNext(); - } - } - - public static T getFirst(Iterable collection, T defaultValue) { - Objects.requireNonNull(collection); - Iterator iterator = collection.iterator(); - return iterator.hasNext() ? 
iterator.next() : defaultValue; - } - public static T get(Iterable iterable, int position) { Objects.requireNonNull(iterable); if (position < 0) { diff --git a/server/src/main/java/org/elasticsearch/discovery/DiscoveryModule.java b/server/src/main/java/org/elasticsearch/discovery/DiscoveryModule.java index 1572548b1b1fc..042eb9daa0d9d 100644 --- a/server/src/main/java/org/elasticsearch/discovery/DiscoveryModule.java +++ b/server/src/main/java/org/elasticsearch/discovery/DiscoveryModule.java @@ -127,11 +127,11 @@ public DiscoveryModule(Settings settings, ThreadPool threadPool, TransportServic Map> discoveryTypes = new HashMap<>(); discoveryTypes.put(ZEN_DISCOVERY_TYPE, () -> new ZenDiscovery(settings, threadPool, transportService, namedWriteableRegistry, masterService, clusterApplier, - clusterSettings, hostsProvider, allocationService, Collections.unmodifiableCollection(joinValidators), gatewayMetaState)); + clusterSettings, hostsProvider, allocationService, joinValidators, gatewayMetaState)); discoveryTypes.put(ZEN2_DISCOVERY_TYPE, () -> new Coordinator(NODE_NAME_SETTING.get(settings), settings, clusterSettings, transportService, namedWriteableRegistry, allocationService, masterService, () -> gatewayMetaState.getPersistedState(settings, (ClusterApplierService) clusterApplier), hostsProvider, clusterApplier, - Randomness.get())); + joinValidators, Randomness.get())); discoveryTypes.put("single-node", () -> new SingleNodeDiscovery(settings, transportService, masterService, clusterApplier, gatewayMetaState)); for (DiscoveryPlugin plugin : plugins) { diff --git a/server/src/main/java/org/elasticsearch/discovery/zen/MembershipAction.java b/server/src/main/java/org/elasticsearch/discovery/zen/MembershipAction.java index a136d28305252..550b25083fb96 100644 --- a/server/src/main/java/org/elasticsearch/discovery/zen/MembershipAction.java +++ b/server/src/main/java/org/elasticsearch/discovery/zen/MembershipAction.java @@ -164,7 +164,7 @@ public static class ValidateJoinRequest extends TransportRequest { public ValidateJoinRequest() {} - ValidateJoinRequest(ClusterState state) { + public ValidateJoinRequest(ClusterState state) { this.state = state; } @@ -179,6 +179,10 @@ public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); this.state.writeTo(out); } + + public ClusterState getState() { + return state; + } } static class ValidateJoinRequestRequestHandler implements TransportRequestHandler { diff --git a/server/src/main/java/org/elasticsearch/discovery/zen/ZenDiscovery.java b/server/src/main/java/org/elasticsearch/discovery/zen/ZenDiscovery.java index 87ecbf03b8609..05d0bfa27188a 100644 --- a/server/src/main/java/org/elasticsearch/discovery/zen/ZenDiscovery.java +++ b/server/src/main/java/org/elasticsearch/discovery/zen/ZenDiscovery.java @@ -73,7 +73,6 @@ import java.io.IOException; import java.util.ArrayList; import java.util.Collection; -import java.util.Collections; import java.util.List; import java.util.Locale; import java.util.Set; @@ -163,7 +162,7 @@ public ZenDiscovery(Settings settings, ThreadPool threadPool, TransportService t ClusterSettings clusterSettings, UnicastHostsProvider hostsProvider, AllocationService allocationService, Collection> onJoinValidators, GatewayMetaState gatewayMetaState) { super(settings); - this.onJoinValidators = addBuiltInJoinValidators(onJoinValidators); + this.onJoinValidators = JoinTaskExecutor.addBuiltInJoinValidators(onJoinValidators); this.masterService = masterService; this.clusterApplier = clusterApplier; this.transportService = 
transportService; @@ -235,17 +234,6 @@ public ZenDiscovery(Settings settings, ThreadPool threadPool, TransportService t } } - static Collection> addBuiltInJoinValidators( - Collection> onJoinValidators) { - Collection> validators = new ArrayList<>(); - validators.add((node, state) -> { - JoinTaskExecutor.ensureNodesCompatibility(node.getVersion(), state.getNodes()); - JoinTaskExecutor.ensureIndexCompatibility(node.getVersion(), state.getMetaData()); - }); - validators.addAll(onJoinValidators); - return Collections.unmodifiableCollection(validators); - } - // protected to allow overriding in tests protected ZenPing newZenPing(Settings settings, ThreadPool threadPool, TransportService transportService, UnicastHostsProvider hostsProvider) { @@ -910,7 +898,7 @@ protected void rejoin(String reason) { if (clusterState.nodes().getMasterNodeId() != null) { // remove block if it already exists before adding new one - assert clusterState.blocks().hasGlobalBlock(discoverySettings.getNoMasterBlock().id()) == false : + assert clusterState.blocks().hasGlobalBlockWithId(discoverySettings.getNoMasterBlock().id()) == false : "NO_MASTER_BLOCK should only be added by ZenDiscovery"; ClusterBlocks clusterBlocks = ClusterBlocks.builder().blocks(clusterState.blocks()) .addGlobalBlock(discoverySettings.getNoMasterBlock()) diff --git a/server/src/main/java/org/elasticsearch/env/NodeEnvironment.java b/server/src/main/java/org/elasticsearch/env/NodeEnvironment.java index 3cc7615b8ce5a..2d236b08f79de 100644 --- a/server/src/main/java/org/elasticsearch/env/NodeEnvironment.java +++ b/server/src/main/java/org/elasticsearch/env/NodeEnvironment.java @@ -305,6 +305,10 @@ public NodeEnvironment(Settings settings, Environment environment) throws IOExce applySegmentInfosTrace(settings); assertCanWrite(); + + if (DiscoveryNode.isMasterNode(settings) || DiscoveryNode.isDataNode(settings)) { + ensureAtomicMoveSupported(nodePaths); + } success = true; } finally { if (success == false) { @@ -1003,8 +1007,7 @@ private void assertEnvIsLocked() { * not supported by the filesystem. This test is executed on each of the data directories. * This method cleans up all files even in the case of an error. 
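With this change, ensureAtomicMoveSupported runs from the NodeEnvironment constructor for master and data nodes. The probe it performs is essentially an ATOMIC_MOVE rename of a scratch file in each data directory; a standalone sketch of that kind of check, using temp paths of my own:

```java
import java.io.IOException;
import java.nio.file.*;

public class AtomicMoveProbe {
    public static void main(String[] args) throws IOException {
        Path dir = Files.createTempDirectory("probe");
        Path src = dir.resolve("src.tmp");
        Path target = dir.resolve("target.tmp");
        Files.createFile(src);
        try {
            // A filesystem that cannot rename atomically throws here.
            Files.move(src, target, StandardCopyOption.ATOMIC_MOVE);
            System.out.println("atomic move supported");
        } catch (AtomicMoveNotSupportedException e) {
            System.out.println("atomic move NOT supported: " + e.getMessage());
        } finally {
            Files.deleteIfExists(src);
            Files.deleteIfExists(target);
            Files.deleteIfExists(dir);
        }
    }
}
```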
*/ - public void ensureAtomicMoveSupported() throws IOException { - final NodePath[] nodePaths = nodePaths(); + private static void ensureAtomicMoveSupported(final NodePath[] nodePaths) throws IOException { for (NodePath nodePath : nodePaths) { assert Files.isDirectory(nodePath.path) : nodePath.path + " is not a directory"; final Path src = nodePath.path.resolve(TEMP_FILE_NAME + ".tmp"); diff --git a/server/src/main/java/org/elasticsearch/gateway/GatewayMetaState.java b/server/src/main/java/org/elasticsearch/gateway/GatewayMetaState.java index 81f26b40c9aa0..f14d86c7602bf 100644 --- a/server/src/main/java/org/elasticsearch/gateway/GatewayMetaState.java +++ b/server/src/main/java/org/elasticsearch/gateway/GatewayMetaState.java @@ -51,9 +51,6 @@ import org.elasticsearch.transport.TransportService; import java.io.IOException; -import java.nio.file.DirectoryStream; -import java.nio.file.Files; -import java.nio.file.Path; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; @@ -102,8 +99,6 @@ public GatewayMetaState(Settings settings, NodeEnvironment nodeEnv, MetaStateSer this.clusterService = clusterService; this.indicesService = indicesService; - ensureNoPre019State(); //TODO remove this check, it's Elasticsearch version 7 already - ensureAtomicMoveSupported(); //TODO move this check to NodeEnvironment, because it's related to all types of metadata upgradeMetaData(metaDataIndexUpgradeService, metaDataUpgrader); initializeClusterState(ClusterName.CLUSTER_NAME_SETTING.get(settings)); incrementalWrite = false; @@ -194,12 +189,6 @@ protected boolean isMasterOrDataNode() { return DiscoveryNode.isMasterNode(settings) || DiscoveryNode.isDataNode(settings); } - private void ensureAtomicMoveSupported() throws IOException { - if (isMasterOrDataNode()) { - nodeEnv.ensureAtomicMoveSupported(); - } - } - public MetaData getMetaData() { return previousClusterState.metaData(); } @@ -410,62 +399,10 @@ public static Set getRelevantIndices(ClusterState state, ClusterState pre return relevantIndices; } - private static boolean isDataOnlyNode(ClusterState state) { return ((state.nodes().getLocalNode().isMasterNode() == false) && state.nodes().getLocalNode().isDataNode()); } - - private void ensureNoPre019State() throws IOException { - if (DiscoveryNode.isDataNode(settings)) { - ensureNoPre019ShardState(); - } - if (isMasterOrDataNode()) { - ensureNoPre019MetadataFiles(); - } - } - - /** - * Throws an IAE if a pre 0.19 state is detected - */ - private void ensureNoPre019MetadataFiles() throws IOException { - for (Path dataLocation : nodeEnv.nodeDataPaths()) { - final Path stateLocation = dataLocation.resolve(MetaDataStateFormat.STATE_DIR_NAME); - if (!Files.exists(stateLocation)) { - continue; - } - try (DirectoryStream stream = Files.newDirectoryStream(stateLocation)) { - for (Path stateFile : stream) { - if (logger.isTraceEnabled()) { - logger.trace("[upgrade]: processing [{}]", stateFile.getFileName()); - } - final String name = stateFile.getFileName().toString(); - if (name.startsWith("metadata-")) { - throw new IllegalStateException("Detected pre 0.19 metadata file please upgrade to a version before " - + Version.CURRENT.minimumIndexCompatibilityVersion() - + " first to upgrade state structures - metadata found: [" + stateFile.getParent().toAbsolutePath()); - } - } - } - } - } - - // shard state BWC - private void ensureNoPre019ShardState() throws IOException { - for (Path dataLocation : nodeEnv.nodeDataPaths()) { - final Path stateLocation = 
dataLocation.resolve(MetaDataStateFormat.STATE_DIR_NAME); - if (Files.exists(stateLocation)) { - try (DirectoryStream<Path> stream = Files.newDirectoryStream(stateLocation, "shards-*")) { - for (Path stateFile : stream) { - throw new IllegalStateException("Detected pre 0.19 shard state file please upgrade to a version before " - + Version.CURRENT.minimumIndexCompatibilityVersion() - + " first to upgrade state structures - shard state found: [" + stateFile.getParent().toAbsolutePath()); - } - } - } - } - /** * Elasticsearch 2.0 removed several deprecated features as well as support for Lucene 3.x. This method calls * {@link MetaDataIndexUpgradeService} to make sure that indices are compatible with the current version. The diff --git a/server/src/main/java/org/elasticsearch/index/IndexModule.java b/server/src/main/java/org/elasticsearch/index/IndexModule.java index 7f2eae492fd56..6b83d2252dec7 100644 --- a/server/src/main/java/org/elasticsearch/index/IndexModule.java +++ b/server/src/main/java/org/elasticsearch/index/IndexModule.java @@ -84,7 +84,7 @@ */ public final class IndexModule { - public static final Setting<Boolean> NODE_STORE_ALLOW_MMAPFS = Setting.boolSetting("node.store.allow_mmapfs", true, Property.NodeScope); + public static final Setting<Boolean> NODE_STORE_ALLOW_MMAP = Setting.boolSetting("node.store.allow_mmap", true, Property.NodeScope); public static final Setting<String> INDEX_STORE_TYPE_SETTING = new Setting<>("index.store.type", "", Function.identity(), Property.IndexScope, Property.NodeScope); @@ -302,6 +302,7 @@ public static boolean isBuiltinType(String storeType) { public enum Type { + HYBRIDFS("hybridfs"), NIOFS("niofs"), MMAPFS("mmapfs"), SIMPLEFS("simplefs"), @@ -330,7 +331,7 @@ public String getSettingsKey() { public static Type fromSettingsKey(final String key) { final Type type = TYPES.get(key); if (type == null) { - throw new IllegalArgumentException("no matching type for [" + key + "]"); + throw new IllegalArgumentException("no matching store type for [" + key + "]"); } return type; } @@ -354,9 +355,9 @@ public interface IndexSearcherWrapperFactory { IndexSearcherWrapper newWrapper(IndexService indexService); } - public static Type defaultStoreType(final boolean allowMmapfs) { - if (allowMmapfs && Constants.JRE_IS_64BIT && MMapDirectory.UNMAP_SUPPORTED) { - return Type.MMAPFS; + public static Type defaultStoreType(final boolean allowMmap) { + if (allowMmap && Constants.JRE_IS_64BIT && MMapDirectory.UNMAP_SUPPORTED) { + return Type.HYBRIDFS; } else if (Constants.WINDOWS) { return Type.SIMPLEFS; } else { @@ -405,9 +406,9 @@ private static IndexStore getIndexStore( final IndexSettings indexSettings, final Map<String, Function<IndexSettings, IndexStore>> indexStoreFactories) { final String storeType = indexSettings.getValue(INDEX_STORE_TYPE_SETTING); final Type type; - final Boolean allowMmapfs = NODE_STORE_ALLOW_MMAPFS.get(indexSettings.getNodeSettings()); + final Boolean allowMmap = NODE_STORE_ALLOW_MMAP.get(indexSettings.getNodeSettings()); if (storeType.isEmpty() || Type.FS.getSettingsKey().equals(storeType)) { - type = defaultStoreType(allowMmapfs); + type = defaultStoreType(allowMmap); } else { if (isBuiltinType(storeType)) { type = Type.fromSettingsKey(storeType); @@ -415,8 +416,8 @@ private static IndexStore getIndexStore( type = null; } } - if (type != null && type == Type.MMAPFS && allowMmapfs == false) { - throw new IllegalArgumentException("store type [mmapfs] is not allowed"); + if (allowMmap == false && (type == Type.MMAPFS || type == Type.HYBRIDFS)) { + throw new IllegalArgumentException("store type [" +
storeType + "] is not allowed because mmap is disabled"); } final IndexStore store; if (storeType.isEmpty() || isBuiltinType(storeType)) { diff --git a/server/src/main/java/org/elasticsearch/index/IndexSettings.java b/server/src/main/java/org/elasticsearch/index/IndexSettings.java index ead9e7597fd73..4d9a8f7d37b70 100644 --- a/server/src/main/java/org/elasticsearch/index/IndexSettings.java +++ b/server/src/main/java/org/elasticsearch/index/IndexSettings.java @@ -256,6 +256,17 @@ public final class IndexSettings { Setting.longSetting("index.soft_deletes.retention.operations", 0, 0, Property.IndexScope, Property.Dynamic); + /** + * Controls the maximum length of time since a retention lease is created or renewed before it is considered expired. + */ + public static final Setting INDEX_SOFT_DELETES_RETENTION_LEASE_SETTING = + Setting.timeSetting( + "index.soft_deletes.retention.lease", + TimeValue.timeValueHours(12), + TimeValue.ZERO, + Property.Dynamic, + Property.IndexScope); + /** * The maximum number of refresh listeners allows on this shard. */ @@ -316,6 +327,18 @@ public final class IndexSettings { private long gcDeletesInMillis = DEFAULT_GC_DELETES.millis(); private final boolean softDeleteEnabled; private volatile long softDeleteRetentionOperations; + + private volatile long retentionLeaseMillis; + + /** + * The maximum age of a retention lease before it is considered expired. + * + * @return the maximum age + */ + public long getRetentionLeaseMillis() { + return retentionLeaseMillis; + } + private volatile boolean warmerEnabled; private volatile int maxResultWindow; private volatile int maxInnerResultWindow; @@ -431,6 +454,7 @@ public IndexSettings(final IndexMetaData indexMetaData, final Settings nodeSetti gcDeletesInMillis = scopedSettings.get(INDEX_GC_DELETES_SETTING).getMillis(); softDeleteEnabled = version.onOrAfter(Version.V_6_5_0) && scopedSettings.get(INDEX_SOFT_DELETES_SETTING); softDeleteRetentionOperations = scopedSettings.get(INDEX_SOFT_DELETES_RETENTION_OPERATIONS_SETTING); + retentionLeaseMillis = scopedSettings.get(INDEX_SOFT_DELETES_RETENTION_LEASE_SETTING).millis(); warmerEnabled = scopedSettings.get(INDEX_WARMER_ENABLED_SETTING); maxResultWindow = scopedSettings.get(MAX_RESULT_WINDOW_SETTING); maxInnerResultWindow = scopedSettings.get(MAX_INNER_RESULT_WINDOW_SETTING); diff --git a/server/src/main/java/org/elasticsearch/index/analysis/AnalysisRegistry.java b/server/src/main/java/org/elasticsearch/index/analysis/AnalysisRegistry.java index 55e2a1836fda3..165256940bb81 100644 --- a/server/src/main/java/org/elasticsearch/index/analysis/AnalysisRegistry.java +++ b/server/src/main/java/org/elasticsearch/index/analysis/AnalysisRegistry.java @@ -20,6 +20,7 @@ import org.apache.lucene.analysis.Analyzer; import org.apache.lucene.analysis.core.WhitespaceTokenizer; +import org.elasticsearch.Version; import org.elasticsearch.core.internal.io.IOUtils; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.cluster.metadata.IndexMetaData; @@ -130,7 +131,13 @@ public Analyzer getAnalyzer(String analyzer) throws IOException { throw new ElasticsearchException("failed to load analyzer for name " + key, ex); }} ); + } else if ("standard_html_strip".equals(analyzer)) { + if (Version.CURRENT.onOrAfter(Version.V_7_0_0)) { + throw new IllegalArgumentException("[standard_html_strip] analyzer is not supported for new indices, " + + "use a custom analyzer using [standard] tokenizer and [html_strip] char_filter, plus [lowercase] filter"); + } } + return 
analyzerProvider.get(environment, analyzer).get(); } diff --git a/server/src/main/java/org/elasticsearch/index/engine/EngineConfig.java b/server/src/main/java/org/elasticsearch/index/engine/EngineConfig.java index f95ba96d343c9..1cc92319b5e45 100644 --- a/server/src/main/java/org/elasticsearch/index/engine/EngineConfig.java +++ b/server/src/main/java/org/elasticsearch/index/engine/EngineConfig.java @@ -35,6 +35,7 @@ import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.codec.CodecService; import org.elasticsearch.index.mapper.ParsedDocument; +import org.elasticsearch.index.seqno.RetentionLease; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.index.store.Store; import org.elasticsearch.index.translog.TranslogConfig; @@ -42,8 +43,11 @@ import org.elasticsearch.indices.breaker.CircuitBreakerService; import org.elasticsearch.threadpool.ThreadPool; +import java.util.Collection; import java.util.List; +import java.util.Objects; import java.util.function.LongSupplier; +import java.util.function.Supplier; /** * Holds all the configuration that is used to create an {@link Engine}. @@ -77,6 +81,18 @@ public final class EngineConfig { @Nullable private final CircuitBreakerService circuitBreakerService; private final LongSupplier globalCheckpointSupplier; + private final Supplier<Collection<RetentionLease>> retentionLeasesSupplier; + + /** + * A supplier of the outstanding retention leases. This is used during merge operations to determine which soft-deleted operations + * should be retained. + * + * @return a supplier of outstanding retention leases + */ + public Supplier<Collection<RetentionLease>> retentionLeasesSupplier() { + return retentionLeasesSupplier; + } + private final LongSupplier primaryTermSupplier; private final TombstoneDocSupplier tombstoneDocSupplier; @@ -125,7 +141,9 @@ public EngineConfig(ShardId shardId, String allocationId, ThreadPool threadPool, List<ReferenceManager.RefreshListener> externalRefreshListener, List<ReferenceManager.RefreshListener> internalRefreshListener, Sort indexSort, CircuitBreakerService circuitBreakerService, LongSupplier globalCheckpointSupplier, - LongSupplier primaryTermSupplier, TombstoneDocSupplier tombstoneDocSupplier) { + Supplier<Collection<RetentionLease>> retentionLeasesSupplier, + LongSupplier primaryTermSupplier, + TombstoneDocSupplier tombstoneDocSupplier) { this.shardId = shardId; this.allocationId = allocationId; this.indexSettings = indexSettings; @@ -161,6 +179,7 @@ public EngineConfig(ShardId shardId, String allocationId, ThreadPool threadPool, this.indexSort = indexSort; this.circuitBreakerService = circuitBreakerService; this.globalCheckpointSupplier = globalCheckpointSupplier; + this.retentionLeasesSupplier = Objects.requireNonNull(retentionLeasesSupplier); this.primaryTermSupplier = primaryTermSupplier; this.tombstoneDocSupplier = tombstoneDocSupplier; } diff --git a/server/src/main/java/org/elasticsearch/index/engine/InternalEngine.java b/server/src/main/java/org/elasticsearch/index/engine/InternalEngine.java index a295fbf3336b1..d0e55fc13eeda 100644 --- a/server/src/main/java/org/elasticsearch/index/engine/InternalEngine.java +++ b/server/src/main/java/org/elasticsearch/index/engine/InternalEngine.java @@ -276,8 +276,11 @@ private SoftDeletesPolicy newSoftDeletesPolicy() throws IOException { } else { lastMinRetainedSeqNo = Long.parseLong(commitUserData.get(SequenceNumbers.MAX_SEQ_NO)) + 1; } - return new SoftDeletesPolicy(translog::getLastSyncedGlobalCheckpoint, lastMinRetainedSeqNo, - engineConfig.getIndexSettings().getSoftDeleteRetentionOperations()); + return new SoftDeletesPolicy( +
translog::getLastSyncedGlobalCheckpoint, + lastMinRetainedSeqNo, + engineConfig.getIndexSettings().getSoftDeleteRetentionOperations(), + engineConfig.retentionLeasesSupplier()); } /** diff --git a/server/src/main/java/org/elasticsearch/index/engine/RecoverySourcePruneMergePolicy.java b/server/src/main/java/org/elasticsearch/index/engine/RecoverySourcePruneMergePolicy.java index 7faed37b2fd36..42276f4ca2108 100644 --- a/server/src/main/java/org/elasticsearch/index/engine/RecoverySourcePruneMergePolicy.java +++ b/server/src/main/java/org/elasticsearch/index/engine/RecoverySourcePruneMergePolicy.java @@ -33,11 +33,8 @@ import org.apache.lucene.index.SortedNumericDocValues; import org.apache.lucene.index.SortedSetDocValues; import org.apache.lucene.index.StoredFieldVisitor; -import org.apache.lucene.search.BooleanClause; -import org.apache.lucene.search.BooleanQuery; import org.apache.lucene.search.ConjunctionDISI; import org.apache.lucene.search.DocIdSetIterator; -import org.apache.lucene.search.DocValuesFieldExistsQuery; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.Query; import org.apache.lucene.search.ScoreMode; @@ -68,15 +65,18 @@ static CodecReader wrapReader(String recoverySourceField, CodecReader reader, Supplier<Query> retainSourceQuerySupplier) if (recoverySource == null || recoverySource.nextDoc() == DocIdSetIterator.NO_MORE_DOCS) { return reader; // early terminate - nothing to do here since none of the docs has a recovery source anymore. } - BooleanQuery.Builder builder = new BooleanQuery.Builder(); - builder.add(new DocValuesFieldExistsQuery(recoverySourceField), BooleanClause.Occur.FILTER); - builder.add(retainSourceQuerySupplier.get(), BooleanClause.Occur.FILTER); IndexSearcher s = new IndexSearcher(reader); s.setQueryCache(null); - Weight weight = s.createWeight(s.rewrite(builder.build()), ScoreMode.COMPLETE_NO_SCORES, 1.0f); + Weight weight = s.createWeight(s.rewrite(retainSourceQuerySupplier.get()), ScoreMode.COMPLETE_NO_SCORES, 1.0f); Scorer scorer = weight.scorer(reader.getContext()); if (scorer != null) { - return new SourcePruningFilterCodecReader(recoverySourceField, reader, BitSet.of(scorer.iterator(), reader.maxDoc())); + BitSet recoverySourceToKeep = BitSet.of(scorer.iterator(), reader.maxDoc()); + // calculating the cardinality is significantly cheaper than needlessly giving up the bulk-merging we might otherwise do; + // if retention is high we keep most of the recovery source and can return the reader unchanged + if (recoverySourceToKeep.cardinality() == reader.maxDoc()) { + return reader; // keep all source + } + return new SourcePruningFilterCodecReader(recoverySourceField, reader, recoverySourceToKeep); } else { return new SourcePruningFilterCodecReader(recoverySourceField, reader, null); } diff --git a/server/src/main/java/org/elasticsearch/index/engine/SoftDeletesPolicy.java b/server/src/main/java/org/elasticsearch/index/engine/SoftDeletesPolicy.java index af2ded8c46620..c957902d8df77 100644 --- a/server/src/main/java/org/elasticsearch/index/engine/SoftDeletesPolicy.java +++ b/server/src/main/java/org/elasticsearch/index/engine/SoftDeletesPolicy.java @@ -23,11 +23,15 @@ import org.apache.lucene.search.Query; import org.elasticsearch.common.lease.Releasable; import org.elasticsearch.index.mapper.SeqNoFieldMapper; +import org.elasticsearch.index.seqno.RetentionLease; import org.elasticsearch.index.seqno.SequenceNumbers; import org.elasticsearch.index.translog.Translog; +import java.util.Collection; +import java.util.Objects; import java.util.concurrent.atomic.AtomicBoolean; import java.util.function.LongSupplier; +import
java.util.function.Supplier; /** * A policy that controls how many soft-deleted documents should be retained for peer recovery and for querying history changes. @@ -41,11 +45,18 @@ final class SoftDeletesPolicy { private long retentionOperations; // The min seq_no value that is retained - ops after this seq# should exist in the Lucene index. private long minRetainedSeqNo; + // provides the retention leases used to calculate the minimum sequence number to retain + private final Supplier<Collection<RetentionLease>> retentionLeasesSupplier; - SoftDeletesPolicy(LongSupplier globalCheckpointSupplier, long minRetainedSeqNo, long retentionOperations) { + SoftDeletesPolicy( + final LongSupplier globalCheckpointSupplier, + final long minRetainedSeqNo, + final long retentionOperations, + final Supplier<Collection<RetentionLease>> retentionLeasesSupplier) { this.globalCheckpointSupplier = globalCheckpointSupplier; this.retentionOperations = retentionOperations; this.minRetainedSeqNo = minRetainedSeqNo; + this.retentionLeasesSupplier = Objects.requireNonNull(retentionLeasesSupplier); this.localCheckpointOfSafeCommit = SequenceNumbers.NO_OPS_PERFORMED; this.retentionLockCount = 0; } @@ -97,14 +108,35 @@ private synchronized void releaseRetentionLock() { synchronized long getMinRetainedSeqNo() { // Do not advance if the retention lock is held if (retentionLockCount == 0) { - // This policy retains operations for two purposes: peer-recovery and querying changes history. - // - Peer-recovery is driven by the local checkpoint of the safe commit. In peer-recovery, the primary transfers a safe commit, - // then sends ops after the local checkpoint of that commit. This requires keeping all ops after localCheckpointOfSafeCommit; - // - Changes APIs are driven the combination of the global checkpoint and retention ops. Here we prefer using the global - // checkpoint instead of max_seqno because only operations up to the global checkpoint are exposed in the the changes APIs. - final long minSeqNoForQueryingChanges = globalCheckpointSupplier.getAsLong() - retentionOperations; + /* + * This policy retains operations for two purposes: peer-recovery and querying changes history. + * - Peer-recovery is driven by the local checkpoint of the safe commit. In peer-recovery, the primary transfers a safe commit, + * then sends operations after the local checkpoint of that commit. This requires keeping all ops after + * localCheckpointOfSafeCommit. + * - Changes APIs are driven by a combination of the global checkpoint, retention operations, and retention leases. Here we + * prefer using the global checkpoint instead of the maximum sequence number because only operations up to the global + * checkpoint are exposed in the changes APIs. + */ + + // calculate the minimum sequence number to retain based on retention leases + final long minimumRetainingSequenceNumber = retentionLeasesSupplier + .get() + .stream() + .mapToLong(RetentionLease::retainingSequenceNumber) + .min() + .orElse(Long.MAX_VALUE); + /* + * The minimum sequence number to retain is the minimum of the minimum based on retention leases, and the number of operations + * below the global checkpoint to retain (index.soft_deletes.retention.operations). + */ + final long minSeqNoForQueryingChanges = + Math.min(globalCheckpointSupplier.getAsLong() - retentionOperations, minimumRetainingSequenceNumber); final long minSeqNoToRetain = Math.min(minSeqNoForQueryingChanges, localCheckpointOfSafeCommit) + 1; - // This can go backward as the retentionOperations value can be changed in settings.
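To make the arithmetic in getMinRetainedSeqNo concrete, here is a standalone worked sketch of the new rule (simplified and outside the engine; the method and variable names are illustrative):

import java.util.stream.LongStream;

final class RetentionMathSketch {
    // min seq# to retain = min(min(globalCheckpoint - retentionOps, min lease retaining seq#),
    //                          localCheckpointOfSafeCommit) + 1
    static long minSeqNoToRetain(final long globalCheckpoint,
                                 final long retentionOperations,
                                 final long localCheckpointOfSafeCommit,
                                 final long... leaseRetainingSeqNos) {
        final long minimumRetainingSequenceNumber =
            LongStream.of(leaseRetainingSeqNos).min().orElse(Long.MAX_VALUE);
        final long minSeqNoForQueryingChanges =
            Math.min(globalCheckpoint - retentionOperations, minimumRetainingSequenceNumber);
        return Math.min(minSeqNoForQueryingChanges, localCheckpointOfSafeCommit) + 1;
    }

    public static void main(final String[] args) {
        // global checkpoint 1000, retain 200 ops, safe-commit local checkpoint 950,
        // one lease retaining seq# 700: the lease wins, so 701 and above are retained
        System.out.println(minSeqNoToRetain(1000, 200, 950, 700)); // prints 701
    }
}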
+ + /* + * We take the maximum because minSeqNoToRetain can go backward, either when the retention operations value is changed in settings + * or when a lease is added with a retaining sequence number lower than previous retaining sequence numbers. + */ minRetainedSeqNo = Math.max(minRetainedSeqNo, minSeqNoToRetain); } return minRetainedSeqNo; @@ -117,4 +149,5 @@ synchronized long getMinRetainedSeqNo() { Query getRetentionQuery() { return LongPoint.newRangeQuery(SeqNoFieldMapper.NAME, getMinRetainedSeqNo(), Long.MAX_VALUE); } + } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/DocumentMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/DocumentMapper.java index cb962609fb436..058cf68e8e1f4 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/DocumentMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/DocumentMapper.java @@ -265,13 +265,13 @@ public ParsedDocument parse(SourceToParse source) throws MapperParsingException } public ParsedDocument createDeleteTombstoneDoc(String index, String type, String id) throws MapperParsingException { - final SourceToParse emptySource = SourceToParse.source(index, type, id, new BytesArray("{}"), XContentType.JSON); + final SourceToParse emptySource = new SourceToParse(index, type, id, new BytesArray("{}"), XContentType.JSON); return documentParser.parseDocument(emptySource, deleteTombstoneMetadataFieldMappers).toTombstone(); } public ParsedDocument createNoopTombstoneDoc(String index, String reason) throws MapperParsingException { final String id = ""; // _id won't be used. - final SourceToParse sourceToParse = SourceToParse.source(index, type, id, new BytesArray("{}"), XContentType.JSON); + final SourceToParse sourceToParse = new SourceToParse(index, type, id, new BytesArray("{}"), XContentType.JSON); final ParsedDocument parsedDoc = documentParser.parseDocument(sourceToParse, noopTombstoneMetadataFieldMappers).toTombstone(); // Store the reason of a noop as a raw string in the _source field final BytesRef byteRef = new BytesRef(reason); diff --git a/server/src/main/java/org/elasticsearch/index/mapper/DocumentMapperParser.java b/server/src/main/java/org/elasticsearch/index/mapper/DocumentMapperParser.java index 15faa70456c2a..e63d5a279f3cd 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/DocumentMapperParser.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/DocumentMapperParser.java @@ -172,12 +172,29 @@ private Tuple<String, Map<String, Object>> extractMapping(String type, String so return extractMapping(type, root); } + /** + * Given an optional type name and mapping definition, returns the type and a normalized form of the mappings. + * + * The provided mapping definition may or may not contain the type name as the root key in the map. This method + * attempts to unwrap the mappings, so that they no longer contain a type name at the root. If no type name can + * be found, either through the 'type' parameter or by examining the provided mappings, then an exception will be + * thrown. + * + * @param type An optional type name. + * @param root The mapping definition. + * + * @return A tuple of the form (type, normalized mappings).
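The normalization described in the javadoc above amounts to peeling off a root-level type key when one is present. A hypothetical illustration of that unwrapping, not the parser itself:

import java.util.HashMap;
import java.util.Map;

final class MappingUnwrapSketch {
    // {"_doc": {"properties": {...}}} -> {"properties": {...}}
    // {"properties": {...}} with type "_doc" passed in -> unchanged
    @SuppressWarnings("unchecked")
    static Map<String, Object> unwrap(final String type, final Map<String, Object> root) {
        final Object nested = root.get(type);
        if (nested instanceof Map) {
            return (Map<String, Object>) nested;
        }
        return root; // already normalized, no type wrapper at the root
    }

    public static void main(final String[] args) {
        final Map<String, Object> properties = new HashMap<>();
        final Map<String, Object> wrapped = new HashMap<>();
        wrapped.put("_doc", properties);
        System.out.println(unwrap("_doc", wrapped) == properties); // true
    }
}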
+ */ @SuppressWarnings({"unchecked"}) private Tuple<String, Map<String, Object>> extractMapping(String type, Map<String, Object> root) throws MapperParsingException { if (root.size() == 0) { - // if we don't have any keys throw an exception - throw new MapperParsingException("malformed mapping no root object found"); + if (type != null) { + return new Tuple<>(type, root); + } else { + throw new MapperParsingException("malformed mapping, no type name found"); + } } + String rootName = root.keySet().iterator().next(); Tuple<String, Map<String, Object>> mapping; if (type == null || type.equals(rootName)) { diff --git a/server/src/main/java/org/elasticsearch/index/mapper/GeoShapeFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/GeoShapeFieldMapper.java index 0e1413ee27765..486ff0bbe4a70 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/GeoShapeFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/GeoShapeFieldMapper.java @@ -19,7 +19,7 @@ package org.elasticsearch.index.mapper; import org.apache.lucene.document.Field; -import org.apache.lucene.document.XLatLonShape; +import org.apache.lucene.document.LatLonShape; import org.apache.lucene.geo.Line; import org.apache.lucene.geo.Polygon; import org.apache.lucene.geo.Rectangle; @@ -35,7 +35,7 @@ import java.util.Arrays; /** - * FieldMapper for indexing {@link XLatLonShape}s. + * FieldMapper for indexing {@link LatLonShape}s. *

* Currently Shapes can only be indexed and can only be queried using * {@link org.elasticsearch.index.query.GeoShapeQueryBuilder}, consequently @@ -97,7 +97,7 @@ public GeoShapeFieldType fieldType() { return (GeoShapeFieldType) super.fieldType(); } - /** parsing logic for {@link XLatLonShape} indexing */ + /** parsing logic for {@link LatLonShape} indexing */ @Override public void parse(ParseContext context) throws IOException { try { @@ -122,35 +122,35 @@ public void parse(ParseContext context) throws IOException { private void indexShape(ParseContext context, Object luceneShape) { if (luceneShape instanceof GeoPoint) { GeoPoint pt = (GeoPoint) luceneShape; - indexFields(context, XLatLonShape.createIndexableFields(name(), pt.lat(), pt.lon())); + indexFields(context, LatLonShape.createIndexableFields(name(), pt.lat(), pt.lon())); } else if (luceneShape instanceof double[]) { double[] pt = (double[]) luceneShape; - indexFields(context, XLatLonShape.createIndexableFields(name(), pt[1], pt[0])); + indexFields(context, LatLonShape.createIndexableFields(name(), pt[1], pt[0])); } else if (luceneShape instanceof Line) { - indexFields(context, XLatLonShape.createIndexableFields(name(), (Line)luceneShape)); + indexFields(context, LatLonShape.createIndexableFields(name(), (Line)luceneShape)); } else if (luceneShape instanceof Polygon) { - indexFields(context, XLatLonShape.createIndexableFields(name(), (Polygon) luceneShape)); + indexFields(context, LatLonShape.createIndexableFields(name(), (Polygon) luceneShape)); } else if (luceneShape instanceof double[][]) { double[][] pts = (double[][])luceneShape; for (int i = 0; i < pts.length; ++i) { - indexFields(context, XLatLonShape.createIndexableFields(name(), pts[i][1], pts[i][0])); + indexFields(context, LatLonShape.createIndexableFields(name(), pts[i][1], pts[i][0])); } } else if (luceneShape instanceof Line[]) { Line[] lines = (Line[]) luceneShape; for (int i = 0; i < lines.length; ++i) { - indexFields(context, XLatLonShape.createIndexableFields(name(), lines[i])); + indexFields(context, LatLonShape.createIndexableFields(name(), lines[i])); } } else if (luceneShape instanceof Polygon[]) { Polygon[] polys = (Polygon[]) luceneShape; for (int i = 0; i < polys.length; ++i) { - indexFields(context, XLatLonShape.createIndexableFields(name(), polys[i])); + indexFields(context, LatLonShape.createIndexableFields(name(), polys[i])); } } else if (luceneShape instanceof Rectangle) { // index rectangle as a polygon Rectangle r = (Rectangle) luceneShape; Polygon p = new Polygon(new double[]{r.minLat, r.minLat, r.maxLat, r.maxLat, r.minLat}, new double[]{r.minLon, r.maxLon, r.maxLon, r.minLon, r.minLon}); - indexFields(context, XLatLonShape.createIndexableFields(name(), p)); + indexFields(context, LatLonShape.createIndexableFields(name(), p)); } else if (luceneShape instanceof Object[]) { // recurse to index geometry collection for (Object o : (Object[])luceneShape) { diff --git a/server/src/main/java/org/elasticsearch/index/mapper/SourceToParse.java b/server/src/main/java/org/elasticsearch/index/mapper/SourceToParse.java index 0130207c0a78e..7cb3241d61f70 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/SourceToParse.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/SourceToParse.java @@ -21,17 +21,13 @@ import java.util.Objects; +import org.elasticsearch.common.Nullable; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.xcontent.XContentType; public 
class SourceToParse { - public static SourceToParse source(String index, String type, String id, BytesReference source, - XContentType contentType) { - return new SourceToParse(index, type, id, source, contentType); - } - private final BytesReference source; private final String index; @@ -40,11 +36,11 @@ public static SourceToParse source(String index, String type, String id, BytesRe private final String id; - private String routing; + private final @Nullable String routing; - private XContentType xContentType; + private final XContentType xContentType; - private SourceToParse(String index, String type, String id, BytesReference source, XContentType xContentType) { + public SourceToParse(String index, String type, String id, BytesReference source, XContentType xContentType, @Nullable String routing) { this.index = Objects.requireNonNull(index); this.type = Objects.requireNonNull(type); this.id = Objects.requireNonNull(id); @@ -52,6 +48,11 @@ private SourceToParse(String index, String type, String id, BytesReference sourc // so, we might as well do it here, and improve the performance of working with direct byte arrays this.source = new BytesArray(Objects.requireNonNull(source).toBytesRef()); this.xContentType = Objects.requireNonNull(xContentType); + this.routing = routing; + } + + public SourceToParse(String index, String type, String id, BytesReference source, XContentType xContentType) { + this(index, type, id, source, xContentType, null); } public BytesReference source() { @@ -70,7 +71,7 @@ public String id() { return this.id; } - public String routing() { + public @Nullable String routing() { return this.routing; } @@ -78,11 +79,6 @@ public XContentType getXContentType() { return this.xContentType; } - public SourceToParse routing(String routing) { - this.routing = routing; - return this; - } - public enum Origin { PRIMARY, REPLICA diff --git a/server/src/main/java/org/elasticsearch/index/mapper/TextFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/TextFieldMapper.java index 8a9c141b0afb5..1b25c7b9866f7 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/TextFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/TextFieldMapper.java @@ -33,6 +33,8 @@ import org.apache.lucene.index.IndexableField; import org.apache.lucene.index.Term; import org.apache.lucene.search.AutomatonQuery; +import org.apache.lucene.search.BooleanClause; +import org.apache.lucene.search.BooleanQuery; import org.apache.lucene.search.ConstantScoreQuery; import org.apache.lucene.search.MultiPhraseQuery; import org.apache.lucene.search.MultiTermQuery; @@ -156,7 +158,7 @@ public Builder indexPrefixes(int minChars, int maxChars) { if (maxChars >= 20) { throw new IllegalArgumentException("max_chars [" + maxChars + "] must be less than 20"); } - this.prefixFieldType = new PrefixFieldType(name() + "._index_prefix", minChars, maxChars); + this.prefixFieldType = new PrefixFieldType(name(), name() + "._index_prefix", minChars, maxChars); fieldType().setPrefixFieldType(this.prefixFieldType); return this; } @@ -347,14 +349,16 @@ static final class PrefixFieldType extends StringFieldType { final int minChars; final int maxChars; + final String parentField; - PrefixFieldType(String name, int minChars, int maxChars) { + PrefixFieldType(String parentField, String name, int minChars, int maxChars) { setTokenized(true); setOmitNorms(true); setIndexOptions(IndexOptions.DOCS); setName(name); this.minChars = minChars; this.maxChars = maxChars; + this.parentField = parentField; } 
PrefixFieldType setAnalyzer(NamedAnalyzer delegate) { @@ -387,12 +391,15 @@ public Query prefixQuery(String value, MultiTermQuery.RewriteMethod method, Quer Automaton automaton = Operations.concatenate(automata); AutomatonQuery query = new AutomatonQuery(new Term(name(), value + "*"), automaton); query.setRewriteMethod(method); - return query; + return new BooleanQuery.Builder() + .add(query, BooleanClause.Occur.SHOULD) + .add(new TermQuery(new Term(parentField, value)), BooleanClause.Occur.SHOULD) + .build(); } @Override public PrefixFieldType clone() { - return new PrefixFieldType(name(), minChars, maxChars); + return new PrefixFieldType(parentField, name(), minChars, maxChars); } @Override diff --git a/server/src/main/java/org/elasticsearch/index/query/GeoShapeQueryBuilder.java b/server/src/main/java/org/elasticsearch/index/query/GeoShapeQueryBuilder.java index 96bd77725bd2d..f8ffcfdc05bcc 100644 --- a/server/src/main/java/org/elasticsearch/index/query/GeoShapeQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/GeoShapeQueryBuilder.java @@ -19,7 +19,8 @@ package org.elasticsearch.index.query; -import org.apache.lucene.document.XLatLonShape; +import org.apache.logging.log4j.LogManager; +import org.apache.lucene.document.LatLonShape; import org.apache.lucene.geo.Line; import org.apache.lucene.geo.Polygon; import org.apache.lucene.geo.Rectangle; @@ -38,6 +39,7 @@ import org.elasticsearch.action.get.GetRequest; import org.elasticsearch.action.get.GetResponse; import org.elasticsearch.client.Client; +import org.elasticsearch.common.Nullable; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.geo.GeoPoint; @@ -48,6 +50,7 @@ import org.elasticsearch.common.geo.parsers.ShapeParser; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.xcontent.LoggingDeprecationHandler; import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -66,6 +69,10 @@ */ public class GeoShapeQueryBuilder extends AbstractQueryBuilder { public static final String NAME = "geo_shape"; + private static final DeprecationLogger deprecationLogger = new DeprecationLogger( + LogManager.getLogger(GeoShapeQueryBuilder.class)); + static final String TYPES_DEPRECATION_MESSAGE = "[types removal] Types are deprecated in [geo_shape] queries. 
" + + "The type should no longer be specified in the [indexed_shape] section."; public static final String DEFAULT_SHAPE_INDEX_NAME = "shapes"; public static final String DEFAULT_SHAPE_FIELD_NAME = "shape"; @@ -119,6 +126,19 @@ public GeoShapeQueryBuilder(String fieldName, ShapeBuilder shape) { this(fieldName, shape, null, null); } + /** + * Creates a new GeoShapeQueryBuilder whose Query will be against the given + * field name and will use the Shape found with the given ID + * + * @param fieldName + * Name of the field that will be filtered + * @param indexedShapeId + * ID of the indexed Shape that will be used in the Query + */ + public GeoShapeQueryBuilder(String fieldName, String indexedShapeId) { + this(fieldName, (ShapeBuilder) null, indexedShapeId, null); + } + /** * Creates a new GeoShapeQueryBuilder whose Query will be against the given * field name and will use the Shape found with the given ID in the given @@ -130,20 +150,19 @@ public GeoShapeQueryBuilder(String fieldName, ShapeBuilder shape) { * ID of the indexed Shape that will be used in the Query * @param indexedShapeType * Index type of the indexed Shapes + * @deprecated use {@link #GeoShapeQueryBuilder(String, String)} instead */ + @Deprecated public GeoShapeQueryBuilder(String fieldName, String indexedShapeId, String indexedShapeType) { this(fieldName, (ShapeBuilder) null, indexedShapeId, indexedShapeType); } - private GeoShapeQueryBuilder(String fieldName, ShapeBuilder shape, String indexedShapeId, String indexedShapeType) { + private GeoShapeQueryBuilder(String fieldName, ShapeBuilder shape, String indexedShapeId, @Nullable String indexedShapeType) { if (fieldName == null) { throw new IllegalArgumentException("fieldName is required"); } if (shape == null && indexedShapeId == null) { - throw new IllegalArgumentException("either shapeBytes or indexedShapeId and indexedShapeType are required"); - } - if (indexedShapeId != null && indexedShapeType == null) { - throw new IllegalArgumentException("indexedShapeType is required if indexedShapeId is specified"); + throw new IllegalArgumentException("either shape or indexedShapeId is required"); } this.fieldName = fieldName; this.shape = shape; @@ -152,7 +171,8 @@ private GeoShapeQueryBuilder(String fieldName, ShapeBuilder shape, String indexe this.supplier = null; } - private GeoShapeQueryBuilder(String fieldName, Supplier supplier, String indexedShapeId, String indexedShapeType) { + private GeoShapeQueryBuilder(String fieldName, Supplier supplier, String indexedShapeId, + @Nullable String indexedShapeType) { this.fieldName = fieldName; this.shape = null; this.supplier = supplier; @@ -238,7 +258,10 @@ public String indexedShapeId() { /** * @return the document type of the indexed Shape that will be used in the * Query + * + * @deprecated Types are in the process of being removed. 
*/ + @Deprecated public String indexedShapeType() { return indexedShapeType; } @@ -429,16 +452,16 @@ private Query getVectorQuery(QueryShardContext context, ShapeBuilder queryShapeB private Query getVectorQueryFromShape(QueryShardContext context, Object queryShape) { Query geoQuery; if (queryShape instanceof Line[]) { - geoQuery = XLatLonShape.newLineQuery(fieldName(), relation.getLuceneRelation(), (Line[]) queryShape); + geoQuery = LatLonShape.newLineQuery(fieldName(), relation.getLuceneRelation(), (Line[]) queryShape); } else if (queryShape instanceof Polygon[]) { - geoQuery = XLatLonShape.newPolygonQuery(fieldName(), relation.getLuceneRelation(), (Polygon[]) queryShape); + geoQuery = LatLonShape.newPolygonQuery(fieldName(), relation.getLuceneRelation(), (Polygon[]) queryShape); } else if (queryShape instanceof Line) { - geoQuery = XLatLonShape.newLineQuery(fieldName(), relation.getLuceneRelation(), (Line) queryShape); + geoQuery = LatLonShape.newLineQuery(fieldName(), relation.getLuceneRelation(), (Line) queryShape); } else if (queryShape instanceof Polygon) { - geoQuery = XLatLonShape.newPolygonQuery(fieldName(), relation.getLuceneRelation(), (Polygon) queryShape); + geoQuery = LatLonShape.newPolygonQuery(fieldName(), relation.getLuceneRelation(), (Polygon) queryShape); } else if (queryShape instanceof Rectangle) { Rectangle r = (Rectangle) queryShape; - geoQuery = XLatLonShape.newBoxQuery(fieldName(), relation.getLuceneRelation(), + geoQuery = LatLonShape.newBoxQuery(fieldName(), relation.getLuceneRelation(), r.minLat, r.maxLat, r.minLon, r.maxLon); } else if (queryShape instanceof double[][]) { // note: we decompose point queries into a bounding box query with min values == max values @@ -457,7 +480,7 @@ private Query getVectorQueryFromShape(QueryShardContext context, Object querySha + "But found length " + pt.length + " for field [" + fieldName + "]"); } } - return XLatLonShape.newBoxQuery(fieldName, relation.getLuceneRelation(), pt[1], pt[1], pt[0], pt[0]); + return LatLonShape.newBoxQuery(fieldName, relation.getLuceneRelation(), pt[1], pt[1], pt[0], pt[0]); } else if (queryShape instanceof Object[]) { geoQuery = createGeometryCollectionQuery(context, (Object[]) queryShape); } else { @@ -566,8 +589,10 @@ protected void doXContent(XContentBuilder builder, Params params) throws IOExcep shape.toXContent(builder, params); } else { builder.startObject(INDEXED_SHAPE_FIELD.getPreferredName()) - .field(SHAPE_ID_FIELD.getPreferredName(), indexedShapeId) - .field(SHAPE_TYPE_FIELD.getPreferredName(), indexedShapeType); + .field(SHAPE_ID_FIELD.getPreferredName(), indexedShapeId); + if (indexedShapeType != null) { + builder.field(SHAPE_TYPE_FIELD.getPreferredName(), indexedShapeType); + } if (indexedShapeIndex != null) { builder.field(SHAPE_INDEX_FIELD.getPreferredName(), indexedShapeIndex); } @@ -677,6 +702,11 @@ public static GeoShapeQueryBuilder fromXContent(XContentParser parser) throws IO } } GeoShapeQueryBuilder builder; + if (type != null) { + deprecationLogger.deprecatedAndMaybeLog( + "geo_share_query_with_types", TYPES_DEPRECATION_MESSAGE); + } + if (shape != null) { builder = new GeoShapeQueryBuilder(fieldName, shape); } else { @@ -739,7 +769,12 @@ protected QueryBuilder doRewrite(QueryRewriteContext queryRewriteContext) throws } else if (this.shape == null) { SetOnce supplier = new SetOnce<>(); queryRewriteContext.registerAsyncAction((client, listener) -> { - GetRequest getRequest = new GetRequest(indexedShapeIndex, indexedShapeType, indexedShapeId); + GetRequest getRequest; + if 
(indexedShapeType == null) { + getRequest = new GetRequest(indexedShapeIndex, indexedShapeId); + } else { + getRequest = new GetRequest(indexedShapeIndex, indexedShapeType, indexedShapeId); + } getRequest.routing(indexedShapeRouting); fetch(client, getRequest, indexedShapePath, ActionListener.wrap(builder-> { supplier.set(builder); diff --git a/server/src/main/java/org/elasticsearch/index/query/IdsQueryBuilder.java b/server/src/main/java/org/elasticsearch/index/query/IdsQueryBuilder.java index 7cbd38f3398fd..358a2fccff108 100644 --- a/server/src/main/java/org/elasticsearch/index/query/IdsQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/IdsQueryBuilder.java @@ -19,6 +19,7 @@ package org.elasticsearch.index.query; +import org.apache.logging.log4j.LogManager; import org.apache.lucene.search.MatchNoDocsQuery; import org.apache.lucene.search.Query; import org.elasticsearch.cluster.metadata.MetaData; @@ -27,6 +28,7 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.lucene.search.Queries; import org.elasticsearch.common.xcontent.ObjectParser; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -51,6 +53,9 @@ */ public class IdsQueryBuilder extends AbstractQueryBuilder { public static final String NAME = "ids"; + private static final DeprecationLogger deprecationLogger = new DeprecationLogger( + LogManager.getLogger(IdsQueryBuilder.class)); + static final String TYPES_DEPRECATION_MESSAGE = "[types removal] Types are deprecated in [ids] queries."; private static final ParseField TYPE_FIELD = new ParseField("type"); private static final ParseField VALUES_FIELD = new ParseField("values"); @@ -83,8 +88,10 @@ protected void doWriteTo(StreamOutput out) throws IOException { /** * Add types to query + * + * @deprecated Types are in the process of being removed, prefer to filter on a field instead. */ - // TODO: Remove + @Deprecated public IdsQueryBuilder types(String... types) { if (types == null) { throw new IllegalArgumentException("[" + NAME + "] types cannot be null"); @@ -95,7 +102,10 @@ public IdsQueryBuilder types(String... types) { /** * Returns the types used in this query + * + * @deprecated Types are in the process of being removed, prefer to filter on a field instead. 
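A hedged before/after for the ids query under this deprecation; the ids alone are now sufficient:

import org.elasticsearch.index.query.IdsQueryBuilder;

final class TypelessIdsQueryExample {
    static IdsQueryBuilder buildQuery() {
        // formerly: new IdsQueryBuilder().types("_doc").addIds("1", "2");
        // the types(...) call is deprecated and can simply be dropped
        return new IdsQueryBuilder().addIds("1", "2");
    }
}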
*/ + @Deprecated public String[] types() { return this.types; } @@ -121,7 +131,9 @@ public Set ids() { @Override protected void doXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(NAME); - builder.array(TYPE_FIELD.getPreferredName(), types); + if (types.length > 0) { + builder.array(TYPE_FIELD.getPreferredName(), types); + } builder.startArray(VALUES_FIELD.getPreferredName()); for (String value : ids) { builder.value(value); @@ -142,7 +154,11 @@ protected void doXContent(XContentBuilder builder, Params params) throws IOExcep public static IdsQueryBuilder fromXContent(XContentParser parser) { try { - return PARSER.apply(parser, null); + IdsQueryBuilder builder = PARSER.apply(parser, null); + if (builder.types().length > 0) { + deprecationLogger.deprecatedAndMaybeLog("ids_query_with_types", TYPES_DEPRECATION_MESSAGE); + } + return builder; } catch (IllegalArgumentException e) { throw new ParsingException(parser.getTokenLocation(), e.getMessage(), e); } diff --git a/server/src/main/java/org/elasticsearch/index/query/MoreLikeThisQueryBuilder.java b/server/src/main/java/org/elasticsearch/index/query/MoreLikeThisQueryBuilder.java index 6f87ac0fda79b..b90a1e60ffa0b 100644 --- a/server/src/main/java/org/elasticsearch/index/query/MoreLikeThisQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/MoreLikeThisQueryBuilder.java @@ -19,6 +19,7 @@ package org.elasticsearch.index.query; +import org.apache.logging.log4j.LogManager; import org.apache.lucene.analysis.Analyzer; import org.apache.lucene.index.Fields; import org.apache.lucene.search.BooleanClause; @@ -41,6 +42,7 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.lucene.search.MoreLikeThisQuery; import org.elasticsearch.common.lucene.search.XMoreLikeThis; import org.elasticsearch.common.lucene.uid.Versions; @@ -53,6 +55,7 @@ import org.elasticsearch.index.mapper.IdFieldMapper; import org.elasticsearch.index.mapper.KeywordFieldMapper.KeywordFieldType; import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.TextFieldMapper.TextFieldType; import java.io.IOException; @@ -66,6 +69,7 @@ import java.util.Objects; import java.util.Optional; import java.util.Set; +import java.util.stream.Stream; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; @@ -76,6 +80,11 @@ */ public class MoreLikeThisQueryBuilder extends AbstractQueryBuilder { public static final String NAME = "more_like_this"; + private static final DeprecationLogger deprecationLogger = new DeprecationLogger( + LogManager.getLogger(MoreLikeThisQueryBuilder.class)); + static final String TYPES_DEPRECATION_MESSAGE = "[types removal] Types are deprecated in [more_like_this] " + + "queries. 
The type should no longer be specified in the [like] and [unlike] sections."; + public static final int DEFAULT_MAX_QUERY_TERMS = XMoreLikeThis.DEFAULT_MAX_QUERY_TERMS; public static final int DEFAULT_MIN_TERM_FREQ = XMoreLikeThis.DEFAULT_MIN_TERM_FREQ; @@ -178,13 +187,45 @@ public Item() { this.versionType = copy.versionType; } + /** + * Constructor for a given item / document request + * + * @param index the index where the document is located + * @param id the id of the document + */ + public Item(@Nullable String index, String id) { + if (id == null) { + throw new IllegalArgumentException("Item requires id to be non-null"); + } + this.index = index; + this.id = id; + } + + /** + * Constructor for an artificial document request that is not present in the index. + * + * @param index the index to be used for parsing the doc + * @param doc the document specification + */ + public Item(@Nullable String index, XContentBuilder doc) { + if (doc == null) { + throw new IllegalArgumentException("Item requires doc to be non-null"); + } + this.index = index; + this.doc = BytesReference.bytes(doc); + this.xContentType = doc.contentType(); + } + /** * Constructor for a given item / document request * * @param index the index where the document is located * @param type the type of the document * @param id and its id + * + * @deprecated Types are in the process of being removed, use {@link #Item(String, String)} instead. */ + @Deprecated public Item(@Nullable String index, @Nullable String type, String id) { if (id == null) { throw new IllegalArgumentException("Item requires id to be non-null"); @@ -200,7 +241,10 @@ public Item(@Nullable String index, @Nullable String type, String id) { * @param index the index to be used for parsing the doc * @param type the type to be used for parsing the doc * @param doc the document specification + * + * @deprecated Types are in the process of being removed, use {@link #Item(String, XContentBuilder)} instead. */ + @Deprecated public Item(@Nullable String index, @Nullable String type, XContentBuilder doc) { if (doc == null) { throw new IllegalArgumentException("Item requires doc to be non-null"); @@ -257,10 +301,18 @@ public Item index(String index) { return this; } + /** + * @deprecated Types are in the process of being removed. + */ + @Deprecated public String type() { return type; } + /** + * @deprecated Types are in the process of being removed.
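A sketch of the migration the new Item constructors enable, with like items identified by index and id only (field names here are illustrative):

import org.elasticsearch.index.query.MoreLikeThisQueryBuilder;
import org.elasticsearch.index.query.MoreLikeThisQueryBuilder.Item;

final class TypelessMoreLikeThisExample {
    static MoreLikeThisQueryBuilder buildQuery() {
        // typeless items; the (index, type, id) constructor is now deprecated
        final Item[] likeItems = { new Item("docs", "1"), new Item("docs", "2") };
        return new MoreLikeThisQueryBuilder(new String[] { "title", "body" }, null, likeItems);
    }
}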
+ */ + @Deprecated public Item type(String type) { this.type = type; return this; @@ -912,9 +964,18 @@ public static MoreLikeThisQueryBuilder fromXContent(XContentParser parser) throw if (stopWords != null) { moreLikeThisQueryBuilder.stopWords(stopWords); } + + if (moreLikeThisQueryBuilder.isTypeless() == false) { + deprecationLogger.deprecatedAndMaybeLog("more_like_this_query_with_types", TYPES_DEPRECATION_MESSAGE); + } return moreLikeThisQueryBuilder; } + public boolean isTypeless() { + return Stream.concat(Arrays.stream(likeItems), Arrays.stream(unlikeItems)) + .allMatch(item -> item.type == null); + } + private static void parseLikeField(XContentParser parser, List texts, List items) throws IOException { if (parser.currentToken().isValue()) { texts.add(parser.text()); @@ -1065,12 +1126,7 @@ private static void setDefaultIndexTypeFields(QueryShardContext context, Item it item.index(context.index().getName()); } if (item.type() == null) { - if (context.queryTypes().size() > 1) { - throw new QueryShardException(context, - "ambiguous type for item with id: " + item.id() + " and index: " + item.index()); - } else { - item.type(context.queryTypes().iterator().next()); - } + item.type(MapperService.SINGLE_MAPPING_NAME); } // default fields if not present but don't override for artificial docs if ((item.fields() == null || item.fields().length == 0) && item.doc() == null) { diff --git a/server/src/main/java/org/elasticsearch/index/query/QueryBuilders.java b/server/src/main/java/org/elasticsearch/index/query/QueryBuilders.java index f5cf2d5da66be..5ac70781286a4 100644 --- a/server/src/main/java/org/elasticsearch/index/query/QueryBuilders.java +++ b/server/src/main/java/org/elasticsearch/index/query/QueryBuilders.java @@ -121,7 +121,10 @@ public static IdsQueryBuilder idsQuery() { * Constructs a query that will match only specific ids within types. * * @param types The mapping/doc type + * + * @deprecated Types are in the process of being removed, use {@link #idsQuery()} instead. */ + @Deprecated public static IdsQueryBuilder idsQuery(String... types) { return new IdsQueryBuilder().types(types); } @@ -646,6 +649,14 @@ public static GeoShapeQueryBuilder geoShapeQuery(String name, ShapeBuilder shape return new GeoShapeQueryBuilder(name, shape); } + public static GeoShapeQueryBuilder geoShapeQuery(String name, String indexedShapeId) { + return new GeoShapeQueryBuilder(name, indexedShapeId); + } + + /** + * @deprecated Types are in the process of being removed, use {@link #geoShapeQuery(String, String)} instead. + */ + @Deprecated public static GeoShapeQueryBuilder geoShapeQuery(String name, String indexedShapeId, String indexedShapeType) { return new GeoShapeQueryBuilder(name, indexedShapeId, indexedShapeType); } @@ -662,6 +673,16 @@ public static GeoShapeQueryBuilder geoIntersectionQuery(String name, ShapeBuilde return builder; } + public static GeoShapeQueryBuilder geoIntersectionQuery(String name, String indexedShapeId) { + GeoShapeQueryBuilder builder = geoShapeQuery(name, indexedShapeId); + builder.relation(ShapeRelation.INTERSECTS); + return builder; + } + + /** + * @deprecated Types are in the process of being removed, use {@link #geoIntersectionQuery(String, String)} instead. 
+ */ + @Deprecated public static GeoShapeQueryBuilder geoIntersectionQuery(String name, String indexedShapeId, String indexedShapeType) { GeoShapeQueryBuilder builder = geoShapeQuery(name, indexedShapeId, indexedShapeType); builder.relation(ShapeRelation.INTERSECTS); @@ -680,6 +701,16 @@ public static GeoShapeQueryBuilder geoWithinQuery(String name, ShapeBuilder shap return builder; } + public static GeoShapeQueryBuilder geoWithinQuery(String name, String indexedShapeId) { + GeoShapeQueryBuilder builder = geoShapeQuery(name, indexedShapeId); + builder.relation(ShapeRelation.WITHIN); + return builder; + } + + /** + * @deprecated Types are in the process of being removed, use {@link #geoWithinQuery(String, String)} instead. + */ + @Deprecated public static GeoShapeQueryBuilder geoWithinQuery(String name, String indexedShapeId, String indexedShapeType) { GeoShapeQueryBuilder builder = geoShapeQuery(name, indexedShapeId, indexedShapeType); builder.relation(ShapeRelation.WITHIN); @@ -698,6 +729,16 @@ public static GeoShapeQueryBuilder geoDisjointQuery(String name, ShapeBuilder sh return builder; } + public static GeoShapeQueryBuilder geoDisjointQuery(String name, String indexedShapeId) { + GeoShapeQueryBuilder builder = geoShapeQuery(name, indexedShapeId); + builder.relation(ShapeRelation.DISJOINT); + return builder; + } + + /** + * @deprecated Types are in the process of being removed, use {@link #geoDisjointQuery(String, String)} instead. + */ + @Deprecated public static GeoShapeQueryBuilder geoDisjointQuery(String name, String indexedShapeId, String indexedShapeType) { GeoShapeQueryBuilder builder = geoShapeQuery(name, indexedShapeId, indexedShapeType); builder.relation(ShapeRelation.DISJOINT); diff --git a/server/src/main/java/org/elasticsearch/index/query/QueryShardContext.java b/server/src/main/java/org/elasticsearch/index/query/QueryShardContext.java index f7f1d29f53098..c398fde04a2f6 100644 --- a/server/src/main/java/org/elasticsearch/index/query/QueryShardContext.java +++ b/server/src/main/java/org/elasticsearch/index/query/QueryShardContext.java @@ -75,8 +75,8 @@ public class QueryShardContext extends QueryRewriteContext { private static final DeprecationLogger deprecationLogger = new DeprecationLogger( LogManager.getLogger(QueryShardContext.class)); - static final String TYPES_DEPRECATION_MESSAGE = "[types removal] Using the _type field " + - "in queries is deprecated, prefer to filter on a field instead."; + public static final String TYPES_DEPRECATION_MESSAGE = "[types removal] Using the _type field " + + "in queries and aggregations is deprecated, prefer to use a field instead."; private final ScriptService scriptService; private final IndexSettings indexSettings; @@ -88,7 +88,7 @@ public class QueryShardContext extends QueryRewriteContext { private final IndexReader reader; private final String clusterAlias; private String[] types = Strings.EMPTY_ARRAY; - private boolean cachable = true; + private boolean cacheable = true; private final SetOnce frozen = new SetOnce<>(); private final Index fullyQualifiedIndex; @@ -333,7 +333,7 @@ public final void freezeContext() { * class says a request can be cached. 
*/ protected final void failIfFrozen() { - this.cachable = false; + this.cacheable = false; if (frozen.get() == Boolean.TRUE) { throw new IllegalArgumentException("features that prevent cachability are disabled on this context"); } else { @@ -354,10 +354,10 @@ public void executeAsyncActions(ActionListener listener) { } /** - * Returns true iff the result of the processed search request is cachable. Otherwise false + * Returns true iff the result of the processed search request is cacheable. Otherwise false */ - public final boolean isCachable() { - return cachable; + public final boolean isCacheable() { + return cacheable; } /** diff --git a/server/src/main/java/org/elasticsearch/index/query/TermsQueryBuilder.java b/server/src/main/java/org/elasticsearch/index/query/TermsQueryBuilder.java index a144cbf088a01..ae7bbae63018b 100644 --- a/server/src/main/java/org/elasticsearch/index/query/TermsQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/TermsQueryBuilder.java @@ -19,6 +19,7 @@ package org.elasticsearch.index.query; +import org.apache.logging.log4j.LogManager; import org.apache.lucene.search.Query; import org.apache.lucene.search.TermInSetQuery; import org.apache.lucene.util.BytesRef; @@ -34,6 +35,7 @@ import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.lucene.BytesRefs; import org.elasticsearch.common.lucene.search.Queries; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -63,6 +65,11 @@ public class TermsQueryBuilder extends AbstractQueryBuilder { public static final String NAME = "terms"; + private static final DeprecationLogger deprecationLogger = new DeprecationLogger( + LogManager.getLogger(TermsQueryBuilder.class)); + static final String TYPES_DEPRECATION_MESSAGE = "[types removal] Types are deprecated " + + "in [terms] lookup queries."; + private final String fieldName; private final List values; private final TermsLookup termsLookup; @@ -211,6 +218,10 @@ public TermsLookup termsLookup() { return this.termsLookup; } + public boolean isTypeless() { + return termsLookup == null || termsLookup.type() == null; + } + private static final Set> INTEGER_TYPES = new HashSet<>( Arrays.asList(Byte.class, Short.class, Integer.class, Long.class)); private static final Set> STRING_TYPES = new HashSet<>( @@ -391,9 +402,16 @@ public static TermsQueryBuilder fromXContent(XContentParser parser) throws IOExc throw new ParsingException(parser.getTokenLocation(), "[" + TermsQueryBuilder.NAME + "] query requires a field name, " + "followed by array of terms or a document lookup specification"); } - return new TermsQueryBuilder(fieldName, values, termsLookup) - .boost(boost) - .queryName(queryName); + + TermsQueryBuilder builder = new TermsQueryBuilder(fieldName, values, termsLookup) + .boost(boost) + .queryName(queryName); + + if (builder.isTypeless() == false) { + deprecationLogger.deprecatedAndMaybeLog("terms_lookup_with_types", TYPES_DEPRECATION_MESSAGE); + } + + return builder; } static List parseValues(XContentParser parser) throws IOException { @@ -442,8 +460,10 @@ protected Query doToQuery(QueryShardContext context) throws IOException { } private void fetch(TermsLookup termsLookup, Client client, ActionListener> actionListener) { - GetRequest getRequest = new GetRequest(termsLookup.index(), termsLookup.type(), termsLookup.id()) - 
.preference("_local").routing(termsLookup.routing()); + GetRequest getRequest = termsLookup.type() == null + ? new GetRequest(termsLookup.index(), termsLookup.id()) + : new GetRequest(termsLookup.index(), termsLookup.type(), termsLookup.id()); + getRequest.preference("_local").routing(termsLookup.routing()); client.get(getRequest, new ActionListener() { @Override public void onResponse(GetResponse getResponse) { diff --git a/server/src/main/java/org/elasticsearch/index/reindex/ReindexRequest.java b/server/src/main/java/org/elasticsearch/index/reindex/ReindexRequest.java index 52a1c89d4b3f5..cd93356bb3968 100644 --- a/server/src/main/java/org/elasticsearch/index/reindex/ReindexRequest.java +++ b/server/src/main/java/org/elasticsearch/index/reindex/ReindexRequest.java @@ -29,6 +29,7 @@ import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.index.VersionType; +import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.search.sort.SortOrder; import org.elasticsearch.tasks.TaskId; @@ -292,9 +293,13 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws builder.startObject("source"); if (remoteInfo != null) { builder.field("remote", remoteInfo); + builder.rawField("query", remoteInfo.getQuery().streamInput(), builder.contentType()); } builder.array("index", getSearchRequest().indices()); - builder.array("type", getSearchRequest().types()); + String[] types = getSearchRequest().types(); + if (types.length > 0) { + builder.array("type", types); + } getSearchRequest().source().innerToXContent(builder, params); builder.endObject(); } @@ -302,7 +307,8 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws // build destination builder.startObject("dest"); builder.field("index", getDestination().index()); - if (getDestination().type() != null) { + String type = getDestination().type(); + if (type != null && type.equals(MapperService.SINGLE_MAPPING_NAME) == false) { builder.field("type", getDestination().type()); } if (getDestination().routing() != null) { diff --git a/server/src/main/java/org/elasticsearch/index/reindex/RemoteInfo.java b/server/src/main/java/org/elasticsearch/index/reindex/RemoteInfo.java index e255b4db34e35..91b0879cfdebf 100644 --- a/server/src/main/java/org/elasticsearch/index/reindex/RemoteInfo.java +++ b/server/src/main/java/org/elasticsearch/index/reindex/RemoteInfo.java @@ -209,7 +209,6 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws } builder.field("socket_timeout", socketTimeout.getStringRep()); builder.field("connect_timeout", connectTimeout.getStringRep()); - builder.field("query", query); builder.endObject(); return builder; } diff --git a/server/src/main/java/org/elasticsearch/index/search/QueryStringQueryParser.java b/server/src/main/java/org/elasticsearch/index/search/QueryStringQueryParser.java index 84597d4d3383c..4974ef9277e9a 100644 --- a/server/src/main/java/org/elasticsearch/index/search/QueryStringQueryParser.java +++ b/server/src/main/java/org/elasticsearch/index/search/QueryStringQueryParser.java @@ -42,15 +42,14 @@ import org.apache.lucene.search.spans.SpanOrQuery; import org.apache.lucene.search.spans.SpanQuery; import org.apache.lucene.util.BytesRef; -import org.elasticsearch.core.internal.io.IOUtils; import org.elasticsearch.common.lucene.search.Queries; import org.elasticsearch.common.regex.Regex; import 
org.elasticsearch.common.unit.Fuzziness; +import org.elasticsearch.core.internal.io.IOUtils; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.mapper.FieldNamesFieldMapper; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.MapperService; -import org.elasticsearch.index.mapper.StringFieldType; import org.elasticsearch.index.query.ExistsQueryBuilder; import org.elasticsearch.index.query.MultiMatchQueryBuilder; import org.elasticsearch.index.query.QueryShardContext; @@ -507,7 +506,7 @@ private Query getPrefixQuerySingle(String field, String termStr) throws ParseExc } setAnalyzer(forceAnalyzer == null ? queryBuilder.context.getSearchAnalyzer(currentFieldType) : forceAnalyzer); Query query = null; - if (currentFieldType instanceof StringFieldType == false) { + if (currentFieldType.tokenized() == false) { query = currentFieldType.prefixQuery(termStr, getMultiTermRewriteMethod(), context); } else { query = getPossiblyAnalyzedPrefixQuery(currentFieldType.name(), termStr); @@ -525,7 +524,8 @@ private Query getPrefixQuerySingle(String field, String termStr) throws ParseExc private Query getPossiblyAnalyzedPrefixQuery(String field, String termStr) throws ParseException { if (analyzeWildcard == false) { - return super.getPrefixQuery(field, termStr); + return currentFieldType.prefixQuery(getAnalyzer().normalize(field, termStr).utf8ToString(), + getMultiTermRewriteMethod(), context); } List > tlist; // get Analyzer from superclass and tokenize the term @@ -568,7 +568,7 @@ private Query getPossiblyAnalyzedPrefixQuery(String field, String termStr) throw } if (tlist.size() == 1 && tlist.get(0).size() == 1) { - return super.getPrefixQuery(field, tlist.get(0).get(0)); + return currentFieldType.prefixQuery(tlist.get(0).get(0), getMultiTermRewriteMethod(), context); } // build a boolean query with prefix on the last position only. @@ -579,7 +579,7 @@ private Query getPossiblyAnalyzedPrefixQuery(String field, String termStr) throw Query posQuery; if (plist.size() == 1) { if (isLastPos) { - posQuery = super.getPrefixQuery(field, plist.get(0)); + posQuery = currentFieldType.prefixQuery(plist.get(0), getMultiTermRewriteMethod(), context); } else { posQuery = newTermQuery(new Term(field, plist.get(0))); } diff --git a/server/src/main/java/org/elasticsearch/index/seqno/ReplicationTracker.java b/server/src/main/java/org/elasticsearch/index/seqno/ReplicationTracker.java index b406621e978da..4298e5712bfc6 100644 --- a/server/src/main/java/org/elasticsearch/index/seqno/ReplicationTracker.java +++ b/server/src/main/java/org/elasticsearch/index/seqno/ReplicationTracker.java @@ -36,6 +36,7 @@ import java.io.IOException; import java.util.Collection; +import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.Map; @@ -135,6 +136,12 @@ public class ReplicationTracker extends AbstractIndexShardComponent implements L */ private final LongConsumer onGlobalCheckpointUpdated; + /** + * A supplier of the current time. This supplier is used to add a timestamp to retention leases, and to determine retention lease + * expiration. + */ + private final LongSupplier currentTimeMillisSupplier; + /** * This set contains allocation IDs for which there is a thread actively waiting for the local checkpoint to advance to at least the * current global checkpoint. 
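The currentTimeMillisSupplier documented above is the clock behind the retention-lease bookkeeping in the next hunk: every lease is stamped with the supplier's value when created or renewed, and getRetentionLeases() drops leases whose age exceeds the configured retention period. A minimal sketch of that expiration rule, assuming a hypothetical LeaseClockSketch class and a retentionLeaseMillis value standing in for IndexSettings#getRetentionLeaseMillis():

import java.util.HashMap;
import java.util.Map;
import java.util.function.LongSupplier;

// Hedged sketch, not Elasticsearch API: demonstrates the age-based expiry
// predicate used by ReplicationTracker#getRetentionLeases below.
final class LeaseClockSketch {
    private final Map<String, Long> leaseTimestamps = new HashMap<>(); // lease id -> creation/renewal time
    private final LongSupplier currentTimeMillisSupplier; // injected clock, as in the constructor above
    private final long retentionLeaseMillis; // stand-in for IndexSettings#getRetentionLeaseMillis()

    LeaseClockSketch(final LongSupplier currentTimeMillisSupplier, final long retentionLeaseMillis) {
        this.currentTimeMillisSupplier = currentTimeMillisSupplier;
        this.retentionLeaseMillis = retentionLeaseMillis;
    }

    void addOrRenew(final String id) {
        // re-stamping an existing id renews the lease, mirroring addOrUpdateRetentionLease
        leaseTimestamps.put(id, currentTimeMillisSupplier.getAsLong());
    }

    boolean isLive(final String id) {
        final Long timestamp = leaseTimestamps.get(id);
        // a lease survives while its age, measured against the injected clock, is within the limit
        return timestamp != null && currentTimeMillisSupplier.getAsLong() - timestamp <= retentionLeaseMillis;
    }
}

Injecting the clock (in production, threadPool::absoluteTimeInMillis) rather than calling System.currentTimeMillis() directly keeps the expiry decision deterministic under test with a fake time source.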
@@ -146,6 +153,38 @@ public class ReplicationTracker extends AbstractIndexShardComponent implements L */ volatile ReplicationGroup replicationGroup; + private final Map<String, RetentionLease> retentionLeases = new HashMap<>(); + + /** + * Get all non-expired retention leases tracked on this shard. An unmodifiable copy of the retention leases is returned. + * + * @return the retention leases + */ + public synchronized Collection<RetentionLease> getRetentionLeases() { + final long currentTimeMillis = currentTimeMillisSupplier.getAsLong(); + final long retentionLeaseMillis = indexSettings.getRetentionLeaseMillis(); + final Collection<RetentionLease> nonExpiredRetentionLeases = retentionLeases + .values() + .stream() + .filter(retentionLease -> currentTimeMillis - retentionLease.timestamp() <= retentionLeaseMillis) + .collect(Collectors.toList()); + retentionLeases.clear(); + retentionLeases.putAll(nonExpiredRetentionLeases.stream().collect(Collectors.toMap(RetentionLease::id, lease -> lease))); + return Collections.unmodifiableCollection(nonExpiredRetentionLeases); + } + + /** + * Adds a new or updates an existing retention lease. + * + * @param id the identifier of the retention lease + * @param retainingSequenceNumber the retaining sequence number + * @param source the source of the retention lease + */ + public synchronized void addOrUpdateRetentionLease(final String id, final long retainingSequenceNumber, final String source) { + assert primaryMode; + retentionLeases.put(id, new RetentionLease(id, retainingSequenceNumber, currentTimeMillisSupplier.getAsLong(), source)); + } + public static class CheckpointState implements Writeable { /** @@ -400,7 +439,8 @@ public ReplicationTracker( final String allocationId, final IndexSettings indexSettings, final long globalCheckpoint, - final LongConsumer onGlobalCheckpointUpdated) { + final LongConsumer onGlobalCheckpointUpdated, + final LongSupplier currentTimeMillisSupplier) { super(shardId, indexSettings); assert globalCheckpoint >= SequenceNumbers.UNASSIGNED_SEQ_NO : "illegal initial global checkpoint: " + globalCheckpoint; this.shardAllocationId = allocationId; @@ -410,6 +450,7 @@ public ReplicationTracker( this.checkpoints = new HashMap<>(1 + indexSettings.getNumberOfReplicas()); checkpoints.put(allocationId, new CheckpointState(SequenceNumbers.UNASSIGNED_SEQ_NO, globalCheckpoint, false, false)); this.onGlobalCheckpointUpdated = Objects.requireNonNull(onGlobalCheckpointUpdated); + this.currentTimeMillisSupplier = Objects.requireNonNull(currentTimeMillisSupplier); this.pendingInSync = new HashSet<>(); this.routingTable = null; this.replicationGroup = null; diff --git a/server/src/main/java/org/elasticsearch/index/seqno/RetentionLease.java b/server/src/main/java/org/elasticsearch/index/seqno/RetentionLease.java new file mode 100644 index 0000000000000..076b707a5df42 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/index/seqno/RetentionLease.java @@ -0,0 +1,106 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.index.seqno; + +/** + * A "shard history retention lease" (or "retention lease" for short) is conceptually a marker containing a retaining sequence number such + * that all operations with sequence number at least that retaining sequence number will be retained during merge operations (which could + * otherwise merge away operations that have been soft deleted). Each retention lease contains a unique identifier, the retaining sequence + * number, the timestamp of when the lease was created or renewed, and the source of the retention lease (e.g., "ccr"). + */ +public final class RetentionLease { + + private final String id; + + /** + * The identifier for this retention lease. This identifier should be unique per lease and is set during construction by the caller. + * + * @return the identifier + */ + public String id() { + return id; + } + + private final long retainingSequenceNumber; + + /** + * The retaining sequence number of this retention lease. The retaining sequence number is the minimum sequence number that this + * retention lease wants to retain during merge operations. The retaining sequence number is set during construction by the caller. + * + * @return the retaining sequence number + */ + public long retainingSequenceNumber() { + return retainingSequenceNumber; + } + + private final long timestamp; + + /** + * The timestamp of when this retention lease was created or renewed. + * + * @return the timestamp used as a basis for determining lease expiration + */ + public long timestamp() { + return timestamp; + } + + private final String source; + + /** + * The source of this retention lease. The source is set during construction by the caller. + * + * @return the source + */ + public String source() { + return source; + } + + /** + * Constructs a new retention lease. 
+ * + * @param id the identifier of the retention lease + * @param retainingSequenceNumber the retaining sequence number + * @param timestamp the timestamp of when the retention lease was created or renewed + * @param source the source of the retention lease + */ + public RetentionLease(final String id, final long retainingSequenceNumber, final long timestamp, final String source) { + if (retainingSequenceNumber < SequenceNumbers.UNASSIGNED_SEQ_NO) { + throw new IllegalArgumentException("retention lease retaining sequence number [" + retainingSequenceNumber + "] out of range"); + } + if (timestamp < 0) { + throw new IllegalArgumentException("retention lease timestamp [" + timestamp + "] out of range"); + } + this.id = id; + this.retainingSequenceNumber = retainingSequenceNumber; + this.timestamp = timestamp; + this.source = source; + } + + @Override + public String toString() { + return "RetentionLease{" + + "id='" + id + '\'' + + ", retainingSequenceNumber=" + retainingSequenceNumber + + ", timestamp=" + timestamp + + ", source='" + source + '\'' + + '}'; + } + +} diff --git a/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java b/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java index 37c7ace855cf2..66b8e607b5c76 100644 --- a/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java +++ b/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java @@ -161,7 +161,6 @@ import java.util.stream.Collectors; import java.util.stream.StreamSupport; -import static org.elasticsearch.index.mapper.SourceToParse.source; import static org.elasticsearch.index.seqno.SequenceNumbers.UNASSIGNED_SEQ_NO; public class IndexShard extends AbstractIndexShardComponent implements IndicesClusterStateService.Shard { @@ -306,7 +305,13 @@ public IndexShard( this.globalCheckpointListeners = new GlobalCheckpointListeners(shardId, threadPool.executor(ThreadPool.Names.LISTENER), threadPool.scheduler(), logger); this.replicationTracker = - new ReplicationTracker(shardId, aId, indexSettings, UNASSIGNED_SEQ_NO, globalCheckpointListeners::globalCheckpointUpdated); + new ReplicationTracker( + shardId, + aId, + indexSettings, + UNASSIGNED_SEQ_NO, + globalCheckpointListeners::globalCheckpointUpdated, + threadPool::absoluteTimeInMillis); // the query cache is a node-level thing, however we want the most popular filters // to be computed on a per-shard basis @@ -608,8 +613,10 @@ public IndexShardState markAsRecovering(String reason, RecoveryState recoverySta public void relocated(final Consumer consumer) throws IllegalIndexShardStateException, InterruptedException { assert shardRouting.primary() : "only primaries can be marked as relocated: " + shardRouting; + final Releasable forceRefreshes = refreshListeners.forceRefreshes(); try { indexShardOperationPermits.blockOperations(30, TimeUnit.MINUTES, () -> { + forceRefreshes.close(); // no shard operation permits are being held here, move state from started to relocated assert indexShardOperationPermits.getActiveOperationsCount() == 0 : "in-flight operations in progress while moving shard state to relocated"; @@ -640,6 +647,8 @@ public void relocated(final Consumer consumer // Fail primary relocation source and target shards. 
failShard("timed out waiting for relocation hand-off to complete", null); throw new IndexShardClosedException(shardId(), "timed out waiting for relocation hand-off to complete"); + } finally { + forceRefreshes.close(); } } @@ -715,9 +724,8 @@ private Engine.IndexResult applyIndexOperation(Engine engine, long seqNo, long o if (resolvedType.equals(sourceToParse.type())) { sourceWithResolvedType = sourceToParse; } else { - sourceWithResolvedType = SourceToParse.source(sourceToParse.index(), resolvedType, sourceToParse.id(), - sourceToParse.source(), sourceToParse.getXContentType()); - sourceWithResolvedType.routing(sourceToParse.routing()); + sourceWithResolvedType = new SourceToParse(sourceToParse.index(), resolvedType, sourceToParse.id(), + sourceToParse.source(), sourceToParse.getXContentType(), sourceToParse.routing()); } operation = prepareIndex(docMapper(resolvedType), indexSettings.getIndexVersionCreated(), sourceWithResolvedType, seqNo, opPrimaryTerm, version, versionType, origin, autoGeneratedTimeStamp, isRetry, ifSeqNo, ifPrimaryTerm); @@ -1300,8 +1308,8 @@ private Engine.Result applyTranslogOperation(Engine engine, Translog.Operation o // autoGeneratedID docs that are coming from the primary are updated correctly. result = applyIndexOperation(engine, index.seqNo(), index.primaryTerm(), index.version(), versionType, UNASSIGNED_SEQ_NO, 0, index.getAutoGeneratedIdTimestamp(), true, origin, - source(shardId.getIndexName(), index.type(), index.id(), index.source(), - XContentHelper.xContentType(index.source())).routing(index.routing())); + new SourceToParse(shardId.getIndexName(), index.type(), index.id(), index.source(), + XContentHelper.xContentType(index.source()), index.routing())); break; case DELETE: final Translog.Delete delete = (Translog.Delete) operation; @@ -1862,6 +1870,20 @@ public void addGlobalCheckpointListener( this.globalCheckpointListeners.add(waitingForGlobalCheckpoint, listener, timeout); } + + /** + * Adds a new or updates an existing retention lease. + * + * @param id the identifier of the retention lease + * @param retainingSequenceNumber the retaining sequence number + * @param source the source of the retention lease + */ + void addOrUpdateRetentionLease(final String id, final long retainingSequenceNumber, final String source) { + assert assertPrimaryMode(); + verifyNotClosed(); + replicationTracker.addOrUpdateRetentionLease(id, retainingSequenceNumber, source); + } + /** * Waits for all operations up to the provided sequence number to complete. 
* @@ -2308,13 +2330,14 @@ private DocumentMapperForType docMapper(String type) { private EngineConfig newEngineConfig() { Sort indexSort = indexSortSupplier.get(); return new EngineConfig(shardId, shardRouting.allocationId().getId(), - threadPool, indexSettings, warmer, store, indexSettings.getMergePolicy(), - mapperService.indexAnalyzer(), similarityService.similarity(mapperService), codecService, shardEventListener, - indexCache.query(), cachingPolicy, translogConfig, - IndexingMemoryController.SHARD_INACTIVE_TIME_SETTING.get(indexSettings.getSettings()), - Collections.singletonList(refreshListeners), - Collections.singletonList(new RefreshMetricUpdater(refreshMetric)), - indexSort, circuitBreakerService, replicationTracker, () -> operationPrimaryTerm, tombstoneDocSupplier()); + threadPool, indexSettings, warmer, store, indexSettings.getMergePolicy(), + mapperService.indexAnalyzer(), similarityService.similarity(mapperService), codecService, shardEventListener, + indexCache.query(), cachingPolicy, translogConfig, + IndexingMemoryController.SHARD_INACTIVE_TIME_SETTING.get(indexSettings.getSettings()), + Collections.singletonList(refreshListeners), + Collections.singletonList(new RefreshMetricUpdater(refreshMetric)), + indexSort, circuitBreakerService, replicationTracker, replicationTracker::getRetentionLeases, + () -> operationPrimaryTerm, tombstoneDocSupplier()); } /** @@ -2341,7 +2364,24 @@ public void acquireAllPrimaryOperationsPermits(final ActionListener verifyNotClosed(); assert shardRouting.primary() : "acquireAllPrimaryOperationsPermits should only be called on primary shard: " + shardRouting; - indexShardOperationPermits.asyncBlockOperations(onPermitAcquired, timeout.duration(), timeout.timeUnit()); + asyncBlockOperations(onPermitAcquired, timeout.duration(), timeout.timeUnit()); + } + + private void asyncBlockOperations(ActionListener onPermitAcquired, long timeout, TimeUnit timeUnit) { + final Releasable forceRefreshes = refreshListeners.forceRefreshes(); + final ActionListener wrappedListener = ActionListener.wrap(r -> { + forceRefreshes.close(); + onPermitAcquired.onResponse(r); + }, e -> { + forceRefreshes.close(); + onPermitAcquired.onFailure(e); + }); + try { + indexShardOperationPermits.asyncBlockOperations(wrappedListener, timeout, timeUnit); + } catch (Exception e) { + forceRefreshes.close(); + throw e; + } } private void bumpPrimaryTerm(final long newPrimaryTerm, @@ -2351,7 +2391,7 @@ private void bumpPrimaryTerm(final long newPrimaryTerm, assert newPrimaryTerm > pendingPrimaryTerm || (newPrimaryTerm >= pendingPrimaryTerm && combineWithAction != null); assert operationPrimaryTerm <= pendingPrimaryTerm; final CountDownLatch termUpdated = new CountDownLatch(1); - indexShardOperationPermits.asyncBlockOperations(new ActionListener() { + asyncBlockOperations(new ActionListener() { @Override public void onFailure(final Exception e) { try { @@ -2444,8 +2484,10 @@ public void acquireAllReplicaOperationsPermits(final long opPrimaryTerm, final long maxSeqNoOfUpdatesOrDeletes, final ActionListener onPermitAcquired, final TimeValue timeout) { - innerAcquireReplicaOperationPermit(opPrimaryTerm, globalCheckpoint, maxSeqNoOfUpdatesOrDeletes, onPermitAcquired, true, - (listener) -> indexShardOperationPermits.asyncBlockOperations(listener, timeout.duration(), timeout.timeUnit())); + innerAcquireReplicaOperationPermit(opPrimaryTerm, globalCheckpoint, maxSeqNoOfUpdatesOrDeletes, + onPermitAcquired, true, + listener -> asyncBlockOperations(listener, timeout.duration(), timeout.timeUnit()) 
+ ); + } private void innerAcquireReplicaOperationPermit(final long opPrimaryTerm, diff --git a/server/src/main/java/org/elasticsearch/index/shard/IndexShardOperationPermits.java b/server/src/main/java/org/elasticsearch/index/shard/IndexShardOperationPermits.java index d5d0d7f3e9753..fe7a5392a080d 100644 --- a/server/src/main/java/org/elasticsearch/index/shard/IndexShardOperationPermits.java +++ b/server/src/main/java/org/elasticsearch/index/shard/IndexShardOperationPermits.java @@ -295,7 +295,7 @@ private Releasable acquire(Object debugInfo, StackTraceElement[] stackTrace) thr /** * Obtain the active operation count, or zero if all permits are held (even if there are outstanding operations in flight). * - * @return the active operation count, or zero when all permits ar eheld + * @return the active operation count, or zero when all permits are held */ int getActiveOperationsCount() { int availablePermits = semaphore.availablePermits(); diff --git a/server/src/main/java/org/elasticsearch/index/shard/RefreshListeners.java b/server/src/main/java/org/elasticsearch/index/shard/RefreshListeners.java index d8a51d58ad956..b4b9e13f7e063 100644 --- a/server/src/main/java/org/elasticsearch/index/shard/RefreshListeners.java +++ b/server/src/main/java/org/elasticsearch/index/shard/RefreshListeners.java @@ -22,6 +22,8 @@ import org.apache.logging.log4j.Logger; import org.apache.lucene.search.ReferenceManager; import org.elasticsearch.common.collect.Tuple; +import org.elasticsearch.common.lease.Releasable; +import org.elasticsearch.common.util.concurrent.RunOnce; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.index.translog.Translog; @@ -53,6 +55,13 @@ public final class RefreshListeners implements ReferenceManager.RefreshListener, * Is this closed? If true then we won't add more listeners and have flushed all pending listeners. */ private volatile boolean closed = false; + + /** + * Force-refreshes new refresh listeners that are added while this count is {@code > 0}. Used to prevent becoming blocked on operations waiting for + * refresh during relocation. + */ + private int refreshForcers; + /** * List of refresh listeners. Defaults to null and built on demand because most refresh cycles won't need it. Entries are never removed * from it, rather, it is nulled and rebuilt when needed again. The (hopefully) rare entries that didn't make the current refresh cycle @@ -75,6 +84,32 @@ public RefreshListeners(IntSupplier getMaxRefreshListeners, Runnable forceRefres this.threadContext = threadContext; } + /** + * Force-refreshes newly added listeners and forces a refresh if there are currently listeners registered. See {@link #refreshForcers}. + */ + public Releasable forceRefreshes() { + synchronized (this) { + assert refreshForcers >= 0; + refreshForcers += 1; + } + final RunOnce runOnce = new RunOnce(() -> { + synchronized (RefreshListeners.this) { + assert refreshForcers > 0; + refreshForcers -= 1; + } + }); + if (refreshNeeded()) { + try { + forceRefresh.run(); + } catch (Exception e) { + runOnce.run(); + throw e; + } + } + assert refreshListeners == null; + return () -> runOnce.run(); + } + /** * Add a listener for refreshes, calling it immediately if the location is already visible. If this runs out of listener slots then it * forces a refresh and calls the listener immediately as well. 
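Since forceRefreshes() hands back a Releasable, the intended call shape is acquire, run the permit-blocking work, then release, which is how IndexShard#relocated and asyncBlockOperations use it above. A usage sketch under that assumption (the helper class and the Runnable are illustrative, not Elasticsearch API):

import org.elasticsearch.common.lease.Releasable;
import org.elasticsearch.index.shard.RefreshListeners;

// Hedged sketch: while the Releasable is held, newly added refresh listeners
// fire immediately instead of queueing, so indexing operations cannot block
// waiting for a refresh that will never happen while permits are blocked.
final class ForceRefreshSketch {

    static void runWithForcedRefreshes(final RefreshListeners refreshListeners, final Runnable blockedWork) {
        final Releasable forceRefreshes = refreshListeners.forceRefreshes();
        try {
            blockedWork.run(); // e.g. blocking all operation permits during a relocation hand-off
        } finally {
            forceRefreshes.close(); // restore normal listener queueing
        }
    }
}

The try/finally mirrors the production wiring: relocated() closes the Releasable inside the permit block once it is safe and again in a finally clause, so a timeout cannot leak the forced-refresh state (the underlying RunOnce makes the release idempotent).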
@@ -102,7 +137,7 @@ public boolean addOrNotify(Translog.Location location, Consumer listene listeners = new ArrayList<>(); refreshListeners = listeners; } - if (listeners.size() < getMaxRefreshListeners.getAsInt()) { + if (refreshForcers == 0 && listeners.size() < getMaxRefreshListeners.getAsInt()) { ThreadContext.StoredContext storedContext = threadContext.newStoredContext(true); Consumer contextPreservingListener = forced -> { try (ThreadContext.StoredContext ignore = threadContext.stashContext()) { diff --git a/server/src/main/java/org/elasticsearch/index/store/FsDirectoryService.java b/server/src/main/java/org/elasticsearch/index/store/FsDirectoryService.java index f95cdb3a9f692..ab93d1759391a 100644 --- a/server/src/main/java/org/elasticsearch/index/store/FsDirectoryService.java +++ b/server/src/main/java/org/elasticsearch/index/store/FsDirectoryService.java @@ -20,7 +20,10 @@ package org.elasticsearch.index.store; import org.apache.lucene.store.Directory; +import org.apache.lucene.store.FSDirectory; import org.apache.lucene.store.FileSwitchDirectory; +import org.apache.lucene.store.IOContext; +import org.apache.lucene.store.IndexInput; import org.apache.lucene.store.LockFactory; import org.apache.lucene.store.MMapDirectory; import org.apache.lucene.store.NIOFSDirectory; @@ -30,6 +33,7 @@ import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Setting.Property; +import org.elasticsearch.core.internal.io.IOUtils; import org.elasticsearch.index.IndexModule; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.shard.ShardPath; @@ -41,7 +45,6 @@ import java.util.Set; public class FsDirectoryService extends DirectoryService { - protected final IndexStore indexStore; public static final Setting INDEX_LOCK_FACTOR_SETTING = new Setting<>("index.store.fs.fs_lock", "native", (s) -> { switch (s) { @@ -78,27 +81,30 @@ public Directory newDirectory() throws IOException { protected Directory newFSDirectory(Path location, LockFactory lockFactory) throws IOException { final String storeType = indexSettings.getSettings().get(IndexModule.INDEX_STORE_TYPE_SETTING.getKey(), IndexModule.Type.FS.getSettingsKey()); + IndexModule.Type type; if (IndexModule.Type.FS.match(storeType)) { - final IndexModule.Type type = - IndexModule.defaultStoreType(IndexModule.NODE_STORE_ALLOW_MMAPFS.get(indexSettings.getNodeSettings())); - switch (type) { - case MMAPFS: - return new MMapDirectory(location, lockFactory); - case SIMPLEFS: - return new SimpleFSDirectory(location, lockFactory); - case NIOFS: - return new NIOFSDirectory(location, lockFactory); - default: - throw new AssertionError("unexpected built-in store type [" + type + "]"); - } - } else if (IndexModule.Type.SIMPLEFS.match(storeType)) { - return new SimpleFSDirectory(location, lockFactory); - } else if (IndexModule.Type.NIOFS.match(storeType)) { - return new NIOFSDirectory(location, lockFactory); - } else if (IndexModule.Type.MMAPFS.match(storeType)) { - return new MMapDirectory(location, lockFactory); + type = IndexModule.defaultStoreType(IndexModule.NODE_STORE_ALLOW_MMAP.get(indexSettings.getNodeSettings())); + } else { + type = IndexModule.Type.fromSettingsKey(storeType); + } + switch (type) { + case HYBRIDFS: + // Use Lucene defaults + final FSDirectory primaryDirectory = FSDirectory.open(location, lockFactory); + if (primaryDirectory instanceof MMapDirectory) { + return new HybridDirectory(location, lockFactory, primaryDirectory); + } else { + return 
primaryDirectory; + } + case MMAPFS: + return new MMapDirectory(location, lockFactory); + case SIMPLEFS: + return new SimpleFSDirectory(location, lockFactory); + case NIOFS: + return new NIOFSDirectory(location, lockFactory); + default: + throw new AssertionError("unexpected built-in store type [" + type + "]"); } - throw new IllegalArgumentException("No directory found for type [" + storeType + "]"); } private static Directory setPreload(Directory directory, Path location, LockFactory lockFactory, @@ -122,4 +128,44 @@ public String[] listAll() throws IOException { } return directory; } + + static final class HybridDirectory extends NIOFSDirectory { + private final FSDirectory randomAccessDirectory; + + HybridDirectory(Path location, LockFactory lockFactory, FSDirectory randomAccessDirectory) throws IOException { + super(location, lockFactory); + this.randomAccessDirectory = randomAccessDirectory; + } + + @Override + public IndexInput openInput(String name, IOContext context) throws IOException { + String extension = FileSwitchDirectory.getExtension(name); + switch (extension) { + // We mmap norms, doc values, and term dictionaries; all other files are served through NIOFS. + // This provides good random-access performance and does not lead to page-cache thrashing. + case "nvd": + case "dvd": + case "tim": + // we need to do these checks on the outer directory since the inner doesn't know about pending deletes + ensureOpen(); + ensureCanRead(name); + // we only use mmap to open inputs; everything else is managed by the NIOFSDirectory. Otherwise + // we might run into trouble with files that are pending delete in one directory but still + // listed in listAll() from the other. On the other hand, we don't want to list files from both + // dirs and intersect them, for performance reasons.
+ return randomAccessDirectory.openInput(name, context); + default: + return super.openInput(name, context); + } + } + + @Override + public void close() throws IOException { + IOUtils.close(super::close, randomAccessDirectory); + } + + Directory getRandomAccessDirectory() { + return randomAccessDirectory; + } + } } diff --git a/server/src/main/java/org/elasticsearch/index/termvectors/TermVectorsService.java b/server/src/main/java/org/elasticsearch/index/termvectors/TermVectorsService.java index 68f175f7ed6ae..a05870b842f2d 100644 --- a/server/src/main/java/org/elasticsearch/index/termvectors/TermVectorsService.java +++ b/server/src/main/java/org/elasticsearch/index/termvectors/TermVectorsService.java @@ -50,6 +50,7 @@ import org.elasticsearch.index.mapper.ParseContext; import org.elasticsearch.index.mapper.ParsedDocument; import org.elasticsearch.index.mapper.SourceFieldMapper; +import org.elasticsearch.index.mapper.SourceToParse; import org.elasticsearch.index.mapper.StringFieldType; import org.elasticsearch.index.mapper.Uid; import org.elasticsearch.index.shard.IndexShard; @@ -69,8 +70,6 @@ import java.util.concurrent.TimeUnit; import java.util.function.LongSupplier; -import static org.elasticsearch.index.mapper.SourceToParse.source; - public class TermVectorsService { @@ -302,8 +301,8 @@ private static Fields generateTermVectors(IndexShard indexShard, private static Fields generateTermVectorsFromDoc(IndexShard indexShard, TermVectorsRequest request) throws IOException { // parse the document, at the moment we do update the mapping, just like percolate - ParsedDocument parsedDocument = - parseDocument(indexShard, indexShard.shardId().getIndexName(), request.type(), request.doc(), request.xContentType()); + ParsedDocument parsedDocument = parseDocument(indexShard, indexShard.shardId().getIndexName(), request.type(), request.doc(), + request.xContentType(), request.routing()); // select the right fields and generate term vectors ParseContext.Document doc = parsedDocument.rootDoc(); @@ -332,10 +331,11 @@ private static Fields generateTermVectorsFromDoc(IndexShard indexShard, TermVect } private static ParsedDocument parseDocument(IndexShard indexShard, String index, String type, BytesReference doc, - XContentType xContentType) { + XContentType xContentType, String routing) { MapperService mapperService = indexShard.mapperService(); DocumentMapperForType docMapper = mapperService.documentMapperWithAutoCreate(type); - ParsedDocument parsedDocument = docMapper.getDocumentMapper().parse(source(index, type, "_id_for_tv_api", doc, xContentType)); + ParsedDocument parsedDocument = docMapper.getDocumentMapper().parse( + new SourceToParse(index, type, "_id_for_tv_api", doc, xContentType, routing)); if (docMapper.getMapping() != null) { parsedDocument.addDynamicMappingsUpdate(docMapper.getMapping()); } diff --git a/server/src/main/java/org/elasticsearch/indices/IndicesService.java b/server/src/main/java/org/elasticsearch/indices/IndicesService.java index 19388a2b63d4d..f0294535fa751 100644 --- a/server/src/main/java/org/elasticsearch/indices/IndicesService.java +++ b/server/src/main/java/org/elasticsearch/indices/IndicesService.java @@ -159,20 +159,6 @@ public class IndicesService extends AbstractLifecycleComponent public static final String INDICES_SHARDS_CLOSED_TIMEOUT = "indices.shards_closed_timeout"; public static final Setting INDICES_CACHE_CLEAN_INTERVAL_SETTING = Setting.positiveTimeSetting("indices.cache.cleanup_interval", TimeValue.timeValueMinutes(1), Property.NodeScope); - private static 
final boolean ENFORCE_MAX_SHARDS_PER_NODE; - - static { - final String ENFORCE_SHARD_LIMIT_KEY = "es.enforce_max_shards_per_node"; - final String enforceMaxShardsPerNode = System.getProperty(ENFORCE_SHARD_LIMIT_KEY); - if (enforceMaxShardsPerNode == null) { - ENFORCE_MAX_SHARDS_PER_NODE = false; - } else if ("true".equals(enforceMaxShardsPerNode)) { - ENFORCE_MAX_SHARDS_PER_NODE = true; - } else { - throw new IllegalArgumentException(ENFORCE_SHARD_LIMIT_KEY + " may only be unset or set to [true] but was [" + - enforceMaxShardsPerNode + "]"); - } - } /** * The node's settings. @@ -1212,7 +1198,7 @@ public boolean canCache(ShardSearchRequest request, SearchContext context) { // if now in millis is used (or in the future, a more generic "isDeterministic" flag // then we can't cache based on "now" key within the search request, as it is not deterministic - if (context.getQueryShardContext().isCachable() == false) { + if (context.getQueryShardContext().isCacheable() == false) { return false; } return true; diff --git a/server/src/main/java/org/elasticsearch/indices/TermsLookup.java b/server/src/main/java/org/elasticsearch/indices/TermsLookup.java index c1acce072b166..077116e2fd54e 100644 --- a/server/src/main/java/org/elasticsearch/indices/TermsLookup.java +++ b/server/src/main/java/org/elasticsearch/indices/TermsLookup.java @@ -20,11 +20,11 @@ package org.elasticsearch.indices; import org.elasticsearch.Version; +import org.elasticsearch.common.Nullable; import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; -import org.elasticsearch.common.xcontent.ToXContent.Params; import org.elasticsearch.common.xcontent.ToXContentFragment; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; @@ -38,18 +38,24 @@ */ public class TermsLookup implements Writeable, ToXContentFragment { private final String index; - private final String type; + private @Nullable String type; private final String id; private final String path; private String routing; + + public TermsLookup(String index, String id, String path) { + this(index, null, id, path); + } + + /** + * @deprecated Types are in the process of being removed, use {@link #TermsLookup(String, String, String)} instead. + */ + @Deprecated public TermsLookup(String index, String type, String id, String path) { if (id == null) { throw new IllegalArgumentException("[" + TermsQueryBuilder.NAME + "] query lookup element requires specifying the id."); } - if (type == null) { - throw new IllegalArgumentException("[" + TermsQueryBuilder.NAME + "] query lookup element requires specifying the type."); - } if (path == null) { throw new IllegalArgumentException("[" + TermsQueryBuilder.NAME + "] query lookup element requires specifying the path."); } @@ -66,7 +72,12 @@ public TermsLookup(String index, String type, String id, String path) { * Read from a stream. */ public TermsLookup(StreamInput in) throws IOException { - type = in.readString(); + if (in.getVersion().onOrAfter(Version.V_7_0_0)) { + type = in.readOptionalString(); + } else { + // Before 7.0, the type parameter was always non-null and serialized as a (non-optional) string.
+ type = in.readString(); + } id = in.readString(); path = in.readString(); if (in.getVersion().onOrAfter(Version.V_6_0_0_beta1)) { @@ -82,7 +93,16 @@ public TermsLookup(StreamInput in) throws IOException { @Override public void writeTo(StreamOutput out) throws IOException { - out.writeString(type); + if (out.getVersion().onOrAfter(Version.V_7_0_0)) { + out.writeOptionalString(type); + } else { + if (type == null) { + throw new IllegalArgumentException("Typeless [terms] lookup queries are not supported if any " + + "node is running a version before 7.0."); + + } + out.writeString(type); + } out.writeString(id); out.writeString(path); if (out.getVersion().onOrAfter(Version.V_6_0_0_beta1)) { @@ -97,6 +117,10 @@ public String index() { return index; } + /** + * @deprecated Types are in the process of being removed. + */ + @Deprecated public String type() { return type; } @@ -155,18 +179,28 @@ public static TermsLookup parseTermsLookup(XContentParser parser) throws IOExcep + token + "] after [" + currentFieldName + "]"); } } - return new TermsLookup(index, type, id, path).routing(routing); + if (type == null) { + return new TermsLookup(index, id, path).routing(routing); + } else { + return new TermsLookup(index, type, id, path).routing(routing); + } } @Override public String toString() { - return index + "/" + type + "/" + id + "/" + path; + if (type == null) { + return index + "/" + id + "/" + path; + } else { + return index + "/" + type + "/" + id + "/" + path; + } } @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.field("index", index); - builder.field("type", type); + if (type != null) { + builder.field("type", type); + } builder.field("id", id); builder.field("path", path); if (routing != null) { diff --git a/server/src/main/java/org/elasticsearch/indices/recovery/PeerRecoveryTargetService.java b/server/src/main/java/org/elasticsearch/indices/recovery/PeerRecoveryTargetService.java index cdb4082b82e70..8e52a05e2ac30 100644 --- a/server/src/main/java/org/elasticsearch/indices/recovery/PeerRecoveryTargetService.java +++ b/server/src/main/java/org/elasticsearch/indices/recovery/PeerRecoveryTargetService.java @@ -59,17 +59,18 @@ import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.ConnectTransportException; -import org.elasticsearch.transport.FutureTransportResponseHandler; import org.elasticsearch.transport.TransportChannel; +import org.elasticsearch.transport.TransportException; import org.elasticsearch.transport.TransportRequestHandler; import org.elasticsearch.transport.TransportResponse; +import org.elasticsearch.transport.TransportResponseHandler; import org.elasticsearch.transport.TransportService; import java.io.IOException; import java.util.List; import java.util.StringJoiner; import java.util.concurrent.atomic.AtomicLong; -import java.util.concurrent.atomic.AtomicReference; +import java.util.function.Consumer; import static org.elasticsearch.common.unit.TimeValue.timeValueMillis; @@ -142,6 +143,8 @@ public void beforeIndexShardClosed(ShardId shardId, @Nullable IndexShard indexSh public void startRecovery(final IndexShard indexShard, final DiscoveryNode sourceNode, final RecoveryListener listener) { // create a new recovery status, and process... 
final long recoveryId = onGoingRecoveries.startRecovery(indexShard, sourceNode, listener, recoverySettings.activityTimeout()); + // we fork off quickly here and go async but this is called from the cluster state applier thread too and that can cause + // assertions to trip if we executed it on the same thread hence we fork off to the generic threadpool. threadPool.generic().execute(new RecoveryRunner(recoveryId)); } @@ -165,17 +168,16 @@ private void retryRecovery(final long recoveryId, final TimeValue retryAfter, fi private void doRecovery(final long recoveryId) { final StartRecoveryRequest request; - final CancellableThreads cancellableThreads; final RecoveryState.Timer timer; - + CancellableThreads cancellableThreads; try (RecoveryRef recoveryRef = onGoingRecoveries.getRecovery(recoveryId)) { if (recoveryRef == null) { logger.trace("not running recovery with id [{}] - can not find it (probably finished)", recoveryId); return; } final RecoveryTarget recoveryTarget = recoveryRef.target(); - cancellableThreads = recoveryTarget.cancellableThreads(); timer = recoveryTarget.state().getTimer(); + cancellableThreads = recoveryTarget.cancellableThreads(); try { assert recoveryTarget.sourceNode() != null : "can not do a recovery without a source node"; request = getStartRecoveryRequest(recoveryTarget); @@ -189,51 +191,11 @@ private void doRecovery(final long recoveryId) { return; } } - - try { - logger.trace("{} starting recovery from {}", request.shardId(), request.sourceNode()); - final AtomicReference responseHolder = new AtomicReference<>(); - cancellableThreads.execute(() -> responseHolder.set( - transportService.submitRequest(request.sourceNode(), PeerRecoverySourceService.Actions.START_RECOVERY, request, - new FutureTransportResponseHandler() { - @Override - public RecoveryResponse read(StreamInput in) throws IOException { - RecoveryResponse recoveryResponse = new RecoveryResponse(); - recoveryResponse.readFrom(in); - return recoveryResponse; - } - }).txGet())); - final RecoveryResponse recoveryResponse = responseHolder.get(); - final TimeValue recoveryTime = new TimeValue(timer.time()); - // do this through ongoing recoveries to remove it from the collection - onGoingRecoveries.markRecoveryAsDone(recoveryId); - if (logger.isTraceEnabled()) { - StringBuilder sb = new StringBuilder(); - sb.append('[').append(request.shardId().getIndex().getName()).append(']').append('[').append(request.shardId().id()) - .append("] "); - sb.append("recovery completed from ").append(request.sourceNode()).append(", took[").append(recoveryTime).append("]\n"); - sb.append(" phase1: recovered_files [").append(recoveryResponse.phase1FileNames.size()).append("]").append(" with " + - "total_size of [").append(new ByteSizeValue(recoveryResponse.phase1TotalSize)).append("]") - .append(", took [").append(timeValueMillis(recoveryResponse.phase1Time)).append("], throttling_wait [").append - (timeValueMillis(recoveryResponse.phase1ThrottlingWaitTime)).append(']') - .append("\n"); - sb.append(" : reusing_files [").append(recoveryResponse.phase1ExistingFileNames.size()).append("] with " + - "total_size of [").append(new ByteSizeValue(recoveryResponse.phase1ExistingTotalSize)).append("]\n"); - sb.append(" phase2: start took [").append(timeValueMillis(recoveryResponse.startTime)).append("]\n"); - sb.append(" : recovered [").append(recoveryResponse.phase2Operations).append("]").append(" transaction log " + - "operations") - .append(", took [").append(timeValueMillis(recoveryResponse.phase2Time)).append("]") - .append("\n"); - 
logger.trace("{}", sb); - } else { - logger.debug("{} recovery done from [{}], took [{}]", request.shardId(), request.sourceNode(), recoveryTime); - } - } catch (CancellableThreads.ExecutionCancelledException e) { - logger.trace("recovery cancelled", e); - } catch (Exception e) { + Consumer handleException = e -> { if (logger.isTraceEnabled()) { logger.trace(() -> new ParameterizedMessage( - "[{}][{}] Got exception on recovery", request.shardId().getIndex().getName(), request.shardId().id()), e); + "[{}][{}] Got exception on recovery", request.shardId().getIndex().getName(), + request.shardId().id()), e); } Throwable cause = ExceptionsHelper.unwrapCause(e); if (cause instanceof CancellableThreads.ExecutionCancelledException) { @@ -267,14 +229,16 @@ public RecoveryResponse read(StreamInput in) throws IOException { } if (cause instanceof DelayRecoveryException) { - retryRecovery(recoveryId, cause, recoverySettings.retryDelayStateSync(), recoverySettings.activityTimeout()); + retryRecovery(recoveryId, cause, recoverySettings.retryDelayStateSync(), + recoverySettings.activityTimeout()); return; } if (cause instanceof ConnectTransportException) { logger.debug("delaying recovery of {} for [{}] due to networking error [{}]", request.shardId(), recoverySettings.retryDelayNetwork(), cause.getMessage()); - retryRecovery(recoveryId, cause.getMessage(), recoverySettings.retryDelayNetwork(), recoverySettings.activityTimeout()); + retryRecovery(recoveryId, cause.getMessage(), recoverySettings.retryDelayNetwork(), + recoverySettings.activityTimeout()); return; } @@ -285,6 +249,69 @@ public RecoveryResponse read(StreamInput in) throws IOException { } onGoingRecoveries.failRecovery(recoveryId, new RecoveryFailedException(request, e), true); + }; + + try { + logger.trace("{} starting recovery from {}", request.shardId(), request.sourceNode()); + cancellableThreads.executeIO(() -> + // we still execute under cancelableThreads here to ensure we interrupt any blocking call to the network if any + // on the underlying transport. It's unclear if we need this here at all after moving to async execution but + // the issues that a missing call to this could cause are sneaky and hard to debug. If we don't need it on this + // call we can potentially remove it altogether which we should do it in a major release only with enough + // time to test. 
This shoudl be done for 7.0 if possible + transportService.submitRequest(request.sourceNode(), PeerRecoverySourceService.Actions.START_RECOVERY, request, + new TransportResponseHandler() { + @Override + public void handleResponse(RecoveryResponse recoveryResponse) { + final TimeValue recoveryTime = new TimeValue(timer.time()); + // do this through ongoing recoveries to remove it from the collection + onGoingRecoveries.markRecoveryAsDone(recoveryId); + if (logger.isTraceEnabled()) { + StringBuilder sb = new StringBuilder(); + sb.append('[').append(request.shardId().getIndex().getName()).append(']') + .append('[').append(request.shardId().id()).append("] "); + sb.append("recovery completed from ").append(request.sourceNode()).append(", took[").append(recoveryTime) + .append("]\n"); + sb.append(" phase1: recovered_files [").append(recoveryResponse.phase1FileNames.size()).append("]") + .append(" with total_size of [").append(new ByteSizeValue(recoveryResponse.phase1TotalSize)).append("]") + .append(", took [").append(timeValueMillis(recoveryResponse.phase1Time)).append("], throttling_wait [") + .append(timeValueMillis(recoveryResponse.phase1ThrottlingWaitTime)).append(']').append("\n"); + sb.append(" : reusing_files [").append(recoveryResponse.phase1ExistingFileNames.size()) + .append("] with total_size of [").append(new ByteSizeValue(recoveryResponse.phase1ExistingTotalSize)) + .append("]\n"); + sb.append(" phase2: start took [").append(timeValueMillis(recoveryResponse.startTime)).append("]\n"); + sb.append(" : recovered [").append(recoveryResponse.phase2Operations).append("]") + .append(" transaction log operations") + .append(", took [").append(timeValueMillis(recoveryResponse.phase2Time)).append("]") + .append("\n"); + logger.trace("{}", sb); + } else { + logger.debug("{} recovery done from [{}], took [{}]", request.shardId(), request.sourceNode(), + recoveryTime); + } + } + + @Override + public void handleException(TransportException e) { + handleException.accept(e); + } + + @Override + public String executor() { + // we do some heavy work like refreshes in the response so fork off to the generic threadpool + return ThreadPool.Names.GENERIC; + } + + @Override + public RecoveryResponse read(StreamInput in) throws IOException { + return new RecoveryResponse(in); + } + }) + ); + } catch (CancellableThreads.ExecutionCancelledException e) { + logger.trace("recovery cancelled", e); + } catch (Exception e) { + handleException.accept(e); } } @@ -632,5 +659,4 @@ public void doRun() { doRecovery(recoveryId); } } - } diff --git a/server/src/main/java/org/elasticsearch/indices/recovery/RecoveryResponse.java b/server/src/main/java/org/elasticsearch/indices/recovery/RecoveryResponse.java index 9018f6f0be199..02d4ff5dbc13b 100644 --- a/server/src/main/java/org/elasticsearch/indices/recovery/RecoveryResponse.java +++ b/server/src/main/java/org/elasticsearch/indices/recovery/RecoveryResponse.java @@ -24,53 +24,46 @@ import org.elasticsearch.transport.TransportResponse; import java.io.IOException; -import java.util.ArrayList; import java.util.List; -class RecoveryResponse extends TransportResponse { +final class RecoveryResponse extends TransportResponse { - List phase1FileNames = new ArrayList<>(); - List phase1FileSizes = new ArrayList<>(); - List phase1ExistingFileNames = new ArrayList<>(); - List phase1ExistingFileSizes = new ArrayList<>(); - long phase1TotalSize; - long phase1ExistingTotalSize; - long phase1Time; - long phase1ThrottlingWaitTime; + final List phase1FileNames; + final List 
phase1FileSizes; + final List phase1ExistingFileNames; + final List phase1ExistingFileSizes; + final long phase1TotalSize; + final long phase1ExistingTotalSize; + final long phase1Time; + final long phase1ThrottlingWaitTime; - long startTime; + final long startTime; - int phase2Operations; - long phase2Time; + final int phase2Operations; + final long phase2Time; - RecoveryResponse() { + RecoveryResponse(List phase1FileNames, List phase1FileSizes, List phase1ExistingFileNames, + List phase1ExistingFileSizes, long phase1TotalSize, long phase1ExistingTotalSize, + long phase1Time, long phase1ThrottlingWaitTime, long startTime, int phase2Operations, long phase2Time) { + this.phase1FileNames = phase1FileNames; + this.phase1FileSizes = phase1FileSizes; + this.phase1ExistingFileNames = phase1ExistingFileNames; + this.phase1ExistingFileSizes = phase1ExistingFileSizes; + this.phase1TotalSize = phase1TotalSize; + this.phase1ExistingTotalSize = phase1ExistingTotalSize; + this.phase1Time = phase1Time; + this.phase1ThrottlingWaitTime = phase1ThrottlingWaitTime; + this.startTime = startTime; + this.phase2Operations = phase2Operations; + this.phase2Time = phase2Time; } - @Override - public void readFrom(StreamInput in) throws IOException { - super.readFrom(in); - int size = in.readVInt(); - phase1FileNames = new ArrayList<>(size); - for (int i = 0; i < size; i++) { - phase1FileNames.add(in.readString()); - } - size = in.readVInt(); - phase1FileSizes = new ArrayList<>(size); - for (int i = 0; i < size; i++) { - phase1FileSizes.add(in.readVLong()); - } - - size = in.readVInt(); - phase1ExistingFileNames = new ArrayList<>(size); - for (int i = 0; i < size; i++) { - phase1ExistingFileNames.add(in.readString()); - } - size = in.readVInt(); - phase1ExistingFileSizes = new ArrayList<>(size); - for (int i = 0; i < size; i++) { - phase1ExistingFileSizes.add(in.readVLong()); - } - + RecoveryResponse(StreamInput in) throws IOException { + super(in); + phase1FileNames = in.readList(StreamInput::readString); + phase1FileSizes = in.readList(StreamInput::readVLong); + phase1ExistingFileNames = in.readList(StreamInput::readString); + phase1ExistingFileSizes = in.readList(StreamInput::readVLong); phase1TotalSize = in.readVLong(); phase1ExistingTotalSize = in.readVLong(); phase1Time = in.readVLong(); @@ -83,24 +76,10 @@ public void readFrom(StreamInput in) throws IOException { @Override public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); - out.writeVInt(phase1FileNames.size()); - for (String name : phase1FileNames) { - out.writeString(name); - } - out.writeVInt(phase1FileSizes.size()); - for (long size : phase1FileSizes) { - out.writeVLong(size); - } - - out.writeVInt(phase1ExistingFileNames.size()); - for (String name : phase1ExistingFileNames) { - out.writeString(name); - } - out.writeVInt(phase1ExistingFileSizes.size()); - for (long size : phase1ExistingFileSizes) { - out.writeVLong(size); - } - + out.writeStringList(phase1FileNames); + out.writeCollection(phase1FileSizes, StreamOutput::writeVLong); + out.writeStringList(phase1ExistingFileNames); + out.writeCollection(phase1ExistingFileSizes, StreamOutput::writeVLong); out.writeVLong(phase1TotalSize); out.writeVLong(phase1ExistingTotalSize); out.writeVLong(phase1Time); diff --git a/server/src/main/java/org/elasticsearch/indices/recovery/RecoverySourceHandler.java b/server/src/main/java/org/elasticsearch/indices/recovery/RecoverySourceHandler.java index 46f98275740ae..315af6b4ae084 100644 --- 
a/server/src/main/java/org/elasticsearch/indices/recovery/RecoverySourceHandler.java +++ b/server/src/main/java/org/elasticsearch/indices/recovery/RecoverySourceHandler.java @@ -41,6 +41,7 @@ import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.lucene.store.InputStreamIndexInput; import org.elasticsearch.common.unit.ByteSizeValue; +import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.CancellableThreads; import org.elasticsearch.common.util.concurrent.FutureUtils; import org.elasticsearch.core.internal.io.IOUtils; @@ -64,6 +65,7 @@ import java.io.IOException; import java.io.OutputStream; import java.util.ArrayList; +import java.util.Collections; import java.util.Comparator; import java.util.List; import java.util.Locale; @@ -95,8 +97,6 @@ public class RecoverySourceHandler { private final int chunkSizeInBytes; private final RecoveryTargetHandler recoveryTarget; - protected final RecoveryResponse response; - private final CancellableThreads cancellableThreads = new CancellableThreads() { @Override protected void onCancel(String reason, @Nullable Exception suppressedException) { @@ -122,7 +122,6 @@ public RecoverySourceHandler(final IndexShard shard, RecoveryTargetHandler recov this.shardId = this.request.shardId().id(); this.logger = Loggers.getLogger(getClass(), request.shardId(), "recover to " + request.targetNode().getName()); this.chunkSizeInBytes = fileChunkSizeInBytes; - this.response = new RecoveryResponse(); } public StartRecoveryRequest getRequest() { @@ -149,10 +148,12 @@ public RecoveryResponse recoverToTarget() throws IOException { final long requiredSeqNoRangeStart; final boolean isSequenceNumberBasedRecovery = request.startingSeqNo() != SequenceNumbers.UNASSIGNED_SEQ_NO && isTargetSameHistory() && shard.hasCompleteHistoryOperations("peer-recovery", request.startingSeqNo()); + final SendFileResult sendFileResult; if (isSequenceNumberBasedRecovery) { logger.trace("performing sequence numbers based recovery. starting at [{}]", request.startingSeqNo()); startingSeqNo = request.startingSeqNo(); requiredSeqNoRangeStart = startingSeqNo; + sendFileResult = SendFileResult.EMPTY; } else { final Engine.IndexCommitRef phase1Snapshot; try { @@ -169,7 +170,7 @@ public RecoveryResponse recoverToTarget() throws IOException { startingSeqNo = shard.indexSettings().isSoftDeleteEnabled() ? 
requiredSeqNoRangeStart : 0; try { final int estimateNumOps = shard.estimateNumberOfHistoryOperations("peer-recovery", startingSeqNo); - phase1(phase1Snapshot.getIndexCommit(), () -> estimateNumOps); + sendFileResult = phase1(phase1Snapshot.getIndexCommit(), () -> estimateNumOps); } catch (final Exception e) { throw new RecoveryEngineException(shard.shardId(), 1, "phase1 failed", e); } finally { @@ -184,9 +185,10 @@ public RecoveryResponse recoverToTarget() throws IOException { assert requiredSeqNoRangeStart >= startingSeqNo : "requiredSeqNoRangeStart [" + requiredSeqNoRangeStart + "] is lower than [" + startingSeqNo + "]"; + final TimeValue prepareEngineTime; try { // For a sequence based recovery, the target can keep its local translog - prepareTargetForTranslog(isSequenceNumberBasedRecovery == false, + prepareEngineTime = prepareTargetForTranslog(isSequenceNumberBasedRecovery == false, shard.estimateNumberOfHistoryOperations("peer-recovery", startingSeqNo)); } catch (final Exception e) { throw new RecoveryEngineException(shard.shardId(), 1, "prepare target for translog failed", e); @@ -213,21 +215,25 @@ public RecoveryResponse recoverToTarget() throws IOException { logger.trace("snapshot translog for recovery; current size is [{}]", shard.estimateNumberOfHistoryOperations("peer-recovery", startingSeqNo)); } - final long targetLocalCheckpoint; + final SendSnapshotResult sendSnapshotResult; try (Translog.Snapshot snapshot = shard.getHistoryOperations("peer-recovery", startingSeqNo)) { // we have to capture the max_seen_auto_id_timestamp and the max_seq_no_of_updates to make sure that these values // are at least as high as the corresponding values on the primary when any of these operations were executed on it. final long maxSeenAutoIdTimestamp = shard.getMaxSeenAutoIdTimestamp(); final long maxSeqNoOfUpdatesOrDeletes = shard.getMaxSeqNoOfUpdatesOrDeletes(); - targetLocalCheckpoint = phase2(startingSeqNo, requiredSeqNoRangeStart, endingSeqNo, snapshot, + sendSnapshotResult = phase2(startingSeqNo, requiredSeqNoRangeStart, endingSeqNo, snapshot, maxSeenAutoIdTimestamp, maxSeqNoOfUpdatesOrDeletes); } catch (Exception e) { throw new RecoveryEngineException(shard.shardId(), 2, "phase2 failed", e); } - finalizeRecovery(targetLocalCheckpoint); + finalizeRecovery(sendSnapshotResult.targetLocalCheckpoint); + final long phase1ThrottlingWaitTime = 0L; // TODO: return the actual throttle time + return new RecoveryResponse(sendFileResult.phase1FileNames, sendFileResult.phase1FileSizes, + sendFileResult.phase1ExistingFileNames, sendFileResult.phase1ExistingFileSizes, sendFileResult.totalSize, + sendFileResult.existingTotalSize, sendFileResult.took.millis(), phase1ThrottlingWaitTime, prepareEngineTime.millis(), + sendSnapshotResult.totalOperations, sendSnapshotResult.tookTime.millis()); } - return response; } private boolean isTargetSameHistory() { @@ -237,7 +243,7 @@ private boolean isTargetSameHistory() { return targetHistoryUUID != null && targetHistoryUUID.equals(shard.getHistoryUUID()); } - static void runUnderPrimaryPermit(CancellableThreads.Interruptable runnable, String reason, + static void runUnderPrimaryPermit(CancellableThreads.Interruptible runnable, String reason, IndexShard primary, CancellableThreads cancellableThreads, Logger logger) { cancellableThreads.execute(() -> { CompletableFuture permit = new CompletableFuture<>(); @@ -276,6 +282,32 @@ public void onFailure(Exception e) { }); } + static final class SendFileResult { + final List phase1FileNames; + final List phase1FileSizes; + 
final long totalSize; + + final List phase1ExistingFileNames; + final List phase1ExistingFileSizes; + final long existingTotalSize; + + final TimeValue took; + + SendFileResult(List phase1FileNames, List phase1FileSizes, long totalSize, + List phase1ExistingFileNames, List phase1ExistingFileSizes, long existingTotalSize, TimeValue took) { + this.phase1FileNames = phase1FileNames; + this.phase1FileSizes = phase1FileSizes; + this.totalSize = totalSize; + this.phase1ExistingFileNames = phase1ExistingFileNames; + this.phase1ExistingFileSizes = phase1ExistingFileSizes; + this.existingTotalSize = existingTotalSize; + this.took = took; + } + + static final SendFileResult EMPTY = new SendFileResult(Collections.emptyList(), Collections.emptyList(), 0L, + Collections.emptyList(), Collections.emptyList(), 0L, TimeValue.ZERO); + } + /** * Perform phase1 of the recovery operations. Once this {@link IndexCommit} * snapshot has been performed no commit operations (files being fsync'd) @@ -285,12 +317,16 @@ public void onFailure(Exception e) { * segments that are missing. Only segments that have the same size and * checksum can be reused */ - public void phase1(final IndexCommit snapshot, final Supplier translogOps) { + public SendFileResult phase1(final IndexCommit snapshot, final Supplier translogOps) { cancellableThreads.checkForCancel(); // Total size of segment files that are recovered long totalSize = 0; // Total size of segment files that were able to be re-used long existingTotalSize = 0; + final List phase1FileNames = new ArrayList<>(); + final List phase1FileSizes = new ArrayList<>(); + final List phase1ExistingFileNames = new ArrayList<>(); + final List phase1ExistingFileSizes = new ArrayList<>(); final Store store = shard.store(); store.incRef(); try { @@ -331,8 +367,8 @@ public void phase1(final IndexCommit snapshot, final Supplier translogO } else { final Store.RecoveryDiff diff = recoverySourceMetadata.recoveryDiff(request.metadataSnapshot()); for (StoreFileMetaData md : diff.identical) { - response.phase1ExistingFileNames.add(md.name()); - response.phase1ExistingFileSizes.add(md.length()); + phase1ExistingFileNames.add(md.name()); + phase1ExistingFileSizes.add(md.length()); existingTotalSize += md.length(); if (logger.isTraceEnabled()) { logger.trace("recovery [phase1]: not recovering [{}], exist in local store and has checksum [{}]," + @@ -350,20 +386,16 @@ public void phase1(final IndexCommit snapshot, final Supplier translogO } else { logger.trace("recovery [phase1]: recovering [{}], does not exist in remote", md.name()); } - response.phase1FileNames.add(md.name()); - response.phase1FileSizes.add(md.length()); + phase1FileNames.add(md.name()); + phase1FileSizes.add(md.length()); totalSize += md.length(); } - response.phase1TotalSize = totalSize; - response.phase1ExistingTotalSize = existingTotalSize; - logger.trace("recovery [phase1]: recovering_files [{}] with total_size [{}], reusing_files [{}] with total_size [{}]", - response.phase1FileNames.size(), - new ByteSizeValue(totalSize), response.phase1ExistingFileNames.size(), new ByteSizeValue(existingTotalSize)); - cancellableThreads.execute(() -> - recoveryTarget.receiveFileInfo(response.phase1FileNames, response.phase1FileSizes, response.phase1ExistingFileNames, - response.phase1ExistingFileSizes, translogOps.get())); + phase1FileNames.size(), new ByteSizeValue(totalSize), + phase1ExistingFileNames.size(), new ByteSizeValue(existingTotalSize)); + cancellableThreads.execute(() -> recoveryTarget.receiveFileInfo( + phase1FileNames, 
phase1FileSizes, phase1ExistingFileNames, phase1ExistingFileSizes, translogOps.get())); // How many bytes we've copied since we last called RateLimiter.pause final Function outputStreamFactories = md -> new BufferedOutputStream(new RecoveryOutputStream(md, translogOps), chunkSizeInBytes); @@ -417,27 +449,27 @@ public void phase1(final IndexCommit snapshot, final Supplier translogO } } } - - logger.trace("recovery [phase1]: took [{}]", stopWatch.totalTime()); - response.phase1Time = stopWatch.totalTime().millis(); + final TimeValue took = stopWatch.totalTime(); + logger.trace("recovery [phase1]: took [{}]", took); + return new SendFileResult(phase1FileNames, phase1FileSizes, totalSize, phase1ExistingFileNames, + phase1ExistingFileSizes, existingTotalSize, took); } catch (Exception e) { - throw new RecoverFilesRecoveryException(request.shardId(), response.phase1FileNames.size(), new ByteSizeValue(totalSize), e); + throw new RecoverFilesRecoveryException(request.shardId(), phase1FileNames.size(), new ByteSizeValue(totalSize), e); } finally { store.decRef(); } } - void prepareTargetForTranslog(final boolean fileBasedRecovery, final int totalTranslogOps) throws IOException { + TimeValue prepareTargetForTranslog(final boolean fileBasedRecovery, final int totalTranslogOps) throws IOException { StopWatch stopWatch = new StopWatch().start(); logger.trace("recovery [phase1]: prepare remote engine for translog"); - final long startEngineStart = stopWatch.totalTime().millis(); // Send a request preparing the new shard's translog to receive operations. This ensures the shard engine is started and disables // garbage collection (not the JVM's GC!) of tombstone deletes. cancellableThreads.executeIO(() -> recoveryTarget.prepareForTranslogOperations(fileBasedRecovery, totalTranslogOps)); stopWatch.stop(); - - response.startTime = stopWatch.totalTime().millis() - startEngineStart; - logger.trace("recovery [phase1]: remote engine start took [{}]", stopWatch.totalTime()); + final TimeValue tookTime = stopWatch.totalTime(); + logger.trace("recovery [phase1]: remote engine start took [{}]", tookTime); + return tookTime; } /** @@ -454,102 +486,23 @@ void prepareTargetForTranslog(final boolean fileBasedRecovery, final int totalTr * @param snapshot a snapshot of the translog * @param maxSeenAutoIdTimestamp the max auto_id_timestamp of append-only requests on the primary * @param maxSeqNoOfUpdatesOrDeletes the max seq_no of updates or deletes on the primary after these operations were executed on it. 
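The pattern introduced above — each recovery step times itself with a StopWatch and returns the elapsed TimeValue to the caller, instead of writing into a shared mutable response — can be sketched in isolation. This is a minimal sketch, not code from the change; the Runnable is a hypothetical stand-in for the real remote call such as recoveryTarget.prepareForTranslogOperations(fileBasedRecovery, totalTranslogOps):

```java
import org.elasticsearch.common.StopWatch;
import org.elasticsearch.common.unit.TimeValue;

// Time one recovery step and hand the elapsed time back to the caller,
// which later aggregates it into the final RecoveryResponse.
final class TimedRecoveryStep {
    static TimeValue time(Runnable step) {
        StopWatch stopWatch = new StopWatch().start();
        step.run();
        stopWatch.stop();
        return stopWatch.totalTime();
    }
}
```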
- * @return the local checkpoint on the target + * @return the send snapshot result */ - long phase2(final long startingSeqNo, long requiredSeqNoRangeStart, long endingSeqNo, final Translog.Snapshot snapshot, - final long maxSeenAutoIdTimestamp, final long maxSeqNoOfUpdatesOrDeletes) - throws IOException { + SendSnapshotResult phase2(long startingSeqNo, long requiredSeqNoRangeStart, long endingSeqNo, Translog.Snapshot snapshot, + long maxSeenAutoIdTimestamp, long maxSeqNoOfUpdatesOrDeletes) throws IOException { + assert requiredSeqNoRangeStart <= endingSeqNo + 1: + "requiredSeqNoRangeStart " + requiredSeqNoRangeStart + " is larger than endingSeqNo " + endingSeqNo; + assert startingSeqNo <= requiredSeqNoRangeStart : + "startingSeqNo " + startingSeqNo + " is larger than requiredSeqNoRangeStart " + requiredSeqNoRangeStart; if (shard.state() == IndexShardState.CLOSED) { throw new IndexShardClosedException(request.shardId()); } - cancellableThreads.checkForCancel(); final StopWatch stopWatch = new StopWatch().start(); logger.trace("recovery [phase2]: sending transaction log operations (seq# from [" + startingSeqNo + "], " + "required [" + requiredSeqNoRangeStart + ":" + endingSeqNo + "]"); - // send all the snapshot's translog operations to the target - final SendSnapshotResult result = sendSnapshot( - startingSeqNo, requiredSeqNoRangeStart, endingSeqNo, snapshot, maxSeenAutoIdTimestamp, maxSeqNoOfUpdatesOrDeletes); - - stopWatch.stop(); - logger.trace("recovery [phase2]: took [{}]", stopWatch.totalTime()); - response.phase2Time = stopWatch.totalTime().millis(); - response.phase2Operations = result.totalOperations; - return result.targetLocalCheckpoint; - } - - /* - * finalizes the recovery process - */ - public void finalizeRecovery(final long targetLocalCheckpoint) throws IOException { - if (shard.state() == IndexShardState.CLOSED) { - throw new IndexShardClosedException(request.shardId()); - } - cancellableThreads.checkForCancel(); - StopWatch stopWatch = new StopWatch().start(); - logger.trace("finalizing recovery"); - /* - * Before marking the shard as in-sync we acquire an operation permit. We do this so that there is a barrier between marking a - * shard as in-sync and relocating a shard. If we acquire the permit then no relocation handoff can complete before we are done - * marking the shard as in-sync. If the relocation handoff holds all the permits then after the handoff completes and we acquire - * the permit then the state of the shard will be relocated and this recovery will fail. 
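The two assertions hoisted to the top of phase2 encode its range contract: the snapshot must begin at or before the required range, and the required range may start at most one past endingSeqNo (the empty-range case). A standalone restatement under those assumptions — a hypothetical helper using exceptions where phase2 uses assert statements:

```java
// Validates startingSeqNo <= requiredSeqNoRangeStart <= endingSeqNo + 1,
// so a snapshot starting at startingSeqNo covers the whole required range.
final class SeqNoRanges {
    static void validate(long startingSeqNo, long requiredSeqNoRangeStart, long endingSeqNo) {
        if (requiredSeqNoRangeStart > endingSeqNo + 1) {
            throw new IllegalArgumentException("requiredSeqNoRangeStart [" + requiredSeqNoRangeStart
                + "] is larger than endingSeqNo [" + endingSeqNo + "] + 1");
        }
        if (startingSeqNo > requiredSeqNoRangeStart) {
            throw new IllegalArgumentException("startingSeqNo [" + startingSeqNo
                + "] is larger than requiredSeqNoRangeStart [" + requiredSeqNoRangeStart + "]");
        }
    }
}
```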
- */ - runUnderPrimaryPermit(() -> shard.markAllocationIdAsInSync(request.targetAllocationId(), targetLocalCheckpoint), - shardId + " marking " + request.targetAllocationId() + " as in sync", shard, cancellableThreads, logger); - final long globalCheckpoint = shard.getGlobalCheckpoint(); - cancellableThreads.executeIO(() -> recoveryTarget.finalizeRecovery(globalCheckpoint)); - runUnderPrimaryPermit(() -> shard.updateGlobalCheckpointForShard(request.targetAllocationId(), globalCheckpoint), - shardId + " updating " + request.targetAllocationId() + "'s global checkpoint", shard, cancellableThreads, logger); - - if (request.isPrimaryRelocation()) { - logger.trace("performing relocation hand-off"); - // this acquires all IndexShard operation permits and will thus delay new recoveries until it is done - cancellableThreads.execute(() -> shard.relocated(recoveryTarget::handoffPrimaryContext)); - /* - * if the recovery process fails after disabling primary mode on the source shard, both relocation source and - * target are failed (see {@link IndexShard#updateRoutingEntry}). - */ - } - stopWatch.stop(); - logger.trace("finalizing recovery took [{}]", stopWatch.totalTime()); - } - - static class SendSnapshotResult { - - final long targetLocalCheckpoint; - final int totalOperations; - - SendSnapshotResult(final long targetLocalCheckpoint, final int totalOperations) { - this.targetLocalCheckpoint = targetLocalCheckpoint; - this.totalOperations = totalOperations; - } - - } - - /** - * Send the given snapshot's operations with a sequence number greater than the specified staring sequence number to this handler's - * target node. - *

- * Operations are bulked into a single request depending on an operation count limit or size-in-bytes limit. - * - * @param startingSeqNo the sequence number for which only operations with a sequence number greater than this will be sent - * @param requiredSeqNoRangeStart the lower sequence number of the required range - * @param endingSeqNo the upper bound of the sequence number range to be sent (inclusive) - * @param snapshot the translog snapshot to replay operations from @return the local checkpoint on the target and the - * total number of operations sent - * @param maxSeenAutoIdTimestamp the max auto_id_timestamp of append-only requests on the primary - * @param maxSeqNoOfUpdatesOrDeletes the max seq_no of updates or deletes on the primary after these operations were executed on it. - * @throws IOException if an I/O exception occurred reading the translog snapshot - */ - protected SendSnapshotResult sendSnapshot(final long startingSeqNo, long requiredSeqNoRangeStart, long endingSeqNo, - final Translog.Snapshot snapshot, final long maxSeenAutoIdTimestamp, - final long maxSeqNoOfUpdatesOrDeletes) throws IOException { - assert requiredSeqNoRangeStart <= endingSeqNo + 1: - "requiredSeqNoRangeStart " + requiredSeqNoRangeStart + " is larger than endingSeqNo " + endingSeqNo; - assert startingSeqNo <= requiredSeqNoRangeStart : - "startingSeqNo " + startingSeqNo + " is larger than requiredSeqNoRangeStart " + requiredSeqNoRangeStart; int ops = 0; long size = 0; int skippedOps = 0; @@ -563,7 +516,7 @@ protected SendSnapshotResult sendSnapshot(final long startingSeqNo, long require logger.trace("no translog operations to send"); } - final CancellableThreads.IOInterruptable sendBatch = () -> { + final CancellableThreads.IOInterruptible sendBatch = () -> { final long targetCheckpoint = recoveryTarget.indexTranslogOperations( operations, expectedTotalOps, maxSeenAutoIdTimestamp, maxSeqNoOfUpdatesOrDeletes); targetLocalCheckpoint.set(targetCheckpoint); @@ -615,7 +568,58 @@ protected SendSnapshotResult sendSnapshot(final long startingSeqNo, long require logger.trace("sent final batch of [{}][{}] (total: [{}]) translog operations", ops, new ByteSizeValue(size), expectedTotalOps); - return new SendSnapshotResult(targetLocalCheckpoint.get(), totalSentOps); + stopWatch.stop(); + final TimeValue tookTime = stopWatch.totalTime(); + logger.trace("recovery [phase2]: took [{}]", tookTime); + return new SendSnapshotResult(targetLocalCheckpoint.get(), totalSentOps, tookTime); + } + + /* + * finalizes the recovery process + */ + public void finalizeRecovery(final long targetLocalCheckpoint) throws IOException { + if (shard.state() == IndexShardState.CLOSED) { + throw new IndexShardClosedException(request.shardId()); + } + cancellableThreads.checkForCancel(); + StopWatch stopWatch = new StopWatch().start(); + logger.trace("finalizing recovery"); + /* + * Before marking the shard as in-sync we acquire an operation permit. We do this so that there is a barrier between marking a + * shard as in-sync and relocating a shard. If we acquire the permit then no relocation handoff can complete before we are done + * marking the shard as in-sync. If the relocation handoff holds all the permits then after the handoff completes and we acquire + * the permit then the state of the shard will be relocated and this recovery will fail. 
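The barrier that the comment in finalizeRecovery describes rests on the asymmetry of operation permits: marking the target in-sync takes a single permit, while a relocation hand-off drains all of them, so the two cannot interleave. A toy illustration of that mechanism — plain java.util.concurrent, not ES code; IndexShard's operation permits behave analogously:

```java
import java.util.concurrent.Semaphore;

// While markInSync holds even one permit, relocationHandoff (which needs ALL
// permits) cannot complete, and vice versa.
final class PermitBarrierDemo {
    private static final int ALL_PERMITS = Integer.MAX_VALUE;
    private final Semaphore permits = new Semaphore(ALL_PERMITS);

    void markInSync(Runnable markAllocationIdAsInSync) throws InterruptedException {
        permits.acquire(1);                 // one permit: barrier against hand-off
        try {
            markAllocationIdAsInSync.run();
        } finally {
            permits.release(1);
        }
    }

    void relocationHandoff(Runnable handoff) throws InterruptedException {
        permits.acquire(ALL_PERMITS);       // all permits: excludes every other operation
        try {
            handoff.run();
        } finally {
            permits.release(ALL_PERMITS);
        }
    }
}
```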
+ */ + runUnderPrimaryPermit(() -> shard.markAllocationIdAsInSync(request.targetAllocationId(), targetLocalCheckpoint), + shardId + " marking " + request.targetAllocationId() + " as in sync", shard, cancellableThreads, logger); + final long globalCheckpoint = shard.getGlobalCheckpoint(); + cancellableThreads.executeIO(() -> recoveryTarget.finalizeRecovery(globalCheckpoint)); + runUnderPrimaryPermit(() -> shard.updateGlobalCheckpointForShard(request.targetAllocationId(), globalCheckpoint), + shardId + " updating " + request.targetAllocationId() + "'s global checkpoint", shard, cancellableThreads, logger); + + if (request.isPrimaryRelocation()) { + logger.trace("performing relocation hand-off"); + // this acquires all IndexShard operation permits and will thus delay new recoveries until it is done + cancellableThreads.execute(() -> shard.relocated(recoveryTarget::handoffPrimaryContext)); + /* + * if the recovery process fails after disabling primary mode on the source shard, both relocation source and + * target are failed (see {@link IndexShard#updateRoutingEntry}). + */ + } + stopWatch.stop(); + logger.trace("finalizing recovery took [{}]", stopWatch.totalTime()); + } + + static final class SendSnapshotResult { + final long targetLocalCheckpoint; + final int totalOperations; + final TimeValue tookTime; + + SendSnapshotResult(final long targetLocalCheckpoint, final int totalOperations, final TimeValue tookTime) { + this.targetLocalCheckpoint = targetLocalCheckpoint; + this.totalOperations = totalOperations; + this.tookTime = tookTime; + } } /** diff --git a/server/src/main/java/org/elasticsearch/ingest/ConfigurationUtils.java b/server/src/main/java/org/elasticsearch/ingest/ConfigurationUtils.java index d4f27f47eb8f2..29ae578a64371 100644 --- a/server/src/main/java/org/elasticsearch/ingest/ConfigurationUtils.java +++ b/server/src/main/java/org/elasticsearch/ingest/ConfigurationUtils.java @@ -335,7 +335,7 @@ public static TemplateScript.Factory compileTemplate(String processorType, Strin // installed for use by REST tests. `propertyValue` will not be // modified if templating is not available so a script that simply returns an unmodified `propertyValue` // is returned. - if (scriptService.isLangSupported(DEFAULT_TEMPLATE_LANG)) { + if (scriptService.isLangSupported(DEFAULT_TEMPLATE_LANG) && propertyValue.contains("{{")) { Script script = new Script(ScriptType.INLINE, DEFAULT_TEMPLATE_LANG, propertyValue, Collections.emptyMap()); return scriptService.compile(script, TemplateScript.CONTEXT); } else { diff --git a/server/src/main/java/org/elasticsearch/ingest/ValueSource.java b/server/src/main/java/org/elasticsearch/ingest/ValueSource.java index 4e2787c023539..4dda3e86ba27e 100644 --- a/server/src/main/java/org/elasticsearch/ingest/ValueSource.java +++ b/server/src/main/java/org/elasticsearch/ingest/ValueSource.java @@ -75,7 +75,7 @@ static ValueSource wrap(Object value, ScriptService scriptService) { // This check is here because the DEFAULT_TEMPLATE_LANG(mustache) is not // installed for use by REST tests. 
`value` will not be // modified if templating is not available - if (scriptService.isLangSupported(DEFAULT_TEMPLATE_LANG)) { + if (scriptService.isLangSupported(DEFAULT_TEMPLATE_LANG) && ((String) value).contains("{{")) { Script script = new Script(ScriptType.INLINE, DEFAULT_TEMPLATE_LANG, (String) value, Collections.emptyMap()); return new TemplatedValue(scriptService.compile(script, TemplateScript.CONTEXT)); } else { diff --git a/server/src/main/java/org/elasticsearch/repositories/RepositoriesService.java b/server/src/main/java/org/elasticsearch/repositories/RepositoriesService.java index 7ab6ae0a1f4c9..033080c2c38e6 100644 --- a/server/src/main/java/org/elasticsearch/repositories/RepositoriesService.java +++ b/server/src/main/java/org/elasticsearch/repositories/RepositoriesService.java @@ -111,62 +111,63 @@ public void registerRepository(final RegisterRepositoryRequest request, final Ac return; } - clusterService.submitStateUpdateTask(request.cause, new AckedClusterStateUpdateTask(request, registrationListener) { - @Override - protected ClusterStateUpdateResponse newResponse(boolean acknowledged) { - return new ClusterStateUpdateResponse(acknowledged); - } - - @Override - public ClusterState execute(ClusterState currentState) { - ensureRepositoryNotInUse(currentState, request.name); - MetaData metaData = currentState.metaData(); - MetaData.Builder mdBuilder = MetaData.builder(currentState.metaData()); - RepositoriesMetaData repositories = metaData.custom(RepositoriesMetaData.TYPE); - if (repositories == null) { - logger.info("put repository [{}]", request.name); - repositories = new RepositoriesMetaData( - Collections.singletonList(new RepositoryMetaData(request.name, request.type, request.settings))); - } else { - boolean found = false; - List repositoriesMetaData = new ArrayList<>(repositories.repositories().size() + 1); + clusterService.submitStateUpdateTask(request.cause, + new AckedClusterStateUpdateTask(request, registrationListener) { + @Override + protected ClusterStateUpdateResponse newResponse(boolean acknowledged) { + return new ClusterStateUpdateResponse(acknowledged); + } - for (RepositoryMetaData repositoryMetaData : repositories.repositories()) { - if (repositoryMetaData.name().equals(newRepositoryMetaData.name())) { - if (newRepositoryMetaData.equals(repositoryMetaData)) { - // Previous version is the same as this one no update is needed. - return currentState; + @Override + public ClusterState execute(ClusterState currentState) { + ensureRepositoryNotInUse(currentState, request.name); + MetaData metaData = currentState.metaData(); + MetaData.Builder mdBuilder = MetaData.builder(currentState.metaData()); + RepositoriesMetaData repositories = metaData.custom(RepositoriesMetaData.TYPE); + if (repositories == null) { + logger.info("put repository [{}]", request.name); + repositories = new RepositoriesMetaData( + Collections.singletonList(new RepositoryMetaData(request.name, request.type, request.settings))); + } else { + boolean found = false; + List repositoriesMetaData = new ArrayList<>(repositories.repositories().size() + 1); + + for (RepositoryMetaData repositoryMetaData : repositories.repositories()) { + if (repositoryMetaData.name().equals(newRepositoryMetaData.name())) { + if (newRepositoryMetaData.equals(repositoryMetaData)) { + // Previous version is the same as this one no update is needed. 
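The ConfigurationUtils and ValueSource hunks above share one micro-optimization: a string is only handed to the script service when it actually contains mustache syntax ("{{"); plain constants bypass compilation entirely. A minimal sketch under generic stand-ins — `compile` below plays the role of scriptService.compile(script, TemplateScript.CONTEXT):

```java
import java.util.function.Function;
import java.util.function.Supplier;

// Compile only real templates; return constants as-is with no compilation.
final class TemplateFastPath {
    static Supplier<String> wrap(String value, Function<String, Supplier<String>> compile) {
        if (value.contains("{{")) {
            return compile.apply(value);  // real template: compile once, render per document
        }
        return () -> value;               // constant: no script compilation at all
    }
}
```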
+ return currentState; + } + found = true; + repositoriesMetaData.add(newRepositoryMetaData); + } else { + repositoriesMetaData.add(repositoryMetaData); } - found = true; - repositoriesMetaData.add(newRepositoryMetaData); + } + if (!found) { + logger.info("put repository [{}]", request.name); + repositoriesMetaData.add(new RepositoryMetaData(request.name, request.type, request.settings)); } else { - repositoriesMetaData.add(repositoryMetaData); + logger.info("update repository [{}]", request.name); } + repositories = new RepositoriesMetaData(repositoriesMetaData); } - if (!found) { - logger.info("put repository [{}]", request.name); - repositoriesMetaData.add(new RepositoryMetaData(request.name, request.type, request.settings)); - } else { - logger.info("update repository [{}]", request.name); - } - repositories = new RepositoriesMetaData(repositoriesMetaData); + mdBuilder.putCustom(RepositoriesMetaData.TYPE, repositories); + return ClusterState.builder(currentState).metaData(mdBuilder).build(); } - mdBuilder.putCustom(RepositoriesMetaData.TYPE, repositories); - return ClusterState.builder(currentState).metaData(mdBuilder).build(); - } - @Override - public void onFailure(String source, Exception e) { - logger.warn(() -> new ParameterizedMessage("failed to create repository [{}]", request.name), e); - super.onFailure(source, e); - } + @Override + public void onFailure(String source, Exception e) { + logger.warn(() -> new ParameterizedMessage("failed to create repository [{}]", request.name), e); + super.onFailure(source, e); + } - @Override - public boolean mustAck(DiscoveryNode discoveryNode) { - // repository is created on both master and data nodes - return discoveryNode.isMasterNode() || discoveryNode.isDataNode(); - } - }); + @Override + public boolean mustAck(DiscoveryNode discoveryNode) { + // repository is created on both master and data nodes + return discoveryNode.isMasterNode() || discoveryNode.isDataNode(); + } + }); } /** * Unregisters repository in the cluster @@ -323,7 +324,8 @@ public void applyClusterState(ClusterChangedEvent event) { } catch (RepositoryException ex) { // TODO: this catch is bogus, it means the old repo is already closed, // but we have nothing to replace it - logger.warn(() -> new ParameterizedMessage("failed to change repository [{}]", repositoryMetaData.name()), ex); + logger.warn(() -> new ParameterizedMessage("failed to change repository [{}]", + repositoryMetaData.name()), ex); } } } else { @@ -411,7 +413,8 @@ private Repository createRepository(RepositoryMetaData repositoryMetaData, Map listener) { @@ -90,28 +92,31 @@ public void verify(String repository, String verificationToken, final ActionList finishVerification(listener, nodes, errors); } } else { - transportService.sendRequest(node, ACTION_NAME, new VerifyNodeRepositoryRequest(repository, verificationToken), new EmptyTransportResponseHandler(ThreadPool.Names.SAME) { - @Override - public void handleResponse(TransportResponse.Empty response) { - if (counter.decrementAndGet() == 0) { - finishVerification(listener, nodes, errors); + transportService.sendRequest(node, ACTION_NAME, new VerifyNodeRepositoryRequest(repository, verificationToken), + new EmptyTransportResponseHandler(ThreadPool.Names.SAME) { + @Override + public void handleResponse(TransportResponse.Empty response) { + if (counter.decrementAndGet() == 0) { + finishVerification(listener, nodes, errors); + } } - } - @Override - public void handleException(TransportException exp) { - errors.add(new VerificationFailure(node.getId(), exp)); - 
if (counter.decrementAndGet() == 0) { - finishVerification(listener, nodes, errors); + @Override + public void handleException(TransportException exp) { + errors.add(new VerificationFailure(node.getId(), exp)); + if (counter.decrementAndGet() == 0) { + finishVerification(listener, nodes, errors); + } } - } - }); + }); } } } - public void finishVerification(ActionListener listener, List nodes, CopyOnWriteArrayList errors) { - listener.onResponse(new RepositoriesService.VerifyResponse(nodes.toArray(new DiscoveryNode[nodes.size()]), errors.toArray(new VerificationFailure[errors.size()]))); + public void finishVerification(ActionListener listener, List nodes, + CopyOnWriteArrayList errors) { + listener.onResponse(new RepositoriesService.VerifyResponse(nodes.toArray(new DiscoveryNode[nodes.size()]), + errors.toArray(new VerificationFailure[errors.size()]))); } private void doVerify(String repositoryName, String verificationToken, DiscoveryNode localNode) { diff --git a/server/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java b/server/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java index 09eb557fe9cad..99cb1db3e3652 100644 --- a/server/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java +++ b/server/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java @@ -479,12 +479,12 @@ public void deleteSnapshot(SnapshotId snapshotId, long repositoryStateId) { // we'll ignore that and accept that cleanup didn't fully succeed. // since we are using UUIDs for path names, this won't be an issue for // snapshotting indices of the same name - logger.debug(() -> new ParameterizedMessage("[{}] index [{}] no longer part of any snapshots in the repository, but failed to clean up " + - "its index folder due to the directory not being empty.", metadata.name(), indexId), dnee); + logger.debug(() -> new ParameterizedMessage("[{}] index [{}] no longer part of any snapshots in the repository, " + + "but failed to clean up its index folder due to the directory not being empty.", metadata.name(), indexId), dnee); } catch (IOException ioe) { // a different IOException occurred while trying to delete - will just log the issue for now - logger.debug(() -> new ParameterizedMessage("[{}] index [{}] no longer part of any snapshots in the repository, but failed to clean up " + - "its index folder.", metadata.name(), indexId), ioe); + logger.debug(() -> new ParameterizedMessage("[{}] index [{}] no longer part of any snapshots in the repository, " + + "but failed to clean up its index folder.", metadata.name(), indexId), ioe); } } } catch (IOException | ResourceNotFoundException ex) { @@ -524,7 +524,8 @@ private void deleteIndexMetaDataBlobIgnoringErrors(final SnapshotInfo snapshotIn try { indexMetaDataFormat.delete(indexMetaDataBlobContainer, snapshotId.getUUID()); } catch (IOException ex) { - logger.warn(() -> new ParameterizedMessage("[{}] failed to delete metadata for index [{}]", snapshotId, indexId.getName()), ex); + logger.warn(() -> new ParameterizedMessage("[{}] failed to delete metadata for index [{}]", + snapshotId, indexId.getName()), ex); } } @@ -861,7 +862,8 @@ public void snapshotShard(IndexShard shard, Store store, SnapshotId snapshotId, } @Override - public void restoreShard(IndexShard shard, SnapshotId snapshotId, Version version, IndexId indexId, ShardId snapshotShardId, RecoveryState recoveryState) { + public void restoreShard(IndexShard shard, SnapshotId snapshotId, Version version, IndexId 
indexId, ShardId snapshotShardId, + RecoveryState recoveryState) { final RestoreContext snapshotContext = new RestoreContext(shard, snapshotId, indexId, snapshotShardId, recoveryState); try { snapshotContext.restore(); @@ -898,12 +900,14 @@ public void verify(String seed, DiscoveryNode localNode) { testBlobContainer.writeBlob("data-" + localNode.getId() + ".dat", stream, bytes.length(), true); } } catch (IOException exp) { - throw new RepositoryVerificationException(metadata.name(), "store location [" + blobStore() + "] is not accessible on the node [" + localNode + "]", exp); + throw new RepositoryVerificationException(metadata.name(), "store location [" + blobStore() + + "] is not accessible on the node [" + localNode + "]", exp); } } else { - throw new RepositoryVerificationException(metadata.name(), "a file written by master to the store [" + blobStore() + "] cannot be accessed on the node [" + localNode + "]. " - + "This might indicate that the store [" + blobStore() + "] is not shared between this node and the master node or " - + "that permissions on the store don't allow reading files written by the master node"); + throw new RepositoryVerificationException(metadata.name(), "a file written by master to the store [" + blobStore() + + "] cannot be accessed on the node [" + localNode + "]. " + + "This might indicate that the store [" + blobStore() + "] is not shared between this node and the master node or " + + "that permissions on the store don't allow reading files written by the master node"); } } } @@ -945,7 +949,8 @@ private class Context { Context(SnapshotId snapshotId, IndexId indexId, ShardId shardId, ShardId snapshotShardId) { this.snapshotId = snapshotId; this.shardId = shardId; - blobContainer = blobStore().blobContainer(basePath().add("indices").add(indexId.getId()).add(Integer.toString(snapshotShardId.getId()))); + blobContainer = blobStore().blobContainer(basePath().add("indices").add(indexId.getId()) + .add(Integer.toString(snapshotShardId.getId()))); } /** @@ -1235,7 +1240,8 @@ public void snapshot(final IndexCommit snapshotIndexCommit) { // in a bwc compatible way. maybeRecalculateMetadataHash(blobContainer, fileInfo, metadata); } catch (Exception e) { - logger.warn(() -> new ParameterizedMessage("{} Can't calculate hash from blob for file [{}] [{}]", shardId, fileInfo.physicalName(), fileInfo.metadata()), e); + logger.warn(() -> new ParameterizedMessage("{} Can't calculate hash from blob for file [{}] [{}]", + shardId, fileInfo.physicalName(), fileInfo.metadata()), e); } if (fileInfo.isSame(md) && snapshotFileExistsInBlobs(fileInfo, blobs)) { // a commit point file with the same name, size and checksum was already copied to repository @@ -1253,7 +1259,8 @@ public void snapshot(final IndexCommit snapshotIndexCommit) { indexIncrementalFileCount++; indexIncrementalSize += md.length(); // create a new FileInfo - BlobStoreIndexShardSnapshot.FileInfo snapshotFileInfo = new BlobStoreIndexShardSnapshot.FileInfo(fileNameFromGeneration(++generation), md, chunkSize()); + BlobStoreIndexShardSnapshot.FileInfo snapshotFileInfo = + new BlobStoreIndexShardSnapshot.FileInfo(fileNameFromGeneration(++generation), md, chunkSize()); indexCommitPointFiles.add(snapshotFileInfo); filesToSnapshot.add(snapshotFileInfo); } else { @@ -1411,7 +1418,8 @@ private void checkAborted() { * The new logic for StoreFileMetaData reads the entire {@code .si} and {@code segments.n} files to strengthen the * comparison of the files on a per-segment / per-commit level. 
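The guard inside maybeRecalculateMetadataHash reduces to a single comparison: the blob is re-read only when the local store metadata carries a hash to compare against while the snapshot's FileInfo predates hashes (older snapshot format). A condensed sketch with the types reduced to raw hash arrays for illustration:

```java
// True only for the legacy case: local metadata is hashed, snapshot entry is not.
final class LegacyHashCheck {
    static boolean needsRecalculation(byte[] localHash, byte[] snapshotHash) {
        return localHash.length > 0 && snapshotHash.length == 0;
    }
}
```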
*/ - private static void maybeRecalculateMetadataHash(final BlobContainer blobContainer, final BlobStoreIndexShardSnapshot.FileInfo fileInfo, Store.MetadataSnapshot snapshot) throws Exception { + private static void maybeRecalculateMetadataHash(final BlobContainer blobContainer, final BlobStoreIndexShardSnapshot.FileInfo fileInfo, + Store.MetadataSnapshot snapshot) throws Exception { final StoreFileMetaData metadata; if (fileInfo != null && (metadata = snapshot.get(fileInfo.physicalName())) != null) { if (metadata.hash().length > 0 && fileInfo.metadata().hash().length == 0) { @@ -1509,7 +1517,8 @@ public void restore() throws IOException { logger.trace("[{}] [{}] restoring from to an empty shard", shardId, snapshotId); recoveryTargetMetadata = Store.MetadataSnapshot.EMPTY; } catch (IOException e) { - logger.warn(() -> new ParameterizedMessage("{} Can't read metadata from store, will not reuse any local file while restoring", shardId), e); + logger.warn(() -> new ParameterizedMessage("{} Can't read metadata from store, will not reuse any " + + "local file while restoring", shardId), e); recoveryTargetMetadata = Store.MetadataSnapshot.EMPTY; } @@ -1525,7 +1534,8 @@ public void restore() throws IOException { maybeRecalculateMetadataHash(blobContainer, fileInfo, recoveryTargetMetadata); } catch (Exception e) { // if the index is broken we might not be able to read it - logger.warn(() -> new ParameterizedMessage("{} Can't calculate hash from blog for file [{}] [{}]", shardId, fileInfo.physicalName(), fileInfo.metadata()), e); + logger.warn(() -> new ParameterizedMessage("{} Can't calculate hash from blog for file [{}] [{}]", + shardId, fileInfo.physicalName(), fileInfo.metadata()), e); } snapshotMetaData.put(fileInfo.metadata().name(), fileInfo.metadata()); fileInfos.put(fileInfo.metadata().name(), fileInfo); @@ -1543,7 +1553,8 @@ public void restore() throws IOException { BlobStoreIndexShardSnapshot.FileInfo fileInfo = fileInfos.get(md.name()); recoveryState.getIndex().addFileDetail(fileInfo.name(), fileInfo.length(), true); if (logger.isTraceEnabled()) { - logger.trace("[{}] [{}] not_recovering [{}] from [{}], exists in local store and is same", shardId, snapshotId, fileInfo.physicalName(), fileInfo.name()); + logger.trace("[{}] [{}] not_recovering [{}] from [{}], exists in local store and is same", + shardId, snapshotId, fileInfo.physicalName(), fileInfo.name()); } } @@ -1634,7 +1645,8 @@ private void restoreFile(final BlobStoreIndexShardSnapshot.FileInfo fileInfo, fi stream = new RateLimitingInputStream(partSliceStream, restoreRateLimiter, restoreRateLimitingTimeInNanos::inc); } - try (IndexOutput indexOutput = store.createVerifyingOutput(fileInfo.physicalName(), fileInfo.metadata(), IOContext.DEFAULT)) { + try (IndexOutput indexOutput = store.createVerifyingOutput(fileInfo.physicalName(), + fileInfo.metadata(), IOContext.DEFAULT)) { final byte[] buffer = new byte[BUFFER_SIZE]; int length; while ((length = stream.read(buffer)) > 0) { diff --git a/server/src/main/java/org/elasticsearch/repositories/fs/FsRepository.java b/server/src/main/java/org/elasticsearch/repositories/fs/FsRepository.java index 3db5195097145..01c08fbce0044 100644 --- a/server/src/main/java/org/elasticsearch/repositories/fs/FsRepository.java +++ b/server/src/main/java/org/elasticsearch/repositories/fs/FsRepository.java @@ -43,7 +43,8 @@ *
<dl>
 * <dt>{@code location}</dt><dd>Path to the root of repository. This is mandatory parameter.</dd>
 * <dt>{@code concurrent_streams}</dt><dd>Number of concurrent read/write stream (per repository on each node). Defaults to 5.</dd>
- * <dt>{@code chunk_size}</dt><dd>Large file can be divided into chunks. This parameter specifies the chunk size. Defaults to not chucked.</dd>
+ * <dt>{@code chunk_size}</dt><dd>Large file can be divided into chunks. This parameter specifies the chunk size.
+ * Defaults to not chunked.</dd>
 * <dt>{@code compress}</dt><dd>If set to true metadata files will be stored compressed. Defaults to false.</dd>
 * </dl>
*/ diff --git a/server/src/main/java/org/elasticsearch/rest/BaseRestHandler.java b/server/src/main/java/org/elasticsearch/rest/BaseRestHandler.java index 5bb806c02d6df..97b4e29d9a208 100644 --- a/server/src/main/java/org/elasticsearch/rest/BaseRestHandler.java +++ b/server/src/main/java/org/elasticsearch/rest/BaseRestHandler.java @@ -58,6 +58,12 @@ public abstract class BaseRestHandler extends AbstractComponent implements RestH private final LongAdder usageCount = new LongAdder(); + /** + * Parameter that controls whether certain REST apis should include type names in their requests or responses. + * Note: Support for this parameter will be removed after the transition period to typeless APIs. + */ + public static final String INCLUDE_TYPE_NAME_PARAMETER = "include_type_name"; + protected BaseRestHandler(Settings settings) { // TODO drop settings from ctor } diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestGetRepositoriesAction.java b/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestGetRepositoriesAction.java index ccb10b603d3b2..5d8c93ca4337c 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestGetRepositoriesAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestGetRepositoriesAction.java @@ -51,7 +51,7 @@ public RestGetRepositoriesAction(Settings settings, RestController controller, S @Override public String getName() { - return "get_respositories_action"; + return "get_repositories_action"; } @Override diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestCreateIndexAction.java b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestCreateIndexAction.java index 08637e0dfce1d..aa0867696c674 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestCreateIndexAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestCreateIndexAction.java @@ -49,7 +49,7 @@ public String getName() { @Override public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException { - final boolean includeTypeName = request.paramAsBoolean("include_type_name", true); + final boolean includeTypeName = request.paramAsBoolean(INCLUDE_TYPE_NAME_PARAMETER, true); CreateIndexRequest createIndexRequest = new CreateIndexRequest(request.param("index")); if (request.hasContent()) { Map sourceAsMap = XContentHelper.convertToMap(request.content(), false, request.getXContentType()).v2(); diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestGetFieldMappingAction.java b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestGetFieldMappingAction.java index c43f14dcddf26..f3a73fa29fd98 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestGetFieldMappingAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestGetFieldMappingAction.java @@ -62,6 +62,13 @@ public RestChannelConsumer prepareRequest(final RestRequest request, final NodeC final String[] indices = Strings.splitStringByCommaToArray(request.param("index")); final String[] types = request.paramAsStringArrayOrEmptyIfAll("type"); final String[] fields = Strings.splitStringByCommaToArray(request.param("fields")); + + boolean includeTypeName = request.paramAsBoolean(INCLUDE_TYPE_NAME_PARAMETER, true); + if (includeTypeName == false && types.length > 0) { + throw new IllegalArgumentException("Cannot set 
include_type_name=false and specify" + + " types at the same time."); + } + GetFieldMappingsRequest getMappingsRequest = new GetFieldMappingsRequest(); getMappingsRequest.indices(indices).types(types).fields(fields).includeDefaults(request.paramAsBoolean("include_defaults", false)); getMappingsRequest.indicesOptions(IndicesOptions.fromRequest(request, getMappingsRequest.indicesOptions())); diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestGetIndexTemplateAction.java b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestGetIndexTemplateAction.java index 38c1cb76611f4..50370797aa6f2 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestGetIndexTemplateAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestGetIndexTemplateAction.java @@ -24,6 +24,7 @@ import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.util.set.Sets; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestRequest; @@ -31,6 +32,7 @@ import org.elasticsearch.rest.action.RestToXContentListener; import java.io.IOException; +import java.util.Collections; import java.util.Set; import static org.elasticsearch.rest.RestRequest.Method.GET; @@ -43,6 +45,9 @@ */ public class RestGetIndexTemplateAction extends BaseRestHandler { + private static final Set RESPONSE_PARAMETERS = Collections.unmodifiableSet(Sets.union( + Collections.singleton(INCLUDE_TYPE_NAME_PARAMETER), Settings.FORMAT_PARAMS)); + public RestGetIndexTemplateAction(final Settings settings, final RestController controller) { super(settings); controller.registerHandler(GET, "/_template", this); @@ -79,7 +84,7 @@ protected RestStatus getStatus(final GetIndexTemplatesResponse response) { @Override protected Set responseParams() { - return Settings.FORMAT_PARAMS; + return RESPONSE_PARAMETERS; } } diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestGetIndicesAction.java b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestGetIndicesAction.java index 04fae0f30f6bf..f38df9326949f 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestGetIndicesAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestGetIndicesAction.java @@ -20,10 +20,12 @@ package org.elasticsearch.rest.action.admin.indices; +import org.apache.logging.log4j.LogManager; import org.elasticsearch.action.admin.indices.get.GetIndexRequest; import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.common.Strings; +import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestController; @@ -31,7 +33,10 @@ import org.elasticsearch.rest.action.RestToXContentListener; import java.io.IOException; +import java.util.Collections; import java.util.Set; +import java.util.stream.Collectors; +import java.util.stream.Stream; import static org.elasticsearch.rest.RestRequest.Method.GET; import static org.elasticsearch.rest.RestRequest.Method.HEAD; @@ -41,6 +46,13 @@ */ public class RestGetIndicesAction extends BaseRestHandler { + private static final DeprecationLogger deprecationLogger = new 
DeprecationLogger(LogManager.getLogger(RestGetIndicesAction.class)); + static final String TYPES_DEPRECATION_MESSAGE = "[types removal] Using `include_type_name` in get indices requests is deprecated. " + + "The parameter will be removed in the next major version."; + + private static final Set allowedResponseParameters = Collections + .unmodifiableSet(Stream.concat(Collections.singleton(INCLUDE_TYPE_NAME_PARAMETER).stream(), Settings.FORMAT_PARAMS.stream()) + .collect(Collectors.toSet())); public RestGetIndicesAction( final Settings settings, @@ -58,6 +70,10 @@ public String getName() { @Override public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException { String[] indices = Strings.splitStringByCommaToArray(request.param("index")); + // starting with 7.0 we don't include types by default in the response + if (request.hasParam(INCLUDE_TYPE_NAME_PARAMETER)) { + deprecationLogger.deprecatedAndMaybeLog("get_indices_with_types", TYPES_DEPRECATION_MESSAGE); + } final GetIndexRequest getIndexRequest = new GetIndexRequest(); getIndexRequest.indices(indices); getIndexRequest.indicesOptions(IndicesOptions.fromRequest(request, getIndexRequest.indicesOptions())); @@ -68,9 +84,12 @@ public RestChannelConsumer prepareRequest(final RestRequest request, final NodeC return channel -> client.admin().indices().getIndex(getIndexRequest, new RestToXContentListener<>(channel)); } + /** + * Parameters used for controlling the response and thus might not be consumed during + * preparation of the request execution in {@link BaseRestHandler#prepareRequest(RestRequest, NodeClient)}. + */ @Override protected Set responseParams() { - return Settings.FORMAT_PARAMS; + return allowedResponseParameters; } - } diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestGetMappingAction.java b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestGetMappingAction.java index aca6235420cca..ee052f96a1a33 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestGetMappingAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestGetMappingAction.java @@ -20,8 +20,9 @@ package org.elasticsearch.rest.action.admin.indices; import com.carrotsearch.hppc.cursors.ObjectCursor; -import org.apache.logging.log4j.Logger; + import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsRequest; import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsResponse; import org.elasticsearch.action.support.IndicesOptions; @@ -85,7 +86,7 @@ public RestChannelConsumer prepareRequest(final RestRequest request, final NodeC deprecationLogger.deprecated("Type exists requests are deprecated, as types have been deprecated."); } - final boolean includeTypeName = request.paramAsBoolean("include_type_name", true); + final boolean includeTypeName = request.paramAsBoolean(INCLUDE_TYPE_NAME_PARAMETER, true); final String[] indices = Strings.splitStringByCommaToArray(request.param("index")); final String[] types = request.paramAsStringArrayOrEmptyIfAll("type"); final GetMappingsRequest getMappingsRequest = new GetMappingsRequest(); diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestPutIndexTemplateAction.java b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestPutIndexTemplateAction.java index 258bb05a7d66c..f5cc3c6aad26d 100644 --- 
a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestPutIndexTemplateAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestPutIndexTemplateAction.java @@ -25,6 +25,8 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.xcontent.XContentHelper; +import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestRequest; @@ -33,6 +35,8 @@ import java.io.IOException; import java.util.Arrays; import java.util.Collections; +import java.util.HashMap; +import java.util.Map; public class RestPutIndexTemplateAction extends BaseRestHandler { @@ -63,8 +67,23 @@ public RestChannelConsumer prepareRequest(final RestRequest request, final NodeC putRequest.masterNodeTimeout(request.paramAsTime("master_timeout", putRequest.masterNodeTimeout())); putRequest.create(request.paramAsBoolean("create", false)); putRequest.cause(request.param("cause", "")); - putRequest.source(request.requiredContent(), request.getXContentType()); + + boolean includeTypeName = request.paramAsBoolean(INCLUDE_TYPE_NAME_PARAMETER, true); + Map sourceAsMap = prepareRequestSource(request, includeTypeName); + putRequest.source(sourceAsMap); + return channel -> client.admin().indices().putTemplate(putRequest, new RestToXContentListener<>(channel)); } + Map prepareRequestSource(RestRequest request, boolean includeTypeName) { + Map sourceAsMap = XContentHelper.convertToMap(request.requiredContent(), false, + request.getXContentType()).v2(); + if (includeTypeName == false && sourceAsMap.containsKey("mappings")) { + Map newSourceAsMap = new HashMap<>(sourceAsMap); + newSourceAsMap.put("mappings", Collections.singletonMap(MapperService.SINGLE_MAPPING_NAME, sourceAsMap.get("mappings"))); + return newSourceAsMap; + } else { + return sourceAsMap; + } + } } diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestPutMappingAction.java b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestPutMappingAction.java index dc77cf52a8cfc..286bd82fd3501 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestPutMappingAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestPutMappingAction.java @@ -68,7 +68,7 @@ public String getName() { @Override public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException { - final boolean includeTypeName = request.paramAsBoolean("include_type_name", true); + final boolean includeTypeName = request.paramAsBoolean(INCLUDE_TYPE_NAME_PARAMETER, true); PutMappingRequest putMappingRequest = putMappingRequest(Strings.splitStringByCommaToArray(request.param("index"))); final String type = request.param("type"); if (type != null && includeTypeName == false) { diff --git a/server/src/main/java/org/elasticsearch/rest/action/cat/RestIndicesAction.java b/server/src/main/java/org/elasticsearch/rest/action/cat/RestIndicesAction.java index 1a859933ad3fe..676f2bbdc7b2e 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/cat/RestIndicesAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/cat/RestIndicesAction.java @@ -41,6 +41,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.time.DateFormatters; import 
org.elasticsearch.index.Index; +import org.elasticsearch.index.IndexSettings; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.RestResponse; @@ -121,7 +122,8 @@ public void processResponse(final ClusterHealthResponse clusterHealthResponse) { client.admin().indices().stats(indicesStatsRequest, new RestResponseListener(channel) { @Override public RestResponse buildResponse(IndicesStatsResponse indicesStatsResponse) throws Exception { - Table tab = buildTable(request, concreteIndices, clusterHealthResponse, indicesStatsResponse, state.metaData()); + Table tab = buildTable(request, concreteIndices, clusterHealthResponse, + indicesStatsResponse, state.metaData()); return RestTable.buildResponse(tab, channel); } }); @@ -167,28 +169,36 @@ protected Table getTableWithHeader(final RestRequest request) { table.addCell("completion.size", "sibling:pri;alias:cs,completionSize;default:false;text-align:right;desc:size of completion"); table.addCell("pri.completion.size", "default:false;text-align:right;desc:size of completion"); - table.addCell("fielddata.memory_size", "sibling:pri;alias:fm,fielddataMemory;default:false;text-align:right;desc:used fielddata cache"); + table.addCell("fielddata.memory_size", + "sibling:pri;alias:fm,fielddataMemory;default:false;text-align:right;desc:used fielddata cache"); table.addCell("pri.fielddata.memory_size", "default:false;text-align:right;desc:used fielddata cache"); - table.addCell("fielddata.evictions", "sibling:pri;alias:fe,fielddataEvictions;default:false;text-align:right;desc:fielddata evictions"); + table.addCell("fielddata.evictions", + "sibling:pri;alias:fe,fielddataEvictions;default:false;text-align:right;desc:fielddata evictions"); table.addCell("pri.fielddata.evictions", "default:false;text-align:right;desc:fielddata evictions"); - table.addCell("query_cache.memory_size", "sibling:pri;alias:qcm,queryCacheMemory;default:false;text-align:right;desc:used query cache"); + table.addCell("query_cache.memory_size", + "sibling:pri;alias:qcm,queryCacheMemory;default:false;text-align:right;desc:used query cache"); table.addCell("pri.query_cache.memory_size", "default:false;text-align:right;desc:used query cache"); - table.addCell("query_cache.evictions", "sibling:pri;alias:qce,queryCacheEvictions;default:false;text-align:right;desc:query cache evictions"); + table.addCell("query_cache.evictions", + "sibling:pri;alias:qce,queryCacheEvictions;default:false;text-align:right;desc:query cache evictions"); table.addCell("pri.query_cache.evictions", "default:false;text-align:right;desc:query cache evictions"); - table.addCell("request_cache.memory_size", "sibling:pri;alias:rcm,requestCacheMemory;default:false;text-align:right;desc:used request cache"); + table.addCell("request_cache.memory_size", + "sibling:pri;alias:rcm,requestCacheMemory;default:false;text-align:right;desc:used request cache"); table.addCell("pri.request_cache.memory_size", "default:false;text-align:right;desc:used request cache"); - table.addCell("request_cache.evictions", "sibling:pri;alias:rce,requestCacheEvictions;default:false;text-align:right;desc:request cache evictions"); + table.addCell("request_cache.evictions", + "sibling:pri;alias:rce,requestCacheEvictions;default:false;text-align:right;desc:request cache evictions"); table.addCell("pri.request_cache.evictions", "default:false;text-align:right;desc:request cache evictions"); - table.addCell("request_cache.hit_count", 
"sibling:pri;alias:rchc,requestCacheHitCount;default:false;text-align:right;desc:request cache hit count"); + table.addCell("request_cache.hit_count", + "sibling:pri;alias:rchc,requestCacheHitCount;default:false;text-align:right;desc:request cache hit count"); table.addCell("pri.request_cache.hit_count", "default:false;text-align:right;desc:request cache hit count"); - table.addCell("request_cache.miss_count", "sibling:pri;alias:rcmc,requestCacheMissCount;default:false;text-align:right;desc:request cache miss count"); + table.addCell("request_cache.miss_count", + "sibling:pri;alias:rcmc,requestCacheMissCount;default:false;text-align:right;desc:request cache miss count"); table.addCell("pri.request_cache.miss_count", "default:false;text-align:right;desc:request cache miss count"); table.addCell("flush.total", "sibling:pri;alias:ft,flushTotal;default:false;text-align:right;desc:number of flushes"); @@ -206,49 +216,64 @@ protected Table getTableWithHeader(final RestRequest request) { table.addCell("get.total", "sibling:pri;alias:gto,getTotal;default:false;text-align:right;desc:number of get ops"); table.addCell("pri.get.total", "default:false;text-align:right;desc:number of get ops"); - table.addCell("get.exists_time", "sibling:pri;alias:geti,getExistsTime;default:false;text-align:right;desc:time spent in successful gets"); + table.addCell("get.exists_time", + "sibling:pri;alias:geti,getExistsTime;default:false;text-align:right;desc:time spent in successful gets"); table.addCell("pri.get.exists_time", "default:false;text-align:right;desc:time spent in successful gets"); - table.addCell("get.exists_total", "sibling:pri;alias:geto,getExistsTotal;default:false;text-align:right;desc:number of successful gets"); + table.addCell("get.exists_total", + "sibling:pri;alias:geto,getExistsTotal;default:false;text-align:right;desc:number of successful gets"); table.addCell("pri.get.exists_total", "default:false;text-align:right;desc:number of successful gets"); - table.addCell("get.missing_time", "sibling:pri;alias:gmti,getMissingTime;default:false;text-align:right;desc:time spent in failed gets"); + table.addCell("get.missing_time", + "sibling:pri;alias:gmti,getMissingTime;default:false;text-align:right;desc:time spent in failed gets"); table.addCell("pri.get.missing_time", "default:false;text-align:right;desc:time spent in failed gets"); - table.addCell("get.missing_total", "sibling:pri;alias:gmto,getMissingTotal;default:false;text-align:right;desc:number of failed gets"); + table.addCell("get.missing_total", + "sibling:pri;alias:gmto,getMissingTotal;default:false;text-align:right;desc:number of failed gets"); table.addCell("pri.get.missing_total", "default:false;text-align:right;desc:number of failed gets"); - table.addCell("indexing.delete_current", "sibling:pri;alias:idc,indexingDeleteCurrent;default:false;text-align:right;desc:number of current deletions"); + table.addCell("indexing.delete_current", + "sibling:pri;alias:idc,indexingDeleteCurrent;default:false;text-align:right;desc:number of current deletions"); table.addCell("pri.indexing.delete_current", "default:false;text-align:right;desc:number of current deletions"); - table.addCell("indexing.delete_time", "sibling:pri;alias:idti,indexingDeleteTime;default:false;text-align:right;desc:time spent in deletions"); + table.addCell("indexing.delete_time", + "sibling:pri;alias:idti,indexingDeleteTime;default:false;text-align:right;desc:time spent in deletions"); table.addCell("pri.indexing.delete_time", "default:false;text-align:right;desc:time spent 
in deletions"); - table.addCell("indexing.delete_total", "sibling:pri;alias:idto,indexingDeleteTotal;default:false;text-align:right;desc:number of delete ops"); + table.addCell("indexing.delete_total", + "sibling:pri;alias:idto,indexingDeleteTotal;default:false;text-align:right;desc:number of delete ops"); table.addCell("pri.indexing.delete_total", "default:false;text-align:right;desc:number of delete ops"); - table.addCell("indexing.index_current", "sibling:pri;alias:iic,indexingIndexCurrent;default:false;text-align:right;desc:number of current indexing ops"); + table.addCell("indexing.index_current", + "sibling:pri;alias:iic,indexingIndexCurrent;default:false;text-align:right;desc:number of current indexing ops"); table.addCell("pri.indexing.index_current", "default:false;text-align:right;desc:number of current indexing ops"); - table.addCell("indexing.index_time", "sibling:pri;alias:iiti,indexingIndexTime;default:false;text-align:right;desc:time spent in indexing"); + table.addCell("indexing.index_time", + "sibling:pri;alias:iiti,indexingIndexTime;default:false;text-align:right;desc:time spent in indexing"); table.addCell("pri.indexing.index_time", "default:false;text-align:right;desc:time spent in indexing"); - table.addCell("indexing.index_total", "sibling:pri;alias:iito,indexingIndexTotal;default:false;text-align:right;desc:number of indexing ops"); + table.addCell("indexing.index_total", + "sibling:pri;alias:iito,indexingIndexTotal;default:false;text-align:right;desc:number of indexing ops"); table.addCell("pri.indexing.index_total", "default:false;text-align:right;desc:number of indexing ops"); - table.addCell("indexing.index_failed", "sibling:pri;alias:iif,indexingIndexFailed;default:false;text-align:right;desc:number of failed indexing ops"); + table.addCell("indexing.index_failed", + "sibling:pri;alias:iif,indexingIndexFailed;default:false;text-align:right;desc:number of failed indexing ops"); table.addCell("pri.indexing.index_failed", "default:false;text-align:right;desc:number of failed indexing ops"); - table.addCell("merges.current", "sibling:pri;alias:mc,mergesCurrent;default:false;text-align:right;desc:number of current merges"); + table.addCell("merges.current", + "sibling:pri;alias:mc,mergesCurrent;default:false;text-align:right;desc:number of current merges"); table.addCell("pri.merges.current", "default:false;text-align:right;desc:number of current merges"); - table.addCell("merges.current_docs", "sibling:pri;alias:mcd,mergesCurrentDocs;default:false;text-align:right;desc:number of current merging docs"); + table.addCell("merges.current_docs", + "sibling:pri;alias:mcd,mergesCurrentDocs;default:false;text-align:right;desc:number of current merging docs"); table.addCell("pri.merges.current_docs", "default:false;text-align:right;desc:number of current merging docs"); - table.addCell("merges.current_size", "sibling:pri;alias:mcs,mergesCurrentSize;default:false;text-align:right;desc:size of current merges"); + table.addCell("merges.current_size", + "sibling:pri;alias:mcs,mergesCurrentSize;default:false;text-align:right;desc:size of current merges"); table.addCell("pri.merges.current_size", "default:false;text-align:right;desc:size of current merges"); - table.addCell("merges.total", "sibling:pri;alias:mt,mergesTotal;default:false;text-align:right;desc:number of completed merge ops"); + table.addCell("merges.total", + "sibling:pri;alias:mt,mergesTotal;default:false;text-align:right;desc:number of completed merge ops"); table.addCell("pri.merges.total", 
"default:false;text-align:right;desc:number of completed merge ops"); table.addCell("merges.total_docs", "sibling:pri;alias:mtd,mergesTotalDocs;default:false;text-align:right;desc:docs merged"); @@ -257,7 +282,8 @@ protected Table getTableWithHeader(final RestRequest request) { table.addCell("merges.total_size", "sibling:pri;alias:mts,mergesTotalSize;default:false;text-align:right;desc:size merged"); table.addCell("pri.merges.total_size", "default:false;text-align:right;desc:size merged"); - table.addCell("merges.total_time", "sibling:pri;alias:mtt,mergesTotalTime;default:false;text-align:right;desc:time spent in merges"); + table.addCell("merges.total_time", + "sibling:pri;alias:mtt,mergesTotalTime;default:false;text-align:right;desc:time spent in merges"); table.addCell("pri.merges.total_time", "default:false;text-align:right;desc:time spent in merges"); table.addCell("refresh.total", "sibling:pri;alias:rto,refreshTotal;default:false;text-align:right;desc:total refreshes"); @@ -266,37 +292,48 @@ protected Table getTableWithHeader(final RestRequest request) { table.addCell("refresh.time", "sibling:pri;alias:rti,refreshTime;default:false;text-align:right;desc:time spent in refreshes"); table.addCell("pri.refresh.time", "default:false;text-align:right;desc:time spent in refreshes"); - table.addCell("refresh.listeners", "sibling:pri;alias:rli,refreshListeners;default:false;text-align:right;desc:number of pending refresh listeners"); + table.addCell("refresh.listeners", + "sibling:pri;alias:rli,refreshListeners;default:false;text-align:right;desc:number of pending refresh listeners"); table.addCell("pri.refresh.listeners", "default:false;text-align:right;desc:number of pending refresh listeners"); - table.addCell("search.fetch_current", "sibling:pri;alias:sfc,searchFetchCurrent;default:false;text-align:right;desc:current fetch phase ops"); + table.addCell("search.fetch_current", + "sibling:pri;alias:sfc,searchFetchCurrent;default:false;text-align:right;desc:current fetch phase ops"); table.addCell("pri.search.fetch_current", "default:false;text-align:right;desc:current fetch phase ops"); - table.addCell("search.fetch_time", "sibling:pri;alias:sfti,searchFetchTime;default:false;text-align:right;desc:time spent in fetch phase"); + table.addCell("search.fetch_time", + "sibling:pri;alias:sfti,searchFetchTime;default:false;text-align:right;desc:time spent in fetch phase"); table.addCell("pri.search.fetch_time", "default:false;text-align:right;desc:time spent in fetch phase"); - table.addCell("search.fetch_total", "sibling:pri;alias:sfto,searchFetchTotal;default:false;text-align:right;desc:total fetch ops"); + table.addCell("search.fetch_total", + "sibling:pri;alias:sfto,searchFetchTotal;default:false;text-align:right;desc:total fetch ops"); table.addCell("pri.search.fetch_total", "default:false;text-align:right;desc:total fetch ops"); - table.addCell("search.open_contexts", "sibling:pri;alias:so,searchOpenContexts;default:false;text-align:right;desc:open search contexts"); + table.addCell("search.open_contexts", + "sibling:pri;alias:so,searchOpenContexts;default:false;text-align:right;desc:open search contexts"); table.addCell("pri.search.open_contexts", "default:false;text-align:right;desc:open search contexts"); - table.addCell("search.query_current", "sibling:pri;alias:sqc,searchQueryCurrent;default:false;text-align:right;desc:current query phase ops"); + table.addCell("search.query_current", + "sibling:pri;alias:sqc,searchQueryCurrent;default:false;text-align:right;desc:current query phase 
ops"); table.addCell("pri.search.query_current", "default:false;text-align:right;desc:current query phase ops"); - table.addCell("search.query_time", "sibling:pri;alias:sqti,searchQueryTime;default:false;text-align:right;desc:time spent in query phase"); + table.addCell("search.query_time", + "sibling:pri;alias:sqti,searchQueryTime;default:false;text-align:right;desc:time spent in query phase"); table.addCell("pri.search.query_time", "default:false;text-align:right;desc:time spent in query phase"); - table.addCell("search.query_total", "sibling:pri;alias:sqto,searchQueryTotal;default:false;text-align:right;desc:total query phase ops"); + table.addCell("search.query_total", + "sibling:pri;alias:sqto,searchQueryTotal;default:false;text-align:right;desc:total query phase ops"); table.addCell("pri.search.query_total", "default:false;text-align:right;desc:total query phase ops"); - table.addCell("search.scroll_current", "sibling:pri;alias:scc,searchScrollCurrent;default:false;text-align:right;desc:open scroll contexts"); + table.addCell("search.scroll_current", + "sibling:pri;alias:scc,searchScrollCurrent;default:false;text-align:right;desc:open scroll contexts"); table.addCell("pri.search.scroll_current", "default:false;text-align:right;desc:open scroll contexts"); - table.addCell("search.scroll_time", "sibling:pri;alias:scti,searchScrollTime;default:false;text-align:right;desc:time scroll contexts held open"); + table.addCell("search.scroll_time", + "sibling:pri;alias:scti,searchScrollTime;default:false;text-align:right;desc:time scroll contexts held open"); table.addCell("pri.search.scroll_time", "default:false;text-align:right;desc:time scroll contexts held open"); - table.addCell("search.scroll_total", "sibling:pri;alias:scto,searchScrollTotal;default:false;text-align:right;desc:completed scroll contexts"); + table.addCell("search.scroll_total", + "sibling:pri;alias:scto,searchScrollTotal;default:false;text-align:right;desc:completed scroll contexts"); table.addCell("pri.search.scroll_total", "default:false;text-align:right;desc:completed scroll contexts"); table.addCell("segments.count", "sibling:pri;alias:sc,segmentsCount;default:false;text-align:right;desc:number of segments"); @@ -305,14 +342,20 @@ protected Table getTableWithHeader(final RestRequest request) { table.addCell("segments.memory", "sibling:pri;alias:sm,segmentsMemory;default:false;text-align:right;desc:memory used by segments"); table.addCell("pri.segments.memory", "default:false;text-align:right;desc:memory used by segments"); - table.addCell("segments.index_writer_memory", "sibling:pri;alias:siwm,segmentsIndexWriterMemory;default:false;text-align:right;desc:memory used by index writer"); + table.addCell("segments.index_writer_memory", + "sibling:pri;alias:siwm,segmentsIndexWriterMemory;default:false;text-align:right;desc:memory used by index writer"); table.addCell("pri.segments.index_writer_memory", "default:false;text-align:right;desc:memory used by index writer"); - table.addCell("segments.version_map_memory", "sibling:pri;alias:svmm,segmentsVersionMapMemory;default:false;text-align:right;desc:memory used by version map"); + table.addCell("segments.version_map_memory", + "sibling:pri;alias:svmm,segmentsVersionMapMemory;default:false;text-align:right;desc:memory used by version map"); table.addCell("pri.segments.version_map_memory", "default:false;text-align:right;desc:memory used by version map"); - table.addCell("segments.fixed_bitset_memory", 
"sibling:pri;alias:sfbm,fixedBitsetMemory;default:false;text-align:right;desc:memory used by fixed bit sets for nested object field types and type filters for types referred in _parent fields"); - table.addCell("pri.segments.fixed_bitset_memory", "default:false;text-align:right;desc:memory used by fixed bit sets for nested object field types and type filters for types referred in _parent fields"); + table.addCell("segments.fixed_bitset_memory", + "sibling:pri;alias:sfbm,fixedBitsetMemory;default:false;text-align:right;desc:memory used by fixed bit sets for" + + " nested object field types and type filters for types referred in _parent fields"); + table.addCell("pri.segments.fixed_bitset_memory", + "default:false;text-align:right;desc:memory used by fixed bit sets for nested object" + + " field types and type filters for types referred in _parent fields"); table.addCell("warmer.current", "sibling:pri;alias:wc,warmerCurrent;default:false;text-align:right;desc:current warmer ops"); table.addCell("pri.warmer.current", "default:false;text-align:right;desc:current warmer ops"); @@ -320,10 +363,12 @@ protected Table getTableWithHeader(final RestRequest request) { table.addCell("warmer.total", "sibling:pri;alias:wto,warmerTotal;default:false;text-align:right;desc:total warmer ops"); table.addCell("pri.warmer.total", "default:false;text-align:right;desc:total warmer ops"); - table.addCell("warmer.total_time", "sibling:pri;alias:wtt,warmerTotalTime;default:false;text-align:right;desc:time spent in warmers"); + table.addCell("warmer.total_time", + "sibling:pri;alias:wtt,warmerTotalTime;default:false;text-align:right;desc:time spent in warmers"); table.addCell("pri.warmer.total_time", "default:false;text-align:right;desc:time spent in warmers"); - table.addCell("suggest.current", "sibling:pri;alias:suc,suggestCurrent;default:false;text-align:right;desc:number of current suggest ops"); + table.addCell("suggest.current", + "sibling:pri;alias:suc,suggestCurrent;default:false;text-align:right;desc:number of current suggest ops"); table.addCell("pri.suggest.current", "default:false;text-align:right;desc:number of current suggest ops"); table.addCell("suggest.time", "sibling:pri;alias:suti,suggestTime;default:false;text-align:right;desc:time spend in suggest"); @@ -335,12 +380,15 @@ protected Table getTableWithHeader(final RestRequest request) { table.addCell("memory.total", "sibling:pri;alias:tm,memoryTotal;default:false;text-align:right;desc:total used memory"); table.addCell("pri.memory.total", "default:false;text-align:right;desc:total user memory"); + table.addCell("search.throttled", "alias:sth;default:false;desc:indicates if the index is search throttled"); + table.endHeaders(); return table; } // package private for testing - Table buildTable(RestRequest request, Index[] indices, ClusterHealthResponse response, IndicesStatsResponse stats, MetaData indexMetaDatas) { + Table buildTable(RestRequest request, Index[] indices, ClusterHealthResponse response, + IndicesStatsResponse stats, MetaData indexMetaDatas) { final String healthParam = request.param("health"); final ClusterHealthStatus status; if (healthParam != null) { @@ -357,6 +405,7 @@ Table buildTable(RestRequest request, Index[] indices, ClusterHealthResponse res IndexStats indexStats = stats.getIndices().get(indexName); IndexMetaData indexMetaData = indexMetaDatas.getIndices().get(indexName); IndexMetaData.State state = indexMetaData.getState(); + boolean searchThrottled = IndexSettings.INDEX_SEARCH_THROTTLED.get(indexMetaData.getSettings()); 
if (status != null) { if (state == IndexMetaData.State.CLOSE || @@ -370,7 +419,8 @@ Table buildTable(RestRequest request, Index[] indices, ClusterHealthResponse res final CommonStats totalStats = indexStats == null ? new CommonStats() : indexStats.getTotal(); table.startRow(); - table.addCell(state == IndexMetaData.State.OPEN ? (indexHealth == null ? "red*" : indexHealth.getStatus().toString().toLowerCase(Locale.ROOT)) : null); + table.addCell(state == IndexMetaData.State.OPEN ? + (indexHealth == null ? "red*" : indexHealth.getStatus().toString().toLowerCase(Locale.ROOT)) : null); table.addCell(state.toString().toLowerCase(Locale.ROOT)); table.addCell(indexName); table.addCell(index.getUUID()); @@ -558,6 +608,8 @@ Table buildTable(RestRequest request, Index[] indices, ClusterHealthResponse res table.addCell(indexStats == null ? null : indexStats.getTotal().getTotalMemory()); table.addCell(indexStats == null ? null : indexStats.getPrimaries().getTotalMemory()); + table.addCell(searchThrottled); + table.endRow(); } diff --git a/server/src/main/java/org/elasticsearch/rest/action/cat/RestShardsAction.java b/server/src/main/java/org/elasticsearch/rest/action/cat/RestShardsAction.java index e4e8ea121dd9d..eb82f7da58c5c 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/cat/RestShardsAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/cat/RestShardsAction.java @@ -142,16 +142,20 @@ protected Table getTableWithHeader(final RestRequest request) { table.addCell("get.missing_time", "alias:gmti,getMissingTime;default:false;text-align:right;desc:time spent in failed gets"); table.addCell("get.missing_total", "alias:gmto,getMissingTotal;default:false;text-align:right;desc:number of failed gets"); - table.addCell("indexing.delete_current", "alias:idc,indexingDeleteCurrent;default:false;text-align:right;desc:number of current deletions"); + table.addCell("indexing.delete_current", + "alias:idc,indexingDeleteCurrent;default:false;text-align:right;desc:number of current deletions"); table.addCell("indexing.delete_time", "alias:idti,indexingDeleteTime;default:false;text-align:right;desc:time spent in deletions"); table.addCell("indexing.delete_total", "alias:idto,indexingDeleteTotal;default:false;text-align:right;desc:number of delete ops"); - table.addCell("indexing.index_current", "alias:iic,indexingIndexCurrent;default:false;text-align:right;desc:number of current indexing ops"); + table.addCell("indexing.index_current", + "alias:iic,indexingIndexCurrent;default:false;text-align:right;desc:number of current indexing ops"); table.addCell("indexing.index_time", "alias:iiti,indexingIndexTime;default:false;text-align:right;desc:time spent in indexing"); table.addCell("indexing.index_total", "alias:iito,indexingIndexTotal;default:false;text-align:right;desc:number of indexing ops"); - table.addCell("indexing.index_failed", "alias:iif,indexingIndexFailed;default:false;text-align:right;desc:number of failed indexing ops"); + table.addCell("indexing.index_failed", + "alias:iif,indexingIndexFailed;default:false;text-align:right;desc:number of failed indexing ops"); table.addCell("merges.current", "alias:mc,mergesCurrent;default:false;text-align:right;desc:number of current merges"); - table.addCell("merges.current_docs", "alias:mcd,mergesCurrentDocs;default:false;text-align:right;desc:number of current merging docs"); + table.addCell("merges.current_docs", + "alias:mcd,mergesCurrentDocs;default:false;text-align:right;desc:number of current merging docs"); 
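Note on the hunks above: the second argument to every addCell call is a packed attribute string of `key:value` pairs separated by semicolons. A minimal, self-contained sketch of how such a string decomposes; this parser is illustrative only, not the actual org.elasticsearch.common.Table implementation, though the key names are taken verbatim from the cells above:

```java
import java.util.LinkedHashMap;
import java.util.Map;

public class CatCellAttributes {
    /** Splits "sibling:pri;alias:idc,...;default:false;desc:..." into its key/value pairs. */
    static Map<String, String> parse(String attributes) {
        Map<String, String> parsed = new LinkedHashMap<>();
        for (String entry : attributes.split(";")) {
            int sep = entry.indexOf(':');
            parsed.put(sep < 0 ? entry : entry.substring(0, sep),
                       sep < 0 ? "" : entry.substring(sep + 1));
        }
        return parsed;
    }

    public static void main(String[] args) {
        Map<String, String> attrs = parse("sibling:pri;alias:idc,indexingDeleteCurrent;"
            + "default:false;text-align:right;desc:number of current deletions");
        System.out.println(attrs.get("alias"));   // idc,indexingDeleteCurrent -> short names for ?h=
        System.out.println(attrs.get("default")); // false -> column hidden unless requested
    }
}
```

This also explains why the wrapping changes in these hunks are safe: the attribute string is a single Java string concatenated across lines, so splitting it changes layout only, not the parsed metadata.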
table.addCell("merges.current_size", "alias:mcs,mergesCurrentSize;default:false;text-align:right;desc:size of current merges"); table.addCell("merges.total", "alias:mt,mergesTotal;default:false;text-align:right;desc:number of completed merge ops"); table.addCell("merges.total_docs", "alias:mtd,mergesTotalDocs;default:false;text-align:right;desc:docs merged"); @@ -160,7 +164,8 @@ protected Table getTableWithHeader(final RestRequest request) { table.addCell("refresh.total", "alias:rto,refreshTotal;default:false;text-align:right;desc:total refreshes"); table.addCell("refresh.time", "alias:rti,refreshTime;default:false;text-align:right;desc:time spent in refreshes"); - table.addCell("refresh.listeners", "alias:rli,refreshListeners;default:false;text-align:right;desc:number of pending refresh listeners"); + table.addCell("refresh.listeners", + "alias:rli,refreshListeners;default:false;text-align:right;desc:number of pending refresh listeners"); table.addCell("search.fetch_current", "alias:sfc,searchFetchCurrent;default:false;text-align:right;desc:current fetch phase ops"); table.addCell("search.fetch_time", "alias:sfti,searchFetchTime;default:false;text-align:right;desc:time spent in fetch phase"); @@ -170,14 +175,19 @@ protected Table getTableWithHeader(final RestRequest request) { table.addCell("search.query_time", "alias:sqti,searchQueryTime;default:false;text-align:right;desc:time spent in query phase"); table.addCell("search.query_total", "alias:sqto,searchQueryTotal;default:false;text-align:right;desc:total query phase ops"); table.addCell("search.scroll_current", "alias:scc,searchScrollCurrent;default:false;text-align:right;desc:open scroll contexts"); - table.addCell("search.scroll_time", "alias:scti,searchScrollTime;default:false;text-align:right;desc:time scroll contexts held open"); + table.addCell("search.scroll_time", + "alias:scti,searchScrollTime;default:false;text-align:right;desc:time scroll contexts held open"); table.addCell("search.scroll_total", "alias:scto,searchScrollTotal;default:false;text-align:right;desc:completed scroll contexts"); table.addCell("segments.count", "alias:sc,segmentsCount;default:false;text-align:right;desc:number of segments"); table.addCell("segments.memory", "alias:sm,segmentsMemory;default:false;text-align:right;desc:memory used by segments"); - table.addCell("segments.index_writer_memory", "alias:siwm,segmentsIndexWriterMemory;default:false;text-align:right;desc:memory used by index writer"); - table.addCell("segments.version_map_memory", "alias:svmm,segmentsVersionMapMemory;default:false;text-align:right;desc:memory used by version map"); - table.addCell("segments.fixed_bitset_memory", "alias:sfbm,fixedBitsetMemory;default:false;text-align:right;desc:memory used by fixed bit sets for nested object field types and type filters for types referred in _parent fields"); + table.addCell("segments.index_writer_memory", + "alias:siwm,segmentsIndexWriterMemory;default:false;text-align:right;desc:memory used by index writer"); + table.addCell("segments.version_map_memory", + "alias:svmm,segmentsVersionMapMemory;default:false;text-align:right;desc:memory used by version map"); + table.addCell("segments.fixed_bitset_memory", + "alias:sfbm,fixedBitsetMemory;default:false;text-align:right;desc:memory used by fixed bit sets for nested object" + + " field types and type filters for types referred in _parent fields"); table.addCell("seq_no.max", "alias:sqm,maxSeqNo;default:false;text-align:right;desc:max sequence number"); table.addCell("seq_no.local_checkpoint", 
"alias:sql,localCheckpoint;default:false;text-align:right;desc:local checkpoint"); diff --git a/server/src/main/java/org/elasticsearch/rest/action/cat/RestThreadPoolAction.java b/server/src/main/java/org/elasticsearch/rest/action/cat/RestThreadPoolAction.java index 3df270c8f6c80..e420dfb9843b8 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/cat/RestThreadPoolAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/cat/RestThreadPoolAction.java @@ -88,7 +88,8 @@ public void processResponse(final NodesInfoResponse nodesInfoResponse) { client.admin().cluster().nodesStats(nodesStatsRequest, new RestResponseListener(channel) { @Override public RestResponse buildResponse(NodesStatsResponse nodesStatsResponse) throws Exception { - return RestTable.buildResponse(buildTable(request, clusterStateResponse, nodesInfoResponse, nodesStatsResponse), channel); + return RestTable.buildResponse( + buildTable(request, clusterStateResponse, nodesInfoResponse, nodesStatsResponse), channel); } }); } diff --git a/server/src/main/java/org/elasticsearch/rest/action/document/RestBulkAction.java b/server/src/main/java/org/elasticsearch/rest/action/document/RestBulkAction.java index 5cfbbfdb1b524..7861a4fe9d1bd 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/document/RestBulkAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/document/RestBulkAction.java @@ -19,17 +19,20 @@ package org.elasticsearch.rest.action.document; +import org.apache.logging.log4j.LogManager; import org.elasticsearch.action.bulk.BulkRequest; import org.elasticsearch.action.bulk.BulkShardRequest; import org.elasticsearch.action.support.ActiveShardCount; import org.elasticsearch.client.Requests; import org.elasticsearch.client.node.NodeClient; +import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.action.RestStatusToXContentListener; +import org.elasticsearch.rest.action.search.RestSearchAction; import org.elasticsearch.search.fetch.subphase.FetchSourceContext; import java.io.IOException; @@ -49,6 +52,9 @@ public class RestBulkAction extends BaseRestHandler { private final boolean allowExplicitIndex; + private static final DeprecationLogger deprecationLogger = new DeprecationLogger(LogManager.getLogger(RestSearchAction.class)); + public static final String TYPES_DEPRECATION_MESSAGE = "[types removal]" + + " Specifying types in bulk requests is deprecated."; public RestBulkAction(Settings settings, RestController controller) { super(settings); @@ -76,6 +82,8 @@ public RestChannelConsumer prepareRequest(final RestRequest request, final NodeC String defaultType = request.param("type"); if (defaultType == null) { defaultType = MapperService.SINGLE_MAPPING_NAME; + } else { + deprecationLogger.deprecatedAndMaybeLog("bulk_with_types", RestBulkAction.TYPES_DEPRECATION_MESSAGE); } String defaultRouting = request.param("routing"); FetchSourceContext defaultFetchSourceContext = FetchSourceContext.parseFromRestRequest(request); diff --git a/server/src/main/java/org/elasticsearch/rest/action/search/RestMultiSearchAction.java b/server/src/main/java/org/elasticsearch/rest/action/search/RestMultiSearchAction.java index 30791835d3009..2d685ecfd51a2 100644 --- 
a/server/src/main/java/org/elasticsearch/rest/action/search/RestMultiSearchAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/search/RestMultiSearchAction.java @@ -59,7 +59,7 @@ public class RestMultiSearchAction extends BaseRestHandler { static { final Set responseParams = new HashSet<>( - Arrays.asList(RestSearchAction.TYPED_KEYS_PARAM, RestSearchAction.TOTAL_HIT_AS_INT_PARAM) + Arrays.asList(RestSearchAction.TYPED_KEYS_PARAM, RestSearchAction.TOTAL_HITS_AS_INT_PARAM) ); RESPONSE_PARAMS = Collections.unmodifiableSet(responseParams); } @@ -118,6 +118,7 @@ public static MultiSearchRequest parseRequest(RestRequest restRequest, boolean a deprecationLogger.deprecatedAndMaybeLog("msearch_with_types", TYPES_DEPRECATION_MESSAGE); } searchRequest.source(SearchSourceBuilder.fromXContent(parser, false)); + RestSearchAction.checkRestTotalHits(restRequest, searchRequest); multiRequest.add(searchRequest); }); List requests = multiRequest.requests(); diff --git a/server/src/main/java/org/elasticsearch/rest/action/search/RestSearchAction.java b/server/src/main/java/org/elasticsearch/rest/action/search/RestSearchAction.java index 8e6e247123d36..3e3a1e02a174b 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/search/RestSearchAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/search/RestSearchAction.java @@ -23,6 +23,7 @@ import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.client.node.NodeClient; +import org.elasticsearch.common.Booleans; import org.elasticsearch.common.Strings; import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.settings.Settings; @@ -59,12 +60,12 @@ public class RestSearchAction extends BaseRestHandler { * Indicates whether hits.total should be rendered as an integer or an object * in the rest search response. 
*/ - public static final String TOTAL_HIT_AS_INT_PARAM = "rest_total_hits_as_int"; + public static final String TOTAL_HITS_AS_INT_PARAM = "rest_total_hits_as_int"; public static final String TYPED_KEYS_PARAM = "typed_keys"; private static final Set RESPONSE_PARAMS; static { - final Set responseParams = new HashSet<>(Arrays.asList(TYPED_KEYS_PARAM, TOTAL_HIT_AS_INT_PARAM)); + final Set responseParams = new HashSet<>(Arrays.asList(TYPED_KEYS_PARAM, TOTAL_HITS_AS_INT_PARAM)); RESPONSE_PARAMS = Collections.unmodifiableSet(responseParams); } @@ -172,6 +173,7 @@ public static void parseSearchRequest(SearchRequest searchRequest, RestRequest r searchRequest.routing(request.param("routing")); searchRequest.preference(request.param("preference")); searchRequest.indicesOptions(IndicesOptions.fromRequest(request, searchRequest.indicesOptions())); + checkRestTotalHits(request, searchRequest); } /** @@ -236,7 +238,15 @@ private static void parseSearchSource(final SearchSourceBuilder searchSourceBuil } if (request.hasParam("track_total_hits")) { - searchSourceBuilder.trackTotalHits(request.paramAsBoolean("track_total_hits", true)); + if (Booleans.isBoolean(request.param("track_total_hits"))) { + searchSourceBuilder.trackTotalHits( + request.paramAsBoolean("track_total_hits", true) + ); + } else { + searchSourceBuilder.trackTotalHitsUpTo( + request.paramAsInt("track_total_hits", SearchContext.DEFAULT_TRACK_TOTAL_HITS_UP_TO) + ); + } } String sSorts = request.param("sort"); @@ -275,6 +285,23 @@ private static void parseSearchSource(final SearchSourceBuilder searchSourceBuil } } + /** + * Throws an {@link IllegalArgumentException} if {@link #TOTAL_HITS_AS_INT_PARAM} + * is used in conjunction with a lower bound value for the track_total_hits option. + */ + public static void checkRestTotalHits(RestRequest restRequest, SearchRequest searchRequest) { + int trackTotalHitsUpTo = searchRequest.source() == null ? 
+ SearchContext.DEFAULT_TRACK_TOTAL_HITS_UP_TO : searchRequest.source().trackTotalHitsUpTo(); + if (trackTotalHitsUpTo == SearchContext.TRACK_TOTAL_HITS_ACCURATE || + trackTotalHitsUpTo == SearchContext.TRACK_TOTAL_HITS_DISABLED) { + return ; + } + if (restRequest.paramAsBoolean(TOTAL_HITS_AS_INT_PARAM, false)) { + throw new IllegalArgumentException("[" + TOTAL_HITS_AS_INT_PARAM + "] cannot be used " + + "if the tracking of total hits is not accurate, got " + trackTotalHitsUpTo); + } + } + @Override protected Set responseParams() { return RESPONSE_PARAMS; diff --git a/server/src/main/java/org/elasticsearch/rest/action/search/RestSearchScrollAction.java b/server/src/main/java/org/elasticsearch/rest/action/search/RestSearchScrollAction.java index 6d2f0971ad770..50806a096f17b 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/search/RestSearchScrollAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/search/RestSearchScrollAction.java @@ -37,7 +37,7 @@ import static org.elasticsearch.rest.RestRequest.Method.POST; public class RestSearchScrollAction extends BaseRestHandler { - private static final Set RESPONSE_PARAMS = Collections.singleton(RestSearchAction.TOTAL_HIT_AS_INT_PARAM); + private static final Set RESPONSE_PARAMS = Collections.singleton(RestSearchAction.TOTAL_HITS_AS_INT_PARAM); public RestSearchScrollAction(Settings settings, RestController controller) { super(settings); diff --git a/server/src/main/java/org/elasticsearch/script/AbstractSortScript.java b/server/src/main/java/org/elasticsearch/script/AbstractSortScript.java index 1d8de9f95f40a..949ca5bdb7f0f 100644 --- a/server/src/main/java/org/elasticsearch/script/AbstractSortScript.java +++ b/server/src/main/java/org/elasticsearch/script/AbstractSortScript.java @@ -66,7 +66,7 @@ abstract class AbstractSortScript implements ScorerAware { this.leafLookup = lookup.getLeafSearchLookup(leafContext); Map parameters = new HashMap<>(params); parameters.putAll(leafLookup.asMap()); - this.params = new ParameterMap(parameters, DEPRECATIONS); + this.params = new DeprecationMap(parameters, DEPRECATIONS); } protected AbstractSortScript() { diff --git a/server/src/main/java/org/elasticsearch/script/AggregationScript.java b/server/src/main/java/org/elasticsearch/script/AggregationScript.java index 8e1b485816618..5d36b91785f8a 100644 --- a/server/src/main/java/org/elasticsearch/script/AggregationScript.java +++ b/server/src/main/java/org/elasticsearch/script/AggregationScript.java @@ -71,7 +71,7 @@ public abstract class AggregationScript implements ScorerAware { private Object value; public AggregationScript(Map params, SearchLookup lookup, LeafReaderContext leafContext) { - this.params = new ParameterMap(new HashMap<>(params), DEPRECATIONS); + this.params = new DeprecationMap(new HashMap<>(params), DEPRECATIONS); this.leafLookup = lookup.getLeafSearchLookup(leafContext); this.params.putAll(leafLookup.asMap()); } diff --git a/server/src/main/java/org/elasticsearch/script/ParameterMap.java b/server/src/main/java/org/elasticsearch/script/DeprecationMap.java similarity index 74% rename from server/src/main/java/org/elasticsearch/script/ParameterMap.java rename to server/src/main/java/org/elasticsearch/script/DeprecationMap.java index b40c0f9b401d3..5b14e2e3b1168 100644 --- a/server/src/main/java/org/elasticsearch/script/ParameterMap.java +++ b/server/src/main/java/org/elasticsearch/script/DeprecationMap.java @@ -26,38 +26,38 @@ import java.util.Map; import java.util.Set; -public final class ParameterMap implements Map { 
+public final class DeprecationMap implements Map { private static final DeprecationLogger deprecationLogger = - new DeprecationLogger(LogManager.getLogger(ParameterMap.class)); + new DeprecationLogger(LogManager.getLogger(DeprecationMap.class)); - private final Map params; + private final Map delegate; private final Map deprecations; - public ParameterMap(Map params, Map deprecations) { - this.params = params; + public DeprecationMap(Map delegate, Map deprecations) { + this.delegate = delegate; this.deprecations = deprecations; } @Override public int size() { - return params.size(); + return delegate.size(); } @Override public boolean isEmpty() { - return params.isEmpty(); + return delegate.isEmpty(); } @Override public boolean containsKey(final Object key) { - return params.containsKey(key); + return delegate.containsKey(key); } @Override public boolean containsValue(final Object value) { - return params.containsValue(value); + return delegate.containsValue(value); } @Override @@ -66,41 +66,41 @@ public Object get(final Object key) { if (deprecationMessage != null) { deprecationLogger.deprecated(deprecationMessage); } - return params.get(key); + return delegate.get(key); } @Override public Object put(final String key, final Object value) { - return params.put(key, value); + return delegate.put(key, value); } @Override public Object remove(final Object key) { - return params.remove(key); + return delegate.remove(key); } @Override public void putAll(final Map m) { - params.putAll(m); + delegate.putAll(m); } @Override public void clear() { - params.clear(); + delegate.clear(); } @Override public Set keySet() { - return params.keySet(); + return delegate.keySet(); } @Override public Collection values() { - return params.values(); + return delegate.values(); } @Override public Set> entrySet() { - return params.entrySet(); + return delegate.entrySet(); } } diff --git a/server/src/main/java/org/elasticsearch/script/FieldScript.java b/server/src/main/java/org/elasticsearch/script/FieldScript.java index 29684a6447776..4f0e3c7229037 100644 --- a/server/src/main/java/org/elasticsearch/script/FieldScript.java +++ b/server/src/main/java/org/elasticsearch/script/FieldScript.java @@ -63,7 +63,7 @@ public FieldScript(Map params, SearchLookup lookup, LeafReaderCo this.leafLookup = lookup.getLeafSearchLookup(leafContext); params = new HashMap<>(params); params.putAll(leafLookup.asMap()); - this.params = new ParameterMap(params, DEPRECATIONS); + this.params = new DeprecationMap(params, DEPRECATIONS); } // for expression engine diff --git a/server/src/main/java/org/elasticsearch/script/JodaCompatibleZonedDateTime.java b/server/src/main/java/org/elasticsearch/script/JodaCompatibleZonedDateTime.java index ec6492c9d5ec9..546deb3a24b68 100644 --- a/server/src/main/java/org/elasticsearch/script/JodaCompatibleZonedDateTime.java +++ b/server/src/main/java/org/elasticsearch/script/JodaCompatibleZonedDateTime.java @@ -151,6 +151,10 @@ public int getYear() { return dt.getYear(); } + public ZoneId getZone() { + return dt.getZone(); + } + public ZonedDateTime minus(TemporalAmount delta) { return dt.minus(delta); } diff --git a/server/src/main/java/org/elasticsearch/script/ScoreScript.java b/server/src/main/java/org/elasticsearch/script/ScoreScript.java index c88c68fd407a2..78d206888e998 100644 --- a/server/src/main/java/org/elasticsearch/script/ScoreScript.java +++ b/server/src/main/java/org/elasticsearch/script/ScoreScript.java @@ -73,7 +73,7 @@ public ScoreScript(Map params, SearchLookup lookup, LeafReaderCo 
this.leafLookup = lookup.getLeafSearchLookup(leafContext); params = new HashMap<>(params); params.putAll(leafLookup.asMap()); - this.params = new ParameterMap(params, DEPRECATIONS); + this.params = new DeprecationMap(params, DEPRECATIONS); } } diff --git a/server/src/main/java/org/elasticsearch/script/ScriptedMetricAggContexts.java b/server/src/main/java/org/elasticsearch/script/ScriptedMetricAggContexts.java index 4c51b9fed69ec..a17503a026f24 100644 --- a/server/src/main/java/org/elasticsearch/script/ScriptedMetricAggContexts.java +++ b/server/src/main/java/org/elasticsearch/script/ScriptedMetricAggContexts.java @@ -95,7 +95,7 @@ public MapScript(Map params, Map state, SearchLo if (leafLookup != null) { params = new HashMap<>(params); // copy params so we aren't modifying input params.putAll(leafLookup.asMap()); // add lookup vars - params = new ParameterMap(params, DEPRECATIONS); // wrap with deprecations + params = new DeprecationMap(params, DEPRECATIONS); // wrap with deprecations } this.params = params; } diff --git a/server/src/main/java/org/elasticsearch/script/TermsSetQueryScript.java b/server/src/main/java/org/elasticsearch/script/TermsSetQueryScript.java index 122e3defe7586..9462b6a8e811a 100644 --- a/server/src/main/java/org/elasticsearch/script/TermsSetQueryScript.java +++ b/server/src/main/java/org/elasticsearch/script/TermsSetQueryScript.java @@ -64,7 +64,7 @@ public TermsSetQueryScript(Map params, SearchLookup lookup, Leaf Map parameters = new HashMap<>(params); this.leafLookup = lookup.getLeafSearchLookup(leafContext); parameters.putAll(leafLookup.asMap()); - this.params = new ParameterMap(parameters, DEPRECATIONS); + this.params = new DeprecationMap(parameters, DEPRECATIONS); } protected TermsSetQueryScript() { diff --git a/server/src/main/java/org/elasticsearch/search/DefaultSearchContext.java b/server/src/main/java/org/elasticsearch/search/DefaultSearchContext.java index 091fd5f8c85e0..590c58b1f6615 100644 --- a/server/src/main/java/org/elasticsearch/search/DefaultSearchContext.java +++ b/server/src/main/java/org/elasticsearch/search/DefaultSearchContext.java @@ -116,7 +116,7 @@ final class DefaultSearchContext extends SearchContext { private SortAndFormats sort; private Float minimumScore; private boolean trackScores = false; // when sorting, track scores as well... 
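The ParameterMap to DeprecationMap rename above is mechanical (params becomes delegate), but the class's job is worth spelling out: it decorates a script's params map and emits a deprecation warning whenever a deprecated key is read. A reduced, self-contained sketch of that pattern; the real class implements the full Map interface and routes warnings through DeprecationLogger (which handles throttling and response headers itself), and the key/message below only approximate the DEPRECATIONS entries in the script classes:

```java
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;

/** Sketch of a params decorator that warns when deprecated keys are read. */
public class DeprecationMapSketch {
    private final Map<String, Object> delegate;
    private final Map<String, String> deprecations;
    private final Set<String> warned = new HashSet<>(); // the real class leaves deduplication to DeprecationLogger

    public DeprecationMapSketch(Map<String, Object> delegate, Map<String, String> deprecations) {
        this.delegate = delegate;
        this.deprecations = deprecations;
    }

    public Object get(Object key) {
        String message = deprecations.get(key);
        if (message != null && warned.add(String.valueOf(key))) {
            System.err.println("[deprecation] " + message); // stand-in for deprecationLogger.deprecated(message)
        }
        return delegate.get(key);
    }

    public static void main(String[] args) {
        Map<String, Object> params = new HashMap<>();
        params.put("doc", "...");
        // message text approximated from the script classes' DEPRECATIONS maps
        DeprecationMapSketch wrapped = new DeprecationMapSketch(params,
            Map.of("doc", "Accessing variable [doc] via [params.doc] is deprecated in favor of directly accessing [doc]."));
        wrapped.get("doc"); // warns once
        wrapped.get("doc"); // silent delegate lookup
    }
}
```

The new name fits the behavior: the map exists to log deprecations, not to hold parameters per se, which is all the delegate does.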
- private boolean trackTotalHits = true; + private int trackTotalHitsUpTo = SearchContext.DEFAULT_TRACK_TOTAL_HITS_UP_TO; private FieldDoc searchAfter; private CollapseContext collapse; private boolean lowLevelCancellation; @@ -159,7 +159,7 @@ final class DefaultSearchContext extends SearchContext { DefaultSearchContext(long id, ShardSearchRequest request, SearchShardTarget shardTarget, Engine.Searcher engineSearcher, ClusterService clusterService, IndexService indexService, IndexShard indexShard, BigArrays bigArrays, Counter timeEstimateCounter, TimeValue timeout, - FetchPhase fetchPhase, String clusterAlias, Version minNodeVersion) { + FetchPhase fetchPhase, Version minNodeVersion) { this.id = id; this.request = request; this.fetchPhase = fetchPhase; @@ -179,7 +179,7 @@ final class DefaultSearchContext extends SearchContext { this.timeout = timeout; this.minNodeVersion = minNodeVersion; queryShardContext = indexService.newQueryShardContext(request.shardId().id(), searcher.getIndexReader(), request::nowInMillis, - clusterAlias); + shardTarget.getClusterAlias()); queryShardContext.setTypes(request.types()); queryBoost = request.indexBoost(); } @@ -558,14 +558,14 @@ public boolean trackScores() { } @Override - public SearchContext trackTotalHits(boolean trackTotalHits) { - this.trackTotalHits = trackTotalHits; + public SearchContext trackTotalHitsUpTo(int trackTotalHitsUpTo) { + this.trackTotalHitsUpTo = trackTotalHitsUpTo; return this; } @Override - public boolean trackTotalHits() { - return trackTotalHits; + public int trackTotalHitsUpTo() { + return trackTotalHitsUpTo; } @Override diff --git a/server/src/main/java/org/elasticsearch/search/SearchHits.java b/server/src/main/java/org/elasticsearch/search/SearchHits.java index 6f295eff42605..f04183ffde700 100644 --- a/server/src/main/java/org/elasticsearch/search/SearchHits.java +++ b/server/src/main/java/org/elasticsearch/search/SearchHits.java @@ -44,10 +44,13 @@ import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken; public final class SearchHits implements Streamable, ToXContentFragment, Iterable { public static SearchHits empty() { + return empty(true); + } + + public static SearchHits empty(boolean withTotalHits) { // We shouldn't use static final instance, since that could directly be returned by native transport clients - return new SearchHits(EMPTY, new TotalHits(0, Relation.EQUAL_TO), 0); + return new SearchHits(EMPTY, withTotalHits ? new TotalHits(0, Relation.EQUAL_TO) : null, 0); } public static final SearchHit[] EMPTY = new SearchHit[0]; @@ -151,7 +154,7 @@ public static final class Fields { @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(Fields.HITS); - boolean totalHitAsInt = params.paramAsBoolean(RestSearchAction.TOTAL_HIT_AS_INT_PARAM, false); + boolean totalHitAsInt = params.paramAsBoolean(RestSearchAction.TOTAL_HITS_AS_INT_PARAM, false); if (totalHitAsInt) { long total = totalHits == null ?
-1 : totalHits.in.value; builder.field(Fields.TOTAL, total); @@ -329,12 +332,17 @@ private static String printRelation(Relation relation) { private static class Total implements Writeable, ToXContentFragment { final TotalHits in; + Total(TotalHits in) { + this.in = Objects.requireNonNull(in); + } + Total(StreamInput in) throws IOException { this.in = Lucene.readTotalHits(in); } - Total(TotalHits in) { - this.in = Objects.requireNonNull(in); + @Override + public void writeTo(StreamOutput out) throws IOException { + Lucene.writeTotalHits(out, in); } @Override @@ -351,11 +359,6 @@ public int hashCode() { return Objects.hash(in.value, in.relation); } - @Override - public void writeTo(StreamOutput out) throws IOException { - Lucene.writeTotalHits(out, in); - } - @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.field("value", in.value); diff --git a/server/src/main/java/org/elasticsearch/search/SearchService.java b/server/src/main/java/org/elasticsearch/search/SearchService.java index 98f2e1d2e7ecf..f6e91c03af6e1 100644 --- a/server/src/main/java/org/elasticsearch/search/SearchService.java +++ b/server/src/main/java/org/elasticsearch/search/SearchService.java @@ -668,8 +668,7 @@ final SearchContext createContext(ShardSearchRequest request) throws IOException return context; } - public DefaultSearchContext createSearchContext(ShardSearchRequest request, TimeValue timeout) - throws IOException { + public DefaultSearchContext createSearchContext(ShardSearchRequest request, TimeValue timeout) throws IOException { return createSearchContext(request, timeout, true, "search"); } @@ -684,7 +683,7 @@ private DefaultSearchContext createSearchContext(ShardSearchRequest request, Tim final DefaultSearchContext searchContext = new DefaultSearchContext(idGenerator.incrementAndGet(), request, shardTarget, engineSearcher, clusterService, indexService, indexShard, bigArrays, threadPool.estimatedTimeInMillisCounter(), timeout, - fetchPhase, request.getClusterAlias(), clusterService.state().nodes().getMinNodeVersion()); + fetchPhase, clusterService.state().nodes().getMinNodeVersion()); boolean success = false; try { // we clone the query shard context here just for rewriting otherwise we @@ -692,7 +691,7 @@ private DefaultSearchContext createSearchContext(ShardSearchRequest request, Tim // during rewrite and normalized / evaluate templates etc. 
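The hunks above and below fold the old boolean trackTotalHits flag into a single int, trackTotalHitsUpTo, with sentinel values at both ends of the range. A self-contained sketch of that encoding together with the Booleans.isBoolean branch added to RestSearchAction earlier; the sentinel constants here are assumptions mirroring what SearchContext appears to define, not verified values:

```java
public class TrackTotalHits {
    // Sentinel encoding assumed here: MAX_VALUE = count exactly, -1 = don't count,
    // any other value = exact count up to that many hits, lower bound beyond it.
    static final int TRACK_TOTAL_HITS_ACCURATE = Integer.MAX_VALUE;
    static final int TRACK_TOTAL_HITS_DISABLED = -1;

    /** Folds the old boolean flag and the new integer threshold into one int. */
    static int parse(String rawParam) {
        if ("true".equals(rawParam) || "false".equals(rawParam)) { // the Booleans.isBoolean branch
            return Boolean.parseBoolean(rawParam) ? TRACK_TOTAL_HITS_ACCURATE : TRACK_TOTAL_HITS_DISABLED;
        }
        return Integer.parseInt(rawParam); // e.g. track_total_hits=10000
    }

    public static void main(String[] args) {
        System.out.println(parse("true"));   // 2147483647 -> exact total
        System.out.println(parse("false"));  // -1         -> totals omitted
        System.out.println(parse("10000"));  // 10000      -> only a bound; checkRestTotalHits
                                             //    rejects this when rest_total_hits_as_int is set
    }
}
```

One int instead of a boolean plus a threshold keeps every consumer (DefaultSearchContext, SearchSourceBuilder, the REST layer) agreeing on a single representation, which is exactly what the checkRestTotalHits validation above relies on.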
QueryShardContext context = new QueryShardContext(searchContext.getQueryShardContext()); Rewriteable.rewrite(request.getRewriteable(), context, assertAsyncActions); - assert searchContext.getQueryShardContext().isCachable(); + assert searchContext.getQueryShardContext().isCacheable(); success = true; } finally { if (success == false) { @@ -815,7 +814,7 @@ private void parseSource(DefaultSearchContext context, SearchSourceBuilder sourc if (source.trackTotalHits() == false && context.scrollContext() != null) { throw new SearchContextException(context, "disabling [track_total_hits] is not allowed in a scroll context"); } - context.trackTotalHits(source.trackTotalHits()); + context.trackTotalHitsUpTo(source.trackTotalHitsUpTo()); if (source.minScore() != null) { context.minimumScore(source.minScore()); } diff --git a/server/src/main/java/org/elasticsearch/search/SearchShardTarget.java b/server/src/main/java/org/elasticsearch/search/SearchShardTarget.java index 4a46c7202d14e..42f3b67e358e4 100644 --- a/server/src/main/java/org/elasticsearch/search/SearchShardTarget.java +++ b/server/src/main/java/org/elasticsearch/search/SearchShardTarget.java @@ -30,6 +30,7 @@ import org.elasticsearch.transport.RemoteClusterAware; import java.io.IOException; +import java.util.Objects; /** * The target that the search request was executed on. @@ -54,7 +55,7 @@ public SearchShardTarget(StreamInput in) throws IOException { clusterAlias = in.readOptionalString(); } - public SearchShardTarget(String nodeId, ShardId shardId, String clusterAlias, OriginalIndices originalIndices) { + public SearchShardTarget(String nodeId, ShardId shardId, @Nullable String clusterAlias, OriginalIndices originalIndices) { this.nodeId = nodeId == null ? null : new Text(nodeId); this.shardId = shardId; this.originalIndices = originalIndices; @@ -63,7 +64,7 @@ public SearchShardTarget(String nodeId, ShardId shardId, String clusterAlias, Or //this constructor is only used in tests public SearchShardTarget(String nodeId, Index index, int shardId, String clusterAlias) { - this(nodeId, new ShardId(index, shardId), clusterAlias, OriginalIndices.NONE); + this(nodeId, new ShardId(index, shardId), clusterAlias, OriginalIndices.NONE); } @Nullable @@ -87,15 +88,16 @@ public OriginalIndices getOriginalIndices() { return originalIndices; } + @Nullable public String getClusterAlias() { return clusterAlias; } /** - * Returns the fully qualified index name, including the cluster alias. + * Returns the fully qualified index name, including the index prefix that indicates which cluster results come from. */ public String getFullyQualifiedIndexName() { - return RemoteClusterAware.buildRemoteIndexName(getClusterAlias(), getIndex()); + return RemoteClusterAware.buildRemoteIndexName(clusterAlias, getIndex()); } @Override @@ -121,28 +123,27 @@ public void writeTo(StreamOutput out) throws IOException { @Override public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } SearchShardTarget that = (SearchShardTarget) o; - if (shardId.equals(that.shardId) == false) return false; - if (nodeId != null ? !nodeId.equals(that.nodeId) : that.nodeId != null) return false; - if (clusterAlias != null ? 
!clusterAlias.equals(that.clusterAlias) : that.clusterAlias != null) return false; - return true; + return Objects.equals(nodeId, that.nodeId) && + Objects.equals(shardId, that.shardId) && + Objects.equals(clusterAlias, that.clusterAlias); } @Override public int hashCode() { - int result = nodeId != null ? nodeId.hashCode() : 0; - result = 31 * result + (shardId.getIndexName() != null ? shardId.getIndexName().hashCode() : 0); - result = 31 * result + shardId.hashCode(); - result = 31 * result + (clusterAlias != null ? clusterAlias.hashCode() : 0); - return result; + return Objects.hash(nodeId, shardId, clusterAlias); } @Override public String toString() { - String shardToString = "[" + RemoteClusterAware.buildRemoteIndexName(clusterAlias, shardId.getIndexName()) + "][" + shardId.getId() - + "]"; + String shardToString = "[" + RemoteClusterAware.buildRemoteIndexName( + clusterAlias, shardId.getIndexName()) + "][" + shardId.getId() + "]"; if (nodeId == null) { return "[_na_]" + shardToString; } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/MultiBucketConsumerService.java b/server/src/main/java/org/elasticsearch/search/aggregations/MultiBucketConsumerService.java index 63ba70c6f23ee..b302c40c3bd12 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/MultiBucketConsumerService.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/MultiBucketConsumerService.java @@ -118,6 +118,10 @@ public void reset() { public int getCount() { return count; } + + public int getLimit() { + return limit; + } } public MultiBucketConsumer create() { diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/SearchContextAggregations.java b/server/src/main/java/org/elasticsearch/search/aggregations/SearchContextAggregations.java index ab0a73d53ed17..563bede42d172 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/SearchContextAggregations.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/SearchContextAggregations.java @@ -18,8 +18,6 @@ */ package org.elasticsearch.search.aggregations; -import java.util.function.IntConsumer; - import static org.elasticsearch.search.aggregations.MultiBucketConsumerService.MultiBucketConsumer; /** @@ -60,7 +58,7 @@ public void aggregators(Aggregator[] aggregators) { * Returns a consumer for multi bucket aggregation that checks the total number of buckets * created in the response */ - public IntConsumer multiBucketConsumer() { + public MultiBucketConsumer multiBucketConsumer() { return multiBucketConsumer; } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/BucketUtils.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/BucketUtils.java index 17b50fa9bef5f..823a2b1e43422 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/BucketUtils.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/BucketUtils.java @@ -31,18 +31,12 @@ private BucketUtils() {} * * @param finalSize * The number of terms required in the final reduce phase. 
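The SearchShardTarget hunk above swaps hand-rolled null-checking ternaries for Objects.equals and Objects.hash. A minimal stand-in class showing the same contract; field names are borrowed from the hunk, the class itself is illustrative:

```java
import java.util.Objects;

/** Reduced stand-in for SearchShardTarget: nullable fields compared with Objects helpers. */
final class ShardTargetKey {
    private final String nodeId;       // may be null
    private final String clusterAlias; // null for the local cluster
    private final int shardId;

    ShardTargetKey(String nodeId, String clusterAlias, int shardId) {
        this.nodeId = nodeId;
        this.clusterAlias = clusterAlias;
        this.shardId = shardId;
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) {
            return true;
        }
        if (o == null || getClass() != o.getClass()) {
            return false;
        }
        ShardTargetKey that = (ShardTargetKey) o;
        // Objects.equals covers the null cases the old hand-rolled ternaries spelled out
        return shardId == that.shardId
            && Objects.equals(nodeId, that.nodeId)
            && Objects.equals(clusterAlias, that.clusterAlias);
    }

    @Override
    public int hashCode() {
        // hashes exactly the fields equals compares; the manual version also mixed in
        // shardId.getIndexName(), which ShardId's own hashCode already covers
        return Objects.hash(nodeId, clusterAlias, shardId);
    }
}
```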
- * @param singleShard - * whether a single shard is being queried, or multiple shards * @return A suggested default for the size of any shard-side PriorityQueues */ - public static int suggestShardSideQueueSize(int finalSize, boolean singleShard) { + public static int suggestShardSideQueueSize(int finalSize) { if (finalSize < 1) { throw new IllegalArgumentException("size must be positive, got " + finalSize); } - if (singleShard) { - // In the case of a single shard, we do not need to over-request - return finalSize; - } // Request 50% more buckets on the shards in order to improve accuracy // as well as a small constant that should help with small values of 'size' final long shardSampleSize = (long) (finalSize * 1.5 + 10); diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/BinaryValuesSource.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/BinaryValuesSource.java index 7bfae34b1a305..21346844aac89 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/BinaryValuesSource.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/BinaryValuesSource.java @@ -114,12 +114,30 @@ int compareCurrentWithAfter() { return compareValues(currentValue, afterValue); } + @Override + int hashCode(int slot) { + if (missingBucket && values.get(slot) == null) { + return 0; + } else { + return values.get(slot).hashCode(); + } + } + + @Override + int hashCodeCurrent() { + if (missingBucket && currentValue == null) { + return 0; + } else { + return currentValue.hashCode(); + } + } + int compareValues(BytesRef v1, BytesRef v2) { return v1.compareTo(v2) * reverseMul; } @Override - void setAfter(Comparable value) { + void setAfter(Comparable value) { if (missingBucket && value == null) { afterValue = null; } else if (value.getClass() == String.class) { @@ -155,7 +173,7 @@ public void collect(int doc, long bucket) throws IOException { } @Override - LeafBucketCollector getLeafCollector(Comparable value, LeafReaderContext context, LeafBucketCollector next) { + LeafBucketCollector getLeafCollector(Comparable value, LeafReaderContext context, LeafBucketCollector next) { if (value.getClass() != BytesRef.class) { throw new IllegalArgumentException("Expected BytesRef, got " + value.getClass()); } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregationBuilder.java index 251dc7e428396..43e33fad93189 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregationBuilder.java @@ -170,7 +170,7 @@ protected AggregatorFactory doBuild(SearchContext context, AggregatorFactory< throw new IllegalArgumentException("[after] has " + after.size() + " value(s) but [sources] has " + sources.size()); } - Comparable[] values = new Comparable[sources.size()]; + Comparable[] values = new Comparable[sources.size()]; for (int i = 0; i < sources.size(); i++) { String sourceName = sources.get(i).name(); if (after.containsKey(sourceName) == false) { @@ -180,7 +180,7 @@ protected AggregatorFactory doBuild(SearchContext context, AggregatorFactory< if (configs[i].missingBucket() && obj == null) { values[i] = null; } else if (obj instanceof Comparable) { - values[i] = (Comparable) obj; + values[i] = 
(Comparable) obj; } else { throw new IllegalArgumentException("Invalid value for [after." + sources.get(i).name() + "], expected comparable, got [" + (obj == null ? "null" : obj.getClass().getSimpleName()) + "]"); diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregator.java index 3c43cf3ec1d2c..cd7fd6abe8ca9 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregator.java @@ -40,6 +40,7 @@ import org.elasticsearch.search.aggregations.InternalAggregations; import org.elasticsearch.search.aggregations.LeafBucketCollector; import org.elasticsearch.search.aggregations.MultiBucketCollector; +import org.elasticsearch.search.aggregations.MultiBucketConsumerService; import org.elasticsearch.search.aggregations.bucket.BucketsAggregator; import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator; import org.elasticsearch.search.aggregations.support.ValuesSource; @@ -54,6 +55,8 @@ import java.util.function.LongUnaryOperator; import java.util.stream.Collectors; +import static org.elasticsearch.search.aggregations.MultiBucketConsumerService.MAX_BUCKET_SETTING; + final class CompositeAggregator extends BucketsAggregator { private final int size; private final SortedDocsProducer sortedDocsProducer; @@ -78,9 +81,15 @@ final class CompositeAggregator extends BucketsAggregator { this.reverseMuls = Arrays.stream(sourceConfigs).mapToInt(CompositeValuesSourceConfig::reverseMul).toArray(); this.formats = Arrays.stream(sourceConfigs).map(CompositeValuesSourceConfig::format).collect(Collectors.toList()); this.sources = new SingleDimensionValuesSource[sourceConfigs.length]; + // check that the provided size is not greater than the search.max_buckets setting + int bucketLimit = context.aggregations().multiBucketConsumer().getLimit(); + if (size > bucketLimit) { + throw new MultiBucketConsumerService.TooManyBucketsException("Trying to create too many buckets. Must be less than or equal" + + " to: [" + bucketLimit + "] but was [" + size + "]. 
This limit can be set by changing the [" + MAX_BUCKET_SETTING.getKey() + + "] cluster level setting.", bucketLimit); + } for (int i = 0; i < sourceConfigs.length; i++) { - this.sources[i] = createValuesSource(context.bigArrays(), context.searcher().getIndexReader(), - context.query(), sourceConfigs[i], size, i); + this.sources[i] = createValuesSource(context.bigArrays(), context.searcher().getIndexReader(), sourceConfigs[i], size); } this.queue = new CompositeValuesCollectorQueue(context.bigArrays(), sources, size, rawAfterKey); this.sortedDocsProducer = sources[0].createSortedDocsProducerOrNull(context.searcher().getIndexReader(), context.query()); @@ -88,8 +97,11 @@ final class CompositeAggregator extends BucketsAggregator { @Override protected void doClose() { - Releasables.close(queue); - Releasables.close(sources); + try { + Releasables.close(queue); + } finally { + Releasables.close(sources); + } } @Override @@ -116,12 +128,12 @@ public InternalAggregation buildAggregation(long zeroBucket) throws IOException int num = Math.min(size, queue.size()); final InternalComposite.InternalBucket[] buckets = new InternalComposite.InternalBucket[num]; - int pos = 0; - for (int slot : queue.getSortedSlot()) { + while (queue.size() > 0) { + int slot = queue.pop(); CompositeKey key = queue.toCompositeKey(slot); InternalAggregations aggs = bucketAggregations(slot); int docCount = queue.getDocCount(slot); - buckets[pos++] = new InternalComposite.InternalBucket(sourceNames, formats, key, reverseMuls, docCount, aggs); + buckets[queue.size()] = new InternalComposite.InternalBucket(sourceNames, formats, key, reverseMuls, docCount, aggs); } CompositeKey lastBucket = num > 0 ? buckets[num-1].getRawKey() : null; return new InternalComposite(name, size, sourceNames, formats, Arrays.asList(buckets), lastBucket, reverseMuls, @@ -259,13 +271,13 @@ public void collect(int doc, long zeroBucket) throws IOException { }; } - private SingleDimensionValuesSource createValuesSource(BigArrays bigArrays, IndexReader reader, Query query, - CompositeValuesSourceConfig config, int sortRank, int size) { + private SingleDimensionValuesSource createValuesSource(BigArrays bigArrays, IndexReader reader, + CompositeValuesSourceConfig config, int size) { final int reverseMul = config.reverseMul(); if (config.valuesSource() instanceof ValuesSource.Bytes.WithOrdinals && reader instanceof DirectoryReader) { ValuesSource.Bytes.WithOrdinals vs = (ValuesSource.Bytes.WithOrdinals) config.valuesSource(); - SingleDimensionValuesSource source = new GlobalOrdinalValuesSource( + return new GlobalOrdinalValuesSource( bigArrays, config.fieldType(), vs::globalOrdinalsValues, @@ -274,25 +286,6 @@ private SingleDimensionValuesSource createValuesSource(BigArrays bigArrays, I size, reverseMul ); - - if (sortRank == 0 && source.createSortedDocsProducerOrNull(reader, query) != null) { - // this the leading source and we can optimize it with the sorted docs producer but - // we don't want to use global ordinals because the number of visited documents - // should be low and global ordinals need one lookup per visited term. 
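The CompositeAggregator constructor above now fails fast when the requested size exceeds the search.max_buckets limit, instead of discovering the overflow only after buckets have been allocated. A self-contained sketch of that guard; the names are simplified stand-ins, not the actual MultiBucketConsumerService types, and the limit value in main is arbitrary:

```java
/** Illustrative stand-in for the exception thrown by the max_buckets guard. */
final class TooManyBucketsException extends RuntimeException {
    TooManyBucketsException(String message) {
        super(message);
    }
}

final class BucketLimitGuard {
    /** Fails fast at construction time instead of after buckets have been built. */
    static void checkSize(int requestedSize, int bucketLimit) {
        if (requestedSize > bucketLimit) {
            throw new TooManyBucketsException("Trying to create too many buckets. Must be less than or equal to: ["
                + bucketLimit + "] but was [" + requestedSize + "]. This limit can be set by changing the"
                + " [search.max_buckets] cluster level setting.");
        }
    }

    public static void main(String[] args) {
        checkSize(1_000, 65_536);   // fine
        checkSize(100_000, 65_536); // throws before any work is done
    }
}
```

Exposing getLimit() on MultiBucketConsumer, as the earlier hunk does, is what makes this constructor-time check possible: the composite size is known up front, so there is no reason to wait for the consumer to trip the limit during collection.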
- Releasables.close(source); - return new BinaryValuesSource( - bigArrays, - this::addRequestCircuitBreakerBytes, - config.fieldType(), - vs::bytesValues, - config.format(), - config.missingBucket(), - size, - reverseMul - ); - } else { - return source; - } } else if (config.valuesSource() instanceof ValuesSource.Bytes) { ValuesSource.Bytes vs = (ValuesSource.Bytes) config.valuesSource(); return new BinaryValuesSource( diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeKey.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeKey.java index 51c5a7c5a887f..c643ee67b180d 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeKey.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeKey.java @@ -30,16 +30,16 @@ * A key that is composed of multiple {@link Comparable} values. */ class CompositeKey implements Writeable { - private final Comparable[] values; + private final Comparable[] values; - CompositeKey(Comparable... values) { + CompositeKey(Comparable... values) { this.values = values; } CompositeKey(StreamInput in) throws IOException { - values = new Comparable[in.readVInt()]; + values = new Comparable[in.readVInt()]; for (int i = 0; i < values.length; i++) { - values[i] = (Comparable) in.readGenericValue(); + values[i] = (Comparable) in.readGenericValue(); } } @@ -51,7 +51,7 @@ public void writeTo(StreamOutput out) throws IOException { } } - Comparable[] values() { + Comparable[] values() { return values; } @@ -59,7 +59,7 @@ int size() { return values.length; } - Comparable get(int pos) { + Comparable get(int pos) { assert pos < values.length; return values[pos]; } @@ -77,4 +77,11 @@ public boolean equals(Object o) { public int hashCode() { return Arrays.hashCode(values); } + + @Override + public String toString() { + return "CompositeKey{" + + "values=" + Arrays.toString(values) + + '}'; + } } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeValuesCollectorQueue.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeValuesCollectorQueue.java index 38e22296333ae..58887d9e6a2dc 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeValuesCollectorQueue.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeValuesCollectorQueue.java @@ -20,6 +20,7 @@ package org.elasticsearch.search.aggregations.bucket.composite; import org.apache.lucene.index.LeafReaderContext; +import org.apache.lucene.util.PriorityQueue; import org.elasticsearch.common.lease.Releasable; import org.elasticsearch.common.lease.Releasables; import org.elasticsearch.common.util.BigArrays; @@ -27,19 +28,40 @@ import org.elasticsearch.search.aggregations.LeafBucketCollector; import java.io.IOException; -import java.util.Set; -import java.util.TreeMap; +import java.util.HashMap; +import java.util.Map; /** - * A specialized queue implementation for composite buckets + * A specialized {@link PriorityQueue} implementation for composite buckets. 
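The queue rework below replaces a TreeMap keyed by slot with a Lucene PriorityQueue whose lessThan is inverted, so top() is the weakest competitive key and eviction is O(log n), plus a HashMap for O(1) duplicate detection. A self-contained sketch of the same top-N pattern using java.util.PriorityQueue over plain longs; the real code compares multi-source slots via the values sources, not primitives:

```java
import java.util.HashSet;
import java.util.PriorityQueue;
import java.util.Set;

/**
 * Top-N sketch of the reworked queue: the heap is ordered worst-first so peek()
 * is the eviction candidate, and the set answers "already collected?" in O(1).
 */
public class TopNKeys {
    private final int maxSize;
    private final PriorityQueue<Long> worstFirst = new PriorityQueue<>((a, b) -> Long.compare(b, a));
    private final Set<Long> collected = new HashSet<>();

    public TopNKeys(int maxSize) {
        this.maxSize = maxSize;
    }

    /** Mirrors addIfCompetitive: rejects duplicates and non-competitive keys. */
    public boolean addIfCompetitive(long key) {
        if (collected.contains(key)) {
            return false; // already a bucket; the real code only bumps its doc count
        }
        if (worstFirst.size() >= maxSize) {
            if (key >= worstFirst.peek()) {
                return false; // not competitive, skip it
            }
            collected.remove(worstFirst.poll()); // evict the current worst key, recycle its slot
        }
        worstFirst.add(key);
        collected.add(key);
        return true;
    }

    public static void main(String[] args) {
        TopNKeys queue = new TopNKeys(2);
        queue.addIfCompetitive(7);
        queue.addIfCompetitive(3);
        System.out.println(queue.addIfCompetitive(9)); // false: worse than everything kept
        System.out.println(queue.addIfCompetitive(1)); // true: 7 is evicted
    }
}
```

This also explains the buildAggregation change further down: with a heap, pop() yields buckets worst-first, so the loop fills the result array from the back (buckets[queue.size()]) rather than iterating a pre-sorted TreeMap key set.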
*/ -final class CompositeValuesCollectorQueue implements Releasable { +final class CompositeValuesCollectorQueue extends PriorityQueue implements Releasable { + private class Slot { + int value; + + Slot(int initial) { + this.value = initial; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + Slot slot = (Slot) o; + return CompositeValuesCollectorQueue.this.equals(value, slot.value); + } + + @Override + public int hashCode() { + return CompositeValuesCollectorQueue.this.hashCode(value); + } + } + // the slot for the current candidate private static final int CANDIDATE_SLOT = Integer.MAX_VALUE; private final BigArrays bigArrays; private final int maxSize; - private final TreeMap keys; + private final Map map; private final SingleDimensionValuesSource[] arrays; private IntArray docCounts; private boolean afterKeyIsSet = false; @@ -52,10 +74,11 @@ final class CompositeValuesCollectorQueue implements Releasable { * @param afterKey composite key */ CompositeValuesCollectorQueue(BigArrays bigArrays, SingleDimensionValuesSource[] sources, int size, CompositeKey afterKey) { + super(size); this.bigArrays = bigArrays; this.maxSize = size; this.arrays = sources; - this.keys = new TreeMap<>(this::compare); + this.map = new HashMap<>(size); if (afterKey != null) { assert afterKey.size() == sources.length; afterKeyIsSet = true; @@ -66,25 +89,16 @@ final class CompositeValuesCollectorQueue implements Releasable { this.docCounts = bigArrays.newIntArray(1, false); } - /** - * The current size of the queue. - */ - int size() { - return keys.size(); + @Override + protected boolean lessThan(Integer a, Integer b) { + return compare(a, b) > 0; } /** * Whether the queue is full or not. */ boolean isFull() { - return keys.size() == maxSize; - } - - /** - * Returns a sorted {@link Set} view of the slots contained in this queue. - */ - Set getSortedSlot() { - return keys.keySet(); + return size() >= maxSize; } /** @@ -92,21 +106,21 @@ Set getSortedSlot() { * the slot if the candidate is already in the queue or null if the candidate is not present. */ Integer compareCurrent() { - return keys.get(CANDIDATE_SLOT); + return map.get(new Slot(CANDIDATE_SLOT)); } /** * Returns the lowest value (exclusive) of the leading source. */ - Comparable getLowerValueLeadSource() { + Comparable getLowerValueLeadSource() { return afterKeyIsSet ? arrays[0].getAfter() : null; } /** * Returns the upper value (inclusive) of the leading source. */ - Comparable getUpperValueLeadSource() throws IOException { - return size() >= maxSize ? arrays[0].toComparable(keys.lastKey()) : null; + Comparable getUpperValueLeadSource() throws IOException { + return size() >= maxSize ? arrays[0].toComparable(top()) : null; } /** * Returns the document count in slot. @@ -127,12 +141,17 @@ private void copyCurrent(int slot) { } /** - * Compares the values in slot1 with slot2. + * Compares the values in slot1 with the values in slot2. */ int compare(int slot1, int slot2) { + assert slot2 != CANDIDATE_SLOT; for (int i = 0; i < arrays.length; i++) { - int cmp = (slot1 == CANDIDATE_SLOT) ? 
arrays[i].compareCurrent(slot2) : - arrays[i].compare(slot1, slot2); + final int cmp; + if (slot1 == CANDIDATE_SLOT) { + cmp = arrays[i].compareCurrent(slot2); + } else { + cmp = arrays[i].compare(slot1, slot2); + } if (cmp != 0) { return cmp; } @@ -140,6 +159,36 @@ int compare(int slot1, int slot2) { return 0; } + /** + * Returns true if the values in slot1 are equal to the values in slot2. + */ + boolean equals(int slot1, int slot2) { + assert slot2 != CANDIDATE_SLOT; + for (int i = 0; i < arrays.length; i++) { + final int cmp; + if (slot1 == CANDIDATE_SLOT) { + cmp = arrays[i].compareCurrent(slot2); + } else { + cmp = arrays[i].compare(slot1, slot2); + } + if (cmp != 0) { + return false; + } + } + return true; + } + + /** + * Returns a hash code value for the values in slot. + */ + int hashCode(int slot) { + int result = 1; + for (int i = 0; i < arrays.length; i++) { + result = 31 * result + (slot == CANDIDATE_SLOT ? arrays[i].hashCodeCurrent() : arrays[i].hashCode(slot)); + } + return result; + } + /** * Compares the after values with the values in slot. */ @@ -158,7 +207,7 @@ private int compareCurrentWithAfter() { */ CompositeKey toCompositeKey(int slot) throws IOException { assert slot < maxSize; - Comparable<?>[] values = new Comparable<?>[arrays.length]; + Comparable[] values = new Comparable[arrays.length]; for (int i = 0; i < values.length; i++) { values[i] = arrays[i].toComparable(slot); } @@ -178,7 +227,7 @@ LeafBucketCollector getLeafCollector(LeafReaderContext context, LeafBucketCollec * for each document. * The provided collector in is called on each composite bucket. */ - LeafBucketCollector getLeafCollector(Comparable<?> forceLeadSourceValue, + LeafBucketCollector getLeafCollector(Comparable forceLeadSourceValue, LeafReaderContext context, LeafBucketCollector in) throws IOException { int last = arrays.length - 1; LeafBucketCollector collector = in; @@ -209,28 +258,28 @@ int addIfCompetitive() { // this key is greater than the top value collected in the previous round, skip it return -1; } - if (keys.size() >= maxSize) { - // the tree map is full, check if the candidate key should be kept - if (compare(CANDIDATE_SLOT, keys.lastKey()) > 0) { - // the candidate key is not competitive, skip it - return -1; - } + if (size() >= maxSize + // the queue is full, check if the candidate key should be kept + && compare(CANDIDATE_SLOT, top()) > 0) { + // the candidate key is not competitive, skip it + return -1; } // the candidate key is competitive final int newSlot; - if (keys.size() >= maxSize) { - // the tree map is full, we replace the last key with this candidate - int slot = keys.pollLastEntry().getKey(); + if (size() >= maxSize) { + // the queue is full, we replace the last key with this candidate + int slot = pop(); + map.remove(new Slot(slot)); // and we recycle the deleted slot newSlot = slot; } else { - newSlot = keys.size(); - assert newSlot < maxSize; + newSlot = size(); } // move the candidate key to its new slot copyCurrent(newSlot); - keys.put(newSlot, newSlot); + map.put(new Slot(newSlot), newSlot); + add(newSlot); return newSlot; }
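The switch above from a TreeMap to Lucene's PriorityQueue hinges on one trick: lessThan is inverted (it returns compare(a, b) > 0), so the heap's top() is always the least competitive retained key, and addIfCompetitive can test and evict it in O(log n) instead of maintaining a fully sorted map. A minimal, self-contained sketch of the pattern follows; the names and the plain int values standing in for composite keys are hypothetical, not the Elasticsearch class:

    import org.apache.lucene.util.PriorityQueue;

    // Sketch only: keeps the N smallest values by inverting lessThan,
    // so that top() is the largest (least competitive) retained value.
    final class TopNSlots extends PriorityQueue<Integer> {
        private final int[] values; // hypothetical per-slot sort values
        private final int maxSize;

        TopNSlots(int maxSize, int[] values) {
            super(maxSize);
            this.maxSize = maxSize;
            this.values = values;
        }

        @Override
        protected boolean lessThan(Integer a, Integer b) {
            // inverted on purpose: the heap's "least" element is the largest
            // value, i.e. the first slot to evict when a better candidate arrives
            return Integer.compare(values[a], values[b]) > 0;
        }

        /** A candidate is competitive if there is room or it beats top(). */
        boolean isCompetitive(int candidateSlot) {
            return size() < maxSize || Integer.compare(values[candidateSlot], values[top()]) < 0;
        }
    }

The companion HashMap plays the lookup role the TreeMap used to: the Slot wrapper delegates equals/hashCode to the per-slot values, so compareCurrent() stays a constant-time membership check.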
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/DoubleValuesSource.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/DoubleValuesSource.java index 633d919f140cc..beb66398a6869 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/DoubleValuesSource.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/DoubleValuesSource.java @@ -103,12 +103,30 @@ int compareCurrentWithAfter() { return compareValues(currentValue, afterValue); } + @Override + int hashCode(int slot) { + if (missingBucket && bits.get(slot) == false) { + return 0; + } else { + return Double.hashCode(values.get(slot)); + } + } + + @Override + int hashCodeCurrent() { + if (missingCurrentValue) { + return 0; + } else { + return Double.hashCode(currentValue); + } + } + private int compareValues(double v1, double v2) { return Double.compare(v1, v2) * reverseMul; } @Override - void setAfter(Comparable<?> value) { + void setAfter(Comparable value) { if (missingBucket && value == null) { afterValue = null; } else if (value instanceof Number) { @@ -151,7 +169,7 @@ public void collect(int doc, long bucket) throws IOException { } @Override - LeafBucketCollector getLeafCollector(Comparable<?> value, LeafReaderContext context, LeafBucketCollector next) { + LeafBucketCollector getLeafCollector(Comparable value, LeafReaderContext context, LeafBucketCollector next) { if (value.getClass() != Double.class) { throw new IllegalArgumentException("Expected Double, got " + value.getClass()); }
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/GlobalOrdinalValuesSource.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/GlobalOrdinalValuesSource.java index 4275d1eb83839..3d29aee19b166 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/GlobalOrdinalValuesSource.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/GlobalOrdinalValuesSource.java @@ -89,7 +89,17 @@ int compareCurrentWithAfter() { } @Override - void setAfter(Comparable<?> value) { + int hashCode(int slot) { + return Long.hashCode(values.get(slot)); + } + + @Override + int hashCodeCurrent() { + return Long.hashCode(currentValue); + } + + @Override + void setAfter(Comparable value) { if (missingBucket && value == null) { afterValue = null; afterValueGlobalOrd = -1L; @@ -138,7 +148,7 @@ public void collect(int doc, long bucket) throws IOException { } @Override - LeafBucketCollector getLeafCollector(Comparable<?> value, LeafReaderContext context, LeafBucketCollector next) throws IOException { + LeafBucketCollector getLeafCollector(Comparable value, LeafReaderContext context, LeafBucketCollector next) throws IOException { if (value.getClass() != BytesRef.class) { throw new IllegalArgumentException("Expected BytesRef, got " + value.getClass()); }
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/InternalComposite.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/InternalComposite.java index e93266db805dc..26fcfed9a262f 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/InternalComposite.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/InternalComposite.java @@ -227,7 +227,7 @@ InternalBucket next() { } static class InternalBucket extends InternalMultiBucketAggregation.InternalBucket - implements CompositeAggregation.Bucket, KeyComparable<InternalBucket> { + implements CompositeAggregation.Bucket, KeyComparable { private final CompositeKey key; private final long docCount; @@ -341,7 +341,7 @@ public int compareKey(InternalBucket other) { } assert key.get(i).getClass() == other.key.get(i).getClass(); @SuppressWarnings("unchecked") - int cmp = ((Comparable)
key.get(i)).compareTo(other.key.get(i)) * reverseMuls[i]; + int cmp = key.get(i).compareTo(other.key.get(i)) * reverseMuls[i]; if (cmp != 0) { return cmp; } @@ -392,12 +392,12 @@ static Object formatObject(Object obj, DocValueFormat format) { return obj; } - private static class ArrayMap extends AbstractMap<String, Object> { + static class ArrayMap extends AbstractMap<String, Object> implements Comparable<ArrayMap> { final List<String> keys; + final Comparable[] values; final List<DocValueFormat> formats; - final Object[] values; - ArrayMap(List<String> keys, List<DocValueFormat> formats, Object[] values) { + ArrayMap(List<String> keys, List<DocValueFormat> formats, Comparable[] values) { assert keys.size() == values.length && keys.size() == formats.size(); this.keys = keys; this.formats = formats; @@ -447,5 +447,45 @@ public int size() { } }; } + + @Override + public int compareTo(ArrayMap that) { + if (that == this) { + return 0; + } + + int idx = 0; + int max = Math.min(this.keys.size(), that.keys.size()); + while (idx < max) { + int compare = compareNullables(keys.get(idx), that.keys.get(idx)); + if (compare == 0) { + compare = compareNullables(values[idx], that.values[idx]); + } + if (compare != 0) { + return compare; + } + idx++; + } + if (idx < keys.size()) { + return 1; + } + if (idx < that.keys.size()) { + return -1; + } + return 0; + } + } + + private static int compareNullables(Comparable a, Comparable b) { + if (a == b) { + return 0; + } + if (a == null) { + return -1; + } + if (b == null) { + return 1; + } + return a.compareTo(b); } }
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/LongValuesSource.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/LongValuesSource.java index 8e20fe917094b..d71ed3c3bd97d 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/LongValuesSource.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/LongValuesSource.java @@ -120,12 +120,30 @@ int compareCurrentWithAfter() { return compareValues(currentValue, afterValue); } + @Override + int hashCode(int slot) { + if (missingBucket && bits.get(slot) == false) { + return 0; + } else { + return Long.hashCode(values.get(slot)); + } + } + + @Override + int hashCodeCurrent() { + if (missingCurrentValue) { + return 0; + } else { + return Long.hashCode(currentValue); + } + } + private int compareValues(long v1, long v2) { return Long.compare(v1, v2) * reverseMul; } @Override - void setAfter(Comparable<?> value) { + void setAfter(Comparable value) { if (missingBucket && value == null) { afterValue = null; } else if (value instanceof Number) { @@ -169,7 +187,7 @@ public void collect(int doc, long bucket) throws IOException { } @Override - LeafBucketCollector getLeafCollector(Comparable<?> value, LeafReaderContext context, LeafBucketCollector next) { + LeafBucketCollector getLeafCollector(Comparable value, LeafReaderContext context, LeafBucketCollector next) { if (value.getClass() != Long.class) { throw new IllegalArgumentException("Expected Long, got " + value.getClass()); }
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/PointsSortedDocsProducer.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/PointsSortedDocsProducer.java index 9bf51e57df06d..d600e0d887c38 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/PointsSortedDocsProducer.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/PointsSortedDocsProducer.java @@ -54,7 +54,7 @@ DocIdSet
processLeaf(Query query, CompositeValuesCollectorQueue queue, return DocIdSet.EMPTY; } long lowerBucket = Long.MIN_VALUE; - Comparable<?> lowerValue = queue.getLowerValueLeadSource(); + Comparable lowerValue = queue.getLowerValueLeadSource(); if (lowerValue != null) { if (lowerValue.getClass() != Long.class) { throw new IllegalStateException("expected Long, got " + lowerValue.getClass()); } @@ -63,7 +63,7 @@ DocIdSet processLeaf(Query query, CompositeValuesCollectorQueue queue, } long upperBucket = Long.MAX_VALUE; - Comparable<?> upperValue = queue.getUpperValueLeadSource(); + Comparable upperValue = queue.getUpperValueLeadSource(); if (upperValue != null) { if (upperValue.getClass() != Long.class) { throw new IllegalStateException("expected Long, got " + upperValue.getClass());
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/SingleDimensionValuesSource.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/SingleDimensionValuesSource.java index c73820fc32a78..f49e20e5bd0dc 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/SingleDimensionValuesSource.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/SingleDimensionValuesSource.java @@ -99,10 +99,20 @@ abstract class SingleDimensionValuesSource<T extends Comparable<T>> implements R */ abstract int compareCurrentWithAfter(); + /** + * Returns a hash code value for the provided slot. + */ + abstract int hashCode(int slot); + + /** + * Returns a hash code value for the current value. + */ + abstract int hashCodeCurrent(); + /** * Sets the after value for this source. Values that compare smaller are filtered. */ - abstract void setAfter(Comparable<?> value); + abstract void setAfter(Comparable value); /** * Returns the after value set for this source. @@ -129,7 +139,7 @@ T getAfter() { * Creates a {@link LeafBucketCollector} that sets the current value for each document to the provided * value and invokes {@link LeafBucketCollector#collect} on the provided next collector. */ - abstract LeafBucketCollector getLeafCollector(Comparable<?> value, + abstract LeafBucketCollector getLeafCollector(Comparable value, LeafReaderContext context, LeafBucketCollector next) throws IOException; /**
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/SortedDocsProducer.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/SortedDocsProducer.java index ef2b37d9c081b..63530a4eed6ed 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/SortedDocsProducer.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/SortedDocsProducer.java @@ -51,7 +51,7 @@ abstract class SortedDocsProducer { * composite buckets.
*/ protected boolean processBucket(CompositeValuesCollectorQueue queue, LeafReaderContext context, DocIdSetIterator iterator, - Comparable<?> leadSourceBucket, @Nullable DocIdSetBuilder builder) throws IOException { + Comparable leadSourceBucket, @Nullable DocIdSetBuilder builder) throws IOException { final int[] topCompositeCollected = new int[1]; final boolean[] hasCollected = new boolean[1]; final LeafBucketCollector queueCollector = new LeafBucketCollector() {
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoGridAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoGridAggregationBuilder.java index 353f391f213d6..38469ff875365 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoGridAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoGridAggregationBuilder.java @@ -157,7 +157,7 @@ public int shardSize() { if (shardSize < 0) { // Use default heuristic to avoid any wrong-ranking caused by // distributed counting - shardSize = BucketUtils.suggestShardSideQueueSize(requiredSize, context.numberOfShards() == 1); + shardSize = BucketUtils.suggestShardSideQueueSize(requiredSize); } if (requiredSize <= 0 || shardSize <= 0) {
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/SignificantTermsAggregatorFactory.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/SignificantTermsAggregatorFactory.java index 7fe41407af4ca..09fd5877344f6 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/SignificantTermsAggregatorFactory.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/SignificantTermsAggregatorFactory.java @@ -195,8 +195,7 @@ protected Aggregator doCreateInternal(ValuesSource valuesSource, Aggregator pare // each shard and as // such are impossible to differentiate from non-significant terms // at that early stage. - bucketCountThresholds.setShardSize(2 * BucketUtils.suggestShardSideQueueSize(bucketCountThresholds.getRequiredSize(), - context.numberOfShards() == 1)); + bucketCountThresholds.setShardSize(2 * BucketUtils.suggestShardSideQueueSize(bucketCountThresholds.getRequiredSize())); } if (valuesSource instanceof ValuesSource.Bytes) {
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/SignificantTextAggregatorFactory.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/SignificantTextAggregatorFactory.java index a51a33defdd00..92a136960395d 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/SignificantTextAggregatorFactory.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/SignificantTextAggregatorFactory.java @@ -175,8 +175,7 @@ protected Aggregator createInternal(Aggregator parent, boolean collectsFromSingl // we want to find have only one occurrence on each shard and as // such are impossible to differentiate from non-significant terms // at that early stage. - bucketCountThresholds.setShardSize(2 * BucketUtils.suggestShardSideQueueSize(bucketCountThresholds.getRequiredSize(), - context.numberOfShards() == 1)); + bucketCountThresholds.setShardSize(2 * BucketUtils.suggestShardSideQueueSize(bucketCountThresholds.getRequiredSize())); } // TODO - need to check with mapping that this is indeed a text field....
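All of these call sites drop the second argument, so the shard-side queue-size heuristic no longer special-cases single-shard searches. A sketch of the simplified heuristic they now rely on; the 1.5x-plus-10 over-request matches what BucketUtils has historically used, but treat the exact constants as an assumption rather than a quote of the changed method:

    // Sketch of BucketUtils.suggestShardSideQueueSize after this change:
    // every shard over-requests the same way, whether or not it is the
    // only shard involved in the search.
    static int suggestShardSideQueueSize(int finalSize) {
        if (finalSize < 1) {
            throw new IllegalArgumentException("size must be positive, got " + finalSize);
        }
        // request 50% more buckets plus a small constant so that small sizes
        // still leave room for the errors introduced by distributed counting
        return (int) Math.min(Integer.MAX_VALUE, (long) (finalSize * 1.5 + 10));
    }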
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/TermsAggregatorFactory.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/TermsAggregatorFactory.java index 25f552075dead..1ff0efd3e8307 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/TermsAggregatorFactory.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/TermsAggregatorFactory.java @@ -121,8 +121,7 @@ protected Aggregator doCreateInternal(ValuesSource valuesSource, Aggregator pare // The user has not made a shardSize selection. Use default // heuristic to avoid any wrong-ranking caused by distributed // counting - bucketCountThresholds.setShardSize(BucketUtils.suggestShardSideQueueSize(bucketCountThresholds.getRequiredSize(), - context.numberOfShards() == 1)); + bucketCountThresholds.setShardSize(BucketUtils.suggestShardSideQueueSize(bucketCountThresholds.getRequiredSize())); } bucketCountThresholds.ensureValidity(); if (valuesSource instanceof ValuesSource.Bytes) { diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/CardinalityAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/CardinalityAggregationBuilder.java index 244aa1dda3fe6..fa8a1118c88ba 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/CardinalityAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/CardinalityAggregationBuilder.java @@ -122,14 +122,6 @@ public Long precisionThreshold() { return precisionThreshold; } - /** - * @deprecated no replacement - values will always be rehashed - */ - @Deprecated - public void rehash(boolean rehash) { - // Deprecated all values are already rehashed so do nothing - } - @Override protected CardinalityAggregatorFactory innerBuild(SearchContext context, ValuesSourceConfig config, AggregatorFactory parent, Builder subFactoriesBuilder) throws IOException { diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ParsedWeightedAvg.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ParsedWeightedAvg.java index 984b8509db755..bda50fb79b66e 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ParsedWeightedAvg.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ParsedWeightedAvg.java @@ -25,7 +25,7 @@ import java.io.IOException; -class ParsedWeightedAvg extends ParsedSingleValueNumericMetricsAggregation implements WeightedAvg { +public class ParsedWeightedAvg extends ParsedSingleValueNumericMetricsAggregation implements WeightedAvg { @Override public double getValue() { diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/BucketSortPipelineAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/BucketSortPipelineAggregator.java index e98fdec992722..b639a384c7691 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/BucketSortPipelineAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/BucketSortPipelineAggregator.java @@ -19,7 +19,6 @@ package org.elasticsearch.search.aggregations.pipeline; -import org.apache.lucene.util.PriorityQueue; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.search.aggregations.InternalAggregation; @@ -34,7 +33,6 @@ import java.util.ArrayList; 
import java.util.Collections; import java.util.HashMap; -import java.util.LinkedList; import java.util.List; import java.util.Map; @@ -95,22 +93,22 @@ public InternalAggregation reduce(InternalAggregation aggregation, ReduceContext return originalAgg.create(new ArrayList<>(buckets.subList(from, Math.min(from + currentSize, bucketsCount)))); } - int queueSize = Math.min(from + currentSize, bucketsCount); - PriorityQueue<ComparableBucket> ordered = new TopNPriorityQueue(queueSize); + List<ComparableBucket> ordered = new ArrayList<>(); for (InternalMultiBucketAggregation.InternalBucket bucket : buckets) { ComparableBucket comparableBucket = new ComparableBucket(originalAgg, bucket); if (comparableBucket.skip() == false) { - ordered.insertWithOverflow(new ComparableBucket(originalAgg, bucket)); + ordered.add(comparableBucket); } } - int resultSize = Math.max(ordered.size() - from, 0); + Collections.sort(ordered); - // Popping from the priority queue returns the least element. The elements we want to skip due to offset would pop last. - // Thus, we just have to pop as many elements as we expect in results and store them in reverse order. - LinkedList<InternalMultiBucketAggregation.InternalBucket> newBuckets = new LinkedList<>(); - for (int i = 0; i < resultSize; ++i) { - newBuckets.addFirst(ordered.pop().internalBucket); + // We just have to get as many elements as we expect in results and store them in the same order starting from + // the specified offset and taking currentSize into consideration. + int limit = Math.min(from + currentSize, ordered.size()); + List<InternalMultiBucketAggregation.InternalBucket> newBuckets = new ArrayList<>(); + for (int i = from; i < limit; ++i) { + newBuckets.add(ordered.get(i).internalBucket); } return originalAgg.create(newBuckets); } @@ -160,11 +158,11 @@ public int compareTo(ComparableBucket that) { if (thisValue == null && thatValue == null) { continue; } else if (thisValue == null) { - return -1; - } else if (thatValue == null) { return 1; + } else if (thatValue == null) { + return -1; } else { - compareResult = sort.order() == SortOrder.DESC ? thisValue.compareTo(thatValue) : -thisValue.compareTo(thatValue); + compareResult = sort.order() == SortOrder.DESC ? -thisValue.compareTo(thatValue) : thisValue.compareTo(thatValue); } if (compareResult != 0) { break; } @@ -173,17 +171,4 @@ public int compareTo(ComparableBucket that) { return compareResult; } } - - - private static class TopNPriorityQueue extends PriorityQueue<ComparableBucket> { - - private TopNPriorityQueue(int n) { - super(n); - } - - @Override - protected boolean lessThan(ComparableBucket a, ComparableBucket b) { - return a.compareTo(b) < 0; - } - } }
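The rewritten reduce path trades the removed TopNPriorityQueue for a full sort followed by an offset slice: with an offset (from > 0) the bounded heap would have had to retain from + size elements anyway, and the new code no longer has to rebuild results from reverse-pop order. The new loop is equivalent to this standalone sketch, with a generic element type standing in for ComparableBucket:

    import java.util.ArrayList;
    import java.util.Collections;
    import java.util.List;

    final class SortedSlice {
        // Sort everything, then return the [from, from + size) window;
        // an empty result falls out naturally when from >= buckets.size().
        static <T extends Comparable<T>> List<T> sortedSlice(List<T> buckets, int from, int size) {
            List<T> ordered = new ArrayList<>(buckets);
            Collections.sort(ordered);
            int limit = Math.min(from + size, ordered.size());
            List<T> slice = new ArrayList<>();
            for (int i = from; i < limit; i++) {
                slice.add(ordered.get(i));
            }
            return slice;
        }
    }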
diff --git a/server/src/main/java/org/elasticsearch/search/builder/SearchSourceBuilder.java b/server/src/main/java/org/elasticsearch/search/builder/SearchSourceBuilder.java index a199ce3a37776..73f6aa2fbff81 100644 --- a/server/src/main/java/org/elasticsearch/search/builder/SearchSourceBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/builder/SearchSourceBuilder.java @@ -22,6 +22,7 @@ import org.apache.logging.log4j.LogManager; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.Version; +import org.elasticsearch.common.Booleans; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.ParsingException; @@ -68,6 +69,9 @@ import java.util.stream.Collectors; import static org.elasticsearch.index.query.AbstractQueryBuilder.parseInnerQueryBuilder; +import static org.elasticsearch.search.internal.SearchContext.DEFAULT_TRACK_TOTAL_HITS_UP_TO; +import static org.elasticsearch.search.internal.SearchContext.TRACK_TOTAL_HITS_ACCURATE; +import static org.elasticsearch.search.internal.SearchContext.TRACK_TOTAL_HITS_DISABLED; /** * A search source builder allowing to easily build search source. Simple @@ -110,7 +114,6 @@ public final class SearchSourceBuilder implements Writeable, ToXContentObject, R public static final ParseField SEARCH_AFTER = new ParseField("search_after"); public static final ParseField COLLAPSE = new ParseField("collapse"); public static final ParseField SLICE = new ParseField("slice"); - public static final ParseField ALL_FIELDS_FIELDS = new ParseField("all_fields"); public static SearchSourceBuilder fromXContent(XContentParser parser) throws IOException { return fromXContent(parser, true); @@ -152,7 +155,7 @@ public static HighlightBuilder highlight() { private boolean trackScores = false; - private boolean trackTotalHits = true; + private int trackTotalHitsUpTo = DEFAULT_TRACK_TOTAL_HITS_UP_TO; private SearchAfterBuilder searchAfterBuilder; @@ -249,10 +252,10 @@ public SearchSourceBuilder(StreamInput in) throws IOException { searchAfterBuilder = in.readOptionalWriteable(SearchAfterBuilder::new); sliceBuilder = in.readOptionalWriteable(SliceBuilder::new); collapse = in.readOptionalWriteable(CollapseBuilder::new); - if (in.getVersion().onOrAfter(Version.V_6_0_0_beta1)) { - trackTotalHits = in.readBoolean(); + if (in.getVersion().onOrAfter(Version.V_7_0_0)) { + trackTotalHitsUpTo = in.readInt(); } else { - trackTotalHits = true; + trackTotalHitsUpTo = in.readBoolean() ?
TRACK_TOTAL_HITS_ACCURATE : TRACK_TOTAL_HITS_DISABLED; } } @@ -312,8 +315,10 @@ public void writeTo(StreamOutput out) throws IOException { out.writeOptionalWriteable(searchAfterBuilder); out.writeOptionalWriteable(sliceBuilder); out.writeOptionalWriteable(collapse); - if (out.getVersion().onOrAfter(Version.V_6_0_0_beta1)) { - out.writeBoolean(trackTotalHits); + if (out.getVersion().onOrAfter(Version.V_7_0_0)) { + out.writeInt(trackTotalHitsUpTo); + } else { + out.writeBoolean(trackTotalHitsUpTo > SearchContext.TRACK_TOTAL_HITS_DISABLED); } } @@ -536,11 +541,24 @@ public boolean trackScores() { * Indicates if the total hit count for the query should be tracked. */ public boolean trackTotalHits() { - return trackTotalHits; + return trackTotalHitsUpTo == TRACK_TOTAL_HITS_ACCURATE; } public SearchSourceBuilder trackTotalHits(boolean trackTotalHits) { - this.trackTotalHits = trackTotalHits; + this.trackTotalHitsUpTo = trackTotalHits ? TRACK_TOTAL_HITS_ACCURATE : TRACK_TOTAL_HITS_DISABLED; + return this; + } + + public int trackTotalHitsUpTo() { + return trackTotalHitsUpTo; + } + + public SearchSourceBuilder trackTotalHitsUpTo(int trackTotalHitsUpTo) { + if (trackTotalHitsUpTo < TRACK_TOTAL_HITS_DISABLED) { + throw new IllegalArgumentException("[track_total_hits] parameter must be positive or equals to -1, " + + "got " + trackTotalHitsUpTo); + } + this.trackTotalHitsUpTo = trackTotalHitsUpTo; return this; } @@ -979,7 +997,7 @@ private SearchSourceBuilder shallowCopy(QueryBuilder queryBuilder, QueryBuilder rewrittenBuilder.terminateAfter = terminateAfter; rewrittenBuilder.timeout = timeout; rewrittenBuilder.trackScores = trackScores; - rewrittenBuilder.trackTotalHits = trackTotalHits; + rewrittenBuilder.trackTotalHitsUpTo = trackTotalHitsUpTo; rewrittenBuilder.version = version; rewrittenBuilder.collapse = collapse; return rewrittenBuilder; @@ -1025,7 +1043,12 @@ public void parseXContent(XContentParser parser, boolean checkTrailingTokens) th } else if (TRACK_SCORES_FIELD.match(currentFieldName, parser.getDeprecationHandler())) { trackScores = parser.booleanValue(); } else if (TRACK_TOTAL_HITS_FIELD.match(currentFieldName, parser.getDeprecationHandler())) { - trackTotalHits = parser.booleanValue(); + if (token == XContentParser.Token.VALUE_BOOLEAN || + (token == XContentParser.Token.VALUE_STRING && Booleans.isBoolean(parser.text()))) { + trackTotalHitsUpTo = parser.booleanValue() ? 
TRACK_TOTAL_HITS_ACCURATE : TRACK_TOTAL_HITS_DISABLED; + } else { + trackTotalHitsUpTo = parser.intValue(); + } } else if (_SOURCE_FIELD.match(currentFieldName, parser.getDeprecationHandler())) { fetchSourceContext = FetchSourceContext.fromXContent(parser); } else if (STORED_FIELDS_FIELD.match(currentFieldName, parser.getDeprecationHandler())) { @@ -1231,8 +1254,8 @@ public XContentBuilder innerToXContent(XContentBuilder builder, Params params) t builder.field(TRACK_SCORES_FIELD.getPreferredName(), true); } - if (trackTotalHits == false) { - builder.field(TRACK_TOTAL_HITS_FIELD.getPreferredName(), false); + if (trackTotalHitsUpTo != SearchContext.DEFAULT_TRACK_TOTAL_HITS_UP_TO) { + builder.field(TRACK_TOTAL_HITS_FIELD.getPreferredName(), trackTotalHitsUpTo); } if (searchAfterBuilder != null) { @@ -1500,7 +1523,7 @@ public int hashCode() { return Objects.hash(aggregations, explain, fetchSourceContext, docValueFields, storedFieldsContext, from, highlightBuilder, indexBoosts, minScore, postQueryBuilder, queryBuilder, rescoreBuilders, scriptFields, size, sorts, searchAfterBuilder, sliceBuilder, stats, suggestBuilder, terminateAfter, timeout, trackScores, version, - profile, extBuilders, collapse, trackTotalHits); + profile, extBuilders, collapse, trackTotalHitsUpTo); } @Override @@ -1538,7 +1561,7 @@ public boolean equals(Object obj) { && Objects.equals(profile, other.profile) && Objects.equals(extBuilders, other.extBuilders) && Objects.equals(collapse, other.collapse) - && Objects.equals(trackTotalHits, other.trackTotalHits); + && Objects.equals(trackTotalHitsUpTo, other.trackTotalHitsUpTo); } @Override diff --git a/server/src/main/java/org/elasticsearch/search/internal/FilteredSearchContext.java b/server/src/main/java/org/elasticsearch/search/internal/FilteredSearchContext.java index 4f95fcc0195c0..3a7fb9f823f3a 100644 --- a/server/src/main/java/org/elasticsearch/search/internal/FilteredSearchContext.java +++ b/server/src/main/java/org/elasticsearch/search/internal/FilteredSearchContext.java @@ -322,13 +322,13 @@ public boolean trackScores() { } @Override - public SearchContext trackTotalHits(boolean trackTotalHits) { - return in.trackTotalHits(trackTotalHits); + public SearchContext trackTotalHitsUpTo(int trackTotalHitsUpTo) { + return in.trackTotalHitsUpTo(trackTotalHitsUpTo); } @Override - public boolean trackTotalHits() { - return in.trackTotalHits(); + public int trackTotalHitsUpTo() { + return in.trackTotalHitsUpTo(); } @Override diff --git a/server/src/main/java/org/elasticsearch/search/internal/InternalSearchResponse.java b/server/src/main/java/org/elasticsearch/search/internal/InternalSearchResponse.java index 48ab4914e386c..e78ce7f3fb194 100644 --- a/server/src/main/java/org/elasticsearch/search/internal/InternalSearchResponse.java +++ b/server/src/main/java/org/elasticsearch/search/internal/InternalSearchResponse.java @@ -35,9 +35,12 @@ * {@link SearchResponseSections} subclass that can be serialized over the wire. 
*/ public class InternalSearchResponse extends SearchResponseSections implements Writeable, ToXContentFragment { - public static InternalSearchResponse empty() { - return new InternalSearchResponse(SearchHits.empty(), null, null, null, false, null, 1); + return empty(true); + } + + public static InternalSearchResponse empty(boolean withTotalHits) { + return new InternalSearchResponse(SearchHits.empty(withTotalHits), null, null, null, false, null, 1); } public InternalSearchResponse(SearchHits hits, InternalAggregations aggregations, Suggest suggest,
diff --git a/server/src/main/java/org/elasticsearch/search/internal/SearchContext.java b/server/src/main/java/org/elasticsearch/search/internal/SearchContext.java index 70a52c39ee110..768143dd8fb0b 100644 --- a/server/src/main/java/org/elasticsearch/search/internal/SearchContext.java +++ b/server/src/main/java/org/elasticsearch/search/internal/SearchContext.java @@ -82,6 +82,10 @@ public abstract class SearchContext extends AbstractRefCounted implements Releasable { public static final int DEFAULT_TERMINATE_AFTER = 0; + public static final int TRACK_TOTAL_HITS_ACCURATE = Integer.MAX_VALUE; + public static final int TRACK_TOTAL_HITS_DISABLED = -1; + public static final int DEFAULT_TRACK_TOTAL_HITS_UP_TO = TRACK_TOTAL_HITS_ACCURATE; + private Map<Lifetime, List<Releasable>> clearables = null; private final AtomicBoolean closed = new AtomicBoolean(false); private InnerHitsContext innerHitsContext; @@ -240,12 +244,13 @@ public InnerHitsContext innerHits() { public abstract boolean trackScores(); - public abstract SearchContext trackTotalHits(boolean trackTotalHits); + public abstract SearchContext trackTotalHitsUpTo(int trackTotalHits); /** - * Indicates if the total hit count for the query should be tracked. Defaults to {@code true} + * Indicates the total number of hits to count accurately. + * Defaults to {@link #DEFAULT_TRACK_TOTAL_HITS_UP_TO}. */ - public abstract boolean trackTotalHits(); + public abstract int trackTotalHitsUpTo(); public abstract SearchContext searchAfter(FieldDoc searchAfter);
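The three constants introduced in SearchContext turn the old trackTotalHits boolean into an integer threshold: Integer.MAX_VALUE means count exactly, -1 means do not count at all, and anything in between means count accurately only up to that many hits. The boolean bridging used for the pre-7.0 wire format and the old setters reduces to the following self-contained restatement (not the Elasticsearch class itself):

    final class TrackTotalHits {
        static final int TRACK_TOTAL_HITS_ACCURATE = Integer.MAX_VALUE; // count everything
        static final int TRACK_TOTAL_HITS_DISABLED = -1;                // count nothing

        // old boolean API -> new integer threshold
        static int fromBoolean(boolean trackTotalHits) {
            return trackTotalHits ? TRACK_TOTAL_HITS_ACCURATE : TRACK_TOTAL_HITS_DISABLED;
        }

        // new threshold -> old boolean wire format: any threshold above
        // "disabled" still tracks some hits
        static boolean toBoolean(int trackTotalHitsUpTo) {
            return trackTotalHitsUpTo > TRACK_TOTAL_HITS_DISABLED;
        }
    }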
diff --git a/server/src/main/java/org/elasticsearch/search/internal/ShardSearchLocalRequest.java b/server/src/main/java/org/elasticsearch/search/internal/ShardSearchLocalRequest.java index 72a12b805eb17..0921681124e33 100644 --- a/server/src/main/java/org/elasticsearch/search/internal/ShardSearchLocalRequest.java +++ b/server/src/main/java/org/elasticsearch/search/internal/ShardSearchLocalRequest.java @@ -22,6 +22,7 @@ import org.elasticsearch.Version; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.search.SearchType; +import org.elasticsearch.common.Nullable; import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; @@ -76,8 +77,8 @@ public class ShardSearchLocalRequest implements ShardSearchRequest { ShardSearchLocalRequest() { } - ShardSearchLocalRequest(SearchRequest searchRequest, ShardId shardId, int numberOfShards, - AliasFilter aliasFilter, float indexBoost, long nowInMillis, String clusterAlias, String[] indexRoutings) { + ShardSearchLocalRequest(SearchRequest searchRequest, ShardId shardId, int numberOfShards, AliasFilter aliasFilter, float indexBoost, + long nowInMillis, @Nullable String clusterAlias, String[] indexRoutings) { this(shardId, numberOfShards, searchRequest.searchType(), searchRequest.source(), searchRequest.types(), searchRequest.requestCache(), aliasFilter, indexBoost, searchRequest.allowPartialSearchResults(), indexRoutings, searchRequest.preference()); @@ -113,7 +114,6 @@ public ShardSearchLocalRequest(ShardId shardId, int numberOfShards, SearchType s this.preference = preference; } - @Override public ShardId shardId() { return shardId;
diff --git a/server/src/main/java/org/elasticsearch/search/internal/ShardSearchRequest.java b/server/src/main/java/org/elasticsearch/search/internal/ShardSearchRequest.java index 862c1a6960bed..3fc16584eb0bf 100644 --- a/server/src/main/java/org/elasticsearch/search/internal/ShardSearchRequest.java +++ b/server/src/main/java/org/elasticsearch/search/internal/ShardSearchRequest.java @@ -24,6 +24,7 @@ import org.elasticsearch.cluster.metadata.AliasMetaData; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.CheckedFunction; +import org.elasticsearch.common.Nullable; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.collect.ImmutableOpenMap; import org.elasticsearch.index.Index; @@ -151,9 +152,9 @@ static QueryBuilder parseAliasFilter(CheckedFunction /** - * Returns the cluster alias if this request is a cross cluster search request, null if the request if targeted to the local - * cluster. + * Returns the cluster alias in case the request is part of a cross-cluster search request, null otherwise.
*/ + @Nullable String getClusterAlias(); Rewriteable getRewriteable(); diff --git a/server/src/main/java/org/elasticsearch/search/internal/ShardSearchTransportRequest.java b/server/src/main/java/org/elasticsearch/search/internal/ShardSearchTransportRequest.java index e7aad0bd51786..59d1c2e089e02 100644 --- a/server/src/main/java/org/elasticsearch/search/internal/ShardSearchTransportRequest.java +++ b/server/src/main/java/org/elasticsearch/search/internal/ShardSearchTransportRequest.java @@ -25,6 +25,7 @@ import org.elasticsearch.action.search.SearchTask; import org.elasticsearch.action.search.SearchType; import org.elasticsearch.action.support.IndicesOptions; +import org.elasticsearch.common.Nullable; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -51,12 +52,9 @@ public class ShardSearchTransportRequest extends TransportRequest implements Sha private ShardSearchLocalRequest shardSearchLocalRequest; - public ShardSearchTransportRequest(){ - } - public ShardSearchTransportRequest(OriginalIndices originalIndices, SearchRequest searchRequest, ShardId shardId, int numberOfShards, AliasFilter aliasFilter, float indexBoost, long nowInMillis, - String clusterAlias, String[] indexRoutings) { + @Nullable String clusterAlias, String[] indexRoutings) { this.shardSearchLocalRequest = new ShardSearchLocalRequest(searchRequest, shardId, numberOfShards, aliasFilter, indexBoost, nowInMillis, clusterAlias, indexRoutings); this.originalIndices = originalIndices; diff --git a/server/src/main/java/org/elasticsearch/search/query/QueryPhase.java b/server/src/main/java/org/elasticsearch/search/query/QueryPhase.java index 10fc6a648af66..9ccde2468227a 100644 --- a/server/src/main/java/org/elasticsearch/search/query/QueryPhase.java +++ b/server/src/main/java/org/elasticsearch/search/query/QueryPhase.java @@ -166,7 +166,6 @@ static boolean execute(SearchContext searchContext, } // ... 
and stop collecting after ${size} matches searchContext.terminateAfter(searchContext.size()); - searchContext.trackTotalHits(false); } else if (canEarlyTerminate(reader, searchContext.sort())) { // now this gets interesting: since the search sort is a prefix of the index sort, we can directly // skip to the desired doc @@ -177,7 +176,6 @@ static boolean execute(SearchContext searchContext, .build(); query = bq; } - searchContext.trackTotalHits(false); } } }
diff --git a/server/src/main/java/org/elasticsearch/search/query/TopDocsCollectorContext.java b/server/src/main/java/org/elasticsearch/search/query/TopDocsCollectorContext.java index fcf70a4f98c05..2314d11e7e387 100644 --- a/server/src/main/java/org/elasticsearch/search/query/TopDocsCollectorContext.java +++ b/server/src/main/java/org/elasticsearch/search/query/TopDocsCollectorContext.java @@ -35,6 +35,7 @@ import org.apache.lucene.search.TopDocs; import org.apache.lucene.search.TopDocsCollector; import org.apache.lucene.search.TopFieldCollector; +import org.apache.lucene.search.TopFieldDocs; import org.apache.lucene.search.TopScoreDocCollector; import org.apache.lucene.search.TotalHitCountCollector; import org.apache.lucene.search.TotalHits; @@ -92,27 +93,32 @@ static class EmptyTopDocsCollectorContext extends TopDocsCollectorContext { * Ctr * @param reader The index reader * @param query The query to execute - * @param trackTotalHits True if the total number of hits should be tracked + * @param trackTotalHitsUpTo The threshold up to which the total hit count should be tracked * @param hasFilterCollector True if the collector chain contains a filter */ private EmptyTopDocsCollectorContext(IndexReader reader, Query query, - boolean trackTotalHits, boolean hasFilterCollector) throws IOException { + int trackTotalHitsUpTo, boolean hasFilterCollector) throws IOException { super(REASON_SEARCH_COUNT, 0); - if (trackTotalHits) { + if (trackTotalHitsUpTo == SearchContext.TRACK_TOTAL_HITS_DISABLED) { + this.collector = new EarlyTerminatingCollector(new TotalHitCountCollector(), 0, false); + // for bwc hit count is set to 0, it will be converted to -1 by the coordinating node + this.hitCountSupplier = () -> new TotalHits(0, TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO); + } else { TotalHitCountCollector hitCountCollector = new TotalHitCountCollector(); // implicit total hit counts are valid only when there is no filter collector in the chain int hitCount = hasFilterCollector ? -1 : shortcutTotalHitCount(reader, query); if (hitCount == -1) { - this.collector = hitCountCollector; - this.hitCountSupplier = () -> new TotalHits(hitCountCollector.getTotalHits(), TotalHits.Relation.EQUAL_TO); + if (trackTotalHitsUpTo == SearchContext.TRACK_TOTAL_HITS_ACCURATE) { + this.collector = hitCountCollector; + this.hitCountSupplier = () -> new TotalHits(hitCountCollector.getTotalHits(), TotalHits.Relation.EQUAL_TO); + } else { + this.collector = new EarlyTerminatingCollector(hitCountCollector, trackTotalHitsUpTo, false); + this.hitCountSupplier = () -> new TotalHits(hitCount, TotalHits.Relation.EQUAL_TO); + } } else { this.collector = new EarlyTerminatingCollector(hitCountCollector, 0, false); this.hitCountSupplier = () -> new TotalHits(hitCount, TotalHits.Relation.EQUAL_TO); } - } else { - this.collector = new EarlyTerminatingCollector(new TotalHitCountCollector(), 0, false); - // for bwc hit count is set to 0, it will be converted to -1 by the coordinating node - this.hitCountSupplier = () -> new TotalHits(0, TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO); } }
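Operationally, "tracking up to a threshold" means the collector counts matching documents until the threshold is reached and then terminates early; from that point the count is only a lower bound and must be reported with Relation.GREATER_THAN_OR_EQUAL_TO rather than EQUAL_TO. A simplified sketch of that contract, with assumed names rather than the real EarlyTerminatingCollector:

    import org.apache.lucene.search.TotalHits;

    final class ThresholdHitCounter {
        private final int threshold;
        private int count;
        private boolean terminatedEarly;

        ThresholdHitCounter(int threshold) {
            this.threshold = threshold;
        }

        // called once per matching document; returns false to stop collecting
        boolean collect() {
            if (++count >= threshold) {
                terminatedEarly = true;
                return false;
            }
            return true;
        }

        TotalHits totalHits() {
            // an early-terminated count is only a lower bound
            return new TotalHits(count, terminatedEarly
                ? TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO
                : TotalHits.Relation.EQUAL_TO);
        }
    }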
@@ -184,7 +190,7 @@ private static TopDocsCollector<?> createCollector(@Nullable SortAndFormats sort } } - private final @Nullable SortAndFormats sortAndFormats; + protected final @Nullable SortAndFormats sortAndFormats; private final Collector collector; private final Supplier<TotalHits> totalHitsSupplier; private final Supplier<TopDocs> topDocsSupplier; @@ -198,7 +204,7 @@ private static TopDocsCollector<?> createCollector(@Nullable SortAndFormats sort * @param numHits The number of top hits to retrieve * @param searchAfter The doc this request should "search after" * @param trackMaxScore True if max score should be tracked - * @param trackTotalHits True if the total number of hits should be tracked + * @param trackTotalHitsUpTo The threshold up to which the total hit count should be tracked * @param hasFilterCollector True if the collector chain contains at least one collector that can filter documents */ private SimpleTopDocsCollectorContext(IndexReader reader, @@ -207,25 +213,28 @@ private SimpleTopDocsCollectorContext(IndexReader reader, @Nullable ScoreDoc searchAfter, int numHits, boolean trackMaxScore, - boolean trackTotalHits, + int trackTotalHitsUpTo, boolean hasFilterCollector) throws IOException { super(REASON_SEARCH_TOP_HITS, numHits); this.sortAndFormats = sortAndFormats; - // implicit total hit counts are valid only when there is no filter collector in the chain - final int hitCount = hasFilterCollector ? -1 : shortcutTotalHitCount(reader, query); final TopDocsCollector<?> topDocsCollector; - if (hitCount == -1 && trackTotalHits) { - topDocsCollector = createCollector(sortAndFormats, numHits, searchAfter, Integer.MAX_VALUE); + if (trackTotalHitsUpTo == SearchContext.TRACK_TOTAL_HITS_DISABLED) { + // don't compute hit counts via the collector + topDocsCollector = createCollector(sortAndFormats, numHits, searchAfter, 1); topDocsSupplier = new CachedSupplier<>(topDocsCollector::topDocs); - totalHitsSupplier = () -> topDocsSupplier.get().totalHits; + totalHitsSupplier = () -> new TotalHits(0, TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO); } else { - topDocsCollector = createCollector(sortAndFormats, numHits, searchAfter, 1); // don't compute hit counts via the collector - topDocsSupplier = new CachedSupplier<>(topDocsCollector::topDocs); + // implicit total hit counts are valid only when there is no filter collector in the chain + final int hitCount = hasFilterCollector ?
-1 : shortcutTotalHitCount(reader, query); if (hitCount == -1) { - assert trackTotalHits == false; - totalHitsSupplier = () -> new TotalHits(0, TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO); + topDocsCollector = createCollector(sortAndFormats, numHits, searchAfter, trackTotalHitsUpTo); + topDocsSupplier = new CachedSupplier<>(topDocsCollector::topDocs); + totalHitsSupplier = () -> topDocsSupplier.get().totalHits; } else { + // don't compute hit counts via the collector + topDocsCollector = createCollector(sortAndFormats, numHits, searchAfter, 1); + topDocsSupplier = new CachedSupplier<>(topDocsCollector::topDocs); totalHitsSupplier = () -> new TotalHits(hitCount, TotalHits.Relation.EQUAL_TO); } } @@ -254,11 +263,23 @@ Collector create(Collector in) { return collector; } + TopDocsAndMaxScore newTopDocs() { + TopDocs in = topDocsSupplier.get(); + float maxScore = maxScoreSupplier.get(); + final TopDocs newTopDocs; + if (in instanceof TopFieldDocs) { + TopFieldDocs fieldDocs = (TopFieldDocs) in; + newTopDocs = new TopFieldDocs(totalHitsSupplier.get(), fieldDocs.scoreDocs, fieldDocs.fields); + } else { + newTopDocs = new TopDocs(totalHitsSupplier.get(), in.scoreDocs); + } + return new TopDocsAndMaxScore(newTopDocs, maxScore); + } + @Override void postProcess(QuerySearchResult result) throws IOException { - final TopDocs topDocs = topDocsSupplier.get(); - topDocs.totalHits = totalHitsSupplier.get(); - result.topDocs(new TopDocsAndMaxScore(topDocs, maxScoreSupplier.get()), sortAndFormats == null ? null : sortAndFormats.formats); + final TopDocsAndMaxScore topDocs = newTopDocs(); + result.topDocs(topDocs, sortAndFormats == null ? null : sortAndFormats.formats); } } @@ -273,18 +294,17 @@ private ScrollingTopDocsCollectorContext(IndexReader reader, int numHits, boolean trackMaxScore, int numberOfShards, - boolean trackTotalHits, + int trackTotalHitsUpTo, boolean hasFilterCollector) throws IOException { super(reader, query, sortAndFormats, scrollContext.lastEmittedDoc, numHits, trackMaxScore, - trackTotalHits, hasFilterCollector); + trackTotalHitsUpTo, hasFilterCollector); this.scrollContext = Objects.requireNonNull(scrollContext); this.numberOfShards = numberOfShards; } @Override void postProcess(QuerySearchResult result) throws IOException { - super.postProcess(result); - final TopDocsAndMaxScore topDocs = result.topDocs(); + final TopDocsAndMaxScore topDocs = newTopDocs(); if (scrollContext.totalHits == null) { // first round scrollContext.totalHits = topDocs.topDocs.totalHits; @@ -302,7 +322,7 @@ void postProcess(QuerySearchResult result) throws IOException { scrollContext.lastEmittedDoc = topDocs.topDocs.scoreDocs[topDocs.topDocs.scoreDocs.length - 1]; } } - result.topDocs(topDocs, result.sortValueFormats()); + result.topDocs(topDocs, sortAndFormats == null ? null : sortAndFormats.formats); } } @@ -351,13 +371,17 @@ static TopDocsCollectorContext createTopDocsCollectorContext(SearchContext searc final int totalNumDocs = Math.max(1, reader.numDocs()); if (searchContext.size() == 0) { // no matter what the value of from is - return new EmptyTopDocsCollectorContext(reader, query, searchContext.trackTotalHits(), hasFilterCollector); + return new EmptyTopDocsCollectorContext(reader, query, searchContext.trackTotalHitsUpTo(), hasFilterCollector); } else if (searchContext.scrollContext() != null) { + // we can disable the tracking of total hits after the initial scroll query + // since the total hits is preserved in the scroll context. 
+ int trackTotalHitsUpTo = searchContext.scrollContext().totalHits != null ? + SearchContext.TRACK_TOTAL_HITS_DISABLED : SearchContext.TRACK_TOTAL_HITS_ACCURATE; // no matter what the value of from is int numDocs = Math.min(searchContext.size(), totalNumDocs); return new ScrollingTopDocsCollectorContext(reader, query, searchContext.scrollContext(), searchContext.sort(), numDocs, searchContext.trackScores(), searchContext.numberOfShards(), - searchContext.trackTotalHits(), hasFilterCollector); + trackTotalHitsUpTo, hasFilterCollector); } else if (searchContext.collapse() != null) { boolean trackScores = searchContext.sort() == null ? true : searchContext.trackScores(); int numDocs = Math.min(searchContext.from() + searchContext.size(), totalNumDocs); @@ -372,7 +396,7 @@ static TopDocsCollectorContext createTopDocsCollectorContext(SearchContext searc } } return new SimpleTopDocsCollectorContext(reader, query, searchContext.sort(), searchContext.searchAfter(), numDocs, - searchContext.trackScores(), searchContext.trackTotalHits(), hasFilterCollector) { + searchContext.trackScores(), searchContext.trackTotalHitsUpTo(), hasFilterCollector) { @Override boolean shouldRescore() { return rescore; diff --git a/server/src/main/java/org/elasticsearch/search/suggest/phrase/Laplace.java b/server/src/main/java/org/elasticsearch/search/suggest/phrase/Laplace.java index c4a6f15e03ced..4ca2483c6aed5 100644 --- a/server/src/main/java/org/elasticsearch/search/suggest/phrase/Laplace.java +++ b/server/src/main/java/org/elasticsearch/search/suggest/phrase/Laplace.java @@ -119,7 +119,7 @@ public static SmoothingModel fromXContent(XContentParser parser) throws IOExcept @Override public WordScorerFactory buildWordScorerFactory() { - return (IndexReader reader, Terms terms, String field, double realWordLikelyhood, BytesRef separator) - -> new LaplaceScorer(reader, terms, field, realWordLikelyhood, separator, alpha); + return (IndexReader reader, Terms terms, String field, double realWordLikelihood, BytesRef separator) + -> new LaplaceScorer(reader, terms, field, realWordLikelihood, separator, alpha); } } diff --git a/server/src/main/java/org/elasticsearch/search/suggest/phrase/LaplaceScorer.java b/server/src/main/java/org/elasticsearch/search/suggest/phrase/LaplaceScorer.java index 52157a0fe8bde..25539b00163b1 100644 --- a/server/src/main/java/org/elasticsearch/search/suggest/phrase/LaplaceScorer.java +++ b/server/src/main/java/org/elasticsearch/search/suggest/phrase/LaplaceScorer.java @@ -29,8 +29,8 @@ final class LaplaceScorer extends WordScorer { private double alpha; LaplaceScorer(IndexReader reader, Terms terms, String field, - double realWordLikelyhood, BytesRef separator, double alpha) throws IOException { - super(reader, terms, field, realWordLikelyhood, separator); + double realWordLikelihood, BytesRef separator, double alpha) throws IOException { + super(reader, terms, field, realWordLikelihood, separator); this.alpha = alpha; } diff --git a/server/src/main/java/org/elasticsearch/search/suggest/phrase/LinearInterpolatingScorer.java b/server/src/main/java/org/elasticsearch/search/suggest/phrase/LinearInterpolatingScorer.java index b0c9552f8a8d6..aca226ea41671 100644 --- a/server/src/main/java/org/elasticsearch/search/suggest/phrase/LinearInterpolatingScorer.java +++ b/server/src/main/java/org/elasticsearch/search/suggest/phrase/LinearInterpolatingScorer.java @@ -32,9 +32,9 @@ public final class LinearInterpolatingScorer extends WordScorer { private final double bigramLambda; private final double 
trigramLambda; - public LinearInterpolatingScorer(IndexReader reader, Terms terms, String field, double realWordLikelyhood, BytesRef separator, + public LinearInterpolatingScorer(IndexReader reader, Terms terms, String field, double realWordLikelihood, BytesRef separator, double trigramLambda, double bigramLambda, double unigramLambda) throws IOException { - super(reader, terms, field, realWordLikelyhood, separator); + super(reader, terms, field, realWordLikelihood, separator); double sum = unigramLambda + bigramLambda + trigramLambda; this.unigramLambda = unigramLambda / sum; this.bigramLambda = bigramLambda / sum;
diff --git a/server/src/main/java/org/elasticsearch/search/suggest/phrase/LinearInterpolation.java b/server/src/main/java/org/elasticsearch/search/suggest/phrase/LinearInterpolation.java index e609be1d77c18..d38b34f50eeb7 100644 --- a/server/src/main/java/org/elasticsearch/search/suggest/phrase/LinearInterpolation.java +++ b/server/src/main/java/org/elasticsearch/search/suggest/phrase/LinearInterpolation.java @@ -168,8 +168,8 @@ public static LinearInterpolation fromXContent(XContentParser parser) throws IOE @Override public WordScorerFactory buildWordScorerFactory() { - return (IndexReader reader, Terms terms, String field, double realWordLikelyhood, BytesRef separator) -> - new LinearInterpolatingScorer(reader, terms, field, realWordLikelyhood, separator, trigramLambda, bigramLambda, + return (IndexReader reader, Terms terms, String field, double realWordLikelihood, BytesRef separator) -> + new LinearInterpolatingScorer(reader, terms, field, realWordLikelihood, separator, trigramLambda, bigramLambda, unigramLambda); } }
diff --git a/server/src/main/java/org/elasticsearch/search/suggest/phrase/NoisyChannelSpellChecker.java b/server/src/main/java/org/elasticsearch/search/suggest/phrase/NoisyChannelSpellChecker.java index 635fa64c59b53..7f225f1c3ea73 100644 --- a/server/src/main/java/org/elasticsearch/search/suggest/phrase/NoisyChannelSpellChecker.java +++ b/server/src/main/java/org/elasticsearch/search/suggest/phrase/NoisyChannelSpellChecker.java @@ -38,14 +38,14 @@ //TODO public for tests public final class NoisyChannelSpellChecker { - public static final double REAL_WORD_LIKELYHOOD = 0.95d; + public static final double REAL_WORD_LIKELIHOOD = 0.95d; public static final int DEFAULT_TOKEN_LIMIT = 10; private final double realWordLikelihood; private final boolean requireUnigram; private final int tokenLimit; public NoisyChannelSpellChecker() { - this(REAL_WORD_LIKELYHOOD); + this(REAL_WORD_LIKELIHOOD); } public NoisyChannelSpellChecker(double nonErrorLikelihood) {
diff --git a/server/src/main/java/org/elasticsearch/search/suggest/phrase/PhraseSuggester.java b/server/src/main/java/org/elasticsearch/search/suggest/phrase/PhraseSuggester.java index 10112ad2f43dd..413afd155d45b 100644 --- a/server/src/main/java/org/elasticsearch/search/suggest/phrase/PhraseSuggester.java +++ b/server/src/main/java/org/elasticsearch/search/suggest/phrase/PhraseSuggester.java @@ -69,7 +69,7 @@ private PhraseSuggester() {} @Override public Suggestion<? extends Entry<? extends Option>> innerExecute(String name, PhraseSuggestionContext suggestion, IndexSearcher searcher, CharsRefBuilder spare) throws IOException { - double realWordErrorLikelihood = suggestion.realworldErrorLikelyhood(); + double realWordErrorLikelihood = suggestion.realworldErrorLikelihood(); final PhraseSuggestion response = new PhraseSuggestion(name, suggestion.getSize()); final IndexReader indexReader = searcher.getIndexReader(); List<PhraseSuggestionContext.DirectCandidateGenerator> generators =
suggestion.generators(); diff --git a/server/src/main/java/org/elasticsearch/search/suggest/phrase/PhraseSuggestionContext.java b/server/src/main/java/org/elasticsearch/search/suggest/phrase/PhraseSuggestionContext.java index 4fd37d01ca5ee..84987f30e87f7 100644 --- a/server/src/main/java/org/elasticsearch/search/suggest/phrase/PhraseSuggestionContext.java +++ b/server/src/main/java/org/elasticsearch/search/suggest/phrase/PhraseSuggestionContext.java @@ -40,8 +40,8 @@ class PhraseSuggestionContext extends SuggestionContext { static final float DEFAULT_RWE_ERRORLIKELIHOOD = 0.95f; static final float DEFAULT_MAX_ERRORS = 0.5f; static final String DEFAULT_SEPARATOR = " "; - static final WordScorer.WordScorerFactory DEFAULT_SCORER = (IndexReader reader, Terms terms, String field, double realWordLikelyhood, - BytesRef separator) -> new StupidBackoffScorer(reader, terms, field, realWordLikelyhood, separator, 0.4f); + static final WordScorer.WordScorerFactory DEFAULT_SCORER = (IndexReader reader, Terms terms, String field, double realWordLikelihood, + BytesRef separator) -> new StupidBackoffScorer(reader, terms, field, realWordLikelihood, separator, 0.4f); private float maxErrors = DEFAULT_MAX_ERRORS; private BytesRef separator = new BytesRef(DEFAULT_SEPARATOR); @@ -78,7 +78,7 @@ public void setSeparator(BytesRef separator) { this.separator = separator; } - public Float realworldErrorLikelyhood() { + public Float realworldErrorLikelihood() { return realworldErrorLikelihood; } diff --git a/server/src/main/java/org/elasticsearch/search/suggest/phrase/StupidBackoff.java b/server/src/main/java/org/elasticsearch/search/suggest/phrase/StupidBackoff.java index c7edde8bbaf76..29d03890b2791 100644 --- a/server/src/main/java/org/elasticsearch/search/suggest/phrase/StupidBackoff.java +++ b/server/src/main/java/org/elasticsearch/search/suggest/phrase/StupidBackoff.java @@ -122,7 +122,7 @@ public static SmoothingModel fromXContent(XContentParser parser) throws IOExcept @Override public WordScorerFactory buildWordScorerFactory() { - return (IndexReader reader, Terms terms, String field, double realWordLikelyhood, BytesRef separator) - -> new StupidBackoffScorer(reader, terms, field, realWordLikelyhood, separator, discount); + return (IndexReader reader, Terms terms, String field, double realWordLikelihood, BytesRef separator) + -> new StupidBackoffScorer(reader, terms, field, realWordLikelihood, separator, discount); } } diff --git a/server/src/main/java/org/elasticsearch/search/suggest/phrase/StupidBackoffScorer.java b/server/src/main/java/org/elasticsearch/search/suggest/phrase/StupidBackoffScorer.java index d6862f384bebf..54493acf8a592 100644 --- a/server/src/main/java/org/elasticsearch/search/suggest/phrase/StupidBackoffScorer.java +++ b/server/src/main/java/org/elasticsearch/search/suggest/phrase/StupidBackoffScorer.java @@ -29,8 +29,8 @@ class StupidBackoffScorer extends WordScorer { private final double discount; StupidBackoffScorer(IndexReader reader, Terms terms,String field, - double realWordLikelyhood, BytesRef separator, double discount) throws IOException { - super(reader, terms, field, realWordLikelyhood, separator); + double realWordLikelihood, BytesRef separator, double discount) throws IOException { + super(reader, terms, field, realWordLikelihood, separator); this.discount = discount; } diff --git a/server/src/main/java/org/elasticsearch/search/suggest/phrase/WordScorer.java b/server/src/main/java/org/elasticsearch/search/suggest/phrase/WordScorer.java index b13f33f76394b..cc4a44590e79e 100644 
--- a/server/src/main/java/org/elasticsearch/search/suggest/phrase/WordScorer.java +++ b/server/src/main/java/org/elasticsearch/search/suggest/phrase/WordScorer.java @@ -37,18 +37,18 @@ public abstract class WordScorer { protected final String field; protected final Terms terms; protected final long vocabluarySize; - protected final double realWordLikelyhood; + protected final double realWordLikelihood; protected final BytesRefBuilder spare = new BytesRefBuilder(); protected final BytesRef separator; protected final long numTerms; private final TermsEnum termsEnum; private final boolean useTotalTermFreq; - public WordScorer(IndexReader reader, String field, double realWordLikelyHood, BytesRef separator) throws IOException { - this(reader, MultiTerms.getTerms(reader, field), field, realWordLikelyHood, separator); + public WordScorer(IndexReader reader, String field, double realWordLikelihood, BytesRef separator) throws IOException { + this(reader, MultiTerms.getTerms(reader, field), field, realWordLikelihood, separator); } - public WordScorer(IndexReader reader, Terms terms, String field, double realWordLikelyHood, BytesRef separator) throws IOException { + public WordScorer(IndexReader reader, Terms terms, String field, double realWordLikelihood, BytesRef separator) throws IOException { this.field = field; if (terms == null) { throw new IllegalArgumentException("Field: [" + field + "] does not exist"); @@ -65,7 +65,7 @@ public WordScorer(IndexReader reader, Terms terms, String field, double realWord this.termsEnum = new FreqTermsEnum(reader, field, !useTotalTermFreq, useTotalTermFreq, null, BigArrays.NON_RECYCLING_INSTANCE); // non recycling for now this.reader = reader; - this.realWordLikelyhood = realWordLikelyHood; + this.realWordLikelihood = realWordLikelihood; this.separator = separator; } @@ -78,7 +78,7 @@ public long frequency(BytesRef term) throws IOException { protected double channelScore(Candidate candidate, Candidate original) throws IOException { if (candidate.stringDistance == 1.0d) { - return realWordLikelyhood; + return realWordLikelihood; } return candidate.stringDistance; } @@ -117,6 +117,6 @@ public static BytesRef join(BytesRef separator, BytesRefBuilder result, BytesRef public interface WordScorerFactory { WordScorer newScorer(IndexReader reader, Terms terms, - String field, double realWordLikelyhood, BytesRef separator) throws IOException; + String field, double realWordLikelihood, BytesRef separator) throws IOException; } } diff --git a/server/src/main/java/org/elasticsearch/snapshots/SnapshotException.java b/server/src/main/java/org/elasticsearch/snapshots/SnapshotException.java index d389ed634f3af..05db85d6f7211 100644 --- a/server/src/main/java/org/elasticsearch/snapshots/SnapshotException.java +++ b/server/src/main/java/org/elasticsearch/snapshots/SnapshotException.java @@ -51,10 +51,6 @@ public SnapshotException(final Snapshot snapshot, final String msg, final Throwa } } - public SnapshotException(final String repositoryName, final SnapshotId snapshotId, final String msg) { - this(repositoryName, snapshotId, msg, null); - } - public SnapshotException(final String repositoryName, final SnapshotId snapshotId, final String msg, final Throwable cause) { super("[" + repositoryName + ":" + snapshotId + "] " + msg, cause); this.repositoryName = repositoryName; diff --git a/server/src/main/java/org/elasticsearch/snapshots/SnapshotId.java b/server/src/main/java/org/elasticsearch/snapshots/SnapshotId.java index 7a8848618c25c..59e1d960bcbfc 100644 --- 
a/server/src/main/java/org/elasticsearch/snapshots/SnapshotId.java +++ b/server/src/main/java/org/elasticsearch/snapshots/SnapshotId.java @@ -131,25 +131,17 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws } public static SnapshotId fromXContent(XContentParser parser) throws IOException { - // the new format from 5.0 which contains the snapshot name and uuid - if (parser.currentToken() == XContentParser.Token.START_OBJECT) { - String name = null; - String uuid = null; - while (parser.nextToken() != XContentParser.Token.END_OBJECT) { - String currentFieldName = parser.currentName(); - parser.nextToken(); - if (NAME.equals(currentFieldName)) { - name = parser.text(); - } else if (UUID.equals(currentFieldName)) { - uuid = parser.text(); - } + String name = null; + String uuid = null; + while (parser.nextToken() != XContentParser.Token.END_OBJECT) { + String currentFieldName = parser.currentName(); + parser.nextToken(); + if (NAME.equals(currentFieldName)) { + name = parser.text(); + } else if (UUID.equals(currentFieldName)) { + uuid = parser.text(); } - return new SnapshotId(name, uuid); - } else { - // the old format pre 5.0 that only contains the snapshot name, use the name as the uuid too - final String name = parser.text(); - return new SnapshotId(name, name); } + return new SnapshotId(name, uuid); } - } diff --git a/server/src/main/java/org/elasticsearch/snapshots/SnapshotShardsService.java b/server/src/main/java/org/elasticsearch/snapshots/SnapshotShardsService.java index 40c89f10ccbc5..c0e196f1f4eb3 100644 --- a/server/src/main/java/org/elasticsearch/snapshots/SnapshotShardsService.java +++ b/server/src/main/java/org/elasticsearch/snapshots/SnapshotShardsService.java @@ -94,7 +94,7 @@ public class SnapshotShardsService extends AbstractLifecycleComponent implements ClusterStateListener, IndexEventListener { private static final Logger logger = LogManager.getLogger(SnapshotShardsService.class); - public static final String UPDATE_SNAPSHOT_STATUS_ACTION_NAME = "internal:cluster/snapshot/update_snapshot_status"; + private static final String UPDATE_SNAPSHOT_STATUS_ACTION_NAME = "internal:cluster/snapshot/update_snapshot_status"; private final ClusterService clusterService; diff --git a/server/src/main/java/org/elasticsearch/snapshots/SnapshotsService.java b/server/src/main/java/org/elasticsearch/snapshots/SnapshotsService.java index 8c505d20d17ff..65802377be032 100644 --- a/server/src/main/java/org/elasticsearch/snapshots/SnapshotsService.java +++ b/server/src/main/java/org/elasticsearch/snapshots/SnapshotsService.java @@ -515,7 +515,7 @@ public void onFailure(Exception e) { cleanupAfterError(e); } - public void onNoLongerMaster(String source) { + public void onNoLongerMaster() { userCreateSnapshotListener.onFailure(e); } @@ -1073,7 +1073,7 @@ public void onFailure(String source, Exception e) { @Override public void onNoLongerMaster(String source) { if (listener != null) { - listener.onNoLongerMaster(source); + listener.onNoLongerMaster(); } } @@ -1423,8 +1423,6 @@ private ImmutableOpenMap shard builder.put(shardId, new SnapshotsInProgress.ShardSnapshotStatus(null, State.MISSING, "primary shard is not allocated")); } else if (primary.relocating() || primary.initializing()) { - // The WAITING state was introduced in V1.2.0 - - // don't use it if there are nodes with older version in the cluster builder.put(shardId, new SnapshotsInProgress.ShardSnapshotStatus(primary.currentNodeId(), State.WAITING)); } else if (!primary.started()) { builder.put(shardId, 
diff --git a/server/src/main/java/org/elasticsearch/threadpool/AutoQueueAdjustingExecutorBuilder.java b/server/src/main/java/org/elasticsearch/threadpool/AutoQueueAdjustingExecutorBuilder.java index 45f53006ecd59..bc17d52bcc236 100644 --- a/server/src/main/java/org/elasticsearch/threadpool/AutoQueueAdjustingExecutorBuilder.java +++ b/server/src/main/java/org/elasticsearch/threadpool/AutoQueueAdjustingExecutorBuilder.java @@ -75,8 +75,12 @@ public final class AutoQueueAdjustingExecutorBuilder extends ExecutorBuilder<AutoQueueAdjustingExecutorBuilder.AutoExecutorSettings> this.minQueueSizeSetting = new Setting<>( minSizeKey, Integer.toString(minQueueSize), - (s) -> Setting.parseInt(s, 0, minSizeKey), + s -> Setting.parseInt(s, 0, minSizeKey), new Setting.Validator<Integer>() { + @Override + public void validate(Integer value) { + } + @Override public void validate(Integer value, Map<Setting<Integer>, Integer> settings) { if (value > settings.get(tempMaxQueueSizeSetting)) { @@ -94,8 +98,12 @@ public Iterator<Setting<?>> settings() { this.maxQueueSizeSetting = new Setting<>( maxSizeKey, Integer.toString(maxQueueSize), - (s) -> Setting.parseInt(s, 0, maxSizeKey), + s -> Setting.parseInt(s, 0, maxSizeKey), new Setting.Validator<Integer>() { + @Override + public void validate(Integer value) { + } + @Override public void validate(Integer value, Map<Setting<Integer>, Integer> settings) { if (value < settings.get(tempMinQueueSizeSetting)) { diff --git a/server/src/main/java/org/elasticsearch/threadpool/ThreadPool.java b/server/src/main/java/org/elasticsearch/threadpool/ThreadPool.java index bae801492e14f..e4eaf20725b63 100644 --- a/server/src/main/java/org/elasticsearch/threadpool/ThreadPool.java +++ b/server/src/main/java/org/elasticsearch/threadpool/ThreadPool.java @@ -189,9 +189,11 @@ public ThreadPool(final Settings settings, final ExecutorBuilder<?>... customBui builders.put(Names.REFRESH, new ScalingExecutorBuilder(Names.REFRESH, 1, halfProcMaxAt10, TimeValue.timeValueMinutes(5))); builders.put(Names.WARMER, new ScalingExecutorBuilder(Names.WARMER, 1, halfProcMaxAt5, TimeValue.timeValueMinutes(5))); builders.put(Names.SNAPSHOT, new ScalingExecutorBuilder(Names.SNAPSHOT, 1, halfProcMaxAt5, TimeValue.timeValueMinutes(5))); - builders.put(Names.FETCH_SHARD_STARTED, new ScalingExecutorBuilder(Names.FETCH_SHARD_STARTED, 1, 2 * availableProcessors, TimeValue.timeValueMinutes(5))); + builders.put(Names.FETCH_SHARD_STARTED, + new ScalingExecutorBuilder(Names.FETCH_SHARD_STARTED, 1, 2 * availableProcessors, TimeValue.timeValueMinutes(5))); builders.put(Names.FORCE_MERGE, new FixedExecutorBuilder(settings, Names.FORCE_MERGE, 1, -1)); - builders.put(Names.FETCH_SHARD_STORE, new ScalingExecutorBuilder(Names.FETCH_SHARD_STORE, 1, 2 * availableProcessors, TimeValue.timeValueMinutes(5))); + builders.put(Names.FETCH_SHARD_STORE, + new ScalingExecutorBuilder(Names.FETCH_SHARD_STORE, 1, 2 * availableProcessors, TimeValue.timeValueMinutes(5))); for (final ExecutorBuilder<?> builder : customBuilders) { if (builders.containsKey(builder.name())) { throw new IllegalArgumentException("builder with name [" + builder.name() + "] already exists"); @@ -335,9 +337,9 @@ public ExecutorService executor(String name) { * it to this method. * * @param delay delay before the task executes - * @param executor the name of the thread pool on which to execute this task. SAME means "execute on the scheduler thread" which changes the - * meaning of the ScheduledFuture returned by this method. In that case the ScheduledFuture will complete only when the command - * completes. + * @param executor the name of the thread pool on which to execute this task. SAME means "execute on the scheduler thread" which changes + * the meaning of the ScheduledFuture returned by this method. In that case the ScheduledFuture will complete only when the + * command completes. * @param command the command to run * @return a ScheduledFuture whose get will return when the task has been added to its target thread pool and throw an exception if * the task is canceled before it was added to its target thread pool. Once the task has been added to its target thread pool
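The rewrapped javadoc above is the only prose description of ThreadPool.schedule's executor semantics, so a brief sketch of a call may help; the pool name, delay, and task body below are illustrative assumptions, not part of the patch:

    // Runs the command on the generic pool after a ten-second delay. Passing Names.SAME
    // instead would run the command on the scheduler thread itself, and the returned
    // future would then complete only once the command has finished.
    ScheduledFuture<?> future = threadPool.schedule(TimeValue.timeValueSeconds(10),
        ThreadPool.Names.GENERIC, () -> logger.debug("deferred cleanup"));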
diff --git a/server/src/main/java/org/elasticsearch/transport/RemoteClusterAware.java b/server/src/main/java/org/elasticsearch/transport/RemoteClusterAware.java index 237e73e572ae3..9b9243b612b74 100644 --- a/server/src/main/java/org/elasticsearch/transport/RemoteClusterAware.java +++ b/server/src/main/java/org/elasticsearch/transport/RemoteClusterAware.java @@ -121,7 +121,6 @@ public String getKey(final String key) { if (Strings.hasLength(s)) { parsePort(s); } - return s; }, Setting.Property.Deprecated, Setting.Property.Dynamic, @@ -346,7 +345,7 @@ private static int indexOfPortSeparator(String remoteHost) { } public static String buildRemoteIndexName(String clusterAlias, String indexName) { - return clusterAlias != null ? clusterAlias + REMOTE_CLUSTER_INDEX_SEPARATOR + indexName : indexName; + return clusterAlias == null || LOCAL_CLUSTER_GROUP_KEY.equals(clusterAlias) + ? indexName : clusterAlias + REMOTE_CLUSTER_INDEX_SEPARATOR + indexName; } - } diff --git a/server/src/main/java/org/elasticsearch/transport/RemoteClusterService.java b/server/src/main/java/org/elasticsearch/transport/RemoteClusterService.java index cb802f13fdb50..ff9dea8fe45a7 100644 --- a/server/src/main/java/org/elasticsearch/transport/RemoteClusterService.java +++ b/server/src/main/java/org/elasticsearch/transport/RemoteClusterService.java @@ -174,13 +174,13 @@ public String getKey(final String key) { public static final Setting.AffixSetting<TimeValue> REMOTE_CLUSTER_PING_SCHEDULE = Setting.affixKeySetting( "cluster.remote.", "transport.ping_schedule", - key -> timeSetting(key, TransportSettings.PING_SCHEDULE, Setting.Property.NodeScope), + key -> timeSetting(key, TransportSettings.PING_SCHEDULE, Setting.Property.Dynamic, Setting.Property.NodeScope), REMOTE_CLUSTERS_SEEDS); public static final Setting.AffixSetting<Boolean> REMOTE_CLUSTER_COMPRESS = Setting.affixKeySetting( "cluster.remote.", "transport.compress", - key -> boolSetting(key, TransportSettings.TRANSPORT_COMPRESS, Setting.Property.NodeScope), + key -> boolSetting(key, TransportSettings.TRANSPORT_COMPRESS, Setting.Property.Dynamic, Setting.Property.NodeScope), REMOTE_CLUSTERS_SEEDS); private static final Predicate<DiscoveryNode> DEFAULT_NODE_PREDICATE = (node) -> Version.CURRENT.isCompatible(node.getVersion()) diff --git a/server/src/test/java/org/apache/lucene/queries/SpanMatchNoDocsQueryTests.java b/server/src/test/java/org/apache/lucene/queries/SpanMatchNoDocsQueryTests.java index 6187fc1f7f6d9..c0891b4c755a0 100644 --- a/server/src/test/java/org/apache/lucene/queries/SpanMatchNoDocsQueryTests.java +++ b/server/src/test/java/org/apache/lucene/queries/SpanMatchNoDocsQueryTests.java @@ -60,7 +60,7 @@ public void testQuery() throws Exception { IndexReader ir = DirectoryReader.open(iw); IndexSearcher searcher = new IndexSearcher(ir); - Query query = new SpanMatchNoDocsQuery("unkwown", "field not found"); + Query query = new SpanMatchNoDocsQuery("unknown", "field not found"); assertEquals(searcher.count(query), 0); ScoreDoc[] hits; diff --git
a/server/src/test/java/org/elasticsearch/action/ActionListenerTests.java b/server/src/test/java/org/elasticsearch/action/ActionListenerTests.java index 6414c81058bec..1d4d83457b20e 100644 --- a/server/src/test/java/org/elasticsearch/action/ActionListenerTests.java +++ b/server/src/test/java/org/elasticsearch/action/ActionListenerTests.java @@ -23,9 +23,12 @@ import java.util.ArrayList; import java.util.List; +import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicReference; +import static org.hamcrest.Matchers.equalTo; + public class ActionListenerTests extends ESTestCase { public void testWrap() { @@ -148,4 +151,54 @@ public void testOnFailure() { assertEquals("listener index " + i, "booom", excList.get(i).get().getMessage()); } } + + public void testRunAfter() { + { + AtomicBoolean afterSuccess = new AtomicBoolean(); + ActionListener listener = ActionListener.runAfter(ActionListener.wrap(r -> {}, e -> {}), () -> afterSuccess.set(true)); + listener.onResponse(null); + assertThat(afterSuccess.get(), equalTo(true)); + } + { + AtomicBoolean afterFailure = new AtomicBoolean(); + ActionListener listener = ActionListener.runAfter(ActionListener.wrap(r -> {}, e -> {}), () -> afterFailure.set(true)); + listener.onFailure(null); + assertThat(afterFailure.get(), equalTo(true)); + } + } + + public void testNotifyOnce() { + AtomicInteger onResponseTimes = new AtomicInteger(); + AtomicInteger onFailureTimes = new AtomicInteger(); + ActionListener listener = ActionListener.notifyOnce(new ActionListener() { + @Override + public void onResponse(Object o) { + onResponseTimes.getAndIncrement(); + } + @Override + public void onFailure(Exception e) { + onFailureTimes.getAndIncrement(); + } + }); + boolean success = randomBoolean(); + if (success) { + listener.onResponse(null); + } else { + listener.onFailure(new RuntimeException("test")); + } + for (int iters = between(0, 10), i = 0; i < iters; i++) { + if (randomBoolean()) { + listener.onResponse(null); + } else { + listener.onFailure(new RuntimeException("test")); + } + } + if (success) { + assertThat(onResponseTimes.get(), equalTo(1)); + assertThat(onFailureTimes.get(), equalTo(0)); + } else { + assertThat(onResponseTimes.get(), equalTo(0)); + assertThat(onFailureTimes.get(), equalTo(1)); + } + } } diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/bootstrap/GetDiscoveredNodesRequestTests.java b/server/src/test/java/org/elasticsearch/action/admin/cluster/bootstrap/GetDiscoveredNodesRequestTests.java index f15e0af1740f6..676e2e958cac7 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/cluster/bootstrap/GetDiscoveredNodesRequestTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/bootstrap/GetDiscoveredNodesRequestTests.java @@ -31,20 +31,6 @@ public class GetDiscoveredNodesRequestTests extends ESTestCase { - public void testWaitForNodesValidation() { - final GetDiscoveredNodesRequest getDiscoveredNodesRequest = new GetDiscoveredNodesRequest(); - assertThat("default value is 1", getDiscoveredNodesRequest.getWaitForNodes(), is(1)); - - final int newWaitForNodes = randomIntBetween(1, 10); - getDiscoveredNodesRequest.setWaitForNodes(newWaitForNodes); - assertThat("value updated", getDiscoveredNodesRequest.getWaitForNodes(), is(newWaitForNodes)); - - final IllegalArgumentException exception - = expectThrows(IllegalArgumentException.class, () -> getDiscoveredNodesRequest.setWaitForNodes(randomIntBetween(-10, 0))); - 
assertThat(exception.getMessage(), startsWith("always finds at least one node, waiting for ")); - assertThat(exception.getMessage(), endsWith(" is not allowed")); - } - public void testTimeoutValidation() { final GetDiscoveredNodesRequest getDiscoveredNodesRequest = new GetDiscoveredNodesRequest(); assertThat("default value is 30s", getDiscoveredNodesRequest.getTimeout(), is(TimeValue.timeValueSeconds(30))); @@ -65,10 +51,6 @@ public void testTimeoutValidation() { public void testSerialization() throws IOException { final GetDiscoveredNodesRequest originalRequest = new GetDiscoveredNodesRequest(); - if (randomBoolean()) { - originalRequest.setWaitForNodes(randomIntBetween(1, 10)); - } - if (randomBoolean()) { originalRequest.setTimeout(TimeValue.parseTimeValue(randomTimeValue(), "timeout")); } else if (randomBoolean()) { @@ -77,7 +59,6 @@ public void testSerialization() throws IOException { final GetDiscoveredNodesRequest deserialized = copyWriteable(originalRequest, writableRegistry(), GetDiscoveredNodesRequest::new); - assertThat(deserialized.getWaitForNodes(), equalTo(originalRequest.getWaitForNodes())); assertThat(deserialized.getTimeout(), equalTo(originalRequest.getTimeout())); } } diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/bootstrap/TransportBootstrapClusterActionTests.java b/server/src/test/java/org/elasticsearch/action/admin/cluster/bootstrap/TransportBootstrapClusterActionTests.java index cf814482f6da0..31486a52bd08f 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/cluster/bootstrap/TransportBootstrapClusterActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/bootstrap/TransportBootstrapClusterActionTests.java @@ -47,6 +47,7 @@ import org.junit.BeforeClass; import java.io.IOException; +import java.util.Collections; import java.util.Random; import java.util.concurrent.CountDownLatch; import java.util.concurrent.TimeUnit; @@ -96,7 +97,7 @@ public void setupTest() { ESAllocationTestCase.createAllocationService(Settings.EMPTY), new MasterService("local", Settings.EMPTY, threadPool), () -> new InMemoryPersistedState(0, ClusterState.builder(new ClusterName("cluster")).build()), r -> emptyList(), - new NoOpClusterApplier(), new Random(random().nextLong())); + new NoOpClusterApplier(), Collections.emptyList(), new Random(random().nextLong())); } public void testHandlesNonstandardDiscoveryImplementation() throws InterruptedException { diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/bootstrap/TransportGetDiscoveredNodesActionTests.java b/server/src/test/java/org/elasticsearch/action/admin/cluster/bootstrap/TransportGetDiscoveredNodesActionTests.java index add52a1eedc96..6d94dcf6eca14 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/cluster/bootstrap/TransportGetDiscoveredNodesActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/bootstrap/TransportGetDiscoveredNodesActionTests.java @@ -72,6 +72,7 @@ import static java.util.Collections.emptyMap; import static java.util.Collections.emptySet; import static java.util.Collections.singleton; +import static java.util.Collections.singletonList; import static org.elasticsearch.cluster.ClusterName.CLUSTER_NAME_SETTING; import static org.elasticsearch.discovery.PeerFinder.REQUEST_PEERS_ACTION_NAME; import static org.elasticsearch.transport.TransportService.HANDSHAKE_ACTION_NAME; @@ -132,7 +133,7 @@ protected void onSendRequest(long requestId, String action, TransportRequest req 
ESAllocationTestCase.createAllocationService(settings), new MasterService("local", settings, threadPool), () -> new InMemoryPersistedState(0, ClusterState.builder(new ClusterName(clusterName)).build()), r -> emptyList(), - new NoOpClusterApplier(), new Random(random().nextLong())); + new NoOpClusterApplier(), Collections.emptyList(), new Random(random().nextLong())); } public void testHandlesNonstandardDiscoveryImplementation() throws InterruptedException { @@ -204,8 +205,8 @@ public void testFailsQuicklyWithZeroTimeoutAndAcceptsNullTimeout() throws Interr { final GetDiscoveredNodesRequest getDiscoveredNodesRequest = new GetDiscoveredNodesRequest(); - getDiscoveredNodesRequest.setWaitForNodes(2); getDiscoveredNodesRequest.setTimeout(null); + getDiscoveredNodesRequest.setRequiredNodes(singletonList("not-a-node")); transportService.sendRequest(localNode, GetDiscoveredNodesAction.NAME, getDiscoveredNodesRequest, new ResponseHandler() { @Override public void handleResponse(GetDiscoveredNodesResponse response) { @@ -221,8 +222,8 @@ public void handleException(TransportException exp) { { final GetDiscoveredNodesRequest getDiscoveredNodesRequest = new GetDiscoveredNodesRequest(); - getDiscoveredNodesRequest.setWaitForNodes(2); getDiscoveredNodesRequest.setTimeout(TimeValue.ZERO); + getDiscoveredNodesRequest.setRequiredNodes(singletonList("not-a-node")); final CountDownLatch countDownLatch = new CountDownLatch(1); transportService.sendRequest(localNode, GetDiscoveredNodesAction.NAME, getDiscoveredNodesRequest, new ResponseHandler() { @@ -254,7 +255,6 @@ public void testFailsIfAlreadyBootstrapped() throws InterruptedException { final CountDownLatch countDownLatch = new CountDownLatch(1); final GetDiscoveredNodesRequest getDiscoveredNodesRequest = new GetDiscoveredNodesRequest(); - getDiscoveredNodesRequest.setWaitForNodes(2); getDiscoveredNodesRequest.setTimeout(null); transportService.sendRequest(localNode, GetDiscoveredNodesAction.NAME, getDiscoveredNodesRequest, new ResponseHandler() { @Override @@ -283,8 +283,8 @@ public void testFailsIfAcceptsClusterStateWithNonemptyConfiguration() throws Int final CountDownLatch countDownLatch = new CountDownLatch(1); final GetDiscoveredNodesRequest getDiscoveredNodesRequest = new GetDiscoveredNodesRequest(); - getDiscoveredNodesRequest.setWaitForNodes(3); getDiscoveredNodesRequest.setTimeout(null); + getDiscoveredNodesRequest.setRequiredNodes(singletonList("not-a-node")); transportService.sendRequest(localNode, GetDiscoveredNodesAction.NAME, getDiscoveredNodesRequest, new ResponseHandler() { @Override public void handleResponse(GetDiscoveredNodesResponse response) { @@ -342,7 +342,6 @@ public PublishWithJoinResponse read(StreamInput in) throws IOException { public void testGetsDiscoveredNodesWithZeroTimeout() throws InterruptedException { setupGetDiscoveredNodesAction(); final GetDiscoveredNodesRequest getDiscoveredNodesRequest = new GetDiscoveredNodesRequest(); - getDiscoveredNodesRequest.setWaitForNodes(2); getDiscoveredNodesRequest.setTimeout(TimeValue.ZERO); assertWaitConditionMet(getDiscoveredNodesRequest); } @@ -377,7 +376,6 @@ public void testGetsDiscoveredNodesDuplicateName() throws InterruptedException { final GetDiscoveredNodesRequest getDiscoveredNodesRequest = new GetDiscoveredNodesRequest(); String name = localNode.getName(); getDiscoveredNodesRequest.setRequiredNodes(Arrays.asList(name, name)); - getDiscoveredNodesRequest.setWaitForNodes(1); getDiscoveredNodesRequest.setTimeout(TimeValue.ZERO); 
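    // setWaitForNodes(int) is gone: the wait condition is now expressed entirely through
    // the required node names, and a request whose names match the same node more than
    // once is rejected, as asserted below. Hypothetical use of the remaining API (the
    // node names here are invented for illustration):
    //   request.setRequiredNodes(Arrays.asList("master-a", "master-b"));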
assertWaitConditionFailedOnDuplicate(getDiscoveredNodesRequest, "[" + localNode + "] matches [" + name + ", " + name + ']'); } @@ -396,7 +394,6 @@ public void testGetsDiscoveredNodesTimeoutOnMissing() throws InterruptedExceptio final CountDownLatch latch = new CountDownLatch(1); final GetDiscoveredNodesRequest getDiscoveredNodesRequest = new GetDiscoveredNodesRequest(); getDiscoveredNodesRequest.setRequiredNodes(Arrays.asList(localNode.getAddress().toString(), "_missing")); - getDiscoveredNodesRequest.setWaitForNodes(1); getDiscoveredNodesRequest.setTimeout(TimeValue.ZERO); transportService.sendRequest(localNode, GetDiscoveredNodesAction.NAME, getDiscoveredNodesRequest, new ResponseHandler() { @Override @@ -423,8 +420,7 @@ public void testThrowsExceptionIfDuplicateDiscoveredLater() throws InterruptedEx final GetDiscoveredNodesRequest getDiscoveredNodesRequest = new GetDiscoveredNodesRequest(); final String ip = localNode.getAddress().getAddress(); - getDiscoveredNodesRequest.setRequiredNodes(Collections.singletonList(ip)); - getDiscoveredNodesRequest.setWaitForNodes(2); + getDiscoveredNodesRequest.setRequiredNodes(Arrays.asList(ip, "not-a-node")); final CountDownLatch countDownLatch = new CountDownLatch(1); transportService.sendRequest(localNode, GetDiscoveredNodesAction.NAME, getDiscoveredNodesRequest, new ResponseHandler() { @@ -480,7 +476,7 @@ private void setupGetDiscoveredNodesAction() throws InterruptedException { executeRequestPeersAction(); final GetDiscoveredNodesRequest getDiscoveredNodesRequest = new GetDiscoveredNodesRequest(); - getDiscoveredNodesRequest.setWaitForNodes(2); + getDiscoveredNodesRequest.setRequiredNodes(Arrays.asList(localNode.getName(), otherNode.getName())); assertWaitConditionMet(getDiscoveredNodesRequest); } diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TaskManagerTestCase.java b/server/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TaskManagerTestCase.java index 59292ba077f9b..ecca51c7bbb83 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TaskManagerTestCase.java +++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TaskManagerTestCase.java @@ -51,8 +51,7 @@ import org.elasticsearch.transport.TransportService; import org.elasticsearch.transport.nio.MockNioTransport; import org.junit.After; -import org.junit.AfterClass; -import org.junit.BeforeClass; +import org.junit.Before; import java.io.IOException; import java.util.Collections; @@ -73,22 +72,15 @@ */ public abstract class TaskManagerTestCase extends ESTestCase { - protected static ThreadPool threadPool; - public static final Settings CLUSTER_SETTINGS = Settings.builder().put("cluster.name", "test-cluster").build(); + protected ThreadPool threadPool; protected TestNode[] testNodes; protected int nodesCount; - @BeforeClass - public static void beforeClass() { + @Before + public void setupThreadPool() { threadPool = new TestThreadPool(TransportTasksActionTests.class.getSimpleName()); } - @AfterClass - public static void afterClass() { - ThreadPool.terminate(threadPool, 30, TimeUnit.SECONDS); - threadPool = null; - } - public void setupTestNodes(Settings settings) { nodesCount = randomIntBetween(2, 10); testNodes = new TestNode[nodesCount]; @@ -102,6 +94,8 @@ public final void shutdownTestNodes() throws Exception { for (TestNode testNode : testNodes) { testNode.close(); } + ThreadPool.terminate(threadPool, 30, TimeUnit.SECONDS); + threadPool = null; } diff --git 
a/server/src/test/java/org/elasticsearch/action/admin/cluster/settings/SettingsUpdaterTests.java b/server/src/test/java/org/elasticsearch/action/admin/cluster/settings/SettingsUpdaterTests.java index c345e34d20c3a..6786a630d86ab 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/cluster/settings/SettingsUpdaterTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/settings/SettingsUpdaterTests.java @@ -29,12 +29,16 @@ import org.elasticsearch.test.ESTestCase; import java.util.ArrayList; +import java.util.HashSet; +import java.util.Iterator; import java.util.List; +import java.util.Map; import java.util.Set; import java.util.concurrent.atomic.AtomicReference; import java.util.stream.Collectors; import java.util.stream.Stream; +import static java.util.Arrays.asList; import static org.elasticsearch.common.settings.AbstractScopedSettings.ARCHIVED_SETTINGS_PREFIX; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasItem; @@ -221,24 +225,11 @@ public void testUpdateWithUnknownAndSettings() { // these are invalid settings that exist as either persistent or transient settings final int numberOfInvalidSettings = randomIntBetween(0, 7); - final List<Setting<String>> invalidSettings = new ArrayList<>(numberOfInvalidSettings); - for (int i = 0; i < numberOfInvalidSettings; i++) { - final Setting<String> invalidSetting = Setting.simpleString( - "invalid.setting" + i, - (value, settings) -> { - throw new IllegalArgumentException("invalid"); - }, - Property.NodeScope); - invalidSettings.add(invalidSetting); - } + final List<Setting<String>> invalidSettings = invalidSettings(numberOfInvalidSettings); // these are unknown settings that exist as either persistent or transient settings final int numberOfUnknownSettings = randomIntBetween(0, 7); - final List<Setting<String>> unknownSettings = new ArrayList<>(numberOfUnknownSettings); - for (int i = 0; i < numberOfUnknownSettings; i++) { - final Setting<String> unknownSetting = Setting.simpleString("unknown.setting" + i, Property.NodeScope); - unknownSettings.add(unknownSetting); - } + final List<Setting<String>> unknownSettings = unknownSettings(numberOfUnknownSettings); final Settings.Builder existingPersistentSettings = Settings.builder(); final Settings.Builder existingTransientSettings = Settings.builder(); @@ -393,24 +384,11 @@ public void testRemovingArchivedSettingsDoesNotRemoveNonArchivedInvalidOrUnknown // these are invalid settings that exist as either persistent or transient settings final int numberOfInvalidSettings = randomIntBetween(0, 7); - final List<Setting<String>> invalidSettings = new ArrayList<>(numberOfInvalidSettings); - for (int i = 0; i < numberOfInvalidSettings; i++) { - final Setting<String> invalidSetting = Setting.simpleString( - "invalid.setting" + i, - (value, settings) -> { - throw new IllegalArgumentException("invalid"); - }, - Property.NodeScope); - invalidSettings.add(invalidSetting); - } + final List<Setting<String>> invalidSettings = invalidSettings(numberOfInvalidSettings); // these are unknown settings that exist as either persistent or transient settings final int numberOfUnknownSettings = randomIntBetween(0, 7); - final List<Setting<String>> unknownSettings = new ArrayList<>(numberOfUnknownSettings); - for (int i = 0; i < numberOfUnknownSettings; i++) { - final Setting<String> unknownSetting = Setting.simpleString("unknown.setting" + i, Property.NodeScope); - unknownSettings.add(unknownSetting); - } + final List<Setting<String>> unknownSettings = unknownSettings(numberOfUnknownSettings); final Settings.Builder existingPersistentSettings = Settings.builder(); final Settings.Builder existingTransientSettings = Settings.builder(); @@ -511,4 +489,120 @@ public void testRemovingArchivedSettingsDoesNotRemoveNonArchivedInvalidOrUnknown } } + private static List<Setting<String>> unknownSettings(int numberOfUnknownSettings) { + final List<Setting<String>> unknownSettings = new ArrayList<>(numberOfUnknownSettings); + for (int i = 0; i < numberOfUnknownSettings; i++) { + unknownSettings.add(Setting.simpleString("unknown.setting" + i, Property.NodeScope)); + } + return unknownSettings; + } + + private static List<Setting<String>> invalidSettings(int numberOfInvalidSettings) { + final List<Setting<String>> invalidSettings = new ArrayList<>(numberOfInvalidSettings); + for (int i = 0; i < numberOfInvalidSettings; i++) { + invalidSettings.add(randomBoolean() ? invalidInIsolationSetting(i) : invalidWithDependenciesSetting(i)); + } + return invalidSettings; + } + + private static Setting<String> invalidInIsolationSetting(int index) { + return Setting.simpleString("invalid.setting" + index, + new Setting.Validator<String>() { + @Override + public void validate(String value) { + throw new IllegalArgumentException("Invalid in isolation setting"); + } + + @Override + public void validate(String value, Map<Setting<String>, String> settings) { + } + }, + Property.NodeScope); + } + + private static Setting<String> invalidWithDependenciesSetting(int index) { + return Setting.simpleString("invalid.setting" + index, + new Setting.Validator<String>() { + @Override + public void validate(String value) { + } + + @Override + public void validate(String value, Map<Setting<String>, String> settings) { + throw new IllegalArgumentException("Invalid with dependencies setting"); + } + }, + Property.NodeScope); + } + + private static class FooLowSettingValidator implements Setting.Validator<Integer> { + @Override + public void validate(Integer value) { + } + + @Override + public void validate(Integer low, Map<Setting<Integer>, Integer> settings) { + if (settings.containsKey(SETTING_FOO_HIGH) && low > settings.get(SETTING_FOO_HIGH)) { + throw new IllegalArgumentException("[low]=" + low + " is higher than [high]=" + settings.get(SETTING_FOO_HIGH)); + } + } + + @Override + public Iterator<Setting<?>> settings() { + return asList(SETTING_FOO_LOW, SETTING_FOO_HIGH).iterator(); + } + } + + private static class FooHighSettingValidator implements Setting.Validator<Integer> { + @Override + public void validate(Integer value) { + } + + @Override + public void validate(Integer high, Map<Setting<Integer>, Integer> settings) { + if (settings.containsKey(SETTING_FOO_LOW) && high < settings.get(SETTING_FOO_LOW)) { + throw new IllegalArgumentException("[high]=" + high + " is lower than [low]=" + settings.get(SETTING_FOO_LOW)); + } + } + + @Override + public Iterator<Setting<?>> settings() { + return asList(SETTING_FOO_LOW, SETTING_FOO_HIGH).iterator(); + } + } + + private static final Setting<Integer> SETTING_FOO_LOW = new Setting<>("foo.low", "10", + Integer::valueOf, new FooLowSettingValidator(), Property.Dynamic, Setting.Property.NodeScope); + private static final Setting<Integer> SETTING_FOO_HIGH = new Setting<>("foo.high", "100", + Integer::valueOf, new FooHighSettingValidator(), Property.Dynamic, Setting.Property.NodeScope); +
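    // The two validators are symmetric on purpose: each declares both settings in
    // settings(), so the settings machinery passes the candidate values of both keys into
    // validate(value, settings) and the low/high bound is checked against the state after
    // the whole update is applied. A minimal update that satisfies both validators might
    // look like this, mirroring the calls in the test below (values illustrative only):
    //   updater.updateSettings(state, Settings.builder()
    //       .put(SETTING_FOO_LOW.getKey(), 10)
    //       .put(SETTING_FOO_HIGH.getKey(), 100)
    //       .build(), Settings.EMPTY, logger);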
+ public void testUpdateOfValidationDependentSettings() { + final ClusterSettings settings = new ClusterSettings(Settings.EMPTY, new HashSet<>(asList(SETTING_FOO_LOW, SETTING_FOO_HIGH))); + final SettingsUpdater updater = new SettingsUpdater(settings); + final MetaData.Builder metaData = MetaData.builder().persistentSettings(Settings.EMPTY).transientSettings(Settings.EMPTY); + + ClusterState cluster = ClusterState.builder(new ClusterName("cluster")).metaData(metaData).build(); + + cluster = updater.updateSettings(cluster,
Settings.builder().put(SETTING_FOO_LOW.getKey(), 20).build(), Settings.EMPTY, logger); + assertThat(cluster.getMetaData().settings().get(SETTING_FOO_LOW.getKey()), equalTo("20")); + + cluster = updater.updateSettings(cluster, Settings.builder().put(SETTING_FOO_HIGH.getKey(), 40).build(), Settings.EMPTY, logger); + assertThat(cluster.getMetaData().settings().get(SETTING_FOO_LOW.getKey()), equalTo("20")); + assertThat(cluster.getMetaData().settings().get(SETTING_FOO_HIGH.getKey()), equalTo("40")); + + cluster = updater.updateSettings(cluster, Settings.builder().put(SETTING_FOO_LOW.getKey(), 5).build(), Settings.EMPTY, logger); + assertThat(cluster.getMetaData().settings().get(SETTING_FOO_LOW.getKey()), equalTo("5")); + assertThat(cluster.getMetaData().settings().get(SETTING_FOO_HIGH.getKey()), equalTo("40")); + + cluster = updater.updateSettings(cluster, Settings.builder().put(SETTING_FOO_HIGH.getKey(), 8).build(), Settings.EMPTY, logger); + assertThat(cluster.getMetaData().settings().get(SETTING_FOO_LOW.getKey()), equalTo("5")); + assertThat(cluster.getMetaData().settings().get(SETTING_FOO_HIGH.getKey()), equalTo("8")); + + final ClusterState finalCluster = cluster; + Exception exception = expectThrows(IllegalArgumentException.class, () -> + updater.updateSettings(finalCluster, Settings.builder().put(SETTING_FOO_HIGH.getKey(), 2).build(), Settings.EMPTY, logger)); + + assertThat(exception.getMessage(), equalTo("[high]=2 is lower than [low]=5")); + } + } diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/close/TransportVerifyShardBeforeCloseActionTests.java b/server/src/test/java/org/elasticsearch/action/admin/indices/close/TransportVerifyShardBeforeCloseActionTests.java new file mode 100644 index 0000000000000..6fc744db2f3f3 --- /dev/null +++ b/server/src/test/java/org/elasticsearch/action/admin/indices/close/TransportVerifyShardBeforeCloseActionTests.java @@ -0,0 +1,330 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +package org.elasticsearch.action.admin.indices.close; + +import org.apache.lucene.util.SetOnce; +import org.elasticsearch.action.admin.indices.flush.FlushRequest; +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.PlainActionFuture; +import org.elasticsearch.action.support.replication.ReplicationOperation; +import org.elasticsearch.action.support.replication.ReplicationResponse; +import org.elasticsearch.action.support.replication.TransportReplicationAction; +import org.elasticsearch.action.support.replication.TransportReplicationAction.ConcreteShardRequest; +import org.elasticsearch.cluster.ClusterName; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.action.shard.ShardStateAction; +import org.elasticsearch.cluster.block.ClusterBlock; +import org.elasticsearch.cluster.block.ClusterBlocks; +import org.elasticsearch.cluster.metadata.IndexMetaData; +import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; +import org.elasticsearch.cluster.metadata.MetaDataIndexStateService; +import org.elasticsearch.cluster.routing.IndexShardRoutingTable; +import org.elasticsearch.cluster.routing.ShardRouting; +import org.elasticsearch.cluster.routing.ShardRoutingState; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.index.seqno.SeqNoStats; +import org.elasticsearch.index.seqno.SequenceNumbers; +import org.elasticsearch.index.shard.IndexShard; +import org.elasticsearch.index.shard.ReplicationGroup; +import org.elasticsearch.index.shard.ShardId; +import org.elasticsearch.indices.IndicesService; +import org.elasticsearch.tasks.TaskId; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.test.transport.CapturingTransport; +import org.elasticsearch.threadpool.TestThreadPool; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.TransportResponse; +import org.elasticsearch.transport.TransportService; +import org.junit.After; +import org.junit.AfterClass; +import org.junit.Before; +import org.junit.BeforeClass; + +import java.util.Collections; +import java.util.List; +import java.util.Set; +import java.util.concurrent.TimeUnit; + +import static org.elasticsearch.action.support.replication.ClusterStateCreationUtils.state; +import static org.elasticsearch.test.ClusterServiceUtils.createClusterService; +import static org.elasticsearch.test.ClusterServiceUtils.setState; +import static org.hamcrest.Matchers.arrayWithSize; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.greaterThan; +import static org.hamcrest.Matchers.instanceOf; +import static org.mockito.Matchers.any; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; + +public class TransportVerifyShardBeforeCloseActionTests extends ESTestCase { + + private static ThreadPool threadPool; + + private IndexShard indexShard; + private TransportVerifyShardBeforeCloseAction action; + private ClusterService clusterService; + private ClusterBlock clusterBlock; + private CapturingTransport transport; + + @BeforeClass + public static void beforeClass() { + threadPool = new TestThreadPool(getTestClass().getName()); + } + + @Override + @Before + public void setUp() throws Exception { + super.setUp(); + + indexShard = mock(IndexShard.class); + when(indexShard.getActiveOperationsCount()).thenReturn(0); 
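    // Together with the zero active-operations stub above, the stubs below model a shard
    // that is safe to close: no in-flight operations and a global checkpoint equal to the
    // maximum sequence number, which are exactly the invariants the action verifies. The
    // tests further down override one stub at a time to exercise each failure path.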
+ when(indexShard.getGlobalCheckpoint()).thenReturn(0L); + when(indexShard.seqNoStats()).thenReturn(new SeqNoStats(0L, 0L, 0L)); + + final ShardId shardId = new ShardId("index", "_na_", randomIntBetween(0, 3)); + when(indexShard.shardId()).thenReturn(shardId); + + clusterService = createClusterService(threadPool); + + clusterBlock = MetaDataIndexStateService.createIndexClosingBlock(); + setState(clusterService, new ClusterState.Builder(clusterService.state()) + .blocks(ClusterBlocks.builder().blocks(clusterService.state().blocks()).addIndexBlock("index", clusterBlock).build()).build()); + + transport = new CapturingTransport(); + TransportService transportService = transport.createTransportService(Settings.EMPTY, threadPool, + TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> clusterService.localNode(), null, Collections.emptySet()); + transportService.start(); + transportService.acceptIncomingRequests(); + + ShardStateAction shardStateAction = new ShardStateAction(clusterService, transportService, null, null, threadPool); + action = new TransportVerifyShardBeforeCloseAction(Settings.EMPTY, transportService, clusterService, mock(IndicesService.class), + mock(ThreadPool.class), shardStateAction, mock(ActionFilters.class), mock(IndexNameExpressionResolver.class)); + } + + @Override + @After + public void tearDown() throws Exception { + super.tearDown(); + clusterService.close(); + } + + @AfterClass + public static void afterClass() { + ThreadPool.terminate(threadPool, 30, TimeUnit.SECONDS); + threadPool = null; + } + + private void executeOnPrimaryOrReplica() throws Exception { + final TaskId taskId = new TaskId("_node_id", randomNonNegativeLong()); + final TransportVerifyShardBeforeCloseAction.ShardRequest request = + new TransportVerifyShardBeforeCloseAction.ShardRequest(indexShard.shardId(), clusterBlock, taskId); + if (randomBoolean()) { + assertNotNull(action.shardOperationOnPrimary(request, indexShard)); + } else { + assertNotNull(action.shardOperationOnReplica(request, indexShard)); + } + } + + public void testOperationSuccessful() throws Exception { + executeOnPrimaryOrReplica(); + verify(indexShard, times(1)).flush(any(FlushRequest.class)); + } + + public void testOperationFailsWithOnGoingOps() { + when(indexShard.getActiveOperationsCount()).thenReturn(randomIntBetween(1, 10)); + + IllegalStateException exception = expectThrows(IllegalStateException.class, this::executeOnPrimaryOrReplica); + assertThat(exception.getMessage(), + equalTo("On-going operations in progress while checking index shard " + indexShard.shardId() + " before closing")); + verify(indexShard, times(0)).flush(any(FlushRequest.class)); + } + + public void testOperationFailsWithNoBlock() { + setState(clusterService, new ClusterState.Builder(new ClusterName("test")).build()); + + IllegalStateException exception = expectThrows(IllegalStateException.class, this::executeOnPrimaryOrReplica); + assertThat(exception.getMessage(), + equalTo("Index shard " + indexShard.shardId() + " must be blocked by " + clusterBlock + " before closing")); + verify(indexShard, times(0)).flush(any(FlushRequest.class)); + } + + public void testOperationFailsWithGlobalCheckpointNotCaughtUp() { + final long maxSeqNo = randomLongBetween(SequenceNumbers.UNASSIGNED_SEQ_NO, Long.MAX_VALUE); + final long localCheckpoint = randomLongBetween(SequenceNumbers.UNASSIGNED_SEQ_NO, maxSeqNo); + final long globalCheckpoint = randomValueOtherThan(maxSeqNo, + () -> randomLongBetween(SequenceNumbers.UNASSIGNED_SEQ_NO, localCheckpoint)); +
when(indexShard.seqNoStats()).thenReturn(new SeqNoStats(maxSeqNo, localCheckpoint, globalCheckpoint)); + when(indexShard.getGlobalCheckpoint()).thenReturn(globalCheckpoint); + + IllegalStateException exception = expectThrows(IllegalStateException.class, this::executeOnPrimaryOrReplica); + assertThat(exception.getMessage(), equalTo("Global checkpoint [" + globalCheckpoint + "] mismatches maximum sequence number [" + + maxSeqNo + "] on index shard " + indexShard.shardId())); + verify(indexShard, times(0)).flush(any(FlushRequest.class)); + } + + public void testUnavailableShardsMarkedAsStale() throws Exception { + final String index = "test"; + final ShardId shardId = new ShardId(index, "_na_", 0); + + final int nbReplicas = randomIntBetween(1, 10); + final ShardRoutingState[] replicaStates = new ShardRoutingState[nbReplicas]; + for (int i = 0; i < replicaStates.length; i++) { + replicaStates[i] = ShardRoutingState.STARTED; + } + final ClusterState clusterState = state(index, true, ShardRoutingState.STARTED, replicaStates); + setState(clusterService, clusterState); + + IndexShardRoutingTable shardRoutingTable = clusterState.routingTable().index(index).shard(shardId.id()); + final IndexMetaData indexMetaData = clusterState.getMetaData().index(index); + final ShardRouting primaryRouting = shardRoutingTable.primaryShard(); + final long primaryTerm = indexMetaData.primaryTerm(0); + + final Set inSyncAllocationIds = indexMetaData.inSyncAllocationIds(0); + final Set trackedShards = shardRoutingTable.getAllAllocationIds(); + + List unavailableShards = randomSubsetOf(randomIntBetween(1, nbReplicas), shardRoutingTable.replicaShards()); + IndexShardRoutingTable.Builder shardRoutingTableBuilder = new IndexShardRoutingTable.Builder(shardRoutingTable); + unavailableShards.forEach(shardRoutingTableBuilder::removeShard); + shardRoutingTable = shardRoutingTableBuilder.build(); + + final ReplicationGroup replicationGroup = new ReplicationGroup(shardRoutingTable, inSyncAllocationIds, trackedShards); + assertThat(replicationGroup.getUnavailableInSyncShards().size(), greaterThan(0)); + + final PlainActionFuture listener = new PlainActionFuture<>(); + TaskId taskId = new TaskId(clusterService.localNode().getId(), 0L); + TransportVerifyShardBeforeCloseAction.ShardRequest request = + new TransportVerifyShardBeforeCloseAction.ShardRequest(shardId, clusterBlock, taskId); + ReplicationOperation.Replicas proxy = action.newReplicasProxy(primaryTerm); + ReplicationOperation operation = + new ReplicationOperation<>(request, createPrimary(primaryRouting, replicationGroup), listener, proxy, logger, "test"); + operation.execute(); + + final CapturingTransport.CapturedRequest[] capturedRequests = transport.getCapturedRequestsAndClear(); + assertThat(capturedRequests.length, equalTo(nbReplicas)); + + for (CapturingTransport.CapturedRequest capturedRequest : capturedRequests) { + final String actionName = capturedRequest.action; + if (actionName.startsWith(ShardStateAction.SHARD_FAILED_ACTION_NAME)) { + assertThat(capturedRequest.request, instanceOf(ShardStateAction.FailedShardEntry.class)); + String allocationId = ((ShardStateAction.FailedShardEntry) capturedRequest.request).getAllocationId(); + assertTrue(unavailableShards.stream().anyMatch(shardRouting -> shardRouting.allocationId().getId().equals(allocationId))); + transport.handleResponse(capturedRequest.requestId, TransportResponse.Empty.INSTANCE); + + } else if (actionName.startsWith(TransportVerifyShardBeforeCloseAction.NAME)) { + assertThat(capturedRequest.request, 
instanceOf(ConcreteShardRequest.class)); + String allocationId = ((ConcreteShardRequest) capturedRequest.request).getTargetAllocationID(); + assertFalse(unavailableShards.stream().anyMatch(shardRouting -> shardRouting.allocationId().getId().equals(allocationId))); + assertTrue(inSyncAllocationIds.stream().anyMatch(inSyncAllocationId -> inSyncAllocationId.equals(allocationId))); + transport.handleResponse(capturedRequest.requestId, new TransportReplicationAction.ReplicaResponse(0L, 0L)); + + } else { + fail("Test does not support action " + capturedRequest.action); + } + } + + final ReplicationResponse.ShardInfo shardInfo = listener.get().getShardInfo(); + assertThat(shardInfo.getFailed(), equalTo(0)); + assertThat(shardInfo.getFailures(), arrayWithSize(0)); + assertThat(shardInfo.getSuccessful(), equalTo(1 + nbReplicas - unavailableShards.size())); + } + + private static ReplicationOperation.Primary< + TransportVerifyShardBeforeCloseAction.ShardRequest, + TransportVerifyShardBeforeCloseAction.ShardRequest, + PrimaryResult> + createPrimary(final ShardRouting primary, final ReplicationGroup replicationGroup) { + return new ReplicationOperation.Primary< + TransportVerifyShardBeforeCloseAction.ShardRequest, + TransportVerifyShardBeforeCloseAction.ShardRequest, + PrimaryResult>() { + @Override + public ShardRouting routingEntry() { + return primary; + } + + @Override + public ReplicationGroup getReplicationGroup() { + return replicationGroup; + } + + @Override + public PrimaryResult perform(TransportVerifyShardBeforeCloseAction.ShardRequest request) throws Exception { + return new PrimaryResult(request); + } + + @Override + public void failShard(String message, Exception exception) { + + } + + @Override + public void updateLocalCheckpointForShard(String allocationId, long checkpoint) { + } + + @Override + public void updateGlobalCheckpointForShard(String allocationId, long globalCheckpoint) { + } + + @Override + public long localCheckpoint() { + return 0; + } + + @Override + public long globalCheckpoint() { + return 0; + } + + @Override + public long maxSeqNoOfUpdatesOrDeletes() { + return 0; + } + }; + } + + private static class PrimaryResult implements ReplicationOperation.PrimaryResult { + + private final TransportVerifyShardBeforeCloseAction.ShardRequest replicaRequest; + private final SetOnce shardInfo; + + private PrimaryResult(final TransportVerifyShardBeforeCloseAction.ShardRequest replicaRequest) { + this.replicaRequest = replicaRequest; + this.shardInfo = new SetOnce<>(); + } + + @Override + public TransportVerifyShardBeforeCloseAction.ShardRequest replicaRequest() { + return replicaRequest; + } + + @Override + public void setShardInfo(ReplicationResponse.ShardInfo shardInfo) { + this.shardInfo.set(shardInfo); + } + + public ReplicationResponse.ShardInfo getShardInfo() { + return shardInfo.get(); + } + } + +} diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/create/CreateIndexIT.java b/server/src/test/java/org/elasticsearch/action/admin/indices/create/CreateIndexIT.java index 3253eb1dc1c88..05da57cc5da45 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/indices/create/CreateIndexIT.java +++ b/server/src/test/java/org/elasticsearch/action/admin/indices/create/CreateIndexIT.java @@ -24,17 +24,20 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.UnavailableShardsException; import org.elasticsearch.action.admin.cluster.state.ClusterStateResponse; +import 
org.elasticsearch.action.admin.indices.mapping.get.GetMappingsResponse; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.support.ActiveShardCount; import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.action.support.master.AcknowledgedResponse; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.IndexMetaData; +import org.elasticsearch.cluster.metadata.MappingMetaData; import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.common.collect.ImmutableOpenMap; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.gateway.MetaStateService; import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.index.query.RangeQueryBuilder; @@ -120,6 +123,56 @@ public void testDoubleAddMapping() throws Exception { } } + public void testNonNestedMappings() throws Exception { + assertAcked(prepareCreate("test") + .addMapping("_doc", XContentFactory.jsonBuilder().startObject() + .startObject("properties") + .startObject("date") + .field("type", "date") + .endObject() + .endObject() + .endObject())); + + GetMappingsResponse response = client().admin().indices().prepareGetMappings("test").get(); + + ImmutableOpenMap mappings = response.mappings().get("test"); + assertNotNull(mappings); + + MappingMetaData metadata = mappings.get("_doc"); + assertNotNull(metadata); + assertFalse(metadata.sourceAsMap().isEmpty()); + } + + public void testEmptyNestedMappings() throws Exception { + assertAcked(prepareCreate("test") + .addMapping("_doc", XContentFactory.jsonBuilder().startObject().endObject())); + + GetMappingsResponse response = client().admin().indices().prepareGetMappings("test").get(); + + ImmutableOpenMap mappings = response.mappings().get("test"); + assertNotNull(mappings); + + MappingMetaData metadata = mappings.get("_doc"); + assertNotNull(metadata); + assertTrue(metadata.sourceAsMap().isEmpty()); + } + + public void testEmptyMappings() throws Exception { + assertAcked(prepareCreate("test") + .addMapping("_doc", XContentFactory.jsonBuilder().startObject() + .startObject("_doc").endObject() + .endObject())); + + GetMappingsResponse response = client().admin().indices().prepareGetMappings("test").get(); + + ImmutableOpenMap mappings = response.mappings().get("test"); + assertNotNull(mappings); + + MappingMetaData metadata = mappings.get("_doc"); + assertNotNull(metadata); + assertTrue(metadata.sourceAsMap().isEmpty()); + } + public void testInvalidShardCountSettings() throws Exception { int value = randomIntBetween(-10, 0); try { diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/get/GetIndexResponseTests.java b/server/src/test/java/org/elasticsearch/action/admin/indices/get/GetIndexResponseTests.java index 3991442fd5b87..af3ab33e915db 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/indices/get/GetIndexResponseTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/indices/get/GetIndexResponseTests.java @@ -32,6 +32,7 @@ import org.elasticsearch.common.settings.IndexScopedSettings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.index.RandomCreateIndexGenerator; import 
org.elasticsearch.test.AbstractStreamableXContentTestCase; import org.junit.Assert; @@ -72,6 +73,10 @@ protected GetIndexResponse createBlankInstance() { @Override protected GetIndexResponse createTestInstance() { + return createTestInstance(randomBoolean()); + } + + private GetIndexResponse createTestInstance(boolean randomTypeName) { String[] indices = generateRandomStringArray(5, 5, false, false); ImmutableOpenMap.Builder> mappings = ImmutableOpenMap.builder(); ImmutableOpenMap.Builder> aliases = ImmutableOpenMap.builder(); @@ -80,7 +85,9 @@ protected GetIndexResponse createTestInstance() { IndexScopedSettings indexScopedSettings = IndexScopedSettings.DEFAULT_SCOPED_SETTINGS; boolean includeDefaults = randomBoolean(); for (String index: indices) { - mappings.put(index, GetMappingsResponseTests.createMappingsForIndex()); + // rarely have no types + int typeCount = rarely() ? 0 : 1; + mappings.put(index, GetMappingsResponseTests.createMappingsForIndex(typeCount, randomTypeName)); List aliasMetaDataList = new ArrayList<>(); int aliasesNum = randomIntBetween(0, 3); @@ -103,6 +110,12 @@ protected GetIndexResponse createTestInstance() { ); } + @Override + protected GetIndexResponse createXContextTestInstance(XContentType xContentType) { + // don't use random type names for XContent roundtrip tests because we cannot parse them back anymore + return createTestInstance(false); + } + @Override protected Predicate getRandomFieldsExcludeFilter() { //we do not want to add new fields at the root (index-level), or inside the blocks @@ -190,5 +203,4 @@ public void testCanOutput622Response() throws IOException { Assert.assertEquals(TEST_6_3_0_RESPONSE_BYTES, base64OfResponse); } - } diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/mapping/get/GetMappingsResponseTests.java b/server/src/test/java/org/elasticsearch/action/admin/indices/mapping/get/GetMappingsResponseTests.java index beae91df77e3d..481d05ea8cc07 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/indices/mapping/get/GetMappingsResponseTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/indices/mapping/get/GetMappingsResponseTests.java @@ -24,6 +24,7 @@ import org.elasticsearch.cluster.metadata.MappingMetaData; import org.elasticsearch.common.collect.ImmutableOpenMap; import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.test.AbstractStreamableXContentTestCase; import org.elasticsearch.test.EqualsHashCodeTestUtils; @@ -84,22 +85,30 @@ protected GetMappingsResponse mutateInstance(GetMappingsResponse instance) throw public static ImmutableOpenMap createMappingsForIndex() { // rarely have no types int typeCount = rarely() ? 
0 : scaledRandomIntBetween(1, 3); + return createMappingsForIndex(typeCount, true); + } + + public static ImmutableOpenMap createMappingsForIndex(int typeCount, boolean randomTypeName) { List typeMappings = new ArrayList<>(typeCount); for (int i = 0; i < typeCount; i++) { - Map mappings = new HashMap<>(); if (rarely() == false) { // rarely have no fields + Map mappings = new HashMap<>(); mappings.put("field-" + i, randomFieldMapping()); if (randomBoolean()) { mappings.put("field2-" + i, randomFieldMapping()); } - } - try { - MappingMetaData mmd = new MappingMetaData("type-" + randomAlphaOfLength(5), mappings); - typeMappings.add(mmd); - } catch (IOException e) { - fail("shouldn't have failed " + e); + try { + String typeName = MapperService.SINGLE_MAPPING_NAME; + if (randomTypeName) { + typeName = "type-" + randomAlphaOfLength(5); + } + MappingMetaData mmd = new MappingMetaData(typeName, mappings); + typeMappings.add(mmd); + } catch (IOException e) { + fail("shouldn't have failed " + e); + } } } ImmutableOpenMap.Builder typeBuilder = ImmutableOpenMap.builder(); diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/rollover/RolloverIT.java b/server/src/test/java/org/elasticsearch/action/admin/indices/rollover/RolloverIT.java index c75d8bb054f3a..6b8d1ab4fafb7 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/indices/rollover/RolloverIT.java +++ b/server/src/test/java/org/elasticsearch/action/admin/indices/rollover/RolloverIT.java @@ -272,6 +272,7 @@ public void testRolloverOnExistingIndex() throws Exception { } } + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/37037") public void testRolloverWithDateMath() { ZonedDateTime now = ZonedDateTime.now(ZoneOffset.UTC); assumeTrue("only works on the same day", now.plusMinutes(5).getDayOfYear() == now.getDayOfYear()); diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/rollover/TransportRolloverActionTests.java b/server/src/test/java/org/elasticsearch/action/admin/indices/rollover/TransportRolloverActionTests.java index cbc9499cda327..ec3c82ba70b2f 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/indices/rollover/TransportRolloverActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/indices/rollover/TransportRolloverActionTests.java @@ -165,7 +165,7 @@ public void testCreateUpdateAliasRequest() { assertEquals(sourceAlias, ((AliasAction.Remove) action).getAlias()); foundRemove = true; } else { - throw new AssertionError("Unknow index [" + action.getIndex() + "]"); + throw new AssertionError("Unknown index [" + action.getIndex() + "]"); } } assertTrue(foundAdd); @@ -195,7 +195,7 @@ public void testCreateUpdateAliasRequestWithExplicitWriteIndex() { assertFalse(addAction.writeIndex()); foundRemoveWrite = true; } else { - throw new AssertionError("Unknow index [" + action.getIndex() + "]"); + throw new AssertionError("Unknown index [" + action.getIndex() + "]"); } } assertTrue(foundAddWrite); diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/template/put/MetaDataIndexTemplateServiceTests.java b/server/src/test/java/org/elasticsearch/action/admin/indices/template/put/MetaDataIndexTemplateServiceTests.java index 6302766be9017..a929ee63c576e 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/indices/template/put/MetaDataIndexTemplateServiceTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/indices/template/put/MetaDataIndexTemplateServiceTests.java @@ -97,17 +97,6 @@ public void 
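// The two-argument createMappingsForIndex overload introduced above lets callers choose
// between random type names and the fixed default type name; a minimal sketch of the
// typeless variant (the generic return type is assumed from the surrounding test code):
ImmutableOpenMap<String, MappingMetaData> sketchMappings =
        GetMappingsResponseTests.createMappingsForIndex(1, false);
for (ObjectObjectCursor<String, MappingMetaData> cursor : sketchMappings) {
    // with randomTypeName == false every mapping uses MapperService.SINGLE_MAPPING_NAME ("_doc")
    assertEquals(MapperService.SINGLE_MAPPING_NAME, cursor.value.type());
}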
testIndexTemplateWithAliasNameEqualToTemplatePattern() { assertThat(errors.get(0).getMessage(), equalTo("Alias [foobar] cannot be the same as any pattern in [foo, foobar]")); } - public void testIndexTemplateWithValidateEmptyMapping() throws Exception { - PutRequest request = new PutRequest("api", "validate_template"); - request.patterns(Collections.singletonList("validate_template")); - request.putMapping("type1", "{}"); - - List errors = putTemplateDetail(request); - assertThat(errors.size(), equalTo(1)); - assertThat(errors.get(0), instanceOf(MapperParsingException.class)); - assertThat(errors.get(0).getMessage(), containsString("malformed mapping no root object found")); - } - public void testIndexTemplateWithValidateMapping() throws Exception { PutRequest request = new PutRequest("api", "validate_template"); request.patterns(Collections.singletonList("te*")); @@ -132,17 +121,6 @@ public void testBrokenMapping() throws Exception { assertThat(errors.get(0).getMessage(), containsString("Failed to parse mapping ")); } - public void testBlankMapping() throws Exception { - PutRequest request = new PutRequest("api", "blank_mapping"); - request.patterns(Collections.singletonList("te*")); - request.putMapping("type1", "{}"); - - List errors = putTemplateDetail(request); - assertThat(errors.size(), equalTo(1)); - assertThat(errors.get(0), instanceOf(MapperParsingException.class)); - assertThat(errors.get(0).getMessage(), containsString("malformed mapping no root object found")); - } - public void testAliasInvalidFilterInvalidJson() throws Exception { //invalid json: put index template fails PutRequest request = new PutRequest("api", "blank_mapping"); diff --git a/server/src/test/java/org/elasticsearch/action/bulk/BulkIntegrationIT.java b/server/src/test/java/org/elasticsearch/action/bulk/BulkIntegrationIT.java index 325894366fe64..de8a2d9d67bbe 100644 --- a/server/src/test/java/org/elasticsearch/action/bulk/BulkIntegrationIT.java +++ b/server/src/test/java/org/elasticsearch/action/bulk/BulkIntegrationIT.java @@ -57,7 +57,7 @@ protected Collection> nodePlugins() { public void testBulkIndexCreatesMapping() throws Exception { String bulkAction = copyToStringFromClasspath("/org/elasticsearch/action/bulk/bulk-log.json"); BulkRequestBuilder bulkBuilder = client().prepareBulk(); - bulkBuilder.add(bulkAction.getBytes(StandardCharsets.UTF_8), 0, bulkAction.length(), null, null, XContentType.JSON); + bulkBuilder.add(bulkAction.getBytes(StandardCharsets.UTF_8), 0, bulkAction.length(), null, XContentType.JSON); bulkBuilder.get(); assertBusy(() -> { GetMappingsResponse mappingsResponse = client().admin().indices().prepareGetMappings().get(); @@ -105,16 +105,13 @@ public void testBulkWithGlobalDefaults() throws Exception { String bulkAction = copyToStringFromClasspath("/org/elasticsearch/action/bulk/simple-bulk-missing-index-type.json"); { BulkRequestBuilder bulkBuilder = client().prepareBulk(); - bulkBuilder.add(bulkAction.getBytes(StandardCharsets.UTF_8), 0, bulkAction.length(), null, null, XContentType.JSON); + bulkBuilder.add(bulkAction.getBytes(StandardCharsets.UTF_8), 0, bulkAction.length(), null, XContentType.JSON); ActionRequestValidationException ex = expectThrows(ActionRequestValidationException.class, bulkBuilder::get); assertThat(ex.validationErrors(), containsInAnyOrder( "index is missing", "index is missing", - "index is missing", - "type is missing", - "type is missing", - "type is missing")); + "index is missing")); } { @@ -123,7 +120,7 @@ public void testBulkWithGlobalDefaults() throws 
Exception { .routing("routing") .pipeline("pipeline"); - bulkBuilder.add(bulkAction.getBytes(StandardCharsets.UTF_8), 0, bulkAction.length(), null, null, XContentType.JSON); + bulkBuilder.add(bulkAction.getBytes(StandardCharsets.UTF_8), 0, bulkAction.length(), null, XContentType.JSON); BulkResponse bulkItemResponses = bulkBuilder.get(); assertFalse(bulkItemResponses.hasFailures()); } diff --git a/server/src/test/java/org/elasticsearch/action/bulk/BulkRequestTests.java b/server/src/test/java/org/elasticsearch/action/bulk/BulkRequestTests.java index 65935bea96e59..75701e0685290 100644 --- a/server/src/test/java/org/elasticsearch/action/bulk/BulkRequestTests.java +++ b/server/src/test/java/org/elasticsearch/action/bulk/BulkRequestTests.java @@ -34,6 +34,7 @@ import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.rest.action.document.RestBulkAction; import org.elasticsearch.script.Script; import org.elasticsearch.test.ESTestCase; @@ -57,42 +58,46 @@ public class BulkRequestTests extends ESTestCase { public void testSimpleBulk1() throws Exception { String bulkAction = copyToStringFromClasspath("/org/elasticsearch/action/bulk/simple-bulk.json"); BulkRequest bulkRequest = new BulkRequest(); - bulkRequest.add(bulkAction.getBytes(StandardCharsets.UTF_8), 0, bulkAction.length(), null, null, XContentType.JSON); + bulkRequest.add(bulkAction.getBytes(StandardCharsets.UTF_8), 0, bulkAction.length(), null, XContentType.JSON); assertThat(bulkRequest.numberOfActions(), equalTo(3)); assertThat(((IndexRequest) bulkRequest.requests().get(0)).source(), equalTo(new BytesArray("{ \"field1\" : \"value1\" }"))); assertThat(bulkRequest.requests().get(1), instanceOf(DeleteRequest.class)); assertThat(((IndexRequest) bulkRequest.requests().get(2)).source(), equalTo(new BytesArray("{ \"field1\" : \"value3\" }"))); + //This test's JSON contains outdated references to types + assertWarnings(RestBulkAction.TYPES_DEPRECATION_MESSAGE); } public void testSimpleBulkWithCarriageReturn() throws Exception { String bulkAction = "{ \"index\":{\"_index\":\"test\",\"_type\":\"type1\",\"_id\":\"1\"} }\r\n{ \"field1\" : \"value1\" }\r\n"; BulkRequest bulkRequest = new BulkRequest(); - bulkRequest.add(bulkAction.getBytes(StandardCharsets.UTF_8), 0, bulkAction.length(), null, null, XContentType.JSON); + bulkRequest.add(bulkAction.getBytes(StandardCharsets.UTF_8), 0, bulkAction.length(), null, XContentType.JSON); assertThat(bulkRequest.numberOfActions(), equalTo(1)); assertThat(((IndexRequest) bulkRequest.requests().get(0)).source(), equalTo(new BytesArray("{ \"field1\" : \"value1\" }"))); Map sourceMap = XContentHelper.convertToMap(((IndexRequest) bulkRequest.requests().get(0)).source(), false, XContentType.JSON).v2(); assertEquals("value1", sourceMap.get("field1")); + //This test's JSON contains outdated references to types + assertWarnings(RestBulkAction.TYPES_DEPRECATION_MESSAGE); } public void testSimpleBulk2() throws Exception { String bulkAction = copyToStringFromClasspath("/org/elasticsearch/action/bulk/simple-bulk2.json"); BulkRequest bulkRequest = new BulkRequest(); - bulkRequest.add(bulkAction.getBytes(StandardCharsets.UTF_8), 0, bulkAction.length(), null, null, XContentType.JSON); + bulkRequest.add(bulkAction.getBytes(StandardCharsets.UTF_8), 0, bulkAction.length(), null, XContentType.JSON); assertThat(bulkRequest.numberOfActions(), equalTo(3)); } public void testSimpleBulk3() throws 
Exception { String bulkAction = copyToStringFromClasspath("/org/elasticsearch/action/bulk/simple-bulk3.json"); BulkRequest bulkRequest = new BulkRequest(); - bulkRequest.add(bulkAction.getBytes(StandardCharsets.UTF_8), 0, bulkAction.length(), null, null, XContentType.JSON); + bulkRequest.add(bulkAction.getBytes(StandardCharsets.UTF_8), 0, bulkAction.length(), null, XContentType.JSON); assertThat(bulkRequest.numberOfActions(), equalTo(3)); } public void testSimpleBulk4() throws Exception { String bulkAction = copyToStringFromClasspath("/org/elasticsearch/action/bulk/simple-bulk4.json"); BulkRequest bulkRequest = new BulkRequest(); - bulkRequest.add(bulkAction.getBytes(StandardCharsets.UTF_8), 0, bulkAction.length(), null, null, XContentType.JSON); + bulkRequest.add(bulkAction.getBytes(StandardCharsets.UTF_8), 0, bulkAction.length(), null, XContentType.JSON); assertThat(bulkRequest.numberOfActions(), equalTo(4)); assertThat(bulkRequest.requests().get(0).id(), equalTo("1")); assertThat(((UpdateRequest) bulkRequest.requests().get(0)).retryOnConflict(), equalTo(2)); @@ -109,6 +114,8 @@ public void testSimpleBulk4() throws Exception { assertThat(scriptParams.size(), equalTo(1)); assertThat(scriptParams.get("param1"), equalTo(1)); assertThat(((UpdateRequest) bulkRequest.requests().get(1)).upsertRequest().source().utf8ToString(), equalTo("{\"counter\":1}")); + //This test's JSON contains outdated references to types + assertWarnings(RestBulkAction.TYPES_DEPRECATION_MESSAGE); } public void testBulkAllowExplicitIndex() throws Exception { @@ -120,6 +127,8 @@ public void testBulkAllowExplicitIndex() throws Exception { String bulkAction = copyToStringFromClasspath("/org/elasticsearch/action/bulk/simple-bulk5.json"); new BulkRequest().add(new BytesArray(bulkAction.getBytes(StandardCharsets.UTF_8)), "test", null, false, XContentType.JSON); + //This test's JSON contains outdated references to types + assertWarnings(RestBulkAction.TYPES_DEPRECATION_MESSAGE); } public void testBulkAddIterable() { @@ -139,32 +148,38 @@ public void testSimpleBulk6() throws Exception { String bulkAction = copyToStringFromClasspath("/org/elasticsearch/action/bulk/simple-bulk6.json"); BulkRequest bulkRequest = new BulkRequest(); ParsingException exc = expectThrows(ParsingException.class, - () -> bulkRequest.add(bulkAction.getBytes(StandardCharsets.UTF_8), 0, bulkAction.length(), null, null, XContentType.JSON)); + () -> bulkRequest.add(bulkAction.getBytes(StandardCharsets.UTF_8), 0, bulkAction.length(), null, XContentType.JSON)); assertThat(exc.getMessage(), containsString("Unknown key for a VALUE_STRING in [hello]")); + //This test's JSON contains outdated references to types + assertWarnings(RestBulkAction.TYPES_DEPRECATION_MESSAGE); } public void testSimpleBulk7() throws Exception { String bulkAction = copyToStringFromClasspath("/org/elasticsearch/action/bulk/simple-bulk7.json"); BulkRequest bulkRequest = new BulkRequest(); IllegalArgumentException exc = expectThrows(IllegalArgumentException.class, - () -> bulkRequest.add(bulkAction.getBytes(StandardCharsets.UTF_8), 0, bulkAction.length(), null, null, XContentType.JSON)); + () -> bulkRequest.add(bulkAction.getBytes(StandardCharsets.UTF_8), 0, bulkAction.length(), null, XContentType.JSON)); assertThat(exc.getMessage(), containsString("Malformed action/metadata line [5], expected a simple value for field [_unknown] but found [START_ARRAY]")); + //This test's JSON contains outdated references to types + assertWarnings(RestBulkAction.TYPES_DEPRECATION_MESSAGE); } public void 
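// The hunks above drop the defaultType argument from BulkRequest#add; a minimal sketch of
// parsing a typeless bulk body with the shortened signature (the index name "test" and the
// field are illustrative only):
String body = "{ \"index\":{\"_index\":\"test\",\"_id\":\"1\"} }\n{ \"field1\" : \"value1\" }\n";
BulkRequest typelessRequest = new BulkRequest();
typelessRequest.add(body.getBytes(StandardCharsets.UTF_8), 0, body.length(), null, XContentType.JSON);
assertEquals(1, typelessRequest.numberOfActions());
// Bodies that still spell out "_type" continue to parse, but tests must now acknowledge the
// resulting deprecation warning via assertWarnings(RestBulkAction.TYPES_DEPRECATION_MESSAGE).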
testSimpleBulk8() throws Exception { String bulkAction = copyToStringFromClasspath("/org/elasticsearch/action/bulk/simple-bulk8.json"); BulkRequest bulkRequest = new BulkRequest(); IllegalArgumentException exc = expectThrows(IllegalArgumentException.class, - () -> bulkRequest.add(bulkAction.getBytes(StandardCharsets.UTF_8), 0, bulkAction.length(), null, null, XContentType.JSON)); + () -> bulkRequest.add(bulkAction.getBytes(StandardCharsets.UTF_8), 0, bulkAction.length(), null, XContentType.JSON)); assertThat(exc.getMessage(), containsString("Action/metadata line [3] contains an unknown parameter [_foo]")); + //This test's JSON contains outdated references to types + assertWarnings(RestBulkAction.TYPES_DEPRECATION_MESSAGE); } public void testSimpleBulk9() throws Exception { String bulkAction = copyToStringFromClasspath("/org/elasticsearch/action/bulk/simple-bulk9.json"); BulkRequest bulkRequest = new BulkRequest(); IllegalArgumentException exc = expectThrows(IllegalArgumentException.class, - () -> bulkRequest.add(bulkAction.getBytes(StandardCharsets.UTF_8), 0, bulkAction.length(), null, null, XContentType.JSON)); + () -> bulkRequest.add(bulkAction.getBytes(StandardCharsets.UTF_8), 0, bulkAction.length(), null, XContentType.JSON)); assertThat(exc.getMessage(), containsString("Malformed action/metadata line [3], expected START_OBJECT or END_OBJECT but found [START_ARRAY]")); } @@ -172,8 +187,10 @@ public void testSimpleBulk9() throws Exception { public void testSimpleBulk10() throws Exception { String bulkAction = copyToStringFromClasspath("/org/elasticsearch/action/bulk/simple-bulk10.json"); BulkRequest bulkRequest = new BulkRequest(); - bulkRequest.add(bulkAction.getBytes(StandardCharsets.UTF_8), 0, bulkAction.length(), null, null, XContentType.JSON); + bulkRequest.add(bulkAction.getBytes(StandardCharsets.UTF_8), 0, bulkAction.length(), null, XContentType.JSON); assertThat(bulkRequest.numberOfActions(), equalTo(9)); + //This test's JSON contains outdated references to types + assertWarnings(RestBulkAction.TYPES_DEPRECATION_MESSAGE); } public void testBulkActionShouldNotContainArray() throws Exception { @@ -181,13 +198,13 @@ public void testBulkActionShouldNotContainArray() throws Exception { + "{ \"field1\" : \"value1\" }\r\n"; BulkRequest bulkRequest = new BulkRequest(); IllegalArgumentException exc = expectThrows(IllegalArgumentException.class, - () -> bulkRequest.add(bulkAction.getBytes(StandardCharsets.UTF_8), 0, bulkAction.length(), null, null, XContentType.JSON)); + () -> bulkRequest.add(bulkAction.getBytes(StandardCharsets.UTF_8), 0, bulkAction.length(), null, XContentType.JSON)); assertEquals(exc.getMessage(), "Malformed action/metadata line [1]" + ", expected a simple value for field [_index] but found [START_ARRAY]"); } public void testBulkEmptyObject() throws Exception { - String bulkIndexAction = "{ \"index\":{\"_index\":\"test\",\"_type\":\"type1\",\"_id\":\"1\"} }\r\n"; + String bulkIndexAction = "{ \"index\":{\"_index\":\"test\",\"_id\":\"1\"} }\r\n"; String bulkIndexSource = "{ \"field1\" : \"value1\" }\r\n"; String emptyObject = "{}\r\n"; StringBuilder bulk = new StringBuilder(); @@ -207,7 +224,7 @@ public void testBulkEmptyObject() throws Exception { String bulkAction = bulk.toString(); BulkRequest bulkRequest = new BulkRequest(); IllegalArgumentException exc = expectThrows(IllegalArgumentException.class, - () -> bulkRequest.add(bulkAction.getBytes(StandardCharsets.UTF_8), 0, bulkAction.length(), null, null, XContentType.JSON)); + () -> 
bulkRequest.add(bulkAction.getBytes(StandardCharsets.UTF_8), 0, bulkAction.length(), null, XContentType.JSON)); assertThat(exc.getMessage(), containsString("Malformed action/metadata line [" + emptyLine + "], expected FIELD_NAME but found [END_OBJECT]")); } @@ -218,7 +235,7 @@ public void testBulkRequestWithRefresh() throws Exception { // We force here a "id is missing" validation error bulkRequest.add(new DeleteRequest("index", "type", null).setRefreshPolicy(RefreshPolicy.IMMEDIATE)); // We force here a "type is missing" validation error - bulkRequest.add(new DeleteRequest("index", null, "id")); + bulkRequest.add(new DeleteRequest("index", "", "id")); bulkRequest.add(new DeleteRequest("index", "type", "id").setRefreshPolicy(RefreshPolicy.IMMEDIATE)); bulkRequest.add(new UpdateRequest("index", "type", "id").doc("{}", XContentType.JSON).setRefreshPolicy(RefreshPolicy.IMMEDIATE)); bulkRequest.add(new IndexRequest("index", "type", "id").source("{}", XContentType.JSON).setRefreshPolicy(RefreshPolicy.IMMEDIATE)); @@ -290,6 +307,8 @@ public void testSmileIsSupported() throws IOException { IndexRequest request = (IndexRequest) docWriteRequest; assertEquals(1, request.sourceAsMap().size()); assertEquals("value", request.sourceAsMap().get("field")); + //This test's content contains outdated references to types + assertWarnings(RestBulkAction.TYPES_DEPRECATION_MESSAGE); } public void testToValidateUpsertRequestAndVersionInBulkRequest() throws IOException { @@ -324,18 +343,22 @@ public void testToValidateUpsertRequestAndVersionInBulkRequest() throws IOExcept bulkRequest.add(data, null, null, xContentType); assertThat(bulkRequest.validate().validationErrors(), contains("can't provide both upsert request and a version", "can't provide version in upsert request")); + //This test's JSON contains outdated references to types + assertWarnings(RestBulkAction.TYPES_DEPRECATION_MESSAGE); } public void testBulkTerminatedByNewline() throws Exception { String bulkAction = copyToStringFromClasspath("/org/elasticsearch/action/bulk/simple-bulk11.json"); IllegalArgumentException expectThrows = expectThrows(IllegalArgumentException.class, () -> new BulkRequest() - .add(bulkAction.getBytes(StandardCharsets.UTF_8), 0, bulkAction.length(), null, null, XContentType.JSON)); + .add(bulkAction.getBytes(StandardCharsets.UTF_8), 0, bulkAction.length(), null, XContentType.JSON)); assertEquals("The bulk request must be terminated by a newline [\n]", expectThrows.getMessage()); String bulkActionWithNewLine = bulkAction + "\n"; BulkRequest bulkRequestWithNewLine = new BulkRequest(); - bulkRequestWithNewLine.add(bulkActionWithNewLine.getBytes(StandardCharsets.UTF_8), 0, bulkActionWithNewLine.length(), null, null, + bulkRequestWithNewLine.add(bulkActionWithNewLine.getBytes(StandardCharsets.UTF_8), 0, bulkActionWithNewLine.length(), null, XContentType.JSON); assertEquals(3, bulkRequestWithNewLine.numberOfActions()); + //This test's JSON contains outdated references to types + assertWarnings(RestBulkAction.TYPES_DEPRECATION_MESSAGE); } } diff --git a/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionTookTests.java b/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionTookTests.java index 593bba9f377b9..89ccbefb00e59 100644 --- a/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionTookTests.java +++ b/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionTookTests.java @@ -38,6 +38,7 @@ import org.elasticsearch.common.util.concurrent.AtomicArray; import 
org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.index.IndexNotFoundException; +import org.elasticsearch.rest.action.document.RestBulkAction; import org.elasticsearch.tasks.Task; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.transport.CapturingTransport; @@ -178,7 +179,7 @@ private void runTestTook(boolean controlled) throws Exception { bulkAction = Strings.replace(bulkAction, "\r\n", "\n"); } BulkRequest bulkRequest = new BulkRequest(); - bulkRequest.add(bulkAction.getBytes(StandardCharsets.UTF_8), 0, bulkAction.length(), null, null, XContentType.JSON); + bulkRequest.add(bulkAction.getBytes(StandardCharsets.UTF_8), 0, bulkAction.length(), null, XContentType.JSON); AtomicLong expected = new AtomicLong(); TransportBulkAction action = createAction(controlled, expected); action.doExecute(null, bulkRequest, new ActionListener() { @@ -200,6 +201,8 @@ public void onFailure(Exception e) { } }); + //This test's JSON contains outdated references to types + assertWarnings(RestBulkAction.TYPES_DEPRECATION_MESSAGE); } static class Resolver extends IndexNameExpressionResolver { diff --git a/server/src/test/java/org/elasticsearch/action/delete/DeleteRequestTests.java b/server/src/test/java/org/elasticsearch/action/delete/DeleteRequestTests.java index 5f897d0b8349b..4fe4c3548c224 100644 --- a/server/src/test/java/org/elasticsearch/action/delete/DeleteRequestTests.java +++ b/server/src/test/java/org/elasticsearch/action/delete/DeleteRequestTests.java @@ -36,11 +36,20 @@ public void testValidation() { } { - final DeleteRequest request = new DeleteRequest("index4", randomBoolean() ? "" : null, randomBoolean() ? "" : null); + //Empty types are accepted but fail validation + final DeleteRequest request = new DeleteRequest("index4", "", randomBoolean() ? "" : null); final ActionRequestValidationException validate = request.validate(); assertThat(validate, not(nullValue())); assertThat(validate.validationErrors(), hasItems("type is missing", "id is missing")); } + { + // Null types are defaulted + final DeleteRequest request = new DeleteRequest("index4", randomBoolean() ? "" : null); + final ActionRequestValidationException validate = request.validate(); + + assertThat(validate, not(nullValue())); + assertThat(validate.validationErrors(), hasItems("id is missing")); + } } } diff --git a/server/src/test/java/org/elasticsearch/action/search/AbstractSearchAsyncActionTests.java b/server/src/test/java/org/elasticsearch/action/search/AbstractSearchAsyncActionTests.java index 193878e2f5e04..70f70268a0a03 100644 --- a/server/src/test/java/org/elasticsearch/action/search/AbstractSearchAsyncActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/AbstractSearchAsyncActionTests.java @@ -116,7 +116,8 @@ private void runTestTook(final boolean controlled) { public void testBuildShardSearchTransportRequest() { final AtomicLong expected = new AtomicLong(); AbstractSearchAsyncAction action = createAction(false, expected); - SearchShardIterator iterator = new SearchShardIterator("test-cluster", new ShardId(new Index("name", "foo"), 1), + String clusterAlias = randomBoolean() ? 
null : randomAlphaOfLengthBetween(5, 10); + SearchShardIterator iterator = new SearchShardIterator(clusterAlias, new ShardId(new Index("name", "foo"), 1), Collections.emptyList(), new OriginalIndices(new String[] {"name", "name1"}, IndicesOptions.strictExpand())); ShardSearchTransportRequest shardSearchTransportRequest = action.buildShardSearchRequest(iterator); assertEquals(IndicesOptions.strictExpand(), shardSearchTransportRequest.indicesOptions()); @@ -126,5 +127,6 @@ public void testBuildShardSearchTransportRequest() { assertArrayEquals(new String[] {"name", "name1"}, shardSearchTransportRequest.indices()); assertArrayEquals(new String[] {"bar", "baz"}, shardSearchTransportRequest.indexRoutings()); assertEquals("_shards:1,3", shardSearchTransportRequest.preference()); + assertEquals(clusterAlias, shardSearchTransportRequest.getClusterAlias()); } } diff --git a/server/src/test/java/org/elasticsearch/action/search/SearchAsyncActionTests.java b/server/src/test/java/org/elasticsearch/action/search/SearchAsyncActionTests.java index a3618ffa16f9d..21ac0cdf636d2 100644 --- a/server/src/test/java/org/elasticsearch/action/search/SearchAsyncActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/SearchAsyncActionTests.java @@ -62,27 +62,16 @@ public class SearchAsyncActionTests extends ESTestCase { public void testSkipSearchShards() throws InterruptedException { SearchRequest request = new SearchRequest(); request.allowPartialSearchResults(true); - CountDownLatch latch = new CountDownLatch(1); - AtomicReference response = new AtomicReference<>(); - ActionListener responseListener = new ActionListener() { - @Override - public void onResponse(SearchResponse searchResponse) { - response.set((TestSearchResponse) searchResponse); - } - - @Override - public void onFailure(Exception e) { - logger.warn("test failed", e); - fail(e.getMessage()); - } - }; + int numShards = 10; + ActionListener responseListener = ActionListener.wrap(response -> {}, + (e) -> { throw new AssertionError("unexpected", e);}); DiscoveryNode primaryNode = new DiscoveryNode("node_1", buildNewFakeTransportAddress(), Version.CURRENT); DiscoveryNode replicaNode = new DiscoveryNode("node_2", buildNewFakeTransportAddress(), Version.CURRENT); AtomicInteger contextIdGenerator = new AtomicInteger(0); GroupShardsIterator shardsIter = getShardsIter("idx", new OriginalIndices(new String[]{"idx"}, SearchRequest.DEFAULT_INDICES_OPTIONS), - 10, randomBoolean(), primaryNode, replicaNode); + numShards, randomBoolean(), primaryNode, replicaNode); int numSkipped = 0; for (SearchShardIterator iter : shardsIter) { if (iter.shardId().id() % 2 == 0) { @@ -90,6 +79,8 @@ public void onFailure(Exception e) { numSkipped++; } } + CountDownLatch latch = new CountDownLatch(numShards - numSkipped); + AtomicBoolean searchPhaseDidRun = new AtomicBoolean(false); SearchTransportService transportService = new SearchTransportService(null, null); Map lookup = new HashMap<>(); @@ -142,15 +133,22 @@ protected SearchPhase getNextPhase(SearchPhaseResults res return new SearchPhase("test") { @Override public void run() { - latch.countDown(); + assertTrue(searchPhaseDidRun.compareAndSet(false, true)); } }; } + + @Override + protected void executeNext(Runnable runnable, Thread originalThread) { + super.executeNext(runnable, originalThread); + latch.countDown(); + } }; asyncAction.start(); latch.await(); + assertTrue(searchPhaseDidRun.get()); SearchResponse searchResponse = asyncAction.buildSearchResponse(null, null); - 
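// The anonymous ActionListener classes above collapse into ActionListener.wrap, which builds
// a listener from two lambdas; a condensed sketch of the pattern these tests now use:
ActionListener<SearchResponse> listener = ActionListener.wrap(
        response -> {},                                      // responses are ignored here
        e -> { throw new AssertionError("unexpected", e); }  // any failure fails the test
);
// Counting the latch down in executeNext, rather than in the final search phase, makes the
// test wait until every forked per-shard continuation has actually finished running.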
assertEquals(shardsIter.size()-numSkipped, numRequests.get()); + assertEquals(shardsIter.size() - numSkipped, numRequests.get()); assertEquals(0, searchResponse.getFailedShards()); assertEquals(numSkipped, searchResponse.getSkippedShards()); assertEquals(shardsIter.size(), searchResponse.getSuccessfulShards()); @@ -161,20 +159,25 @@ public void testLimitConcurrentShardRequests() throws InterruptedException { request.allowPartialSearchResults(true); int numConcurrent = randomIntBetween(1, 5); request.setMaxConcurrentShardRequests(numConcurrent); - CountDownLatch latch = new CountDownLatch(1); - AtomicReference response = new AtomicReference<>(); - ActionListener responseListener = new ActionListener() { - @Override - public void onResponse(SearchResponse searchResponse) { - response.set((TestSearchResponse) searchResponse); - } - - @Override - public void onFailure(Exception e) { - logger.warn("test failed", e); - fail(e.getMessage()); + boolean doReplicas = randomBoolean(); + int numShards = randomIntBetween(5, 10); + int numShardAttempts = numShards; + Boolean[] shardFailures = new Boolean[numShards]; + // at least one response otherwise the entire request fails + shardFailures[randomIntBetween(0, shardFailures.length - 1)] = false; + for (int i = 0; i < shardFailures.length; i++) { + if (shardFailures[i] == null) { + boolean failure = randomBoolean(); + shardFailures[i] = failure; + if (failure && doReplicas) { + numShardAttempts++; + } } - }; + } + CountDownLatch latch = new CountDownLatch(numShardAttempts); + AtomicBoolean searchPhaseDidRun = new AtomicBoolean(false); + ActionListener responseListener = ActionListener.wrap(response -> {}, + (e) -> { throw new AssertionError("unexpected", e);}); DiscoveryNode primaryNode = new DiscoveryNode("node_1", buildNewFakeTransportAddress(), Version.CURRENT); // for the sake of this test we place the replica on the same node. ie. 
this is not a mistake since we limit per node now DiscoveryNode replicaNode = new DiscoveryNode("node_1", buildNewFakeTransportAddress(), Version.CURRENT); @@ -182,7 +185,7 @@ public void onFailure(Exception e) { AtomicInteger contextIdGenerator = new AtomicInteger(0); GroupShardsIterator shardsIter = getShardsIter("idx", new OriginalIndices(new String[]{"idx"}, SearchRequest.DEFAULT_INDICES_OPTIONS), - 10, randomBoolean(), primaryNode, replicaNode); + numShards, doReplicas, primaryNode, replicaNode); SearchTransportService transportService = new SearchTransportService(null, null); Map lookup = new HashMap<>(); Map seenShard = new ConcurrentHashMap<>(); @@ -191,7 +194,6 @@ public void onFailure(Exception e) { Map aliasFilters = Collections.singletonMap("_na_", new AliasFilter(null, Strings.EMPTY_ARRAY)); CountDownLatch awaitInitialRequests = new CountDownLatch(1); AtomicInteger numRequests = new AtomicInteger(0); - AtomicInteger numResponses = new AtomicInteger(0); AbstractSearchAsyncAction asyncAction = new AbstractSearchAsyncAction( "test", @@ -218,7 +220,7 @@ public void onFailure(Exception e) { protected void executePhaseOnShard(SearchShardIterator shardIt, ShardRouting shard, SearchActionListener listener) { seenShard.computeIfAbsent(shard.shardId(), (i) -> { - numRequests.incrementAndGet(); // only count this once per replica + numRequests.incrementAndGet(); // only count this once per shard copy return Boolean.TRUE; }); @@ -231,13 +233,11 @@ protected void executePhaseOnShard(SearchShardIterator shardIt, ShardRouting sha Transport.Connection connection = getConnection(null, shard.currentNodeId()); TestSearchPhaseResult testSearchPhaseResult = new TestSearchPhaseResult(contextIdGenerator.incrementAndGet(), connection.getNode()); - if (numResponses.getAndIncrement() > 0 && randomBoolean()) { // at least one response otherwise the entire - // request fails + if (shardFailures[shard.shardId().id()]) { listener.onFailure(new RuntimeException()); } else { listener.onResponse(testSearchPhaseResult); } - }).start(); } @@ -246,16 +246,23 @@ protected SearchPhase getNextPhase(SearchPhaseResults res return new SearchPhase("test") { @Override public void run() { - latch.countDown(); + assertTrue(searchPhaseDidRun.compareAndSet(false, true)); } }; } + + @Override + protected void executeNext(Runnable runnable, Thread originalThread) { + super.executeNext(runnable, originalThread); + latch.countDown(); + } }; asyncAction.start(); assertEquals(numConcurrent, numRequests.get()); awaitInitialRequests.countDown(); latch.await(); - assertEquals(10, numRequests.get()); + assertTrue(searchPhaseDidRun.get()); + assertEquals(numShards, numRequests.get()); } public void testFanOutAndCollect() throws InterruptedException { @@ -263,26 +270,18 @@ public void testFanOutAndCollect() throws InterruptedException { request.allowPartialSearchResults(true); request.setMaxConcurrentShardRequests(randomIntBetween(1, 100)); AtomicReference response = new AtomicReference<>(); - ActionListener responseListener = new ActionListener() { - @Override - public void onResponse(SearchResponse searchResponse) { - response.set((TestSearchResponse) searchResponse); - } - - @Override - public void onFailure(Exception e) { - logger.warn("test failed", e); - fail(e.getMessage()); - } - }; + ActionListener responseListener = ActionListener.wrap( + searchResponse -> response.set((TestSearchResponse) searchResponse), + (e) -> { throw new AssertionError("unexpected", e);}); DiscoveryNode primaryNode = new DiscoveryNode("node_1", 
buildNewFakeTransportAddress(), Version.CURRENT); DiscoveryNode replicaNode = new DiscoveryNode("node_2", buildNewFakeTransportAddress(), Version.CURRENT); Map> nodeToContextMap = newConcurrentMap(); AtomicInteger contextIdGenerator = new AtomicInteger(0); + int numShards = randomIntBetween(1, 10); GroupShardsIterator shardsIter = getShardsIter("idx", new OriginalIndices(new String[]{"idx"}, SearchRequest.DEFAULT_INDICES_OPTIONS), - randomIntBetween(1, 10), randomBoolean(), primaryNode, replicaNode); + numShards, randomBoolean(), primaryNode, replicaNode); AtomicInteger numFreedContext = new AtomicInteger(); SearchTransportService transportService = new SearchTransportService(null, null) { @Override @@ -296,9 +295,8 @@ public void sendFreeContext(Transport.Connection connection, long contextId, Ori lookup.put(primaryNode.getId(), new MockConnection(primaryNode)); lookup.put(replicaNode.getId(), new MockConnection(replicaNode)); Map aliasFilters = Collections.singletonMap("_na_", new AliasFilter(null, Strings.EMPTY_ARRAY)); - final ExecutorService executor = Executors.newFixedThreadPool(randomIntBetween(1, Runtime.getRuntime().availableProcessors())); - final CountDownLatch latch = new CountDownLatch(1); - final AtomicBoolean latchTriggered = new AtomicBoolean(); + ExecutorService executor = Executors.newFixedThreadPool(randomIntBetween(1, Runtime.getRuntime().availableProcessors())); + final CountDownLatch latch = new CountDownLatch(numShards); AbstractSearchAsyncAction asyncAction = new AbstractSearchAsyncAction( "test", @@ -349,13 +347,15 @@ public void run() { sendReleaseSearchContext(result.getRequestId(), new MockConnection(result.node), OriginalIndices.NONE); } responseListener.onResponse(response); - if (latchTriggered.compareAndSet(false, true) == false) { - throw new AssertionError("latch triggered twice"); - } - latch.countDown(); } }; } + + @Override + protected void executeNext(Runnable runnable, Thread originalThread) { + super.executeNext(runnable, originalThread); + latch.countDown(); + } }; asyncAction.start(); latch.await(); diff --git a/server/src/test/java/org/elasticsearch/action/search/SearchPhaseControllerTests.java b/server/src/test/java/org/elasticsearch/action/search/SearchPhaseControllerTests.java index 4a8afe22b18aa..a5ab81d83fbcd 100644 --- a/server/src/test/java/org/elasticsearch/action/search/SearchPhaseControllerTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/SearchPhaseControllerTests.java @@ -29,12 +29,14 @@ import org.apache.lucene.search.TotalHits.Relation; import org.apache.lucene.search.grouping.CollapseTopFieldDocs; import org.apache.lucene.util.BytesRef; +import org.elasticsearch.action.OriginalIndices; import org.elasticsearch.common.lucene.Lucene; import org.elasticsearch.common.lucene.search.TopDocsAndMaxScore; import org.elasticsearch.common.text.Text; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.util.concurrent.AtomicArray; import org.elasticsearch.index.Index; +import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.SearchHits; @@ -47,6 +49,7 @@ import org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.search.fetch.FetchSearchResult; import org.elasticsearch.search.internal.InternalSearchResponse; +import org.elasticsearch.search.internal.SearchContext; import org.elasticsearch.search.query.QuerySearchResult; import 
org.elasticsearch.search.suggest.Suggest; import org.elasticsearch.search.suggest.completion.CompletionSuggestion; @@ -54,28 +57,34 @@ import org.junit.Before; import java.util.ArrayList; -import java.util.Arrays; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Optional; +import java.util.concurrent.CopyOnWriteArrayList; import java.util.concurrent.atomic.AtomicInteger; import java.util.stream.Collectors; import java.util.stream.Stream; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.greaterThanOrEqualTo; import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.lessThanOrEqualTo; import static org.hamcrest.Matchers.not; public class SearchPhaseControllerTests extends ESTestCase { private SearchPhaseController searchPhaseController; + private List reductions; @Before public void setup() { + reductions = new CopyOnWriteArrayList<>(); searchPhaseController = new SearchPhaseController( - (b) -> new InternalAggregation.ReduceContext(BigArrays.NON_RECYCLING_INSTANCE, null, b)); + (finalReduce) -> { + reductions.add(finalReduce); + return new InternalAggregation.ReduceContext(BigArrays.NON_RECYCLING_INSTANCE, null, finalReduce); + }); } public void testSort() { @@ -154,16 +163,22 @@ public void testMerge() { int nShards = randomIntBetween(1, 20); int queryResultSize = randomBoolean() ? 0 : randomIntBetween(1, nShards * 2); AtomicArray queryResults = generateQueryResults(nShards, suggestions, queryResultSize, false); - for (boolean trackTotalHits : new boolean[] {true, false}) { + for (int trackTotalHits : new int[] {SearchContext.TRACK_TOTAL_HITS_DISABLED, SearchContext.TRACK_TOTAL_HITS_ACCURATE}) { SearchPhaseController.ReducedQueryPhase reducedQueryPhase = - searchPhaseController.reducedQueryPhase(queryResults.asList(), false, trackTotalHits); - AtomicArray searchPhaseResultAtomicArray = generateFetchResults(nShards, + searchPhaseController.reducedQueryPhase(queryResults.asList(), false, trackTotalHits, true); + AtomicArray fetchResults = generateFetchResults(nShards, reducedQueryPhase.sortedTopDocs.scoreDocs, reducedQueryPhase.suggest); InternalSearchResponse mergedResponse = searchPhaseController.merge(false, - reducedQueryPhase, - searchPhaseResultAtomicArray.asList(), searchPhaseResultAtomicArray::get); - if (trackTotalHits == false) { + reducedQueryPhase, fetchResults.asList(), fetchResults::get); + if (trackTotalHits == SearchContext.TRACK_TOTAL_HITS_DISABLED) { assertNull(mergedResponse.hits.getTotalHits()); + } else { + assertThat(mergedResponse.hits.getTotalHits().value, equalTo(0L)); + assertEquals(mergedResponse.hits.getTotalHits().relation, Relation.EQUAL_TO); + } + for (SearchHit hit : mergedResponse.hits().getHits()) { + SearchPhaseResult searchPhaseResult = fetchResults.get(hit.getShard().getShardId().id()); + assertSame(searchPhaseResult.getSearchShardTarget(), hit.getShard()); } int suggestSize = 0; for (Suggest.Suggestion s : reducedQueryPhase.suggest) { @@ -182,6 +197,8 @@ public void testMerge() { assertThat(options.size(), equalTo(suggestion.getEntries().get(0).getOptions().size())); for (CompletionSuggestion.Entry.Option option : options) { assertNotNull(option.getHit()); + SearchPhaseResult searchPhaseResult = fetchResults.get(option.getHit().getShard().getShardId().id()); + assertSame(searchPhaseResult.getSearchShardTarget(), option.getHit().getShard()); } } } @@ -193,8 +210,10 @@ private static AtomicArray generateQueryResults(int nShards, 
int searchHitsSize, boolean useConstantScore) { AtomicArray queryResults = new AtomicArray<>(nShards); for (int shardIndex = 0; shardIndex < nShards; shardIndex++) { - QuerySearchResult querySearchResult = new QuerySearchResult(shardIndex, - new SearchShardTarget("", new Index("", ""), shardIndex, null)); + String clusterAlias = randomBoolean() ? null : "remote"; + SearchShardTarget searchShardTarget = new SearchShardTarget("", new ShardId("", "", shardIndex), + clusterAlias, OriginalIndices.NONE); + QuerySearchResult querySearchResult = new QuerySearchResult(shardIndex, searchShardTarget); final TopDocs topDocs; float maxScore = 0; if (searchHitsSize == 0) { @@ -237,7 +256,7 @@ private static AtomicArray generateQueryResults(int nShards, return queryResults; } - private int getTotalQueryHits(AtomicArray results) { + private static int getTotalQueryHits(AtomicArray results) { int resultCount = 0; for (SearchPhaseResult shardResult : results.asList()) { TopDocs topDocs = shardResult.queryResult().topDocs().topDocs; @@ -247,7 +266,7 @@ private int getTotalQueryHits(AtomicArray results) { return resultCount; } - private Suggest reducedSuggest(AtomicArray results) { + private static Suggest reducedSuggest(AtomicArray results) { Map>> groupedSuggestion = new HashMap<>(); for (SearchPhaseResult entry : results.asList()) { for (Suggest.Suggestion suggestion : entry.queryResult().suggest()) { @@ -260,11 +279,12 @@ private Suggest reducedSuggest(AtomicArray results) { .collect(Collectors.toList())); } - private AtomicArray generateFetchResults(int nShards, ScoreDoc[] mergedSearchDocs, Suggest mergedSuggest) { + private static AtomicArray generateFetchResults(int nShards, ScoreDoc[] mergedSearchDocs, Suggest mergedSuggest) { AtomicArray fetchResults = new AtomicArray<>(nShards); for (int shardIndex = 0; shardIndex < nShards; shardIndex++) { float maxScore = -1F; - SearchShardTarget shardTarget = new SearchShardTarget("", new Index("", ""), shardIndex, null); + String clusterAlias = randomBoolean() ? null : "remote"; + SearchShardTarget shardTarget = new SearchShardTarget("", new ShardId("", "", shardIndex), clusterAlias, OriginalIndices.NONE); FetchSearchResult fetchSearchResult = new FetchSearchResult(shardIndex, shardTarget); List searchHits = new ArrayList<>(); for (ScoreDoc scoreDoc : mergedSearchDocs) { @@ -297,14 +317,15 @@ private AtomicArray generateFetchResults(int nShards, ScoreDo public void testConsumer() { int bufferSize = randomIntBetween(2, 3); - SearchRequest request = new SearchRequest(); + SearchRequest request = randomBoolean() ? 
new SearchRequest() : new SearchRequest("remote"); request.source(new SearchSourceBuilder().aggregation(AggregationBuilders.avg("foo"))); request.setBatchedReduceSize(bufferSize); InitialSearchPhase.ArraySearchPhaseResults consumer = searchPhaseController.newSearchPhaseResults(request, 3); + assertEquals(0, reductions.size()); QuerySearchResult result = new QuerySearchResult(0, new SearchShardTarget("node", new Index("a", "b"), 0, null)); result.topDocs(new TopDocsAndMaxScore(new TopDocs(new TotalHits(0, TotalHits.Relation.EQUAL_TO), new ScoreDoc[0]), Float.NaN), new DocValueFormat[0]); - InternalAggregations aggs = new InternalAggregations(Arrays.asList(new InternalMax("test", 1.0D, DocValueFormat.RAW, + InternalAggregations aggs = new InternalAggregations(Collections.singletonList(new InternalMax("test", 1.0D, DocValueFormat.RAW, Collections.emptyList(), Collections.emptyMap()))); result.aggregations(aggs); result.setShardIndex(0); @@ -313,7 +334,7 @@ public void testConsumer() { result = new QuerySearchResult(1, new SearchShardTarget("node", new Index("a", "b"), 0, null)); result.topDocs(new TopDocsAndMaxScore(new TopDocs(new TotalHits(0, TotalHits.Relation.EQUAL_TO), new ScoreDoc[0]), Float.NaN), new DocValueFormat[0]); - aggs = new InternalAggregations(Arrays.asList(new InternalMax("test", 3.0D, DocValueFormat.RAW, + aggs = new InternalAggregations(Collections.singletonList(new InternalMax("test", 3.0D, DocValueFormat.RAW, Collections.emptyList(), Collections.emptyMap()))); result.aggregations(aggs); result.setShardIndex(2); @@ -322,23 +343,29 @@ public void testConsumer() { result = new QuerySearchResult(1, new SearchShardTarget("node", new Index("a", "b"), 0, null)); result.topDocs(new TopDocsAndMaxScore(new TopDocs(new TotalHits(0, TotalHits.Relation.EQUAL_TO), new ScoreDoc[0]), Float.NaN), new DocValueFormat[0]); - aggs = new InternalAggregations(Arrays.asList(new InternalMax("test", 2.0D, DocValueFormat.RAW, + aggs = new InternalAggregations(Collections.singletonList(new InternalMax("test", 2.0D, DocValueFormat.RAW, Collections.emptyList(), Collections.emptyMap()))); result.aggregations(aggs); result.setShardIndex(1); consumer.consumeResult(result); - int numTotalReducePhases = 1; + final int numTotalReducePhases; if (bufferSize == 2) { assertThat(consumer, instanceOf(SearchPhaseController.QueryPhaseResultConsumer.class)); assertEquals(1, ((SearchPhaseController.QueryPhaseResultConsumer)consumer).getNumReducePhases()); assertEquals(2, ((SearchPhaseController.QueryPhaseResultConsumer)consumer).getNumBuffered()); - numTotalReducePhases++; + assertEquals(1, reductions.size()); + assertEquals(false, reductions.get(0)); + numTotalReducePhases = 2; } else { assertThat(consumer, not(instanceOf(SearchPhaseController.QueryPhaseResultConsumer.class))); + assertEquals(0, reductions.size()); + numTotalReducePhases = 1; } SearchPhaseController.ReducedQueryPhase reduce = consumer.reduce(); assertEquals(numTotalReducePhases, reduce.numReducePhases); + assertEquals(numTotalReducePhases, reductions.size()); + assertFinalReduction(request); InternalMax max = (InternalMax) reduce.aggregations.asList().get(0); assertEquals(3.0D, max.getValue(), 0.0D); assertFalse(reduce.sortedTopDocs.isSortedByField); @@ -351,7 +378,7 @@ public void testConsumerConcurrently() throws InterruptedException { int expectedNumResults = randomIntBetween(1, 100); int bufferSize = randomIntBetween(2, 200); - SearchRequest request = new SearchRequest(); + SearchRequest request = randomBoolean() ? 
new SearchRequest() : new SearchRequest("remote"); request.source(new SearchSourceBuilder().aggregation(AggregationBuilders.avg("foo"))); request.setBatchedReduceSize(bufferSize); InitialSearchPhase.ArraySearchPhaseResults consumer = @@ -367,7 +394,7 @@ public void testConsumerConcurrently() throws InterruptedException { result.topDocs(new TopDocsAndMaxScore( new TopDocs(new TotalHits(1, TotalHits.Relation.EQUAL_TO), new ScoreDoc[] {new ScoreDoc(0, number)}), number), new DocValueFormat[0]); - InternalAggregations aggs = new InternalAggregations(Arrays.asList(new InternalMax("test", (double) number, + InternalAggregations aggs = new InternalAggregations(Collections.singletonList(new InternalMax("test", (double) number, DocValueFormat.RAW, Collections.emptyList(), Collections.emptyMap()))); result.aggregations(aggs); result.setShardIndex(id); @@ -381,6 +408,7 @@ public void testConsumerConcurrently() throws InterruptedException { threads[i].join(); } SearchPhaseController.ReducedQueryPhase reduce = consumer.reduce(); + assertFinalReduction(request); InternalMax internalMax = (InternalMax) reduce.aggregations.asList().get(0); assertEquals(max.get(), internalMax.getValue(), 0.0D); assertEquals(1, reduce.sortedTopDocs.scoreDocs.length); @@ -396,7 +424,7 @@ public void testConsumerConcurrently() throws InterruptedException { public void testConsumerOnlyAggs() { int expectedNumResults = randomIntBetween(1, 100); int bufferSize = randomIntBetween(2, 200); - SearchRequest request = new SearchRequest(); + SearchRequest request = randomBoolean() ? new SearchRequest() : new SearchRequest("remote"); request.source(new SearchSourceBuilder().aggregation(AggregationBuilders.avg("foo")).size(0)); request.setBatchedReduceSize(bufferSize); InitialSearchPhase.ArraySearchPhaseResults consumer = @@ -408,7 +436,7 @@ public void testConsumerOnlyAggs() { QuerySearchResult result = new QuerySearchResult(i, new SearchShardTarget("node", new Index("a", "b"), i, null)); result.topDocs(new TopDocsAndMaxScore(new TopDocs(new TotalHits(1, TotalHits.Relation.EQUAL_TO), new ScoreDoc[0]), number), new DocValueFormat[0]); - InternalAggregations aggs = new InternalAggregations(Arrays.asList(new InternalMax("test", (double) number, + InternalAggregations aggs = new InternalAggregations(Collections.singletonList(new InternalMax("test", (double) number, DocValueFormat.RAW, Collections.emptyList(), Collections.emptyMap()))); result.aggregations(aggs); result.setShardIndex(i); @@ -416,6 +444,7 @@ public void testConsumerOnlyAggs() { consumer.consumeResult(result); } SearchPhaseController.ReducedQueryPhase reduce = consumer.reduce(); + assertFinalReduction(request); InternalMax internalMax = (InternalMax) reduce.aggregations.asList().get(0); assertEquals(max.get(), internalMax.getValue(), 0.0D); assertEquals(0, reduce.sortedTopDocs.scoreDocs.length); @@ -430,7 +459,7 @@ public void testConsumerOnlyAggs() { public void testConsumerOnlyHits() { int expectedNumResults = randomIntBetween(1, 100); int bufferSize = randomIntBetween(2, 200); - SearchRequest request = new SearchRequest(); + SearchRequest request = randomBoolean() ? 
new SearchRequest() : new SearchRequest("remote"); if (randomBoolean()) { request.source(new SearchSourceBuilder().size(randomIntBetween(1, 10))); } @@ -449,6 +478,7 @@ public void testConsumerOnlyHits() { consumer.consumeResult(result); } SearchPhaseController.ReducedQueryPhase reduce = consumer.reduce(); + assertFinalReduction(request); assertEquals(1, reduce.sortedTopDocs.scoreDocs.length); assertEquals(max.get(), reduce.maxScore, 0.0f); assertEquals(expectedNumResults, reduce.totalHits.value); @@ -459,6 +489,12 @@ public void testConsumerOnlyHits() { assertNull(reduce.sortedTopDocs.collapseValues); } + private void assertFinalReduction(SearchRequest searchRequest) { + assertThat(reductions.size(), greaterThanOrEqualTo(1)); + //the last reduction step was the final one only if no cluster alias was provided with the search request + assertEquals(searchRequest.getLocalClusterAlias() == null, reductions.get(reductions.size() - 1)); + } + public void testNewSearchPhaseResults() { for (int i = 0; i < 10; i++) { int expectedNumResults = randomIntBetween(1, 10); @@ -529,7 +565,7 @@ public void testReduceTopNWithFromOffset() { public void testConsumerSortByField() { int expectedNumResults = randomIntBetween(1, 100); int bufferSize = randomIntBetween(2, 200); - SearchRequest request = new SearchRequest(); + SearchRequest request = randomBoolean() ? new SearchRequest() : new SearchRequest("remote"); int size = randomIntBetween(1, 10); request.setBatchedReduceSize(bufferSize); InitialSearchPhase.ArraySearchPhaseResults consumer = @@ -549,6 +585,7 @@ public void testConsumerSortByField() { consumer.consumeResult(result); } SearchPhaseController.ReducedQueryPhase reduce = consumer.reduce(); + assertFinalReduction(request); assertEquals(Math.min(expectedNumResults, size), reduce.sortedTopDocs.scoreDocs.length); assertEquals(expectedNumResults, reduce.totalHits.value); assertEquals(max.get(), ((FieldDoc)reduce.sortedTopDocs.scoreDocs[0]).fields[0]); @@ -563,7 +600,7 @@ public void testConsumerSortByField() { public void testConsumerFieldCollapsing() { int expectedNumResults = randomIntBetween(30, 100); int bufferSize = randomIntBetween(2, 200); - SearchRequest request = new SearchRequest(); + SearchRequest request = randomBoolean() ? new SearchRequest() : new SearchRequest("remote"); int size = randomIntBetween(5, 10); request.setBatchedReduceSize(bufferSize); InitialSearchPhase.ArraySearchPhaseResults consumer = @@ -585,6 +622,7 @@ public void testConsumerFieldCollapsing() { consumer.consumeResult(result); } SearchPhaseController.ReducedQueryPhase reduce = consumer.reduce(); + assertFinalReduction(request); assertEquals(3, reduce.sortedTopDocs.scoreDocs.length); assertEquals(expectedNumResults, reduce.totalHits.value); assertEquals(a, ((FieldDoc)reduce.sortedTopDocs.scoreDocs[0]).fields[0]); diff --git a/server/src/test/java/org/elasticsearch/search/SearchRequestTests.java b/server/src/test/java/org/elasticsearch/action/search/SearchRequestTests.java similarity index 72% rename from server/src/test/java/org/elasticsearch/search/SearchRequestTests.java rename to server/src/test/java/org/elasticsearch/action/search/SearchRequestTests.java index ced279a1babd6..91f6c0c09cd20 100644 --- a/server/src/test/java/org/elasticsearch/search/SearchRequestTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/SearchRequestTests.java @@ -17,27 +17,53 @@ * under the License. 
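// setup() above installs a reduce-context factory that records the finalReduce flag of every
// reduction; a condensed sketch of that hook (reductions is the test's CopyOnWriteArrayList):
searchPhaseController = new SearchPhaseController(finalReduce -> {
    reductions.add(finalReduce); // remember whether this reduction was marked as final
    return new InternalAggregation.ReduceContext(BigArrays.NON_RECYCLING_INSTANCE, null, finalReduce);
});
// assertFinalReduction then inspects the last recorded flag: the reduction is final exactly
// when the request carries no local cluster alias, presumably because an aliased request is
// part of a cross-cluster search whose final reduce happens on the coordinating cluster.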
*/ -package org.elasticsearch.search; +package org.elasticsearch.action.search; +import org.elasticsearch.Version; import org.elasticsearch.action.ActionRequestValidationException; -import org.elasticsearch.action.search.SearchRequest; -import org.elasticsearch.action.search.SearchType; import org.elasticsearch.action.support.IndicesOptions; +import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.ArrayUtils; import org.elasticsearch.index.query.QueryBuilders; +import org.elasticsearch.search.AbstractSearchTestCase; +import org.elasticsearch.search.RandomSearchRequestGenerator; +import org.elasticsearch.search.Scroll; import org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.search.rescore.QueryRescorerBuilder; import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.test.VersionUtils; import java.io.IOException; import java.util.ArrayList; +import java.util.Base64; import java.util.List; import static org.elasticsearch.test.EqualsHashCodeTestUtils.checkEqualsAndHashCode; +import static org.hamcrest.Matchers.allOf; +import static org.hamcrest.Matchers.greaterThanOrEqualTo; +import static org.hamcrest.Matchers.lessThanOrEqualTo; public class SearchRequestTests extends AbstractSearchTestCase { + @Override + protected SearchRequest createSearchRequest() throws IOException { + if (randomBoolean()) { + return super.createSearchRequest(); + } + //clusterAlias and absoluteStartMillis do not have public getters/setters hence we randomize them only in this test specifically. + SearchRequest searchRequest = new SearchRequest(randomAlphaOfLengthBetween(5, 10), randomNonNegativeLong()); + RandomSearchRequestGenerator.randomSearchRequest(searchRequest, this::createSearchSourceBuilder); + return searchRequest; + } + + public void testClusterAliasValidation() { + expectThrows(NullPointerException.class, () -> new SearchRequest(null, 0)); + expectThrows(IllegalArgumentException.class, () -> new SearchRequest("", -1)); + SearchRequest searchRequest = new SearchRequest("", 0); + assertNull(searchRequest.validate()); + } + public void testSerialization() throws Exception { SearchRequest searchRequest = createSearchRequest(); SearchRequest deserializedRequest = copyWriteable(searchRequest, namedWriteableRegistry, SearchRequest::new); @@ -46,6 +72,37 @@ public void testSerialization() throws Exception { assertNotSame(deserializedRequest, searchRequest); } + public void testClusterAliasSerialization() throws IOException { + SearchRequest searchRequest = createSearchRequest(); + Version version = VersionUtils.randomVersion(random()); + SearchRequest deserializedRequest = copyWriteable(searchRequest, namedWriteableRegistry, SearchRequest::new, version); + if (version.before(Version.V_6_7_0)) { + assertNull(deserializedRequest.getLocalClusterAlias()); + assertAbsoluteStartMillisIsCurrentTime(deserializedRequest); + } else { + assertEquals(searchRequest.getLocalClusterAlias(), deserializedRequest.getLocalClusterAlias()); + assertEquals(searchRequest.getOrCreateAbsoluteStartMillis(), deserializedRequest.getOrCreateAbsoluteStartMillis()); + } + } + + public void testReadFromPre6_7_0() throws IOException { + String msg = "AAEBBWluZGV4AAAAAQACAAAA/////w8AAAAAAAAA/////w8AAAAAAAACAAAAAAABAAMCBAUBAAKABACAAQIAAA=="; + try (StreamInput in = StreamInput.wrap(Base64.getDecoder().decode(msg))) { + in.setVersion(VersionUtils.randomVersionBetween(random(), Version.V_6_4_0, 
VersionUtils.getPreviousVersion(Version.V_6_7_0))); + SearchRequest searchRequest = new SearchRequest(in); + assertArrayEquals(new String[]{"index"}, searchRequest.indices()); + assertNull(searchRequest.getLocalClusterAlias()); + assertAbsoluteStartMillisIsCurrentTime(searchRequest); + } + } + + private static void assertAbsoluteStartMillisIsCurrentTime(SearchRequest searchRequest) { + long before = System.currentTimeMillis(); + long absoluteStartMillis = searchRequest.getOrCreateAbsoluteStartMillis(); + long after = System.currentTimeMillis(); + assertThat(absoluteStartMillis, allOf(greaterThanOrEqualTo(before), lessThanOrEqualTo(after))); + } + public void testIllegalArguments() { SearchRequest searchRequest = new SearchRequest(); assertNotNull(searchRequest.indices()); @@ -140,11 +197,11 @@ public void testCopyConstructor() throws IOException { } public void testEqualsAndHashcode() throws IOException { - checkEqualsAndHashCode(createSearchRequest(), SearchRequestTests::copyRequest, this::mutate); + checkEqualsAndHashCode(createSearchRequest(), SearchRequest::new, this::mutate); } private SearchRequest mutate(SearchRequest searchRequest) { - SearchRequest mutation = copyRequest(searchRequest); + SearchRequest mutation = new SearchRequest(searchRequest); List mutators = new ArrayList<>(); mutators.add(() -> mutation.indices(ArrayUtils.concat(searchRequest.indices(), new String[] { randomAlphaOfLength(10) }))); mutators.add(() -> mutation.indicesOptions(randomValueOtherThan(searchRequest.indicesOptions(), @@ -161,8 +218,4 @@ private SearchRequest mutate(SearchRequest searchRequest) { randomFrom(mutators).run(); return mutation; } - - private static SearchRequest copyRequest(SearchRequest searchRequest) { - return new SearchRequest(searchRequest); - } } diff --git a/server/src/test/java/org/elasticsearch/action/search/SearchShardIteratorTests.java b/server/src/test/java/org/elasticsearch/action/search/SearchShardIteratorTests.java new file mode 100644 index 0000000000000..09595650932c5 --- /dev/null +++ b/server/src/test/java/org/elasticsearch/action/search/SearchShardIteratorTests.java @@ -0,0 +1,67 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
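// The Base64 payload above is a SearchRequest captured from a pre-6.7.0 stream; a minimal
// sketch of the wire-compatibility behaviour the round-trip asserts (preCcsVersion is an
// illustrative name for any version older than 6.7.0):
Version preCcsVersion = VersionUtils.randomVersionBetween(random(), Version.V_6_4_0,
        VersionUtils.getPreviousVersion(Version.V_6_7_0));
SearchRequest copy = copyWriteable(searchRequest, namedWriteableRegistry, SearchRequest::new, preCcsVersion);
assertNull(copy.getLocalClusterAlias());  // the alias is not carried on old wire formats
// and getOrCreateAbsoluteStartMillis() falls back to the current wall-clock time rather
// than the originally serialized timestamp.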
+ */ + +package org.elasticsearch.action.search; + +import org.elasticsearch.action.OriginalIndices; +import org.elasticsearch.action.support.IndicesOptions; +import org.elasticsearch.index.shard.ShardId; +import org.elasticsearch.search.SearchShardTarget; +import org.elasticsearch.test.ESTestCase; + +import java.util.Collections; + +public class SearchShardIteratorTests extends ESTestCase { + + public void testShardId() { + ShardId shardId = new ShardId(randomAlphaOfLengthBetween(5, 10), randomAlphaOfLength(10), randomInt()); + SearchShardIterator searchShardIterator = new SearchShardIterator(null, shardId, Collections.emptyList(), OriginalIndices.NONE); + assertSame(shardId, searchShardIterator.shardId()); + } + + public void testGetOriginalIndices() { + ShardId shardId = new ShardId(randomAlphaOfLengthBetween(5, 10), randomAlphaOfLength(10), randomInt()); + OriginalIndices originalIndices = new OriginalIndices(new String[]{randomAlphaOfLengthBetween(3, 10)}, + IndicesOptions.fromOptions(randomBoolean(), randomBoolean(), randomBoolean(), randomBoolean())); + SearchShardIterator searchShardIterator = new SearchShardIterator(null, shardId, Collections.emptyList(), originalIndices); + assertSame(originalIndices, searchShardIterator.getOriginalIndices()); + } + + public void testGetClusterAlias() { + String clusterAlias = randomBoolean() ? null : randomAlphaOfLengthBetween(5, 10); + ShardId shardId = new ShardId(randomAlphaOfLengthBetween(5, 10), randomAlphaOfLength(10), randomInt()); + SearchShardIterator searchShardIterator = new SearchShardIterator(clusterAlias, shardId, Collections.emptyList(), + OriginalIndices.NONE); + assertEquals(clusterAlias, searchShardIterator.getClusterAlias()); + } + + public void testNewSearchShardTarget() { + String clusterAlias = randomBoolean() ? null : randomAlphaOfLengthBetween(5, 10); + ShardId shardId = new ShardId(randomAlphaOfLengthBetween(5, 10), randomAlphaOfLength(10), randomInt()); + OriginalIndices originalIndices = new OriginalIndices(new String[]{randomAlphaOfLengthBetween(3, 10)}, + IndicesOptions.fromOptions(randomBoolean(), randomBoolean(), randomBoolean(), randomBoolean())); + SearchShardIterator searchShardIterator = new SearchShardIterator(clusterAlias, shardId, Collections.emptyList(), originalIndices); + String nodeId = randomAlphaOfLengthBetween(3, 10); + SearchShardTarget searchShardTarget = searchShardIterator.newSearchShardTarget(nodeId); + assertEquals(clusterAlias, searchShardTarget.getClusterAlias()); + assertSame(shardId, searchShardTarget.getShardId()); + assertEquals(nodeId, searchShardTarget.getNodeId()); + assertSame(originalIndices, searchShardTarget.getOriginalIndices()); + } +} diff --git a/server/src/test/java/org/elasticsearch/action/search/TransportSearchActionSingleNodeTests.java b/server/src/test/java/org/elasticsearch/action/search/TransportSearchActionSingleNodeTests.java new file mode 100644 index 0000000000000..19bd76ec09da2 --- /dev/null +++ b/server/src/test/java/org/elasticsearch/action/search/TransportSearchActionSingleNodeTests.java @@ -0,0 +1,120 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.action.search; + +import org.elasticsearch.action.index.IndexRequest; +import org.elasticsearch.action.index.IndexResponse; +import org.elasticsearch.action.support.IndicesOptions; +import org.elasticsearch.action.support.WriteRequest; +import org.elasticsearch.index.query.RangeQueryBuilder; +import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.search.SearchHit; +import org.elasticsearch.search.builder.SearchSourceBuilder; +import org.elasticsearch.test.ESSingleNodeTestCase; + +public class TransportSearchActionSingleNodeTests extends ESSingleNodeTestCase { + + public void testLocalClusterAlias() { + long nowInMillis = System.currentTimeMillis(); + IndexRequest indexRequest = new IndexRequest("test"); + indexRequest.id("1"); + indexRequest.source("field", "value"); + indexRequest.setRefreshPolicy(WriteRequest.RefreshPolicy.WAIT_UNTIL); + IndexResponse indexResponse = client().index(indexRequest).actionGet(); + assertEquals(RestStatus.CREATED, indexResponse.status()); + + { + SearchRequest searchRequest = new SearchRequest("local", nowInMillis); + SearchResponse searchResponse = client().search(searchRequest).actionGet(); + assertEquals(1, searchResponse.getHits().getTotalHits().value); + SearchHit[] hits = searchResponse.getHits().getHits(); + assertEquals(1, hits.length); + SearchHit hit = hits[0]; + assertEquals("local", hit.getClusterAlias()); + assertEquals("test", hit.getIndex()); + assertEquals("1", hit.getId()); + } + { + SearchRequest searchRequest = new SearchRequest("", nowInMillis); + SearchResponse searchResponse = client().search(searchRequest).actionGet(); + assertEquals(1, searchResponse.getHits().getTotalHits().value); + SearchHit[] hits = searchResponse.getHits().getHits(); + assertEquals(1, hits.length); + SearchHit hit = hits[0]; + assertEquals("", hit.getClusterAlias()); + assertEquals("test", hit.getIndex()); + assertEquals("1", hit.getId()); + } + } + + public void testAbsoluteStartMillis() { + { + IndexRequest indexRequest = new IndexRequest("test-1970.01.01"); + indexRequest.id("1"); + indexRequest.source("date", "1970-01-01"); + indexRequest.setRefreshPolicy(WriteRequest.RefreshPolicy.WAIT_UNTIL); + IndexResponse indexResponse = client().index(indexRequest).actionGet(); + assertEquals(RestStatus.CREATED, indexResponse.status()); + } + { + IndexRequest indexRequest = new IndexRequest("test-1982.01.01"); + indexRequest.id("1"); + indexRequest.source("date", "1982-01-01"); + indexRequest.setRefreshPolicy(WriteRequest.RefreshPolicy.WAIT_UNTIL); + IndexResponse indexResponse = client().index(indexRequest).actionGet(); + assertEquals(RestStatus.CREATED, indexResponse.status()); + } + { + SearchRequest searchRequest = new SearchRequest(); + SearchResponse searchResponse = client().search(searchRequest).actionGet(); + assertEquals(2, searchResponse.getHits().getTotalHits().value); + } + { + SearchRequest searchRequest = new SearchRequest(""); + searchRequest.indicesOptions(IndicesOptions.fromOptions(true, true, true, true)); + SearchResponse searchResponse = client().search(searchRequest).actionGet(); + 
assertEquals(0, searchResponse.getTotalShards()); + } + { + SearchRequest searchRequest = new SearchRequest("", 0); + SearchResponse searchResponse = client().search(searchRequest).actionGet(); + assertEquals(2, searchResponse.getHits().getTotalHits().value); + } + { + SearchRequest searchRequest = new SearchRequest("", 0); + searchRequest.indices(""); + SearchResponse searchResponse = client().search(searchRequest).actionGet(); + assertEquals(1, searchResponse.getHits().getTotalHits().value); + assertEquals("test-1970.01.01", searchResponse.getHits().getHits()[0].getIndex()); + } + { + SearchRequest searchRequest = new SearchRequest("", 0); + SearchSourceBuilder sourceBuilder = new SearchSourceBuilder(); + RangeQueryBuilder rangeQuery = new RangeQueryBuilder("date"); + rangeQuery.gte("1970-01-01"); + rangeQuery.lt("1982-01-01"); + sourceBuilder.query(rangeQuery); + searchRequest.source(sourceBuilder); + SearchResponse searchResponse = client().search(searchRequest).actionGet(); + assertEquals(1, searchResponse.getHits().getTotalHits().value); + assertEquals("test-1970.01.01", searchResponse.getHits().getHits()[0].getIndex()); + } + } +} diff --git a/server/src/test/java/org/elasticsearch/action/search/TransportSearchActionTests.java b/server/src/test/java/org/elasticsearch/action/search/TransportSearchActionTests.java index 3e4747a4db757..16ff4389d7c4a 100644 --- a/server/src/test/java/org/elasticsearch/action/search/TransportSearchActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/TransportSearchActionTests.java @@ -20,6 +20,7 @@ package org.elasticsearch.action.search; import org.elasticsearch.Version; +import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.OriginalIndices; import org.elasticsearch.action.admin.cluster.shards.ClusterSearchShardsGroup; import org.elasticsearch.action.admin.cluster.shards.ClusterSearchShardsResponse; @@ -33,6 +34,7 @@ import org.elasticsearch.cluster.routing.TestShardRouting; import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.transport.TransportAddress; import org.elasticsearch.index.query.MatchAllQueryBuilder; import org.elasticsearch.index.query.TermsQueryBuilder; import org.elasticsearch.index.shard.ShardId; @@ -42,6 +44,10 @@ import org.elasticsearch.threadpool.TestThreadPool; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.RemoteClusterService; +import org.elasticsearch.transport.Transport; +import org.elasticsearch.transport.TransportException; +import org.elasticsearch.transport.TransportRequest; +import org.elasticsearch.transport.TransportRequestOptions; import org.elasticsearch.transport.TransportService; import java.util.ArrayList; @@ -50,8 +56,11 @@ import java.util.List; import java.util.Map; import java.util.concurrent.TimeUnit; +import java.util.function.BiFunction; +import java.util.function.Function; import static org.elasticsearch.cluster.routing.ShardRoutingState.STARTED; +import static org.hamcrest.CoreMatchers.startsWith; public class TransportSearchActionTests extends ESTestCase { @@ -109,8 +118,9 @@ public void testMergeShardsIterators() { remoteShardIterators.add(remoteShardIterator3); } + String localClusterAlias = randomBoolean() ? 
null : "local"; GroupShardsIterator searchShardIterators = TransportSearchAction.mergeShardsIterators(localShardsIterator, - localIndices, remoteShardIterators); + localIndices, localClusterAlias, remoteShardIterators); assertEquals(searchShardIterators.size(), 5); int i = 0; @@ -120,26 +130,31 @@ public void testMergeShardsIterators() { assertEquals("local_index", searchShardIterator.shardId().getIndexName()); assertEquals(0, searchShardIterator.shardId().getId()); assertSame(localIndices, searchShardIterator.getOriginalIndices()); + assertEquals(localClusterAlias, searchShardIterator.getClusterAlias()); break; case 1: assertEquals("local_index_2", searchShardIterator.shardId().getIndexName()); assertEquals(1, searchShardIterator.shardId().getId()); assertSame(localIndices, searchShardIterator.getOriginalIndices()); + assertEquals(localClusterAlias, searchShardIterator.getClusterAlias()); break; case 2: assertEquals("remote_index", searchShardIterator.shardId().getIndexName()); assertEquals(2, searchShardIterator.shardId().getId()); assertSame(remoteIndices, searchShardIterator.getOriginalIndices()); + assertEquals("remote", searchShardIterator.getClusterAlias()); break; case 3: assertEquals("remote_index_2", searchShardIterator.shardId().getIndexName()); assertEquals(3, searchShardIterator.shardId().getId()); assertSame(remoteIndices, searchShardIterator.getOriginalIndices()); + assertEquals("remote", searchShardIterator.getClusterAlias()); break; case 4: assertEquals("remote_index_3", searchShardIterator.shardId().getIndexName()); assertEquals(4, searchShardIterator.shardId().getId()); assertSame(remoteIndices2, searchShardIterator.getOriginalIndices()); + assertEquals("remote", searchShardIterator.getClusterAlias()); break; } } @@ -239,6 +254,56 @@ public void testProcessRemoteShards() { } } + public void testBuildConnectionLookup() { + Function localNodes = (nodeId) -> new DiscoveryNode("local-" + nodeId, + new TransportAddress(TransportAddress.META_ADDRESS, 1024), Version.CURRENT); + BiFunction remoteNodes = (clusterAlias, nodeId) -> new DiscoveryNode("remote-" + nodeId, + new TransportAddress(TransportAddress.META_ADDRESS, 2048), Version.CURRENT); + BiFunction nodeToConnection = (clusterAlias, node) -> new Transport.Connection() { + @Override + public DiscoveryNode getNode() { + return node; + } + + @Override + public void sendRequest(long requestId, String action, TransportRequest request, TransportRequestOptions options) + throws TransportException { + } + + @Override + public void addCloseListener(ActionListener listener) { + } + + @Override + public boolean isClosed() { + return false; + } + + @Override + public void close() { + } + }; + + { + BiFunction connectionLookup = TransportSearchAction.buildConnectionLookup( + null, localNodes, remoteNodes, nodeToConnection); + + Transport.Connection localConnection = connectionLookup.apply(null, randomAlphaOfLengthBetween(5, 10)); + assertThat(localConnection.getNode().getId(), startsWith("local-")); + Transport.Connection remoteConnection = connectionLookup.apply(randomAlphaOfLengthBetween(5, 10), + randomAlphaOfLengthBetween(5, 10)); + assertThat(remoteConnection.getNode().getId(), startsWith("remote-")); + } + { + String requestClusterAlias = randomAlphaOfLengthBetween(5, 10); + BiFunction connectionLookup = TransportSearchAction.buildConnectionLookup( + requestClusterAlias, localNodes, remoteNodes, nodeToConnection); + + Transport.Connection localConnection = connectionLookup.apply(requestClusterAlias, randomAlphaOfLengthBetween(5, 
10)); + assertThat(localConnection.getNode().getId(), startsWith("local-")); + } + } + public void testBuildClusters() { OriginalIndices localIndices = randomBoolean() ? null : randomOriginalIndices(); Map<String, OriginalIndices> remoteIndices = new HashMap<>(); diff --git a/server/src/test/java/org/elasticsearch/action/search/TransportSearchHelperTests.java b/server/src/test/java/org/elasticsearch/action/search/TransportSearchHelperTests.java index 49d7450096bb5..a666f45c34311 100644 --- a/server/src/test/java/org/elasticsearch/action/search/TransportSearchHelperTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/TransportSearchHelperTests.java @@ -45,7 +45,6 @@ public void testParseScrollId() throws IOException { array.setOnce(1, testSearchPhaseResult2); array.setOnce(2, testSearchPhaseResult3); - String scrollId = TransportSearchHelper.buildScrollId(array); ParsedScrollId parseScrollId = TransportSearchHelper.parseScrollId(scrollId); assertEquals(3, parseScrollId.getContext().length); diff --git a/server/src/test/java/org/elasticsearch/action/update/UpdateRequestTests.java b/server/src/test/java/org/elasticsearch/action/update/UpdateRequestTests.java index e33c7ca5324ab..4534fbe23e3c2 100644 --- a/server/src/test/java/org/elasticsearch/action/update/UpdateRequestTests.java +++ b/server/src/test/java/org/elasticsearch/action/update/UpdateRequestTests.java @@ -524,9 +524,18 @@ public void testValidate() { assertThat(validate, nullValue()); } + { + // Null types are defaulted to "_doc" + UpdateRequest request = new UpdateRequest("index", null, randomBoolean() ? "" : null); + request.doc("{}", XContentType.JSON); + ActionRequestValidationException validate = request.validate(); + assertThat(validate, not(nullValue())); + assertThat(validate.validationErrors(), hasItems("id is missing")); + } { - UpdateRequest request = new UpdateRequest("index", randomBoolean() ? "" : null, randomBoolean() ? "" : null); + // Non-null types are accepted but fail validation + UpdateRequest request = new UpdateRequest("index", "", randomBoolean() ? 
"" : null); request.doc("{}", XContentType.JSON); ActionRequestValidationException validate = request.validate(); diff --git a/server/src/test/java/org/elasticsearch/bootstrap/BootstrapChecksTests.java b/server/src/test/java/org/elasticsearch/bootstrap/BootstrapChecksTests.java index 4e625dcc2adef..be8387be87cfb 100644 --- a/server/src/test/java/org/elasticsearch/bootstrap/BootstrapChecksTests.java +++ b/server/src/test/java/org/elasticsearch/bootstrap/BootstrapChecksTests.java @@ -729,7 +729,6 @@ public void testDiscoveryConfiguredCheck() throws NodeValidationException { ensureChecksPass.accept(Settings.builder().putList(DiscoveryModule.DISCOVERY_HOSTS_PROVIDER_SETTING.getKey())); ensureChecksPass.accept(Settings.builder().putList(SettingsBasedHostsProvider.DISCOVERY_ZEN_PING_UNICAST_HOSTS_SETTING.getKey())); - ensureChecksPass.accept(Settings.builder().put(ClusterBootstrapService.INITIAL_MASTER_NODE_COUNT_SETTING.getKey(), 0)); ensureChecksPass.accept(Settings.builder().putList(ClusterBootstrapService.INITIAL_MASTER_NODES_SETTING.getKey())); } } diff --git a/server/src/test/java/org/elasticsearch/bootstrap/MaxMapCountCheckTests.java b/server/src/test/java/org/elasticsearch/bootstrap/MaxMapCountCheckTests.java index b67dd5db18a01..7557c565039c1 100644 --- a/server/src/test/java/org/elasticsearch/bootstrap/MaxMapCountCheckTests.java +++ b/server/src/test/java/org/elasticsearch/bootstrap/MaxMapCountCheckTests.java @@ -76,12 +76,12 @@ public void testMaxMapCountCheckBelowLimitAndMemoryMapAllowed() { /* * There are two ways that memory maps are allowed: * - by default - * - mmapfs is explicitly allowed - * We want to test that if mmapfs is allowed then the max map count check is enforced. + * - mmap is explicitly allowed + * We want to test that if mmap is allowed then the max map count check is enforced. 
*/ final List settingsThatAllowMemoryMap = new ArrayList<>(); settingsThatAllowMemoryMap.add(Settings.EMPTY); - settingsThatAllowMemoryMap.add(Settings.builder().put("node.store.allow_mmapfs", true).build()); + settingsThatAllowMemoryMap.add(Settings.builder().put("node.store.allow_mmap", true).build()); for (final Settings settingThatAllowsMemoryMap : settingsThatAllowMemoryMap) { assertFailure(check.check(createTestContext(settingThatAllowsMemoryMap, MetaData.EMPTY_META_DATA))); @@ -89,8 +89,8 @@ public void testMaxMapCountCheckBelowLimitAndMemoryMapAllowed() { } public void testMaxMapCountCheckNotEnforcedIfMemoryMapNotAllowed() { - // nothing should happen if current vm.max_map_count is under the limit but mmapfs is not allowed - final Settings settings = Settings.builder().put("node.store.allow_mmapfs", false).build(); + // nothing should happen if current vm.max_map_count is under the limit but mmap is not allowed + final Settings settings = Settings.builder().put("node.store.allow_mmap", false).build(); final BootstrapContext context = createTestContext(settings, MetaData.EMPTY_META_DATA); final BootstrapCheck.BootstrapCheckResult result = check.check(context); assertTrue(result.isSuccess()); diff --git a/server/src/test/java/org/elasticsearch/cluster/MinimumMasterNodesIT.java b/server/src/test/java/org/elasticsearch/cluster/MinimumMasterNodesIT.java index cec4e9b36fa04..ea00d0e648063 100644 --- a/server/src/test/java/org/elasticsearch/cluster/MinimumMasterNodesIT.java +++ b/server/src/test/java/org/elasticsearch/cluster/MinimumMasterNodesIT.java @@ -91,7 +91,7 @@ public void testSimpleMinimumMasterNodes() throws Exception { logger.info("--> should be blocked, no master..."); ClusterState state = client().admin().cluster().prepareState().setLocal(true).execute().actionGet().getState(); - assertThat(state.blocks().hasGlobalBlock(DiscoverySettings.NO_MASTER_BLOCK_ID), equalTo(true)); + assertThat(state.blocks().hasGlobalBlockWithId(DiscoverySettings.NO_MASTER_BLOCK_ID), equalTo(true)); assertThat(state.nodes().getSize(), equalTo(1)); // verify that we still see the local node in the cluster state logger.info("--> start second node, cluster should be formed"); @@ -102,9 +102,9 @@ public void testSimpleMinimumMasterNodes() throws Exception { assertThat(clusterHealthResponse.isTimedOut(), equalTo(false)); state = client().admin().cluster().prepareState().setLocal(true).execute().actionGet().getState(); - assertThat(state.blocks().hasGlobalBlock(DiscoverySettings.NO_MASTER_BLOCK_ID), equalTo(false)); + assertThat(state.blocks().hasGlobalBlockWithId(DiscoverySettings.NO_MASTER_BLOCK_ID), equalTo(false)); state = client().admin().cluster().prepareState().setLocal(true).execute().actionGet().getState(); - assertThat(state.blocks().hasGlobalBlock(DiscoverySettings.NO_MASTER_BLOCK_ID), equalTo(false)); + assertThat(state.blocks().hasGlobalBlockWithId(DiscoverySettings.NO_MASTER_BLOCK_ID), equalTo(false)); state = client().admin().cluster().prepareState().execute().actionGet().getState(); assertThat(state.nodes().getSize(), equalTo(2)); @@ -131,10 +131,10 @@ public void testSimpleMinimumMasterNodes() throws Exception { internalCluster().stopCurrentMasterNode(); awaitBusy(() -> { ClusterState clusterState = client().admin().cluster().prepareState().setLocal(true).execute().actionGet().getState(); - return clusterState.blocks().hasGlobalBlock(DiscoverySettings.NO_MASTER_BLOCK_ID); + return clusterState.blocks().hasGlobalBlockWithId(DiscoverySettings.NO_MASTER_BLOCK_ID); }); state = 
client().admin().cluster().prepareState().setLocal(true).execute().actionGet().getState(); - assertThat(state.blocks().hasGlobalBlock(DiscoverySettings.NO_MASTER_BLOCK_ID), equalTo(true)); + assertThat(state.blocks().hasGlobalBlockWithId(DiscoverySettings.NO_MASTER_BLOCK_ID), equalTo(true)); // verify that both nodes are still in the cluster state but there is no master assertThat(state.nodes().getSize(), equalTo(2)); assertThat(state.nodes().getMasterNode(), equalTo(null)); @@ -147,9 +147,9 @@ public void testSimpleMinimumMasterNodes() throws Exception { assertThat(clusterHealthResponse.isTimedOut(), equalTo(false)); state = client().admin().cluster().prepareState().setLocal(true).execute().actionGet().getState(); - assertThat(state.blocks().hasGlobalBlock(DiscoverySettings.NO_MASTER_BLOCK_ID), equalTo(false)); + assertThat(state.blocks().hasGlobalBlockWithId(DiscoverySettings.NO_MASTER_BLOCK_ID), equalTo(false)); state = client().admin().cluster().prepareState().setLocal(true).execute().actionGet().getState(); - assertThat(state.blocks().hasGlobalBlock(DiscoverySettings.NO_MASTER_BLOCK_ID), equalTo(false)); + assertThat(state.blocks().hasGlobalBlockWithId(DiscoverySettings.NO_MASTER_BLOCK_ID), equalTo(false)); state = client().admin().cluster().prepareState().execute().actionGet().getState(); assertThat(state.nodes().getSize(), equalTo(2)); @@ -165,7 +165,7 @@ public void testSimpleMinimumMasterNodes() throws Exception { internalCluster().stopRandomNonMasterNode(); assertBusy(() -> { ClusterState state1 = client().admin().cluster().prepareState().setLocal(true).execute().actionGet().getState(); - assertThat(state1.blocks().hasGlobalBlock(DiscoverySettings.NO_MASTER_BLOCK_ID), equalTo(true)); + assertThat(state1.blocks().hasGlobalBlockWithId(DiscoverySettings.NO_MASTER_BLOCK_ID), equalTo(true)); }); logger.info("--> starting the previous master node again..."); @@ -177,9 +177,9 @@ public void testSimpleMinimumMasterNodes() throws Exception { assertThat(clusterHealthResponse.isTimedOut(), equalTo(false)); state = client().admin().cluster().prepareState().setLocal(true).execute().actionGet().getState(); - assertThat(state.blocks().hasGlobalBlock(DiscoverySettings.NO_MASTER_BLOCK_ID), equalTo(false)); + assertThat(state.blocks().hasGlobalBlockWithId(DiscoverySettings.NO_MASTER_BLOCK_ID), equalTo(false)); state = client().admin().cluster().prepareState().setLocal(true).execute().actionGet().getState(); - assertThat(state.blocks().hasGlobalBlock(DiscoverySettings.NO_MASTER_BLOCK_ID), equalTo(false)); + assertThat(state.blocks().hasGlobalBlockWithId(DiscoverySettings.NO_MASTER_BLOCK_ID), equalTo(false)); state = client().admin().cluster().prepareState().execute().actionGet().getState(); assertThat(state.nodes().getSize(), equalTo(2)); @@ -209,7 +209,7 @@ public void testMultipleNodesShutdownNonMasterNodes() throws Exception { assertBusy(() -> { for (Client client : clients()) { ClusterState state1 = client.admin().cluster().prepareState().setLocal(true).execute().actionGet().getState(); - assertThat(state1.blocks().hasGlobalBlock(DiscoverySettings.NO_MASTER_BLOCK_ID), equalTo(true)); + assertThat(state1.blocks().hasGlobalBlockWithId(DiscoverySettings.NO_MASTER_BLOCK_ID), equalTo(true)); } }); @@ -303,7 +303,7 @@ public void testDynamicUpdateMinimumMasterNodes() throws Exception { private void assertNoMasterBlockOnAllNodes() throws InterruptedException { Predicate<Client> hasNoMasterBlock = client -> { ClusterState state = 
client.admin().cluster().prepareState().setLocal(true).execute().actionGet().getState(); - return state.blocks().hasGlobalBlock(DiscoverySettings.NO_MASTER_BLOCK_ID); + return state.blocks().hasGlobalBlockWithId(DiscoverySettings.NO_MASTER_BLOCK_ID); }; assertTrue(awaitBusy( () -> { diff --git a/server/src/test/java/org/elasticsearch/cluster/NoMasterNodeIT.java b/server/src/test/java/org/elasticsearch/cluster/NoMasterNodeIT.java index 00e2b84154a21..60c3bbee87a7a 100644 --- a/server/src/test/java/org/elasticsearch/cluster/NoMasterNodeIT.java +++ b/server/src/test/java/org/elasticsearch/cluster/NoMasterNodeIT.java @@ -28,24 +28,28 @@ import org.elasticsearch.client.Client; import org.elasticsearch.client.Requests; import org.elasticsearch.cluster.block.ClusterBlockException; -import org.elasticsearch.cluster.coordination.ClusterBootstrapService; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.discovery.DiscoverySettings; import org.elasticsearch.discovery.MasterNotDiscoveredException; -import org.elasticsearch.discovery.zen.ElectMasterService; +import org.elasticsearch.plugins.Plugin; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.script.Script; import org.elasticsearch.script.ScriptType; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.ESIntegTestCase.ClusterScope; import org.elasticsearch.test.ESIntegTestCase.Scope; -import org.elasticsearch.test.InternalTestCluster.RestartCallback; +import org.elasticsearch.test.disruption.NetworkDisruption; +import org.elasticsearch.test.disruption.NetworkDisruption.IsolateAllNodes; +import org.elasticsearch.test.disruption.NetworkDisruption.NetworkDisconnect; +import org.elasticsearch.test.transport.MockTransportService; -import java.util.Arrays; +import java.util.Collection; import java.util.Collections; +import java.util.HashSet; +import java.util.List; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertExists; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; @@ -53,7 +57,7 @@ import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThan; -@ClusterScope(scope = Scope.TEST, numDataNodes = 0, autoMinMasterNodes = false) +@ClusterScope(scope = Scope.TEST, numDataNodes = 0) public class NoMasterNodeIT extends ESIntegTestCase { @Override @@ -61,106 +65,105 @@ protected int numberOfReplicas() { return 2; } + @Override + protected Collection<Class<? extends Plugin>> nodePlugins() { + return Collections.singletonList(MockTransportService.TestPlugin.class); + } + public void testNoMasterActions() throws Exception { Settings settings = Settings.builder() .put(AutoCreateIndex.AUTO_CREATE_INDEX_SETTING.getKey(), true) - .put(ElectMasterService.DISCOVERY_ZEN_MINIMUM_MASTER_NODES_SETTING.getKey(), Integer.MAX_VALUE) .put(DiscoverySettings.NO_MASTER_BLOCK_SETTING.getKey(), "all") - .put(ClusterBootstrapService.INITIAL_MASTER_NODE_COUNT_SETTING.getKey(), 3) .build(); final TimeValue timeout = TimeValue.timeValueMillis(10); - internalCluster().startNodes(3, settings); + final List<String> nodes = internalCluster().startNodes(3, settings); createIndex("test"); client().admin().cluster().prepareHealth("test").setWaitForGreenStatus().execute().actionGet(); - internalCluster().stopRandomDataNode(); - - internalCluster().restartRandomDataNode(new RestartCallback() { - @Override 
- public Settings onNodeStopped(String nodeName) throws Exception { - - final Client remainingClient = client(Arrays.stream( - internalCluster().getNodeNames()).filter(n -> n.equals(nodeName) == false).findAny().get()); - - assertBusy(() -> { - ClusterState state = remainingClient.admin().cluster().prepareState().setLocal(true).execute().actionGet().getState(); - assertTrue(state.blocks().hasGlobalBlock(DiscoverySettings.NO_MASTER_BLOCK_ID)); - }); - - assertThrows(remainingClient.prepareGet("test", "type1", "1"), - ClusterBlockException.class, RestStatus.SERVICE_UNAVAILABLE - ); - - assertThrows(remainingClient.prepareGet("no_index", "type1", "1"), - ClusterBlockException.class, RestStatus.SERVICE_UNAVAILABLE - ); - - assertThrows(remainingClient.prepareMultiGet().add("test", "type1", "1"), - ClusterBlockException.class, RestStatus.SERVICE_UNAVAILABLE - ); - - assertThrows(remainingClient.prepareMultiGet().add("no_index", "type1", "1"), - ClusterBlockException.class, RestStatus.SERVICE_UNAVAILABLE - ); - - assertThrows(remainingClient.admin().indices().prepareAnalyze("test", "this is a test"), - ClusterBlockException.class, RestStatus.SERVICE_UNAVAILABLE - ); - - assertThrows(remainingClient.admin().indices().prepareAnalyze("no_index", "this is a test"), - ClusterBlockException.class, RestStatus.SERVICE_UNAVAILABLE - ); - - assertThrows(remainingClient.prepareSearch("test").setSize(0), - ClusterBlockException.class, RestStatus.SERVICE_UNAVAILABLE - ); - - assertThrows(remainingClient.prepareSearch("no_index").setSize(0), - ClusterBlockException.class, RestStatus.SERVICE_UNAVAILABLE - ); - - checkUpdateAction(false, timeout, - remainingClient.prepareUpdate("test", "type1", "1") - .setScript(new Script( - ScriptType.INLINE, Script.DEFAULT_SCRIPT_LANG, "test script", - Collections.emptyMap())).setTimeout(timeout)); - - checkUpdateAction(true, timeout, - remainingClient.prepareUpdate("no_index", "type1", "1") - .setScript(new Script( - ScriptType.INLINE, Script.DEFAULT_SCRIPT_LANG, "test script", - Collections.emptyMap())).setTimeout(timeout)); - - - checkWriteAction(remainingClient.prepareIndex("test", "type1", "1") - .setSource(XContentFactory.jsonBuilder().startObject().endObject()).setTimeout(timeout)); - - checkWriteAction(remainingClient.prepareIndex("no_index", "type1", "1") - .setSource(XContentFactory.jsonBuilder().startObject().endObject()).setTimeout(timeout)); - - BulkRequestBuilder bulkRequestBuilder = remainingClient.prepareBulk(); - bulkRequestBuilder.add(remainingClient.prepareIndex("test", "type1", "1") - .setSource(XContentFactory.jsonBuilder().startObject().endObject())); - bulkRequestBuilder.add(remainingClient.prepareIndex("test", "type1", "2") - .setSource(XContentFactory.jsonBuilder().startObject().endObject())); - bulkRequestBuilder.setTimeout(timeout); - checkWriteAction(bulkRequestBuilder); - - bulkRequestBuilder = remainingClient.prepareBulk(); - bulkRequestBuilder.add(remainingClient.prepareIndex("no_index", "type1", "1") - .setSource(XContentFactory.jsonBuilder().startObject().endObject())); - bulkRequestBuilder.add(remainingClient.prepareIndex("no_index", "type1", "2") - .setSource(XContentFactory.jsonBuilder().startObject().endObject())); - bulkRequestBuilder.setTimeout(timeout); - checkWriteAction(bulkRequestBuilder); - - return Settings.EMPTY; - } + + final NetworkDisruption disruptionScheme + = new NetworkDisruption(new IsolateAllNodes(new HashSet<>(nodes)), new NetworkDisconnect()); + internalCluster().setDisruptionScheme(disruptionScheme); + 
disruptionScheme.startDisrupting(); + + final Client clientToMasterlessNode = client(); + + assertBusy(() -> { + ClusterState state = clientToMasterlessNode.admin().cluster().prepareState().setLocal(true) + .execute().actionGet().getState(); + assertTrue(state.blocks().hasGlobalBlockWithId(DiscoverySettings.NO_MASTER_BLOCK_ID)); }); - internalCluster().startNode(settings); + assertThrows(clientToMasterlessNode.prepareGet("test", "type1", "1"), + ClusterBlockException.class, RestStatus.SERVICE_UNAVAILABLE + ); + + assertThrows(clientToMasterlessNode.prepareGet("no_index", "type1", "1"), + ClusterBlockException.class, RestStatus.SERVICE_UNAVAILABLE + ); + + assertThrows(clientToMasterlessNode.prepareMultiGet().add("test", "type1", "1"), + ClusterBlockException.class, RestStatus.SERVICE_UNAVAILABLE + ); + + assertThrows(clientToMasterlessNode.prepareMultiGet().add("no_index", "type1", "1"), + ClusterBlockException.class, RestStatus.SERVICE_UNAVAILABLE + ); + + assertThrows(clientToMasterlessNode.admin().indices().prepareAnalyze("test", "this is a test"), + ClusterBlockException.class, RestStatus.SERVICE_UNAVAILABLE + ); + + assertThrows(clientToMasterlessNode.admin().indices().prepareAnalyze("no_index", "this is a test"), + ClusterBlockException.class, RestStatus.SERVICE_UNAVAILABLE + ); + + assertThrows(clientToMasterlessNode.prepareSearch("test").setSize(0), + ClusterBlockException.class, RestStatus.SERVICE_UNAVAILABLE + ); + + assertThrows(clientToMasterlessNode.prepareSearch("no_index").setSize(0), + ClusterBlockException.class, RestStatus.SERVICE_UNAVAILABLE + ); + + checkUpdateAction(false, timeout, + clientToMasterlessNode.prepareUpdate("test", "type1", "1") + .setScript(new Script( + ScriptType.INLINE, Script.DEFAULT_SCRIPT_LANG, "test script", + Collections.emptyMap())).setTimeout(timeout)); + + checkUpdateAction(true, timeout, + clientToMasterlessNode.prepareUpdate("no_index", "type1", "1") + .setScript(new Script( + ScriptType.INLINE, Script.DEFAULT_SCRIPT_LANG, "test script", + Collections.emptyMap())).setTimeout(timeout)); + + + checkWriteAction(clientToMasterlessNode.prepareIndex("test", "type1", "1") + .setSource(XContentFactory.jsonBuilder().startObject().endObject()).setTimeout(timeout)); + + checkWriteAction(clientToMasterlessNode.prepareIndex("no_index", "type1", "1") + .setSource(XContentFactory.jsonBuilder().startObject().endObject()).setTimeout(timeout)); + + BulkRequestBuilder bulkRequestBuilder = clientToMasterlessNode.prepareBulk(); + bulkRequestBuilder.add(clientToMasterlessNode.prepareIndex("test", "type1", "1") + .setSource(XContentFactory.jsonBuilder().startObject().endObject())); + bulkRequestBuilder.add(clientToMasterlessNode.prepareIndex("test", "type1", "2") + .setSource(XContentFactory.jsonBuilder().startObject().endObject())); + bulkRequestBuilder.setTimeout(timeout); + checkWriteAction(bulkRequestBuilder); + + bulkRequestBuilder = clientToMasterlessNode.prepareBulk(); + bulkRequestBuilder.add(clientToMasterlessNode.prepareIndex("no_index", "type1", "1") + .setSource(XContentFactory.jsonBuilder().startObject().endObject())); + bulkRequestBuilder.add(clientToMasterlessNode.prepareIndex("no_index", "type1", "2") + .setSource(XContentFactory.jsonBuilder().startObject().endObject())); + bulkRequestBuilder.setTimeout(timeout); + checkWriteAction(bulkRequestBuilder); + + disruptionScheme.stopDisrupting(); client().admin().cluster().prepareHealth().setWaitForGreenStatus().setWaitForNodes("3").execute().actionGet(); } @@ -192,12 +195,10 @@ void 
checkWriteAction(ActionRequestBuilder builder) { public void testNoMasterActionsWriteMasterBlock() throws Exception { Settings settings = Settings.builder() .put(AutoCreateIndex.AUTO_CREATE_INDEX_SETTING.getKey(), false) - .put(ElectMasterService.DISCOVERY_ZEN_MINIMUM_MASTER_NODES_SETTING.getKey(), Integer.MAX_VALUE) .put(DiscoverySettings.NO_MASTER_BLOCK_SETTING.getKey(), "write") - .put(ClusterBootstrapService.INITIAL_MASTER_NODE_COUNT_SETTING.getKey(), 3) .build(); - internalCluster().startNodes(3, settings); + final List<String> nodes = internalCluster().startNodes(3, settings); prepareCreate("test1").setSettings( Settings.builder().put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1).put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 2)).get(); @@ -213,63 +214,57 @@ public void testNoMasterActionsWriteMasterBlock() throws Exception { ClusterStateResponse clusterState = client().admin().cluster().prepareState().get(); logger.info("Cluster state:\n{}", clusterState.getState()); - internalCluster().stopRandomDataNode(); - internalCluster().restartRandomDataNode(new RestartCallback() { - @Override - public Settings onNodeStopped(String nodeName) throws Exception { - - final Client remainingClient = client(Arrays.stream( - internalCluster().getNodeNames()).filter(n -> n.equals(nodeName) == false).findAny().get()); - - assertTrue(awaitBusy(() -> { - ClusterState state = remainingClient.admin().cluster().prepareState().setLocal(true).get().getState(); - return state.blocks().hasGlobalBlock(DiscoverySettings.NO_MASTER_BLOCK_ID); - } - )); - - GetResponse getResponse = remainingClient.prepareGet("test1", "type1", "1").get(); - assertExists(getResponse); - - SearchResponse countResponse = remainingClient.prepareSearch("test1").setAllowPartialSearchResults(true).setSize(0).get(); - assertHitCount(countResponse, 1L); - - logger.info("--> here 3"); - SearchResponse searchResponse = remainingClient.prepareSearch("test1").setAllowPartialSearchResults(true).get(); - assertHitCount(searchResponse, 1L); - - countResponse = remainingClient.prepareSearch("test2").setAllowPartialSearchResults(true).setSize(0).get(); - assertThat(countResponse.getTotalShards(), equalTo(3)); - assertThat(countResponse.getSuccessfulShards(), equalTo(1)); - - TimeValue timeout = TimeValue.timeValueMillis(200); - long now = System.currentTimeMillis(); - try { - remainingClient.prepareUpdate("test1", "type1", "1") - .setDoc(Requests.INDEX_CONTENT_TYPE, "field", "value2").setTimeout(timeout).get(); - fail("Expected ClusterBlockException"); - } catch (ClusterBlockException e) { - assertThat(System.currentTimeMillis() - now, greaterThan(timeout.millis() - 50)); - assertThat(e.status(), equalTo(RestStatus.SERVICE_UNAVAILABLE)); - } catch (Exception e) { - logger.info("unexpected", e); - throw e; - } - - try { - remainingClient.prepareIndex("test1", "type1", "1") - .setSource(XContentFactory.jsonBuilder().startObject().endObject()).setTimeout(timeout).get(); - fail("Expected ClusterBlockException"); - } catch (ClusterBlockException e) { - assertThat(e.status(), equalTo(RestStatus.SERVICE_UNAVAILABLE)); - } - - logger.info("finished assertions, restarting node [{}]", nodeName); - - return Settings.EMPTY; + final NetworkDisruption disruptionScheme + = new NetworkDisruption(new IsolateAllNodes(new HashSet<>(nodes)), new NetworkDisconnect()); + internalCluster().setDisruptionScheme(disruptionScheme); + disruptionScheme.startDisrupting(); + + final Client clientToMasterlessNode = client(); + + assertTrue(awaitBusy(() -> { + ClusterState state = 
clientToMasterlessNode.admin().cluster().prepareState().setLocal(true).get().getState(); + return state.blocks().hasGlobalBlockWithId(DiscoverySettings.NO_MASTER_BLOCK_ID); } - }); + )); + + GetResponse getResponse = clientToMasterlessNode.prepareGet("test1", "type1", "1").get(); + assertExists(getResponse); + + SearchResponse countResponse = clientToMasterlessNode.prepareSearch("test1").setAllowPartialSearchResults(true).setSize(0).get(); + assertHitCount(countResponse, 1L); + + logger.info("--> here 3"); + SearchResponse searchResponse = clientToMasterlessNode.prepareSearch("test1").setAllowPartialSearchResults(true).get(); + assertHitCount(searchResponse, 1L); + + countResponse = clientToMasterlessNode.prepareSearch("test2").setAllowPartialSearchResults(true).setSize(0).get(); + assertThat(countResponse.getTotalShards(), equalTo(3)); + assertThat(countResponse.getSuccessfulShards(), equalTo(1)); + + TimeValue timeout = TimeValue.timeValueMillis(200); + long now = System.currentTimeMillis(); + try { + clientToMasterlessNode.prepareUpdate("test1", "type1", "1") + .setDoc(Requests.INDEX_CONTENT_TYPE, "field", "value2").setTimeout(timeout).get(); + fail("Expected ClusterBlockException"); + } catch (ClusterBlockException e) { + assertThat(System.currentTimeMillis() - now, greaterThan(timeout.millis() - 50)); + assertThat(e.status(), equalTo(RestStatus.SERVICE_UNAVAILABLE)); + } catch (Exception e) { + logger.info("unexpected", e); + throw e; + } + + try { + clientToMasterlessNode.prepareIndex("test1", "type1", "1") + .setSource(XContentFactory.jsonBuilder().startObject().endObject()).setTimeout(timeout).get(); + fail("Expected ClusterBlockException"); + } catch (ClusterBlockException e) { + assertThat(e.status(), equalTo(RestStatus.SERVICE_UNAVAILABLE)); + } + + disruptionScheme.stopDisrupting(); - internalCluster().startNode(settings); client().admin().cluster().prepareHealth().setWaitForGreenStatus().setWaitForNodes("3").get(); } } diff --git a/server/src/test/java/org/elasticsearch/cluster/block/ClusterBlockTests.java b/server/src/test/java/org/elasticsearch/cluster/block/ClusterBlockTests.java index a84d160cf0c95..4cf7b6f9c6d3a 100644 --- a/server/src/test/java/org/elasticsearch/cluster/block/ClusterBlockTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/block/ClusterBlockTests.java @@ -20,36 +20,35 @@ package org.elasticsearch.cluster.block; import org.elasticsearch.Version; +import org.elasticsearch.common.UUIDs; import org.elasticsearch.common.collect.ImmutableOpenMap; import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.test.ESTestCase; +import java.util.Arrays; import java.util.Collections; -import java.util.EnumSet; +import java.util.List; +import static java.util.EnumSet.copyOf; +import static org.elasticsearch.test.VersionUtils.getPreviousVersion; import static org.elasticsearch.test.VersionUtils.randomVersion; +import static org.elasticsearch.test.VersionUtils.randomVersionBetween; import static org.hamcrest.CoreMatchers.endsWith; import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.CoreMatchers.not; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.isOneOf; +import static org.hamcrest.Matchers.nullValue; public class ClusterBlockTests extends ESTestCase { + public void testSerialization() throws Exception { - int iterations = randomIntBetween(10, 100); + int iterations = 
randomIntBetween(5, 20); for (int i = 0; i < iterations; i++) { - // Get a random version Version version = randomVersion(random()); - - // Get a random list of ClusterBlockLevels - EnumSet<ClusterBlockLevel> levels = EnumSet.noneOf(ClusterBlockLevel.class); - int nbLevels = randomIntBetween(1, ClusterBlockLevel.values().length); - for (int j = 0; j < nbLevels; j++) { - levels.add(randomFrom(ClusterBlockLevel.values())); - } - - ClusterBlock clusterBlock = new ClusterBlock(randomInt(), "cluster block #" + randomInt(), randomBoolean(), - randomBoolean(), false, randomFrom(RestStatus.values()), levels); + ClusterBlock clusterBlock = randomClusterBlock(version); BytesStreamOutput out = new BytesStreamOutput(); out.setVersion(version); @@ -59,37 +58,133 @@ public void testSerialization() throws Exception { in.setVersion(version); ClusterBlock result = ClusterBlock.readClusterBlock(in); - assertThat(result.id(), equalTo(clusterBlock.id())); - assertThat(result.status(), equalTo(clusterBlock.status())); - assertThat(result.description(), equalTo(clusterBlock.description())); - assertThat(result.retryable(), equalTo(clusterBlock.retryable())); - assertThat(result.disableStatePersistence(), equalTo(clusterBlock.disableStatePersistence())); - assertArrayEquals(result.levels().toArray(), clusterBlock.levels().toArray()); + assertClusterBlockEquals(clusterBlock, result); } } - public void testToStringDanglingComma() { - EnumSet<ClusterBlockLevel> levels = EnumSet.noneOf(ClusterBlockLevel.class); - int nbLevels = randomIntBetween(1, ClusterBlockLevel.values().length); - for (int j = 0; j < nbLevels; j++) { - levels.add(randomFrom(ClusterBlockLevel.values())); + public void testBwcSerialization() throws Exception { + for (int runs = 0; runs < randomIntBetween(5, 20); runs++) { + // Generate a random cluster block in version < 7.0.0 + final Version version = randomVersionBetween(random(), Version.V_6_0_0, getPreviousVersion(Version.V_7_0_0)); + final ClusterBlock expected = randomClusterBlock(version); + assertNull(expected.uuid()); + + // Serialize to node in current version + final BytesStreamOutput out = new BytesStreamOutput(); + expected.writeTo(out); + + // Deserialize and check the cluster block + final ClusterBlock actual = ClusterBlock.readClusterBlock(out.bytes().streamInput()); + assertClusterBlockEquals(expected, actual); } - ClusterBlock clusterBlock = new ClusterBlock(randomInt(), "cluster block #" + randomInt(), randomBoolean(), - randomBoolean(), false, randomFrom(RestStatus.values()), levels); + + for (int runs = 0; runs < randomIntBetween(5, 20); runs++) { + // Generate a random cluster block in current version + final ClusterBlock expected = randomClusterBlock(Version.CURRENT); + + // Serialize to node in version < 7.0.0 + final BytesStreamOutput out = new BytesStreamOutput(); + out.setVersion(randomVersionBetween(random(), Version.V_6_0_0, getPreviousVersion(Version.V_7_0_0))); + expected.writeTo(out); + + // Deserialize and check the cluster block + final StreamInput in = out.bytes().streamInput(); + in.setVersion(out.getVersion()); + final ClusterBlock actual = ClusterBlock.readClusterBlock(in); + + assertThat(actual.id(), equalTo(expected.id())); + assertThat(actual.status(), equalTo(expected.status())); + assertThat(actual.description(), equalTo(expected.description())); + assertThat(actual.retryable(), equalTo(expected.retryable())); + assertThat(actual.disableStatePersistence(), equalTo(expected.disableStatePersistence())); + assertArrayEquals(actual.levels().toArray(), expected.levels().toArray()); + } + } + + 
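+ // toString() renders the block levels as a comma-separated list; it must not end with a dangling comma.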
public void testToStringDanglingComma() { + final ClusterBlock clusterBlock = randomClusterBlock(); assertThat(clusterBlock.toString(), not(endsWith(","))); } public void testGlobalBlocksCheckedIfNoIndicesSpecified() { - EnumSet<ClusterBlockLevel> levels = EnumSet.noneOf(ClusterBlockLevel.class); - int nbLevels = randomIntBetween(1, ClusterBlockLevel.values().length); - for (int j = 0; j < nbLevels; j++) { - levels.add(randomFrom(ClusterBlockLevel.values())); - } - ClusterBlock globalBlock = new ClusterBlock(randomInt(), "cluster block #" + randomInt(), randomBoolean(), - randomBoolean(), false, randomFrom(RestStatus.values()), levels); + ClusterBlock globalBlock = randomClusterBlock(); ClusterBlocks clusterBlocks = new ClusterBlocks(Collections.singleton(globalBlock), ImmutableOpenMap.of()); ClusterBlockException exception = clusterBlocks.indicesBlockedException(randomFrom(globalBlock.levels()), new String[0]); assertNotNull(exception); assertEquals(exception.blocks(), Collections.singleton(globalBlock)); } + + public void testRemoveIndexBlockWithId() { + final ClusterBlocks.Builder builder = ClusterBlocks.builder(); + builder.addIndexBlock("index-1", + new ClusterBlock(1, "uuid", "", true, true, true, RestStatus.OK, copyOf(ClusterBlockLevel.ALL))); + builder.addIndexBlock("index-1", + new ClusterBlock(2, "uuid", "", true, true, true, RestStatus.OK, copyOf(ClusterBlockLevel.ALL))); + builder.addIndexBlock("index-1", + new ClusterBlock(3, "uuid", "", true, true, true, RestStatus.OK, copyOf(ClusterBlockLevel.ALL))); + builder.addIndexBlock("index-1", + new ClusterBlock(3, "other uuid", "", true, true, true, RestStatus.OK, copyOf(ClusterBlockLevel.ALL))); + + builder.addIndexBlock("index-2", + new ClusterBlock(3, "uuid3", "", true, true, true, RestStatus.OK, copyOf(ClusterBlockLevel.ALL))); + + ClusterBlocks clusterBlocks = builder.build(); + assertThat(clusterBlocks.indices().get("index-1").size(), equalTo(4)); + assertThat(clusterBlocks.indices().get("index-2").size(), equalTo(1)); + + builder.removeIndexBlockWithId("index-1", 3); + clusterBlocks = builder.build(); + + assertThat(clusterBlocks.indices().get("index-1").size(), equalTo(2)); + assertThat(clusterBlocks.hasIndexBlockWithId("index-1", 1), is(true)); + assertThat(clusterBlocks.hasIndexBlockWithId("index-1", 2), is(true)); + assertThat(clusterBlocks.indices().get("index-2").size(), equalTo(1)); + assertThat(clusterBlocks.hasIndexBlockWithId("index-2", 3), is(true)); + + builder.removeIndexBlockWithId("index-2", 3); + clusterBlocks = builder.build(); + + assertThat(clusterBlocks.indices().get("index-1").size(), equalTo(2)); + assertThat(clusterBlocks.hasIndexBlockWithId("index-1", 1), is(true)); + assertThat(clusterBlocks.hasIndexBlockWithId("index-1", 2), is(true)); + assertThat(clusterBlocks.indices().get("index-2"), nullValue()); + assertThat(clusterBlocks.hasIndexBlockWithId("index-2", 3), is(false)); + } + + public void testGetIndexBlockWithId() { + final int blockId = randomInt(); + final ClusterBlock[] clusterBlocks = new ClusterBlock[randomIntBetween(1, 5)]; + + final ClusterBlocks.Builder builder = ClusterBlocks.builder(); + for (int i = 0; i < clusterBlocks.length; i++) { + clusterBlocks[i] = new ClusterBlock(blockId, "uuid" + i, "", true, true, true, RestStatus.OK, copyOf(ClusterBlockLevel.ALL)); + builder.addIndexBlock("index", clusterBlocks[i]); + } + + assertThat(builder.build().indices().get("index").size(), equalTo(clusterBlocks.length)); + assertThat(builder.build().getIndexBlockWithId("index", blockId), isOneOf(clusterBlocks)); + 
assertThat(builder.build().getIndexBlockWithId("index", randomValueOtherThan(blockId, ESTestCase::randomInt)), nullValue()); + } + + private ClusterBlock randomClusterBlock() { + return randomClusterBlock(randomVersion(random())); + } + + private ClusterBlock randomClusterBlock(final Version version) { + final String uuid = (version.onOrAfter(Version.V_7_0_0) && randomBoolean()) ? UUIDs.randomBase64UUID() : null; + final List<ClusterBlockLevel> levels = Arrays.asList(ClusterBlockLevel.values()); + return new ClusterBlock(randomInt(), uuid, "cluster block #" + randomInt(), randomBoolean(), randomBoolean(), randomBoolean(), + randomFrom(RestStatus.values()), copyOf(randomSubsetOf(randomIntBetween(1, levels.size()), levels))); + } + + private void assertClusterBlockEquals(final ClusterBlock expected, final ClusterBlock actual) { + assertEquals(expected, actual); + assertThat(actual.id(), equalTo(expected.id())); + assertThat(actual.uuid(), equalTo(expected.uuid())); + assertThat(actual.status(), equalTo(expected.status())); + assertThat(actual.description(), equalTo(expected.description())); + assertThat(actual.retryable(), equalTo(expected.retryable())); + assertThat(actual.disableStatePersistence(), equalTo(expected.disableStatePersistence())); + assertArrayEquals(actual.levels().toArray(), expected.levels().toArray()); + } } diff --git a/server/src/test/java/org/elasticsearch/cluster/coordination/ClusterBootstrapServiceTests.java b/server/src/test/java/org/elasticsearch/cluster/coordination/ClusterBootstrapServiceTests.java index 618f24c8e3c8c..542247c058861 100644 --- a/server/src/test/java/org/elasticsearch/cluster/coordination/ClusterBootstrapServiceTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/coordination/ClusterBootstrapServiceTests.java @@ -51,7 +51,6 @@ import static java.util.Collections.emptySet; import static java.util.Collections.singleton; import static org.elasticsearch.cluster.coordination.ClusterBootstrapService.INITIAL_MASTER_NODES_SETTING; -import static org.elasticsearch.cluster.coordination.ClusterBootstrapService.INITIAL_MASTER_NODE_COUNT_SETTING; import static org.elasticsearch.common.settings.Settings.builder; import static org.elasticsearch.discovery.DiscoveryModule.DISCOVERY_HOSTS_PROVIDER_SETTING; import static org.elasticsearch.discovery.zen.SettingsBasedHostsProvider.DISCOVERY_ZEN_PING_UNICAST_HOSTS_SETTING; @@ -83,17 +82,9 @@ protected void onSendRequest(long requestId, String action, TransportRequest req transportService = transport.createTransportService(Settings.EMPTY, deterministicTaskQueue.getThreadPool(), TransportService.NOOP_TRANSPORT_INTERCEPTOR, boundTransportAddress -> localNode, null, emptySet()); - clusterBootstrapService = new ClusterBootstrapService(builder().put(INITIAL_MASTER_NODE_COUNT_SETTING.getKey(), 3).build(), + clusterBootstrapService = new ClusterBootstrapService(Settings.builder().putList(INITIAL_MASTER_NODES_SETTING.getKey(), + localNode.getName(), otherNode1.getName(), otherNode2.getName()).build(), transportService); - - final Settings settings; - if (randomBoolean()) { - settings = Settings.builder().put(INITIAL_MASTER_NODE_COUNT_SETTING.getKey(), 3).build(); - } else { - settings = Settings.builder() - .putList(INITIAL_MASTER_NODES_SETTING.getKey(), localNode.getName(), otherNode1.getName(), otherNode2.getName()).build(); - } - clusterBootstrapService = new ClusterBootstrapService(settings, transportService); } private DiscoveryNode newDiscoveryNode(String nodeName) { @@ -126,10 +117,6 @@ public void 
testDoesNothingByDefaultIfUnicastHostsConfigured() { testConfiguredIfSettingSet(builder().putList(DISCOVERY_ZEN_PING_UNICAST_HOSTS_SETTING.getKey())); } - public void testDoesNothingByDefaultIfMasterNodeCountConfigured() { - testConfiguredIfSettingSet(builder().put(INITIAL_MASTER_NODE_COUNT_SETTING.getKey(), 0)); - } - public void testDoesNothingByDefaultIfMasterNodesConfigured() { testConfiguredIfSettingSet(builder().putList(INITIAL_MASTER_NODES_SETTING.getKey())); } diff --git a/server/src/test/java/org/elasticsearch/cluster/coordination/ClusterFormationFailureHelperTests.java b/server/src/test/java/org/elasticsearch/cluster/coordination/ClusterFormationFailureHelperTests.java index f17e80c2a16aa..48e9ba42933df 100644 --- a/server/src/test/java/org/elasticsearch/cluster/coordination/ClusterFormationFailureHelperTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/coordination/ClusterFormationFailureHelperTests.java @@ -39,7 +39,6 @@ import static java.util.Collections.emptySet; import static java.util.Collections.singletonList; import static org.elasticsearch.cluster.coordination.ClusterBootstrapService.INITIAL_MASTER_NODES_SETTING; -import static org.elasticsearch.cluster.coordination.ClusterBootstrapService.INITIAL_MASTER_NODE_COUNT_SETTING; import static org.elasticsearch.node.Node.NODE_NAME_SETTING; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; @@ -172,31 +171,11 @@ public void testDescriptionBeforeBootstrapping() { "[cluster.initial_master_nodes] is empty on this node: have discovered [" + otherNode + "]; " + "discovery will continue using [] from hosts providers and [" + localNode + "] from last-known cluster state")); - assertThat(new ClusterFormationState(Settings.builder().put(INITIAL_MASTER_NODE_COUNT_SETTING.getKey(), 2).build(), - clusterState, emptyList(), emptyList()).getDescription(), - is("master not discovered yet, this node has not previously joined a bootstrapped (v7+) cluster, and " + - "this node must discover at least [2] master-eligible nodes to bootstrap a cluster: have discovered []; " + - "discovery will continue using [] from hosts providers and [" + localNode + "] from last-known cluster state")); - assertThat(new ClusterFormationState(Settings.builder().putList(INITIAL_MASTER_NODES_SETTING.getKey(), "other").build(), clusterState, emptyList(), emptyList()).getDescription(), is("master not discovered yet, this node has not previously joined a bootstrapped (v7+) cluster, and " + "this node must discover master-eligible nodes [other] to bootstrap a cluster: have discovered []; " + "discovery will continue using [] from hosts providers and [" + localNode + "] from last-known cluster state")); - - assertThat(new ClusterFormationState(Settings.builder().putList(INITIAL_MASTER_NODES_SETTING.getKey(), "other") - .put(INITIAL_MASTER_NODE_COUNT_SETTING.getKey(), 1).build(), - clusterState, emptyList(), emptyList()).getDescription(), - is("master not discovered yet, this node has not previously joined a bootstrapped (v7+) cluster, and " + - "this node must discover master-eligible nodes [other] to bootstrap a cluster: have discovered []; " + - "discovery will continue using [] from hosts providers and [" + localNode + "] from last-known cluster state")); - - assertThat(new ClusterFormationState(Settings.builder().putList(INITIAL_MASTER_NODES_SETTING.getKey(), "other") - .put(INITIAL_MASTER_NODE_COUNT_SETTING.getKey(), 2).build(), - clusterState, emptyList(), emptyList()).getDescription(), - is("master not discovered yet, 
this node has not previously joined a bootstrapped (v7+) cluster, and " + - "this node must discover at least [2] master-eligible nodes, including [other], to bootstrap a cluster: have discovered " + - "[]; discovery will continue using [] from hosts providers and [" + localNode + "] from last-known cluster state")); } private static VotingConfiguration config(String[] nodeIds) { diff --git a/server/src/test/java/org/elasticsearch/cluster/coordination/CoordinationStateTests.java b/server/src/test/java/org/elasticsearch/cluster/coordination/CoordinationStateTests.java index ab2ad25e72152..afe2e065a875e 100644 --- a/server/src/test/java/org/elasticsearch/cluster/coordination/CoordinationStateTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/coordination/CoordinationStateTests.java @@ -94,7 +94,7 @@ public static DiscoveryNode createNode(String id) { public void testSetInitialState() { VotingConfiguration initialConfig = new VotingConfiguration(Collections.singleton(node1.getId())); - ClusterState state1 = clusterState(0L, 1L, node1, initialConfig, initialConfig, 42L); + ClusterState state1 = clusterState(0L, 0L, node1, initialConfig, initialConfig, 42L); assertTrue(state1.getLastAcceptedConfiguration().hasQuorum(Collections.singleton(node1.getId()))); assertTrue(state1.getLastCommittedConfiguration().hasQuorum(Collections.singleton(node1.getId()))); cs1.setInitialState(state1); @@ -103,7 +103,7 @@ public void testSetInitialState() { public void testSetInitialStateWhenAlreadySet() { VotingConfiguration initialConfig = new VotingConfiguration(Collections.singleton(node1.getId())); - ClusterState state1 = clusterState(0L, 1L, node1, initialConfig, initialConfig, 42L); + ClusterState state1 = clusterState(0L, 0L, node1, initialConfig, initialConfig, 42L); assertTrue(state1.getLastAcceptedConfiguration().hasQuorum(Collections.singleton(node1.getId()))); assertTrue(state1.getLastCommittedConfiguration().hasQuorum(Collections.singleton(node1.getId()))); cs1.setInitialState(state1); @@ -129,7 +129,7 @@ public void testStartJoinBeforeBootstrap() { public void testStartJoinAfterBootstrap() { VotingConfiguration initialConfig = new VotingConfiguration(Collections.singleton(node1.getId())); - ClusterState state1 = clusterState(0L, 1L, node1, initialConfig, initialConfig, 42L); + ClusterState state1 = clusterState(0L, 0L, node1, initialConfig, initialConfig, 42L); assertTrue(state1.getLastAcceptedConfiguration().hasQuorum(Collections.singleton(node1.getId()))); assertTrue(state1.getLastCommittedConfiguration().hasQuorum(Collections.singleton(node1.getId()))); cs1.setInitialState(state1); @@ -157,7 +157,7 @@ public void testJoinBeforeBootstrap() { StartJoinRequest startJoinRequest1 = new StartJoinRequest(node1, randomLongBetween(1, 5)); Join v1 = cs1.handleStartJoin(startJoinRequest1); assertThat(expectThrows(CoordinationStateRejectedException.class, () -> cs1.handleJoin(v1)).getMessage(), - containsString("initial configuration not set")); + containsString("this node has not received its initial configuration yet")); } public void testJoinWithNoStartJoinAfterReboot() { @@ -178,7 +178,7 @@ public void testJoinWithWrongTarget() { public void testJoinWithBadCurrentTerm() { VotingConfiguration initialConfig = new VotingConfiguration(Collections.singleton(node1.getId())); - ClusterState state1 = clusterState(0L, 1L, node1, initialConfig, initialConfig, 42L); + ClusterState state1 = clusterState(0L, 0L, node1, initialConfig, initialConfig, 42L); cs1.setInitialState(state1); StartJoinRequest 
startJoinRequest1 = new StartJoinRequest(node2, randomLongBetween(1, 5)); @@ -191,7 +191,7 @@ public void testJoinWithBadCurrentTerm() { public void testJoinWithHigherAcceptedTerm() { VotingConfiguration initialConfig = new VotingConfiguration(Collections.singleton(node1.getId())); - ClusterState state1 = clusterState(0L, 1L, node1, initialConfig, initialConfig, 42L); + ClusterState state1 = clusterState(0L, 0L, node1, initialConfig, initialConfig, 42L); cs1.setInitialState(state1); StartJoinRequest startJoinRequest1 = new StartJoinRequest(node2, randomLongBetween(1, 5)); @@ -209,7 +209,7 @@ public void testJoinWithHigherAcceptedTerm() { public void testJoinWithSameAcceptedTermButHigherVersion() { VotingConfiguration initialConfig = new VotingConfiguration(Collections.singleton(node1.getId())); - ClusterState state1 = clusterState(0L, 1L, node1, initialConfig, initialConfig, 42L); + ClusterState state1 = clusterState(0L, 0L, node1, initialConfig, initialConfig, 42L); cs1.setInitialState(state1); StartJoinRequest startJoinRequest1 = new StartJoinRequest(node2, randomLongBetween(1, 5)); @@ -227,7 +227,7 @@ public void testJoinWithSameAcceptedTermButHigherVersion() { public void testJoinWithLowerLastAcceptedTermWinsElection() { VotingConfiguration initialConfig = new VotingConfiguration(Collections.singleton(node1.getId())); - ClusterState state1 = clusterState(0L, 1L, node1, initialConfig, initialConfig, 42L); + ClusterState state1 = clusterState(0L, 0L, node1, initialConfig, initialConfig, 42L); cs1.setInitialState(state1); StartJoinRequest startJoinRequest1 = new StartJoinRequest(node2, randomLongBetween(1, 5)); @@ -248,7 +248,7 @@ public void testJoinWithLowerLastAcceptedTermWinsElection() { public void testJoinWithSameLastAcceptedTermButLowerOrSameVersionWinsElection() { VotingConfiguration initialConfig = new VotingConfiguration(Collections.singleton(node1.getId())); - ClusterState state1 = clusterState(0L, 1L, node1, initialConfig, initialConfig, 42L); + ClusterState state1 = clusterState(0L, 0L, node1, initialConfig, initialConfig, 42L); cs1.setInitialState(state1); StartJoinRequest startJoinRequest1 = new StartJoinRequest(node2, randomLongBetween(1, 5)); @@ -269,7 +269,7 @@ public void testJoinWithSameLastAcceptedTermButLowerOrSameVersionWinsElection() public void testJoinDoesNotWinElection() { VotingConfiguration initialConfig = new VotingConfiguration(Collections.singleton(node1.getId())); - ClusterState state1 = clusterState(0L, 1L, node1, initialConfig, initialConfig, 42L); + ClusterState state1 = clusterState(0L, 0L, node1, initialConfig, initialConfig, 42L); cs1.setInitialState(state1); StartJoinRequest startJoinRequest1 = new StartJoinRequest(node2, randomLongBetween(1, 5)); @@ -289,7 +289,7 @@ public void testJoinDoesNotWinElection() { public void testJoinDoesNotWinElectionWhenOnlyCommittedConfigQuorum() { VotingConfiguration configNode1 = new VotingConfiguration(Collections.singleton(node1.getId())); VotingConfiguration configNode2 = new VotingConfiguration(Collections.singleton(node2.getId())); - ClusterState state1 = clusterState(0L, 1L, node1, configNode1, configNode2, 42L); + ClusterState state1 = clusterState(0L, 0L, node1, configNode1, configNode2, 42L); cs1.setInitialState(state1); StartJoinRequest startJoinRequest = new StartJoinRequest(node1, randomLongBetween(1, 5)); @@ -303,7 +303,7 @@ public void testJoinDoesNotWinElectionWhenOnlyCommittedConfigQuorum() { public void testJoinDoesNotWinElectionWhenOnlyLastAcceptedConfigQuorum() { VotingConfiguration configNode1 = 
new VotingConfiguration(Collections.singleton(node1.getId())); VotingConfiguration configNode2 = new VotingConfiguration(Collections.singleton(node2.getId())); - ClusterState state1 = clusterState(0L, 1L, node1, configNode2, configNode1, 42L); + ClusterState state1 = clusterState(0L, 0L, node1, configNode2, configNode1, 42L); cs1.setInitialState(state1); StartJoinRequest startJoinRequest = new StartJoinRequest(node1, randomLongBetween(1, 5)); @@ -316,7 +316,7 @@ public void testJoinDoesNotWinElectionWhenOnlyLastAcceptedConfigQuorum() { public void testHandleClientValue() { VotingConfiguration initialConfig = new VotingConfiguration(Collections.singleton(node1.getId())); - ClusterState state1 = clusterState(0L, 1L, node1, initialConfig, initialConfig, 42L); + ClusterState state1 = clusterState(0L, 0L, node1, initialConfig, initialConfig, 42L); cs1.setInitialState(state1); StartJoinRequest startJoinRequest1 = new StartJoinRequest(node1, randomLongBetween(1, 5)); Join v1 = cs1.handleStartJoin(startJoinRequest1); @@ -339,7 +339,7 @@ public void testHandleClientValue() { public void testHandleClientValueWhenElectionNotWon() { VotingConfiguration initialConfig = new VotingConfiguration(Collections.singleton(node1.getId())); - ClusterState state1 = clusterState(0L, 1L, node1, initialConfig, initialConfig, 42L); + ClusterState state1 = clusterState(0L, 0L, node1, initialConfig, initialConfig, 42L); if (randomBoolean()) { cs1.setInitialState(state1); } @@ -349,7 +349,7 @@ public void testHandleClientValueWhenElectionNotWon() { public void testHandleClientValueDuringOngoingPublication() { VotingConfiguration initialConfig = new VotingConfiguration(Collections.singleton(node1.getId())); - ClusterState state1 = clusterState(0L, 1L, node1, initialConfig, initialConfig, 42L); + ClusterState state1 = clusterState(0L, 0L, node1, initialConfig, initialConfig, 42L); cs1.setInitialState(state1); StartJoinRequest startJoinRequest1 = new StartJoinRequest(node1, randomLongBetween(1, 5)); Join v1 = cs1.handleStartJoin(startJoinRequest1); @@ -366,7 +366,7 @@ public void testHandleClientValueDuringOngoingPublication() { public void testHandleClientValueWithBadTerm() { VotingConfiguration initialConfig = new VotingConfiguration(Collections.singleton(node1.getId())); - ClusterState state1 = clusterState(0L, 1L, node1, initialConfig, initialConfig, 42L); + ClusterState state1 = clusterState(0L, 0L, node1, initialConfig, initialConfig, 42L); cs1.setInitialState(state1); StartJoinRequest startJoinRequest1 = new StartJoinRequest(node1, randomLongBetween(3, 5)); Join v1 = cs1.handleStartJoin(startJoinRequest1); @@ -383,21 +383,21 @@ public void testHandleClientValueWithBadTerm() { public void testHandleClientValueWithOldVersion() { VotingConfiguration initialConfig = new VotingConfiguration(Collections.singleton(node1.getId())); - ClusterState state1 = clusterState(0L, 1L, node1, initialConfig, initialConfig, 42L); + ClusterState state1 = clusterState(0L, 0L, node1, initialConfig, initialConfig, 42L); cs1.setInitialState(state1); StartJoinRequest startJoinRequest1 = new StartJoinRequest(node1, randomLongBetween(1, 5)); Join v1 = cs1.handleStartJoin(startJoinRequest1); assertTrue(cs1.handleJoin(v1)); assertTrue(cs1.electionWon()); - ClusterState state2 = clusterState(startJoinRequest1.getTerm(), 1L, node1, initialConfig, initialConfig, 42L); + ClusterState state2 = clusterState(startJoinRequest1.getTerm(), 0L, node1, initialConfig, initialConfig, 42L); assertThat(expectThrows(CoordinationStateRejectedException.class, () 
-> cs1.handleClientValue(state2)).getMessage(), containsString("lower or equal to last published version")); } public void testHandleClientValueWithDifferentReconfigurationWhileAlreadyReconfiguring() { VotingConfiguration initialConfig = new VotingConfiguration(Collections.singleton(node1.getId())); - ClusterState state1 = clusterState(0L, 1L, node1, initialConfig, initialConfig, 42L); + ClusterState state1 = clusterState(0L, 0L, node1, initialConfig, initialConfig, 42L); cs1.setInitialState(state1); StartJoinRequest startJoinRequest1 = new StartJoinRequest(node1, randomLongBetween(1, 5)); Join v1 = cs1.handleStartJoin(startJoinRequest1); @@ -418,7 +418,7 @@ public void testHandleClientValueWithDifferentReconfigurationWhileAlreadyReconfi public void testHandleClientValueWithSameReconfigurationWhileAlreadyReconfiguring() { VotingConfiguration initialConfig = new VotingConfiguration(Collections.singleton(node1.getId())); - ClusterState state1 = clusterState(0L, 1L, node1, initialConfig, initialConfig, 42L); + ClusterState state1 = clusterState(0L, 0L, node1, initialConfig, initialConfig, 42L); cs1.setInitialState(state1); StartJoinRequest startJoinRequest1 = new StartJoinRequest(node1, randomLongBetween(1, 5)); Join v1 = cs1.handleStartJoin(startJoinRequest1); @@ -438,7 +438,7 @@ public void testHandleClientValueWithSameReconfigurationWhileAlreadyReconfigurin public void testHandleClientValueWithIllegalCommittedConfigurationChange() { assumeTrue("test only works with assertions enabled", Assertions.ENABLED); VotingConfiguration initialConfig = new VotingConfiguration(Collections.singleton(node1.getId())); - ClusterState state1 = clusterState(0L, 1L, node1, initialConfig, initialConfig, 42L); + ClusterState state1 = clusterState(0L, 0L, node1, initialConfig, initialConfig, 42L); cs1.setInitialState(state1); StartJoinRequest startJoinRequest1 = new StartJoinRequest(node1, randomLongBetween(1, 5)); Join v1 = cs1.handleStartJoin(startJoinRequest1); @@ -455,7 +455,7 @@ public void testHandleClientValueWithIllegalCommittedConfigurationChange() { public void testHandleClientValueWithConfigurationChangeButNoJoinQuorum() { VotingConfiguration initialConfig = new VotingConfiguration(Collections.singleton(node1.getId())); - ClusterState state1 = clusterState(0L, 1L, node1, initialConfig, initialConfig, 42L); + ClusterState state1 = clusterState(0L, 0L, node1, initialConfig, initialConfig, 42L); cs1.setInitialState(state1); StartJoinRequest startJoinRequest1 = new StartJoinRequest(node1, randomLongBetween(1, 5)); Join v1 = cs1.handleStartJoin(startJoinRequest1); @@ -470,7 +470,7 @@ public void testHandleClientValueWithConfigurationChangeButNoJoinQuorum() { public void testHandlePublishRequest() { VotingConfiguration initialConfig = new VotingConfiguration(Collections.singleton(node1.getId())); - ClusterState state1 = clusterState(0L, 1L, node1, initialConfig, initialConfig, 42L); + ClusterState state1 = clusterState(0L, 0L, node1, initialConfig, initialConfig, 42L); cs1.setInitialState(state1); StartJoinRequest startJoinRequest1 = new StartJoinRequest(node1, randomLongBetween(1, 5)); Join v1 = cs1.handleStartJoin(startJoinRequest1); @@ -491,7 +491,7 @@ public void testHandlePublishRequest() { public void testHandlePublishRequestWithBadTerm() { VotingConfiguration initialConfig = new VotingConfiguration(Collections.singleton(node1.getId())); - ClusterState state1 = clusterState(0L, 1L, node1, initialConfig, initialConfig, 42L); + ClusterState state1 = clusterState(0L, 0L, node1, initialConfig, 
initialConfig, 42L); cs1.setInitialState(state1); StartJoinRequest startJoinRequest1 = new StartJoinRequest(node1, randomLongBetween(1, 5)); Join v1 = cs1.handleStartJoin(startJoinRequest1); @@ -511,7 +511,7 @@ public void testHandlePublishRequestWithBadTerm() { // scenario when handling a publish request from a master that we already received a newer state from public void testHandlePublishRequestWithSameTermButOlderOrSameVersion() { VotingConfiguration initialConfig = new VotingConfiguration(Collections.singleton(node1.getId())); - ClusterState state1 = clusterState(0L, 1L, node1, initialConfig, initialConfig, 42L); + ClusterState state1 = clusterState(0L, 0L, node1, initialConfig, initialConfig, 42L); cs1.setInitialState(state1); StartJoinRequest startJoinRequest1 = new StartJoinRequest(node1, randomLongBetween(1, 5)); Join v1 = cs1.handleStartJoin(startJoinRequest1); @@ -544,7 +544,7 @@ public void testHandlePublishRequestWithTermHigherThanLastAcceptedTerm() { public void testHandlePublishResponseWithCommit() { VotingConfiguration initialConfig = new VotingConfiguration(Collections.singleton(node1.getId())); - ClusterState state1 = clusterState(0L, 1L, node1, initialConfig, initialConfig, 42L); + ClusterState state1 = clusterState(0L, 0L, node1, initialConfig, initialConfig, 42L); cs1.setInitialState(state1); StartJoinRequest startJoinRequest1 = new StartJoinRequest(node1, randomLongBetween(1, 5)); Join v1 = cs1.handleStartJoin(startJoinRequest1); @@ -562,7 +562,7 @@ public void testHandlePublishResponseWithCommit() { public void testHandlePublishResponseWhenSteppedDownAsLeader() { VotingConfiguration initialConfig = new VotingConfiguration(Collections.singleton(node1.getId())); - ClusterState state1 = clusterState(0L, 1L, node1, initialConfig, initialConfig, 42L); + ClusterState state1 = clusterState(0L, 0L, node1, initialConfig, initialConfig, 42L); cs1.setInitialState(state1); StartJoinRequest startJoinRequest1 = new StartJoinRequest(node1, randomLongBetween(1, 5)); Join v1 = cs1.handleStartJoin(startJoinRequest1); @@ -581,7 +581,7 @@ public void testHandlePublishResponseWhenSteppedDownAsLeader() { public void testHandlePublishResponseWithoutPublishConfigQuorum() { VotingConfiguration configNode1 = new VotingConfiguration(Collections.singleton(node1.getId())); VotingConfiguration configNode2 = new VotingConfiguration(Collections.singleton(node2.getId())); - ClusterState state1 = clusterState(0L, 1L, node1, configNode1, configNode1, 42L); + ClusterState state1 = clusterState(0L, 0L, node1, configNode1, configNode1, 42L); cs1.setInitialState(state1); StartJoinRequest startJoinRequest1 = new StartJoinRequest(node1, randomLongBetween(1, 5)); Join v1 = cs1.handleStartJoin(startJoinRequest1); @@ -599,7 +599,7 @@ public void testHandlePublishResponseWithoutPublishConfigQuorum() { public void testHandlePublishResponseWithoutCommitedConfigQuorum() { VotingConfiguration configNode1 = new VotingConfiguration(Collections.singleton(node1.getId())); VotingConfiguration configNode2 = new VotingConfiguration(Collections.singleton(node2.getId())); - ClusterState state1 = clusterState(0L, 1L, node1, configNode1, configNode1, 42L); + ClusterState state1 = clusterState(0L, 0L, node1, configNode1, configNode1, 42L); cs1.setInitialState(state1); StartJoinRequest startJoinRequest1 = new StartJoinRequest(node1, randomLongBetween(1, 5)); Join v1 = cs1.handleStartJoin(startJoinRequest1); @@ -616,7 +616,7 @@ public void testHandlePublishResponseWithoutCommitedConfigQuorum() { public void 
testHandlePublishResponseWithoutCommit() { VotingConfiguration initialConfig = new VotingConfiguration(Collections.singleton(node1.getId())); - ClusterState state1 = clusterState(0L, 1L, node1, initialConfig, initialConfig, 42L); + ClusterState state1 = clusterState(0L, 0L, node1, initialConfig, initialConfig, 42L); cs1.setInitialState(state1); StartJoinRequest startJoinRequest1 = new StartJoinRequest(node1, randomLongBetween(1, 5)); Join v1 = cs1.handleStartJoin(startJoinRequest1); @@ -631,7 +631,7 @@ public void testHandlePublishResponseWithoutCommit() { public void testHandlePublishResponseWithBadTerm() { VotingConfiguration initialConfig = new VotingConfiguration(Collections.singleton(node1.getId())); - ClusterState state1 = clusterState(0L, 1L, node1, initialConfig, initialConfig, 42L); + ClusterState state1 = clusterState(0L, 0L, node1, initialConfig, initialConfig, 42L); cs1.setInitialState(state1); StartJoinRequest startJoinRequest1 = new StartJoinRequest(node1, randomLongBetween(1, 5)); Join v1 = cs1.handleStartJoin(startJoinRequest1); @@ -650,7 +650,7 @@ public void testHandlePublishResponseWithBadTerm() { public void testHandlePublishResponseWithVersionMismatch() { VotingConfiguration initialConfig = new VotingConfiguration(Collections.singleton(node1.getId())); - ClusterState state1 = clusterState(0L, 1L, node1, initialConfig, initialConfig, 42L); + ClusterState state1 = clusterState(0L, 0L, node1, initialConfig, initialConfig, 42L); cs1.setInitialState(state1); StartJoinRequest startJoinRequest1 = new StartJoinRequest(node1, randomLongBetween(1, 5)); Join v1 = cs1.handleStartJoin(startJoinRequest1); @@ -665,7 +665,7 @@ public void testHandlePublishResponseWithVersionMismatch() { public void testHandleCommit() { VotingConfiguration initialConfig = new VotingConfiguration(Collections.singleton(node1.getId())); - ClusterState state1 = clusterState(0L, 1L, node1, initialConfig, initialConfig, 42L); + ClusterState state1 = clusterState(0L, 0L, node1, initialConfig, initialConfig, 42L); cs1.setInitialState(state1); StartJoinRequest startJoinRequest1 = new StartJoinRequest(node1, randomLongBetween(1, 5)); Join v1 = cs1.handleStartJoin(startJoinRequest1); @@ -687,7 +687,7 @@ public void testHandleCommit() { public void testHandleCommitWithBadCurrentTerm() { VotingConfiguration initialConfig = new VotingConfiguration(Collections.singleton(node1.getId())); - ClusterState state1 = clusterState(0L, 1L, node1, initialConfig, initialConfig, 42L); + ClusterState state1 = clusterState(0L, 0L, node1, initialConfig, initialConfig, 42L); cs1.setInitialState(state1); StartJoinRequest startJoinRequest1 = new StartJoinRequest(node1, randomLongBetween(1, 5)); Join v1 = cs1.handleStartJoin(startJoinRequest1); @@ -707,7 +707,7 @@ public void testHandleCommitWithBadCurrentTerm() { public void testHandleCommitWithBadLastAcceptedTerm() { VotingConfiguration initialConfig = new VotingConfiguration(Collections.singleton(node1.getId())); - ClusterState state1 = clusterState(0L, 1L, node1, initialConfig, initialConfig, 42L); + ClusterState state1 = clusterState(0L, 0L, node1, initialConfig, initialConfig, 42L); cs1.setInitialState(state1); StartJoinRequest startJoinRequest1 = new StartJoinRequest(node1, randomLongBetween(1, 5)); Join v1 = cs1.handleStartJoin(startJoinRequest1); @@ -720,7 +720,7 @@ public void testHandleCommitWithBadLastAcceptedTerm() { public void testHandleCommitWithBadVersion() { VotingConfiguration initialConfig = new VotingConfiguration(Collections.singleton(node1.getId())); - 
ClusterState state1 = clusterState(0L, 1L, node1, initialConfig, initialConfig, 42L);
+        ClusterState state1 = clusterState(0L, 0L, node1, initialConfig, initialConfig, 42L);
         cs1.setInitialState(state1);
         StartJoinRequest startJoinRequest1 = new StartJoinRequest(node1, randomLongBetween(1, 5));
         Join v1 = cs1.handleStartJoin(startJoinRequest1);
@@ -830,7 +830,7 @@ void reboot() {
         }
 
         void setInitialState(VotingConfiguration initialConfig, long initialValue) {
-            final ClusterState.Builder builder = ClusterState.builder(state.getLastAcceptedState()).incrementVersion();
+            final ClusterState.Builder builder = ClusterState.builder(state.getLastAcceptedState());
             builder.metaData(MetaData.builder()
                 .coordinationMetaData(CoordinationMetaData.builder()
                     .lastAcceptedConfiguration(initialConfig)
diff --git a/server/src/test/java/org/elasticsearch/cluster/coordination/CoordinatorTests.java b/server/src/test/java/org/elasticsearch/cluster/coordination/CoordinatorTests.java
index f38f7a52ce3e1..b2fa9948f4bf7 100644
--- a/server/src/test/java/org/elasticsearch/cluster/coordination/CoordinatorTests.java
+++ b/server/src/test/java/org/elasticsearch/cluster/coordination/CoordinatorTests.java
@@ -18,6 +18,7 @@
  */
 package org.elasticsearch.cluster.coordination;
 
+import com.carrotsearch.randomizedtesting.RandomizedContext;
 import org.apache.logging.log4j.CloseableThreadContext;
 import org.apache.logging.log4j.LogManager;
 import org.apache.logging.log4j.Logger;
@@ -65,6 +66,7 @@
 import java.io.UncheckedIOException;
 import java.util.ArrayList;
 import java.util.Arrays;
+import java.util.Collection;
 import java.util.Collections;
 import java.util.EnumSet;
 import java.util.HashMap;
@@ -73,7 +75,9 @@
 import java.util.Map;
 import java.util.Optional;
 import java.util.Set;
+import java.util.concurrent.Callable;
 import java.util.concurrent.atomic.AtomicBoolean;
+import java.util.function.BiConsumer;
 import java.util.function.Consumer;
 import java.util.function.Function;
 import java.util.function.Predicate;
@@ -141,6 +145,24 @@ public void resetPortCounterBeforeEachTest() {
         resetPortCounter();
     }
 
+    // check that runRandomly leads to reproducible results
+    public void testRepeatableTests() throws Exception {
+        final Callable<Long> test = () -> {
+            final Cluster cluster = new Cluster(randomIntBetween(1, 5));
+            cluster.runRandomly();
+            final long afterRunRandomly = value(cluster.getAnyNode().getLastAppliedClusterState());
+            cluster.stabilise();
+            final long afterStabilisation = value(cluster.getAnyNode().getLastAppliedClusterState());
+            return afterRunRandomly ^ afterStabilisation;
+        };
+        final long seed = randomLong();
+        logger.info("First run with seed [{}]", seed);
+        final long result1 = RandomizedContext.current().runWithPrivateRandomness(seed, test);
+        logger.info("Second run with seed [{}]", seed);
+        final long result2 = RandomizedContext.current().runWithPrivateRandomness(seed, test);
+        assertEquals(result1, result2);
+    }
+
     public void testCanUpdateClusterStateAfterStabilisation() {
         final Cluster cluster = new Cluster(randomIntBetween(1, 5));
         cluster.runRandomly();
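Aside (not part of the patch): testRepeatableTests above leans on randomizedtesting's private-randomness API to assert determinism. A minimal sketch of the pattern, using only calls that appear in the hunk, is to run the same randomized computation twice under one seed and insist the observable result is identical:

    final long seed = randomLong();
    final Callable<Long> once = () -> {
        final Cluster cluster = new Cluster(randomIntBetween(1, 5)); // every random draw comes from the private source
        cluster.runRandomly();
        cluster.stabilise();
        return value(cluster.getAnyNode().getLastAppliedClusterState());
    };
    // Any hidden use of shared randomness (or wall-clock time) would make these two results differ.
    assertEquals(RandomizedContext.current().runWithPrivateRandomness(seed, once),
        RandomizedContext.current().runWithPrivateRandomness(seed, once));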
@@ -967,6 +989,67 @@ private void testAppliesNoMasterBlock(String noMasterBlockSetting, ClusterBlock
         // TODO reboot the leader and verify that the same block is applied when it restarts
     }
 
+    public void testNodeCannotJoinIfJoinValidationFailsOnMaster() {
+        final Cluster cluster = new Cluster(randomIntBetween(1, 3));
+        cluster.runRandomly();
+        cluster.stabilise();
+
+        // check that if node join validation fails on master, the nodes can't join
+        List<ClusterNode> addedNodes = cluster.addNodes(randomIntBetween(1, 2));
+        final Set<DiscoveryNode> validatedNodes = new HashSet<>();
+        cluster.getAnyLeader().extraJoinValidators.add((discoveryNode, clusterState) -> {
+            validatedNodes.add(discoveryNode);
+            throw new IllegalArgumentException("join validation failed");
+        });
+        final long previousClusterStateVersion = cluster.getAnyLeader().getLastAppliedClusterState().version();
+        cluster.runFor(10000, "failing join validation");
+        assertEquals(validatedNodes, addedNodes.stream().map(ClusterNode::getLocalNode).collect(Collectors.toSet()));
+        assertTrue(addedNodes.stream().allMatch(ClusterNode::isCandidate));
+        final long newClusterStateVersion = cluster.getAnyLeader().getLastAppliedClusterState().version();
+        assertEquals(previousClusterStateVersion, newClusterStateVersion);
+
+        cluster.getAnyLeader().extraJoinValidators.clear();
+        cluster.stabilise();
+    }
+
+    public void testNodeCannotJoinIfJoinValidationFailsOnJoiningNode() {
+        final Cluster cluster = new Cluster(randomIntBetween(1, 3));
+        cluster.runRandomly();
+        cluster.stabilise();
+
+        // check that if node join validation fails on joining node, the nodes can't join
+        List<ClusterNode> addedNodes = cluster.addNodes(randomIntBetween(1, 2));
+        final Set<DiscoveryNode> validatedNodes = new HashSet<>();
+        addedNodes.stream().forEach(cn -> cn.extraJoinValidators.add((discoveryNode, clusterState) -> {
+            validatedNodes.add(discoveryNode);
+            throw new IllegalArgumentException("join validation failed");
+        }));
+        final long previousClusterStateVersion = cluster.getAnyLeader().getLastAppliedClusterState().version();
+        cluster.runFor(10000, "failing join validation");
+        assertEquals(validatedNodes, addedNodes.stream().map(ClusterNode::getLocalNode).collect(Collectors.toSet()));
+        assertTrue(addedNodes.stream().allMatch(ClusterNode::isCandidate));
+        final long newClusterStateVersion = cluster.getAnyLeader().getLastAppliedClusterState().version();
+        assertEquals(previousClusterStateVersion, newClusterStateVersion);
+
+        addedNodes.stream().forEach(cn -> cn.extraJoinValidators.clear());
+        cluster.stabilise();
+    }
+
+    public void testClusterCannotFormWithFailingJoinValidation() {
+        final Cluster cluster = new Cluster(randomIntBetween(1, 5));
+        // fail join validation on a majority of nodes in the initial configuration
+        randomValueOtherThanMany(nodes ->
+            cluster.initialConfiguration.hasQuorum(
+                nodes.stream().map(ClusterNode::getLocalNode).map(DiscoveryNode::getId).collect(Collectors.toSet())) == false,
+            () -> randomSubsetOf(cluster.clusterNodes))
+            .forEach(cn -> cn.extraJoinValidators.add((discoveryNode, clusterState) -> {
+                throw new IllegalArgumentException("join validation failed");
+            }));
+        cluster.bootstrapIfNecessary();
+        cluster.runFor(10000, "failing join validation");
+        assertTrue(cluster.clusterNodes.stream().allMatch(cn -> cn.getLastAppliedClusterState().version() == 0));
+    }
+
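Aside (illustrative, not from this patch): the extraJoinValidators hook that the three tests above poke is just a collection of BiConsumer<DiscoveryNode, ClusterState> callbacks, and throwing from one vetoes the join. A hypothetical validator in that shape, where the name and the version rule are invented purely for illustration:

    // Hypothetical example: reject joiners older than the cluster's minimum node version.
    BiConsumer<DiscoveryNode, ClusterState> versionGuard = (joiningNode, state) -> {
        if (joiningNode.getVersion().before(state.nodes().getMinNodeVersion())) {
            throw new IllegalArgumentException("node " + joiningNode + " is too old to join");
        }
    };
    // The tests hook validators in like this; production code hands them to the Coordinator constructor instead.
    cluster.getAnyLeader().extraJoinValidators.add(versionGuard);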
    private static long defaultMillis(Setting<TimeValue> setting) {
        return setting.get(Settings.EMPTY).millis() + Cluster.DEFAULT_DELAY_VARIABILITY;
    }
@@ -1054,8 +1137,8 @@ class Cluster {
             initialNodeCount, masterEligibleNodeIds, initialConfiguration);
         }
 
-        void addNodesAndStabilise(int newNodesCount) {
-            addNodes(newNodesCount);
+        List<ClusterNode> addNodesAndStabilise(int newNodesCount) {
+            final List<ClusterNode> addedNodes = addNodes(newNodesCount);
             stabilise(
                 // The first pinging discovers the master
                 defaultMillis(DISCOVERY_FIND_PEERS_INTERVAL_SETTING)
                     // One message delay to send a join
                     + DEFAULT_DELAY_VARIABILITY
                     // followup reconfiguration
                     + newNodesCount * 2 * DEFAULT_CLUSTER_STATE_UPDATE_DELAY);
             // TODO Investigate whether 4 publications is sufficient due to batching? A bound linear in the number of nodes isn't great.
+            return addedNodes;
         }
 
-        void addNodes(int newNodesCount) {
+        List<ClusterNode> addNodes(int newNodesCount) {
             logger.info("--> adding {} nodes", newNodesCount);
             final int nodeSizeAtStart = clusterNodes.size();
+            final List<ClusterNode> addedNodes = new ArrayList<>();
             for (int i = 0; i < newNodesCount; i++) {
                 final ClusterNode clusterNode = new ClusterNode(nodeSizeAtStart + i, true);
-                clusterNodes.add(clusterNode);
+                addedNodes.add(clusterNode);
             }
+            clusterNodes.addAll(addedNodes);
+            return addedNodes;
         }
 
         int size() {
@@ -1212,15 +1299,7 @@ void stabilise(long stabilisationDurationMillis) {
                 deterministicTaskQueue.getExecutionDelayVariabilityMillis(), lessThanOrEqualTo(DEFAULT_DELAY_VARIABILITY));
             assertFalse("stabilisation requires stable storage", disruptStorage);
 
-            if (clusterNodes.stream().allMatch(ClusterNode::isNotUsefullyBootstrapped)) {
-                assertThat("setting initial configuration may fail with disconnected nodes", disconnectedNodes, empty());
-                assertThat("setting initial configuration may fail with blackholed nodes", blackholedNodes, empty());
-                runFor(defaultMillis(DISCOVERY_FIND_PEERS_INTERVAL_SETTING) * 2, "discovery prior to setting initial configuration");
-                final ClusterNode bootstrapNode = getAnyMasterEligibleNode();
-                bootstrapNode.applyInitialConfiguration();
-            } else {
-                logger.info("setting initial configuration not required");
-            }
+            bootstrapIfNecessary();
 
             runFor(stabilisationDurationMillis, "stabilising");
@@ -1259,12 +1338,12 @@ void stabilise(long stabilisationDurationMillis) {
                     assertThat(nodeId + " has correct master", clusterNode.getLastAppliedClusterState().nodes().getMasterNode(),
                         equalTo(leader.getLocalNode()));
                     assertThat(nodeId + " has no NO_MASTER_BLOCK",
-                        clusterNode.getLastAppliedClusterState().blocks().hasGlobalBlock(NO_MASTER_BLOCK_ID), equalTo(false));
+                        clusterNode.getLastAppliedClusterState().blocks().hasGlobalBlockWithId(NO_MASTER_BLOCK_ID), equalTo(false));
                 } else {
                     assertThat(nodeId + " is not following " + leaderId, clusterNode.coordinator.getMode(), is(CANDIDATE));
                     assertThat(nodeId + " has no master", clusterNode.getLastAppliedClusterState().nodes().getMasterNode(), nullValue());
                     assertThat(nodeId + " has NO_MASTER_BLOCK",
-                        clusterNode.getLastAppliedClusterState().blocks().hasGlobalBlock(NO_MASTER_BLOCK_ID), equalTo(true));
+                        clusterNode.getLastAppliedClusterState().blocks().hasGlobalBlockWithId(NO_MASTER_BLOCK_ID), equalTo(true));
                     assertFalse(nodeId + " is not in the applied state on " + leaderId,
                         leader.getLastAppliedClusterState().getNodes().nodeExists(nodeId));
                 }
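Aside (explanatory, not from this patch): the two assertions above switch from matching a specific block instance to matching by block ID. Which no-master block is actually applied depends on the no-master-block setting (testAppliesNoMasterBlock takes the setting and the expected ClusterBlock as parameters), so, assuming the two DiscoverySettings constants, a sketch of the distinction:

    // NO_MASTER_BLOCK_WRITES and NO_MASTER_BLOCK_ALL are distinct ClusterBlock
    // instances that share NO_MASTER_BLOCK_ID, so an ID match covers both:
    final ClusterBlocks blocks = clusterNode.getLastAppliedClusterState().blocks();
    assertTrue(blocks.hasGlobalBlockWithId(NO_MASTER_BLOCK_ID)); // true whichever variant the setting selected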
@@ -1286,6 +1365,18 @@ void stabilise(long stabilisationDurationMillis) {
                 leader.improveConfiguration(lastAcceptedState), sameInstance(lastAcceptedState));
         }
 
+        void bootstrapIfNecessary() {
+            if (clusterNodes.stream().allMatch(ClusterNode::isNotUsefullyBootstrapped)) {
+                assertThat("setting initial configuration may fail with disconnected nodes", disconnectedNodes, empty());
+                assertThat("setting initial configuration may fail with blackholed nodes", blackholedNodes, empty());
+                runFor(defaultMillis(DISCOVERY_FIND_PEERS_INTERVAL_SETTING) * 2, "discovery prior to setting initial configuration");
+                final ClusterNode bootstrapNode = getAnyMasterEligibleNode();
+                bootstrapNode.applyInitialConfiguration();
+            } else {
+                logger.info("setting initial configuration not required");
+            }
+        }
+
         void runFor(long runDurationMillis, String description) {
             final long endTime = deterministicTaskQueue.getCurrentTimeMillis() + runDurationMillis;
             logger.info("--> runFor({}ms) running until [{}ms]: {}", runDurationMillis, endTime, description);
@@ -1429,6 +1520,7 @@ class ClusterNode {
         private AckedFakeThreadPoolMasterService masterService;
         private TransportService transportService;
         private DisruptableMockTransport mockTransport;
+        private List<BiConsumer<DiscoveryNode, ClusterState>> extraJoinValidators = new ArrayList<>();
         private ClusterStateApplyResponse clusterStateApplyResponse = ClusterStateApplyResponse.SUCCEED;
 
         ClusterNode(int nodeIndex, boolean masterEligible) {
@@ -1501,9 +1593,11 @@ protected void onBlackholedDuringSend(long requestId, String action, DiscoveryNo
             transportService = mockTransport.createTransportService(
                 settings, deterministicTaskQueue.getThreadPool(runnable -> onNode(localNode, runnable)), NOOP_TRANSPORT_INTERCEPTOR,
                 a -> localNode, null, emptySet());
+            final Collection<BiConsumer<DiscoveryNode, ClusterState>> onJoinValidators =
+                Collections.singletonList((dn, cs) -> extraJoinValidators.forEach(validator -> validator.accept(dn, cs)));
             coordinator = new Coordinator("test_node", settings, clusterSettings, transportService, writableRegistry(),
                 ESAllocationTestCase.createAllocationService(Settings.EMPTY), masterService, this::getPersistedState,
-                Cluster.this::provideUnicastHosts, clusterApplier, Randomness.get());
+                Cluster.this::provideUnicastHosts, clusterApplier, onJoinValidators, Randomness.get());
             masterService.setClusterStatePublisher(coordinator);
 
             try {
@@ -1535,6 +1629,10 @@ boolean isLeader() {
             return coordinator.getMode() == LEADER;
         }
 
+        boolean isCandidate() {
+            return coordinator.getMode() == CANDIDATE;
+        }
+
         ClusterState improveConfiguration(ClusterState currentState) {
             synchronized (coordinator.mutex) {
                 return coordinator.improveConfiguration(currentState);
@@ -1730,7 +1828,7 @@ private List<TransportAddress> provideUnicastHosts(HostsResolver ignored) {
         }
     }
 
-    private static Runnable onNode(DiscoveryNode node, Runnable runnable) {
+    public static Runnable onNode(DiscoveryNode node, Runnable runnable) {
         final String nodeId = "{" + node.getId() + "}{" + node.getEphemeralId() + "}";
         return new Runnable() {
             @Override
diff --git a/server/src/test/java/org/elasticsearch/cluster/coordination/JoinHelperTests.java b/server/src/test/java/org/elasticsearch/cluster/coordination/JoinHelperTests.java
index 6e7965f896f4c..ef843717fb469 100644
--- a/server/src/test/java/org/elasticsearch/cluster/coordination/JoinHelperTests.java
+++ b/server/src/test/java/org/elasticsearch/cluster/coordination/JoinHelperTests.java
@@ -44,7 +44,8 @@ public void testJoinDeduplication() {
             deterministicTaskQueue.getThreadPool(), TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> localNode, null,
             Collections.emptySet());
         JoinHelper joinHelper = new JoinHelper(Settings.EMPTY, null, null, transportService, () -> 0L,
-            (joinRequest, joinCallback) -> { throw new AssertionError(); }, startJoinRequest -> { throw new AssertionError(); });
+            (joinRequest, joinCallback) -> { throw new AssertionError(); }, startJoinRequest -> { throw new AssertionError(); },
+            Collections.emptyList());
         transportService.start();
 
         DiscoveryNode node1 = new DiscoveryNode("node1", buildNewFakeTransportAddress(), Version.CURRENT);
diff --git a/server/src/test/java/org/elasticsearch/cluster/coordination/NodeJoinTests.java b/server/src/test/java/org/elasticsearch/cluster/coordination/NodeJoinTests.java
index ea45eb42d89a3..108b9e6dd7f1e 100644
--- a/server/src/test/java/org/elasticsearch/cluster/coordination/NodeJoinTests.java
+++ 
b/server/src/test/java/org/elasticsearch/cluster/coordination/NodeJoinTests.java @@ -44,6 +44,7 @@ import org.elasticsearch.threadpool.TestThreadPool; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.RequestHandlerRegistry; +import org.elasticsearch.transport.Transport; import org.elasticsearch.transport.TransportChannel; import org.elasticsearch.transport.TransportRequest; import org.elasticsearch.transport.TransportResponse; @@ -73,6 +74,7 @@ import static java.util.Collections.singletonList; import static org.elasticsearch.transport.TransportService.HANDSHAKE_ACTION_NAME; import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.equalTo; @TestLogging("org.elasticsearch.cluster.service:TRACE,org.elasticsearch.cluster.coordination:TRACE") public class NodeJoinTests extends ESTestCase { @@ -82,7 +84,7 @@ public class NodeJoinTests extends ESTestCase { private MasterService masterService; private Coordinator coordinator; private DeterministicTaskQueue deterministicTaskQueue; - private RequestHandlerRegistry transportRequestHandler; + private Transport transport; @BeforeClass public static void beforeClass() { @@ -155,6 +157,8 @@ protected void onSendRequest(long requestId, String action, TransportRequest req if (action.equals(HANDSHAKE_ACTION_NAME)) { handleResponse(requestId, new TransportService.HandshakeResponse(destination, initialState.getClusterName(), destination.getVersion())); + } else if (action.equals(JoinHelper.VALIDATE_JOIN_ACTION_NAME)) { + handleResponse(requestId, new TransportResponse.Empty()); } else { super.onSendRequest(requestId, action, request, destination); } @@ -171,10 +175,11 @@ transportService, writableRegistry(), masterService, () -> new InMemoryPersistedState(term, initialState), r -> emptyList(), new NoOpClusterApplier(), + Collections.emptyList(), random); transportService.start(); transportService.acceptIncomingRequests(); - transportRequestHandler = capturingTransport.getRequestHandler(JoinHelper.JOIN_ACTION_NAME); + transport = capturingTransport; coordinator.start(); coordinator.startInitialJoin(); } @@ -219,7 +224,9 @@ private SimpleFuture joinNodeAsync(final JoinRequest joinRequest) { // clone the node before submitting to simulate an incoming join, which is guaranteed to have a new // disco node object serialized off the network try { - transportRequestHandler.processMessageReceived(joinRequest, new TransportChannel() { + final RequestHandlerRegistry joinHandler = (RequestHandlerRegistry) + transport.getRequestHandler(JoinHelper.JOIN_ACTION_NAME); + joinHandler.processMessageReceived(joinRequest, new TransportChannel() { @Override public String getProfileName() { return "dummy"; @@ -353,7 +360,7 @@ public void testJoinAccumulation() { FutureUtils.get(futNode1); } - public void testJoinFollowerWithHigherTerm() { + public void testJoinFollowerWithHigherTerm() throws Exception { DiscoveryNode node0 = newNode(0, true); DiscoveryNode node1 = newNode(1, true); long initialTerm = randomLongBetween(1, 10); @@ -361,18 +368,74 @@ public void testJoinFollowerWithHigherTerm() { setupFakeMasterServiceAndCoordinator(initialTerm, initialState(node0, initialTerm, initialVersion, new VotingConfiguration(Collections.singleton(node0.getId())))); long newTerm = initialTerm + randomLongBetween(1, 10); - coordinator.coordinationState.get().handleStartJoin(new StartJoinRequest(node1, newTerm)); - synchronized (coordinator.mutex) { - coordinator.becomeFollower("test", node1); - } - 
assertFalse(isLocalNodeElectedMaster()); + handleStartJoinFrom(node1, newTerm); + handleFollowerCheckFrom(node1, newTerm); long newerTerm = newTerm + randomLongBetween(1, 10); joinNodeAndRun(new JoinRequest(node1, Optional.of(new Join(node1, node0, newerTerm, initialTerm, initialVersion)))); assertTrue(isLocalNodeElectedMaster()); } - public void testJoinFollowerFails() { + private void handleStartJoinFrom(DiscoveryNode node, long term) throws Exception { + final RequestHandlerRegistry startJoinHandler = (RequestHandlerRegistry) + transport.getRequestHandler(JoinHelper.START_JOIN_ACTION_NAME); + startJoinHandler.processMessageReceived(new StartJoinRequest(node, term), new TransportChannel() { + @Override + public String getProfileName() { + return "dummy"; + } + + @Override + public String getChannelType() { + return "dummy"; + } + + @Override + public void sendResponse(TransportResponse response) { + + } + + @Override + public void sendResponse(Exception exception) { + fail(); + } + }); + deterministicTaskQueue.runAllRunnableTasks(); + assertFalse(isLocalNodeElectedMaster()); + assertThat(coordinator.getMode(), equalTo(Coordinator.Mode.CANDIDATE)); + } + + private void handleFollowerCheckFrom(DiscoveryNode node, long term) throws Exception { + final RequestHandlerRegistry followerCheckHandler = + (RequestHandlerRegistry) + transport.getRequestHandler(FollowersChecker.FOLLOWER_CHECK_ACTION_NAME); + followerCheckHandler.processMessageReceived(new FollowersChecker.FollowerCheckRequest(term, node), new TransportChannel() { + @Override + public String getProfileName() { + return "dummy"; + } + + @Override + public String getChannelType() { + return "dummy"; + } + + @Override + public void sendResponse(TransportResponse response) { + + } + + @Override + public void sendResponse(Exception exception) { + fail(); + } + }); + deterministicTaskQueue.runAllRunnableTasks(); + assertFalse(isLocalNodeElectedMaster()); + assertThat(coordinator.getMode(), equalTo(Coordinator.Mode.FOLLOWER)); + } + + public void testJoinFollowerFails() throws Exception { DiscoveryNode node0 = newNode(0, true); DiscoveryNode node1 = newNode(1, true); long initialTerm = randomLongBetween(1, 10); @@ -380,18 +443,15 @@ public void testJoinFollowerFails() { setupFakeMasterServiceAndCoordinator(initialTerm, initialState(node0, initialTerm, initialVersion, new VotingConfiguration(Collections.singleton(node0.getId())))); long newTerm = initialTerm + randomLongBetween(1, 10); - coordinator.coordinationState.get().handleStartJoin(new StartJoinRequest(node1, newTerm)); - synchronized (coordinator.mutex) { - coordinator.becomeFollower("test", node1); - } - assertFalse(isLocalNodeElectedMaster()); + handleStartJoinFrom(node1, newTerm); + handleFollowerCheckFrom(node1, newTerm); assertThat(expectThrows(CoordinationStateRejectedException.class, () -> joinNodeAndRun(new JoinRequest(node1, Optional.empty()))).getMessage(), containsString("join target is a follower")); assertFalse(isLocalNodeElectedMaster()); } - public void testBecomeFollowerFailsPendingJoin() { + public void testBecomeFollowerFailsPendingJoin() throws Exception { DiscoveryNode node0 = newNode(0, true); DiscoveryNode node1 = newNode(1, true); long initialTerm = randomLongBetween(1, 10); @@ -403,9 +463,7 @@ public void testBecomeFollowerFailsPendingJoin() { deterministicTaskQueue.runAllRunnableTasks(); assertFalse(fut.isDone()); assertFalse(isLocalNodeElectedMaster()); - synchronized (coordinator.mutex) { - coordinator.becomeFollower("test", node1); - } + 
handleFollowerCheckFrom(node1, newTerm); assertFalse(isLocalNodeElectedMaster()); assertThat(expectThrows(CoordinationStateRejectedException.class, () -> FutureUtils.get(fut)).getMessage(), diff --git a/server/src/test/java/org/elasticsearch/cluster/coordination/PublicationTests.java b/server/src/test/java/org/elasticsearch/cluster/coordination/PublicationTests.java index 914ee1e95f782..4435073d95363 100644 --- a/server/src/test/java/org/elasticsearch/cluster/coordination/PublicationTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/coordination/PublicationTests.java @@ -156,7 +156,7 @@ protected void sendApplyCommit(DiscoveryNode destination, ApplyCommitRequest app Function nodeResolver = dn -> nodes.stream().filter(mn -> mn.localNode.equals(dn)).findFirst().get(); private void initializeCluster(VotingConfiguration initialConfig) { - node1.coordinationState.setInitialState(CoordinationStateTests.clusterState(0L, 1L, n1, initialConfig, initialConfig, 0L)); + node1.coordinationState.setInitialState(CoordinationStateTests.clusterState(0L, 0L, n1, initialConfig, initialConfig, 0L)); StartJoinRequest startJoinRequest = new StartJoinRequest(n1, 1L); node1.coordinationState.handleJoin(node1.coordinationState.handleStartJoin(startJoinRequest)); node1.coordinationState.handleJoin(node2.coordinationState.handleStartJoin(startJoinRequest)); diff --git a/server/src/test/java/org/elasticsearch/cluster/coordination/Zen1IT.java b/server/src/test/java/org/elasticsearch/cluster/coordination/Zen1IT.java index e8a2a17eb06df..eb4b2d75c73f9 100644 --- a/server/src/test/java/org/elasticsearch/cluster/coordination/Zen1IT.java +++ b/server/src/test/java/org/elasticsearch/cluster/coordination/Zen1IT.java @@ -24,22 +24,34 @@ import org.elasticsearch.action.admin.cluster.configuration.ClearVotingConfigExclusionsRequest; import org.elasticsearch.action.admin.cluster.health.ClusterHealthRequestBuilder; import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse; +import org.elasticsearch.action.admin.cluster.state.ClusterStateRequest; import org.elasticsearch.client.Client; +import org.elasticsearch.client.Requests; import org.elasticsearch.cluster.metadata.IndexMetaData; +import org.elasticsearch.cluster.metadata.Manifest; import org.elasticsearch.cluster.routing.UnassignedInfo; +import org.elasticsearch.cluster.routing.allocation.decider.EnableAllocationDecider.Allocation; import org.elasticsearch.common.Priority; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.env.NodeEnvironment; +import org.elasticsearch.gateway.MetaStateService; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.InternalTestCluster.RestartCallback; import org.elasticsearch.test.discovery.TestZenDiscovery; import java.util.List; +import java.util.stream.Collectors; import java.util.stream.IntStream; +import java.util.stream.StreamSupport; +import static org.elasticsearch.cluster.coordination.ClusterBootstrapService.INITIAL_MASTER_NODES_SETTING; +import static org.elasticsearch.cluster.coordination.Coordinator.ZEN1_BWC_TERM; +import static org.elasticsearch.cluster.routing.allocation.decider.EnableAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ENABLE_SETTING; import static org.elasticsearch.cluster.routing.allocation.decider.FilterAllocationDecider.CLUSTER_ROUTING_EXCLUDE_GROUP_SETTING; import static org.elasticsearch.node.Node.NODE_NAME_SETTING; import static org.hamcrest.Matchers.equalTo; +import static 
org.hamcrest.Matchers.is; @ESIntegTestCase.ClusterScope(scope = ESIntegTestCase.Scope.TEST, numDataNodes = 0) public class Zen1IT extends ESIntegTestCase { @@ -228,4 +240,66 @@ public void testMultipleNodeMigrationFromZen1ToZen2WithTwoNodes() throws Excepti public void testMultipleNodeMigrationFromZen1ToZen2WithThreeNodes() throws Exception { testMultipleNodeMigrationFromZen1ToZen2(3); } + + public void testFreshestMasterElectedAfterFullClusterRestart() throws Exception { + final List nodeNames = internalCluster().startNodes(3, ZEN1_SETTINGS); + + assertTrue(client().admin().cluster().prepareUpdateSettings().setPersistentSettings(Settings.builder() + .put(CLUSTER_ROUTING_ALLOCATION_ENABLE_SETTING.getKey(), Allocation.ALL)).get().isAcknowledged()); + + final List nodeEnvironments + = StreamSupport.stream(internalCluster().getDataOrMasterNodeInstances(NodeEnvironment.class).spliterator(), false) + .collect(Collectors.toList()); + + final boolean randomiseVersions = rarely(); + + internalCluster().fullRestart(new RestartCallback() { + int nodesStopped; + + @Override + public Settings onNodeStopped(String nodeName) throws Exception { + nodesStopped += 1; + + if (nodesStopped == 1) { + final Client client = internalCluster().client(randomValueOtherThan(nodeName, () -> randomFrom(nodeNames))); + + assertFalse(client.admin().cluster().health(Requests.clusterHealthRequest() + .waitForEvents(Priority.LANGUID) + .waitForNoRelocatingShards(true) + .waitForNodes("2")).actionGet().isTimedOut()); + + assertTrue(client.admin().cluster().prepareUpdateSettings().setPersistentSettings(Settings.builder() + .put(CLUSTER_ROUTING_ALLOCATION_ENABLE_SETTING.getKey(), Allocation.NONE)).get().isAcknowledged()); + } + + if (nodesStopped == nodeNames.size()) { + for (final NodeEnvironment nodeEnvironment : nodeEnvironments) { + // The versions written by nodes following a Zen1 master cannot be trusted. Randomise them to demonstrate they are + // not important. + final MetaStateService metaStateService = new MetaStateService(nodeEnvironment, xContentRegistry()); + final Manifest manifest = metaStateService.loadManifestOrEmpty(); + assertThat(manifest.getCurrentTerm(), is(ZEN1_BWC_TERM)); + final long newVersion = randomiseVersions ? 
                        randomNonNegativeLong() : 0L;
+                    metaStateService.writeManifestAndCleanup("altering version to " + newVersion,
+                        new Manifest(manifest.getCurrentTerm(), newVersion, manifest.getGlobalGeneration(),
+                            manifest.getIndexGenerations()));
+                }
+            }
+
+            return Coordinator.addZen1Attribute(false, Settings.builder())
+                .put(ZEN2_SETTINGS)
+                .putList(INITIAL_MASTER_NODES_SETTING.getKey(), nodeNames)
+                .build();
+        }
+    });
+
+    assertFalse(client().admin().cluster().health(Requests.clusterHealthRequest()
+        .waitForEvents(Priority.LANGUID)
+        .waitForNoRelocatingShards(true)
+        .waitForNodes("3")).actionGet().isTimedOut());
+
+    assertThat(CLUSTER_ROUTING_ALLOCATION_ENABLE_SETTING.get(
+        client().admin().cluster().state(new ClusterStateRequest()).get().getState().metaData().settings()),
+        equalTo(Allocation.NONE));
+    }
 }
diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/DateMathExpressionResolverTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/DateMathExpressionResolverTests.java
index 1827554ee50ea..5c67e1bbe566c 100644
--- a/server/src/test/java/org/elasticsearch/cluster/metadata/DateMathExpressionResolverTests.java
+++ b/server/src/test/java/org/elasticsearch/cluster/metadata/DateMathExpressionResolverTests.java
@@ -91,6 +91,7 @@ public void testExpression_MultiParts() throws Exception {
             + DateTimeFormat.forPattern("YYYY.MM.dd").print(new DateTime(context.getStartTime(), UTC).withDayOfMonth(1))));
     }
 
+    @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/37037")
     public void testExpression_CustomFormat() throws Exception {
         List<String> results = expressionResolver.resolve(context, Arrays.asList("<.marvel-{now/d{YYYY.MM.dd}}>"));
         assertThat(results.size(), equalTo(1));
@@ -105,6 +106,7 @@ public void testExpression_EscapeStatic() throws Exception {
             equalTo(".mar{v}el-" + DateTimeFormat.forPattern("YYYY.MM.dd").print(new DateTime(context.getStartTime(), UTC))));
     }
 
+    @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/37037")
     public void testExpression_EscapeDateFormat() throws Exception {
         List<String> result = expressionResolver.resolve(context, Arrays.asList("<.marvel-{now/d{'\\{year\\}'YYYY}}>"));
         assertThat(result.size(), equalTo(1));
@@ -125,6 +127,7 @@ public void testExpression_MixedArray() throws Exception {
             DateTimeFormat.forPattern("YYYY.MM").print(new DateTime(context.getStartTime(), UTC).withDayOfMonth(1))));
     }
 
+    @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/37037")
     public void testExpression_CustomTimeZoneInIndexName() throws Exception {
         DateTimeZone timeZone;
         int hoursOffset;
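Aside (illustrative): the three tests muted above all push Joda-style patterns such as "YYYY.MM.dd" through date-math expressions, and issue 37037 tracks how they behave under the java-time migration. One well-known divergence, offered here as an assumption about the kind of breakage involved rather than the confirmed root cause, is that 'Y' means year-of-era in Joda-Time but week-based-year in java.time:

    import java.time.LocalDate;
    import java.time.temporal.IsoFields;

    // 2018-12-31 falls in ISO week 1 of 2019, so the two readings of "YYYY" disagree:
    LocalDate lastDay = LocalDate.of(2018, 12, 31);
    assert lastDay.getYear() == 2018;                      // year-of-era: what "YYYY" meant under Joda
    assert lastDay.get(IsoFields.WEEK_BASED_YEAR) == 2019; // week-based year: what "YYYY" means in java.time

diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/MetaDataIndexStateServiceTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/MetaDataIndexStateServiceTests.java
index 712f3ccdd4755..c30925514bb93 100644
--- a/server/src/test/java/org/elasticsearch/cluster/metadata/MetaDataIndexStateServiceTests.java
+++ b/server/src/test/java/org/elasticsearch/cluster/metadata/MetaDataIndexStateServiceTests.java
@@ -19,28 +19,225 @@
 package org.elasticsearch.cluster.metadata;
 
+import com.google.common.collect.ImmutableList;
 import org.elasticsearch.Version;
+import org.elasticsearch.action.support.master.AcknowledgedResponse;
 import org.elasticsearch.cluster.ClusterName;
 import org.elasticsearch.cluster.ClusterState;
+import org.elasticsearch.cluster.RestoreInProgress;
+import org.elasticsearch.cluster.SnapshotsInProgress;
+import org.elasticsearch.cluster.block.ClusterBlock;
+import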
org.elasticsearch.cluster.block.ClusterBlocks; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNodes; +import org.elasticsearch.cluster.routing.IndexRoutingTable; +import org.elasticsearch.cluster.routing.IndexShardRoutingTable; +import org.elasticsearch.cluster.routing.RoutingTable; +import org.elasticsearch.cluster.routing.ShardRouting; +import org.elasticsearch.cluster.routing.ShardRoutingState; import org.elasticsearch.cluster.shards.ClusterShardLimitIT; +import org.elasticsearch.common.Nullable; import org.elasticsearch.common.ValidationException; import org.elasticsearch.common.collect.ImmutableOpenMap; -import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.Index; +import org.elasticsearch.index.IndexNotFoundException; +import org.elasticsearch.index.shard.ShardId; +import org.elasticsearch.repositories.IndexId; +import org.elasticsearch.snapshots.Snapshot; +import org.elasticsearch.snapshots.SnapshotId; import org.elasticsearch.test.ESTestCase; import java.util.Arrays; +import java.util.Collections; +import java.util.HashMap; +import java.util.HashSet; +import java.util.Map; +import java.util.Set; import java.util.stream.Collectors; +import static java.util.Collections.emptyMap; +import static java.util.Collections.emptySet; +import static java.util.Collections.unmodifiableMap; +import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_REPLICAS; +import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_SHARDS; +import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_VERSION_CREATED; +import static org.elasticsearch.cluster.metadata.MetaDataIndexStateService.INDEX_CLOSED_BLOCK; +import static org.elasticsearch.cluster.metadata.MetaDataIndexStateService.INDEX_CLOSED_BLOCK_ID; +import static org.elasticsearch.cluster.routing.TestShardRouting.newShardRouting; import static org.elasticsearch.cluster.shards.ClusterShardLimitIT.ShardCounts.forDataNodeCount; +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.notNullValue; +import static org.hamcrest.Matchers.nullValue; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; public class MetaDataIndexStateServiceTests extends ESTestCase { + public void testCloseRoutingTable() { + final Set nonBlockedIndices = new HashSet<>(); + final Map blockedIndices = new HashMap<>(); + final Map results = new HashMap<>(); + + ClusterState state = ClusterState.builder(new ClusterName("testCloseRoutingTable")).build(); + for (int i = 0; i < randomIntBetween(1, 25); i++) { + final String indexName = "index-" + i; + + if (randomBoolean()) { + state = addOpenedIndex(indexName, randomIntBetween(1, 5), randomIntBetween(0, 5), state); + nonBlockedIndices.add(state.metaData().index(indexName).getIndex()); + } else { + final ClusterBlock closingBlock = MetaDataIndexStateService.createIndexClosingBlock(); + state = addBlockedIndex(indexName, randomIntBetween(1, 5), randomIntBetween(0, 5), state, closingBlock); + blockedIndices.put(state.metaData().index(indexName).getIndex(), closingBlock); + results.put(state.metaData().index(indexName).getIndex(), new AcknowledgedResponse(randomBoolean())); + } + } + + final ClusterState updatedState = MetaDataIndexStateService.closeRoutingTable(state, blockedIndices, results); + 
assertThat(updatedState.metaData().indices().size(), equalTo(nonBlockedIndices.size() + blockedIndices.size())); + + for (Index nonBlockedIndex : nonBlockedIndices) { + assertIsOpened(nonBlockedIndex.getName(), updatedState); + assertThat(updatedState.blocks().hasIndexBlockWithId(nonBlockedIndex.getName(), INDEX_CLOSED_BLOCK_ID), is(false)); + } + for (Index blockedIndex : blockedIndices.keySet()) { + if (results.get(blockedIndex).isAcknowledged()) { + assertIsClosed(blockedIndex.getName(), updatedState); + } else { + assertIsOpened(blockedIndex.getName(), updatedState); + assertThat(updatedState.blocks().hasIndexBlockWithId(blockedIndex.getName(), INDEX_CLOSED_BLOCK_ID), is(true)); + } + } + } + + public void testAddIndexClosedBlocks() { + final ClusterState initialState = ClusterState.builder(new ClusterName("testAddIndexClosedBlocks")).build(); + { + final Map blockedIndices = new HashMap<>(); + Index[] indices = new Index[]{new Index("_name", "_uid")}; + expectThrows(IndexNotFoundException.class, () -> + MetaDataIndexStateService.addIndexClosedBlocks(indices, blockedIndices, initialState)); + assertTrue(blockedIndices.isEmpty()); + } + { + final Map blockedIndices = new HashMap<>(); + Index[] indices = Index.EMPTY_ARRAY; + + ClusterState updatedState = MetaDataIndexStateService.addIndexClosedBlocks(indices, blockedIndices, initialState); + assertSame(initialState, updatedState); + assertTrue(blockedIndices.isEmpty()); + } + { + final Map blockedIndices = new HashMap<>(); + ClusterState state = addClosedIndex("closed", randomIntBetween(1, 3), randomIntBetween(0, 3), initialState); + Index[] indices = new Index[]{state.metaData().index("closed").getIndex()}; + + ClusterState updatedState = MetaDataIndexStateService.addIndexClosedBlocks(indices, blockedIndices, state); + assertSame(state, updatedState); + assertTrue(blockedIndices.isEmpty()); + + } + { + final Map blockedIndices = new HashMap<>(); + ClusterState state = addClosedIndex("closed", randomIntBetween(1, 3), randomIntBetween(0, 3), initialState); + state = addOpenedIndex("opened", randomIntBetween(1, 3), randomIntBetween(0, 3), state); + Index[] indices = new Index[]{state.metaData().index("opened").getIndex(), state.metaData().index("closed").getIndex()}; + + ClusterState updatedState = MetaDataIndexStateService.addIndexClosedBlocks(indices, blockedIndices, state); + assertNotSame(state, updatedState); + + Index opened = updatedState.metaData().index("opened").getIndex(); + assertTrue(blockedIndices.containsKey(opened)); + assertHasBlock("opened", updatedState, blockedIndices.get(opened)); + + Index closed = updatedState.metaData().index("closed").getIndex(); + assertFalse(blockedIndices.containsKey(closed)); + } + { + IllegalArgumentException exception = expectThrows(IllegalArgumentException.class, () -> { + ClusterState state = addRestoredIndex("restored", randomIntBetween(1, 3), randomIntBetween(0, 3), initialState); + if (randomBoolean()) { + state = addOpenedIndex("opened", randomIntBetween(1, 3), randomIntBetween(0, 3), state); + } + if (randomBoolean()) { + state = addOpenedIndex("closed", randomIntBetween(1, 3), randomIntBetween(0, 3), state); + } + Index[] indices = new Index[]{state.metaData().index("restored").getIndex()}; + MetaDataIndexStateService.addIndexClosedBlocks(indices, unmodifiableMap(emptyMap()), state); + }); + assertThat(exception.getMessage(), containsString("Cannot close indices that are being restored: [[restored]]")); + } + { + IllegalArgumentException exception = 
expectThrows(IllegalArgumentException.class, () -> { + ClusterState state = addSnapshotIndex("snapshotted", randomIntBetween(1, 3), randomIntBetween(0, 3), initialState); + if (randomBoolean()) { + state = addOpenedIndex("opened", randomIntBetween(1, 3), randomIntBetween(0, 3), state); + } + if (randomBoolean()) { + state = addOpenedIndex("closed", randomIntBetween(1, 3), randomIntBetween(0, 3), state); + } + Index[] indices = new Index[]{state.metaData().index("snapshotted").getIndex()}; + MetaDataIndexStateService.addIndexClosedBlocks(indices, unmodifiableMap(emptyMap()), state); + }); + assertThat(exception.getMessage(), containsString("Cannot close indices that are being snapshotted: [[snapshotted]]")); + } + { + final Map blockedIndices = new HashMap<>(); + ClusterState state = addOpenedIndex("index-1", randomIntBetween(1, 3), randomIntBetween(0, 3), initialState); + state = addOpenedIndex("index-2", randomIntBetween(1, 3), randomIntBetween(0, 3), state); + state = addOpenedIndex("index-3", randomIntBetween(1, 3), randomIntBetween(0, 3), state); + final boolean mixedVersions = randomBoolean(); + if (mixedVersions) { + state = ClusterState.builder(state) + .nodes(DiscoveryNodes.builder(state.nodes()) + .add(new DiscoveryNode("old_node", buildNewFakeTransportAddress(), emptyMap(), + new HashSet<>(Arrays.asList(DiscoveryNode.Role.values())), Version.V_6_0_0))) + .build(); + } + + Index index1 = state.metaData().index("index-1").getIndex(); + Index index2 = state.metaData().index("index-2").getIndex(); + Index index3 = state.metaData().index("index-3").getIndex(); + Index[] indices = new Index[]{index1, index2, index3}; + + ClusterState updatedState = MetaDataIndexStateService.addIndexClosedBlocks(indices, blockedIndices, state); + assertNotSame(state, updatedState); + + for (Index index : indices) { + assertTrue(blockedIndices.containsKey(index)); + if (mixedVersions) { + assertIsClosed(index.getName(), updatedState); + } else { + assertHasBlock(index.getName(), updatedState, blockedIndices.get(index)); + } + } + } + } + + public void testAddIndexClosedBlocksReusesBlocks() { + ClusterState state = ClusterState.builder(new ClusterName("testAddIndexClosedBlocksReuseBlocks")).build(); + state = addOpenedIndex("test", randomIntBetween(1, 3), randomIntBetween(0, 3), state); + + Index test = state.metaData().index("test").getIndex(); + Index[] indices = new Index[]{test}; + + final Map blockedIndices = new HashMap<>(); + state = MetaDataIndexStateService.addIndexClosedBlocks(indices, blockedIndices, state); + + assertTrue(blockedIndices.containsKey(test)); + assertHasBlock(test.getName(), state, blockedIndices.get(test)); + + final Map blockedIndices2 = new HashMap<>(); + state = MetaDataIndexStateService.addIndexClosedBlocks(indices, blockedIndices2, state); + + assertTrue(blockedIndices2.containsKey(test)); + assertHasBlock(test.getName(), state, blockedIndices2.get(test)); + assertEquals(blockedIndices.get(test), blockedIndices2.get(test)); + } + public void testValidateShardLimit() { int nodesInCluster = randomIntBetween(2,100); ClusterShardLimitIT.ShardCounts counts = forDataNodeCount(nodesInCluster); @@ -55,7 +252,6 @@ public void testValidateShardLimit() { .collect(Collectors.toList()) .toArray(new Index[2]); - DeprecationLogger deprecationLogger = new DeprecationLogger(logger); int totalShards = counts.getFailingIndexShards() * (1 + counts.getFailingIndexReplicas()); int currentShards = counts.getFirstIndexShards() * (1 + counts.getFirstIndexReplicas()); int maxShards = 
counts.getShardsPerNode() * nodesInCluster; @@ -69,32 +265,123 @@ public static ClusterState createClusterForShardLimitTest(int nodesInCluster, in int closedIndexShards, int closedIndexReplicas, Settings clusterSettings) { ImmutableOpenMap.Builder<String, DiscoveryNode> dataNodes = ImmutableOpenMap.builder(); for (int i = 0; i < nodesInCluster; i++) { - dataNodes.put(randomAlphaOfLengthBetween(5,15), mock(DiscoveryNode.class)); + dataNodes.put(randomAlphaOfLengthBetween(5, 15), mock(DiscoveryNode.class)); } DiscoveryNodes nodes = mock(DiscoveryNodes.class); when(nodes.getDataNodes()).thenReturn(dataNodes.build()); - IndexMetaData.Builder openIndexMetaData = IndexMetaData.builder(randomAlphaOfLengthBetween(5, 15)) - .settings(Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)) - .creationDate(randomLong()) - .numberOfShards(openIndexShards) - .numberOfReplicas(openIndexReplicas); - IndexMetaData.Builder closedIndexMetaData = IndexMetaData.builder(randomAlphaOfLengthBetween(5, 15)) - .settings(Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)) - .creationDate(randomLong()) - .state(IndexMetaData.State.CLOSE) - .numberOfShards(closedIndexShards) - .numberOfReplicas(closedIndexReplicas); - MetaData.Builder metaData = MetaData.builder().put(openIndexMetaData).put(closedIndexMetaData); + ClusterState state = ClusterState.builder(ClusterName.DEFAULT).build(); + state = addOpenedIndex(randomAlphaOfLengthBetween(5, 15), openIndexShards, openIndexReplicas, state); + state = addClosedIndex(randomAlphaOfLengthBetween(5, 15), closedIndexShards, closedIndexReplicas, state); + + final MetaData.Builder metaData = MetaData.builder(state.metaData()); if (randomBoolean()) { metaData.persistentSettings(clusterSettings); } else { metaData.transientSettings(clusterSettings); } + return ClusterState.builder(state).metaData(metaData).nodes(nodes).build(); + } + + private static ClusterState addOpenedIndex(final String index, final int numShards, final int numReplicas, final ClusterState state) { + return addIndex(state, index, numShards, numReplicas, IndexMetaData.State.OPEN, null); + } + + private static ClusterState addClosedIndex(final String index, final int numShards, final int numReplicas, final ClusterState state) { + return addIndex(state, index, numShards, numReplicas, IndexMetaData.State.CLOSE, INDEX_CLOSED_BLOCK); + } + + private static ClusterState addBlockedIndex(final String index, final int numShards, final int numReplicas, final ClusterState state, + final ClusterBlock closingBlock) { + return addIndex(state, index, numShards, numReplicas, IndexMetaData.State.OPEN, closingBlock); + } + + private static ClusterState addRestoredIndex(final String index, final int numShards, final int numReplicas, final ClusterState state) { + ClusterState newState = addOpenedIndex(index, numShards, numReplicas, state); + + final ImmutableOpenMap.Builder<ShardId, RestoreInProgress.ShardRestoreStatus> shardsBuilder = ImmutableOpenMap.builder(); + for (ShardRouting shardRouting : newState.routingTable().index(index).randomAllActiveShardsIt()) { + shardsBuilder.put(shardRouting.shardId(), new RestoreInProgress.ShardRestoreStatus(shardRouting.currentNodeId())); + } - return ClusterState.builder(ClusterName.DEFAULT) - .metaData(metaData) - .nodes(nodes) + final Snapshot snapshot = new Snapshot(randomAlphaOfLength(10), new SnapshotId(randomAlphaOfLength(5), randomAlphaOfLength(5))); + final RestoreInProgress.Entry entry = + new RestoreInProgress.Entry("_uuid", snapshot, RestoreInProgress.State.INIT, ImmutableList.of(index),
shardsBuilder.build()); + return ClusterState.builder(newState) + .putCustom(RestoreInProgress.TYPE, new RestoreInProgress.Builder().add(entry).build()) .build(); } + + private static ClusterState addSnapshotIndex(final String index, final int numShards, final int numReplicas, final ClusterState state) { + ClusterState newState = addOpenedIndex(index, numShards, numReplicas, state); + + final ImmutableOpenMap.Builder<ShardId, SnapshotsInProgress.ShardSnapshotStatus> shardsBuilder = ImmutableOpenMap.builder(); + for (ShardRouting shardRouting : newState.routingTable().index(index).randomAllActiveShardsIt()) { + shardsBuilder.put(shardRouting.shardId(), new SnapshotsInProgress.ShardSnapshotStatus(shardRouting.currentNodeId())); + } + + final Snapshot snapshot = new Snapshot(randomAlphaOfLength(10), new SnapshotId(randomAlphaOfLength(5), randomAlphaOfLength(5))); + final SnapshotsInProgress.Entry entry = + new SnapshotsInProgress.Entry(snapshot, randomBoolean(), false, SnapshotsInProgress.State.INIT, + Collections.singletonList(new IndexId(index, index)), randomNonNegativeLong(), randomLong(), shardsBuilder.build()); + return ClusterState.builder(newState).putCustom(SnapshotsInProgress.TYPE, new SnapshotsInProgress(entry)).build(); + } + + private static ClusterState addIndex(final ClusterState currentState, + final String index, + final int numShards, + final int numReplicas, + final IndexMetaData.State state, + @Nullable final ClusterBlock block) { + final IndexMetaData indexMetaData = IndexMetaData.builder(index) + .state(state) + .creationDate(randomNonNegativeLong()) + .settings(Settings.builder() + .put(SETTING_VERSION_CREATED, Version.CURRENT) + .put(SETTING_NUMBER_OF_SHARDS, numShards) + .put(SETTING_NUMBER_OF_REPLICAS, numReplicas)) + .build(); + + final ClusterState.Builder clusterStateBuilder = ClusterState.builder(currentState); + clusterStateBuilder.metaData(MetaData.builder(currentState.metaData()).put(indexMetaData, true)); + + if (state == IndexMetaData.State.OPEN) { + final IndexRoutingTable.Builder indexRoutingTable = IndexRoutingTable.builder(indexMetaData.getIndex()); + for (int j = 0; j < indexMetaData.getNumberOfShards(); j++) { + ShardId shardId = new ShardId(indexMetaData.getIndex(), j); + IndexShardRoutingTable.Builder indexShardRoutingBuilder = new IndexShardRoutingTable.Builder(shardId); + indexShardRoutingBuilder.addShard(newShardRouting(shardId, randomAlphaOfLength(10), true, ShardRoutingState.STARTED)); + for (int k = 0; k < indexMetaData.getNumberOfReplicas(); k++) { + indexShardRoutingBuilder.addShard(newShardRouting(shardId, randomAlphaOfLength(10), false, ShardRoutingState.STARTED)); + } + indexRoutingTable.addIndexShard(indexShardRoutingBuilder.build()); + } + clusterStateBuilder.routingTable(RoutingTable.builder(currentState.routingTable()).add(indexRoutingTable).build()); + } + if (block != null) { + clusterStateBuilder.blocks(ClusterBlocks.builder().blocks(currentState.blocks()).addIndexBlock(index, block)); + } + return clusterStateBuilder.build(); + } + + private static void assertIsOpened(final String indexName, final ClusterState clusterState) { + assertThat(clusterState.metaData().index(indexName).getState(), is(IndexMetaData.State.OPEN)); + assertThat(clusterState.routingTable().index(indexName), notNullValue()); + } + + private static void assertIsClosed(final String indexName, final ClusterState clusterState) { + assertThat(clusterState.metaData().index(indexName).getState(), is(IndexMetaData.State.CLOSE)); + assertThat(clusterState.routingTable().index(indexName), nullValue()); +
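// the remaining assertions verify that the closed index also carries the INDEX_CLOSED block, and only one block with that id +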
assertThat(clusterState.blocks().hasIndexBlock(indexName, MetaDataIndexStateService.INDEX_CLOSED_BLOCK), is(true)); + assertThat("Index " + indexName + " must have only 1 block with [id=" + MetaDataIndexStateService.INDEX_CLOSED_BLOCK_ID + "]", + clusterState.blocks().indices().getOrDefault(indexName, emptySet()).stream() + .filter(clusterBlock -> clusterBlock.id() == MetaDataIndexStateService.INDEX_CLOSED_BLOCK_ID).count(), equalTo(1L)); + } + + private static void assertHasBlock(final String indexName, final ClusterState clusterState, final ClusterBlock closingBlock) { + assertThat(clusterState.blocks().hasIndexBlock(indexName, closingBlock), is(true)); + assertThat("Index " + indexName + " must have only 1 block with [id=" + MetaDataIndexStateService.INDEX_CLOSED_BLOCK_ID + "]", + clusterState.blocks().indices().getOrDefault(indexName, emptySet()).stream() + .filter(clusterBlock -> clusterBlock.id() == MetaDataIndexStateService.INDEX_CLOSED_BLOCK_ID).count(), equalTo(1L)); + } } diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/MetaDataIndexStateServiceUtils.java b/server/src/test/java/org/elasticsearch/cluster/metadata/MetaDataIndexStateServiceUtils.java new file mode 100644 index 0000000000000..5ee6a7c60da3d --- /dev/null +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/MetaDataIndexStateServiceUtils.java @@ -0,0 +1,49 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.cluster.metadata; + +import org.elasticsearch.action.support.master.AcknowledgedResponse; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.block.ClusterBlock; +import org.elasticsearch.index.Index; + +import java.util.Map; + +public class MetaDataIndexStateServiceUtils { + + private MetaDataIndexStateServiceUtils() { + } + + /** + * Allows calling {@link MetaDataIndexStateService#addIndexClosedBlocks(Index[], Map, ClusterState)} which is a protected method. + */ + public static ClusterState addIndexClosedBlocks(final Index[] indices, final Map<Index, ClusterBlock> blockedIndices, + final ClusterState state) { + return MetaDataIndexStateService.addIndexClosedBlocks(indices, blockedIndices, state); + } + + /** + * Allows calling {@link MetaDataIndexStateService#closeRoutingTable(ClusterState, Map, Map)} which is a protected method.
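+ * A hypothetical usage sketch of the two helpers together (variable names assumed, not part of this change): + * <pre> + * final Map&lt;Index, ClusterBlock&gt; blockedIndices = new HashMap&lt;&gt;(); + * ClusterState state = MetaDataIndexStateServiceUtils.addIndexClosedBlocks(indices, blockedIndices, initialState); + * state = MetaDataIndexStateServiceUtils.closeRoutingTable(state, blockedIndices, results); + * </pre>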
+ */ + public static ClusterState closeRoutingTable(final ClusterState state, + final Map<Index, ClusterBlock> blockedIndices, + final Map<Index, AcknowledgedResponse> results) { + return MetaDataIndexStateService.closeRoutingTable(state, blockedIndices, results); + } +} diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/PrimaryAllocationIT.java b/server/src/test/java/org/elasticsearch/cluster/routing/PrimaryAllocationIT.java index e2777616f426d..3f826c587e683 100644 --- a/server/src/test/java/org/elasticsearch/cluster/routing/PrimaryAllocationIT.java +++ b/server/src/test/java/org/elasticsearch/cluster/routing/PrimaryAllocationIT.java @@ -34,6 +34,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.set.Sets; import org.elasticsearch.gateway.GatewayAllocator; +import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.index.engine.Engine; import org.elasticsearch.index.engine.EngineTestCase; import org.elasticsearch.index.shard.IndexShard; @@ -50,6 +51,7 @@ import org.elasticsearch.test.junit.annotations.TestLogging; import org.elasticsearch.test.transport.MockTransportService; +import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.Collections; @@ -175,15 +177,17 @@ public void testFailedAllocationOfStalePrimaryToDataNodeWithNoData() throws Exce .getShards().get(0).primaryShard().unassignedInfo().getReason(), equalTo(UnassignedInfo.Reason.NODE_LEFT)); logger.info("--> force allocation of stale copy to node that does not have shard copy"); - client().admin().cluster().prepareReroute().add(new AllocateStalePrimaryAllocationCommand("test", 0, - dataNodeWithNoShardCopy, true)).get(); + Throwable iae = expectThrows( + IllegalArgumentException.class, + () -> client().admin().cluster().prepareReroute().add(new AllocateStalePrimaryAllocationCommand("test", 0, + dataNodeWithNoShardCopy, true)).get()); + assertThat(iae.getMessage(), equalTo("No data for shard [0] of index [test] found on any node")); logger.info("--> wait until shard is failed and becomes unassigned again"); - assertBusy(() -> - assertTrue(client().admin().cluster().prepareState().get().getState().toString(), - client().admin().cluster().prepareState().get().getState().getRoutingTable().index("test").allPrimaryShardsUnassigned())); + assertTrue(client().admin().cluster().prepareState().get().getState().toString(), + client().admin().cluster().prepareState().get().getState().getRoutingTable().index("test").allPrimaryShardsUnassigned()); assertThat(client().admin().cluster().prepareState().get().getState().getRoutingTable().index("test") - .getShards().get(0).primaryShard().unassignedInfo().getReason(), equalTo(UnassignedInfo.Reason.ALLOCATION_FAILED)); + .getShards().get(0).primaryShard().unassignedInfo().getReason(), equalTo(UnassignedInfo.Reason.NODE_LEFT)); } public void testForceStaleReplicaToBePromotedToPrimary() throws Exception { @@ -261,6 +265,44 @@ public void testForceStaleReplicaToBePromotedToPrimary() throws Exception { assertThat(newHistoryUUIds, hasSize(1)); } + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/37345") + public void testForceStaleReplicaToBePromotedToPrimaryOnWrongNode() throws Exception { + String master = internalCluster().startMasterOnlyNode(Settings.EMPTY); + internalCluster().startDataOnlyNodes(2); + final String idxName = "test"; + assertAcked(client().admin().indices().prepareCreate(idxName) + .setSettings(Settings.builder().put("index.number_of_shards", 1) + .put("index.number_of_replicas", 1)).get()); +
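// set up a stale shard copy, then find the single data node that never held any data for the shard +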
ensureGreen(); + createStaleReplicaScenario(master); + internalCluster().startDataOnlyNodes(2); + final int shardId = 0; + final List<String> nodeNames = new ArrayList<>(Arrays.asList(internalCluster().getNodeNames())); + nodeNames.remove(master); + client().admin().indices().prepareShardStores(idxName).get().getStoreStatuses().get(idxName) + .get(shardId).forEach(status -> nodeNames.remove(status.getNode().getName())); + assertThat(nodeNames, hasSize(1)); + final String nodeWithoutData = nodeNames.get(0); + Throwable iae = expectThrows( + IllegalArgumentException.class, + () -> client().admin().cluster().prepareReroute() + .add(new AllocateStalePrimaryAllocationCommand(idxName, shardId, nodeWithoutData, true)).get()); + assertThat( + iae.getMessage(), + equalTo("No data for shard [" + shardId + "] of index [" + idxName + "] found on node [" + nodeWithoutData + ']')); + } + + public void testForceStaleReplicaToBePromotedForMissingIndex() { + internalCluster().startMasterOnlyNode(Settings.EMPTY); + final String dataNode = internalCluster().startDataOnlyNode(); + final String idxName = "test"; + IndexNotFoundException ex = expectThrows( + IndexNotFoundException.class, + () -> client().admin().cluster().prepareReroute() + .add(new AllocateStalePrimaryAllocationCommand(idxName, 0, dataNode, true)).get()); + assertThat(ex.getIndex().getName(), equalTo(idxName)); + } + public void testForcePrimaryShardIfAllocationDecidersSayNoAfterIndexCreation() throws ExecutionException, InterruptedException { String node = internalCluster().startNode(); client().admin().indices().prepareCreate("test").setWaitForActiveShards(ActiveShardCount.NONE).setSettings(Settings.builder() diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/AllocateUnassignedDecisionTests.java b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/AllocateUnassignedDecisionTests.java index 3fcd743a8a1d4..14ed16f82decc 100644 --- a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/AllocateUnassignedDecisionTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/AllocateUnassignedDecisionTests.java @@ -136,9 +136,9 @@ public void testYesDecision() { } public void testCachedDecisions() { - List<AllocationStatus> cachableStatuses = Arrays.asList(AllocationStatus.DECIDERS_NO, AllocationStatus.DECIDERS_THROTTLED, + List<AllocationStatus> cacheableStatuses = Arrays.asList(AllocationStatus.DECIDERS_NO, AllocationStatus.DECIDERS_THROTTLED, AllocationStatus.NO_VALID_SHARD_COPY, AllocationStatus.FETCHING_SHARD_DATA, AllocationStatus.DELAYED_ALLOCATION); - for (AllocationStatus allocationStatus : cachableStatuses) { + for (AllocationStatus allocationStatus : cacheableStatuses) { if (allocationStatus == AllocationStatus.DECIDERS_THROTTLED) { AllocateUnassignedDecision cached = AllocateUnassignedDecision.throttle(null); AllocateUnassignedDecision another = AllocateUnassignedDecision.throttle(null); diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/DiskThresholdSettingsTests.java b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/DiskThresholdSettingsTests.java index 342fcea7ddef1..d9e157187d581 100644 --- a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/DiskThresholdSettingsTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/DiskThresholdSettingsTests.java @@ -26,6 +26,7 @@ import java.util.Locale; +import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.Matchers.containsString; import static
org.hamcrest.Matchers.hasToString; import static org.hamcrest.Matchers.instanceOf; @@ -203,4 +204,50 @@ public void testInvalidHighDiskThreshold() { assertThat(cause, hasToString(containsString("low disk watermark [85%] more than high disk watermark [75%]"))); } + public void testSequenceOfUpdates() { + final ClusterSettings clusterSettings = new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS); + new DiskThresholdSettings(Settings.EMPTY, clusterSettings); // this has the effect of registering the settings updater + + final Settings.Builder target = Settings.builder(); + + { + final Settings settings = Settings.builder() + .put(DiskThresholdSettings.CLUSTER_ROUTING_ALLOCATION_DISK_FLOOD_STAGE_WATERMARK_SETTING.getKey(), "99%") + .build(); + final Settings.Builder updates = Settings.builder(); + assertTrue(clusterSettings.updateSettings(settings, target, updates, "transient")); + assertNull(target.get(DiskThresholdSettings.CLUSTER_ROUTING_ALLOCATION_LOW_DISK_WATERMARK_SETTING.getKey())); + assertNull(target.get(DiskThresholdSettings.CLUSTER_ROUTING_ALLOCATION_HIGH_DISK_WATERMARK_SETTING.getKey())); + assertThat(target.get(DiskThresholdSettings.CLUSTER_ROUTING_ALLOCATION_DISK_FLOOD_STAGE_WATERMARK_SETTING.getKey()), + equalTo("99%")); + } + + { + final Settings settings = Settings.builder() + .put(DiskThresholdSettings.CLUSTER_ROUTING_ALLOCATION_HIGH_DISK_WATERMARK_SETTING.getKey(), "97%") + .build(); + final Settings.Builder updates = Settings.builder(); + assertTrue(clusterSettings.updateSettings(settings, target, updates, "transient")); + assertNull(target.get(DiskThresholdSettings.CLUSTER_ROUTING_ALLOCATION_LOW_DISK_WATERMARK_SETTING.getKey())); + assertThat(target.get(DiskThresholdSettings.CLUSTER_ROUTING_ALLOCATION_HIGH_DISK_WATERMARK_SETTING.getKey()), + equalTo("97%")); + assertThat(target.get(DiskThresholdSettings.CLUSTER_ROUTING_ALLOCATION_DISK_FLOOD_STAGE_WATERMARK_SETTING.getKey()), + equalTo("99%")); + } + + { + final Settings settings = Settings.builder() + .put(DiskThresholdSettings.CLUSTER_ROUTING_ALLOCATION_LOW_DISK_WATERMARK_SETTING.getKey(), "95%") + .build(); + final Settings.Builder updates = Settings.builder(); + assertTrue(clusterSettings.updateSettings(settings, target, updates, "transient")); + assertThat(target.get(DiskThresholdSettings.CLUSTER_ROUTING_ALLOCATION_LOW_DISK_WATERMARK_SETTING.getKey()), + equalTo("95%")); + assertThat(target.get(DiskThresholdSettings.CLUSTER_ROUTING_ALLOCATION_HIGH_DISK_WATERMARK_SETTING.getKey()), + equalTo("97%")); + assertThat(target.get(DiskThresholdSettings.CLUSTER_ROUTING_ALLOCATION_DISK_FLOOD_STAGE_WATERMARK_SETTING.getKey()), + equalTo("99%")); + } + } + } diff --git a/server/src/test/java/org/elasticsearch/common/geo/GeoJsonShapeParserTests.java b/server/src/test/java/org/elasticsearch/common/geo/GeoJsonShapeParserTests.java index 2acabee8797f4..7cfa4e7d9c5dc 100644 --- a/server/src/test/java/org/elasticsearch/common/geo/GeoJsonShapeParserTests.java +++ b/server/src/test/java/org/elasticsearch/common/geo/GeoJsonShapeParserTests.java @@ -1126,7 +1126,7 @@ public void testParseGeometryCollection() throws IOException { ), new org.apache.lucene.geo.Polygon( new double[] {12.142857142857142d, -12.142857142857142d, -10d, 10d, 12.142857142857142d}, - new double[] {180d, 180d, -177d, -177d, 180d} + new double[] {-180d, -180d, -177d, -177d, -180d} ) }; assertGeometryEquals(luceneExpected, geometryCollectionGeoJson, false); diff --git 
a/server/src/test/java/org/elasticsearch/common/joda/JavaJodaTimeDuellingTests.java b/server/src/test/java/org/elasticsearch/common/joda/JavaJodaTimeDuellingTests.java index 5840f7ef60191..b2370dadb604c 100644 --- a/server/src/test/java/org/elasticsearch/common/joda/JavaJodaTimeDuellingTests.java +++ b/server/src/test/java/org/elasticsearch/common/joda/JavaJodaTimeDuellingTests.java @@ -19,6 +19,7 @@ package org.elasticsearch.common.joda; +import org.elasticsearch.bootstrap.JavaVersion; import org.elasticsearch.common.time.DateFormatter; import org.elasticsearch.common.time.DateFormatters; import org.elasticsearch.test.ESTestCase; @@ -71,8 +72,6 @@ public void testCustomTimeFormats() { public void testDuellingFormatsValidParsing() { assertSameDate("1522332219", "epoch_second"); - assertSameDate("1522332219.", "epoch_second"); - assertSameDate("1522332219.0", "epoch_second"); assertSameDate("0", "epoch_second"); assertSameDate("1", "epoch_second"); assertSameDate("1522332219321", "epoch_millis"); @@ -85,18 +84,36 @@ public void testDuellingFormatsValidParsing() { assertSameDate("20181126T121212.123-0800", "basic_date_time"); assertSameDate("20181126T121212Z", "basic_date_time_no_millis"); + assertSameDate("20181126T121212+01:00", "basic_date_time_no_millis"); + assertSameDate("20181126T121212+0100", "basic_date_time_no_millis"); assertSameDate("2018363", "basic_ordinal_date"); assertSameDate("2018363T121212.123Z", "basic_ordinal_date_time"); + assertSameDate("2018363T121212.123+0100", "basic_ordinal_date_time"); + assertSameDate("2018363T121212.123+01:00", "basic_ordinal_date_time"); assertSameDate("2018363T121212Z", "basic_ordinal_date_time_no_millis"); + assertSameDate("2018363T121212+0100", "basic_ordinal_date_time_no_millis"); + assertSameDate("2018363T121212+01:00", "basic_ordinal_date_time_no_millis"); assertSameDate("121212.123Z", "basic_time"); + assertSameDate("121212.123+0100", "basic_time"); + assertSameDate("121212.123+01:00", "basic_time"); assertSameDate("121212Z", "basic_time_no_millis"); + assertSameDate("121212+0100", "basic_time_no_millis"); + assertSameDate("121212+01:00", "basic_time_no_millis"); assertSameDate("T121212.123Z", "basic_t_time"); + assertSameDate("T121212.123+0100", "basic_t_time"); + assertSameDate("T121212.123+01:00", "basic_t_time"); assertSameDate("T121212Z", "basic_t_time_no_millis"); + assertSameDate("T121212+0100", "basic_t_time_no_millis"); + assertSameDate("T121212+01:00", "basic_t_time_no_millis"); assertSameDate("2018W313", "basic_week_date"); assertSameDate("1W313", "basic_week_date"); assertSameDate("18W313", "basic_week_date"); assertSameDate("2018W313T121212.123Z", "basic_week_date_time"); + assertSameDate("2018W313T121212.123+0100", "basic_week_date_time"); + assertSameDate("2018W313T121212.123+01:00", "basic_week_date_time"); assertSameDate("2018W313T121212Z", "basic_week_date_time_no_millis"); + assertSameDate("2018W313T121212+0100", "basic_week_date_time_no_millis"); + assertSameDate("2018W313T121212+01:00", "basic_week_date_time_no_millis"); assertSameDate("2018-12-31", "date"); assertSameDate("18-5-6", "date"); @@ -126,6 +143,9 @@ public void testDuellingFormatsValidParsing() { assertSameDate("2018-05-30T20:21", "date_optional_time"); assertSameDate("2018-05-30T20:21:23", "date_optional_time"); assertSameDate("2018-05-30T20:21:23.123", "date_optional_time"); + assertSameDate("2018-05-30T20:21:23.123Z", "date_optional_time"); + assertSameDate("2018-05-30T20:21:23.123+0100", "date_optional_time"); + 
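// the extended offset form (+01:00) must parse exactly like the basic form (+0100) above +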
assertSameDate("2018-05-30T20:21:23.123+01:00", "date_optional_time"); assertSameDate("2018-12-1", "date_optional_time"); assertSameDate("2018-12-31T10:15:30", "date_optional_time"); assertSameDate("2018-12-31T10:15:3", "date_optional_time"); @@ -133,13 +153,27 @@ public void testDuellingFormatsValidParsing() { assertSameDate("2018-12-31T1:15:30", "date_optional_time"); assertSameDate("2018-12-31T10:15:30.123Z", "date_time"); + assertSameDate("2018-12-31T10:15:30.123+0100", "date_time"); + assertSameDate("2018-12-31T10:15:30.123+01:00", "date_time"); assertSameDate("2018-12-31T10:15:30.11Z", "date_time"); + assertSameDate("2018-12-31T10:15:30.11+0100", "date_time"); + assertSameDate("2018-12-31T10:15:30.11+01:00", "date_time"); assertSameDate("2018-12-31T10:15:3.123Z", "date_time"); + assertSameDate("2018-12-31T10:15:3.123+0100", "date_time"); + assertSameDate("2018-12-31T10:15:3.123+01:00", "date_time"); assertSameDate("2018-12-31T10:15:30Z", "date_time_no_millis"); + assertSameDate("2018-12-31T10:15:30+0100", "date_time_no_millis"); + assertSameDate("2018-12-31T10:15:30+01:00", "date_time_no_millis"); assertSameDate("2018-12-31T10:5:30Z", "date_time_no_millis"); + assertSameDate("2018-12-31T10:5:30+0100", "date_time_no_millis"); + assertSameDate("2018-12-31T10:5:30+01:00", "date_time_no_millis"); assertSameDate("2018-12-31T10:15:3Z", "date_time_no_millis"); + assertSameDate("2018-12-31T10:15:3+0100", "date_time_no_millis"); + assertSameDate("2018-12-31T10:15:3+01:00", "date_time_no_millis"); assertSameDate("2018-12-31T1:15:30Z", "date_time_no_millis"); + assertSameDate("2018-12-31T1:15:30+0100", "date_time_no_millis"); + assertSameDate("2018-12-31T1:15:30+01:00", "date_time_no_millis"); assertSameDate("12", "hour"); assertSameDate("01", "hour"); @@ -164,36 +198,78 @@ public void testDuellingFormatsValidParsing() { assertSameDate("2018-1", "ordinal_date"); assertSameDate("2018-128T10:15:30.123Z", "ordinal_date_time"); + assertSameDate("2018-128T10:15:30.123+0100", "ordinal_date_time"); + assertSameDate("2018-128T10:15:30.123+01:00", "ordinal_date_time"); assertSameDate("2018-1T10:15:30.123Z", "ordinal_date_time"); + assertSameDate("2018-1T10:15:30.123+0100", "ordinal_date_time"); + assertSameDate("2018-1T10:15:30.123+01:00", "ordinal_date_time"); assertSameDate("2018-128T10:15:30Z", "ordinal_date_time_no_millis"); + assertSameDate("2018-128T10:15:30+0100", "ordinal_date_time_no_millis"); + assertSameDate("2018-128T10:15:30+01:00", "ordinal_date_time_no_millis"); assertSameDate("2018-1T10:15:30Z", "ordinal_date_time_no_millis"); + assertSameDate("2018-1T10:15:30+0100", "ordinal_date_time_no_millis"); + assertSameDate("2018-1T10:15:30+01:00", "ordinal_date_time_no_millis"); assertSameDate("10:15:30.123Z", "time"); + assertSameDate("10:15:30.123+0100", "time"); + assertSameDate("10:15:30.123+01:00", "time"); assertSameDate("1:15:30.123Z", "time"); + assertSameDate("1:15:30.123+0100", "time"); + assertSameDate("1:15:30.123+01:00", "time"); assertSameDate("10:1:30.123Z", "time"); + assertSameDate("10:1:30.123+0100", "time"); + assertSameDate("10:1:30.123+01:00", "time"); assertSameDate("10:15:3.123Z", "time"); + assertSameDate("10:15:3.123+0100", "time"); + assertSameDate("10:15:3.123+01:00", "time"); assertParseException("10:15:3.1", "time"); assertParseException("10:15:3Z", "time"); assertSameDate("10:15:30Z", "time_no_millis"); + assertSameDate("10:15:30+0100", "time_no_millis"); + assertSameDate("10:15:30+01:00", "time_no_millis"); assertSameDate("01:15:30Z", "time_no_millis"); + 
assertSameDate("01:15:30+0100", "time_no_millis"); + assertSameDate("01:15:30+01:00", "time_no_millis"); assertSameDate("1:15:30Z", "time_no_millis"); + assertSameDate("1:15:30+0100", "time_no_millis"); + assertSameDate("1:15:30+01:00", "time_no_millis"); assertSameDate("10:5:30Z", "time_no_millis"); + assertSameDate("10:5:30+0100", "time_no_millis"); + assertSameDate("10:5:30+01:00", "time_no_millis"); assertSameDate("10:15:3Z", "time_no_millis"); + assertSameDate("10:15:3+0100", "time_no_millis"); + assertSameDate("10:15:3+01:00", "time_no_millis"); assertParseException("10:15:3", "time_no_millis"); assertSameDate("T10:15:30.123Z", "t_time"); + assertSameDate("T10:15:30.123+0100", "t_time"); + assertSameDate("T10:15:30.123+01:00", "t_time"); assertSameDate("T1:15:30.123Z", "t_time"); + assertSameDate("T1:15:30.123+0100", "t_time"); + assertSameDate("T1:15:30.123+01:00", "t_time"); assertSameDate("T10:1:30.123Z", "t_time"); + assertSameDate("T10:1:30.123+0100", "t_time"); + assertSameDate("T10:1:30.123+01:00", "t_time"); assertSameDate("T10:15:3.123Z", "t_time"); + assertSameDate("T10:15:3.123+0100", "t_time"); + assertSameDate("T10:15:3.123+01:00", "t_time"); assertParseException("T10:15:3.1", "t_time"); assertParseException("T10:15:3Z", "t_time"); assertSameDate("T10:15:30Z", "t_time_no_millis"); + assertSameDate("T10:15:30+0100", "t_time_no_millis"); + assertSameDate("T10:15:30+01:00", "t_time_no_millis"); assertSameDate("T1:15:30Z", "t_time_no_millis"); + assertSameDate("T1:15:30+0100", "t_time_no_millis"); + assertSameDate("T1:15:30+01:00", "t_time_no_millis"); assertSameDate("T10:1:30Z", "t_time_no_millis"); + assertSameDate("T10:1:30+0100", "t_time_no_millis"); + assertSameDate("T10:1:30+01:00", "t_time_no_millis"); assertSameDate("T10:15:3Z", "t_time_no_millis"); + assertSameDate("T10:15:3+0100", "t_time_no_millis"); + assertSameDate("T10:15:3+01:00", "t_time_no_millis"); assertParseException("T10:15:3", "t_time_no_millis"); assertSameDate("2012-W48-6", "week_date"); @@ -205,10 +281,18 @@ public void testDuellingFormatsValidParsing() { assertJavaTimeParseException("2012-W1-8", "week_date", "Text '2012-W1-8' could not be parsed"); assertSameDate("2012-W48-6T10:15:30.123Z", "week_date_time"); + assertSameDate("2012-W48-6T10:15:30.123+0100", "week_date_time"); + assertSameDate("2012-W48-6T10:15:30.123+01:00", "week_date_time"); assertSameDate("2012-W1-6T10:15:30.123Z", "week_date_time"); + assertSameDate("2012-W1-6T10:15:30.123+0100", "week_date_time"); + assertSameDate("2012-W1-6T10:15:30.123+01:00", "week_date_time"); assertSameDate("2012-W48-6T10:15:30Z", "week_date_time_no_millis"); + assertSameDate("2012-W48-6T10:15:30+0100", "week_date_time_no_millis"); + assertSameDate("2012-W48-6T10:15:30+01:00", "week_date_time_no_millis"); assertSameDate("2012-W1-6T10:15:30Z", "week_date_time_no_millis"); + assertSameDate("2012-W1-6T10:15:30+0100", "week_date_time_no_millis"); + assertSameDate("2012-W1-6T10:15:30+01:00", "week_date_time_no_millis"); assertSameDate("2012", "year"); assertSameDate("1", "year"); @@ -237,14 +321,24 @@ public void testDuelingStrictParsing() { assertSameDate("2018W313", "strict_basic_week_date"); assertParseException("18W313", "strict_basic_week_date"); assertSameDate("2018W313T121212.123Z", "strict_basic_week_date_time"); + assertSameDate("2018W313T121212.123+0100", "strict_basic_week_date_time"); + assertSameDate("2018W313T121212.123+01:00", "strict_basic_week_date_time"); assertParseException("2018W313T12128.123Z", "strict_basic_week_date_time"); 
assertParseException("2018W313T81212.123Z", "strict_basic_week_date_time"); assertParseException("2018W313T12812.123Z", "strict_basic_week_date_time"); assertParseException("2018W313T12812.1Z", "strict_basic_week_date_time"); assertSameDate("2018W313T121212Z", "strict_basic_week_date_time_no_millis"); + assertSameDate("2018W313T121212+0100", "strict_basic_week_date_time_no_millis"); + assertSameDate("2018W313T121212+01:00", "strict_basic_week_date_time_no_millis"); assertParseException("2018W313T12128Z", "strict_basic_week_date_time_no_millis"); + assertParseException("2018W313T12128+0100", "strict_basic_week_date_time_no_millis"); + assertParseException("2018W313T12128+01:00", "strict_basic_week_date_time_no_millis"); assertParseException("2018W313T81212Z", "strict_basic_week_date_time_no_millis"); + assertParseException("2018W313T81212+0100", "strict_basic_week_date_time_no_millis"); + assertParseException("2018W313T81212+01:00", "strict_basic_week_date_time_no_millis"); assertParseException("2018W313T12812Z", "strict_basic_week_date_time_no_millis"); + assertParseException("2018W313T12812+0100", "strict_basic_week_date_time_no_millis"); + assertParseException("2018W313T12812+01:00", "strict_basic_week_date_time_no_millis"); assertSameDate("2018-12-31", "strict_date"); assertParseException("10000-12-31", "strict_date"); assertParseException("2018-8-31", "strict_date"); @@ -265,15 +359,24 @@ public void testDuelingStrictParsing() { assertParseException("2018-1-31", "strict_date_optional_time"); assertParseException("10000-01-31", "strict_date_optional_time"); assertSameDate("2018-12-31T10:15:30", "strict_date_optional_time"); + assertSameDate("2018-12-31T10:15:30Z", "strict_date_optional_time"); + assertSameDate("2018-12-31T10:15:30+0100", "strict_date_optional_time"); + assertSameDate("2018-12-31T10:15:30+01:00", "strict_date_optional_time"); assertParseException("2018-12-31T10:15:3", "strict_date_optional_time"); assertParseException("2018-12-31T10:5:30", "strict_date_optional_time"); assertParseException("2018-12-31T9:15:30", "strict_date_optional_time"); assertSameDate("2018-12-31T10:15:30.123Z", "strict_date_time"); + assertSameDate("2018-12-31T10:15:30.123+0100", "strict_date_time"); + assertSameDate("2018-12-31T10:15:30.123+01:00", "strict_date_time"); assertSameDate("2018-12-31T10:15:30.11Z", "strict_date_time"); + assertSameDate("2018-12-31T10:15:30.11+0100", "strict_date_time"); + assertSameDate("2018-12-31T10:15:30.11+01:00", "strict_date_time"); assertParseException("2018-12-31T10:15:3.123Z", "strict_date_time"); assertParseException("2018-12-31T10:5:30.123Z", "strict_date_time"); assertParseException("2018-12-31T1:15:30.123Z", "strict_date_time"); assertSameDate("2018-12-31T10:15:30Z", "strict_date_time_no_millis"); + assertSameDate("2018-12-31T10:15:30+0100", "strict_date_time_no_millis"); + assertSameDate("2018-12-31T10:15:30+01:00", "strict_date_time_no_millis"); assertParseException("2018-12-31T10:5:30Z", "strict_date_time_no_millis"); assertParseException("2018-12-31T10:15:3Z", "strict_date_time_no_millis"); assertParseException("2018-12-31T1:15:30Z", "strict_date_time_no_millis"); @@ -296,12 +399,18 @@ public void testDuelingStrictParsing() { assertParseException("2018-1", "strict_ordinal_date"); assertSameDate("2018-128T10:15:30.123Z", "strict_ordinal_date_time"); + assertSameDate("2018-128T10:15:30.123+0100", "strict_ordinal_date_time"); + assertSameDate("2018-128T10:15:30.123+01:00", "strict_ordinal_date_time"); assertParseException("2018-1T10:15:30.123Z", 
"strict_ordinal_date_time"); assertSameDate("2018-128T10:15:30Z", "strict_ordinal_date_time_no_millis"); + assertSameDate("2018-128T10:15:30+0100", "strict_ordinal_date_time_no_millis"); + assertSameDate("2018-128T10:15:30+01:00", "strict_ordinal_date_time_no_millis"); assertParseException("2018-1T10:15:30Z", "strict_ordinal_date_time_no_millis"); assertSameDate("10:15:30.123Z", "strict_time"); + assertSameDate("10:15:30.123+0100", "strict_time"); + assertSameDate("10:15:30.123+01:00", "strict_time"); assertParseException("1:15:30.123Z", "strict_time"); assertParseException("10:1:30.123Z", "strict_time"); assertParseException("10:15:3.123Z", "strict_time"); @@ -309,13 +418,19 @@ public void testDuelingStrictParsing() { assertParseException("10:15:3Z", "strict_time"); assertSameDate("10:15:30Z", "strict_time_no_millis"); + assertSameDate("10:15:30+0100", "strict_time_no_millis"); + assertSameDate("10:15:30+01:00", "strict_time_no_millis"); assertSameDate("01:15:30Z", "strict_time_no_millis"); + assertSameDate("01:15:30+0100", "strict_time_no_millis"); + assertSameDate("01:15:30+01:00", "strict_time_no_millis"); assertParseException("1:15:30Z", "strict_time_no_millis"); assertParseException("10:5:30Z", "strict_time_no_millis"); assertParseException("10:15:3Z", "strict_time_no_millis"); assertParseException("10:15:3", "strict_time_no_millis"); assertSameDate("T10:15:30.123Z", "strict_t_time"); + assertSameDate("T10:15:30.123+0100", "strict_t_time"); + assertSameDate("T10:15:30.123+01:00", "strict_t_time"); assertParseException("T1:15:30.123Z", "strict_t_time"); assertParseException("T10:1:30.123Z", "strict_t_time"); assertParseException("T10:15:3.123Z", "strict_t_time"); @@ -323,6 +438,8 @@ public void testDuelingStrictParsing() { assertParseException("T10:15:3Z", "strict_t_time"); assertSameDate("T10:15:30Z", "strict_t_time_no_millis"); + assertSameDate("T10:15:30+0100", "strict_t_time_no_millis"); + assertSameDate("T10:15:30+01:00", "strict_t_time_no_millis"); assertParseException("T1:15:30Z", "strict_t_time_no_millis"); assertParseException("T10:1:30Z", "strict_t_time_no_millis"); assertParseException("T10:15:3Z", "strict_t_time_no_millis"); @@ -342,9 +459,13 @@ public void testDuelingStrictParsing() { assertJavaTimeParseException("2012-W01-8", "strict_week_date", "Text '2012-W01-8' could not be parsed"); assertSameDate("2012-W48-6T10:15:30.123Z", "strict_week_date_time"); + assertSameDate("2012-W48-6T10:15:30.123+0100", "strict_week_date_time"); + assertSameDate("2012-W48-6T10:15:30.123+01:00", "strict_week_date_time"); assertParseException("2012-W1-6T10:15:30.123Z", "strict_week_date_time"); assertSameDate("2012-W48-6T10:15:30Z", "strict_week_date_time_no_millis"); + assertSameDate("2012-W48-6T10:15:30+0100", "strict_week_date_time_no_millis"); + assertSameDate("2012-W48-6T10:15:30+01:00", "strict_week_date_time_no_millis"); assertParseException("2012-W1-6T10:15:30Z", "strict_week_date_time_no_millis"); assertSameDate("2012", "strict_year"); @@ -384,6 +505,7 @@ public void testSamePrinterOutput() { ZonedDateTime javaDate = ZonedDateTime.of(year, month, day, hour, minute, second, 0, ZoneOffset.UTC); DateTime jodaDate = new DateTime(year, month, day, hour, minute, second, DateTimeZone.UTC); + assertSamePrinterOutput("epoch_second", javaDate, jodaDate); assertSamePrinterOutput("basicDate", javaDate, jodaDate); assertSamePrinterOutput("basicDateTime", javaDate, jodaDate); @@ -428,7 +550,7 @@ public void testSamePrinterOutput() { assertSamePrinterOutput("year", javaDate, jodaDate); 
assertSamePrinterOutput("yearMonth", javaDate, jodaDate); assertSamePrinterOutput("yearMonthDay", javaDate, jodaDate); - assertSamePrinterOutput("epoch_second", javaDate, jodaDate); + assertSamePrinterOutput("epoch_millis", javaDate, jodaDate); assertSamePrinterOutput("strictBasicWeekDate", javaDate, jodaDate); assertSamePrinterOutput("strictBasicWeekDateTime", javaDate, jodaDate); @@ -476,6 +598,12 @@ private void assertSamePrinterOutput(String format, ZonedDateTime javaDate, Date assertThat(jodaDate.getMillis(), is(javaDate.toInstant().toEpochMilli())); String javaTimeOut = DateFormatters.forPattern(format).format(javaDate); String jodaTimeOut = DateFormatter.forPattern(format).formatJoda(jodaDate); + if (JavaVersion.current().getVersion().get(0) == 8 && javaTimeOut.endsWith(".0") + && (format.equals("epoch_second") || format.equals("epoch_millis"))) { + // java 8 has a bug in DateTimeFormatter usage when printing dates that rely on isSupportedBy for fields, which is + // what we use for epoch time. This change accounts for that bug. It should be removed when java 8 support is removed + jodaTimeOut += ".0"; + } String message = String.format(Locale.ROOT, "expected string representation to be equal for format [%s]: joda [%s], java [%s]", format, jodaTimeOut, javaTimeOut); assertThat(message, javaTimeOut, is(jodaTimeOut)); @@ -484,7 +612,6 @@ private void assertSamePrinterOutput(String format, ZonedDateTime javaDate, Date private void assertSameDate(String input, String format) { DateFormatter jodaFormatter = Joda.forPattern(format); DateFormatter javaFormatter = DateFormatters.forPattern(format); - assertSameDate(input, format, jodaFormatter, javaFormatter); } diff --git a/server/src/test/java/org/elasticsearch/common/joda/JodaTests.java b/server/src/test/java/org/elasticsearch/common/joda/JodaTests.java index a00bffe2bf6f2..fde9d73fae892 100644 --- a/server/src/test/java/org/elasticsearch/common/joda/JodaTests.java +++ b/server/src/test/java/org/elasticsearch/common/joda/JodaTests.java @@ -29,7 +29,6 @@ public class JodaTests extends ESTestCase { - public void testBasicTTimePattern() { DateFormatter formatter1 = DateFormatter.forPattern("basic_t_time"); assertEquals(formatter1.pattern(), "basic_t_time"); diff --git a/server/src/test/java/org/elasticsearch/common/logging/DeprecationLoggerTests.java b/server/src/test/java/org/elasticsearch/common/logging/DeprecationLoggerTests.java index 537bb3db70aca..740430ac0993b 100644 --- a/server/src/test/java/org/elasticsearch/common/logging/DeprecationLoggerTests.java +++ b/server/src/test/java/org/elasticsearch/common/logging/DeprecationLoggerTests.java @@ -19,8 +19,13 @@ package org.elasticsearch.common.logging; import com.carrotsearch.randomizedtesting.generators.CodepointSetGenerator; - import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.apache.logging.log4j.simple.SimpleLoggerContext; +import org.apache.logging.log4j.simple.SimpleLoggerContextFactory; +import org.apache.logging.log4j.spi.ExtendedLogger; +import org.apache.logging.log4j.spi.LoggerContext; +import org.apache.logging.log4j.spi.LoggerContextFactory; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.test.ESTestCase; @@ -28,14 +33,21 @@ import org.hamcrest.core.IsSame; import java.io.IOException; +import java.net.URI; +import java.nio.charset.StandardCharsets; +import java.security.AccessControlContext; +import java.security.AccessController; +import 
java.security.Permissions; +import java.security.PrivilegedAction; +import java.security.ProtectionDomain; import java.util.Collections; import java.util.HashSet; import java.util.List; import java.util.Locale; import java.util.Map; import java.util.Set; +import java.util.concurrent.atomic.AtomicBoolean; import java.util.stream.IntStream; -import java.nio.charset.StandardCharsets; import static org.elasticsearch.common.logging.DeprecationLogger.WARNING_HEADER_PATTERN; import static org.elasticsearch.test.hamcrest.RegexMatcher.matches; @@ -43,6 +55,10 @@ import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.not; +import static org.hamcrest.core.Is.is; +import static org.mockito.Mockito.doAnswer; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; /** * Tests {@link DeprecationLogger} @@ -303,6 +319,49 @@ public void testWarningHeaderSizeSetting() throws IOException{ } } + public void testLogPermissions() { + AtomicBoolean supplierCalled = new AtomicBoolean(false); + + // mocking the logger used inside DeprecationLogger requires heavy hacking... + Logger parentLogger = mock(Logger.class); + when(parentLogger.getName()).thenReturn("logger"); + ExtendedLogger mockLogger = mock(ExtendedLogger.class); + doAnswer(invocationOnMock -> { + supplierCalled.set(true); + createTempDir(); // trigger file permission, like rolling logs would + return null; + }).when(mockLogger).warn("foo", new Object[] {"bar"}); + final LoggerContext context = new SimpleLoggerContext() { + @Override + public ExtendedLogger getLogger(String name) { + return mockLogger; + } + }; + + final LoggerContextFactory originalFactory = LogManager.getFactory(); + try { + LogManager.setFactory(new SimpleLoggerContextFactory() { + @Override + public LoggerContext getContext(String fqcn, ClassLoader loader, Object externalContext, boolean currentContext, + URI configLocation, String name) { + return context; + } + }); + DeprecationLogger deprecationLogger = new DeprecationLogger(parentLogger); + + AccessControlContext noPermissionsAcc = new AccessControlContext( + new ProtectionDomain[]{new ProtectionDomain(null, new Permissions())} + ); + AccessController.doPrivileged((PrivilegedAction<Void>) () -> { + deprecationLogger.deprecated("foo", "bar"); + return null; + }, noPermissionsAcc); + assertThat("supplier called", supplierCalled.get(), is(true)); + } finally { + LogManager.setFactory(originalFactory); + } + } + private String range(int lowerInclusive, int upperInclusive) { return IntStream .range(lowerInclusive, upperInclusive + 1) diff --git a/server/src/test/java/org/elasticsearch/common/settings/ScopedSettingsTests.java b/server/src/test/java/org/elasticsearch/common/settings/ScopedSettingsTests.java index 9194a60382d0d..fc732fbd88e2e 100644 --- a/server/src/test/java/org/elasticsearch/common/settings/ScopedSettingsTests.java +++ b/server/src/test/java/org/elasticsearch/common/settings/ScopedSettingsTests.java @@ -37,7 +37,6 @@ import java.util.Collections; import java.util.HashMap; import java.util.HashSet; -import java.util.Iterator; import java.util.LinkedHashSet; import java.util.List; import java.util.Map; @@ -51,9 +50,7 @@ import static org.hamcrest.CoreMatchers.containsString; import static org.hamcrest.CoreMatchers.equalTo; -import static org.hamcrest.CoreMatchers.instanceOf; import static org.hamcrest.CoreMatchers.startsWith; -import static org.hamcrest.Matchers.arrayWithSize; import static org.hamcrest.Matchers.hasToString;
import static org.hamcrest.Matchers.sameInstance; @@ -514,94 +511,6 @@ public void testApply() { assertEquals(15, bC.get()); } - private static final Setting<Integer> FOO_BAR_LOW_SETTING = new Setting<>( - "foo.bar.low", - "1", - Integer::parseInt, - new FooBarLowValidator(), - Property.Dynamic, - Property.NodeScope); - - private static final Setting<Integer> FOO_BAR_HIGH_SETTING = new Setting<>( - "foo.bar.high", - "2", - Integer::parseInt, - new FooBarHighValidator(), - Property.Dynamic, - Property.NodeScope); - - static class FooBarLowValidator implements Setting.Validator<Integer> { - @Override - public void validate(Integer value, Map<Setting<Integer>, Integer> settings) { - final int high = settings.get(FOO_BAR_HIGH_SETTING); - if (value > high) { - throw new IllegalArgumentException("low [" + value + "] more than high [" + high + "]"); - } - } - - @Override - public Iterator<Setting<Integer>> settings() { - return Collections.singletonList(FOO_BAR_HIGH_SETTING).iterator(); - } - } - - static class FooBarHighValidator implements Setting.Validator<Integer> { - @Override - public void validate(Integer value, Map<Setting<Integer>, Integer> settings) { - final int low = settings.get(FOO_BAR_LOW_SETTING); - if (value < low) { - throw new IllegalArgumentException("high [" + value + "] less than low [" + low + "]"); - } - } - - @Override - public Iterator<Setting<Integer>> settings() { - return Collections.singletonList(FOO_BAR_LOW_SETTING).iterator(); - } - } - - public void testValidator() { - final AbstractScopedSettings service = - new ClusterSettings(Settings.EMPTY, new HashSet<>(Arrays.asList(FOO_BAR_LOW_SETTING, FOO_BAR_HIGH_SETTING))); - - final AtomicInteger consumerLow = new AtomicInteger(); - final AtomicInteger consumerHigh = new AtomicInteger(); - - service.addSettingsUpdateConsumer(FOO_BAR_LOW_SETTING, consumerLow::set); - - service.addSettingsUpdateConsumer(FOO_BAR_HIGH_SETTING, consumerHigh::set); - - final Settings newSettings = Settings.builder().put("foo.bar.low", 17).put("foo.bar.high", 13).build(); - { - final IllegalArgumentException e = - expectThrows( - IllegalArgumentException.class, - () -> service.validateUpdate(newSettings)); - assertThat(e, hasToString(containsString("illegal value can't update [foo.bar.low] from [1] to [17]"))); - assertNotNull(e.getCause()); - assertThat(e.getCause(), instanceOf(IllegalArgumentException.class)); - final IllegalArgumentException cause = (IllegalArgumentException) e.getCause(); - assertThat(cause, hasToString(containsString("low [17] more than high [13]"))); - assertThat(e.getSuppressed(), arrayWithSize(1)); - assertThat(e.getSuppressed()[0], instanceOf(IllegalArgumentException.class)); - final IllegalArgumentException suppressed = (IllegalArgumentException) e.getSuppressed()[0]; - assertThat(suppressed, hasToString(containsString("illegal value can't update [foo.bar.high] from [2] to [13]"))); - assertNotNull(suppressed.getCause()); - assertThat(suppressed.getCause(), instanceOf(IllegalArgumentException.class)); - final IllegalArgumentException suppressedCause = (IllegalArgumentException) suppressed.getCause(); - assertThat(suppressedCause, hasToString(containsString("high [13] less than low [17]"))); - assertThat(consumerLow.get(), equalTo(0)); - assertThat(consumerHigh.get(), equalTo(0)); - } - - { - final IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> service.applySettings(newSettings)); - assertThat(e, hasToString(containsString("illegal value can't update [foo.bar.low] from [1] to [17]"))); - assertThat(consumerLow.get(), equalTo(0)); - assertThat(consumerHigh.get(), equalTo(0)); - } - } -
public void testGet() { ClusterSettings settings = new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS); // affix setting - complex matcher diff --git a/server/src/test/java/org/elasticsearch/common/settings/SettingTests.java b/server/src/test/java/org/elasticsearch/common/settings/SettingTests.java index 750c7148946fc..220392a952c29 100644 --- a/server/src/test/java/org/elasticsearch/common/settings/SettingTests.java +++ b/server/src/test/java/org/elasticsearch/common/settings/SettingTests.java @@ -204,12 +204,18 @@ public void testValidateStringSetting() { static class FooBarValidator implements Setting.Validator<String> { - public static boolean invoked; + public static boolean invokedInIsolation; + public static boolean invokedWithDependencies; @Override - public void validate(String value, Map<Setting<String>, String> settings) { - invoked = true; + public void validate(String value) { + invokedInIsolation = true; assertThat(value, equalTo("foo.bar value")); + } + + @Override + public void validate(String value, Map<Setting<String>, String> settings) { + invokedWithDependencies = true; assertTrue(settings.keySet().contains(BAZ_QUX_SETTING)); assertThat(settings.get(BAZ_QUX_SETTING), equalTo("baz.qux value")); assertTrue(settings.keySet().contains(QUUX_QUUZ_SETTING)); @@ -230,7 +236,8 @@ public void testValidator() { .put("quux.quuz", "quux.quuz value") .build(); FOO_BAR_SETTING.get(settings); - assertTrue(FooBarValidator.invoked); + assertTrue(FooBarValidator.invokedInIsolation); + assertTrue(FooBarValidator.invokedWithDependencies); } public void testUpdateNotDynamic() { @@ -934,7 +941,7 @@ public void testAffixMapUpdateWithNullSettingValue() { final Setting.AffixSetting<String> affixSetting = Setting.prefixKeySetting("prefix" + ".", - (key) -> Setting.simpleString(key, (value, map) -> {}, Property.Dynamic, Property.NodeScope)); + key -> Setting.simpleString(key, Property.Dynamic, Property.NodeScope)); final Consumer<Map<String, String>> consumer = (map) -> {}; final BiConsumer<String, String> validator = (s1, s2) -> {}; diff --git a/server/src/test/java/org/elasticsearch/common/settings/SettingsTests.java b/server/src/test/java/org/elasticsearch/common/settings/SettingsTests.java index 74c6a80fa3277..802bceaa90812 100644 --- a/server/src/test/java/org/elasticsearch/common/settings/SettingsTests.java +++ b/server/src/test/java/org/elasticsearch/common/settings/SettingsTests.java @@ -25,6 +25,9 @@ import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.unit.ByteSizeUnit; +import org.elasticsearch.common.unit.ByteSizeValue; +import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; @@ -44,6 +47,7 @@ import java.util.Map; import java.util.NoSuchElementException; import java.util.Set; +import java.util.concurrent.TimeUnit; import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.containsInAnyOrder; @@ -708,4 +712,51 @@ public void testCopy() { IllegalArgumentException iae = expectThrows(IllegalArgumentException.class, () -> Settings.builder().copy("not_there", settings)); assertEquals("source key not found in the source settings", iae.getMessage()); } + + public void testFractionalTimeValue() { + final Setting<TimeValue> setting = + Setting.timeSetting("key", TimeValue.parseTimeValue(randomTimeValue(0, 24, "h"), "key"), TimeValue.ZERO); + final TimeValue expected = TimeValue.timeValueMillis(randomNonNegativeLong()); + final Settings settings = Settings.builder().put("key", expected).build(); + /* + * Previously we would internally convert the time value to a string using a method that tries to be smart about the units (e.g., + * 1000ms would be converted to 1s). However, this had a problem in that, for example, 1500ms would be converted to 1.5s. Then, + * 1.5s could not be converted back to a TimeValue because TimeValues do not support fractional components. Effectively this test + * is then asserting that we no longer make this mistake when doing the internal string conversion. Instead, we convert to a string + * using a method that does not lose the original unit. + */ + final TimeValue actual = setting.get(settings); + assertThat(actual, equalTo(expected)); + } + + public void testFractionalByteSizeValue() { + final Setting<ByteSizeValue> setting = + Setting.byteSizeSetting("key", ByteSizeValue.parseBytesSizeValue(randomIntBetween(1, 16) + "k", "key")); + final ByteSizeValue expected = new ByteSizeValue(randomNonNegativeLong(), ByteSizeUnit.BYTES); + final Settings settings = Settings.builder().put("key", expected).build(); + /* + * Previously we would internally convert the byte size value to a string using a method that tries to be smart about the units + * (e.g., 1024 bytes would be converted to 1kb). However, this had a problem in that, for example, 1536 bytes would be converted to + * 1.5k. Then, 1.5k could not be converted back to a ByteSizeValue because ByteSizeValues do not support fractional components. + * Effectively this test is then asserting that we no longer make this mistake when doing the internal string conversion. Instead, + * we convert to a string using a method that does not lose the original unit. + */ + final ByteSizeValue actual = setting.get(settings); + assertThat(actual, equalTo(expected)); + } + + public void testSetByTimeUnit() { + final Setting<TimeValue> setting = + Setting.timeSetting("key", TimeValue.parseTimeValue(randomTimeValue(0, 24, "h"), "key"), TimeValue.ZERO); + final TimeValue expected = new TimeValue(1500, TimeUnit.MICROSECONDS); + final Settings settings = Settings.builder().put("key", expected.getMicros(), TimeUnit.MICROSECONDS).build(); + /* + * Previously we would internally convert the duration to a string by converting to milliseconds which could lose precision (e.g., + * 1500 microseconds would be converted to 1ms). Effectively this test is then asserting that we no longer make this mistake when + * doing the internal string conversion. Instead, we convert to a duration using a method that does not lose the original unit.
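+ * For example, a duration of 1500 microseconds used to be converted through milliseconds and stored as "1ms", silently + * dropping 500 microseconds; a unit-preserving representation (assumed here to render as "1500micros") round-trips exactly.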
+ */ + final TimeValue actual = setting.get(settings); + assertThat(actual, equalTo(expected)); + } + } diff --git a/server/src/test/java/org/elasticsearch/common/time/DateFormattersTests.java b/server/src/test/java/org/elasticsearch/common/time/DateFormattersTests.java index 0f58e30f7a2bf..a2858284593d1 100644 --- a/server/src/test/java/org/elasticsearch/common/time/DateFormattersTests.java +++ b/server/src/test/java/org/elasticsearch/common/time/DateFormattersTests.java @@ -23,7 +23,7 @@ import java.time.Instant; import java.time.ZoneId; -import java.time.ZoneOffset; +import java.time.ZonedDateTime; import java.time.format.DateTimeParseException; import java.time.temporal.TemporalAccessor; import java.util.Locale; @@ -43,36 +43,11 @@ public class DateFormattersTests extends ESTestCase { // as this feature is supported it also makes sense to make it exact public void testEpochMillisParser() { DateFormatter formatter = DateFormatters.forPattern("epoch_millis"); - { - Instant instant = Instant.from(formatter.parse("12345.6789")); - assertThat(instant.getEpochSecond(), is(12L)); - assertThat(instant.getNano(), is(345_678_900)); - } { Instant instant = Instant.from(formatter.parse("12345")); assertThat(instant.getEpochSecond(), is(12L)); assertThat(instant.getNano(), is(345_000_000)); } - { - Instant instant = Instant.from(formatter.parse("12345.")); - assertThat(instant.getEpochSecond(), is(12L)); - assertThat(instant.getNano(), is(345_000_000)); - } - { - Instant instant = Instant.from(formatter.parse("-12345.6789")); - assertThat(instant.getEpochSecond(), is(-13L)); - assertThat(instant.getNano(), is(1_000_000_000 - 345_678_900)); - } - { - Instant instant = Instant.from(formatter.parse("-436134.241272")); - assertThat(instant.getEpochSecond(), is(-437L)); - assertThat(instant.getNano(), is(1_000_000_000 - 134_241_272)); - } - { - Instant instant = Instant.from(formatter.parse("-12345")); - assertThat(instant.getEpochSecond(), is(-13L)); - assertThat(instant.getNano(), is(1_000_000_000 - 345_000_000)); - } { Instant instant = Instant.from(formatter.parse("0")); assertThat(instant.getEpochSecond(), is(0L)); @@ -83,10 +58,10 @@ public void testEpochMillisParser() { public void testEpochMilliParser() { DateFormatter formatter = DateFormatters.forPattern("epoch_millis"); DateTimeParseException e = expectThrows(DateTimeParseException.class, () -> formatter.parse("invalid")); - assertThat(e.getMessage(), containsString("invalid number")); + assertThat(e.getMessage(), containsString("could not be parsed")); e = expectThrows(DateTimeParseException.class, () -> formatter.parse("123.1234567")); - assertThat(e.getMessage(), containsString("too much granularity after dot [123.1234567]")); + assertThat(e.getMessage(), containsString("unparsed text found at index 3")); } // this is not in the duelling tests, because the epoch second parser in joda time drops the milliseconds after the comma @@ -95,30 +70,14 @@ public void testEpochMilliParser() { public void testEpochSecondParser() { DateFormatter formatter = DateFormatters.forPattern("epoch_second"); - assertThat(Instant.from(formatter.parse("1234.567")).toEpochMilli(), is(1234567L)); - assertThat(Instant.from(formatter.parse("1234.")).getNano(), is(0)); - assertThat(Instant.from(formatter.parse("1234.")).getEpochSecond(), is(1234L)); - assertThat(Instant.from(formatter.parse("1234.1")).getNano(), is(100_000_000)); - assertThat(Instant.from(formatter.parse("1234.12")).getNano(), is(120_000_000)); - 
assertThat(Instant.from(formatter.parse("1234.123")).getNano(), is(123_000_000)); - assertThat(Instant.from(formatter.parse("1234.1234")).getNano(), is(123_400_000)); - assertThat(Instant.from(formatter.parse("1234.12345")).getNano(), is(123_450_000)); - assertThat(Instant.from(formatter.parse("1234.123456")).getNano(), is(123_456_000)); - assertThat(Instant.from(formatter.parse("1234.1234567")).getNano(), is(123_456_700)); - assertThat(Instant.from(formatter.parse("1234.12345678")).getNano(), is(123_456_780)); - assertThat(Instant.from(formatter.parse("1234.123456789")).getNano(), is(123_456_789)); - - assertThat(Instant.from(formatter.parse("-1234.567")).toEpochMilli(), is(-1234567L)); - assertThat(Instant.from(formatter.parse("-1234")).getNano(), is(0)); - - DateTimeParseException e = expectThrows(DateTimeParseException.class, () -> formatter.parse("1234.1234567890")); - assertThat(e.getMessage(), is("too much granularity after dot [1234.1234567890]")); - e = expectThrows(DateTimeParseException.class, () -> formatter.parse("1234.123456789013221")); - assertThat(e.getMessage(), is("too much granularity after dot [1234.123456789013221]")); + DateTimeParseException e = expectThrows(DateTimeParseException.class, () -> formatter.parse("1234.1")); + assertThat(e.getMessage(), is("Text '1234.1' could not be parsed, unparsed text found at index 4")); + e = expectThrows(DateTimeParseException.class, () -> formatter.parse("1234.")); + assertThat(e.getMessage(), is("Text '1234.' could not be parsed, unparsed text found at index 4")); e = expectThrows(DateTimeParseException.class, () -> formatter.parse("abc")); - assertThat(e.getMessage(), is("invalid number [abc]")); + assertThat(e.getMessage(), is("Text 'abc' could not be parsed, unparsed text found at index 0")); e = expectThrows(DateTimeParseException.class, () -> formatter.parse("1234.abc")); - assertThat(e.getMessage(), is("invalid number [1234.abc]")); + assertThat(e.getMessage(), is("Text '1234.abc' could not be parsed, unparsed text found at index 4")); } public void testEpochMilliParsersWithDifferentFormatters() { @@ -128,22 +87,18 @@ public void testEpochMilliParsersWithDifferentFormatters() { assertThat(formatter.pattern(), is("strict_date_optional_time||epoch_millis")); } + public void testParsersWithMultipleInternalFormats() throws Exception { + ZonedDateTime first = DateFormatters.toZonedDateTime( + DateFormatters.forPattern("strict_date_optional_time_nanos").parse("2018-05-15T17:14:56+0100")); + ZonedDateTime second = DateFormatters.toZonedDateTime( + DateFormatters.forPattern("strict_date_optional_time_nanos").parse("2018-05-15T17:14:56+01:00")); + assertThat(first, is(second)); + } + public void testLocales() { assertThat(DateFormatters.forPattern("strict_date_optional_time").locale(), is(Locale.ROOT)); Locale locale = randomLocale(random()); assertThat(DateFormatters.forPattern("strict_date_optional_time").withLocale(locale).locale(), is(locale)); - if (locale.equals(Locale.ROOT)) { - DateFormatter millisFormatter = DateFormatters.forPattern("epoch_millis"); - assertThat(millisFormatter.withLocale(locale), is(millisFormatter)); - DateFormatter secondFormatter = DateFormatters.forPattern("epoch_second"); - assertThat(secondFormatter.withLocale(locale), is(secondFormatter)); - } else { - IllegalArgumentException e = - expectThrows(IllegalArgumentException.class, () -> DateFormatters.forPattern("epoch_millis").withLocale(locale)); - assertThat(e.getMessage(), is("epoch_millis date formatter can only be in locale ROOT")); - e = 
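The new expected messages above are the stock java.time ones: with the hand-rolled fractional handling removed, the parser consumes the digits it understands and reports any leftover text. A rough stand-in for the epoch_second parser (an assumed construction, not the actual DateFormatters code):

    import java.time.Instant;
    import java.time.format.DateTimeFormatter;
    import java.time.format.DateTimeFormatterBuilder;
    import java.time.format.SignStyle;
    import java.time.temporal.ChronoField;

    public class EpochSecondParserSketch {
        public static void main(String[] args) {
            DateTimeFormatter epochSecond = new DateTimeFormatterBuilder()
                .appendValue(ChronoField.INSTANT_SECONDS, 1, 19, SignStyle.NORMAL)
                .toFormatter();
            long seconds = epochSecond.parse("1234").getLong(ChronoField.INSTANT_SECONDS);
            System.out.println(Instant.ofEpochSecond(seconds)); // 1970-01-01T00:20:34Z
            // throws DateTimeParseException: Text '1234.1' could not be parsed,
            // unparsed text found at index 4
            epochSecond.parse("1234.1");
        }
    }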
expectThrows(IllegalArgumentException.class, () -> DateFormatters.forPattern("epoch_second").withLocale(locale)); - assertThat(e.getMessage(), is("epoch_second date formatter can only be in locale ROOT")); - } } public void testTimeZones() { @@ -151,18 +106,6 @@ public void testTimeZones() { assertThat(DateFormatters.forPattern("strict_date_optional_time").zone(), is(nullValue())); ZoneId zoneId = randomZone(); assertThat(DateFormatters.forPattern("strict_date_optional_time").withZone(zoneId).zone(), is(zoneId)); - if (zoneId.equals(ZoneOffset.UTC)) { - DateFormatter millisFormatter = DateFormatters.forPattern("epoch_millis"); - assertThat(millisFormatter.withZone(zoneId), is(millisFormatter)); - DateFormatter secondFormatter = DateFormatters.forPattern("epoch_second"); - assertThat(secondFormatter.withZone(zoneId), is(secondFormatter)); - } else { - IllegalArgumentException e = - expectThrows(IllegalArgumentException.class, () -> DateFormatters.forPattern("epoch_millis").withZone(zoneId)); - assertThat(e.getMessage(), is("epoch_millis date formatter can only be in zone offset UTC")); - e = expectThrows(IllegalArgumentException.class, () -> DateFormatters.forPattern("epoch_second").withZone(zoneId)); - assertThat(e.getMessage(), is("epoch_second date formatter can only be in zone offset UTC")); - } } public void testEqualsAndHashcode() { @@ -200,9 +143,18 @@ public void testForceJava8() { assertThat(DateFormatter.forPattern("8date_optional_time"), instanceOf(JavaDateFormatter.class)); // named formats too DateFormatter formatter = DateFormatter.forPattern("8date_optional_time||ww-MM-dd"); - assertThat(formatter, instanceOf(DateFormatters.MergedDateFormatter.class)); - DateFormatters.MergedDateFormatter mergedFormatter = (DateFormatters.MergedDateFormatter) formatter; - assertThat(mergedFormatter.formatters.get(0), instanceOf(JavaDateFormatter.class)); - assertThat(mergedFormatter.formatters.get(1), instanceOf(JavaDateFormatter.class)); + assertThat(formatter, instanceOf(JavaDateFormatter.class)); + } + + public void testParsingStrictNanoDates() { + DateFormatter formatter = DateFormatters.forPattern("strict_date_optional_time_nanos"); + formatter.format(formatter.parse("2016-01-01T00:00:00.000")); + formatter.format(formatter.parse("2018-05-15T17:14:56")); + formatter.format(formatter.parse("2018-05-15T17:14:56Z")); + formatter.format(formatter.parse("2018-05-15T17:14:56+0100")); + formatter.format(formatter.parse("2018-05-15T17:14:56+01:00")); + formatter.format(formatter.parse("2018-05-15T17:14:56.123456789Z")); + formatter.format(formatter.parse("2018-05-15T17:14:56.123456789+0100")); + formatter.format(formatter.parse("2018-05-15T17:14:56.123456789+01:00")); } } diff --git a/server/src/test/java/org/elasticsearch/common/util/CancellableThreadsTests.java b/server/src/test/java/org/elasticsearch/common/util/CancellableThreadsTests.java index 729c431d2b2cf..2b937730e4750 100644 --- a/server/src/test/java/org/elasticsearch/common/util/CancellableThreadsTests.java +++ b/server/src/test/java/org/elasticsearch/common/util/CancellableThreadsTests.java @@ -18,8 +18,8 @@ */ package org.elasticsearch.common.util; -import org.elasticsearch.common.util.CancellableThreads.IOInterruptable; -import org.elasticsearch.common.util.CancellableThreads.Interruptable; +import org.elasticsearch.common.util.CancellableThreads.IOInterruptible; +import org.elasticsearch.common.util.CancellableThreads.Interruptible; import org.elasticsearch.test.ESTestCase; import org.hamcrest.Matchers; @@ -62,7 +62,7 @@ private 
TestPlan(int id) { } } - static class TestRunnable implements Interruptable { + static class TestRunnable implements Interruptible { final TestPlan plan; final CountDownLatch readyForCancel; @@ -95,7 +95,7 @@ public void run() throws InterruptedException { } } - static class TestIORunnable implements IOInterruptable { + static class TestIORunnable implements IOInterruptible { final TestPlan plan; final CountDownLatch readyForCancel; diff --git a/server/src/test/java/org/elasticsearch/discovery/AbstractDisruptionTestCase.java b/server/src/test/java/org/elasticsearch/discovery/AbstractDisruptionTestCase.java index 7a9a6570d0d99..703c7346268d9 100644 --- a/server/src/test/java/org/elasticsearch/discovery/AbstractDisruptionTestCase.java +++ b/server/src/test/java/org/elasticsearch/discovery/AbstractDisruptionTestCase.java @@ -172,8 +172,8 @@ void assertNoMaster(final String node, @Nullable final ClusterBlock expectedBloc assertNull("node [" + node + "] still has [" + nodes.getMasterNode() + "] as master", nodes.getMasterNode()); if (expectedBlocks != null) { for (ClusterBlockLevel level : expectedBlocks.levels()) { - assertTrue("node [" + node + "] does have level [" + level + "] in it's blocks", state.getBlocks().hasGlobalBlock (level)); + assertTrue("node [" + node + "] does not have level [" + level + "] in its blocks", + state.getBlocks().hasGlobalBlockWithLevel(level)); } } }, maxWaitTime.getMillis(), TimeUnit.MILLISECONDS); diff --git a/server/src/test/java/org/elasticsearch/discovery/DiscoveryDisruptionIT.java b/server/src/test/java/org/elasticsearch/discovery/DiscoveryDisruptionIT.java index 3effaa8776110..6f47590089f28 100644 --- a/server/src/test/java/org/elasticsearch/discovery/DiscoveryDisruptionIT.java +++ b/server/src/test/java/org/elasticsearch/discovery/DiscoveryDisruptionIT.java @@ -30,7 +30,6 @@ import org.elasticsearch.discovery.zen.PublishClusterStateAction; import org.elasticsearch.discovery.zen.ZenDiscovery; import org.elasticsearch.test.ESIntegTestCase; -import org.elasticsearch.test.discovery.TestZenDiscovery; import org.elasticsearch.test.disruption.NetworkDisruption; import org.elasticsearch.test.disruption.NetworkDisruption.NetworkDisconnect; import org.elasticsearch.test.disruption.NetworkDisruption.TwoPartitions; @@ -133,11 +132,8 @@ public void testUnicastSinglePingResponseContainsMaster() throws Exception { * Test cluster join with issues in cluster state publishing * */ public void testClusterJoinDespiteOfPublishingIssues() throws Exception { - // TODO: enable this for Zen2 once lag-detection is implemented - String masterNode = internalCluster().startMasterOnlyNode( - Settings.builder().put(TestZenDiscovery.USE_ZEN2.getKey(), false).build()); - String nonMasterNode = internalCluster().startDataOnlyNode( - Settings.builder().put(TestZenDiscovery.USE_ZEN2.getKey(), false).build()); + String masterNode = internalCluster().startMasterOnlyNode(); + String nonMasterNode = internalCluster().startDataOnlyNode(); DiscoveryNodes discoveryNodes = internalCluster().getInstance(ClusterService.class, nonMasterNode).state().nodes(); diff --git a/server/src/test/java/org/elasticsearch/discovery/SnapshotDisruptionIT.java b/server/src/test/java/org/elasticsearch/discovery/SnapshotDisruptionIT.java index 48fb694747983..f6a95a3ed5b35 100644 --- a/server/src/test/java/org/elasticsearch/discovery/SnapshotDisruptionIT.java +++ b/server/src/test/java/org/elasticsearch/discovery/SnapshotDisruptionIT.java @@ -34,6 +34,7 @@ import org.elasticsearch.common.settings.Settings; import 
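The Interruptable to Interruptible rename above is purely mechanical; for context, a sketch of how the interface is consumed (assuming CancellableThreads keeps its single-method execute entry point after the rename):

    import org.elasticsearch.common.util.CancellableThreads;

    public class InterruptibleSketch {
        public static void main(String[] args) {
            CancellableThreads cancellableThreads = new CancellableThreads();
            // Interruptible is a single-method interface, so a lambda works; calling
            // cancellableThreads.cancel("reason") from another thread interrupts the sleep
            cancellableThreads.execute(() -> Thread.sleep(100));
        }
    }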
org.elasticsearch.common.unit.ByteSizeUnit; import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.snapshots.ConcurrentSnapshotExecutionException; import org.elasticsearch.snapshots.SnapshotInfo; import org.elasticsearch.snapshots.SnapshotMissingException; import org.elasticsearch.snapshots.SnapshotState; @@ -165,16 +166,24 @@ public void clusterChanged(ClusterChangedEvent event) { try { future.get(); } catch (Exception ex) { - logger.info("--> got exception from hanged master", ex); Throwable cause = ex.getCause(); - assertThat(cause, instanceOf(MasterNotDiscoveredException.class)); - cause = cause.getCause(); - assertThat(cause, instanceOf(FailedToCommitClusterStateException.class)); + if (cause.getCause() instanceof ConcurrentSnapshotExecutionException) { + logger.info("--> got exception from race in master operation retries"); + } else { + logger.info("--> got exception from hung master", ex); + assertThat(cause, instanceOf(MasterNotDiscoveredException.class)); + cause = cause.getCause(); + assertThat(cause, instanceOf(FailedToCommitClusterStateException.class)); + } } logger.info("--> verify that snapshot eventually will be created due to retries"); assertBusy(() -> { - assertSnapshotExists("test-repo", "test-snap-2"); + try { + assertSnapshotExists("test-repo", "test-snap-2"); + } catch (SnapshotMissingException ex) { + throw new AssertionError(ex); + } }, 1, TimeUnit.MINUTES); } diff --git a/server/src/test/java/org/elasticsearch/discovery/zen/UnicastZenPingTests.java b/server/src/test/java/org/elasticsearch/discovery/zen/UnicastZenPingTests.java index fedbf02a8e86f..f06ef3e72808a 100644 --- a/server/src/test/java/org/elasticsearch/discovery/zen/UnicastZenPingTests.java +++ b/server/src/test/java/org/elasticsearch/discovery/zen/UnicastZenPingTests.java @@ -401,7 +401,7 @@ public BoundTransportAddress boundAddress() { Collections.singletonList("127.0.0.1"), limitPortCounts, transportService, - TimeValue.timeValueSeconds(1)); + TimeValue.timeValueSeconds(30)); assertThat(transportAddresses, hasSize(limitPortCounts)); final Set<Integer> ports = new HashSet<>(); for (final TransportAddress address : transportAddresses) { @@ -445,7 +445,7 @@ public BoundTransportAddress boundAddress() { Collections.singletonList(NetworkAddress.format(loopbackAddress)), 10, transportService, - TimeValue.timeValueSeconds(1)); + TimeValue.timeValueSeconds(30)); assertThat(transportAddresses, hasSize(7)); final Set<Integer> ports = new HashSet<>(); for (final TransportAddress address : transportAddresses) { @@ -496,7 +496,7 @@ public TransportAddress[] addressesFromString(String address, int perAddressLimi Arrays.asList(hostname), 1, transportService, - TimeValue.timeValueSeconds(1) + TimeValue.timeValueSeconds(30) ); assertThat(transportAddresses, empty()); @@ -547,7 +547,7 @@ public TransportAddress[] addressesFromString(String address, int perAddressLimi new TransportService(Settings.EMPTY, transport, threadPool, TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> null, null, Collections.emptySet()); closeables.push(transportService); - final TimeValue resolveTimeout = TimeValue.timeValueSeconds(randomIntBetween(1, 3)); + final TimeValue resolveTimeout = TimeValue.timeValueSeconds(randomIntBetween(3, 5)); try { final List<TransportAddress> transportAddresses = UnicastZenPing.resolveHostsLists( executorService, @@ -722,7 +722,7 @@ public BoundTransportAddress boundAddress() { Arrays.asList("127.0.0.1:9300:9300", "127.0.0.1:9301"), 1, transportService, - TimeValue.timeValueSeconds(1)); + TimeValue.timeValueSeconds(30));
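The AssertionError wrapping in the SnapshotDisruptionIT hunk above is load-bearing: assertBusy retries only assertion failures, so a raw SnapshotMissingException would fail the test on the first poll instead of waiting out the master's retries. A simplified loop in the spirit of ESTestCase.assertBusy (a sketch, not the actual implementation):

    public class BusyAssertSketch {
        static void busyAssert(Runnable assertion, long timeoutMillis) throws InterruptedException {
            final long deadline = System.currentTimeMillis() + timeoutMillis;
            while (true) {
                try {
                    assertion.run(); // e.g. () -> assertSnapshotExists("test-repo", "test-snap-2")
                    return;
                } catch (AssertionError e) {
                    // assertion failures are retried until the deadline passes ...
                    if (System.currentTimeMillis() >= deadline) {
                        throw e;
                    }
                    Thread.sleep(100);
                }
                // ... while any other exception (such as SnapshotMissingException)
                // propagates immediately -- hence the AssertionError wrapping above
            }
        }
    }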
assertThat(transportAddresses, hasSize(1)); // only one of the two is valid and will be used assertThat(transportAddresses.get(0).getAddress(), equalTo("127.0.0.1")); assertThat(transportAddresses.get(0).getPort(), equalTo(9301)); diff --git a/server/src/test/java/org/elasticsearch/discovery/zen/ZenDiscoveryUnitTests.java b/server/src/test/java/org/elasticsearch/discovery/zen/ZenDiscoveryUnitTests.java index 99155799fb199..c9a2f7dc58388 100644 --- a/server/src/test/java/org/elasticsearch/discovery/zen/ZenDiscoveryUnitTests.java +++ b/server/src/test/java/org/elasticsearch/discovery/zen/ZenDiscoveryUnitTests.java @@ -28,6 +28,7 @@ import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ClusterStateTaskExecutor; import org.elasticsearch.cluster.ESAllocationTestCase; +import org.elasticsearch.cluster.coordination.JoinTaskExecutor; import org.elasticsearch.cluster.coordination.NodeRemovalClusterStateTaskExecutor; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.MetaData; @@ -388,7 +389,7 @@ public void testValidateOnUnsupportedIndexVersionCreated() throws Exception { final DiscoveryNode localNode = new DiscoveryNode("other_node", buildNewFakeTransportAddress(), emptyMap(), EnumSet.allOf(DiscoveryNode.Role.class), Version.CURRENT); MembershipAction.ValidateJoinRequestRequestHandler request = new MembershipAction.ValidateJoinRequestRequestHandler - (() -> localNode, ZenDiscovery.addBuiltInJoinValidators(Collections.emptyList())); + (() -> localNode, JoinTaskExecutor.addBuiltInJoinValidators(Collections.emptyList())); final boolean incompatible = randomBoolean(); IndexMetaData indexMetaData = IndexMetaData.builder("test").settings(Settings.builder() .put(SETTING_VERSION_CREATED, diff --git a/server/src/test/java/org/elasticsearch/env/NodeEnvironmentTests.java b/server/src/test/java/org/elasticsearch/env/NodeEnvironmentTests.java index 63635f5cbe7a4..55ab02c1dcfb9 100644 --- a/server/src/test/java/org/elasticsearch/env/NodeEnvironmentTests.java +++ b/server/src/test/java/org/elasticsearch/env/NodeEnvironmentTests.java @@ -35,7 +35,6 @@ import org.elasticsearch.test.IndexSettingsModule; import java.io.IOException; -import java.nio.file.AtomicMoveNotSupportedException; import java.nio.file.Files; import java.nio.file.Path; import java.util.ArrayList; @@ -457,12 +456,8 @@ public void testExistingTempFiles() throws IOException { } } NodeEnvironment env = newNodeEnvironment(paths, Settings.EMPTY); - try { - env.ensureAtomicMoveSupported(); - } catch (AtomicMoveNotSupportedException e) { - // that's OK :) - } env.close(); + // check we clean up for (String path: paths) { final Path nodePath = NodeEnvironment.resolveNodePath(PathUtils.get(path), 0); diff --git a/server/src/test/java/org/elasticsearch/explain/ExplainActionIT.java b/server/src/test/java/org/elasticsearch/explain/ExplainActionIT.java index 229cb99fbfbda..6616f93ded24b 100644 --- a/server/src/test/java/org/elasticsearch/explain/ExplainActionIT.java +++ b/server/src/test/java/org/elasticsearch/explain/ExplainActionIT.java @@ -257,7 +257,8 @@ public void testExplainDateRangeInQueryString() { refresh(); - ExplainResponse explainResponse = client().prepareExplain("test", "type", "1").setQuery(queryStringQuery("past:[now-2M/d TO now/d]")).get(); + ExplainResponse explainResponse = client().prepareExplain("test", "type", "1") + .setQuery(queryStringQuery("past:[now-2M/d TO now/d]")).get(); assertThat(explainResponse.isExists(), equalTo(true)); 
assertThat(explainResponse.isMatch(), equalTo(true)); } diff --git a/server/src/test/java/org/elasticsearch/gateway/RecoveryFromGatewayIT.java b/server/src/test/java/org/elasticsearch/gateway/RecoveryFromGatewayIT.java index 4750540a178d0..3f337b9afee42 100644 --- a/server/src/test/java/org/elasticsearch/gateway/RecoveryFromGatewayIT.java +++ b/server/src/test/java/org/elasticsearch/gateway/RecoveryFromGatewayIT.java @@ -20,6 +20,7 @@ package org.elasticsearch.gateway; import com.carrotsearch.hppc.cursors.ObjectCursor; + import org.elasticsearch.action.admin.indices.recovery.RecoveryResponse; import org.elasticsearch.action.admin.indices.stats.IndexStats; import org.elasticsearch.action.admin.indices.stats.ShardStats; @@ -374,7 +375,7 @@ public void testLatestVersionLoaded() throws Exception { logger.info("--> add some metadata and additional template"); client().admin().indices().preparePutTemplate("template_1") - .setTemplate("te*") + .setPatterns(Collections.singletonList("te*")) .setOrder(0) .addMapping("type1", XContentFactory.jsonBuilder().startObject().startObject("type1").startObject("properties") .startObject("field1").field("type", "text").field("store", true).endObject() diff --git a/server/src/test/java/org/elasticsearch/get/GetActionIT.java b/server/src/test/java/org/elasticsearch/get/GetActionIT.java index edd8660586a47..77303995f7494 100644 --- a/server/src/test/java/org/elasticsearch/get/GetActionIT.java +++ b/server/src/test/java/org/elasticsearch/get/GetActionIT.java @@ -617,7 +617,10 @@ public void testGetFieldsComplexField() throws Exception { FlushResponse flushResponse = client().admin().indices().prepareFlush("my-index").setForce(true).get(); if (flushResponse.getSuccessfulShards() == 0) { StringBuilder sb = new StringBuilder("failed to flush at least one shard. 
total shards [") - .append(flushResponse.getTotalShards()).append("], failed shards: [").append(flushResponse.getFailedShards()).append("]"); + .append(flushResponse.getTotalShards()) + .append("], failed shards: [") + .append(flushResponse.getFailedShards()) + .append("]"); for (DefaultShardOperationFailedException failure: flushResponse.getShardFailures()) { sb.append("\nShard failure: ").append(failure); } diff --git a/server/src/test/java/org/elasticsearch/index/IndexModuleTests.java b/server/src/test/java/org/elasticsearch/index/IndexModuleTests.java index df411f81e3c84..5f6659afd7397 100644 --- a/server/src/test/java/org/elasticsearch/index/IndexModuleTests.java +++ b/server/src/test/java/org/elasticsearch/index/IndexModuleTests.java @@ -383,19 +383,20 @@ public void testDisableQueryCacheHasPrecedenceOverForceQueryCache() throws IOExc indexService.close("simon says", false); } - public void testMmapfsStoreTypeNotAllowed() { + public void testMmapNotAllowed() { + String storeType = randomFrom(IndexModule.Type.HYBRIDFS.getSettingsKey(), IndexModule.Type.MMAPFS.getSettingsKey()); final Settings settings = Settings.builder() .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir()) - .put("index.store.type", "mmapfs") + .put("index.store.type", storeType) .build(); final Settings nodeSettings = Settings.builder() - .put(IndexModule.NODE_STORE_ALLOW_MMAPFS.getKey(), false) + .put(IndexModule.NODE_STORE_ALLOW_MMAP.getKey(), false) .build(); final IndexSettings indexSettings = IndexSettingsModule.newIndexSettings(new Index("foo", "_na_"), settings, nodeSettings); final IndexModule module = new IndexModule(indexSettings, emptyAnalysisRegistry, new InternalEngineFactory(), Collections.emptyMap()); final IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> newIndexService(module)); - assertThat(e, hasToString(containsString("store type [mmapfs] is not allowed"))); + assertThat(e, hasToString(containsString("store type [" + storeType + "] is not allowed"))); } class CustomQueryCache implements QueryCache { diff --git a/server/src/test/java/org/elasticsearch/index/SearchSlowLogTests.java b/server/src/test/java/org/elasticsearch/index/SearchSlowLogTests.java index 8d547c617e55b..fc24bdf9691de 100644 --- a/server/src/test/java/org/elasticsearch/index/SearchSlowLogTests.java +++ b/server/src/test/java/org/elasticsearch/index/SearchSlowLogTests.java @@ -43,11 +43,11 @@ import java.io.IOException; import java.util.Collections; +import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.endsWith; import static org.hamcrest.Matchers.hasToString; import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.not; -import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.startsWith; public class SearchSlowLogTests extends ESSingleNodeTestCase { diff --git a/server/src/test/java/org/elasticsearch/index/engine/CombinedDeletionPolicyTests.java b/server/src/test/java/org/elasticsearch/index/engine/CombinedDeletionPolicyTests.java index 6546eaa124483..ff33ce19d484b 100644 --- a/server/src/test/java/org/elasticsearch/index/engine/CombinedDeletionPolicyTests.java +++ b/server/src/test/java/org/elasticsearch/index/engine/CombinedDeletionPolicyTests.java @@ -30,6 +30,7 @@ import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; +import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; @@ -52,7 +53,8 @@ public class 
CombinedDeletionPolicyTests extends ESTestCase { public void testKeepCommitsAfterGlobalCheckpoint() throws Exception { final AtomicLong globalCheckpoint = new AtomicLong(); final int extraRetainedOps = between(0, 100); - final SoftDeletesPolicy softDeletesPolicy = new SoftDeletesPolicy(globalCheckpoint::get, -1, extraRetainedOps); + final SoftDeletesPolicy softDeletesPolicy = + new SoftDeletesPolicy(globalCheckpoint::get, -1, extraRetainedOps, Collections::emptyList); TranslogDeletionPolicy translogPolicy = createTranslogDeletionPolicy(); CombinedDeletionPolicy indexPolicy = new CombinedDeletionPolicy(logger, translogPolicy, softDeletesPolicy, globalCheckpoint::get); @@ -96,7 +98,8 @@ public void testKeepCommitsAfterGlobalCheckpoint() throws Exception { public void testAcquireIndexCommit() throws Exception { final AtomicLong globalCheckpoint = new AtomicLong(); final int extraRetainedOps = between(0, 100); - final SoftDeletesPolicy softDeletesPolicy = new SoftDeletesPolicy(globalCheckpoint::get, -1, extraRetainedOps); + final SoftDeletesPolicy softDeletesPolicy = + new SoftDeletesPolicy(globalCheckpoint::get, -1, extraRetainedOps, Collections::emptyList); final UUID translogUUID = UUID.randomUUID(); TranslogDeletionPolicy translogPolicy = createTranslogDeletionPolicy(); CombinedDeletionPolicy indexPolicy = new CombinedDeletionPolicy(logger, translogPolicy, softDeletesPolicy, globalCheckpoint::get); @@ -176,7 +179,7 @@ public void testAcquireIndexCommit() throws Exception { public void testLegacyIndex() throws Exception { final AtomicLong globalCheckpoint = new AtomicLong(); - final SoftDeletesPolicy softDeletesPolicy = new SoftDeletesPolicy(globalCheckpoint::get, -1, 0); + final SoftDeletesPolicy softDeletesPolicy = new SoftDeletesPolicy(globalCheckpoint::get, -1, 0, Collections::emptyList); final UUID translogUUID = UUID.randomUUID(); TranslogDeletionPolicy translogPolicy = createTranslogDeletionPolicy(); @@ -211,7 +214,7 @@ public void testLegacyIndex() throws Exception { public void testDeleteInvalidCommits() throws Exception { final AtomicLong globalCheckpoint = new AtomicLong(randomNonNegativeLong()); - final SoftDeletesPolicy softDeletesPolicy = new SoftDeletesPolicy(globalCheckpoint::get, -1, 0); + final SoftDeletesPolicy softDeletesPolicy = new SoftDeletesPolicy(globalCheckpoint::get, -1, 0, Collections::emptyList); TranslogDeletionPolicy translogPolicy = createTranslogDeletionPolicy(); CombinedDeletionPolicy indexPolicy = new CombinedDeletionPolicy(logger, translogPolicy, softDeletesPolicy, globalCheckpoint::get); @@ -245,7 +248,7 @@ public void testDeleteInvalidCommits() throws Exception { public void testCheckUnreferencedCommits() throws Exception { final AtomicLong globalCheckpoint = new AtomicLong(SequenceNumbers.UNASSIGNED_SEQ_NO); - final SoftDeletesPolicy softDeletesPolicy = new SoftDeletesPolicy(globalCheckpoint::get, -1, 0); + final SoftDeletesPolicy softDeletesPolicy = new SoftDeletesPolicy(globalCheckpoint::get, -1, 0, Collections::emptyList); final UUID translogUUID = UUID.randomUUID(); final TranslogDeletionPolicy translogPolicy = createTranslogDeletionPolicy(); CombinedDeletionPolicy indexPolicy = new CombinedDeletionPolicy(logger, translogPolicy, softDeletesPolicy, globalCheckpoint::get); diff --git a/server/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java b/server/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java index 31995f1f7f252..f88aaedd6991f 100644 --- 
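Every SoftDeletesPolicy call site above gains a fourth argument, a supplier of the current retention leases; the tests pass Collections::emptyList since no leases exist there. A sketch of the constructor usage, with the argument meanings read off the diff and the surrounding tests:

    import java.util.Collections;
    import java.util.concurrent.atomic.AtomicLong;

    public class SoftDeletesPolicySketch {
        public static void main(String[] args) {
            AtomicLong globalCheckpoint = new AtomicLong(0);
            SoftDeletesPolicy policy = new SoftDeletesPolicy(
                    globalCheckpoint::get,    // supplier of the current global checkpoint
                    -1,                       // local checkpoint of the safe commit (none yet)
                    0,                        // extra operations retained beyond the checkpoint
                    Collections::emptyList);  // supplier of the current retention leases
            System.out.println(policy.getMinRetainedSeqNo());
        }
    }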
a/server/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java +++ b/server/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java @@ -3011,7 +3011,8 @@ public void testRecoverFromForeignTranslog() throws IOException { new CodecService(null, logger), config.getEventListener(), IndexSearcher.getDefaultQueryCache(), IndexSearcher.getDefaultQueryCachingPolicy(), translogConfig, TimeValue.timeValueMinutes(5), config.getExternalRefreshListener(), config.getInternalRefreshListener(), null, - new NoneCircuitBreakerService(), () -> UNASSIGNED_SEQ_NO, primaryTerm::get, tombstoneDocSupplier()); + new NoneCircuitBreakerService(), () -> UNASSIGNED_SEQ_NO, Collections::emptyList, primaryTerm::get, + tombstoneDocSupplier()); expectThrows(EngineCreationFailureException.class, () -> new InternalEngine(brokenConfig)); engine = createEngine(store, primaryTranslogDir); // and recover again! diff --git a/server/src/test/java/org/elasticsearch/index/engine/RecoverySourcePruneMergePolicyTests.java b/server/src/test/java/org/elasticsearch/index/engine/RecoverySourcePruneMergePolicyTests.java index c46b47b87d06e..ef895e1a4ce8e 100644 --- a/server/src/test/java/org/elasticsearch/index/engine/RecoverySourcePruneMergePolicyTests.java +++ b/server/src/test/java/org/elasticsearch/index/engine/RecoverySourcePruneMergePolicyTests.java @@ -37,6 +37,7 @@ import org.apache.lucene.index.StandardDirectoryReader; import org.apache.lucene.index.Term; import org.apache.lucene.search.DocIdSetIterator; +import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.search.MatchNoDocsQuery; import org.apache.lucene.search.TermQuery; import org.apache.lucene.store.Directory; @@ -158,4 +159,39 @@ public void testPruneSome() throws IOException { } } } + + public void testPruneNone() throws IOException { + try (Directory dir = newDirectory()) { + IndexWriterConfig iwc = newIndexWriterConfig(); + iwc.setMergePolicy(new RecoverySourcePruneMergePolicy("extra_source", + () -> new MatchAllDocsQuery(), iwc.getMergePolicy())); + try (IndexWriter writer = new IndexWriter(dir, iwc)) { + for (int i = 0; i < 20; i++) { + if (i > 0 && randomBoolean()) { + writer.flush(); + } + Document doc = new Document(); + doc.add(new StoredField("source", "hello world")); + doc.add(new StoredField("extra_source", "hello world")); + doc.add(new NumericDocValuesField("extra_source", 1)); + writer.addDocument(doc); + } + writer.forceMerge(1); + writer.commit(); + try (DirectoryReader reader = DirectoryReader.open(writer)) { + assertEquals(1, reader.leaves().size()); + NumericDocValues extra_source = reader.leaves().get(0).reader().getNumericDocValues("extra_source"); + assertNotNull(extra_source); + for (int i = 0; i < reader.maxDoc(); i++) { + Document document = reader.document(i); + Set<String> collect = document.getFields().stream().map(IndexableField::name).collect(Collectors.toSet()); + assertTrue(collect.contains("source")); + assertTrue(collect.contains("extra_source")); + assertEquals(i, extra_source.nextDoc()); + } + assertEquals(DocIdSetIterator.NO_MORE_DOCS, extra_source.nextDoc()); + } + } + } + } } diff --git a/server/src/test/java/org/elasticsearch/index/engine/SoftDeletesPolicyTests.java b/server/src/test/java/org/elasticsearch/index/engine/SoftDeletesPolicyTests.java index f359010038284..310e83e9d2cef 100644 --- a/server/src/test/java/org/elasticsearch/index/engine/SoftDeletesPolicyTests.java +++ b/server/src/test/java/org/elasticsearch/index/engine/SoftDeletesPolicyTests.java @@ -19,25 +19,48 @@ package 
org.elasticsearch.index.engine; +import org.apache.lucene.document.LongPoint; +import org.apache.lucene.search.PointRangeQuery; +import org.apache.lucene.search.Query; import org.elasticsearch.common.lease.Releasable; +import org.elasticsearch.index.seqno.RetentionLease; import org.elasticsearch.index.seqno.SequenceNumbers; import org.elasticsearch.test.ESTestCase; import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; +import java.util.HashSet; import java.util.List; +import java.util.Set; import java.util.concurrent.atomic.AtomicLong; +import java.util.function.Supplier; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.instanceOf; public class SoftDeletesPolicyTests extends ESTestCase { + /** * Makes sure we won't advance the retained seq# if the retention lock is held */ public void testSoftDeletesRetentionLock() { long retainedOps = between(0, 10000); AtomicLong globalCheckpoint = new AtomicLong(SequenceNumbers.NO_OPS_PERFORMED); + final AtomicLong[] retainingSequenceNumbers = new AtomicLong[randomIntBetween(0, 8)]; + for (int i = 0; i < retainingSequenceNumbers.length; i++) { + retainingSequenceNumbers[i] = new AtomicLong(SequenceNumbers.UNASSIGNED_SEQ_NO); + } + final Supplier<Collection<RetentionLease>> retentionLeasesSupplier = + () -> { + final Set<RetentionLease> leases = new HashSet<>(retainingSequenceNumbers.length); + for (int i = 0; i < retainingSequenceNumbers.length; i++) { + leases.add(new RetentionLease(Integer.toString(i), retainingSequenceNumbers[i].get(), 0L, "test")); + } + return leases; + }; long safeCommitCheckpoint = globalCheckpoint.get(); - SoftDeletesPolicy policy = new SoftDeletesPolicy(globalCheckpoint::get, between(1, 10000), retainedOps); + SoftDeletesPolicy policy = new SoftDeletesPolicy(globalCheckpoint::get, between(1, 10000), retainedOps, retentionLeasesSupplier); long minRetainedSeqNo = policy.getMinRetainedSeqNo(); List<Releasable> locks = new ArrayList<>(); int iters = scaledRandomIntBetween(10, 1000); @@ -47,6 +70,9 @@ public void testSoftDeletesRetentionLock() { } // Advances the global checkpoint and the local checkpoint of a safe commit globalCheckpoint.addAndGet(between(0, 1000)); + for (final AtomicLong retainingSequenceNumber : retainingSequenceNumbers) { + retainingSequenceNumber.set(randomLongBetween(retainingSequenceNumber.get(), globalCheckpoint.get())); + } safeCommitCheckpoint = randomLongBetween(safeCommitCheckpoint, globalCheckpoint.get()); policy.setLocalCheckpointOfSafeCommit(safeCommitCheckpoint); if (rarely()) { @@ -58,18 +84,36 @@ public void testSoftDeletesRetentionLock() { locks.removeAll(releasingLocks); releasingLocks.forEach(Releasable::close); - // We only expose the seqno to the merge policy if the retention lock is not held.
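The Math.min chains in the assertions that follow encode the retention rule: the minimum retained sequence number is one past the smallest of the safe-commit checkpoint, the smallest retaining lease, and globalCheckpoint minus retainedOps. A worked example with concrete numbers:

    public class RetentionMathSketch {
        public static void main(String[] args) {
            long globalCheckpoint = 1000;
            long retainedOps = 100;
            long safeCommitCheckpoint = 950;
            long minLeaseSeqNo = 920; // smallest retaining sequence number across leases
            long minRetainedSeqNo = Math.min(safeCommitCheckpoint,
                    Math.min(minLeaseSeqNo, globalCheckpoint - retainedOps)) + 1;
            // min(950, min(920, 900)) + 1 = 901: operations with seq# >= 901 are kept,
            // and the PointRangeQuery checked below spans [901, Long.MAX_VALUE]
            System.out.println(minRetainedSeqNo); // 901
        }
    }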
- policy.getRetentionQuery(); + // getting the query has side effects, updating the internal state of the policy + final Query query = policy.getRetentionQuery(); + assertThat(query, instanceOf(PointRangeQuery.class)); + final PointRangeQuery retentionQuery = (PointRangeQuery) query; + + // we only expose the minimum sequence number to the merge policy if the retention lock is not held if (locks.isEmpty()) { - long retainedSeqNo = Math.min(safeCommitCheckpoint, globalCheckpoint.get() - retainedOps) + 1; + final long minimumRetainingSequenceNumber = Arrays.stream(retainingSequenceNumbers) + .mapToLong(AtomicLong::get) + .min() + .orElse(Long.MAX_VALUE); + long retainedSeqNo = + Math.min(safeCommitCheckpoint, Math.min(minimumRetainingSequenceNumber, globalCheckpoint.get() - retainedOps)) + 1; minRetainedSeqNo = Math.max(minRetainedSeqNo, retainedSeqNo); } + assertThat(retentionQuery.getNumDims(), equalTo(1)); + assertThat(LongPoint.decodeDimension(retentionQuery.getLowerPoint(), 0), equalTo(minRetainedSeqNo)); + assertThat(LongPoint.decodeDimension(retentionQuery.getUpperPoint(), 0), equalTo(Long.MAX_VALUE)); assertThat(policy.getMinRetainedSeqNo(), equalTo(minRetainedSeqNo)); } locks.forEach(Releasable::close); - long retainedSeqNo = Math.min(safeCommitCheckpoint, globalCheckpoint.get() - retainedOps) + 1; + final long minimumRetainingSequenceNumber = Arrays.stream(retainingSequenceNumbers) + .mapToLong(AtomicLong::get) + .min() + .orElse(Long.MAX_VALUE); + long retainedSeqNo = + Math.min(safeCommitCheckpoint, Math.min(minimumRetainingSequenceNumber, globalCheckpoint.get() - retainedOps)) + 1; minRetainedSeqNo = Math.max(minRetainedSeqNo, retainedSeqNo); assertThat(policy.getMinRetainedSeqNo(), equalTo(minRetainedSeqNo)); } + } diff --git a/server/src/test/java/org/elasticsearch/index/fielddata/BinaryDVFieldDataTests.java b/server/src/test/java/org/elasticsearch/index/fielddata/BinaryDVFieldDataTests.java index 3b29d15bf3fb2..023a0a84f53a2 100644 --- a/server/src/test/java/org/elasticsearch/index/fielddata/BinaryDVFieldDataTests.java +++ b/server/src/test/java/org/elasticsearch/index/fielddata/BinaryDVFieldDataTests.java @@ -63,16 +63,16 @@ public void testDocValue() throws Exception { doc.endArray(); } doc.endObject(); - ParsedDocument d = mapper.parse(SourceToParse.source("test", "test", "1", BytesReference.bytes(doc), XContentType.JSON)); + ParsedDocument d = mapper.parse(new SourceToParse("test", "test", "1", BytesReference.bytes(doc), XContentType.JSON)); writer.addDocument(d.rootDoc()); BytesRef bytes1 = randomBytes(); doc = XContentFactory.jsonBuilder().startObject().field("field", bytes1.bytes, bytes1.offset, bytes1.length).endObject(); - d = mapper.parse(SourceToParse.source("test", "test", "2", BytesReference.bytes(doc), XContentType.JSON)); + d = mapper.parse(new SourceToParse("test", "test", "2", BytesReference.bytes(doc), XContentType.JSON)); writer.addDocument(d.rootDoc()); doc = XContentFactory.jsonBuilder().startObject().endObject(); - d = mapper.parse(SourceToParse.source("test", "test", "3", BytesReference.bytes(doc), XContentType.JSON)); + d = mapper.parse(new SourceToParse("test", "test", "3", BytesReference.bytes(doc), XContentType.JSON)); writer.addDocument(d.rootDoc()); // test remove duplicate value @@ -88,7 +88,7 @@ public void testDocValue() throws Exception { doc.endArray(); } doc.endObject(); - d = mapper.parse(SourceToParse.source("test", "test", "4", BytesReference.bytes(doc), XContentType.JSON)); + d = mapper.parse(new SourceToParse("test", "test", "4", 
BytesReference.bytes(doc), XContentType.JSON)); writer.addDocument(d.rootDoc()); IndexFieldData indexFieldData = getForField("field"); diff --git a/server/src/test/java/org/elasticsearch/index/mapper/BinaryFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/BinaryFieldMapperTests.java index 5f4cd98600b46..3d8c348acab94 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/BinaryFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/BinaryFieldMapperTests.java @@ -91,7 +91,7 @@ public void testStoredValue() throws IOException { assertTrue(CompressorFactory.isCompressed(new BytesArray(binaryValue2))); for (byte[] value : Arrays.asList(binaryValue1, binaryValue2)) { - ParsedDocument doc = mapperService.documentMapper().parse(SourceToParse.source("test", "type", "id", + ParsedDocument doc = mapperService.documentMapper().parse(new SourceToParse("test", "type", "id", BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field("field", value).endObject()), XContentType.JSON)); BytesRef indexedValue = doc.rootDoc().getBinaryValue("field"); diff --git a/server/src/test/java/org/elasticsearch/index/mapper/BooleanFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/BooleanFieldMapperTests.java index fd98a8199a22f..f672a955cff18 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/BooleanFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/BooleanFieldMapperTests.java @@ -71,7 +71,7 @@ public void testDefaults() throws IOException { DocumentMapper defaultMapper = parser.parse("type", new CompressedXContent(mapping)); - ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1", BytesReference + ParsedDocument doc = defaultMapper.parse(new SourceToParse("test", "type", "1", BytesReference .bytes(XContentFactory.jsonBuilder() .startObject() .field("field", true) @@ -141,7 +141,7 @@ public void testParsesBooleansStrict() throws IOException { .field("field", randomFrom("off", "no", "0", "on", "yes", "1")) .endObject()); MapperParsingException ex = expectThrows(MapperParsingException.class, - () -> defaultMapper.parse(SourceToParse.source("test", "type", "1", source, XContentType.JSON))); + () -> defaultMapper.parse(new SourceToParse("test", "type", "1", source, XContentType.JSON))); assertEquals("failed to parse field [field] of type [boolean]", ex.getMessage()); } @@ -164,7 +164,7 @@ public void testMultiFields() throws IOException { .startObject() .field("field", false) .endObject()); - ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", source, XContentType.JSON)); + ParsedDocument doc = mapper.parse(new SourceToParse("test", "type", "1", source, XContentType.JSON)); assertNotNull(doc.rootDoc().getField("field.as_string")); } @@ -187,7 +187,7 @@ public void testDocValues() throws Exception { DocumentMapper defaultMapper = indexService.mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); - ParsedDocument parsedDoc = defaultMapper.parse(SourceToParse.source("test", "type", "1", BytesReference + ParsedDocument parsedDoc = defaultMapper.parse(new SourceToParse("test", "type", "1", BytesReference .bytes(XContentFactory.jsonBuilder() .startObject() .field("bool1", true) diff --git a/server/src/test/java/org/elasticsearch/index/mapper/CamelCaseFieldNameTests.java b/server/src/test/java/org/elasticsearch/index/mapper/CamelCaseFieldNameTests.java index 09394b396679f..83aca847fa4b8 100644 --- 
a/server/src/test/java/org/elasticsearch/index/mapper/CamelCaseFieldNameTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/CamelCaseFieldNameTests.java @@ -35,7 +35,7 @@ public void testCamelCaseFieldNameStaysAsIs() throws Exception { client().admin().indices().preparePutMapping("test").setType("type").setSource(mapping, XContentType.JSON).get(); DocumentMapper documentMapper = index.mapperService().documentMapper("type"); - ParsedDocument doc = documentMapper.parse(SourceToParse.source("test", "type", "1", + ParsedDocument doc = documentMapper.parse(new SourceToParse("test", "type", "1", BytesReference.bytes(XContentFactory.jsonBuilder().startObject() .field("thisIsCamelCase", "value1") .endObject()), diff --git a/server/src/test/java/org/elasticsearch/index/mapper/CompletionFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/CompletionFieldMapperTests.java index 513ba039c955e..7354af17043eb 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/CompletionFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/CompletionFieldMapperTests.java @@ -166,7 +166,7 @@ public void testParsingMinimal() throws Exception { .parse("type1", new CompressedXContent(mapping)); Mapper fieldMapper = defaultMapper.mappers().getMapper("completion"); - ParsedDocument parsedDocument = defaultMapper.parse(SourceToParse.source("test", "type1", "1", + ParsedDocument parsedDocument = defaultMapper.parse(new SourceToParse("test", "type1", "1", BytesReference.bytes(XContentFactory.jsonBuilder() .startObject() .field("completion", "suggestion") @@ -187,7 +187,7 @@ public void testParsingFailure() throws Exception { .parse("type1", new CompressedXContent(mapping)); MapperParsingException e = expectThrows(MapperParsingException.class, () -> - defaultMapper.parse(SourceToParse.source("test", "type1", "1", + defaultMapper.parse(new SourceToParse("test", "type1", "1", BytesReference.bytes(XContentFactory.jsonBuilder() .startObject() .field("completion", 1.0) @@ -220,7 +220,7 @@ public void testKeywordWithSubCompletionAndContext() throws Exception { DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser() .parse("type1", new CompressedXContent(mapping)); - ParsedDocument parsedDocument = defaultMapper.parse(SourceToParse.source("test", "type1", "1", + ParsedDocument parsedDocument = defaultMapper.parse(new SourceToParse("test", "type1", "1", BytesReference.bytes(XContentFactory.jsonBuilder() .startObject() .array("keywordfield", "key1", "key2", "key3") @@ -275,7 +275,7 @@ public void testCompletionWithContextAndSubCompletion() throws Exception { DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser() .parse("type1", new CompressedXContent(mapping)); - ParsedDocument parsedDocument = defaultMapper.parse(SourceToParse.source("test", "type1", "1", + ParsedDocument parsedDocument = defaultMapper.parse(new SourceToParse("test", "type1", "1", BytesReference.bytes(XContentFactory.jsonBuilder() .startObject() .startObject("suggest") @@ -331,7 +331,7 @@ public void testCompletionWithContextAndSubCompletionIndexByPath() throws Except DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser() .parse("type1", new CompressedXContent(mapping)); - ParsedDocument parsedDocument = defaultMapper.parse(SourceToParse.source("test", "type1", "1", + ParsedDocument parsedDocument = defaultMapper.parse(new SourceToParse("test", "type1", "1", 
BytesReference.bytes(XContentFactory.jsonBuilder() .startObject() .array("suggest", "timmy","starbucks") @@ -368,7 +368,7 @@ public void testKeywordWithSubCompletionAndStringInsert() throws Exception { DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser() .parse("type1", new CompressedXContent(mapping)); - ParsedDocument parsedDocument = defaultMapper.parse(SourceToParse.source("test", "type1", "1", + ParsedDocument parsedDocument = defaultMapper.parse(new SourceToParse("test", "type1", "1", BytesReference.bytes(XContentFactory.jsonBuilder() .startObject() .field("geofield", "drm3btev3e86")//"41.12,-71.34" @@ -399,7 +399,7 @@ public void testCompletionTypeWithSubCompletionFieldAndStringInsert() throws Exc DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser() .parse("type1", new CompressedXContent(mapping)); - ParsedDocument parsedDocument = defaultMapper.parse(SourceToParse.source("test", "type1", "1", + ParsedDocument parsedDocument = defaultMapper.parse(new SourceToParse("test", "type1", "1", BytesReference.bytes(XContentFactory.jsonBuilder() .startObject() .field("suggest", "suggestion") @@ -431,7 +431,7 @@ public void testCompletionTypeWithSubCompletionFieldAndObjectInsert() throws Exc DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser() .parse("type1", new CompressedXContent(mapping)); - ParsedDocument parsedDocument = defaultMapper.parse(SourceToParse.source("test", "type1", "1", + ParsedDocument parsedDocument = defaultMapper.parse(new SourceToParse("test", "type1", "1", BytesReference.bytes(XContentFactory.jsonBuilder() .startObject() .startObject("completion") @@ -469,7 +469,7 @@ public void testCompletionTypeWithSubKeywordFieldAndObjectInsert() throws Except DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser() .parse("type1", new CompressedXContent(mapping)); - ParsedDocument parsedDocument = defaultMapper.parse(SourceToParse.source("test", "type1", "1", + ParsedDocument parsedDocument = defaultMapper.parse(new SourceToParse("test", "type1", "1", BytesReference.bytes(XContentFactory.jsonBuilder() .startObject() .startObject("completion") @@ -509,7 +509,7 @@ public void testCompletionTypeWithSubKeywordFieldAndStringInsert() throws Except DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser() .parse("type1", new CompressedXContent(mapping)); - ParsedDocument parsedDocument = defaultMapper.parse(SourceToParse.source("test", "type1", "1", + ParsedDocument parsedDocument = defaultMapper.parse(new SourceToParse("test", "type1", "1", BytesReference.bytes(XContentFactory.jsonBuilder() .startObject() .field("completion", "suggestion") @@ -537,7 +537,7 @@ public void testParsingMultiValued() throws Exception { .parse("type1", new CompressedXContent(mapping)); Mapper fieldMapper = defaultMapper.mappers().getMapper("completion"); - ParsedDocument parsedDocument = defaultMapper.parse(SourceToParse.source("test", "type1", "1", + ParsedDocument parsedDocument = defaultMapper.parse(new SourceToParse("test", "type1", "1", BytesReference.bytes(XContentFactory.jsonBuilder() .startObject() .array("completion", "suggestion1", "suggestion2") @@ -561,7 +561,7 @@ public void testParsingWithWeight() throws Exception { .parse("type1", new CompressedXContent(mapping)); Mapper fieldMapper = defaultMapper.mappers().getMapper("completion"); - ParsedDocument parsedDocument = defaultMapper.parse(SourceToParse.source("test", "type1", 
"1", + ParsedDocument parsedDocument = defaultMapper.parse(new SourceToParse("test", "type1", "1", BytesReference.bytes(XContentFactory.jsonBuilder() .startObject() .startObject("completion") @@ -587,7 +587,7 @@ public void testParsingMultiValueWithWeight() throws Exception { .parse("type1", new CompressedXContent(mapping)); Mapper fieldMapper = defaultMapper.mappers().getMapper("completion"); - ParsedDocument parsedDocument = defaultMapper.parse(SourceToParse.source("test", "type1", "1", + ParsedDocument parsedDocument = defaultMapper.parse(new SourceToParse("test", "type1", "1", BytesReference.bytes(XContentFactory.jsonBuilder() .startObject() .startObject("completion") @@ -630,7 +630,7 @@ public void testParsingWithGeoFieldAlias() throws Exception { MapperService mapperService = createIndex("test", Settings.EMPTY, "type1", mapping).mapperService(); Mapper fieldMapper = mapperService.documentMapper().mappers().getMapper("completion"); - ParsedDocument parsedDocument = mapperService.documentMapper().parse(SourceToParse.source("test", "type1", "1", + ParsedDocument parsedDocument = mapperService.documentMapper().parse(new SourceToParse("test", "type1", "1", BytesReference.bytes(XContentFactory.jsonBuilder() .startObject() .startObject("completion") @@ -655,7 +655,7 @@ public void testParsingFull() throws Exception { .parse("type1", new CompressedXContent(mapping)); Mapper fieldMapper = defaultMapper.mappers().getMapper("completion"); - ParsedDocument parsedDocument = defaultMapper.parse(SourceToParse.source("test", "type1", "1", + ParsedDocument parsedDocument = defaultMapper.parse(new SourceToParse("test", "type1", "1", BytesReference.bytes(XContentFactory.jsonBuilder() .startObject() .startArray("completion") @@ -693,7 +693,7 @@ public void testParsingMixed() throws Exception { .parse("type1", new CompressedXContent(mapping)); Mapper fieldMapper = defaultMapper.mappers().getMapper("completion"); - ParsedDocument parsedDocument = defaultMapper.parse(SourceToParse.source("test", "type1", "1", + ParsedDocument parsedDocument = defaultMapper.parse(new SourceToParse("test", "type1", "1", BytesReference.bytes(XContentFactory.jsonBuilder() .startObject() .startArray("completion") @@ -733,7 +733,7 @@ public void testNonContextEnabledParsingWithContexts() throws Exception { DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser() .parse("type1", new CompressedXContent(mapping)); try { - defaultMapper.parse(SourceToParse.source("test", "type1", "1", + defaultMapper.parse(new SourceToParse("test", "type1", "1", BytesReference.bytes(XContentFactory.jsonBuilder() .startObject() .startObject("field1") @@ -764,7 +764,7 @@ public void testFieldValueValidation() throws Exception { charsRefBuilder.append("sugg"); charsRefBuilder.setCharAt(2, '\u001F'); try { - defaultMapper.parse(SourceToParse.source("test", "type1", "1", + defaultMapper.parse(new SourceToParse("test", "type1", "1", BytesReference.bytes(XContentFactory.jsonBuilder() .startObject() .field("completion", charsRefBuilder.get().toString()) @@ -779,7 +779,7 @@ public void testFieldValueValidation() throws Exception { charsRefBuilder.setCharAt(2, '\u0000'); try { - defaultMapper.parse(SourceToParse.source("test", "type1", "1", + defaultMapper.parse(new SourceToParse("test", "type1", "1", BytesReference.bytes(XContentFactory.jsonBuilder() .startObject() .field("completion", charsRefBuilder.get().toString()) @@ -794,7 +794,7 @@ public void testFieldValueValidation() throws Exception { charsRefBuilder.setCharAt(2, 
'\u001E'); try { - defaultMapper.parse(SourceToParse.source("test", "type1", "1", + defaultMapper.parse(new SourceToParse("test", "type1", "1", BytesReference.bytes(XContentFactory.jsonBuilder() .startObject() .field("completion", charsRefBuilder.get().toString()) @@ -808,7 +808,7 @@ public void testFieldValueValidation() throws Exception { } // empty inputs are ignored - ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type1", "1", + ParsedDocument doc = defaultMapper.parse(new SourceToParse("test", "type1", "1", BytesReference.bytes(XContentFactory.jsonBuilder() .startObject() .array("completion", " ", "") @@ -821,7 +821,7 @@ public void testFieldValueValidation() throws Exception { assertThat(ignoredFields.stringValue(), equalTo("completion")); // null inputs are ignored - ParsedDocument nullDoc = defaultMapper.parse(SourceToParse.source("test", "type1", "1", + ParsedDocument nullDoc = defaultMapper.parse(new SourceToParse("test", "type1", "1", BytesReference.bytes(XContentFactory.jsonBuilder() .startObject() .nullField("completion") diff --git a/server/src/test/java/org/elasticsearch/index/mapper/CopyToMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/CopyToMapperTests.java index 7936b97fad4f2..8a7a42e18f040 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/CopyToMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/CopyToMapperTests.java @@ -97,7 +97,7 @@ public void testCopyToFieldsParsing() throws Exception { .field("int_to_str_test", 42) .endObject()); - ParsedDocument parsedDoc = docMapper.parse(SourceToParse.source("test", "type1", "1", json, XContentType.JSON)); + ParsedDocument parsedDoc = docMapper.parse(new SourceToParse("test", "type1", "1", json, XContentType.JSON)); ParseContext.Document doc = parsedDoc.rootDoc(); assertThat(doc.getFields("copy_test").length, equalTo(2)); assertThat(doc.getFields("copy_test")[0].stringValue(), equalTo("foo")); @@ -153,7 +153,7 @@ public void testCopyToFieldsInnerObjectParsing() throws Exception { .startObject("foo").startObject("bar").field("baz", "zoo").endObject().endObject() .endObject()); - ParseContext.Document doc = docMapper.parse(SourceToParse.source("test", "type1", "1", json, + ParseContext.Document doc = docMapper.parse(new SourceToParse("test", "type1", "1", json, XContentType.JSON)).rootDoc(); assertThat(doc.getFields("copy_test").length, equalTo(1)); assertThat(doc.getFields("copy_test")[0].stringValue(), equalTo("foo")); @@ -181,7 +181,7 @@ public void testCopyToDynamicInnerObjectParsing() throws Exception { .field("new_field", "bar") .endObject()); - ParseContext.Document doc = docMapper.parse(SourceToParse.source("test", "type1", "1", json, + ParseContext.Document doc = docMapper.parse(new SourceToParse("test", "type1", "1", json, XContentType.JSON)).rootDoc(); assertThat(doc.getFields("copy_test").length, equalTo(1)); assertThat(doc.getFields("copy_test")[0].stringValue(), equalTo("foo")); @@ -219,7 +219,7 @@ public void testCopyToDynamicInnerInnerObjectParsing() throws Exception { .field("new_field", "bar") .endObject()); - ParseContext.Document doc = docMapper.parse(SourceToParse.source("test", "type1", "1", json, + ParseContext.Document doc = docMapper.parse(new SourceToParse("test", "type1", "1", json, XContentType.JSON)).rootDoc(); assertThat(doc.getFields("copy_test").length, equalTo(1)); assertThat(doc.getFields("copy_test")[0].stringValue(), equalTo("foo")); @@ -250,7 +250,7 @@ public void testCopyToStrictDynamicInnerObjectParsing() 
throws Exception { .endObject()); try { - docMapper.parse(SourceToParse.source("test", "type1", "1", json, XContentType.JSON)).rootDoc(); + docMapper.parse(new SourceToParse("test", "type1", "1", json, XContentType.JSON)).rootDoc(); fail(); } catch (MapperParsingException ex) { assertThat(ex.getMessage(), startsWith("mapping set to strict, dynamic introduction of [very] within [type1] is not allowed")); @@ -285,7 +285,7 @@ public void testCopyToInnerStrictDynamicInnerObjectParsing() throws Exception { .endObject()); try { - docMapper.parse(SourceToParse.source("test", "type1", "1", json, XContentType.JSON)).rootDoc(); + docMapper.parse(new SourceToParse("test", "type1", "1", json, XContentType.JSON)).rootDoc(); fail(); } catch (MapperParsingException ex) { assertThat(ex.getMessage(), @@ -393,7 +393,7 @@ public void testCopyToNestedField() throws Exception { .endArray() .endObject(); - ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", + ParsedDocument doc = mapper.parse(new SourceToParse("test", "type", "1", BytesReference.bytes(jsonDoc), XContentType.JSON)); assertEquals(6, doc.docs().size()); @@ -562,7 +562,7 @@ public void testCopyToDynamicNestedObjectParsing() throws Exception { .endObject()); try { - docMapper.parse(SourceToParse.source("test", "type1", "1", json, XContentType.JSON)).rootDoc(); + docMapper.parse(new SourceToParse("test", "type1", "1", json, XContentType.JSON)).rootDoc(); fail(); } catch (MapperParsingException ex) { assertThat(ex.getMessage(), startsWith("It is forbidden to create dynamic nested objects ([very]) through `copy_to`")); diff --git a/server/src/test/java/org/elasticsearch/index/mapper/DateFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/DateFieldMapperTests.java index 023fa5ca051aa..8b437d25a8495 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/DateFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/DateFieldMapperTests.java @@ -66,7 +66,7 @@ public void testDefaults() throws Exception { assertEquals(mapping, mapper.mappingSource().toString()); - ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference + ParsedDocument doc = mapper.parse(new SourceToParse("test", "type", "1", BytesReference .bytes(XContentFactory.jsonBuilder() .startObject() .field("field", "2016-03-11") @@ -95,7 +95,7 @@ public void testNotIndexed() throws Exception { assertEquals(mapping, mapper.mappingSource().toString()); - ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference + ParsedDocument doc = mapper.parse(new SourceToParse("test", "type", "1", BytesReference .bytes(XContentFactory.jsonBuilder() .startObject() .field("field", "2016-03-11") @@ -117,7 +117,7 @@ public void testNoDocValues() throws Exception { assertEquals(mapping, mapper.mappingSource().toString()); - ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference + ParsedDocument doc = mapper.parse(new SourceToParse("test", "type", "1", BytesReference .bytes(XContentFactory.jsonBuilder() .startObject() .field("field", "2016-03-11") @@ -139,7 +139,7 @@ public void testStore() throws Exception { assertEquals(mapping, mapper.mappingSource().toString()); - ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference + ParsedDocument doc = mapper.parse(new SourceToParse("test", "type", "1", BytesReference .bytes(XContentFactory.jsonBuilder() .startObject() .field("field", "2016-03-11") @@ -166,7 
+166,7 @@ public void testIgnoreMalformed() throws Exception { assertEquals(mapping, mapper.mappingSource().toString()); - ThrowingRunnable runnable = () -> mapper.parse(SourceToParse.source("test", "type", "1", BytesReference + ThrowingRunnable runnable = () -> mapper.parse(new SourceToParse("test", "type", "1", BytesReference .bytes(XContentFactory.jsonBuilder() .startObject() .field("field", "2016-03-99") @@ -182,7 +182,7 @@ public void testIgnoreMalformed() throws Exception { DocumentMapper mapper2 = parser.parse("type", new CompressedXContent(mapping)); - ParsedDocument doc = mapper2.parse(SourceToParse.source("test", "type", "1", BytesReference + ParsedDocument doc = mapper2.parse(new SourceToParse("test", "type", "1", BytesReference .bytes(XContentFactory.jsonBuilder() .startObject() .field("field", ":1") @@ -204,7 +204,7 @@ public void testChangeFormat() throws IOException { assertEquals(mapping, mapper.mappingSource().toString()); - ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference + ParsedDocument doc = mapper.parse(new SourceToParse("test", "type", "1", BytesReference .bytes(XContentFactory.jsonBuilder() .startObject() .field("field", 1457654400) @@ -230,7 +230,7 @@ public void testFloatEpochFormat() throws IOException { long epochMillis = randomNonNegativeLong(); String epochFloatValue = epochMillis + "." + randomIntBetween(0, 999); - ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference + ParsedDocument doc = mapper.parse(new SourceToParse("test", "type", "1", BytesReference .bytes(XContentFactory.jsonBuilder() .startObject() .field("field", epochFloatValue) @@ -252,7 +252,7 @@ public void testChangeLocale() throws IOException { assertEquals(mapping, mapper.mappingSource().toString()); - mapper.parse(SourceToParse.source("test", "type", "1", BytesReference + mapper.parse(new SourceToParse("test", "type", "1", BytesReference .bytes(XContentFactory.jsonBuilder() .startObject() .field("field", 1457654400) @@ -273,7 +273,7 @@ public void testNullValue() throws IOException { DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); assertEquals(mapping, mapper.mappingSource().toString()); - ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference + ParsedDocument doc = mapper.parse(new SourceToParse("test", "type", "1", BytesReference .bytes(XContentFactory.jsonBuilder() .startObject() .nullField("field") @@ -294,7 +294,7 @@ public void testNullValue() throws IOException { mapper = parser.parse("type", new CompressedXContent(mapping)); assertEquals(mapping, mapper.mappingSource().toString()); - doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference + doc = mapper.parse(new SourceToParse("test", "type", "1", BytesReference .bytes(XContentFactory.jsonBuilder() .startObject() .nullField("field") @@ -363,7 +363,7 @@ public void testTimeZoneParsing() throws Exception { final DateTimeZone randomTimeZone = randomBoolean() ? 
DateTimeZone.forID(randomFrom("UTC", "CET")) : randomDateTimeZone(); final DateTime randomDate = new DateTime(2016, 03, 11, 0, 0, 0, randomTimeZone); - ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference + ParsedDocument doc = mapper.parse(new SourceToParse("test", "type", "1", BytesReference .bytes(XContentFactory.jsonBuilder() .startObject() .field("field", DateTimeFormat.forPattern(timeZonePattern).print(randomDate)) diff --git a/server/src/test/java/org/elasticsearch/index/mapper/DocumentMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/DocumentMapperTests.java index 8cd39d72ad3a9..ff35e93204ac1 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/DocumentMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/DocumentMapperTests.java @@ -177,7 +177,7 @@ public void run() { barrier.await(); for (int i = 0; i < 200 && stopped.get() == false; i++) { final String fieldName = Integer.toString(i); - ParsedDocument doc = documentMapper.parse(SourceToParse.source("test", + ParsedDocument doc = documentMapper.parse(new SourceToParse("test", "test", fieldName, new BytesArray("{ \"" + fieldName + "\" : \"test\" }"), @@ -200,7 +200,7 @@ public void run() { while(stopped.get() == false) { final String fieldName = lastIntroducedFieldName.get(); final BytesReference source = new BytesArray("{ \"" + fieldName + "\" : \"test\" }"); - ParsedDocument parsedDoc = documentMapper.parse(SourceToParse.source("test", + ParsedDocument parsedDoc = documentMapper.parse(new SourceToParse("test", "test", "random", source, diff --git a/server/src/test/java/org/elasticsearch/index/mapper/DocumentParserTests.java b/server/src/test/java/org/elasticsearch/index/mapper/DocumentParserTests.java index 2ec49e5b20431..ffdb93474c0e9 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/DocumentParserTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/DocumentParserTests.java @@ -73,7 +73,7 @@ public void testFieldDisabled() throws Exception { .field("foo", "1234") .field("bar", 10) .endObject()); - ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", bytes, XContentType.JSON)); + ParsedDocument doc = mapper.parse(new SourceToParse("test", "type", "1", bytes, XContentType.JSON)); assertNull(doc.rootDoc().getField("foo")); assertNotNull(doc.rootDoc().getField("bar")); assertNotNull(doc.rootDoc().getField(IdFieldMapper.NAME)); @@ -98,7 +98,7 @@ public void testDotsWithExistingMapper() throws Exception { .field("baz", 789) .endObject() .endObject()); - ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", bytes, XContentType.JSON)); + ParsedDocument doc = mapper.parse(new SourceToParse("test", "type", "1", bytes, XContentType.JSON)); assertNull(doc.dynamicMappingsUpdate()); // no update! 
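The change running through every hunk in these test files is the same mechanical swap: the SourceToParse.source(...) static factory is dropped in favor of the public constructor, with the (index, type, id, source, xContentType) argument order unchanged. A minimal sketch of the pattern, wrapped in a hypothetical helper class that is not part of this change:

import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.mapper.SourceToParse;

public class SourceToParseMigrationSketch {
    // Every call site in this diff repeats this substitution: same arguments,
    // constructor instead of factory.
    static SourceToParse request(String index, String type, String id, String json) {
        BytesReference source = new BytesArray(json);
        // Old form, removed throughout this diff:
        //   return SourceToParse.source(index, type, id, source, XContentType.JSON);
        return new SourceToParse(index, type, id, source, XContentType.JSON);
    }
}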
String[] values = doc.rootDoc().getValues("foo.bar.baz"); assertEquals(3, values.length); @@ -120,7 +120,7 @@ public void testDotsWithExistingNestedMapper() throws Exception { .field("foo.bar", 123) .endObject()); MapperParsingException e = expectThrows(MapperParsingException.class, - () -> mapper.parse(SourceToParse.source("test", "type", "1", bytes, XContentType.JSON))); + () -> mapper.parse(new SourceToParse("test", "type", "1", bytes, XContentType.JSON))); assertEquals( "Cannot add a value for field [foo.bar] since one of the intermediate objects is mapped as a nested object: [foo]", e.getMessage()); @@ -138,21 +138,21 @@ public void testUnexpectedFieldMappingType() throws Exception { BytesReference bytes = BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field("foo", true) .endObject()); MapperException exception = expectThrows(MapperException.class, - () -> mapper.parse(SourceToParse.source("test", "type", "1", bytes, XContentType.JSON))); + () -> mapper.parse(new SourceToParse("test", "type", "1", bytes, XContentType.JSON))); assertThat(exception.getMessage(), containsString("failed to parse field [foo] of type [long]")); } { BytesReference bytes = BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field("bar", "bar") .endObject()); MapperException exception = expectThrows(MapperException.class, - () -> mapper.parse(SourceToParse.source("test", "type", "2", bytes, XContentType.JSON))); + () -> mapper.parse(new SourceToParse("test", "type", "2", bytes, XContentType.JSON))); assertThat(exception.getMessage(), containsString("failed to parse field [bar] of type [boolean]")); } { BytesReference bytes = BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field("geo", 123) .endObject()); MapperException exception = expectThrows(MapperException.class, - () -> mapper.parse(SourceToParse.source("test", "type", "2", bytes, XContentType.JSON))); + () -> mapper.parse(new SourceToParse("test", "type", "2", bytes, XContentType.JSON))); assertThat(exception.getMessage(), containsString("failed to parse field [geo] of type [geo_shape]")); } @@ -178,7 +178,7 @@ public void testDotsWithDynamicNestedMapper() throws Exception { .field("foo.bar",42) .endObject()); MapperParsingException e = expectThrows(MapperParsingException.class, - () -> mapper.parse(SourceToParse.source("test", "type", "1", bytes, XContentType.JSON))); + () -> mapper.parse(new SourceToParse("test", "type", "1", bytes, XContentType.JSON))); assertEquals( "It is forbidden to create dynamic nested objects ([foo]) through `copy_to` or dots in field names", e.getMessage()); @@ -225,7 +225,7 @@ public void testNestedHaveIdAndTypeFields() throws Exception { doc.endObject(); // Verify in the case where only a single type is allowed that the _id field is added to nested documents: - ParsedDocument result = mapper.parse(SourceToParse.source("index2", "type", "1", + ParsedDocument result = mapper.parse(new SourceToParse("index2", "type", "1", BytesReference.bytes(doc), XContentType.JSON)); assertEquals(2, result.docs().size()); // Nested document: @@ -258,7 +258,7 @@ public void testPropagateDynamicWithExistingMapper() throws Exception { .startObject().startObject("foo") .field("bar", "something") .endObject().endObject()); - ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", bytes, XContentType.JSON)); + ParsedDocument doc = mapper.parse(new SourceToParse("test", "type", "1", bytes, XContentType.JSON)); assertNotNull(doc.dynamicMappingsUpdate()); 
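Where a parse is expected to fail, the hunks just above (for example testUnexpectedFieldMappingType) route the call through expectThrows rather than the try/fail/catch style still visible in CopyToMapperTests earlier in this patch. A sketch of the idiom under the same fixtures; the test name is hypothetical and `mapper` stands in for the DocumentMapper the real tests build:

public void testParseRejectsUnexpectedFieldType() throws Exception {
    // expectThrows (from ESTestCase) takes the failing call as a lambda and
    // returns the thrown exception, so its message can be asserted on directly
    // instead of wrapping the call in try { ...; fail(); } catch (...).
    BytesReference bytes = BytesReference.bytes(XContentFactory.jsonBuilder()
        .startObject().field("foo", true).endObject());
    MapperException e = expectThrows(MapperException.class,
        () -> mapper.parse(new SourceToParse("test", "type", "1", bytes, XContentType.JSON)));
    assertThat(e.getMessage(), containsString("failed to parse field [foo] of type [long]"));
}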
assertNotNull(doc.rootDoc().getField("foo.bar")); } @@ -278,7 +278,7 @@ public void testPropagateDynamicWithDynamicMapper() throws Exception { .startObject().startObject("foo").startObject("bar") .field("baz", "something") .endObject().endObject().endObject()); - ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", bytes, XContentType.JSON)); + ParsedDocument doc = mapper.parse(new SourceToParse("test", "type", "1", bytes, XContentType.JSON)); assertNotNull(doc.dynamicMappingsUpdate()); assertNotNull(doc.rootDoc().getField("foo.bar.baz")); } @@ -297,7 +297,7 @@ public void testDynamicRootFallback() throws Exception { .startObject().startObject("foo") .field("bar", "something") .endObject().endObject()); - ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", bytes, XContentType.JSON)); + ParsedDocument doc = mapper.parse(new SourceToParse("test", "type", "1", bytes, XContentType.JSON)); assertNull(doc.dynamicMappingsUpdate()); assertNull(doc.rootDoc().getField("foo.bar")); } @@ -428,7 +428,7 @@ public void testDynamicGeoPointArrayWithTemplate() throws Exception { .startArray().value(0).value(0).endArray() .startArray().value(1).value(1).endArray() .endArray().endObject()); - ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", bytes, XContentType.JSON)); + ParsedDocument doc = mapper.parse(new SourceToParse("test", "type", "1", bytes, XContentType.JSON)); assertEquals(2, doc.rootDoc().getFields("foo").length); } @@ -446,7 +446,7 @@ public void testDynamicLongArrayWithTemplate() throws Exception { .value(0) .value(1) .endArray().endObject()); - ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", bytes, XContentType.JSON)); + ParsedDocument doc = mapper.parse(new SourceToParse("test", "type", "1", bytes, XContentType.JSON)); assertEquals(4, doc.rootDoc().getFields("foo").length); } @@ -461,7 +461,7 @@ public void testDynamicLongArray() throws Exception { .value(0) .value(1) .endArray().endObject()); - ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", bytes, XContentType.JSON)); + ParsedDocument doc = mapper.parse(new SourceToParse("test", "type", "1", bytes, XContentType.JSON)); assertEquals(4, doc.rootDoc().getFields("foo").length); } @@ -477,7 +477,7 @@ public void testDynamicFalseLongArray() throws Exception { .value(0) .value(1) .endArray().endObject()); - ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", bytes, XContentType.JSON)); + ParsedDocument doc = mapper.parse(new SourceToParse("test", "type", "1", bytes, XContentType.JSON)); assertEquals(0, doc.rootDoc().getFields("foo").length); } @@ -494,7 +494,7 @@ public void testDynamicStrictLongArray() throws Exception { .value(1) .endArray().endObject()); StrictDynamicMappingException exception = expectThrows(StrictDynamicMappingException.class, - () -> mapper.parse(SourceToParse.source("test", "type", "1", bytes, XContentType.JSON))); + () -> mapper.parse(new SourceToParse("test", "type", "1", bytes, XContentType.JSON))); assertEquals("mapping set to strict, dynamic introduction of [foo] within [type] is not allowed", exception.getMessage()); } @@ -511,7 +511,7 @@ public void testMappedGeoPointArray() throws Exception { .startArray().value(0).value(0).endArray() .startArray().value(1).value(1).endArray() .endArray().endObject()); - ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", bytes, XContentType.JSON)); + ParsedDocument doc = mapper.parse(new SourceToParse("test", 
"type", "1", bytes, XContentType.JSON)); assertEquals(2, doc.rootDoc().getFields("foo").length); } @@ -527,7 +527,7 @@ public void testMappedLongArray() throws Exception { .value(0) .value(1) .endArray().endObject()); - ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", bytes, XContentType.JSON)); + ParsedDocument doc = mapper.parse(new SourceToParse("test", "type", "1", bytes, XContentType.JSON)); assertEquals(4, doc.rootDoc().getFields("foo").length); } @@ -545,7 +545,7 @@ public void testDynamicObjectWithTemplate() throws Exception { .startObject().startObject("foo") .field("bar", "baz") .endObject().endObject()); - ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", bytes, XContentType.JSON)); + ParsedDocument doc = mapper.parse(new SourceToParse("test", "type", "1", bytes, XContentType.JSON)); assertEquals(2, doc.rootDoc().getFields("foo.bar").length); } @@ -560,7 +560,7 @@ public void testDynamicFalseObject() throws Exception { .startObject().startObject("foo") .field("bar", "baz") .endObject().endObject()); - ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", bytes, XContentType.JSON)); + ParsedDocument doc = mapper.parse(new SourceToParse("test", "type", "1", bytes, XContentType.JSON)); assertEquals(0, doc.rootDoc().getFields("foo.bar").length); } @@ -576,7 +576,7 @@ public void testDynamicStrictObject() throws Exception { .field("bar", "baz") .endObject().endObject()); StrictDynamicMappingException exception = expectThrows(StrictDynamicMappingException.class, - () -> mapper.parse(SourceToParse.source("test", "type", "1", bytes, XContentType.JSON))); + () -> mapper.parse(new SourceToParse("test", "type", "1", bytes, XContentType.JSON))); assertEquals("mapping set to strict, dynamic introduction of [foo] within [type] is not allowed", exception.getMessage()); } @@ -591,7 +591,7 @@ public void testDynamicFalseValue() throws Exception { .startObject() .field("bar", "baz") .endObject()); - ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", bytes, XContentType.JSON)); + ParsedDocument doc = mapper.parse(new SourceToParse("test", "type", "1", bytes, XContentType.JSON)); assertEquals(0, doc.rootDoc().getFields("bar").length); } @@ -607,7 +607,7 @@ public void testDynamicStrictValue() throws Exception { .field("bar", "baz") .endObject()); StrictDynamicMappingException exception = expectThrows(StrictDynamicMappingException.class, - () -> mapper.parse(SourceToParse.source("test", "type", "1", bytes, XContentType.JSON))); + () -> mapper.parse(new SourceToParse("test", "type", "1", bytes, XContentType.JSON))); assertEquals("mapping set to strict, dynamic introduction of [bar] within [type] is not allowed", exception.getMessage()); } @@ -622,7 +622,7 @@ public void testDynamicFalseNull() throws Exception { .startObject() .field("bar", (String) null) .endObject()); - ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", bytes, XContentType.JSON)); + ParsedDocument doc = mapper.parse(new SourceToParse("test", "type", "1", bytes, XContentType.JSON)); assertEquals(0, doc.rootDoc().getFields("bar").length); } @@ -638,7 +638,7 @@ public void testDynamicStrictNull() throws Exception { .field("bar", (String) null) .endObject()); StrictDynamicMappingException exception = expectThrows(StrictDynamicMappingException.class, - () -> mapper.parse(SourceToParse.source("test", "type", "1", bytes, XContentType.JSON))); + () -> mapper.parse(new SourceToParse("test", "type", "1", bytes, 
XContentType.JSON))); assertEquals("mapping set to strict, dynamic introduction of [bar] within [type] is not allowed", exception.getMessage()); } @@ -652,7 +652,7 @@ public void testMappedNullValue() throws Exception { BytesReference bytes = BytesReference.bytes(XContentFactory.jsonBuilder() .startObject().field("foo", (Long) null) .endObject()); - ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", bytes, XContentType.JSON)); + ParsedDocument doc = mapper.parse(new SourceToParse("test", "type", "1", bytes, XContentType.JSON)); assertEquals(0, doc.rootDoc().getFields("foo").length); } @@ -667,7 +667,7 @@ public void testDynamicDottedFieldNameLongArray() throws Exception { .value(0) .value(1) .endArray().endObject()); - ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", bytes, XContentType.JSON)); + ParsedDocument doc = mapper.parse(new SourceToParse("test", "type", "1", bytes, XContentType.JSON)); assertEquals(4, doc.rootDoc().getFields("foo.bar.baz").length); Mapper fooMapper = doc.dynamicMappingsUpdate().root().getMapper("foo"); assertNotNull(fooMapper); @@ -694,7 +694,7 @@ public void testDynamicDottedFieldNameLongArrayWithParentTemplate() throws Excep .value(0) .value(1) .endArray().endObject()); - ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", bytes, XContentType.JSON)); + ParsedDocument doc = mapper.parse(new SourceToParse("test", "type", "1", bytes, XContentType.JSON)); assertEquals(4, doc.rootDoc().getFields("foo.bar.baz").length); Mapper fooMapper = doc.dynamicMappingsUpdate().root().getMapper("foo"); assertNotNull(fooMapper); @@ -720,7 +720,7 @@ public void testDynamicDottedFieldNameLongArrayWithExistingParent() throws Excep .value(0) .value(1) .endArray().endObject()); - ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", bytes, XContentType.JSON)); + ParsedDocument doc = mapper.parse(new SourceToParse("test", "type", "1", bytes, XContentType.JSON)); assertEquals(4, doc.rootDoc().getFields("foo.bar.baz").length); Mapper fooMapper = doc.dynamicMappingsUpdate().root().getMapper("foo"); assertNotNull(fooMapper); @@ -747,7 +747,7 @@ public void testDynamicDottedFieldNameLongArrayWithExistingParentWrongType() thr .value(1) .endArray().endObject()); MapperParsingException exception = expectThrows(MapperParsingException.class, - () -> mapper.parse(SourceToParse.source("test", "type", "1", bytes, XContentType.JSON))); + () -> mapper.parse(new SourceToParse("test", "type", "1", bytes, XContentType.JSON))); assertEquals("Could not dynamically add mapping for field [foo.bar.baz]. 
" + "Existing mapping for [foo] must be of type object but found [long].", exception.getMessage()); } @@ -764,7 +764,7 @@ public void testDynamicFalseDottedFieldNameLongArray() throws Exception { .value(0) .value(1) .endArray().endObject()); - ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", bytes, XContentType.JSON)); + ParsedDocument doc = mapper.parse(new SourceToParse("test", "type", "1", bytes, XContentType.JSON)); assertEquals(0, doc.rootDoc().getFields("foo.bar.baz").length); } @@ -781,7 +781,7 @@ public void testDynamicStrictDottedFieldNameLongArray() throws Exception { .value(1) .endArray().endObject()); StrictDynamicMappingException exception = expectThrows(StrictDynamicMappingException.class, - () -> mapper.parse(SourceToParse.source("test", "type", "1", bytes, XContentType.JSON))); + () -> mapper.parse(new SourceToParse("test", "type", "1", bytes, XContentType.JSON))); assertEquals("mapping set to strict, dynamic introduction of [foo] within [type] is not allowed", exception.getMessage()); } @@ -794,7 +794,7 @@ public void testDynamicDottedFieldNameLong() throws Exception { BytesReference bytes = BytesReference.bytes(XContentFactory.jsonBuilder() .startObject().field("foo.bar.baz", 0) .endObject()); - ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", bytes, XContentType.JSON)); + ParsedDocument doc = mapper.parse(new SourceToParse("test", "type", "1", bytes, XContentType.JSON)); assertEquals(2, doc.rootDoc().getFields("foo.bar.baz").length); Mapper fooMapper = doc.dynamicMappingsUpdate().root().getMapper("foo"); assertNotNull(fooMapper); @@ -819,7 +819,7 @@ public void testDynamicDottedFieldNameLongWithParentTemplate() throws Exception BytesReference bytes = BytesReference.bytes(XContentFactory.jsonBuilder() .startObject().field("foo.bar.baz", 0) .endObject()); - ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", bytes, XContentType.JSON)); + ParsedDocument doc = mapper.parse(new SourceToParse("test", "type", "1", bytes, XContentType.JSON)); assertEquals(2, doc.rootDoc().getFields("foo.bar.baz").length); Mapper fooMapper = doc.dynamicMappingsUpdate().root().getMapper("foo"); assertNotNull(fooMapper); @@ -843,7 +843,7 @@ public void testDynamicDottedFieldNameLongWithExistingParent() throws Exception BytesReference bytes = BytesReference.bytes(XContentFactory.jsonBuilder() .startObject().field("foo.bar.baz", 0) .endObject()); - ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", bytes, XContentType.JSON)); + ParsedDocument doc = mapper.parse(new SourceToParse("test", "type", "1", bytes, XContentType.JSON)); assertEquals(2, doc.rootDoc().getFields("foo.bar.baz").length); Mapper fooMapper = doc.dynamicMappingsUpdate().root().getMapper("foo"); assertNotNull(fooMapper); @@ -868,7 +868,7 @@ public void testDynamicDottedFieldNameLongWithExistingParentWrongType() throws E .startObject().field("foo.bar.baz", 0) .endObject()); MapperParsingException exception = expectThrows(MapperParsingException.class, - () -> mapper.parse(SourceToParse.source("test", "type", "1", bytes, XContentType.JSON))); + () -> mapper.parse(new SourceToParse("test", "type", "1", bytes, XContentType.JSON))); assertEquals("Could not dynamically add mapping for field [foo.bar.baz]. 
" + "Existing mapping for [foo] must be of type object but found [long].", exception.getMessage()); } @@ -883,7 +883,7 @@ public void testDynamicFalseDottedFieldNameLong() throws Exception { BytesReference bytes = BytesReference.bytes(XContentFactory.jsonBuilder() .startObject().field("foo.bar.baz", 0) .endObject()); - ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", bytes, XContentType.JSON)); + ParsedDocument doc = mapper.parse(new SourceToParse("test", "type", "1", bytes, XContentType.JSON)); assertEquals(0, doc.rootDoc().getFields("foo.bar.baz").length); } @@ -898,7 +898,7 @@ public void testDynamicStrictDottedFieldNameLong() throws Exception { .startObject().field("foo.bar.baz", 0) .endObject()); StrictDynamicMappingException exception = expectThrows(StrictDynamicMappingException.class, - () -> mapper.parse(SourceToParse.source("test", "type", "1", bytes, XContentType.JSON))); + () -> mapper.parse(new SourceToParse("test", "type", "1", bytes, XContentType.JSON))); assertEquals("mapping set to strict, dynamic introduction of [foo] within [type] is not allowed", exception.getMessage()); } @@ -912,7 +912,7 @@ public void testDynamicDottedFieldNameObject() throws Exception { .startObject().startObject("foo.bar.baz") .field("a", 0) .endObject().endObject()); - ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", bytes, XContentType.JSON)); + ParsedDocument doc = mapper.parse(new SourceToParse("test", "type", "1", bytes, XContentType.JSON)); assertEquals(2, doc.rootDoc().getFields("foo.bar.baz.a").length); Mapper fooMapper = doc.dynamicMappingsUpdate().root().getMapper("foo"); assertNotNull(fooMapper); @@ -941,7 +941,7 @@ public void testDynamicDottedFieldNameObjectWithParentTemplate() throws Exceptio .startObject().startObject("foo.bar.baz") .field("a", 0) .endObject().endObject()); - ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", bytes, XContentType.JSON)); + ParsedDocument doc = mapper.parse(new SourceToParse("test", "type", "1", bytes, XContentType.JSON)); assertEquals(2, doc.rootDoc().getFields("foo.bar.baz.a").length); Mapper fooMapper = doc.dynamicMappingsUpdate().root().getMapper("foo"); assertNotNull(fooMapper); @@ -965,7 +965,7 @@ public void testDynamicDottedFieldNameObjectWithExistingParent() throws Exceptio BytesReference bytes = BytesReference.bytes(XContentFactory.jsonBuilder().startObject().startObject("foo.bar.baz") .field("a", 0).endObject().endObject()); - ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", bytes, XContentType.JSON)); + ParsedDocument doc = mapper.parse(new SourceToParse("test", "type", "1", bytes, XContentType.JSON)); assertEquals(2, doc.rootDoc().getFields("foo.bar.baz.a").length); Mapper fooMapper = doc.dynamicMappingsUpdate().root().getMapper("foo"); assertNotNull(fooMapper); @@ -992,7 +992,7 @@ public void testDynamicDottedFieldNameObjectWithExistingParentWrongType() throws BytesReference bytes = BytesReference.bytes(XContentFactory.jsonBuilder().startObject().startObject("foo.bar.baz") .field("a", 0).endObject().endObject()); MapperParsingException exception = expectThrows(MapperParsingException.class, - () -> mapper.parse(SourceToParse.source("test", "type", "1", bytes, XContentType.JSON))); + () -> mapper.parse(new SourceToParse("test", "type", "1", bytes, XContentType.JSON))); assertEquals("Could not dynamically add mapping for field [foo.bar.baz]. 
" + "Existing mapping for [foo] must be of type object but found [long].", exception.getMessage()); @@ -1009,7 +1009,7 @@ public void testDynamicFalseDottedFieldNameObject() throws Exception { .startObject().startObject("foo.bar.baz") .field("a", 0) .endObject().endObject()); - ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", bytes, XContentType.JSON)); + ParsedDocument doc = mapper.parse(new SourceToParse("test", "type", "1", bytes, XContentType.JSON)); assertEquals(0, doc.rootDoc().getFields("foo.bar.baz.a").length); } @@ -1025,7 +1025,7 @@ public void testDynamicStrictDottedFieldNameObject() throws Exception { .field("a", 0) .endObject().endObject()); StrictDynamicMappingException exception = expectThrows(StrictDynamicMappingException.class, - () -> mapper.parse(SourceToParse.source("test", "type", "1", bytes, XContentType.JSON))); + () -> mapper.parse(new SourceToParse("test", "type", "1", bytes, XContentType.JSON))); assertEquals("mapping set to strict, dynamic introduction of [foo] within [type] is not allowed", exception.getMessage()); } @@ -1036,12 +1036,12 @@ public void testDocumentContainsMetadataField() throws Exception { BytesReference bytes = BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field("_ttl", 0).endObject()); MapperParsingException e = expectThrows(MapperParsingException.class, () -> - mapper.parse(SourceToParse.source("test", "type", "1", bytes, XContentType.JSON))); + mapper.parse(new SourceToParse("test", "type", "1", bytes, XContentType.JSON))); assertTrue(e.getMessage(), e.getMessage().contains("cannot be added inside a document")); BytesReference bytes2 = BytesReference.bytes(XContentFactory.jsonBuilder().startObject() .field("foo._ttl", 0).endObject()); - mapper.parse(SourceToParse.source("test", "type", "1", bytes2, XContentType.JSON)); // parses without error + mapper.parse(new SourceToParse("test", "type", "1", bytes2, XContentType.JSON)); // parses without error } public void testSimpleMapper() throws Exception { @@ -1053,10 +1053,10 @@ public void testSimpleMapper() throws Exception { indexService.mapperService()).build(indexService.mapperService()); BytesReference json = new BytesArray(copyToBytesFromClasspath("/org/elasticsearch/index/mapper/simple/test1.json")); - Document doc = docMapper.parse(SourceToParse.source("test", "person", "1", json, XContentType.JSON)).rootDoc(); + Document doc = docMapper.parse(new SourceToParse("test", "person", "1", json, XContentType.JSON)).rootDoc(); assertThat(doc.get(docMapper.mappers().getMapper("name.first").name()), equalTo("shay")); - doc = docMapper.parse(SourceToParse.source("test", "person", "1", json, XContentType.JSON)).rootDoc(); + doc = docMapper.parse(new SourceToParse("test", "person", "1", json, XContentType.JSON)).rootDoc(); } public void testParseToJsonAndParse() throws Exception { @@ -1067,7 +1067,7 @@ public void testParseToJsonAndParse() throws Exception { // reparse it DocumentMapper builtDocMapper = parser.parse("person", new CompressedXContent(builtMapping)); BytesReference json = new BytesArray(copyToBytesFromClasspath("/org/elasticsearch/index/mapper/simple/test1.json")); - Document doc = builtDocMapper.parse(SourceToParse.source("test", "person", "1", json, XContentType.JSON)).rootDoc(); + Document doc = builtDocMapper.parse(new SourceToParse("test", "person", "1", json, XContentType.JSON)).rootDoc(); assertThat(doc.getBinaryValue(docMapper.idFieldMapper().name()), equalTo(Uid.encodeId("1"))); 
assertThat(doc.get(docMapper.mappers().getMapper("name.first").name()), equalTo("shay")); } @@ -1080,7 +1080,7 @@ public void testSimpleParser() throws Exception { assertThat((String) docMapper.meta().get("param1"), equalTo("value1")); BytesReference json = new BytesArray(copyToBytesFromClasspath("/org/elasticsearch/index/mapper/simple/test1.json")); - Document doc = docMapper.parse(SourceToParse.source("test", "person", "1", json, XContentType.JSON)).rootDoc(); + Document doc = docMapper.parse(new SourceToParse("test", "person", "1", json, XContentType.JSON)).rootDoc(); assertThat(doc.getBinaryValue(docMapper.idFieldMapper().name()), equalTo(Uid.encodeId("1"))); assertThat(doc.get(docMapper.mappers().getMapper("name.first").name()), equalTo("shay")); } @@ -1090,7 +1090,7 @@ public void testSimpleParserNoTypeNoId() throws Exception { DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser() .parse("person", new CompressedXContent(mapping)); BytesReference json = new BytesArray(copyToBytesFromClasspath("/org/elasticsearch/index/mapper/simple/test1-notype-noid.json")); - Document doc = docMapper.parse(SourceToParse.source("test", "person", "1", json, XContentType.JSON)).rootDoc(); + Document doc = docMapper.parse(new SourceToParse("test", "person", "1", json, XContentType.JSON)).rootDoc(); assertThat(doc.getBinaryValue(docMapper.idFieldMapper().name()), equalTo(Uid.encodeId("1"))); assertThat(doc.get(docMapper.mappers().getMapper("name.first").name()), equalTo("shay")); } @@ -1117,7 +1117,7 @@ public void testNoDocumentSent() throws Exception { BytesReference json = new BytesArray("".getBytes(StandardCharsets.UTF_8)); try { - docMapper.parse(SourceToParse.source("test", "person", "1", json, XContentType.JSON)).rootDoc(); + docMapper.parse(new SourceToParse("test", "person", "1", json, XContentType.JSON)).rootDoc(); fail("this point is never reached"); } catch (MapperParsingException e) { assertThat(e.getMessage(), equalTo("failed to parse, document is empty")); @@ -1130,7 +1130,7 @@ public void testNoLevel() throws Exception { DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser() .parse("type", new CompressedXContent(defaultMapping)); - ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1", BytesReference + ParsedDocument doc = defaultMapper.parse(new SourceToParse("test", "type", "1", BytesReference .bytes(XContentFactory.jsonBuilder() .startObject() .field("test1", "value1") @@ -1150,7 +1150,7 @@ public void testTypeLevel() throws Exception { DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser() .parse("type", new CompressedXContent(defaultMapping)); - ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1", BytesReference + ParsedDocument doc = defaultMapper.parse(new SourceToParse("test", "type", "1", BytesReference .bytes(XContentFactory.jsonBuilder() .startObject().startObject("type") .field("test1", "value1") @@ -1170,7 +1170,7 @@ public void testNoLevelWithFieldTypeAsValue() throws Exception { DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser() .parse("type", new CompressedXContent(defaultMapping)); - ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1", BytesReference + ParsedDocument doc = defaultMapper.parse(new SourceToParse("test", "type", "1", BytesReference .bytes(XContentFactory.jsonBuilder() .startObject() .field("type", "value_type") @@ -1192,7 +1192,7 @@ 
public void testTypeLevelWithFieldTypeAsValue() throws Exception { DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser() .parse("type", new CompressedXContent(defaultMapping)); - ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1", BytesReference + ParsedDocument doc = defaultMapper.parse(new SourceToParse("test", "type", "1", BytesReference .bytes(XContentFactory.jsonBuilder() .startObject().startObject("type") .field("type", "value_type") @@ -1214,7 +1214,7 @@ public void testNoLevelWithFieldTypeAsObject() throws Exception { DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser() .parse("type", new CompressedXContent(defaultMapping)); - ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1", BytesReference + ParsedDocument doc = defaultMapper.parse(new SourceToParse("test", "type", "1", BytesReference .bytes(XContentFactory.jsonBuilder() .startObject() .startObject("type").field("type_field", "type_value").endObject() @@ -1236,7 +1236,7 @@ public void testTypeLevelWithFieldTypeAsObject() throws Exception { DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser() .parse("type", new CompressedXContent(defaultMapping)); - ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1", BytesReference + ParsedDocument doc = defaultMapper.parse(new SourceToParse("test", "type", "1", BytesReference .bytes(XContentFactory.jsonBuilder() .startObject().startObject("type") .startObject("type").field("type_field", "type_value").endObject() @@ -1258,7 +1258,7 @@ public void testNoLevelWithFieldTypeAsValueNotFirst() throws Exception { DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser() .parse("type", new CompressedXContent(defaultMapping)); - ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1", BytesReference + ParsedDocument doc = defaultMapper.parse(new SourceToParse("test", "type", "1", BytesReference .bytes(XContentFactory.jsonBuilder() .startObject().startObject("type") .field("test1", "value1") @@ -1280,7 +1280,7 @@ public void testTypeLevelWithFieldTypeAsValueNotFirst() throws Exception { DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser() .parse("type", new CompressedXContent(defaultMapping)); - ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1", BytesReference + ParsedDocument doc = defaultMapper.parse(new SourceToParse("test", "type", "1", BytesReference .bytes(XContentFactory.jsonBuilder() .startObject().startObject("type") .field("test1", "value1") @@ -1302,7 +1302,7 @@ public void testNoLevelWithFieldTypeAsObjectNotFirst() throws Exception { DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser() .parse("type", new CompressedXContent(defaultMapping)); - ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1", BytesReference + ParsedDocument doc = defaultMapper.parse(new SourceToParse("test", "type", "1", BytesReference .bytes(XContentFactory.jsonBuilder() .startObject() .field("test1", "value1") @@ -1325,7 +1325,7 @@ public void testTypeLevelWithFieldTypeAsObjectNotFirst() throws Exception { DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser() .parse("type", new CompressedXContent(defaultMapping)); - ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1", 
BytesReference + ParsedDocument doc = defaultMapper.parse(new SourceToParse("test", "type", "1", BytesReference .bytes(XContentFactory.jsonBuilder() .startObject().startObject("type") .field("test1", "value1") @@ -1356,7 +1356,7 @@ public void testDynamicDateDetectionDisabledOnNumbers() throws IOException { // Even though we matched the dynamic format, we do not match on numbers, // which are too likely to be false positives - ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", bytes, XContentType.JSON)); + ParsedDocument doc = mapper.parse(new SourceToParse("test", "type", "1", bytes, XContentType.JSON)); Mapping update = doc.dynamicMappingsUpdate(); assertNotNull(update); Mapper dateMapper = update.root().getMapper("foo"); @@ -1378,7 +1378,7 @@ public void testDynamicDateDetectionEnabledWithNoSpecialCharacters() throws IOEx .endObject()); // We should have generated a date field - ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", bytes, XContentType.JSON)); + ParsedDocument doc = mapper.parse(new SourceToParse("test", "type", "1", bytes, XContentType.JSON)); Mapping update = doc.dynamicMappingsUpdate(); assertNotNull(update); Mapper dateMapper = update.root().getMapper("foo"); @@ -1481,7 +1481,7 @@ public void testWriteToFieldAlias() throws Exception { .field("alias-field", "value") .endObject()); MapperParsingException exception = expectThrows(MapperParsingException.class, - () -> mapper.parse(SourceToParse.source("test", "type", "1", bytes, XContentType.JSON))); + () -> mapper.parse(new SourceToParse("test", "type", "1", bytes, XContentType.JSON))); assertEquals("Cannot write to a field alias [alias-field].", exception.getCause().getMessage()); } @@ -1514,7 +1514,7 @@ public void testCopyToFieldAlias() throws Exception { .field("text-field", "value") .endObject()); MapperParsingException exception = expectThrows(MapperParsingException.class, - () -> mapper.parse(SourceToParse.source("test", "type", "1", bytes, XContentType.JSON))); + () -> mapper.parse(new SourceToParse("test", "type", "1", bytes, XContentType.JSON))); assertEquals("Cannot copy to a field alias [alias-field].", exception.getCause().getMessage()); } @@ -1545,7 +1545,7 @@ public void testDynamicDottedFieldNameWithFieldAlias() throws Exception { .endObject() .endObject()); MapperParsingException exception = expectThrows(MapperParsingException.class, - () -> mapper.parse(SourceToParse.source("test", "type", "1", bytes, XContentType.JSON))); + () -> mapper.parse(new SourceToParse("test", "type", "1", bytes, XContentType.JSON))); assertEquals("Could not dynamically add mapping for field [alias-field.dynamic-field]. 
" + "Existing mapping for [alias-field] must be of type object but found [alias].", exception.getMessage()); @@ -1564,7 +1564,7 @@ public void testTypeless() throws IOException { .field("foo", "1234") .endObject()); - ParsedDocument doc = mapper.parse(SourceToParse.source("test", "_doc", "1", bytes, XContentType.JSON)); + ParsedDocument doc = mapper.parse(new SourceToParse("test", "_doc", "1", bytes, XContentType.JSON)); assertNull(doc.dynamicMappingsUpdate()); // no update since we reused the existing type } } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/DoubleIndexingDocTests.java b/server/src/test/java/org/elasticsearch/index/mapper/DoubleIndexingDocTests.java index 23e205b8f58d7..2eea666e8fe27 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/DoubleIndexingDocTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/DoubleIndexingDocTests.java @@ -50,7 +50,7 @@ public void testDoubleIndexingSameDoc() throws Exception { QueryShardContext context = index.newQueryShardContext(0, null, () -> 0L, null); - ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference + ParsedDocument doc = mapper.parse(new SourceToParse("test", "type", "1", BytesReference .bytes(XContentFactory.jsonBuilder() .startObject() .field("field1", "value1") diff --git a/server/src/test/java/org/elasticsearch/index/mapper/DynamicMappingTests.java b/server/src/test/java/org/elasticsearch/index/mapper/DynamicMappingTests.java index 8fc03a49c85e2..56e6f5e4c6b04 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/DynamicMappingTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/DynamicMappingTests.java @@ -70,7 +70,7 @@ public void testDynamicTrue() throws IOException { DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser() .parse("type", new CompressedXContent(mapping)); - ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1", BytesReference + ParsedDocument doc = defaultMapper.parse(new SourceToParse("test", "type", "1", BytesReference .bytes(jsonBuilder() .startObject() .field("field1", "value1") @@ -93,7 +93,7 @@ public void testDynamicFalse() throws IOException { DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser() .parse("type", new CompressedXContent(mapping)); - ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1", BytesReference + ParsedDocument doc = defaultMapper.parse(new SourceToParse("test", "type", "1", BytesReference .bytes(jsonBuilder() .startObject() .field("field1", "value1") @@ -118,7 +118,7 @@ public void testDynamicStrict() throws IOException { .parse("type", new CompressedXContent(mapping)); StrictDynamicMappingException e = expectThrows(StrictDynamicMappingException.class, - () -> defaultMapper.parse(SourceToParse.source("test", "type", "1", + () -> defaultMapper.parse(new SourceToParse("test", "type", "1", BytesReference.bytes(jsonBuilder() .startObject() .field("field1", "value1") @@ -128,7 +128,7 @@ public void testDynamicStrict() throws IOException { assertThat(e.getMessage(), equalTo("mapping set to strict, dynamic introduction of [field2] within [type] is not allowed")); e = expectThrows(StrictDynamicMappingException.class, - () -> defaultMapper.parse(SourceToParse.source("test", "type", "1", + () -> defaultMapper.parse(new SourceToParse("test", "type", "1", BytesReference.bytes(XContentFactory.jsonBuilder() .startObject() .field("field1", "value1") @@ -151,7 
+151,7 @@ public void testDynamicFalseWithInnerObjectButDynamicSetOnRoot() throws IOExcept DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser() .parse("type", new CompressedXContent(mapping)); - ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1", + ParsedDocument doc = defaultMapper.parse(new SourceToParse("test", "type", "1", BytesReference.bytes(jsonBuilder() .startObject().startObject("obj1") .field("field1", "value1") @@ -178,7 +178,7 @@ public void testDynamicStrictWithInnerObjectButDynamicSetOnRoot() throws IOExcep .parse("type", new CompressedXContent(mapping)); StrictDynamicMappingException e = expectThrows(StrictDynamicMappingException.class, () -> - defaultMapper.parse(SourceToParse.source("test", "type", "1", + defaultMapper.parse(new SourceToParse("test", "type", "1", BytesReference.bytes(jsonBuilder() .startObject().startObject("obj1") .field("field1", "value1") @@ -207,7 +207,7 @@ private Mapper parse(DocumentMapper mapper, DocumentMapperParser parser, XConten .settings(Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)) .numberOfShards(1).numberOfReplicas(0).build(); IndexSettings settings = new IndexSettings(build, Settings.EMPTY); - SourceToParse source = SourceToParse.source("test", mapper.type(), "some_id", + SourceToParse source = new SourceToParse("test", mapper.type(), "some_id", BytesReference.bytes(builder), builder.contentType()); try (XContentParser xContentParser = createParser(JsonXContent.jsonXContent, source.source())) { ParseContext.InternalParseContext ctx = new ParseContext.InternalParseContext(settings, parser, mapper, source, xContentParser); @@ -561,7 +561,7 @@ public void testMixTemplateMultiFieldAndMappingReuse() throws Exception { XContentBuilder json = XContentFactory.jsonBuilder().startObject() .field("field", "foo") .endObject(); - SourceToParse source = SourceToParse.source("test", "_doc", "1", BytesReference.bytes(json), json.contentType()); + SourceToParse source = new SourceToParse("test", "_doc", "1", BytesReference.bytes(json), json.contentType()); DocumentMapper mapper = indexService.mapperService().documentMapper("_doc"); assertNull(mapper.mappers().getMapper("field.raw")); ParsedDocument parsed = mapper.parse(source); @@ -596,7 +596,7 @@ private void doTestDefaultFloatingPointMappings(DocumentMapper mapper, XContentB .field("baz", (double) 3.2f) // double that can be accurately represented as a float .field("quux", "3.2") // float detected through numeric detection .endObject()); - ParsedDocument parsedDocument = mapper.parse(SourceToParse.source("index", "type", "id", + ParsedDocument parsedDocument = mapper.parse(new SourceToParse("index", "type", "id", source, builder.contentType())); Mapping update = parsedDocument.dynamicMappingsUpdate(); assertNotNull(update); @@ -615,7 +615,7 @@ public void testNumericDetectionEnabled() throws Exception { client().admin().indices().preparePutMapping("test").setType("type").setSource(mapping, XContentType.JSON).get(); DocumentMapper defaultMapper = index.mapperService().documentMapper("type"); - ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1", BytesReference + ParsedDocument doc = defaultMapper.parse(new SourceToParse("test", "type", "1", BytesReference .bytes(XContentFactory.jsonBuilder() .startObject() .field("s_long", "100") @@ -642,7 +642,7 @@ public void testNumericDetectionDefault() throws Exception { 
client().admin().indices().preparePutMapping("test").setType("type").setSource(mapping, XContentType.JSON).get(); DocumentMapper defaultMapper = index.mapperService().documentMapper("type"); - ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1", BytesReference + ParsedDocument doc = defaultMapper.parse(new SourceToParse("test", "type", "1", BytesReference .bytes(XContentFactory.jsonBuilder() .startObject() .field("s_long", "100") @@ -691,7 +691,7 @@ public void testDateDetectionInheritsFormat() throws Exception { client().admin().indices().preparePutMapping("test").setType("type").setSource(mapping, XContentType.JSON).get(); DocumentMapper defaultMapper = index.mapperService().documentMapper("type"); - ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1", BytesReference + ParsedDocument doc = defaultMapper.parse(new SourceToParse("test", "type", "1", BytesReference .bytes(XContentFactory.jsonBuilder() .startObject() .field("date1", "2016-11-20") diff --git a/server/src/test/java/org/elasticsearch/index/mapper/DynamicMappingVersionTests.java b/server/src/test/java/org/elasticsearch/index/mapper/DynamicMappingVersionTests.java index fba85b1909cec..6b451b64db0d1 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/DynamicMappingVersionTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/DynamicMappingVersionTests.java @@ -45,7 +45,7 @@ public void testDynamicMappingDefault() throws IOException { .documentMapperWithAutoCreate("my-type").getDocumentMapper(); ParsedDocument parsedDoc = documentMapper.parse( - SourceToParse.source("my-index", "my-type", "1", BytesReference + new SourceToParse("my-index", "my-type", "1", BytesReference .bytes(XContentFactory.jsonBuilder() .startObject() .field("foo", 3) diff --git a/server/src/test/java/org/elasticsearch/index/mapper/DynamicTemplatesTests.java b/server/src/test/java/org/elasticsearch/index/mapper/DynamicTemplatesTests.java index 62c764e8060af..ff44dec81d962 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/DynamicTemplatesTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/DynamicTemplatesTests.java @@ -50,7 +50,7 @@ public void testMatchTypeOnly() throws Exception { DocumentMapper docMapper = mapperService.documentMapper("person"); builder = JsonXContent.contentBuilder(); builder.startObject().field("s", "hello").field("l", 1).endObject(); - ParsedDocument parsedDoc = docMapper.parse(SourceToParse.source("test", "person", "1", BytesReference.bytes(builder), + ParsedDocument parsedDoc = docMapper.parse(new SourceToParse("test", "person", "1", BytesReference.bytes(builder), XContentType.JSON)); client().admin().indices().preparePutMapping("test").setType("person") .setSource(parsedDoc.dynamicMappingsUpdate().toString(), XContentType.JSON).get(); @@ -70,7 +70,7 @@ public void testSimple() throws Exception { client().admin().indices().preparePutMapping("test").setType("person").setSource(mapping, XContentType.JSON).get(); DocumentMapper docMapper = index.mapperService().documentMapper("person"); byte[] json = copyToBytesFromClasspath("/org/elasticsearch/index/mapper/dynamictemplate/simple/test-data.json"); - ParsedDocument parsedDoc = docMapper.parse(SourceToParse.source("test", "person", "1", new BytesArray(json), + ParsedDocument parsedDoc = docMapper.parse(new SourceToParse("test", "person", "1", new BytesArray(json), XContentType.JSON)); client().admin().indices().preparePutMapping("test").setType("person") 
.setSource(parsedDoc.dynamicMappingsUpdate().toString(), XContentType.JSON).get(); @@ -129,7 +129,7 @@ public void testSimpleWithXContentTraverse() throws Exception { client().admin().indices().preparePutMapping("test").setType("person").setSource(mapping, XContentType.JSON).get(); DocumentMapper docMapper = index.mapperService().documentMapper("person"); byte[] json = copyToBytesFromClasspath("/org/elasticsearch/index/mapper/dynamictemplate/simple/test-data.json"); - ParsedDocument parsedDoc = docMapper.parse(SourceToParse.source("test", "person", "1", new BytesArray(json), + ParsedDocument parsedDoc = docMapper.parse(new SourceToParse("test", "person", "1", new BytesArray(json), XContentType.JSON)); client().admin().indices().preparePutMapping("test").setType("person") .setSource(parsedDoc.dynamicMappingsUpdate().toString(), XContentType.JSON).get(); diff --git a/server/src/test/java/org/elasticsearch/index/mapper/ExternalFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/ExternalFieldMapperTests.java index 1d65fb27c55b5..e2f9f798a1441 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/ExternalFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/ExternalFieldMapperTests.java @@ -87,7 +87,7 @@ public void testExternalValues() throws Exception { .endObject().endObject()) )); - ParsedDocument doc = documentMapper.parse(SourceToParse.source("test", "type", "1", BytesReference + ParsedDocument doc = documentMapper.parse(new SourceToParse("test", "type", "1", BytesReference .bytes(XContentFactory.jsonBuilder() .startObject() .field("field", "1234") @@ -146,7 +146,7 @@ public void testExternalValuesWithMultifield() throws Exception { .endObject() .endObject().endObject().endObject()))); - ParsedDocument doc = documentMapper.parse(SourceToParse.source("test", "type", "1", BytesReference + ParsedDocument doc = documentMapper.parse(new SourceToParse("test", "type", "1", BytesReference .bytes(XContentFactory.jsonBuilder() .startObject() .field("field", "1234") @@ -213,7 +213,7 @@ public void testExternalValuesWithMultifieldTwoLevels() throws Exception { .endObject() .endObject().endObject().endObject()))); - ParsedDocument doc = documentMapper.parse(SourceToParse.source("test", "type", "1", BytesReference + ParsedDocument doc = documentMapper.parse(new SourceToParse("test", "type", "1", BytesReference .bytes(XContentFactory.jsonBuilder() .startObject() .field("field", "1234") diff --git a/server/src/test/java/org/elasticsearch/index/mapper/FieldNamesFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/FieldNamesFieldMapperTests.java index 6d7601357a336..d0d4a6b1d15b7 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/FieldNamesFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/FieldNamesFieldMapperTests.java @@ -82,7 +82,7 @@ public void testInjectIntoDocDuringParsing() throws Exception { DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser() .parse("type", new CompressedXContent(mapping)); - ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1", + ParsedDocument doc = defaultMapper.parse(new SourceToParse("test", "type", "1", BytesReference.bytes(XContentFactory.jsonBuilder() .startObject() .field("a", "100") @@ -106,7 +106,7 @@ public void testExplicitEnabled() throws Exception { FieldNamesFieldMapper fieldNamesMapper = docMapper.metadataMapper(FieldNamesFieldMapper.class); 
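The FieldNamesFieldMapperTests hunks around this point exercise the _field_names metadata field in both states; a sketch of the disabled case, assuming the createIndex and documentMapperParser helpers the file already relies on:

// Disabling _field_names in the type mapping means parsed documents carry no
// _field_names entries; the tests verify this through the metadata mapper.
String mapping = Strings.toString(XContentFactory.jsonBuilder()
    .startObject().startObject("type")
        .startObject("_field_names").field("enabled", false).endObject()
    .endObject().endObject());
DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser()
    .parse("type", new CompressedXContent(mapping));
assertFalse(docMapper.metadataMapper(FieldNamesFieldMapper.class).fieldType().isEnabled());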
         assertTrue(fieldNamesMapper.fieldType().isEnabled());
 
-        ParsedDocument doc = docMapper.parse(SourceToParse.source("test", "type", "1",
+        ParsedDocument doc = docMapper.parse(new SourceToParse("test", "type", "1",
             BytesReference.bytes(XContentFactory.jsonBuilder()
                 .startObject()
                 .field("field", "value")
@@ -125,7 +125,7 @@ public void testDisabled() throws Exception {
         FieldNamesFieldMapper fieldNamesMapper = docMapper.metadataMapper(FieldNamesFieldMapper.class);
         assertFalse(fieldNamesMapper.fieldType().isEnabled());
 
-        ParsedDocument doc = docMapper.parse(SourceToParse.source("test", "type", "1",
+        ParsedDocument doc = docMapper.parse(new SourceToParse("test", "type", "1",
             BytesReference.bytes(XContentFactory.jsonBuilder()
                 .startObject()
                 .field("field", "value")
diff --git a/server/src/test/java/org/elasticsearch/index/mapper/GenericStoreDynamicTemplateTests.java b/server/src/test/java/org/elasticsearch/index/mapper/GenericStoreDynamicTemplateTests.java
index 6999e39b70a7d..e67469bcd85ff 100644
--- a/server/src/test/java/org/elasticsearch/index/mapper/GenericStoreDynamicTemplateTests.java
+++ b/server/src/test/java/org/elasticsearch/index/mapper/GenericStoreDynamicTemplateTests.java
@@ -40,7 +40,7 @@ public void testSimple() throws Exception {
         byte[] json = copyToBytesFromClasspath("/org/elasticsearch/index/mapper/dynamictemplate/genericstore/test-data.json");
 
         ParsedDocument parsedDoc = mapperService.documentMapper().parse(
-            SourceToParse.source("test", "person", "1", new BytesArray(json), XContentType.JSON));
+            new SourceToParse("test", "person", "1", new BytesArray(json), XContentType.JSON));
         client().admin().indices().preparePutMapping("test").setType("person")
             .setSource(parsedDoc.dynamicMappingsUpdate().toString(), XContentType.JSON).get();
         Document doc = parsedDoc.rootDoc();
diff --git a/server/src/test/java/org/elasticsearch/index/mapper/GeoPointFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/GeoPointFieldMapperTests.java
index a2c17e68b78a5..f5597ecb1f443 100644
--- a/server/src/test/java/org/elasticsearch/index/mapper/GeoPointFieldMapperTests.java
+++ b/server/src/test/java/org/elasticsearch/index/mapper/GeoPointFieldMapperTests.java
@@ -65,7 +65,7 @@ public void testGeoHashValue() throws Exception {
         DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser()
             .parse("type", new CompressedXContent(mapping));
 
-        ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1",
+        ParsedDocument doc = defaultMapper.parse(new SourceToParse("test", "type", "1",
             BytesReference.bytes(XContentFactory.jsonBuilder()
                 .startObject()
                 .field("point", stringEncode(1.3, 1.2))
@@ -82,7 +82,7 @@ public void testLatLonValuesStored() throws Exception {
         DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser()
             .parse("type", new CompressedXContent(mapping));
 
-        ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1",
+        ParsedDocument doc = defaultMapper.parse(new SourceToParse("test", "type", "1",
             BytesReference.bytes(XContentFactory.jsonBuilder()
                 .startObject()
                 .startObject("point").field("lat", 1.2).field("lon", 1.3).endObject()
@@ -99,7 +99,7 @@ public void testArrayLatLonValues() throws Exception {
         DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser()
             .parse("type", new CompressedXContent(mapping));
 
-        ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1",
+        ParsedDocument doc = defaultMapper.parse(new SourceToParse("test", "type", "1",
             BytesReference.bytes(XContentFactory.jsonBuilder()
                 .startObject()
                 .startArray("point")
@@ -121,7 +121,7 @@ public void testLatLonInOneValue() throws Exception {
         DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser()
             .parse("type", new CompressedXContent(mapping));
 
-        ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1",
+        ParsedDocument doc = defaultMapper.parse(new SourceToParse("test", "type", "1",
             BytesReference.bytes(XContentFactory.jsonBuilder()
                 .startObject()
                 .field("point", "1.2,1.3")
@@ -139,7 +139,7 @@ public void testLatLonStringWithZValue() throws Exception {
         DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser()
             .parse("type", new CompressedXContent(mapping));
 
-        ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1",
+        ParsedDocument doc = defaultMapper.parse(new SourceToParse("test", "type", "1",
             BytesReference.bytes(XContentFactory.jsonBuilder()
                 .startObject()
                 .field("point", "1.2,1.3,10.0")
@@ -157,7 +157,7 @@ public void testLatLonStringWithZValueException() throws Exception {
         DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser()
             .parse("type", new CompressedXContent(mapping));
 
-        SourceToParse source = SourceToParse.source("test", "type", "1",
+        SourceToParse source = new SourceToParse("test", "type", "1",
             BytesReference.bytes(XContentFactory.jsonBuilder()
                 .startObject()
                 .field("point", "1.2,1.3,10.0")
@@ -175,7 +175,7 @@ public void testLatLonInOneValueStored() throws Exception {
         DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser()
             .parse("type", new CompressedXContent(mapping));
 
-        ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1",
+        ParsedDocument doc = defaultMapper.parse(new SourceToParse("test", "type", "1",
             BytesReference.bytes(XContentFactory.jsonBuilder()
                 .startObject()
                 .field("point", "1.2,1.3")
@@ -191,7 +191,7 @@ public void testLatLonInOneValueArray() throws Exception {
         DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser()
             .parse("type", new CompressedXContent(mapping));
 
-        ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1",
+        ParsedDocument doc = defaultMapper.parse(new SourceToParse("test", "type", "1",
             BytesReference.bytes(XContentFactory.jsonBuilder()
                 .startObject()
                 .startArray("point")
@@ -213,7 +213,7 @@ public void testLonLatArray() throws Exception {
         DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser()
             .parse("type", new CompressedXContent(mapping));
 
-        ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1",
+        ParsedDocument doc = defaultMapper.parse(new SourceToParse("test", "type", "1",
             BytesReference.bytes(XContentFactory.jsonBuilder()
                 .startObject()
                 .startArray("point").value(1.3).value(1.2).endArray()
@@ -231,7 +231,7 @@ public void testLonLatArrayDynamic() throws Exception {
         DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser()
             .parse("type", new CompressedXContent(mapping));
 
-        ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1",
+        ParsedDocument doc = defaultMapper.parse(new SourceToParse("test", "type", "1",
             BytesReference.bytes(XContentFactory.jsonBuilder()
                 .startObject()
                 .startArray("point").value(1.3).value(1.2).endArray()
@@ -248,7 +248,7 @@ public void testLonLatArrayStored() throws Exception {
         DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser()
             .parse("type", new CompressedXContent(mapping));
 
-        ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1",
+        ParsedDocument doc = defaultMapper.parse(new SourceToParse("test", "type", "1",
             BytesReference.bytes(XContentFactory.jsonBuilder()
                 .startObject()
                 .startArray("point").value(1.3).value(1.2).endArray()
@@ -268,7 +268,7 @@ public void testLonLatArrayArrayStored() throws Exception {
         DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser()
             .parse("type", new CompressedXContent(mapping));
 
-        ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1",
+        ParsedDocument doc = defaultMapper.parse(new SourceToParse("test", "type", "1",
             BytesReference.bytes(XContentFactory.jsonBuilder()
                 .startObject()
                 .startArray("point")
@@ -381,7 +381,7 @@ public void testNullValue() throws Exception {
         Object nullValue = ((GeoPointFieldMapper) fieldMapper).fieldType().nullValue();
         assertThat(nullValue, equalTo(new GeoPoint(1, 2)));
 
-        ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1",
+        ParsedDocument doc = defaultMapper.parse(new SourceToParse("test", "type", "1",
             BytesReference.bytes(XContentFactory.jsonBuilder()
                 .startObject()
                 .nullField("location")
@@ -391,7 +391,7 @@ public void testNullValue() throws Exception {
         assertThat(doc.rootDoc().getField("location"), notNullValue());
         BytesRef defaultValue = doc.rootDoc().getField("location").binaryValue();
 
-        doc = defaultMapper.parse(SourceToParse.source("test", "type", "1",
+        doc = defaultMapper.parse(new SourceToParse("test", "type", "1",
             BytesReference.bytes(XContentFactory.jsonBuilder()
                 .startObject()
                 .field("location", "1, 2")
@@ -400,7 +400,7 @@ public void testNullValue() throws Exception {
         // Shouldn't matter if we specify the value explicitly or use null value
         assertThat(defaultValue, equalTo(doc.rootDoc().getField("location").binaryValue()));
 
-        doc = defaultMapper.parse(SourceToParse.source("test", "type", "1",
+        doc = defaultMapper.parse(new SourceToParse("test", "type", "1",
             BytesReference.bytes(XContentFactory.jsonBuilder()
                 .startObject()
                 .field("location", "3, 4")
@@ -422,7 +422,7 @@ public void testInvalidGeohashIgnored() throws Exception {
         DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser()
             .parse("type", new CompressedXContent(mapping));
 
-        ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1",
+        ParsedDocument doc = defaultMapper.parse(new SourceToParse("test", "type", "1",
             BytesReference.bytes(XContentFactory.jsonBuilder()
                 .startObject()
                 .field("location", "1234.333")
@@ -445,7 +445,7 @@ public void testInvalidGeohashNotIgnored() throws Exception {
             .parse("type", new CompressedXContent(mapping));
 
         MapperParsingException ex = expectThrows(MapperParsingException.class,
-            () -> defaultMapper.parse(SourceToParse.source("test", "type", "1",
+            () -> defaultMapper.parse(new SourceToParse("test", "type", "1",
                 BytesReference.bytes(XContentFactory.jsonBuilder()
                     .startObject()
                     .field("location", "1234.333")
@@ -469,57 +469,57 @@ public void testInvalidGeopointValuesIgnored() throws Exception {
         DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser()
             .parse("type", new CompressedXContent(mapping));
 
-        assertThat(defaultMapper.parse(SourceToParse.source("test", "type", "1",
+        assertThat(defaultMapper.parse(new SourceToParse("test", "type", "1",
             BytesReference.bytes(XContentFactory.jsonBuilder()
                 .startObject().field("location", "1234.333").endObject()
             ), XContentType.JSON)).rootDoc().getField("location"), nullValue());
 
-        assertThat(defaultMapper.parse(SourceToParse.source("test", "type", "1",
+        assertThat(defaultMapper.parse(new SourceToParse("test", "type", "1",
             BytesReference.bytes(XContentFactory.jsonBuilder()
                 .startObject().field("lat", "-").field("lon", 1.3).endObject()
             ), XContentType.JSON)).rootDoc().getField("location"), nullValue());
 
-        assertThat(defaultMapper.parse(SourceToParse.source("test", "type", "1",
+        assertThat(defaultMapper.parse(new SourceToParse("test", "type", "1",
             BytesReference.bytes(XContentFactory.jsonBuilder()
                 .startObject().field("lat", 1.3).field("lon", "-").endObject()
             ), XContentType.JSON)).rootDoc().getField("location"), nullValue());
 
-        assertThat(defaultMapper.parse(SourceToParse.source("test", "type", "1",
+        assertThat(defaultMapper.parse(new SourceToParse("test", "type", "1",
             BytesReference.bytes(XContentFactory.jsonBuilder()
                 .startObject().field("location", "-,1.3").endObject()
             ), XContentType.JSON)).rootDoc().getField("location"), nullValue());
 
-        assertThat(defaultMapper.parse(SourceToParse.source("test", "type", "1",
+        assertThat(defaultMapper.parse(new SourceToParse("test", "type", "1",
             BytesReference.bytes(XContentFactory.jsonBuilder()
                 .startObject().field("location", "1.3,-").endObject()
             ), XContentType.JSON)).rootDoc().getField("location"), nullValue());
 
-        assertThat(defaultMapper.parse(SourceToParse.source("test", "type", "1",
+        assertThat(defaultMapper.parse(new SourceToParse("test", "type", "1",
             BytesReference.bytes(XContentFactory.jsonBuilder()
                 .startObject().field("lat", "NaN").field("lon", "NaN").endObject()
             ), XContentType.JSON)).rootDoc().getField("location"), nullValue());
 
-        assertThat(defaultMapper.parse(SourceToParse.source("test", "type", "1",
+        assertThat(defaultMapper.parse(new SourceToParse("test", "type", "1",
             BytesReference.bytes(XContentFactory.jsonBuilder()
                 .startObject().field("lat", 12).field("lon", "NaN").endObject()
             ), XContentType.JSON)).rootDoc().getField("location"), nullValue());
 
-        assertThat(defaultMapper.parse(SourceToParse.source("test", "type", "1",
+        assertThat(defaultMapper.parse(new SourceToParse("test", "type", "1",
             BytesReference.bytes(XContentFactory.jsonBuilder()
                 .startObject().field("lat", "NaN").field("lon", 10).endObject()
             ), XContentType.JSON)).rootDoc().getField("location"), nullValue());
 
-        assertThat(defaultMapper.parse(SourceToParse.source("test", "type", "1",
+        assertThat(defaultMapper.parse(new SourceToParse("test", "type", "1",
             BytesReference.bytes(XContentFactory.jsonBuilder()
                 .startObject().field("location", "NaN,NaN").endObject()
             ), XContentType.JSON)).rootDoc().getField("location"), nullValue());
 
-        assertThat(defaultMapper.parse(SourceToParse.source("test", "type", "1",
+        assertThat(defaultMapper.parse(new SourceToParse("test", "type", "1",
             BytesReference.bytes(XContentFactory.jsonBuilder()
                 .startObject().field("location", "10,NaN").endObject()
             ), XContentType.JSON)).rootDoc().getField("location"), nullValue());
 
-        assertThat(defaultMapper.parse(SourceToParse.source("test", "type", "1",
+        assertThat(defaultMapper.parse(new SourceToParse("test", "type", "1",
             BytesReference.bytes(XContentFactory.jsonBuilder()
                 .startObject().field("location", "NaN,12").endObject()
             ), XContentType.JSON)).rootDoc().getField("location"), nullValue());
diff --git a/server/src/test/java/org/elasticsearch/index/mapper/IdFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/IdFieldMapperTests.java
index aab5e98ed0a12..f015dee1f8931 100644
--- a/server/src/test/java/org/elasticsearch/index/mapper/IdFieldMapperTests.java
+++ b/server/src/test/java/org/elasticsearch/index/mapper/IdFieldMapperTests.java
@@ -50,7 +50,7 @@ public void testIncludeInObjectNotAllowed() throws Exception {
             .parse("type", new CompressedXContent(mapping));
 
         try {
-            docMapper.parse(SourceToParse.source("test", "type", "1", BytesReference.bytes(XContentFactory.jsonBuilder()
+            docMapper.parse(new SourceToParse("test", "type", "1", BytesReference.bytes(XContentFactory.jsonBuilder()
                 .startObject().field("_id", "1").endObject()), XContentType.JSON));
             fail("Expected failure to parse metadata field");
         } catch (MapperParsingException e) {
@@ -62,7 +62,7 @@ public void testDefaults() throws IOException {
         Settings indexSettings = Settings.EMPTY;
         MapperService mapperService = createIndex("test", indexSettings).mapperService();
         DocumentMapper mapper = mapperService.merge("type", new CompressedXContent("{\"type\":{}}"), MergeReason.MAPPING_UPDATE);
-        ParsedDocument document = mapper.parse(SourceToParse.source("index", "type", "id",
+        ParsedDocument document = mapper.parse(new SourceToParse("index", "type", "id",
             new BytesArray("{}"), XContentType.JSON));
         IndexableField[] fields = document.rootDoc().getFields(IdFieldMapper.NAME);
         assertEquals(1, fields.length);
diff --git a/server/src/test/java/org/elasticsearch/index/mapper/IndexFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/IndexFieldMapperTests.java
index e60b097aaca36..7fd867f9b3425 100644
--- a/server/src/test/java/org/elasticsearch/index/mapper/IndexFieldMapperTests.java
+++ b/server/src/test/java/org/elasticsearch/index/mapper/IndexFieldMapperTests.java
@@ -47,7 +47,7 @@ public void testDefaultDisabledIndexMapper() throws Exception {
         DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser()
             .parse("type", new CompressedXContent(mapping));
 
-        ParsedDocument doc = docMapper.parse(SourceToParse.source("test", "type", "1",
+        ParsedDocument doc = docMapper.parse(new SourceToParse("test", "type", "1",
             BytesReference.bytes(XContentFactory.jsonBuilder()
                 .startObject()
                 .field("field", "value")
diff --git a/server/src/test/java/org/elasticsearch/index/mapper/IpFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/IpFieldMapperTests.java
index c5eded8f5ab11..6396b3988bc7d 100644
--- a/server/src/test/java/org/elasticsearch/index/mapper/IpFieldMapperTests.java
+++ b/server/src/test/java/org/elasticsearch/index/mapper/IpFieldMapperTests.java
@@ -68,7 +68,7 @@ public void testDefaults() throws Exception {
 
         assertEquals(mapping, mapper.mappingSource().toString());
 
-        ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference
+        ParsedDocument doc = mapper.parse(new SourceToParse("test", "type", "1", BytesReference
             .bytes(XContentFactory.jsonBuilder()
                 .startObject()
                 .field("field", "::1")
@@ -97,7 +97,7 @@ public void testNotIndexed() throws Exception {
 
         assertEquals(mapping, mapper.mappingSource().toString());
 
-        ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference
+        ParsedDocument doc = mapper.parse(new SourceToParse("test", "type", "1", BytesReference
             .bytes(XContentFactory.jsonBuilder()
                 .startObject()
                 .field("field", "::1")
@@ -119,7 +119,7 @@ public void testNoDocValues() throws Exception {
 
         assertEquals(mapping, mapper.mappingSource().toString());
 
-        ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference
+        ParsedDocument doc = mapper.parse(new SourceToParse("test", "type", "1", BytesReference
             .bytes(XContentFactory.jsonBuilder()
                 .startObject()
                 .field("field", "::1")
@@ -142,7 +142,7 @@ public void testStore() throws Exception {
 
         assertEquals(mapping, mapper.mappingSource().toString());
 
-        ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference
+        ParsedDocument doc = mapper.parse(new SourceToParse("test", "type", "1", BytesReference
             .bytes(XContentFactory.jsonBuilder()
                 .startObject()
                 .field("field", "::1")
@@ -170,7 +170,7 @@ public void testIgnoreMalformed() throws Exception {
 
         assertEquals(mapping, mapper.mappingSource().toString());
 
-        ThrowingRunnable runnable = () -> mapper.parse(SourceToParse.source("test", "type", "1", BytesReference
+        ThrowingRunnable runnable = () -> mapper.parse(new SourceToParse("test", "type", "1", BytesReference
             .bytes(XContentFactory.jsonBuilder()
                 .startObject()
                 .field("field", ":1")
@@ -185,7 +185,7 @@ public void testIgnoreMalformed() throws Exception {
 
         DocumentMapper mapper2 = parser.parse("type", new CompressedXContent(mapping));
 
-        ParsedDocument doc = mapper2.parse(SourceToParse.source("test", "type", "1", BytesReference
+        ParsedDocument doc = mapper2.parse(new SourceToParse("test", "type", "1", BytesReference
             .bytes(XContentFactory.jsonBuilder()
                 .startObject()
                 .field("field", ":1")
@@ -210,7 +210,7 @@ public void testNullValue() throws IOException {
         DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));
         assertEquals(mapping, mapper.mappingSource().toString());
 
-        ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference
+        ParsedDocument doc = mapper.parse(new SourceToParse("test", "type", "1", BytesReference
             .bytes(XContentFactory.jsonBuilder()
                 .startObject()
                 .nullField("field")
@@ -231,7 +231,7 @@ public void testNullValue() throws IOException {
         mapper = parser.parse("type", new CompressedXContent(mapping));
         assertEquals(mapping, mapper.mappingSource().toString());
 
-        doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference
+        doc = mapper.parse(new SourceToParse("test", "type", "1", BytesReference
             .bytes(XContentFactory.jsonBuilder()
                 .startObject()
                 .nullField("field")
diff --git a/server/src/test/java/org/elasticsearch/index/mapper/IpRangeFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/IpRangeFieldMapperTests.java
index 2c70b25d6a446..9b126da4c2bc9 100644
--- a/server/src/test/java/org/elasticsearch/index/mapper/IpRangeFieldMapperTests.java
+++ b/server/src/test/java/org/elasticsearch/index/mapper/IpRangeFieldMapperTests.java
@@ -60,7 +60,7 @@ public void testStoreCidr() throws Exception {
         cases.put("192.168.0.0/17", "192.168.127.255");
         for (final Map.Entry<String, String> entry : cases.entrySet()) {
             ParsedDocument doc =
-                mapper.parse(SourceToParse.source("test", "type", "1", BytesReference.bytes(XContentFactory.jsonBuilder()
+                mapper.parse(new SourceToParse("test", "type", "1", BytesReference.bytes(XContentFactory.jsonBuilder()
                     .startObject()
                     .field("field", entry.getKey())
                     .endObject()),
diff --git a/server/src/test/java/org/elasticsearch/index/mapper/JavaMultiFieldMergeTests.java b/server/src/test/java/org/elasticsearch/index/mapper/JavaMultiFieldMergeTests.java
index 8060c0a3f92c8..fba695ba86cac 100644
--- a/server/src/test/java/org/elasticsearch/index/mapper/JavaMultiFieldMergeTests.java
+++ b/server/src/test/java/org/elasticsearch/index/mapper/JavaMultiFieldMergeTests.java
@@ -45,7 +45,7 @@ public void testMergeMultiField() throws Exception {
         BytesReference json = BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field("name", "some name").endObject());
         Document doc = mapperService.documentMapper().parse(
-            SourceToParse.source("test", "person", "1", json, XContentType.JSON)).rootDoc();
+            new SourceToParse("test", "person", "1", json, XContentType.JSON)).rootDoc();
         IndexableField f = doc.getField("name");
         assertThat(f, notNullValue());
         f = doc.getField("name.indexed");
@@ -61,7 +61,7 @@ public void testMergeMultiField() throws Exception {
         assertThat(mapperService.fullName("name.not_indexed2"), nullValue());
         assertThat(mapperService.fullName("name.not_indexed3"), nullValue());
 
-        doc = mapperService.documentMapper().parse(SourceToParse.source("test", "person", "1", json, XContentType.JSON)).rootDoc();
+        doc = mapperService.documentMapper().parse(new SourceToParse("test", "person", "1", json, XContentType.JSON)).rootDoc();
         f = doc.getField("name");
         assertThat(f, notNullValue());
         f = doc.getField("name.indexed");
@@ -99,7 +99,7 @@ public void testUpgradeFromMultiFieldTypeToMultiFields() throws Exception {
         BytesReference json = BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field("name", "some name").endObject());
         Document doc = mapperService.documentMapper().parse(
-            SourceToParse.source("test", "person", "1", json, XContentType.JSON)).rootDoc();
+            new SourceToParse("test", "person", "1", json, XContentType.JSON)).rootDoc();
         IndexableField f = doc.getField("name");
         assertThat(f, notNullValue());
         f = doc.getField("name.indexed");
@@ -117,7 +117,7 @@ public void testUpgradeFromMultiFieldTypeToMultiFields() throws Exception {
         assertThat(mapperService.fullName("name.not_indexed3"), nullValue());
 
         doc = mapperService.documentMapper().parse(
-            SourceToParse.source("test", "person", "1", json, XContentType.JSON)).rootDoc();
+            new SourceToParse("test", "person", "1", json, XContentType.JSON)).rootDoc();
         f = doc.getField("name");
         assertThat(f, notNullValue());
         f = doc.getField("name.indexed");
diff --git a/server/src/test/java/org/elasticsearch/index/mapper/KeywordFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/KeywordFieldMapperTests.java
index 8e5c81e58f189..d7f8d48fc5cf3 100644
--- a/server/src/test/java/org/elasticsearch/index/mapper/KeywordFieldMapperTests.java
+++ b/server/src/test/java/org/elasticsearch/index/mapper/KeywordFieldMapperTests.java
@@ -110,7 +110,7 @@ public void testDefaults() throws Exception {
 
         assertEquals(mapping, mapper.mappingSource().toString());
 
-        ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference
+        ParsedDocument doc = mapper.parse(new SourceToParse("test", "type", "1", BytesReference
             .bytes(XContentFactory.jsonBuilder()
                 .startObject()
                 .field("field", "1234")
@@ -147,7 +147,7 @@ public void testIgnoreAbove() throws IOException {
 
         assertEquals(mapping, mapper.mappingSource().toString());
 
-        ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference
+        ParsedDocument doc = mapper.parse(new SourceToParse("test", "type", "1", BytesReference
             .bytes(XContentFactory.jsonBuilder()
                 .startObject()
                 .field("field", "elk")
@@ -157,7 +157,7 @@ public void testIgnoreAbove() throws IOException {
         IndexableField[] fields = doc.rootDoc().getFields("field");
         assertEquals(2, fields.length);
 
-        doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference
+        doc = mapper.parse(new SourceToParse("test", "type", "1", BytesReference
             .bytes(XContentFactory.jsonBuilder()
                 .startObject()
                 .field("field", "elasticsearch")
@@ -176,7 +176,7 @@ public void testNullValue() throws IOException {
         DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));
         assertEquals(mapping, mapper.mappingSource().toString());
 
-        ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference
+        ParsedDocument doc = mapper.parse(new SourceToParse("test", "type", "1", BytesReference
             .bytes(XContentFactory.jsonBuilder()
                 .startObject()
                 .nullField("field")
@@ -192,7 +192,7 @@ public void testNullValue() throws IOException {
 
         assertEquals(mapping, mapper.mappingSource().toString());
 
-        doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference
+        doc = mapper.parse(new SourceToParse("test", "type", "1", BytesReference
             .bytes(XContentFactory.jsonBuilder()
                 .startObject()
                 .endObject()),
@@ -201,7 +201,7 @@ public void testNullValue() throws IOException {
         IndexableField[] fields = doc.rootDoc().getFields("field");
         assertEquals(0, fields.length);
 
-        doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference
+        doc = mapper.parse(new SourceToParse("test", "type", "1", BytesReference
             .bytes(XContentFactory.jsonBuilder()
                 .startObject()
                 .nullField("field")
@@ -222,7 +222,7 @@ public void testEnableStore() throws IOException {
 
         assertEquals(mapping, mapper.mappingSource().toString());
 
-        ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference
+        ParsedDocument doc = mapper.parse(new SourceToParse("test", "type", "1", BytesReference
             .bytes(XContentFactory.jsonBuilder()
                 .startObject()
                 .field("field", "1234")
@@ -243,7 +243,7 @@ public void testDisableIndex() throws IOException {
 
         assertEquals(mapping, mapper.mappingSource().toString());
 
-        ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference
+        ParsedDocument doc = mapper.parse(new SourceToParse("test", "type", "1", BytesReference
             .bytes(XContentFactory.jsonBuilder()
                 .startObject()
                 .field("field", "1234")
@@ -265,7 +265,7 @@ public void testDisableDocValues() throws IOException {
 
         assertEquals(mapping, mapper.mappingSource().toString());
 
-        ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference
+        ParsedDocument doc = mapper.parse(new SourceToParse("test", "type", "1", BytesReference
             .bytes(XContentFactory.jsonBuilder()
                 .startObject()
                 .field("field", "1234")
@@ -287,7 +287,7 @@ public void testIndexOptions() throws IOException {
 
         assertEquals(mapping, mapper.mappingSource().toString());
 
-        ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference
+        ParsedDocument doc = mapper.parse(new SourceToParse("test", "type", "1", BytesReference
             .bytes(XContentFactory.jsonBuilder()
                 .startObject()
                 .field("field", "1234")
@@ -333,7 +333,7 @@ public void testEnableNorms() throws IOException {
         DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));
         assertEquals(mapping, mapper.mappingSource().toString());
 
-        ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference
+        ParsedDocument doc = mapper.parse(new SourceToParse("test", "type", "1", BytesReference
             .bytes(XContentFactory.jsonBuilder()
                 .startObject()
                 .field("field", "1234")
@@ -358,7 +358,7 @@ public void testNormalizer() throws IOException {
 
         assertEquals(mapping, mapper.mappingSource().toString());
 
-        ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference
+        ParsedDocument doc = mapper.parse(new SourceToParse("test", "type", "1", BytesReference
             .bytes(XContentFactory.jsonBuilder()
                 .startObject()
                 .field("field", "AbC")
diff --git a/server/src/test/java/org/elasticsearch/index/mapper/MapperServiceTests.java b/server/src/test/java/org/elasticsearch/index/mapper/MapperServiceTests.java
index b5eeef0fa2847..2c1a75b40d4c0 100644
--- a/server/src/test/java/org/elasticsearch/index/mapper/MapperServiceTests.java
+++ b/server/src/test/java/org/elasticsearch/index/mapper/MapperServiceTests.java
@@ -39,8 +39,6 @@
 import java.io.IOException;
 import java.util.Collection;
 import java.util.Collections;
-import java.util.HashMap;
-import java.util.Map;
 import java.util.concurrent.ExecutionException;
 
 import static org.hamcrest.CoreMatchers.containsString;
@@ -179,25 +177,7 @@ public void testUnmappedFieldType() {
         assertWarnings("[unmapped_type:string] should be replaced with [unmapped_type:keyword]");
     }
 
-    public void testMergeWithMap() throws Throwable {
-        IndexService indexService1 = createIndex("index1");
-        MapperService mapperService = indexService1.mapperService();
-        Map<String, Map<String, Object>> mappings = new HashMap<>();
-
-        mappings.put(MapperService.DEFAULT_MAPPING, MapperService.parseMapping(xContentRegistry(), "{}"));
-        MapperException e = expectThrows(MapperParsingException.class,
-            () -> mapperService.merge(mappings, MergeReason.MAPPING_UPDATE));
-        assertThat(e.getMessage(), startsWith("Failed to parse mapping [" + MapperService.DEFAULT_MAPPING + "]: "));
-
-        mappings.clear();
-        mappings.put("type1", MapperService.parseMapping(xContentRegistry(), "{}"));
-
-        e = expectThrows( MapperParsingException.class,
-            () -> mapperService.merge(mappings, MergeReason.MAPPING_UPDATE));
-        assertThat(e.getMessage(), startsWith("Failed to parse mapping [type1]: "));
-    }
-
-    public void testPartitionedConstraints() {
+    public void testPartitionedConstraints() {
         // partitioned index must have routing
         IllegalArgumentException noRoutingException = expectThrows(IllegalArgumentException.class, () -> {
             client().admin().indices().prepareCreate("test-index")
diff --git a/server/src/test/java/org/elasticsearch/index/mapper/MultiFieldTests.java b/server/src/test/java/org/elasticsearch/index/mapper/MultiFieldTests.java
index 8e350cfe1c77d..6bd0b608f3f0e 100644
--- a/server/src/test/java/org/elasticsearch/index/mapper/MultiFieldTests.java
+++ b/server/src/test/java/org/elasticsearch/index/mapper/MultiFieldTests.java
@@ -64,7 +64,7 @@ private void testMultiField(String mapping) throws Exception {
         BytesReference json = new BytesArray(copyToBytesFromClasspath("/org/elasticsearch/index/mapper/multifield/test-data.json"));
         Document doc = mapperService.documentMapper().parse(
-            SourceToParse.source("test", "person", "1", json, XContentType.JSON)).rootDoc();
+            new SourceToParse("test", "person", "1", json, XContentType.JSON)).rootDoc();
 
         IndexableField f = doc.getField("name");
         assertThat(f.name(), equalTo("name"));
@@ -140,7 +140,7 @@ public void testBuildThenParse() throws Exception {
 
         BytesReference json = new BytesArray(copyToBytesFromClasspath("/org/elasticsearch/index/mapper/multifield/test-data.json"));
-        Document doc = docMapper.parse(SourceToParse.source("test", "person", "1", json, XContentType.JSON)).rootDoc();
+        Document doc = docMapper.parse(new SourceToParse("test", "person", "1", json, XContentType.JSON)).rootDoc();
 
         IndexableField f = doc.getField("name");
         assertThat(f.name(), equalTo("name"));
diff --git a/server/src/test/java/org/elasticsearch/index/mapper/NestedObjectMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/NestedObjectMapperTests.java
index 1d339aa9bbb00..d3f41589fb1fd 100644
--- a/server/src/test/java/org/elasticsearch/index/mapper/NestedObjectMapperTests.java
+++ b/server/src/test/java/org/elasticsearch/index/mapper/NestedObjectMapperTests.java
@@ -63,7 +63,7 @@ public void testEmptyNested() throws Exception {
         DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser()
             .parse("type", new CompressedXContent(mapping));
 
-        ParsedDocument doc = docMapper.parse(SourceToParse.source("test", "type", "1", BytesReference
+        ParsedDocument doc = docMapper.parse(new SourceToParse("test", "type", "1", BytesReference
             .bytes(XContentFactory.jsonBuilder()
                 .startObject()
                 .field("field", "value")
@@ -73,7 +73,7 @@ public void testEmptyNested() throws Exception {
 
         assertThat(doc.docs().size(), equalTo(1));
 
-        doc = docMapper.parse(SourceToParse.source("test", "type", "1", BytesReference
+        doc = docMapper.parse(new SourceToParse("test", "type", "1", BytesReference
             .bytes(XContentFactory.jsonBuilder()
                 .startObject()
                 .field("field", "value")
@@ -96,7 +96,7 @@ public void testSingleNested() throws Exception {
         ObjectMapper nested1Mapper = docMapper.objectMappers().get("nested1");
         assertThat(nested1Mapper.nested().isNested(), equalTo(true));
 
-        ParsedDocument doc = docMapper.parse(SourceToParse.source("test", "type", "1", BytesReference
+        ParsedDocument doc = docMapper.parse(new SourceToParse("test", "type", "1", BytesReference
             .bytes(XContentFactory.jsonBuilder()
                 .startObject()
                 .field("field", "value")
@@ -112,7 +112,7 @@ public void testSingleNested() throws Exception {
 
         assertThat(doc.docs().get(1).get("field"), equalTo("value"));
 
-        doc = docMapper.parse(SourceToParse.source("test", "type", "1", BytesReference
+        doc = docMapper.parse(new SourceToParse("test", "type", "1", BytesReference
            .bytes(XContentFactory.jsonBuilder()
                 .startObject()
                 .field("field", "value")
@@ -154,7 +154,7 @@ public void testMultiNested() throws Exception {
         assertThat(nested2Mapper.nested().isIncludeInParent(), equalTo(false));
         assertThat(nested2Mapper.nested().isIncludeInRoot(), equalTo(false));
 
-        ParsedDocument doc = docMapper.parse(SourceToParse.source("test", "type", "1",
+        ParsedDocument doc = docMapper.parse(new SourceToParse("test", "type", "1",
             BytesReference.bytes(XContentFactory.jsonBuilder()
                 .startObject()
                 .field("field", "value")
@@ -215,7 +215,7 @@ public void testMultiObjectAndNested1() throws Exception {
         assertThat(nested2Mapper.nested().isIncludeInParent(), equalTo(true));
         assertThat(nested2Mapper.nested().isIncludeInRoot(), equalTo(false));
 
-        ParsedDocument doc = docMapper.parse(SourceToParse.source("test", "type", "1",
+        ParsedDocument doc = docMapper.parse(new SourceToParse("test", "type", "1",
             BytesReference.bytes(XContentFactory.jsonBuilder()
                 .startObject()
                 .field("field", "value")
@@ -278,7 +278,7 @@ public void testMultiObjectAndNested2() throws Exception {
         assertThat(nested2Mapper.nested().isIncludeInParent(), equalTo(true));
         assertThat(nested2Mapper.nested().isIncludeInRoot(), equalTo(false));
 
-        ParsedDocument doc = docMapper.parse(SourceToParse.source("test", "type", "1",
+        ParsedDocument doc = docMapper.parse(new SourceToParse("test", "type", "1",
             BytesReference.bytes(XContentFactory.jsonBuilder()
                 .startObject()
                 .field("field", "value")
@@ -339,7 +339,7 @@ public void testMultiRootAndNested1() throws Exception {
         assertThat(nested2Mapper.nested().isIncludeInParent(), equalTo(false));
         assertThat(nested2Mapper.nested().isIncludeInRoot(), equalTo(true));
 
-        ParsedDocument doc = docMapper.parse(SourceToParse.source("test", "type", "1",
+        ParsedDocument doc = docMapper.parse(new SourceToParse("test", "type", "1",
             BytesReference.bytes(XContentFactory.jsonBuilder()
                 .startObject()
                 .field("field", "value")
@@ -398,7 +398,7 @@ public void testMultipleLevelsIncludeRoot1() throws Exception {
         DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser()
             .parse("type", new CompressedXContent(mapping));
 
-        ParsedDocument doc = docMapper.parse(SourceToParse.source("test", "type", "1",
+        ParsedDocument doc = docMapper.parse(new SourceToParse("test", "type", "1",
             BytesReference.bytes(XContentFactory.jsonBuilder()
                 .startObject().startArray("nested1")
                 .startObject().startArray("nested2").startObject().field("foo", "bar")
@@ -432,7 +432,7 @@ public void testMultipleLevelsIncludeRoot2() throws Exception {
         DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser()
             .parse("type", new CompressedXContent(mapping));
 
-        ParsedDocument doc = docMapper.parse(SourceToParse.source("test", "type", "1",
+        ParsedDocument doc = docMapper.parse(new SourceToParse("test", "type", "1",
             BytesReference.bytes(XContentFactory.jsonBuilder()
                 .startObject().startArray("nested1")
                 .startObject().startArray("nested2")
@@ -460,7 +460,7 @@ public void testNestedArrayStrict() throws Exception {
         assertThat(nested1Mapper.nested().isNested(), equalTo(true));
         assertThat(nested1Mapper.dynamic(), equalTo(Dynamic.STRICT));
 
-        ParsedDocument doc = docMapper.parse(SourceToParse.source("test", "type", "1",
+        ParsedDocument doc = docMapper.parse(new SourceToParse("test", "type", "1",
             BytesReference.bytes(XContentFactory.jsonBuilder()
                 .startObject()
                 .field("field", "value")
@@ -568,7 +568,7 @@ public void testLimitNestedDocsDefaultSettings() throws Exception{
             docBuilder.endArray();
         }
         docBuilder.endObject();
-        SourceToParse source1 = SourceToParse.source("test1", "type", "1",
+        SourceToParse source1 = new SourceToParse("test1", "type", "1",
             BytesReference.bytes(docBuilder), XContentType.JSON);
         MapperParsingException e = expectThrows(MapperParsingException.class, () -> docMapper.parse(source1));
         assertEquals(
@@ -601,7 +601,7 @@ public void testLimitNestedDocs() throws Exception{
             docBuilder.endArray();
         }
         docBuilder.endObject();
-        SourceToParse source1 = SourceToParse.source("test1", "type", "1",
+        SourceToParse source1 = new SourceToParse("test1", "type", "1",
             BytesReference.bytes(docBuilder), XContentType.JSON);
         ParsedDocument doc = docMapper.parse(source1);
         assertThat(doc.docs().size(), equalTo(3));
@@ -619,7 +619,7 @@ public void testLimitNestedDocs() throws Exception{
             docBuilder2.endArray();
         }
         docBuilder2.endObject();
-        SourceToParse source2 = SourceToParse.source("test1", "type", "2",
+        SourceToParse source2 = new SourceToParse("test1", "type", "2",
             BytesReference.bytes(docBuilder2), XContentType.JSON);
         MapperParsingException e = expectThrows(MapperParsingException.class, () -> docMapper.parse(source2));
         assertEquals(
@@ -657,7 +657,7 @@ public void testLimitNestedDocsMultipleNestedFields() throws Exception{
             docBuilder.endArray();
         }
         docBuilder.endObject();
-        SourceToParse source1 = SourceToParse.source("test1", "type", "1",
+        SourceToParse source1 = new SourceToParse("test1", "type", "1",
             BytesReference.bytes(docBuilder), XContentType.JSON);
         ParsedDocument doc = docMapper.parse(source1);
         assertThat(doc.docs().size(), equalTo(3));
@@ -680,7 +680,7 @@ public void testLimitNestedDocsMultipleNestedFields() throws Exception{
         }
         docBuilder2.endObject();
-        SourceToParse source2 = SourceToParse.source("test1", "type", "2",
+        SourceToParse source2 = new SourceToParse("test1", "type", "2",
             BytesReference.bytes(docBuilder2), XContentType.JSON);
         MapperParsingException e = expectThrows(MapperParsingException.class, () -> docMapper.parse(source2));
         assertEquals(
@@ -714,7 +714,7 @@ public void testReorderParentBWC() throws IOException {
         ObjectMapper nested1Mapper = docMapper.objectMappers().get("nested1");
         assertThat(nested1Mapper.nested().isNested(), equalTo(true));
 
-        ParsedDocument doc = docMapper.parse(SourceToParse.source("test", "type", "1",
+        ParsedDocument doc = docMapper.parse(new SourceToParse("test", "type", "1",
             BytesReference.bytes(XContentFactory.jsonBuilder()
                 .startObject()
                 .field("field", "value")
diff --git a/server/src/test/java/org/elasticsearch/index/mapper/NullValueObjectMappingTests.java b/server/src/test/java/org/elasticsearch/index/mapper/NullValueObjectMappingTests.java
index 550802f6c9dfd..85922fccff80a 100644
--- a/server/src/test/java/org/elasticsearch/index/mapper/NullValueObjectMappingTests.java
+++ b/server/src/test/java/org/elasticsearch/index/mapper/NullValueObjectMappingTests.java
@@ -39,7 +39,7 @@ public void testNullValueObject() throws IOException {
         DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser()
             .parse("type", new CompressedXContent(mapping));
 
-        ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1",
+        ParsedDocument doc = defaultMapper.parse(new SourceToParse("test", "type", "1",
             BytesReference.bytes(XContentFactory.jsonBuilder()
                 .startObject()
                 .startObject("obj1").endObject()
@@ -49,7 +49,7 @@ public void testNullValueObject() throws IOException {
 
         assertThat(doc.rootDoc().get("value1"), equalTo("test1"));
 
-        doc = defaultMapper.parse(SourceToParse.source("test", "type", "1",
+        doc = defaultMapper.parse(new SourceToParse("test", "type", "1",
             BytesReference.bytes(XContentFactory.jsonBuilder()
                 .startObject()
                 .nullField("obj1")
@@ -59,7 +59,7 @@ public void testNullValueObject() throws IOException {
 
         assertThat(doc.rootDoc().get("value1"), equalTo("test1"));
 
-        doc = defaultMapper.parse(SourceToParse.source("test", "type", "1",
+        doc = defaultMapper.parse(new SourceToParse("test", "type", "1",
             BytesReference.bytes(XContentFactory.jsonBuilder()
                 .startObject()
                 .startObject("obj1").field("field", "value").endObject()
diff --git a/server/src/test/java/org/elasticsearch/index/mapper/NumberFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/NumberFieldMapperTests.java
index 8b8e174dba83c..ba7f5d846840a 100644
--- a/server/src/test/java/org/elasticsearch/index/mapper/NumberFieldMapperTests.java
+++ b/server/src/test/java/org/elasticsearch/index/mapper/NumberFieldMapperTests.java
@@ -56,7 +56,7 @@ public void doTestDefaults(String type) throws Exception {
 
         assertEquals(mapping, mapper.mappingSource().toString());
 
-        ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference
+        ParsedDocument doc = mapper.parse(new SourceToParse("test", "type", "1", BytesReference
             .bytes(XContentFactory.jsonBuilder()
                 .startObject()
                 .field("field", 123)
@@ -84,7 +84,7 @@ public void doTestNotIndexed(String type) throws Exception {
 
         assertEquals(mapping, mapper.mappingSource().toString());
 
-        ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference
+        ParsedDocument doc = mapper.parse(new SourceToParse("test", "type", "1", BytesReference
             .bytes(XContentFactory.jsonBuilder()
                 .startObject()
                 .field("field", 123)
@@ -107,7 +107,7 @@ public void doTestNoDocValues(String type) throws Exception {
 
         assertEquals(mapping, mapper.mappingSource().toString());
 
-        ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference
+        ParsedDocument doc = mapper.parse(new SourceToParse("test", "type", "1", BytesReference
             .bytes(XContentFactory.jsonBuilder()
                 .startObject()
                 .field("field", 123)
@@ -131,7 +131,7 @@ public void doTestStore(String type) throws Exception {
 
         assertEquals(mapping, mapper.mappingSource().toString());
 
-        ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference
+        ParsedDocument doc = mapper.parse(new SourceToParse("test", "type", "1", BytesReference
             .bytes(XContentFactory.jsonBuilder()
                 .startObject()
                 .field("field", 123)
@@ -160,7 +160,7 @@ public void doTestCoerce(String type) throws IOException {
 
         assertEquals(mapping, mapper.mappingSource().toString());
 
-        ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference
+        ParsedDocument doc = mapper.parse(new SourceToParse("test", "type", "1", BytesReference
             .bytes(XContentFactory.jsonBuilder()
                 .startObject()
                 .field("field", "123")
@@ -183,7 +183,7 @@ public void doTestCoerce(String type) throws IOException {
 
         assertEquals(mapping, mapper2.mappingSource().toString());
 
-        ThrowingRunnable runnable = () -> mapper2.parse(SourceToParse.source("test", "type", "1", BytesReference
+        ThrowingRunnable runnable = () -> mapper2.parse(new SourceToParse("test", "type", "1", BytesReference
             .bytes(XContentFactory.jsonBuilder()
                 .startObject()
                 .field("field", "123")
@@ -203,7 +203,7 @@ protected void doTestDecimalCoerce(String type) throws IOException {
 
         assertEquals(mapping, mapper.mappingSource().toString());
 
-        ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference
+        ParsedDocument doc = mapper.parse(new SourceToParse("test", "type", "1", BytesReference
             .bytes(XContentFactory.jsonBuilder()
                 .startObject()
                 .field("field", "7.89")
@@ -230,7 +230,7 @@ private void doTestIgnoreMalformed(String type) throws IOException {
 
         assertEquals(mapping, mapper.mappingSource().toString());
 
-        ThrowingRunnable runnable = () -> mapper.parse(SourceToParse.source("test", "type", "1", BytesReference
+        ThrowingRunnable runnable = () -> mapper.parse(new SourceToParse("test", "type", "1", BytesReference
             .bytes(XContentFactory.jsonBuilder()
                 .startObject()
                 .field("field", "a")
@@ -246,7 +246,7 @@ private void doTestIgnoreMalformed(String type) throws IOException {
 
         DocumentMapper mapper2 = parser.parse("type", new CompressedXContent(mapping));
 
-        ParsedDocument doc = mapper2.parse(SourceToParse.source("test", "type", "1", BytesReference
+        ParsedDocument doc = mapper2.parse(new SourceToParse("test", "type", "1", BytesReference
             .bytes(XContentFactory.jsonBuilder()
                 .startObject()
                 .field("field", "a")
@@ -308,7 +308,7 @@ protected void doTestNullValue(String type) throws IOException {
         DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));
         assertEquals(mapping, mapper.mappingSource().toString());
 
-        ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference
+        ParsedDocument doc = mapper.parse(new SourceToParse("test", "type", "1", BytesReference
             .bytes(XContentFactory.jsonBuilder()
                 .startObject()
                 .nullField("field")
@@ -335,7 +335,7 @@ protected void doTestNullValue(String type) throws IOException {
         mapper = parser.parse("type", new CompressedXContent(mapping));
         assertEquals(mapping, mapper.mappingSource().toString());
 
-        doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference
+        doc = mapper.parse(new SourceToParse("test", "type", "1", BytesReference
             .bytes(XContentFactory.jsonBuilder()
                 .startObject()
                 .nullField("field")
@@ -422,7 +422,7 @@ public void testOutOfRangeValues() throws IOException {
     }
 
     private void parseRequest(NumberType type, BytesReference content) throws IOException {
-        createDocumentMapper(type).parse(SourceToParse.source("test", "type", "1", content, XContentType.JSON));
+        createDocumentMapper(type).parse(new SourceToParse("test", "type", "1", content, XContentType.JSON));
     }
 
     private DocumentMapper createDocumentMapper(NumberType type) throws IOException {
diff --git a/server/src/test/java/org/elasticsearch/index/mapper/ObjectMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/ObjectMapperTests.java
index 676cefda36559..db3feee39e04d 100644
--- a/server/src/test/java/org/elasticsearch/index/mapper/ObjectMapperTests.java
+++ b/server/src/test/java/org/elasticsearch/index/mapper/ObjectMapperTests.java
@@ -43,7 +43,7 @@ public void testDifferentInnerObjectTokenFailure() throws Exception {
         DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser()
             .parse("type", new CompressedXContent(mapping));
         IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> {
-            defaultMapper.parse(SourceToParse.source("test", "type", "1", new BytesArray(" {\n" +
+            defaultMapper.parse(new SourceToParse("test", "type", "1", new BytesArray(" {\n" +
                 " \"object\": {\n" +
                 " \"array\":[\n" +
                 " {\n" +
diff --git a/server/src/test/java/org/elasticsearch/index/mapper/PathMatchDynamicTemplateTests.java b/server/src/test/java/org/elasticsearch/index/mapper/PathMatchDynamicTemplateTests.java
index 9546fb5136e31..31cfd5e82ee31 100644
--- a/server/src/test/java/org/elasticsearch/index/mapper/PathMatchDynamicTemplateTests.java
+++ b/server/src/test/java/org/elasticsearch/index/mapper/PathMatchDynamicTemplateTests.java
@@ -40,7 +40,7 @@ public void testSimple() throws Exception {
         byte[] json = copyToBytesFromClasspath("/org/elasticsearch/index/mapper/dynamictemplate/pathmatch/test-data.json");
 
         ParsedDocument parsedDoc = mapperService.documentMapper().parse(
-            SourceToParse.source("test", "person", "1", new BytesArray(json), XContentType.JSON));
+            new SourceToParse("test", "person", "1", new BytesArray(json), XContentType.JSON));
         client().admin().indices().preparePutMapping("test").setType("person")
             .setSource(parsedDoc.dynamicMappingsUpdate().toString(), XContentType.JSON).get();
         Document doc = parsedDoc.rootDoc();
diff --git a/server/src/test/java/org/elasticsearch/index/mapper/RangeFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/RangeFieldMapperTests.java
index 1f8b0b58af813..fcb78f66add5e 100644
--- a/server/src/test/java/org/elasticsearch/index/mapper/RangeFieldMapperTests.java
+++ b/server/src/test/java/org/elasticsearch/index/mapper/RangeFieldMapperTests.java
@@ -121,7 +121,7 @@ public void doTestDefaults(String type) throws Exception {
         DocumentMapper mapper = parser.parse("type", new CompressedXContent(Strings.toString(mapping)));
         assertEquals(Strings.toString(mapping), mapper.mappingSource().toString());
 
-        ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference.bytes(XContentFactory.jsonBuilder()
+        ParsedDocument doc = mapper.parse(new SourceToParse("test", "type", "1", BytesReference.bytes(XContentFactory.jsonBuilder()
             .startObject()
             .startObject("field")
             .field(getFromField(), getFrom(type))
@@ -152,7 +152,7 @@ protected void doTestNotIndexed(String type) throws Exception {
         DocumentMapper mapper = parser.parse("type", new CompressedXContent(Strings.toString(mapping)));
         assertEquals(Strings.toString(mapping), mapper.mappingSource().toString());
 
-        ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference.bytes(XContentFactory.jsonBuilder()
+        ParsedDocument doc = mapper.parse(new SourceToParse("test", "type", "1", BytesReference.bytes(XContentFactory.jsonBuilder()
             .startObject()
             .startObject("field")
             .field(getFromField(), getFrom(type))
@@ -176,7 +176,7 @@ protected void doTestNoDocValues(String type) throws Exception {
         DocumentMapper mapper = parser.parse("type", new CompressedXContent(Strings.toString(mapping)));
         assertEquals(Strings.toString(mapping), mapper.mappingSource().toString());
 
-        ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference.bytes(XContentFactory.jsonBuilder()
+        ParsedDocument doc = mapper.parse(new SourceToParse("test", "type", "1", BytesReference.bytes(XContentFactory.jsonBuilder()
             .startObject()
            .startObject("field")
             .field(getFromField(), getFrom(type))
@@ -202,7 +202,7 @@ protected void doTestStore(String type) throws Exception {
         DocumentMapper mapper = parser.parse("type", new CompressedXContent(Strings.toString(mapping)));
         assertEquals(Strings.toString(mapping), mapper.mappingSource().toString());
 
-        ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference.bytes(XContentFactory.jsonBuilder()
+        ParsedDocument doc = mapper.parse(new SourceToParse("test", "type", "1", BytesReference.bytes(XContentFactory.jsonBuilder()
             .startObject()
             .startObject("field")
             .field(getFromField(), getFrom(type))
@@ -241,7 +241,7 @@ public void doTestCoerce(String type) throws IOException {
 
         assertEquals(Strings.toString(mapping), mapper.mappingSource().toString());
 
-        ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference.bytes(XContentFactory.jsonBuilder()
+        ParsedDocument doc = mapper.parse(new SourceToParse("test", "type", "1", BytesReference.bytes(XContentFactory.jsonBuilder()
             .startObject()
             .startObject("field")
             .field(getFromField(), getFrom(type))
@@ -267,7 +267,7 @@ public void doTestCoerce(String type) throws IOException {
         assertEquals(Strings.toString(mapping), mapper2.mappingSource().toString());
 
         ThrowingRunnable runnable = () -> mapper2
-            .parse(SourceToParse.source(
+            .parse(new SourceToParse(
                 "test", "type", "1", BytesReference.bytes(XContentFactory.jsonBuilder().startObject().startObject("field")
                     .field(getFromField(), "5.2").field(getToField(), "10").endObject().endObject()),
                 XContentType.JSON));
@@ -287,7 +287,7 @@ protected void doTestDecimalCoerce(String type) throws IOException {
 
         assertEquals(Strings.toString(mapping), mapper.mappingSource().toString());
 
-        ParsedDocument doc1 = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference.bytes(XContentFactory.jsonBuilder()
+        ParsedDocument doc1 = mapper.parse(new SourceToParse("test", "type", "1", BytesReference.bytes(XContentFactory.jsonBuilder()
             .startObject()
             .startObject("field")
             .field(GT_FIELD.getPreferredName(), "2.34")
@@ -296,7 +296,7 @@ protected void doTestDecimalCoerce(String type) throws IOException {
             .endObject()),
             XContentType.JSON));
 
-        ParsedDocument doc2 = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference.bytes(XContentFactory.jsonBuilder()
+        ParsedDocument doc2 = mapper.parse(new SourceToParse("test", "type", "1", BytesReference.bytes(XContentFactory.jsonBuilder()
             .startObject()
             .startObject("field")
             .field(GT_FIELD.getPreferredName(), "2")
@@ -324,7 +324,7 @@ protected void doTestNullValue(String type) throws IOException {
         assertEquals(Strings.toString(mapping), mapper.mappingSource().toString());
 
         // test null value for min and max
-        ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference.bytes(XContentFactory.jsonBuilder()
+        ParsedDocument doc = mapper.parse(new SourceToParse("test", "type", "1", BytesReference.bytes(XContentFactory.jsonBuilder()
             .startObject()
             .startObject("field")
             .nullField(getFromField())
@@ -339,7 +339,7 @@ protected void doTestNullValue(String type) throws IOException {
         assertThat(storedField.stringValue(), containsString(expected));
 
         // test null max value
-        doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference.bytes(XContentFactory.jsonBuilder()
+        doc = mapper.parse(new SourceToParse("test", "type", "1", BytesReference.bytes(XContentFactory.jsonBuilder()
             .startObject()
             .startObject("field")
             .field(getFromField(), getFrom(type))
@@ -367,7 +367,7 @@ protected void doTestNullValue(String type) throws IOException {
         assertThat(storedField.stringValue(), containsString(strVal));
 
         // test null range
-        doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference
+        doc = mapper.parse(new SourceToParse("test", "type", "1", BytesReference
             .bytes(XContentFactory.jsonBuilder()
                 .startObject()
                 .nullField("field")
@@ -394,7 +394,7 @@ public void doTestNoBounds(String type) throws IOException {
         assertEquals(Strings.toString(mapping), mapper.mappingSource().toString());
 
         // test no bounds specified
-        ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference.bytes(XContentFactory.jsonBuilder()
+        ParsedDocument doc = mapper.parse(new SourceToParse("test", "type", "1", BytesReference.bytes(XContentFactory.jsonBuilder()
             .startObject()
             .startObject("field")
             .endObject()
diff --git a/server/src/test/java/org/elasticsearch/index/mapper/RoutingFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/RoutingFieldMapperTests.java
index 11c858545d03f..3da00e3f787c9 100644
--- a/server/src/test/java/org/elasticsearch/index/mapper/RoutingFieldMapperTests.java
+++ b/server/src/test/java/org/elasticsearch/index/mapper/RoutingFieldMapperTests.java
@@ -37,12 +37,12 @@ public void testRoutingMapper() throws Exception {
         DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser()
             .parse("type", new CompressedXContent(mapping));
 
-        ParsedDocument doc = docMapper.parse(SourceToParse.source("test", "type", "1", BytesReference
+        ParsedDocument doc = docMapper.parse(new SourceToParse("test", "type", "1", BytesReference
             .bytes(XContentFactory.jsonBuilder()
                 .startObject()
                 .field("field", "value")
                 .endObject()),
-            XContentType.JSON).routing("routing_value"));
+            XContentType.JSON, "routing_value"));
 
         assertThat(doc.rootDoc().get("_routing"), equalTo("routing_value"));
         assertThat(doc.rootDoc().get("field"), equalTo("value"));
@@ -54,7 +54,7 @@ public void testIncludeInObjectNotAllowed() throws Exception {
             .parse("type", new CompressedXContent(mapping));
 
         try {
-            docMapper.parse(SourceToParse.source("test", "type", "1", BytesReference.bytes(XContentFactory.jsonBuilder()
+            docMapper.parse(new SourceToParse("test", "type", "1", BytesReference.bytes(XContentFactory.jsonBuilder()
                 .startObject().field("_routing", "foo").endObject()),XContentType.JSON));
             fail("Expected failure to parse metadata field");
         } catch (MapperParsingException e) {
diff --git a/server/src/test/java/org/elasticsearch/index/mapper/SourceFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/SourceFieldMapperTests.java
index baccb4f474ecc..a8db41e677b95 100644
--- a/server/src/test/java/org/elasticsearch/index/mapper/SourceFieldMapperTests.java
+++ b/server/src/test/java/org/elasticsearch/index/mapper/SourceFieldMapperTests.java
@@ -54,7 +54,7 @@ public void testNoFormat() throws Exception {
         DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser();
         DocumentMapper documentMapper = parser.parse("type", new CompressedXContent(mapping));
 
-        ParsedDocument doc = documentMapper.parse(SourceToParse.source("test", "type", "1",
+        ParsedDocument doc = documentMapper.parse(new SourceToParse("test", "type", "1",
             BytesReference.bytes(XContentFactory.jsonBuilder().startObject()
                 .field("field", "value")
                 .endObject()),
@@ -63,7 +63,7 @@ public void testNoFormat() throws Exception {
         assertThat(XContentFactory.xContentType(doc.source().toBytesRef().bytes), equalTo(XContentType.JSON));
 
         documentMapper = parser.parse("type", new CompressedXContent(mapping));
-        doc = documentMapper.parse(SourceToParse.source("test", "type", "1",
+        doc = documentMapper.parse(new SourceToParse("test", "type", "1",
             BytesReference.bytes(XContentFactory.smileBuilder().startObject()
                 .field("field", "value")
                 .endObject()),
@@ -80,7 +80,7 @@ public void testIncludes() throws Exception {
         DocumentMapper documentMapper = createIndex("test").mapperService().documentMapperParser()
             .parse("type", new CompressedXContent(mapping));
 
-        ParsedDocument doc = documentMapper.parse(SourceToParse.source("test", "type", "1",
+        ParsedDocument doc = documentMapper.parse(new SourceToParse("test", "type", "1",
             BytesReference.bytes(XContentFactory.jsonBuilder().startObject()
                 .startObject("path1").field("field1", "value1").endObject()
                 .startObject("path2").field("field2", "value2").endObject()
@@ -104,7 +104,7 @@ public void testExcludes() throws Exception {
         DocumentMapper documentMapper = createIndex("test").mapperService().documentMapperParser()
             .parse("type", new CompressedXContent(mapping));
 
-        ParsedDocument doc = documentMapper.parse(SourceToParse.source("test", "type", "1",
+        ParsedDocument doc = documentMapper.parse(new SourceToParse("test", "type", "1",
             BytesReference.bytes(XContentFactory.jsonBuilder().startObject()
                 .startObject("path1").field("field1", "value1").endObject()
                 .startObject("path2").field("field2", "value2").endObject()
@@ -216,7 +216,7 @@ public void testSourceObjectContainsExtraTokens() throws Exception {
             .parse("type", new CompressedXContent(mapping));
 
         try {
-            documentMapper.parse(SourceToParse.source("test", "type", "1",
+            documentMapper.parse(new SourceToParse("test", "type", "1",
                 new BytesArray("{}}"), XContentType.JSON)); // extra end object (invalid JSON)
             fail("Expected parse exception");
         } catch (MapperParsingException e) {
diff --git a/server/src/test/java/org/elasticsearch/index/mapper/StoredNumericValuesTests.java b/server/src/test/java/org/elasticsearch/index/mapper/StoredNumericValuesTests.java
index a07192df8047f..1b279f4d26866 100644
--- a/server/src/test/java/org/elasticsearch/index/mapper/StoredNumericValuesTests.java
+++ b/server/src/test/java/org/elasticsearch/index/mapper/StoredNumericValuesTests.java
@@ -64,7 +64,7 @@ public void testBytesAndNumericRepresentation() throws Exception {
         MapperService mapperService = createIndex("test").mapperService();
         DocumentMapper mapper = mapperService.merge("type", new CompressedXContent(mapping), MergeReason.MAPPING_UPDATE);
 
-        ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference
+        ParsedDocument doc = mapper.parse(new SourceToParse("test", "type", "1", BytesReference
             .bytes(XContentFactory.jsonBuilder()
                 .startObject()
                 .field("field1", 1)
diff --git a/server/src/test/java/org/elasticsearch/index/mapper/TextFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/TextFieldMapperTests.java
index f149620b0287b..acd6c9ee6f80b 100644
--- a/server/src/test/java/org/elasticsearch/index/mapper/TextFieldMapperTests.java
+++ b/server/src/test/java/org/elasticsearch/index/mapper/TextFieldMapperTests.java
@@ -103,7 +103,7 @@ public void testDefaults() throws IOException {
 
         assertEquals(mapping, mapper.mappingSource().toString());
 
-        ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference
+        ParsedDocument doc = mapper.parse(new SourceToParse("test", "type", "1", BytesReference
             .bytes(XContentFactory.jsonBuilder()
                 .startObject()
                 .field("field", "1234")
@@ -135,7 +135,7 @@ public void testEnableStore() throws IOException {
 
         assertEquals(mapping, mapper.mappingSource().toString());
 
-        ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference
+        ParsedDocument doc = mapper.parse(new SourceToParse("test", "type", "1", BytesReference
             .bytes(XContentFactory.jsonBuilder()
                 .startObject()
                 .field("field", "1234")
@@ -156,7 +156,7 @@ public void testDisableIndex() throws IOException {
 
         assertEquals(mapping, mapper.mappingSource().toString());
 
-        ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference
+        ParsedDocument doc = mapper.parse(new SourceToParse("test", "type", "1", BytesReference
            .bytes(XContentFactory.jsonBuilder()
                 .startObject()
                 .field("field", "1234")
@@ -179,7 +179,7 @@ public void testDisableNorms() throws IOException {
 
         assertEquals(mapping, mapper.mappingSource().toString());
 
-        ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference
+        ParsedDocument doc = mapper.parse(new SourceToParse("test", "type", "1", BytesReference
             .bytes(XContentFactory.jsonBuilder()
                 .startObject()
                 .field("field", "1234")
@@ -210,7 +210,7 @@ public void testIndexOptions() throws IOException {
         for (String option : supportedOptions.keySet()) {
             jsonDoc.field(option, "1234");
         }
-        ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference.bytes(jsonDoc.endObject()),
+        ParsedDocument doc = mapper.parse(new SourceToParse("test", "type", "1", BytesReference.bytes(jsonDoc.endObject()),
             XContentType.JSON));
 
         for (Map.Entry<String, IndexOptions> entry : supportedOptions.entrySet()) {
@@ -232,7 +232,7 @@ public void testDefaultPositionIncrementGap() throws IOException {
 
         assertEquals(mapping, mapper.mappingSource().toString());
 
-        SourceToParse sourceToParse = SourceToParse.source("test", "type", "1", BytesReference
+        SourceToParse sourceToParse = new SourceToParse("test", "type", "1", BytesReference
             .bytes(XContentFactory.jsonBuilder()
                 .startObject()
                 .array("field", new String[] {"a", "b"})
@@ -274,7 +274,7 @@ public void testPositionIncrementGap() throws IOException {
 
         assertEquals(mapping, mapper.mappingSource().toString());
 
-        SourceToParse sourceToParse = SourceToParse.source("test", "type", "1", BytesReference
+        SourceToParse sourceToParse = new SourceToParse("test", "type", "1", BytesReference
             .bytes(XContentFactory.jsonBuilder()
                 .startObject()
                 .array("field", new String[]{"a", "b"})
@@ -433,7 +433,7 @@ public void testTermVectors() throws IOException {
 
         DocumentMapper defaultMapper = parser.parse("type", new CompressedXContent(mapping));
 
-        ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1", BytesReference
+        ParsedDocument doc = defaultMapper.parse(new SourceToParse("test", "type", "1", BytesReference
             .bytes(XContentFactory.jsonBuilder()
                 .startObject()
                 .field("field1", "1234")
@@ -765,7 +765,7 @@ public void testFastPhraseMapping() throws IOException {
                 new Term("synfield._index_phrase", "motor dog")})
             .build()));
 
-        ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference
+        ParsedDocument doc = mapper.parse(new SourceToParse("test", "type", "1", BytesReference
             .bytes(XContentFactory.jsonBuilder()
                 .startObject()
                 .field("field", "Some English text that is going to be very useful")
@@ -830,7 +830,7 @@ public void testIndexPrefixMapping() throws IOException {
 
         assertThat(mapper.mappers().getMapper("field._index_prefix").toString(), containsString("prefixChars=2:10"));
 
-        ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference
+        ParsedDocument doc = mapper.parse(new SourceToParse("test", "type", "1", BytesReference
             .bytes(XContentFactory.jsonBuilder()
                 .startObject()
                 .field("field", "Some English text that is going to be very useful")
diff --git a/server/src/test/java/org/elasticsearch/index/mapper/TextFieldTypeTests.java b/server/src/test/java/org/elasticsearch/index/mapper/TextFieldTypeTests.java
index 2af659b6e20f0..da58907355202 100644
--- a/server/src/test/java/org/elasticsearch/index/mapper/TextFieldTypeTests.java
+++ b/server/src/test/java/org/elasticsearch/index/mapper/TextFieldTypeTests.java
@@ -21,6 +21,8 @@
 import org.apache.lucene.index.IndexOptions;
 import org.apache.lucene.index.Term;
 import org.apache.lucene.search.AutomatonQuery;
+import org.apache.lucene.search.BooleanClause;
+import org.apache.lucene.search.BooleanQuery;
 import org.apache.lucene.search.ConstantScoreQuery;
 import org.apache.lucene.search.FuzzyQuery;
 import org.apache.lucene.search.PrefixQuery;
@@ -40,6 +42,7 @@
 import java.util.List;
 
 import static org.apache.lucene.search.MultiTermQuery.CONSTANT_SCORE_REWRITE;
+import static org.hamcrest.Matchers.equalTo;
 
 public class TextFieldTypeTests extends FieldTypeTestCase {
     @Override
@@ -90,7 +93,7 @@ public void modify(MappedFieldType ft) {
                 TextFieldMapper.TextFieldType tft = (TextFieldMapper.TextFieldType)ft;
                 TextFieldMapper.PrefixFieldType pft = tft.getPrefixFieldType();
                 if (pft == null) {
-                    tft.setPrefixFieldType(new TextFieldMapper.PrefixFieldType(ft.name(), 3, 3));
+                    tft.setPrefixFieldType(new TextFieldMapper.PrefixFieldType(ft.name(), ft.name() + "._index_prefix", 3, 3));
                 }
                 else {
                     tft.setPrefixFieldType(null);
@@ -156,7 +159,7 @@ public void testFuzzyQuery() {
     public void testIndexPrefixes() {
         TextFieldMapper.TextFieldType ft = new TextFieldMapper.TextFieldType();
         ft.setName("field");
-        ft.setPrefixFieldType(new TextFieldMapper.PrefixFieldType("field._index_prefix", 2, 10));
+        ft.setPrefixFieldType(new TextFieldMapper.PrefixFieldType("field", "field._index_prefix", 2, 10));
 
         Query q = ft.prefixQuery("goin", CONSTANT_SCORE_REWRITE, null);
         assertEquals(new ConstantScoreQuery(new TermQuery(new Term("field._index_prefix", "goin"))), q);
@@ -167,6 +170,12 @@ public void testIndexPrefixes() {
         q = ft.prefixQuery("g", CONSTANT_SCORE_REWRITE, null);
         Automaton automaton = Operations.concatenate(Arrays.asList(Automata.makeChar('g'), Automata.makeAnyChar()));
-        assertEquals(new ConstantScoreQuery(new AutomatonQuery(new Term("field._index_prefix", "g*"), automaton)), q);
+
+        Query expected = new ConstantScoreQuery(new BooleanQuery.Builder()
+            .add(new AutomatonQuery(new Term("field._index_prefix", "g*"), automaton), BooleanClause.Occur.SHOULD)
+            .add(new TermQuery(new Term("field", "g")), BooleanClause.Occur.SHOULD)
+            .build());
+
+        assertThat(q, equalTo(expected));
     }
 }
diff --git a/server/src/test/java/org/elasticsearch/index/mapper/TypeFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/TypeFieldMapperTests.java
index 26f2b7fc9239e..c763040d01d60 100644
--- a/server/src/test/java/org/elasticsearch/index/mapper/TypeFieldMapperTests.java
+++ b/server/src/test/java/org/elasticsearch/index/mapper/TypeFieldMapperTests.java
@@ -59,7 +59,7 @@ public void testDocValuesSingleType() throws Exception {
     public static void testDocValues(Function<String, IndexService> createIndex) throws IOException {
         MapperService mapperService = createIndex.apply("test").mapperService();
         DocumentMapper mapper = mapperService.merge("type", new CompressedXContent("{\"type\":{}}"), MergeReason.MAPPING_UPDATE);
-        ParsedDocument document = mapper.parse(SourceToParse.source("index", "type", "id", new BytesArray("{}"), XContentType.JSON));
+        ParsedDocument document = mapper.parse(new SourceToParse("index", "type", "id", new BytesArray("{}"), XContentType.JSON));
 
         Directory dir = newDirectory();
         IndexWriter w = new IndexWriter(dir, newIndexWriterConfig());
@@ -84,7 +84,7 @@ public void testDefaults() throws IOException {
         Settings indexSettings = Settings.EMPTY;
         MapperService mapperService = createIndex("test", indexSettings).mapperService();
         DocumentMapper mapper = mapperService.merge("type", new CompressedXContent("{\"type\":{}}"), MergeReason.MAPPING_UPDATE);
-        ParsedDocument document = mapper.parse(SourceToParse.source("index", "type", "id", new BytesArray("{}"), XContentType.JSON));
+        ParsedDocument document = mapper.parse(new SourceToParse("index", "type", "id", new BytesArray("{}"), XContentType.JSON));
         assertEquals(Collections.emptyList(), Arrays.asList(document.rootDoc().getFields(TypeFieldMapper.NAME)));
     }
 }
diff --git a/server/src/test/java/org/elasticsearch/index/query/GeoShapeQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/GeoShapeQueryBuilderTests.java
index 66f955dac7b00..22f9705dcc5f9 100644
--- a/server/src/test/java/org/elasticsearch/index/query/GeoShapeQueryBuilderTests.java
+++ b/server/src/test/java/org/elasticsearch/index/query/GeoShapeQueryBuilderTests.java
@@ -36,8 +36,10 @@
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentFactory;
+import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.common.xcontent.json.JsonXContent;
 import org.elasticsearch.index.get.GetResult;
+import org.elasticsearch.index.mapper.MapperService;
 import org.elasticsearch.search.internal.SearchContext;
 import org.elasticsearch.test.AbstractQueryTestCase;
 import org.elasticsearch.test.VersionUtils;
@@ -94,7 +96,7 @@ protected GeoShapeQueryBuilder doCreateTestQueryBuilder(boolean indexedShape) {
         } else {
             indexedShapeToReturn = shape;
             indexedShapeId = randomAlphaOfLengthBetween(3, 20);
-            indexedShapeType = randomAlphaOfLengthBetween(3, 20);
+            indexedShapeType = randomBoolean() ? randomAlphaOfLengthBetween(3, 20) : null;
             builder = new GeoShapeQueryBuilder(fieldName(), indexedShapeId, indexedShapeType);
             if (randomBoolean()) {
                 indexedShapeIndex = randomAlphaOfLengthBetween(3, 20);
@@ -126,15 +128,17 @@ protected GeoShapeQueryBuilder doCreateTestQueryBuilder(boolean indexedShape) {
 
     @Override
     protected GetResponse executeGet(GetRequest getRequest) {
+        String indexedType = indexedShapeType != null ? indexedShapeType : MapperService.SINGLE_MAPPING_NAME;
+
         assertThat(indexedShapeToReturn, notNullValue());
         assertThat(indexedShapeId, notNullValue());
-        assertThat(indexedShapeType, notNullValue());
         assertThat(getRequest.id(), equalTo(indexedShapeId));
-        assertThat(getRequest.type(), equalTo(indexedShapeType));
+        assertThat(getRequest.type(), equalTo(indexedType));
         assertThat(getRequest.routing(), equalTo(indexedShapeRouting));
         String expectedShapeIndex = indexedShapeIndex == null ? GeoShapeQueryBuilder.DEFAULT_SHAPE_INDEX_NAME : indexedShapeIndex;
         assertThat(getRequest.index(), equalTo(expectedShapeIndex));
         String expectedShapePath = indexedShapePath == null ? GeoShapeQueryBuilder.DEFAULT_SHAPE_FIELD_NAME : indexedShapePath;
+
         String json;
         try {
             XContentBuilder builder = XContentFactory.jsonBuilder().prettyPrint();
@@ -146,7 +150,7 @@ protected GetResponse executeGet(GetRequest getRequest) {
         } catch (IOException ex) {
             throw new ElasticsearchException("boom", ex);
         }
-        return new GetResponse(new GetResult(indexedShapeIndex, indexedShapeType, indexedShapeId, 0, 1, 0, true, new BytesArray(json),
+        return new GetResponse(new GetResult(indexedShapeIndex, indexedType, indexedShapeId, 0, 1, 0, true, new BytesArray(json),
             null));
     }
 
@@ -176,19 +180,13 @@ public void testNoFieldName() throws Exception {
     }
 
     public void testNoShape() throws IOException {
-        expectThrows(IllegalArgumentException.class, () -> new GeoShapeQueryBuilder(fieldName(), null));
+        expectThrows(IllegalArgumentException.class, () -> new GeoShapeQueryBuilder(fieldName(), (ShapeBuilder) null));
     }
 
     public void testNoIndexedShape() throws IOException {
         IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
             () -> new GeoShapeQueryBuilder(fieldName(), null, "type"));
-        assertEquals("either shapeBytes or indexedShapeId and indexedShapeType are required", e.getMessage());
-    }
-
-    public void testNoIndexedShapeType() throws IOException {
-        IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
-            () -> new GeoShapeQueryBuilder(fieldName(), "id", null));
-        assertEquals("indexedShapeType is required if indexedShapeId is specified", e.getMessage());
+        assertEquals("either shape or indexedShapeId is required", e.getMessage());
     }
 
     public void testNoRelation() throws IOException {
@@ -286,4 +284,16 @@ public void testSerializationFailsUnlessFetched() throws IOException {
         builder = rewriteAndFetch(builder, createShardContext());
         builder.writeTo(new BytesStreamOutput(10));
     }
+
+    @Override
+    protected QueryBuilder parseQuery(XContentParser parser) throws IOException {
+        QueryBuilder query = super.parseQuery(parser);
+        assertThat(query, instanceOf(GeoShapeQueryBuilder.class));
+
+        GeoShapeQueryBuilder shapeQuery = (GeoShapeQueryBuilder) query;
+        if (shapeQuery.indexedShapeType() != null) {
+            assertWarnings(GeoShapeQueryBuilder.TYPES_DEPRECATION_MESSAGE);
+        }
+        return
query; + } } diff --git a/server/src/test/java/org/elasticsearch/index/query/IdsQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/IdsQueryBuilderTests.java index c146df73019c2..2aed8202dd698 100644 --- a/server/src/test/java/org/elasticsearch/index/query/IdsQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/IdsQueryBuilderTests.java @@ -25,6 +25,7 @@ import org.apache.lucene.search.TermInSetQuery; import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.common.ParsingException; +import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.index.mapper.IdFieldMapper; import org.elasticsearch.search.internal.SearchContext; import org.elasticsearch.test.AbstractQueryTestCase; @@ -48,7 +49,7 @@ protected IdsQueryBuilder doCreateTestQueryBuilder() { type = randomAlphaOfLengthBetween(1, 10); } } else if (randomBoolean()) { - type = MetaData.ALL; + type = MetaData.ALL; } else { type = null; } @@ -152,4 +153,16 @@ public void testFromJson() throws IOException { assertThat(parsed.ids(), contains("1","100","4")); assertEquals(json, 0, parsed.types().length); } + + @Override + protected QueryBuilder parseQuery(XContentParser parser) throws IOException { + QueryBuilder query = super.parseQuery(parser); + assertThat(query, instanceOf(IdsQueryBuilder.class)); + + IdsQueryBuilder idsQuery = (IdsQueryBuilder) query; + if (idsQuery.types().length > 0) { + assertWarnings(IdsQueryBuilder.TYPES_DEPRECATION_MESSAGE); + } + return query; + } } diff --git a/server/src/test/java/org/elasticsearch/index/query/LegacyGeoShapeFieldQueryTests.java b/server/src/test/java/org/elasticsearch/index/query/LegacyGeoShapeFieldQueryTests.java index f549d17977dc1..2dcf3245dfe15 100644 --- a/server/src/test/java/org/elasticsearch/index/query/LegacyGeoShapeFieldQueryTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/LegacyGeoShapeFieldQueryTests.java @@ -59,8 +59,7 @@ protected GeoShapeQueryBuilder doCreateTestQueryBuilder(boolean indexedShape) { } else { indexedShapeToReturn = shape; indexedShapeId = randomAlphaOfLengthBetween(3, 20); - indexedShapeType = randomAlphaOfLengthBetween(3, 20); - builder = new GeoShapeQueryBuilder(fieldName(), indexedShapeId, indexedShapeType); + builder = new GeoShapeQueryBuilder(fieldName(), indexedShapeId); if (randomBoolean()) { indexedShapeIndex = randomAlphaOfLengthBetween(3, 20); builder.indexedShapeIndex(indexedShapeIndex); diff --git a/server/src/test/java/org/elasticsearch/index/query/MoreLikeThisQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/MoreLikeThisQueryBuilderTests.java index 5615944219c91..62613139b50fd 100644 --- a/server/src/test/java/org/elasticsearch/index/query/MoreLikeThisQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/MoreLikeThisQueryBuilderTests.java @@ -93,14 +93,20 @@ private static String[] randomStringFields() { private Item generateRandomItem() { String index = randomBoolean() ? getIndex().getName() : null; - String type = "doc"; // indexed item or artificial document Item item; + if (randomBoolean()) { - item = new Item(index, type, randomAlphaOfLength(10)); + item = randomBoolean() + ? new Item(index, randomAlphaOfLength(10)) + : new Item(index, randomArtificialDoc()); } else { - item = new Item(index, type, randomArtificialDoc()); + String type = "doc"; + item = randomBoolean() + ? 
new Item(index, type, randomAlphaOfLength(10)) + : new Item(index, type, randomArtificialDoc()); } + // if no field is specified MLT uses all mapped fields for this item if (randomBoolean()) { item.fields(randomFrom(randomFields)); @@ -334,7 +340,7 @@ public void testItemFromXContent() throws IOException { } @Override - protected boolean isCachable(MoreLikeThisQueryBuilder queryBuilder) { + protected boolean isCacheable(MoreLikeThisQueryBuilder queryBuilder) { return queryBuilder.likeItems().length == 0; // items are always fetched } @@ -372,4 +378,16 @@ public void testFromJson() throws IOException { assertEquals(json, 2, parsed.fields().length); assertEquals(json, "and potentially some more text here as well", parsed.likeTexts()[0]); } + + @Override + protected QueryBuilder parseQuery(XContentParser parser) throws IOException { + QueryBuilder query = super.parseQuery(parser); + assertThat(query, instanceOf(MoreLikeThisQueryBuilder.class)); + + MoreLikeThisQueryBuilder mltQuery = (MoreLikeThisQueryBuilder) query; + if (mltQuery.isTypeless() == false) { + assertWarnings(MoreLikeThisQueryBuilder.TYPES_DEPRECATION_MESSAGE); + } + return query; + } } diff --git a/server/src/test/java/org/elasticsearch/index/query/QueryStringQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/QueryStringQueryBuilderTests.java index 70f504516ec8a..baa0fed01bbf0 100644 --- a/server/src/test/java/org/elasticsearch/index/query/QueryStringQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/QueryStringQueryBuilderTests.java @@ -22,6 +22,7 @@ import org.apache.lucene.analysis.MockSynonymAnalyzer; import org.apache.lucene.index.Term; import org.apache.lucene.queries.BlendedTermQuery; +import org.apache.lucene.search.AutomatonQuery; import org.apache.lucene.search.BooleanClause; import org.apache.lucene.search.BooleanClause.Occur; import org.apache.lucene.search.BooleanQuery; @@ -45,6 +46,9 @@ import org.apache.lucene.search.spans.SpanOrQuery; import org.apache.lucene.search.spans.SpanTermQuery; import org.apache.lucene.util.BytesRef; +import org.apache.lucene.util.automaton.Automata; +import org.apache.lucene.util.automaton.Automaton; +import org.apache.lucene.util.automaton.Operations; import org.apache.lucene.util.automaton.TooComplexToDeterminizeException; import org.elasticsearch.Version; import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest; @@ -69,6 +73,7 @@ import java.util.List; import java.util.Map; +import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; import static org.elasticsearch.index.query.AbstractQueryBuilder.parseInnerQueryBuilder; import static org.elasticsearch.index.query.QueryBuilders.queryStringQuery; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertBooleanSubQuery; @@ -78,6 +83,20 @@ import static org.hamcrest.Matchers.instanceOf; public class QueryStringQueryBuilderTests extends AbstractQueryTestCase { + + @Override + protected void initializeAdditionalMappings(MapperService mapperService) throws IOException { + XContentBuilder mapping = jsonBuilder().startObject().startObject("_doc").startObject("properties") + .startObject("prefix_field") + .field("type", "text") + .startObject("index_prefixes").endObject() + .endObject() + .endObject().endObject().endObject(); + + mapperService.merge("_doc", + new CompressedXContent(Strings.toString(mapping)), MapperService.MergeReason.MAPPING_UPDATE); + } + @Override protected QueryStringQueryBuilder doCreateTestQueryBuilder() { int 
numTerms = randomIntBetween(0, 5); @@ -535,6 +554,21 @@ public void testToQueryWildcardQuery() throws Exception { } } + public void testToQueryWildcardWithIndexedPrefixes() throws Exception { + QueryStringQueryParser queryParser = new QueryStringQueryParser(createShardContext(), "prefix_field"); + Query query = queryParser.parse("foo*"); + Query expectedQuery = new ConstantScoreQuery(new TermQuery(new Term("prefix_field._index_prefix", "foo"))); + assertThat(query, equalTo(expectedQuery)); + + query = queryParser.parse("g*"); + Automaton a = Operations.concatenate(Arrays.asList(Automata.makeChar('g'), Automata.makeAnyChar())); + expectedQuery = new ConstantScoreQuery(new BooleanQuery.Builder() + .add(new AutomatonQuery(new Term("prefix_field._index_prefix", "g*"), a), Occur.SHOULD) + .add(new TermQuery(new Term("prefix_field", "g")), Occur.SHOULD) + .build()); + assertThat(query, equalTo(expectedQuery)); + } + public void testToQueryWilcardQueryWithSynonyms() throws Exception { for (Operator op : Operator.values()) { BooleanClause.Occur defaultOp = op.toBooleanClauseOccur(); diff --git a/server/src/test/java/org/elasticsearch/index/query/RandomQueryBuilder.java b/server/src/test/java/org/elasticsearch/index/query/RandomQueryBuilder.java index ecd767b9d657f..04d2d2c347bbf 100644 --- a/server/src/test/java/org/elasticsearch/index/query/RandomQueryBuilder.java +++ b/server/src/test/java/org/elasticsearch/index/query/RandomQueryBuilder.java @@ -21,6 +21,7 @@ import com.carrotsearch.randomizedtesting.generators.RandomNumbers; import com.carrotsearch.randomizedtesting.generators.RandomStrings; +import org.elasticsearch.common.Strings; import java.util.Random; @@ -47,7 +48,9 @@ public static QueryBuilder createQuery(Random r) { case 1: return new TermQueryBuilderTests().createTestQueryBuilder(); case 2: - return new IdsQueryBuilderTests().createTestQueryBuilder(); + // We make sure this query has no types to avoid deprecation warnings in the + // tests that use this method. 
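+ // (Strings.EMPTY_ARRAY is the shared empty String[] constant from org.elasticsearch.common.Strings, imported above; with the types cleared, IdsQueryBuilderTests#parseQuery does not expect the types deprecation warning for the returned query.)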
+ return new IdsQueryBuilderTests().createTestQueryBuilder().types(Strings.EMPTY_ARRAY); case 3: return createMultiTermQuery(r); default: diff --git a/server/src/test/java/org/elasticsearch/index/query/ScriptQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/ScriptQueryBuilderTests.java index 0252468e717dc..b0bbca3266bab 100644 --- a/server/src/test/java/org/elasticsearch/index/query/ScriptQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/ScriptQueryBuilderTests.java @@ -117,7 +117,7 @@ protected Set getObjectsHoldingArbitraryContent() { } @Override - protected boolean isCachable(ScriptQueryBuilder queryBuilder) { + protected boolean isCacheable(ScriptQueryBuilder queryBuilder) { return false; } } diff --git a/server/src/test/java/org/elasticsearch/index/query/ScriptScoreQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/ScriptScoreQueryBuilderTests.java index ef173883d0ac0..ad9af8c49c391 100644 --- a/server/src/test/java/org/elasticsearch/index/query/ScriptScoreQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/ScriptScoreQueryBuilderTests.java @@ -89,7 +89,7 @@ public void testIllegalArguments() { } @Override - protected boolean isCachable(ScriptScoreQueryBuilder queryBuilder) { + protected boolean isCacheable(ScriptScoreQueryBuilder queryBuilder) { return false; } } diff --git a/server/src/test/java/org/elasticsearch/index/query/TermsQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/TermsQueryBuilderTests.java index 02df22fd97efb..d1e0de67369dc 100644 --- a/server/src/test/java/org/elasticsearch/index/query/TermsQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/TermsQueryBuilderTests.java @@ -35,10 +35,12 @@ import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.index.get.GetResult; import org.elasticsearch.indices.TermsLookup; import org.elasticsearch.search.internal.SearchContext; import org.elasticsearch.test.AbstractQueryTestCase; +import org.hamcrest.CoreMatchers; import org.junit.Before; import java.io.IOException; @@ -97,8 +99,13 @@ protected TermsQueryBuilder doCreateTestQueryBuilder() { } private TermsLookup randomTermsLookup() { - return new TermsLookup(randomAlphaOfLength(10), randomAlphaOfLength(10), randomAlphaOfLength(10), - termsPath).routing(randomBoolean() ? randomAlphaOfLength(10) : null); + // Randomly choose between a typeless terms lookup and one with an explicit type to make sure we are + // testing both cases. + TermsLookup lookup = randomBoolean() + ? new TermsLookup(randomAlphaOfLength(10), randomAlphaOfLength(10), termsPath) + : new TermsLookup(randomAlphaOfLength(10), randomAlphaOfLength(10), randomAlphaOfLength(10), termsPath); + lookup.routing(randomBoolean() ? 
randomAlphaOfLength(10) : null); + return lookup; } @Override @@ -274,10 +281,10 @@ public void testGeo() throws Exception { } @Override - protected boolean isCachable(TermsQueryBuilder queryBuilder) { - // even though we use a terms lookup here we do this during rewrite and that means we are cachable on toQuery + protected boolean isCacheable(TermsQueryBuilder queryBuilder) { + // even though we use a terms lookup here we do this during rewrite and that means we are cacheable on toQuery // that's why we return true here all the time - return super.isCachable(queryBuilder); + return super.isCacheable(queryBuilder); } public void testSerializationFailsUnlessFetched() throws IOException { @@ -316,5 +323,16 @@ public void testTypeField() throws IOException { builder.doToQuery(createShardContext()); assertWarnings(QueryShardContext.TYPES_DEPRECATION_MESSAGE); } -} + @Override + protected QueryBuilder parseQuery(XContentParser parser) throws IOException { + QueryBuilder query = super.parseQuery(parser); + assertThat(query, CoreMatchers.instanceOf(TermsQueryBuilder.class)); + + TermsQueryBuilder termsQuery = (TermsQueryBuilder) query; + if (termsQuery.isTypeless() == false) { + assertWarnings(TermsQueryBuilder.TYPES_DEPRECATION_MESSAGE); + } + return query; + } +} diff --git a/server/src/test/java/org/elasticsearch/index/query/TermsSetQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/TermsSetQueryBuilderTests.java index 698cb71692b0f..f68769bb89cb5 100644 --- a/server/src/test/java/org/elasticsearch/index/query/TermsSetQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/TermsSetQueryBuilderTests.java @@ -111,7 +111,7 @@ protected void doAssertLuceneQuery(TermsSetQueryBuilder queryBuilder, Query quer } @Override - protected boolean isCachable(TermsSetQueryBuilder queryBuilder) { + protected boolean isCacheable(TermsSetQueryBuilder queryBuilder) { return queryBuilder.getMinimumShouldMatchField() != null || (queryBuilder.getMinimumShouldMatchScript() != null && queryBuilder.getValues().isEmpty()); } diff --git a/server/src/test/java/org/elasticsearch/index/query/functionscore/FunctionScoreQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/functionscore/FunctionScoreQueryBuilderTests.java index ba673cf2ea4a9..8f177cac863b3 100644 --- a/server/src/test/java/org/elasticsearch/index/query/functionscore/FunctionScoreQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/functionscore/FunctionScoreQueryBuilderTests.java @@ -797,7 +797,7 @@ public List> getScoreFunctions() { } @Override - protected boolean isCachable(FunctionScoreQueryBuilder queryBuilder) { + protected boolean isCacheable(FunctionScoreQueryBuilder queryBuilder) { FilterFunctionBuilder[] filterFunctionBuilders = queryBuilder.filterFunctionBuilders(); for (FilterFunctionBuilder builder : filterFunctionBuilders) { if (builder.getScoreFunction() instanceof ScriptScoreFunctionBuilder) { diff --git a/server/src/test/java/org/elasticsearch/index/replication/RecoveryDuringReplicationTests.java b/server/src/test/java/org/elasticsearch/index/replication/RecoveryDuringReplicationTests.java index 747d951d5a8cc..6f38822092aea 100644 --- a/server/src/test/java/org/elasticsearch/index/replication/RecoveryDuringReplicationTests.java +++ b/server/src/test/java/org/elasticsearch/index/replication/RecoveryDuringReplicationTests.java @@ -197,7 +197,7 @@ public void testRecoveryToReplicaThatReceivedExtraDocument() throws Exception { 1, 
randomNonNegativeLong(), false, - SourceToParse.source("index", "type", "replica", new BytesArray("{}"), XContentType.JSON)); + new SourceToParse("index", "type", "replica", new BytesArray("{}"), XContentType.JSON)); shards.promoteReplicaToPrimary(promotedReplica).get(); oldPrimary.close("demoted", randomBoolean()); oldPrimary.store().close(); @@ -210,7 +210,7 @@ public void testRecoveryToReplicaThatReceivedExtraDocument() throws Exception { promotedReplica.applyIndexOperationOnPrimary( Versions.MATCH_ANY, VersionType.INTERNAL, - SourceToParse.source("index", "type", "primary", new BytesArray("{}"), XContentType.JSON), + new SourceToParse("index", "type", "primary", new BytesArray("{}"), XContentType.JSON), SequenceNumbers.UNASSIGNED_SEQ_NO, 0, randomNonNegativeLong(), false); } diff --git a/server/src/test/java/org/elasticsearch/index/seqno/ReplicationTrackerRetentionLeaseTests.java b/server/src/test/java/org/elasticsearch/index/seqno/ReplicationTrackerRetentionLeaseTests.java new file mode 100644 index 0000000000000..2854cc87d8695 --- /dev/null +++ b/server/src/test/java/org/elasticsearch/index/seqno/ReplicationTrackerRetentionLeaseTests.java @@ -0,0 +1,149 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.index.seqno; + +import org.elasticsearch.cluster.routing.AllocationId; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.index.IndexSettings; +import org.elasticsearch.index.shard.ShardId; +import org.elasticsearch.test.IndexSettingsModule; + +import java.util.Collection; +import java.util.Collections; +import java.util.HashMap; +import java.util.Map; +import java.util.concurrent.atomic.AtomicLong; +import java.util.function.LongSupplier; + +import static org.elasticsearch.index.seqno.SequenceNumbers.UNASSIGNED_SEQ_NO; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.hasItem; +import static org.hamcrest.Matchers.hasSize; +import static org.hamcrest.Matchers.lessThanOrEqualTo; + +public class ReplicationTrackerRetentionLeaseTests extends ReplicationTrackerTestCase { + + public void testAddOrUpdateRetentionLease() { + final AllocationId id = AllocationId.newInitializing(); + final ReplicationTracker replicationTracker = new ReplicationTracker( + new ShardId("test", "_na", 0), + id.getId(), + IndexSettingsModule.newIndexSettings("test", Settings.EMPTY), + UNASSIGNED_SEQ_NO, + value -> {}, + () -> 0L); + replicationTracker.updateFromMaster( + randomNonNegativeLong(), + Collections.singleton(id.getId()), + routingTable(Collections.emptySet(), id), + Collections.emptySet()); + replicationTracker.activatePrimaryMode(SequenceNumbers.NO_OPS_PERFORMED); + final int length = randomIntBetween(0, 8); + final long[] minimumRetainingSequenceNumbers = new long[length]; + for (int i = 0; i < length; i++) { + minimumRetainingSequenceNumbers[i] = randomLongBetween(SequenceNumbers.NO_OPS_PERFORMED, Long.MAX_VALUE); + replicationTracker.addOrUpdateRetentionLease(Integer.toString(i), minimumRetainingSequenceNumbers[i], "test-" + i); + assertRetentionLeases(replicationTracker, i + 1, minimumRetainingSequenceNumbers, () -> 0L); + } + + for (int i = 0; i < length; i++) { + minimumRetainingSequenceNumbers[i] = randomLongBetween(minimumRetainingSequenceNumbers[i], Long.MAX_VALUE); + replicationTracker.addOrUpdateRetentionLease(Integer.toString(i), minimumRetainingSequenceNumbers[i], "test-" + i); + assertRetentionLeases(replicationTracker, length, minimumRetainingSequenceNumbers, () -> 0L); + } + + } + + public void testExpiration() { + final AllocationId id = AllocationId.newInitializing(); + final AtomicLong currentTimeMillis = new AtomicLong(randomLongBetween(0, 1024)); + final long retentionLeaseMillis = randomLongBetween(1, TimeValue.timeValueHours(12).millis()); + final Settings settings = Settings + .builder() + .put(IndexSettings.INDEX_SOFT_DELETES_RETENTION_LEASE_SETTING.getKey(), TimeValue.timeValueMillis(retentionLeaseMillis)) + .build(); + final ReplicationTracker replicationTracker = new ReplicationTracker( + new ShardId("test", "_na", 0), + id.getId(), + IndexSettingsModule.newIndexSettings("test", settings), + UNASSIGNED_SEQ_NO, + value -> {}, + currentTimeMillis::get); + replicationTracker.updateFromMaster( + randomNonNegativeLong(), + Collections.singleton(id.getId()), + routingTable(Collections.emptySet(), id), + Collections.emptySet()); + replicationTracker.activatePrimaryMode(SequenceNumbers.NO_OPS_PERFORMED); + final long[] retainingSequenceNumbers = new long[1]; + retainingSequenceNumbers[0] = randomLongBetween(SequenceNumbers.NO_OPS_PERFORMED, Long.MAX_VALUE); + replicationTracker.addOrUpdateRetentionLease("0", retainingSequenceNumbers[0], "test-0"); 
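+ // the lease was just added at the current (mocked) clock time, so it must still be retained; the block below checks its timestamp before the clock is advanced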
+ + { + final Collection retentionLeases = replicationTracker.getRetentionLeases(); + assertThat(retentionLeases, hasSize(1)); + final RetentionLease retentionLease = retentionLeases.iterator().next(); + assertThat(retentionLease.timestamp(), equalTo(currentTimeMillis.get())); + assertRetentionLeases(replicationTracker, 1, retainingSequenceNumbers, currentTimeMillis::get); + } + + // renew the lease + currentTimeMillis.set(currentTimeMillis.get() + randomLongBetween(0, 1024)); + retainingSequenceNumbers[0] = randomLongBetween(retainingSequenceNumbers[0], Long.MAX_VALUE); + replicationTracker.addOrUpdateRetentionLease("0", retainingSequenceNumbers[0], "test-0"); + + { + final Collection retentionLeases = replicationTracker.getRetentionLeases(); + assertThat(retentionLeases, hasSize(1)); + final RetentionLease retentionLease = retentionLeases.iterator().next(); + assertThat(retentionLease.timestamp(), equalTo(currentTimeMillis.get())); + assertRetentionLeases(replicationTracker, 1, retainingSequenceNumbers, currentTimeMillis::get); + } + + // now force the lease to expire + currentTimeMillis.set(currentTimeMillis.get() + randomLongBetween(retentionLeaseMillis, Long.MAX_VALUE - currentTimeMillis.get())); + assertRetentionLeases(replicationTracker, 0, retainingSequenceNumbers, currentTimeMillis::get); + } + + private void assertRetentionLeases( + final ReplicationTracker replicationTracker, + final int size, + final long[] minimumRetainingSequenceNumbers, + final LongSupplier currentTimeMillisSupplier) { + final Collection retentionLeases = replicationTracker.getRetentionLeases(); + final Map idToRetentionLease = new HashMap<>(); + for (final RetentionLease retentionLease : retentionLeases) { + idToRetentionLease.put(retentionLease.id(), retentionLease); + } + + assertThat(idToRetentionLease.entrySet(), hasSize(size)); + for (int i = 0; i < size; i++) { + assertThat(idToRetentionLease.keySet(), hasItem(Integer.toString(i))); + final RetentionLease retentionLease = idToRetentionLease.get(Integer.toString(i)); + assertThat(retentionLease.retainingSequenceNumber(), equalTo(minimumRetainingSequenceNumbers[i])); + assertThat( + currentTimeMillisSupplier.getAsLong() - retentionLease.timestamp(), + lessThanOrEqualTo(replicationTracker.indexSettings().getRetentionLeaseMillis())); + assertThat(retentionLease.source(), equalTo("test-" + i)); + } + } + +} diff --git a/server/src/test/java/org/elasticsearch/index/seqno/ReplicationTrackerTestCase.java b/server/src/test/java/org/elasticsearch/index/seqno/ReplicationTrackerTestCase.java new file mode 100644 index 0000000000000..9b1f951a030fe --- /dev/null +++ b/server/src/test/java/org/elasticsearch/index/seqno/ReplicationTrackerTestCase.java @@ -0,0 +1,74 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.index.seqno; + +import org.elasticsearch.cluster.routing.AllocationId; +import org.elasticsearch.cluster.routing.IndexShardRoutingTable; +import org.elasticsearch.cluster.routing.ShardRouting; +import org.elasticsearch.cluster.routing.ShardRoutingState; +import org.elasticsearch.cluster.routing.TestShardRouting; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.index.shard.ShardId; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.test.IndexSettingsModule; + +import java.util.Set; +import java.util.function.LongConsumer; +import java.util.function.LongSupplier; + +import static org.elasticsearch.index.seqno.SequenceNumbers.UNASSIGNED_SEQ_NO; + +public abstract class ReplicationTrackerTestCase extends ESTestCase { + + ReplicationTracker newTracker( + final AllocationId allocationId, + final LongConsumer updatedGlobalCheckpoint, + final LongSupplier currentTimeMillisSupplier) { + return new ReplicationTracker( + new ShardId("test", "_na_", 0), + allocationId.getId(), + IndexSettingsModule.newIndexSettings("test", Settings.EMPTY), + UNASSIGNED_SEQ_NO, + updatedGlobalCheckpoint, + currentTimeMillisSupplier); + } + + static IndexShardRoutingTable routingTable(final Set initializingIds, final AllocationId primaryId) { + final ShardId shardId = new ShardId("test", "_na_", 0); + final ShardRouting primaryShard = + TestShardRouting.newShardRouting(shardId, randomAlphaOfLength(10), null, true, ShardRoutingState.STARTED, primaryId); + return routingTable(initializingIds, primaryShard); + } + + static IndexShardRoutingTable routingTable(final Set initializingIds, final ShardRouting primaryShard) { + assert !initializingIds.contains(primaryShard.allocationId()); + final ShardId shardId = new ShardId("test", "_na_", 0); + final IndexShardRoutingTable.Builder builder = new IndexShardRoutingTable.Builder(shardId); + for (final AllocationId initializingId : initializingIds) { + builder.addShard(TestShardRouting.newShardRouting( + shardId, randomAlphaOfLength(10), null, false, ShardRoutingState.INITIALIZING, initializingId)); + } + + builder.addShard(primaryShard); + + return builder.build(); + } + +} diff --git a/server/src/test/java/org/elasticsearch/index/seqno/ReplicationTrackerTests.java b/server/src/test/java/org/elasticsearch/index/seqno/ReplicationTrackerTests.java index 0aed64d05fc93..001e50af57c79 100644 --- a/server/src/test/java/org/elasticsearch/index/seqno/ReplicationTrackerTests.java +++ b/server/src/test/java/org/elasticsearch/index/seqno/ReplicationTrackerTests.java @@ -32,7 +32,6 @@ import org.elasticsearch.common.util.set.Sets; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.shard.ShardId; -import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.IndexSettingsModule; import java.io.IOException; @@ -61,7 +60,7 @@ import static org.hamcrest.Matchers.greaterThan; import static org.hamcrest.Matchers.not; -public class ReplicationTrackerTests extends ESTestCase { +public class ReplicationTrackerTests extends ReplicationTrackerTestCase { public void testEmptyShards() { final ReplicationTracker tracker = newTracker(AllocationId.newInitializing()); @@ -76,27 +75,6 @@ private Map randomAllocationsWithLocalCheckpoints(int min, i return allocations; } - private static IndexShardRoutingTable routingTable(final Set initializingIds, final AllocationId primaryId) { - final ShardId shardId = new ShardId("test", "_na_", 0); - final ShardRouting primaryShard = - 
TestShardRouting.newShardRouting(shardId, randomAlphaOfLength(10), null, true, ShardRoutingState.STARTED, primaryId); - return routingTable(initializingIds, primaryShard); - } - - private static IndexShardRoutingTable routingTable(final Set initializingIds, final ShardRouting primaryShard) { - assert !initializingIds.contains(primaryShard.allocationId()); - ShardId shardId = new ShardId("test", "_na_", 0); - IndexShardRoutingTable.Builder builder = new IndexShardRoutingTable.Builder(shardId); - for (AllocationId initializingId : initializingIds) { - builder.addShard(TestShardRouting.newShardRouting( - shardId, randomAlphaOfLength(10), null, false, ShardRoutingState.INITIALIZING, initializingId)); - } - - builder.addShard(primaryShard); - - return builder.build(); - } - private static Set ids(Set allocationIds) { return allocationIds.stream().map(AllocationId::getId).collect(Collectors.toSet()); } @@ -428,12 +406,7 @@ public void testWaitForAllocationIdToBeInSync() throws Exception { private AtomicLong updatedGlobalCheckpoint = new AtomicLong(UNASSIGNED_SEQ_NO); private ReplicationTracker newTracker(final AllocationId allocationId) { - return new ReplicationTracker( - new ShardId("test", "_na_", 0), - allocationId.getId(), - IndexSettingsModule.newIndexSettings("test", Settings.EMPTY), - UNASSIGNED_SEQ_NO, - updatedGlobalCheckpoint::set); + return newTracker(allocationId, updatedGlobalCheckpoint::set, () -> 0L); } public void testWaitForAllocationIdToBeInSyncCanBeInterrupted() throws BrokenBarrierException, InterruptedException { @@ -709,10 +682,11 @@ public void testPrimaryContextHandoff() throws IOException { FakeClusterState clusterState = initialState(); final AllocationId primaryAllocationId = clusterState.routingTable.primaryShard().allocationId(); final LongConsumer onUpdate = updatedGlobalCheckpoint -> {}; - ReplicationTracker oldPrimary = - new ReplicationTracker(shardId, primaryAllocationId.getId(), indexSettings, UNASSIGNED_SEQ_NO, onUpdate); - ReplicationTracker newPrimary = - new ReplicationTracker(shardId, primaryAllocationId.getRelocationId(), indexSettings, UNASSIGNED_SEQ_NO, onUpdate); + final long globalCheckpoint = UNASSIGNED_SEQ_NO; + ReplicationTracker oldPrimary = new ReplicationTracker( + shardId, primaryAllocationId.getId(), indexSettings, globalCheckpoint, onUpdate, () -> 0L); + ReplicationTracker newPrimary = new ReplicationTracker( + shardId, primaryAllocationId.getRelocationId(), indexSettings, globalCheckpoint, onUpdate, () -> 0L); Set allocationIds = new HashSet<>(Arrays.asList(oldPrimary.shardAllocationId, newPrimary.shardAllocationId)); diff --git a/server/src/test/java/org/elasticsearch/index/seqno/RetentionLeaseTests.java b/server/src/test/java/org/elasticsearch/index/seqno/RetentionLeaseTests.java new file mode 100644 index 0000000000000..a5e4af5d0e6a3 --- /dev/null +++ b/server/src/test/java/org/elasticsearch/index/seqno/RetentionLeaseTests.java @@ -0,0 +1,50 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.index.seqno; + +import org.elasticsearch.test.ESTestCase; + +import static org.elasticsearch.index.seqno.SequenceNumbers.UNASSIGNED_SEQ_NO; +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.hasToString; + +public class RetentionLeaseTests extends ESTestCase { + + public void testRetainingSequenceNumberOutOfRange() { + final long retainingSequenceNumber = randomLongBetween(Long.MIN_VALUE, UNASSIGNED_SEQ_NO - 1); + final IllegalArgumentException e = expectThrows( + IllegalArgumentException.class, + () -> new RetentionLease("id", retainingSequenceNumber, randomNonNegativeLong(), "source")); + assertThat( + e, + hasToString(containsString("retention lease retaining sequence number [" + retainingSequenceNumber + "] out of range"))); + } + + public void testTimestampOutOfRange() { + final long timestamp = randomLongBetween(Long.MIN_VALUE, -1); + final IllegalArgumentException e = expectThrows( + IllegalArgumentException.class, + () -> new RetentionLease("id", randomLongBetween(SequenceNumbers.NO_OPS_PERFORMED, Long.MAX_VALUE), timestamp, "source")); + assertThat( + e, + hasToString(containsString("retention lease timestamp [" + timestamp + "] out of range"))); + } + +} diff --git a/server/src/test/java/org/elasticsearch/index/shard/IndexShardIT.java b/server/src/test/java/org/elasticsearch/index/shard/IndexShardIT.java index 0ca449b3ca7d6..475caf06e30a8 100644 --- a/server/src/test/java/org/elasticsearch/index/shard/IndexShardIT.java +++ b/server/src/test/java/org/elasticsearch/index/shard/IndexShardIT.java @@ -357,7 +357,7 @@ public void testMaybeFlush() throws Exception { .setSource("{}", XContentType.JSON).setRefreshPolicy(randomBoolean() ? 
IMMEDIATE : NONE).get(); assertFalse(shard.shouldPeriodicallyFlush()); shard.applyIndexOperationOnPrimary(Versions.MATCH_ANY, VersionType.INTERNAL, - SourceToParse.source("test", "test", "1", new BytesArray("{}"), XContentType.JSON), + new SourceToParse("test", "test", "1", new BytesArray("{}"), XContentType.JSON), SequenceNumbers.UNASSIGNED_SEQ_NO, 0, IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, false); assertTrue(shard.shouldPeriodicallyFlush()); final Translog translog = getTranslog(shard); @@ -407,7 +407,7 @@ public void testMaybeRollTranslogGeneration() throws Exception { for (int i = 0; i < numberOfDocuments; i++) { assertThat(translog.currentFileGeneration(), equalTo(generation + rolls)); final Engine.IndexResult result = shard.applyIndexOperationOnPrimary(Versions.MATCH_ANY, VersionType.INTERNAL, - SourceToParse.source("test", "test", "1", new BytesArray("{}"), XContentType.JSON), + new SourceToParse("test", "test", "1", new BytesArray("{}"), XContentType.JSON), SequenceNumbers.UNASSIGNED_SEQ_NO, 0, IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, false); final Translog.Location location = result.getTranslogLocation(); shard.afterWriteOperation(); diff --git a/server/src/test/java/org/elasticsearch/index/shard/IndexShardRetentionLeaseTests.java b/server/src/test/java/org/elasticsearch/index/shard/IndexShardRetentionLeaseTests.java new file mode 100644 index 0000000000000..bd2a33617eecf --- /dev/null +++ b/server/src/test/java/org/elasticsearch/index/shard/IndexShardRetentionLeaseTests.java @@ -0,0 +1,153 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.index.shard; + +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.index.IndexSettings; +import org.elasticsearch.index.engine.InternalEngineFactory; +import org.elasticsearch.index.seqno.RetentionLease; +import org.elasticsearch.index.seqno.SequenceNumbers; +import org.elasticsearch.threadpool.ThreadPool; + +import java.io.IOException; +import java.util.Collection; +import java.util.HashMap; +import java.util.Map; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.ScheduledExecutorService; +import java.util.concurrent.atomic.AtomicLong; +import java.util.function.LongSupplier; + +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.hasItem; +import static org.hamcrest.Matchers.hasSize; +import static org.hamcrest.Matchers.lessThanOrEqualTo; +import static org.mockito.Matchers.anyString; +import static org.mockito.Mockito.doAnswer; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +public class IndexShardRetentionLeaseTests extends IndexShardTestCase { + + private final AtomicLong currentTimeMillis = new AtomicLong(); + + @Override + protected ThreadPool setUpThreadPool() { + final ThreadPool threadPool = mock(ThreadPool.class); + doAnswer(invocationOnMock -> currentTimeMillis.get()).when(threadPool).absoluteTimeInMillis(); + when(threadPool.executor(anyString())).thenReturn(mock(ExecutorService.class)); + when(threadPool.scheduler()).thenReturn(mock(ScheduledExecutorService.class)); + return threadPool; + } + + @Override + protected void tearDownThreadPool() { + + } + + public void testAddOrUpdateRetentionLease() throws IOException { + final IndexShard indexShard = newStartedShard(true); + try { + final int length = randomIntBetween(0, 8); + final long[] minimumRetainingSequenceNumbers = new long[length]; + for (int i = 0; i < length; i++) { + minimumRetainingSequenceNumbers[i] = randomLongBetween(SequenceNumbers.NO_OPS_PERFORMED, Long.MAX_VALUE); + indexShard.addOrUpdateRetentionLease(Integer.toString(i), minimumRetainingSequenceNumbers[i], "test-" + i); + assertRetentionLeases(indexShard, i + 1, minimumRetainingSequenceNumbers, () -> 0L); + } + + for (int i = 0; i < length; i++) { + minimumRetainingSequenceNumbers[i] = randomLongBetween(minimumRetainingSequenceNumbers[i], Long.MAX_VALUE); + indexShard.addOrUpdateRetentionLease(Integer.toString(i), minimumRetainingSequenceNumbers[i], "test-" + i); + assertRetentionLeases(indexShard, length, minimumRetainingSequenceNumbers, () -> 0L); + } + } finally { + closeShards(indexShard); + } + } + + public void testExpiration() throws IOException { + final long retentionLeaseMillis = randomLongBetween(1, TimeValue.timeValueHours(12).millis()); + final Settings settings = Settings + .builder() + .put(IndexSettings.INDEX_SOFT_DELETES_RETENTION_LEASE_SETTING.getKey(), TimeValue.timeValueMillis(retentionLeaseMillis)) + .build(); + // current time is mocked through the thread pool + final IndexShard indexShard = newStartedShard(true, settings, new InternalEngineFactory()); + try { + final long[] retainingSequenceNumbers = new long[1]; + retainingSequenceNumbers[0] = randomLongBetween(SequenceNumbers.NO_OPS_PERFORMED, Long.MAX_VALUE); + indexShard.addOrUpdateRetentionLease("0", retainingSequenceNumbers[0], "test-0"); + + { + final Collection retentionLeases = indexShard.getEngine().config().retentionLeasesSupplier().get(); + assertThat(retentionLeases, 
hasSize(1)); + final RetentionLease retentionLease = retentionLeases.iterator().next(); + assertThat(retentionLease.timestamp(), equalTo(currentTimeMillis.get())); + assertRetentionLeases(indexShard, 1, retainingSequenceNumbers, currentTimeMillis::get); + } + + // renew the lease + currentTimeMillis.set(currentTimeMillis.get() + randomLongBetween(0, 1024)); + retainingSequenceNumbers[0] = randomLongBetween(retainingSequenceNumbers[0], Long.MAX_VALUE); + indexShard.addOrUpdateRetentionLease("0", retainingSequenceNumbers[0], "test-0"); + + { + final Collection retentionLeases = indexShard.getEngine().config().retentionLeasesSupplier().get(); + assertThat(retentionLeases, hasSize(1)); + final RetentionLease retentionLease = retentionLeases.iterator().next(); + assertThat(retentionLease.timestamp(), equalTo(currentTimeMillis.get())); + assertRetentionLeases(indexShard, 1, retainingSequenceNumbers, currentTimeMillis::get); + } + + // now force the lease to expire + currentTimeMillis.set( + currentTimeMillis.get() + randomLongBetween(retentionLeaseMillis, Long.MAX_VALUE - currentTimeMillis.get())); + assertRetentionLeases(indexShard, 0, retainingSequenceNumbers, currentTimeMillis::get); + } finally { + closeShards(indexShard); + } + } + + private void assertRetentionLeases( + final IndexShard indexShard, + final int size, + final long[] minimumRetainingSequenceNumbers, + final LongSupplier currentTimeMillisSupplier) { + final Collection retentionLeases = indexShard.getEngine().config().retentionLeasesSupplier().get(); + final Map idToRetentionLease = new HashMap<>(); + for (final RetentionLease retentionLease : retentionLeases) { + idToRetentionLease.put(retentionLease.id(), retentionLease); + } + + assertThat(idToRetentionLease.entrySet(), hasSize(size)); + for (int i = 0; i < size; i++) { + assertThat(idToRetentionLease.keySet(), hasItem(Integer.toString(i))); + final RetentionLease retentionLease = idToRetentionLease.get(Integer.toString(i)); + assertThat(retentionLease.retainingSequenceNumber(), equalTo(minimumRetainingSequenceNumbers[i])); + assertThat( + currentTimeMillisSupplier.getAsLong() - retentionLease.timestamp(), + lessThanOrEqualTo(indexShard.indexSettings().getRetentionLeaseMillis())); + assertThat(retentionLease.source(), equalTo("test-" + i)); + } + } + +} diff --git a/server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java b/server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java index 7679595a7fa3b..4745904a55467 100644 --- a/server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java +++ b/server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java @@ -1790,15 +1790,15 @@ public void testRecoverFromStoreWithOutOfOrderDelete() throws IOException { shard.applyDeleteOperationOnReplica(1, 2, "_doc", "id"); shard.getEngine().rollTranslogGeneration(); // isolate the delete in its own generation shard.applyIndexOperationOnReplica(0, 1, IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, false, - SourceToParse.source(shard.shardId().getIndexName(), "_doc", "id", new BytesArray("{}"), XContentType.JSON)); + new SourceToParse(shard.shardId().getIndexName(), "_doc", "id", new BytesArray("{}"), XContentType.JSON)); shard.applyIndexOperationOnReplica(3, 3, IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, false, - SourceToParse.source(shard.shardId().getIndexName(), "_doc", "id-3", new BytesArray("{}"), XContentType.JSON)); + new SourceToParse(shard.shardId().getIndexName(), "_doc", "id-3", new BytesArray("{}"), XContentType.JSON));
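// (the replica receives these operations out of order: the delete at seq#1 is applied first, then the indexing operations at seq#0 and seq#3)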
// Flushing a new commit with local checkpoint=1 allows us to skip the translog gen #1 in recovery. shard.flush(new FlushRequest().force(true).waitIfOngoing(true)); shard.applyIndexOperationOnReplica(2, 3, IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, false, - SourceToParse.source(shard.shardId().getIndexName(), "_doc", "id-2", new BytesArray("{}"), XContentType.JSON)); + new SourceToParse(shard.shardId().getIndexName(), "_doc", "id-2", new BytesArray("{}"), XContentType.JSON)); shard.applyIndexOperationOnReplica(5, 1, IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, false, - SourceToParse.source(shard.shardId().getIndexName(), "_doc", "id-5", new BytesArray("{}"), XContentType.JSON)); + new SourceToParse(shard.shardId().getIndexName(), "_doc", "id-5", new BytesArray("{}"), XContentType.JSON)); final int translogOps; if (randomBoolean()) { @@ -1912,7 +1912,7 @@ public void testRecoverFromStoreWithNoOps() throws IOException { // start a replica shard and index the second doc final IndexShard otherShard = newStartedShard(false); updateMappings(otherShard, shard.indexSettings().getIndexMetaData()); - SourceToParse sourceToParse = SourceToParse.source(shard.shardId().getIndexName(), "_doc", "1", + SourceToParse sourceToParse = new SourceToParse(shard.shardId().getIndexName(), "_doc", "1", new BytesArray("{}"), XContentType.JSON); otherShard.applyIndexOperationOnReplica(1, 1, IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, false, sourceToParse); @@ -2033,11 +2033,11 @@ public void testRecoverFromStoreRemoveStaleOperations() throws Exception { final String indexName = shard.shardId().getIndexName(); // Index #0, index #1 shard.applyIndexOperationOnReplica(0, 1, IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, false, - SourceToParse.source(indexName, "_doc", "doc-0", new BytesArray("{}"), XContentType.JSON)); + new SourceToParse(indexName, "_doc", "doc-0", new BytesArray("{}"), XContentType.JSON)); flushShard(shard); shard.updateGlobalCheckpointOnReplica(0, "test"); // stick the global checkpoint here. shard.applyIndexOperationOnReplica(1, 1, IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, false, - SourceToParse.source(indexName, "_doc", "doc-1", new BytesArray("{}"), XContentType.JSON)); + new SourceToParse(indexName, "_doc", "doc-1", new BytesArray("{}"), XContentType.JSON)); flushShard(shard); assertThat(getShardDocUIDs(shard), containsInAnyOrder("doc-0", "doc-1")); // Here we try to increase term (i.e. 
a new primary is promoted) without rolling back a replica so we can keep stale operations @@ -2047,7 +2047,7 @@ public void testRecoverFromStoreRemoveStaleOperations() throws Exception { shard.getEngine().rollTranslogGeneration(); shard.markSeqNoAsNoop(1, "test"); shard.applyIndexOperationOnReplica(2, 1, IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, false, - SourceToParse.source(indexName, "_doc", "doc-2", new BytesArray("{}"), XContentType.JSON)); + new SourceToParse(indexName, "_doc", "doc-2", new BytesArray("{}"), XContentType.JSON)); flushShard(shard); assertThat(getShardDocUIDs(shard), containsInAnyOrder("doc-0", "doc-1", "doc-2")); closeShard(shard, false); @@ -3037,7 +3037,7 @@ private Result indexOnReplicaWithGaps( for (int i = offset + 1; i < operations; i++) { if (!rarely() || i == operations - 1) { // last operation can't be a gap as it's not a gap anymore final String id = Integer.toString(i); - SourceToParse sourceToParse = SourceToParse.source(indexShard.shardId().getIndexName(), "_doc", id, + SourceToParse sourceToParse = new SourceToParse(indexShard.shardId().getIndexName(), "_doc", id, new BytesArray("{}"), XContentType.JSON); indexShard.applyIndexOperationOnReplica(i, 1, IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, false, sourceToParse); diff --git a/server/src/test/java/org/elasticsearch/index/shard/PrimaryReplicaSyncerTests.java b/server/src/test/java/org/elasticsearch/index/shard/PrimaryReplicaSyncerTests.java index 282404de9a45e..d074ef3375833 100644 --- a/server/src/test/java/org/elasticsearch/index/shard/PrimaryReplicaSyncerTests.java +++ b/server/src/test/java/org/elasticsearch/index/shard/PrimaryReplicaSyncerTests.java @@ -80,7 +80,7 @@ public void testSyncerSendsOffCorrectDocuments() throws Exception { for (int i = 0; i < numDocs; i++) { // Index doc but not advance local checkpoint. shard.applyIndexOperationOnPrimary(Versions.MATCH_ANY, VersionType.INTERNAL, - SourceToParse.source(shard.shardId().getIndexName(), "_doc", Integer.toString(i), new BytesArray("{}"), XContentType.JSON), + new SourceToParse(shard.shardId().getIndexName(), "_doc", Integer.toString(i), new BytesArray("{}"), XContentType.JSON), SequenceNumbers.UNASSIGNED_SEQ_NO, 0, randomBoolean() ? IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP : randomNonNegativeLong(), true); } @@ -150,7 +150,7 @@ public void testSyncerOnClosingShard() throws Exception { for (int i = 0; i < numDocs; i++) { // Index doc but not advance local checkpoint. 
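// (the resync only replays operations above the global checkpoint, so keeping the local checkpoint behind leaves operations for the syncer to send while the shard is closing)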
shard.applyIndexOperationOnPrimary(Versions.MATCH_ANY, VersionType.INTERNAL, - SourceToParse.source(shard.shardId().getIndexName(), "_doc", Integer.toString(i), new BytesArray("{}"), XContentType.JSON), + new SourceToParse(shard.shardId().getIndexName(), "_doc", Integer.toString(i), new BytesArray("{}"), XContentType.JSON), SequenceNumbers.UNASSIGNED_SEQ_NO, 0, IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, false); } diff --git a/server/src/test/java/org/elasticsearch/index/shard/RefreshListenersTests.java b/server/src/test/java/org/elasticsearch/index/shard/RefreshListenersTests.java index 35fbb94c9976b..53c3e86ee01fb 100644 --- a/server/src/test/java/org/elasticsearch/index/shard/RefreshListenersTests.java +++ b/server/src/test/java/org/elasticsearch/index/shard/RefreshListenersTests.java @@ -31,6 +31,7 @@ import org.elasticsearch.common.UUIDs; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.lease.Releasable; import org.elasticsearch.common.lucene.uid.Versions; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; @@ -71,6 +72,7 @@ import java.util.List; import java.util.Locale; import java.util.concurrent.CountDownLatch; +import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicReference; import java.util.function.Consumer; @@ -124,8 +126,8 @@ public void onFailedEngine(String reason, @Nullable Exception e) { indexSettings, null, store, newMergePolicy(), iwc.getAnalyzer(), iwc.getSimilarity(), new CodecService(null, logger), eventListener, IndexSearcher.getDefaultQueryCache(), IndexSearcher.getDefaultQueryCachingPolicy(), translogConfig, TimeValue.timeValueMinutes(5), Collections.singletonList(listeners), Collections.emptyList(), null, - new NoneCircuitBreakerService(), () -> SequenceNumbers.NO_OPS_PERFORMED, () -> primaryTerm, - EngineTestCase.tombstoneDocSupplier()); + new NoneCircuitBreakerService(), () -> SequenceNumbers.NO_OPS_PERFORMED, Collections::emptyList, + () -> primaryTerm, EngineTestCase.tombstoneDocSupplier()); engine = new InternalEngine(config); engine.initializeMaxSeqNoOfUpdatesOrDeletes(); engine.recoverFromTranslog((e, s) -> 0, Long.MAX_VALUE); @@ -279,7 +281,7 @@ public void testConcurrentRefresh() throws Exception { if (immediate) { assertNotNull(listener.forcedRefresh.get()); } else { - assertBusy(() -> assertNotNull(listener.forcedRefresh.get())); + assertBusy(() -> assertNotNull(listener.forcedRefresh.get()), 1, TimeUnit.MINUTES); } assertFalse(listener.forcedRefresh.get()); listener.assertNoError(); @@ -314,7 +316,7 @@ public void testLotsOfThreads() throws Exception { DummyRefreshListener listener = new DummyRefreshListener(); listeners.addOrNotify(index.getTranslogLocation(), listener); - assertBusy(() -> assertNotNull("listener never called", listener.forcedRefresh.get())); + assertBusy(() -> assertNotNull("listener never called", listener.forcedRefresh.get()), 1, TimeUnit.MINUTES); if (threadCount < maxListeners) { assertFalse(listener.forcedRefresh.get()); } @@ -342,6 +344,40 @@ public void testLotsOfThreads() throws Exception { refresher.cancel(); } + public void testDisallowAddListeners() throws Exception { + assertEquals(0, listeners.pendingCount()); + DummyRefreshListener listener = new DummyRefreshListener(); + assertFalse(listeners.addOrNotify(index("1").getTranslogLocation(), listener)); + engine.refresh("I said so"); + 
assertFalse(listener.forcedRefresh.get());
+ listener.assertNoError();
+
+ try (Releasable releasable1 = listeners.forceRefreshes()) {
+ listener = new DummyRefreshListener();
+ assertTrue(listeners.addOrNotify(index("1").getTranslogLocation(), listener));
+ assertTrue(listener.forcedRefresh.get());
+ listener.assertNoError();
+ assertEquals(0, listeners.pendingCount());
+
+ try (Releasable releasable2 = listeners.forceRefreshes()) {
+ listener = new DummyRefreshListener();
+ assertTrue(listeners.addOrNotify(index("1").getTranslogLocation(), listener));
+ assertTrue(listener.forcedRefresh.get());
+ listener.assertNoError();
+ assertEquals(0, listeners.pendingCount());
+ }
+
+ listener = new DummyRefreshListener();
+ assertTrue(listeners.addOrNotify(index("1").getTranslogLocation(), listener));
+ assertTrue(listener.forcedRefresh.get());
+ listener.assertNoError();
+ assertEquals(0, listeners.pendingCount());
+ }
+
+ assertFalse(listeners.addOrNotify(index("1").getTranslogLocation(), new DummyRefreshListener()));
+ assertEquals(1, listeners.pendingCount());
+ }
+
private Engine.IndexResult index(String id) throws IOException { return index(id, "test"); }
diff --git a/server/src/test/java/org/elasticsearch/index/store/CorruptedFileIT.java b/server/src/test/java/org/elasticsearch/index/store/CorruptedFileIT.java index 966495faa1e13..cd0c90f50779c 100644 --- a/server/src/test/java/org/elasticsearch/index/store/CorruptedFileIT.java +++ b/server/src/test/java/org/elasticsearch/index/store/CorruptedFileIT.java @@ -107,7 +107,7 @@ import static org.hamcrest.Matchers.not; import static org.hamcrest.Matchers.notNullValue;
-@ESIntegTestCase.ClusterScope(scope = ESIntegTestCase.Scope.SUITE)
+@ESIntegTestCase.ClusterScope(scope = ESIntegTestCase.Scope.TEST)
public class CorruptedFileIT extends ESIntegTestCase { @Override
diff --git a/server/src/test/java/org/elasticsearch/index/store/IndexStoreTests.java b/server/src/test/java/org/elasticsearch/index/store/IndexStoreTests.java index 155473c83cf30..4e22aad8d15de 100644 --- a/server/src/test/java/org/elasticsearch/index/store/IndexStoreTests.java +++ b/server/src/test/java/org/elasticsearch/index/store/IndexStoreTests.java @@ -64,6 +64,9 @@ private void doTestStoreDirectory(Index index, Path tempDir, String typeSettingV new ShardPath(false, tempDir, tempDir, new ShardId(index, 0))); try (Directory directory = service.newFSDirectory(tempDir, NoLockFactory.INSTANCE)) { switch (type) {
+ case HYBRIDFS:
+ assertHybridDirectory(directory);
+ break;
case NIOFS: assertTrue(type + " " + directory.toString(), directory instanceof NIOFSDirectory); break; @@ -75,7 +78,7 @@ private void doTestStoreDirectory(Index index, Path tempDir, String typeSettingV break; case FS: if (Constants.JRE_IS_64BIT && MMapDirectory.UNMAP_SUPPORTED) {
- assertTrue(directory.toString(), directory instanceof MMapDirectory);
+ assertHybridDirectory(directory);
} else if (Constants.WINDOWS) { assertTrue(directory.toString(), directory instanceof SimpleFSDirectory); } else { @@ -88,4 +91,9 @@ private void doTestStoreDirectory(Index index, Path tempDir, String typeSettingV } }
+ private void assertHybridDirectory(Directory directory) {
+ assertTrue(directory.toString(), directory instanceof FsDirectoryService.HybridDirectory);
+ Directory randomAccessDirectory = ((FsDirectoryService.HybridDirectory) directory).getRandomAccessDirectory();
+ assertTrue("randomAccessDirectory: " + randomAccessDirectory.toString(), randomAccessDirectory instanceof MMapDirectory);
+ }
}
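The nested try-with-resources blocks in testDisallowAddListeners above suggest that RefreshListeners.forceRefreshes() hands out reference-counted handles: listeners are forced for as long as at least one handle is open, and normal queueing resumes only once the last one is released. A minimal sketch of that gate pattern (hypothetical RefreshGate class, not the production RefreshListeners code):

import java.util.concurrent.atomic.AtomicInteger;

// Hypothetical sketch of a re-entrant "force refreshes" gate. Each call to
// forceRefreshes() takes a hold; listeners may queue up again only once every
// hold has been released, which is what the nested try blocks above exercise.
final class RefreshGate {
    private final AtomicInteger holds = new AtomicInteger();

    AutoCloseable forceRefreshes() {
        holds.incrementAndGet();
        return holds::decrementAndGet; // closing one handle releases one hold
    }

    boolean shouldForce() {
        return holds.get() > 0; // true while any handle remains open
    }
}

diff --git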
a/server/src/test/java/org/elasticsearch/indexing/IndexActionIT.java b/server/src/test/java/org/elasticsearch/indexing/IndexActionIT.java index 0bf8fa698f44e..36488addb3737 100644 --- a/server/src/test/java/org/elasticsearch/indexing/IndexActionIT.java +++ b/server/src/test/java/org/elasticsearch/indexing/IndexActionIT.java @@ -70,8 +70,9 @@ public void testAutoGenerateIdNoDuplicates() throws Exception { logger.debug("running search with all types"); SearchResponse response = client().prepareSearch("test").get(); if (response.getHits().getTotalHits().value != numOfDocs) { - final String message = "Count is " + response.getHits().getTotalHits().value + " but " + numOfDocs + " was expected. " - + ElasticsearchAssertions.formatShardStatus(response); + final String message = + "Count is " + response.getHits().getTotalHits().value + " but " + numOfDocs + " was expected. " + + ElasticsearchAssertions.formatShardStatus(response); logger.error("{}. search response: \n{}", message, response); fail(message); } @@ -85,8 +86,9 @@ public void testAutoGenerateIdNoDuplicates() throws Exception { logger.debug("running search with a specific type"); SearchResponse response = client().prepareSearch("test").setTypes("type").get(); if (response.getHits().getTotalHits().value != numOfDocs) { - final String message = "Count is " + response.getHits().getTotalHits().value + " but " + numOfDocs + " was expected. " - + ElasticsearchAssertions.formatShardStatus(response); + final String message = + "Count is " + response.getHits().getTotalHits().value + " but " + numOfDocs + " was expected. " + + ElasticsearchAssertions.formatShardStatus(response); logger.error("{}. search response: \n{}", message, response); fail(message); } @@ -183,7 +185,8 @@ public void testCreateFlagWithBulk() { createIndex("test"); ensureGreen(); - BulkResponse bulkResponse = client().prepareBulk().add(client().prepareIndex("test", "type", "1").setSource("field1", "value1_1")).execute().actionGet(); + BulkResponse bulkResponse = client().prepareBulk().add( + client().prepareIndex("test", "type", "1").setSource("field1", "value1_1")).execute().actionGet(); assertThat(bulkResponse.hasFailures(), equalTo(false)); assertThat(bulkResponse.getItems().length, equalTo(1)); IndexResponse indexResponse = bulkResponse.getItems()[0].getResponse(); diff --git a/server/src/test/java/org/elasticsearch/indices/TermsLookupTests.java b/server/src/test/java/org/elasticsearch/indices/TermsLookupTests.java index ecb169ddaf7dc..a3743f55d028b 100644 --- a/server/src/test/java/org/elasticsearch/indices/TermsLookupTests.java +++ b/server/src/test/java/org/elasticsearch/indices/TermsLookupTests.java @@ -19,6 +19,7 @@ package org.elasticsearch.indices; +import org.elasticsearch.Version; import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.test.ESTestCase; @@ -82,9 +83,37 @@ public void testSerialization() throws IOException { assertNotSame(deserializedLookup, termsLookup); } } + + try (BytesStreamOutput output = new BytesStreamOutput()) { + output.setVersion(Version.V_6_7_0); + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> termsLookup.writeTo(output)); + assertEquals("Typeless [terms] lookup queries are not supported if any " + + "node is running a version before 7.0.", e.getMessage()); + } + } + + public void testSerializationWithTypes() throws IOException { + TermsLookup termsLookup = randomTermsLookupWithTypes(); + try (BytesStreamOutput output = 
new BytesStreamOutput()) { + termsLookup.writeTo(output); + try (StreamInput in = output.bytes().streamInput()) { + TermsLookup deserializedLookup = new TermsLookup(in); + assertEquals(deserializedLookup, termsLookup); + assertEquals(deserializedLookup.hashCode(), termsLookup.hashCode()); + assertNotSame(deserializedLookup, termsLookup); + } + } } public static TermsLookup randomTermsLookup() { + return new TermsLookup( + randomAlphaOfLength(10), + randomAlphaOfLength(10), + randomAlphaOfLength(10).replace('.', '_') + ).routing(randomBoolean() ? randomAlphaOfLength(10) : null); + } + + public static TermsLookup randomTermsLookupWithTypes() { return new TermsLookup( randomAlphaOfLength(10), randomAlphaOfLength(10), diff --git a/server/src/test/java/org/elasticsearch/indices/cluster/ClusterStateChanges.java b/server/src/test/java/org/elasticsearch/indices/cluster/ClusterStateChanges.java index af04fd5e27d9b..387ba1c3d9653 100644 --- a/server/src/test/java/org/elasticsearch/indices/cluster/ClusterStateChanges.java +++ b/server/src/test/java/org/elasticsearch/indices/cluster/ClusterStateChanges.java @@ -28,6 +28,7 @@ import org.elasticsearch.action.admin.cluster.reroute.TransportClusterRerouteAction; import org.elasticsearch.action.admin.indices.close.CloseIndexRequest; import org.elasticsearch.action.admin.indices.close.TransportCloseIndexAction; +import org.elasticsearch.action.admin.indices.close.TransportVerifyShardBeforeCloseAction; import org.elasticsearch.action.admin.indices.create.CreateIndexRequest; import org.elasticsearch.action.admin.indices.create.TransportCreateIndexAction; import org.elasticsearch.action.admin.indices.delete.DeleteIndexRequest; @@ -39,6 +40,7 @@ import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.DestructiveOperations; import org.elasticsearch.action.support.PlainActionFuture; +import org.elasticsearch.action.support.master.AcknowledgedResponse; import org.elasticsearch.action.support.master.MasterNodeRequest; import org.elasticsearch.action.support.master.TransportMasterNodeAction; import org.elasticsearch.action.support.master.TransportMasterNodeActionUtils; @@ -50,6 +52,7 @@ import org.elasticsearch.cluster.action.shard.ShardStateAction; import org.elasticsearch.cluster.action.shard.ShardStateAction.FailedShardEntry; import org.elasticsearch.cluster.action.shard.ShardStateAction.StartedShardEntry; +import org.elasticsearch.cluster.block.ClusterBlock; import org.elasticsearch.cluster.coordination.JoinTaskExecutor; import org.elasticsearch.cluster.coordination.NodeRemovalClusterStateTaskExecutor; import org.elasticsearch.cluster.metadata.AliasValidator; @@ -58,6 +61,7 @@ import org.elasticsearch.cluster.metadata.MetaDataCreateIndexService; import org.elasticsearch.cluster.metadata.MetaDataDeleteIndexService; import org.elasticsearch.cluster.metadata.MetaDataIndexStateService; +import org.elasticsearch.cluster.metadata.MetaDataIndexStateServiceUtils; import org.elasticsearch.cluster.metadata.MetaDataIndexUpgradeService; import org.elasticsearch.cluster.metadata.MetaDataUpdateSettingsService; import org.elasticsearch.cluster.node.DiscoveryNode; @@ -77,6 +81,7 @@ import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.env.Environment; import org.elasticsearch.env.TestEnvironment; +import org.elasticsearch.index.Index; import org.elasticsearch.index.IndexService; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.shard.IndexEventListener; @@ -90,8 +95,11 @@ 
import java.util.ArrayList; import java.util.Arrays; import java.util.Collections;
+import java.util.HashMap;
import java.util.HashSet; import java.util.List;
+import java.util.Map;
+import java.util.function.Function;
import java.util.stream.Collectors; import static com.carrotsearch.randomizedtesting.RandomizedTest.getRandom; @@ -179,8 +187,11 @@ public IndexMetaData upgradeIndexMetaData(IndexMetaData indexMetaData, Version m return indexMetaData; } };
+
+ TransportVerifyShardBeforeCloseAction transportVerifyShardBeforeCloseAction = new TransportVerifyShardBeforeCloseAction(SETTINGS,
+ transportService, clusterService, indicesService, threadPool, null, actionFilters, indexNameExpressionResolver);
MetaDataIndexStateService indexStateService = new MetaDataIndexStateService(clusterService, allocationService,
- metaDataIndexUpgradeService, indicesService, threadPool);
+ metaDataIndexUpgradeService, indicesService, threadPool, transportVerifyShardBeforeCloseAction);
MetaDataDeleteIndexService deleteIndexService = new MetaDataDeleteIndexService(SETTINGS, clusterService, allocationService); MetaDataUpdateSettingsService metaDataUpdateSettingsService = new MetaDataUpdateSettingsService(clusterService, allocationService, IndexScopedSettings.DEFAULT_SCOPED_SETTINGS, indicesService, threadPool); @@ -210,7 +221,15 @@ public ClusterState createIndex(ClusterState state, CreateIndexRequest request) } public ClusterState closeIndices(ClusterState state, CloseIndexRequest request) {
- return execute(transportCloseIndexAction, request, state);
+ final Index[] concreteIndices = Arrays.stream(request.indices())
+ .map(index -> state.metaData().index(index).getIndex()).toArray(Index[]::new);
+
+ final Map<Index, ClusterBlock> blockedIndices = new HashMap<>();
+ ClusterState newState = MetaDataIndexStateServiceUtils.addIndexClosedBlocks(concreteIndices, blockedIndices, state);
+
+ newState = MetaDataIndexStateServiceUtils.closeRoutingTable(newState, blockedIndices, blockedIndices.keySet().stream()
+ .collect(Collectors.toMap(Function.identity(), index -> new AcknowledgedResponse(true))));
+ return allocationService.reroute(newState, "indices closed");
} public ClusterState openIndices(ClusterState state, OpenIndexRequest request) {
diff --git a/server/src/test/java/org/elasticsearch/indices/cluster/IndicesClusterStateServiceRandomUpdatesTests.java b/server/src/test/java/org/elasticsearch/indices/cluster/IndicesClusterStateServiceRandomUpdatesTests.java index 89a6bc8dcf89d..9dd8d5c5b660d 100644 --- a/server/src/test/java/org/elasticsearch/indices/cluster/IndicesClusterStateServiceRandomUpdatesTests.java +++ b/server/src/test/java/org/elasticsearch/indices/cluster/IndicesClusterStateServiceRandomUpdatesTests.java @@ -292,19 +292,19 @@ public ClusterState randomlyUpdateClusterState(ClusterState state, Map<DiscoveryNode, IndicesClusterStateService> clusterStateServiceMap, Supplier<MockIndicesService> indicesServiceSupplier) { // randomly remove no_master blocks
- if (randomBoolean() && state.blocks().hasGlobalBlock(DiscoverySettings.NO_MASTER_BLOCK_ID)) {
+ if (randomBoolean() && state.blocks().hasGlobalBlockWithId(DiscoverySettings.NO_MASTER_BLOCK_ID)) {
state = ClusterState.builder(state).blocks( ClusterBlocks.builder().blocks(state.blocks()).removeGlobalBlock(DiscoverySettings.NO_MASTER_BLOCK_ID)).build(); } // randomly add no_master blocks
- if (rarely() && state.blocks().hasGlobalBlock(DiscoverySettings.NO_MASTER_BLOCK_ID) == false) {
+ if (rarely() && state.blocks().hasGlobalBlockWithId(DiscoverySettings.NO_MASTER_BLOCK_ID) == false) {
ClusterBlock block = randomBoolean() ?
DiscoverySettings.NO_MASTER_BLOCK_ALL : DiscoverySettings.NO_MASTER_BLOCK_WRITES; state = ClusterState.builder(state).blocks(ClusterBlocks.builder().blocks(state.blocks()).addGlobalBlock(block)).build(); } // if no_master block is in place, make no other cluster state changes
- if (state.blocks().hasGlobalBlock(DiscoverySettings.NO_MASTER_BLOCK_ID)) {
+ if (state.blocks().hasGlobalBlockWithId(DiscoverySettings.NO_MASTER_BLOCK_ID)) {
return state; }
diff --git a/server/src/test/java/org/elasticsearch/indices/flush/FlushIT.java b/server/src/test/java/org/elasticsearch/indices/flush/FlushIT.java index f7d920d1729a6..5535a947d9ef8 100644 --- a/server/src/test/java/org/elasticsearch/indices/flush/FlushIT.java +++ b/server/src/test/java/org/elasticsearch/indices/flush/FlushIT.java @@ -64,6 +64,7 @@ import static org.hamcrest.Matchers.not; import static org.hamcrest.Matchers.nullValue;
+@ESIntegTestCase.ClusterScope(scope = ESIntegTestCase.Scope.TEST)
public class FlushIT extends ESIntegTestCase { public void testWaitIfOngoing() throws InterruptedException {
diff --git a/server/src/test/java/org/elasticsearch/indices/recovery/RecoverySourceHandlerTests.java b/server/src/test/java/org/elasticsearch/indices/recovery/RecoverySourceHandlerTests.java index af4dc59ca1a76..3f6a8072d86d5 100644 --- a/server/src/test/java/org/elasticsearch/indices/recovery/RecoverySourceHandlerTests.java +++ b/server/src/test/java/org/elasticsearch/indices/recovery/RecoverySourceHandlerTests.java @@ -46,6 +46,7 @@ import org.elasticsearch.common.lucene.uid.Versions; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.util.CancellableThreads; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.core.internal.io.IOUtils; @@ -190,7 +191,7 @@ public void testSendSnapshotSendsOps() throws IOException { final long startingSeqNo = randomIntBetween(0, numberOfDocsWithValidSequenceNumbers - 1); final long requiredStartingSeqNo = randomIntBetween((int) startingSeqNo, numberOfDocsWithValidSequenceNumbers - 1); final long endingSeqNo = randomIntBetween((int) requiredStartingSeqNo - 1, numberOfDocsWithValidSequenceNumbers - 1);
- RecoverySourceHandler.SendSnapshotResult result = handler.sendSnapshot(startingSeqNo, requiredStartingSeqNo,
+ RecoverySourceHandler.SendSnapshotResult result = handler.phase2(startingSeqNo, requiredStartingSeqNo,
endingSeqNo, new Translog.Snapshot() { @Override public void close() { @@ -229,7 +230,7 @@ public Translog.Operation next() throws IOException { .filter(o -> o.seqNo() >= requiredStartingSeqNo && o.seqNo() <= endingSeqNo).collect(Collectors.toList()); List<Translog.Operation> opsToSkip = randomSubsetOf(randomIntBetween(1, requiredOps.size()), requiredOps); expectThrows(IllegalStateException.class, () ->
- handler.sendSnapshot(startingSeqNo, requiredStartingSeqNo,
+ handler.phase2(startingSeqNo, requiredStartingSeqNo,
endingSeqNo, new Translog.Snapshot() { @Override public void close() { @@ -412,20 +413,23 @@ public void testThrowExceptionOnPrimaryRelocatedBeforePhase1Started() throws IOE recoverySettings.getChunkSize().bytesAsInt()) { @Override
- public void phase1(final IndexCommit snapshot, final Supplier<Integer> translogOps) {
+ public SendFileResult phase1(final IndexCommit snapshot, final Supplier<Integer> translogOps) {
phase1Called.set(true);
+ return super.phase1(snapshot, translogOps);
} @Override
- void prepareTargetForTranslog(final boolean fileBasedRecovery, final int totalTranslogOps) throws IOException {
+ TimeValue prepareTargetForTranslog(final boolean fileBasedRecovery, final int totalTranslogOps) throws IOException {
prepareTargetForTranslogCalled.set(true);
+ return super.prepareTargetForTranslog(fileBasedRecovery, totalTranslogOps);
} @Override
- long phase2(long startingSeqNo, long requiredSeqNoRangeStart, long endingSeqNo, Translog.Snapshot snapshot,
- long maxSeenAutoIdTimestamp, long maxSeqNoOfUpdatesOrDeletes) {
+ SendSnapshotResult phase2(long startingSeqNo, long requiredSeqNoRangeStart, long endingSeqNo, Translog.Snapshot snapshot,
+ long maxSeenAutoIdTimestamp, long maxSeqNoOfUpdatesOrDeletes) throws IOException {
phase2Called.set(true);
- return SequenceNumbers.UNASSIGNED_SEQ_NO;
+ return super.phase2(startingSeqNo, requiredSeqNoRangeStart, endingSeqNo, snapshot,
+ maxSeenAutoIdTimestamp, maxSeqNoOfUpdatesOrDeletes);
} };
diff --git a/server/src/test/java/org/elasticsearch/indices/recovery/RecoveryTests.java b/server/src/test/java/org/elasticsearch/indices/recovery/RecoveryTests.java index 694032bd9887a..48061b11d58c7 100644 --- a/server/src/test/java/org/elasticsearch/indices/recovery/RecoveryTests.java +++ b/server/src/test/java/org/elasticsearch/indices/recovery/RecoveryTests.java @@ -132,19 +132,19 @@ public void testRecoveryWithOutOfOrderDeleteWithTranslog() throws Exception { getTranslog(orgReplica).rollGeneration(); // isolate the delete in its own generation // index #0 orgReplica.applyIndexOperationOnReplica(0, 1, IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, false,
- SourceToParse.source(indexName, "type", "id", new BytesArray("{}"), XContentType.JSON));
+ new SourceToParse(indexName, "type", "id", new BytesArray("{}"), XContentType.JSON));
// index #3 orgReplica.applyIndexOperationOnReplica(3, 1, IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, false,
- SourceToParse.source(indexName, "type", "id-3", new BytesArray("{}"), XContentType.JSON));
+ new SourceToParse(indexName, "type", "id-3", new BytesArray("{}"), XContentType.JSON));
// Flushing a new commit with local checkpoint=1 allows to delete the translog gen #1. orgReplica.flush(new FlushRequest().force(true).waitIfOngoing(true)); // index #2 orgReplica.applyIndexOperationOnReplica(2, 1, IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, false,
- SourceToParse.source(indexName, "type", "id-2", new BytesArray("{}"), XContentType.JSON));
+ new SourceToParse(indexName, "type", "id-2", new BytesArray("{}"), XContentType.JSON));
orgReplica.updateGlobalCheckpointOnReplica(3L, "test"); // index #5 -> force NoOp #4.
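// Seq# 4 is deliberately left unassigned: the replica's global checkpoint is
// already at 3, so applying seq# 5 creates a gap that the engine must later
// fill with a NoOp, which is what this recovery test verifies.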
orgReplica.applyIndexOperationOnReplica(5, 1, IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, false, - SourceToParse.source(indexName, "type", "id-5", new BytesArray("{}"), XContentType.JSON)); + new SourceToParse(indexName, "type", "id-5", new BytesArray("{}"), XContentType.JSON)); final int translogOps; if (randomBoolean()) { @@ -196,19 +196,19 @@ public void testRecoveryWithOutOfOrderDeleteWithSoftDeletes() throws Exception { orgReplica.flush(new FlushRequest().force(true)); // isolate delete#1 in its own translog generation and lucene segment // index #0 orgReplica.applyIndexOperationOnReplica(0, 1, IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, false, - SourceToParse.source(indexName, "type", "id", new BytesArray("{}"), XContentType.JSON)); + new SourceToParse(indexName, "type", "id", new BytesArray("{}"), XContentType.JSON)); // index #3 orgReplica.applyIndexOperationOnReplica(3, 1, IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, false, - SourceToParse.source(indexName, "type", "id-3", new BytesArray("{}"), XContentType.JSON)); + new SourceToParse(indexName, "type", "id-3", new BytesArray("{}"), XContentType.JSON)); // Flushing a new commit with local checkpoint=1 allows to delete the translog gen #1. orgReplica.flush(new FlushRequest().force(true).waitIfOngoing(true)); // index #2 orgReplica.applyIndexOperationOnReplica(2, 1, IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, false, - SourceToParse.source(indexName, "type", "id-2", new BytesArray("{}"), XContentType.JSON)); + new SourceToParse(indexName, "type", "id-2", new BytesArray("{}"), XContentType.JSON)); orgReplica.updateGlobalCheckpointOnReplica(3L, "test"); // index #5 -> force NoOp #4. orgReplica.applyIndexOperationOnReplica(5, 1, IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, false, - SourceToParse.source(indexName, "type", "id-5", new BytesArray("{}"), XContentType.JSON)); + new SourceToParse(indexName, "type", "id-5", new BytesArray("{}"), XContentType.JSON)); if (randomBoolean()) { if (randomBoolean()) { @@ -312,7 +312,7 @@ public void testPeerRecoverySendSafeCommitInFileBased() throws Exception { long globalCheckpoint = 0; for (int i = 0; i < numDocs; i++) { Engine.IndexResult result = primaryShard.applyIndexOperationOnPrimary(Versions.MATCH_ANY, VersionType.INTERNAL, - SourceToParse.source(primaryShard.shardId().getIndexName(), "_doc", Integer.toString(i), new BytesArray("{}"), + new SourceToParse(primaryShard.shardId().getIndexName(), "_doc", Integer.toString(i), new BytesArray("{}"), XContentType.JSON), SequenceNumbers.UNASSIGNED_SEQ_NO, 0, IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, false); assertThat(result.getResultType(), equalTo(Engine.Result.Type.SUCCESS)); diff --git a/server/src/test/java/org/elasticsearch/indices/state/CloseIndexIT.java b/server/src/test/java/org/elasticsearch/indices/state/CloseIndexIT.java new file mode 100644 index 0000000000000..a0304c96430f0 --- /dev/null +++ b/server/src/test/java/org/elasticsearch/indices/state/CloseIndexIT.java @@ -0,0 +1,344 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.indices.state; + +import org.elasticsearch.ExceptionsHelper; +import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.action.support.ActiveShardCount; +import org.elasticsearch.action.support.IndicesOptions; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.block.ClusterBlockException; +import org.elasticsearch.cluster.metadata.IndexMetaData; +import org.elasticsearch.cluster.metadata.MetaDataIndexStateService; +import org.elasticsearch.cluster.routing.ShardRouting; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.index.IndexNotFoundException; +import org.elasticsearch.indices.IndexClosedException; +import org.elasticsearch.test.BackgroundIndexer; +import org.elasticsearch.test.ESIntegTestCase; + +import java.util.ArrayList; +import java.util.List; +import java.util.Locale; +import java.util.concurrent.CountDownLatch; +import java.util.stream.IntStream; + +import static java.util.Collections.emptySet; +import static java.util.stream.Collectors.toList; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.hasSize; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.notNullValue; +import static org.hamcrest.Matchers.nullValue; + +public class CloseIndexIT extends ESIntegTestCase { + + public void testCloseMissingIndex() { + IndexNotFoundException e = expectThrows(IndexNotFoundException.class, () -> client().admin().indices().prepareClose("test").get()); + assertThat(e.getMessage(), is("no such index [test]")); + } + + public void testCloseOneMissingIndex() { + createIndex("test1"); + final IndexNotFoundException e = expectThrows(IndexNotFoundException.class, + () -> client().admin().indices().prepareClose("test1", "test2").get()); + assertThat(e.getMessage(), is("no such index [test2]")); + } + + public void testCloseOneMissingIndexIgnoreMissing() { + createIndex("test1"); + assertAcked(client().admin().indices().prepareClose("test1", "test2").setIndicesOptions(IndicesOptions.lenientExpandOpen())); + assertIndexIsClosed("test1"); + } + + public void testCloseNoIndex() { + final ActionRequestValidationException e = expectThrows(ActionRequestValidationException.class, + () -> client().admin().indices().prepareClose().get()); + assertThat(e.getMessage(), containsString("index is missing")); + } + + public void testCloseNullIndex() { + final ActionRequestValidationException e = expectThrows(ActionRequestValidationException.class, + () -> client().admin().indices().prepareClose((String[])null).get()); + assertThat(e.getMessage(), containsString("index is missing")); + } + + public void testCloseIndex() throws Exception { + final String indexName = randomAlphaOfLength(10).toLowerCase(Locale.ROOT); + createIndex(indexName); + + final int nbDocs = randomIntBetween(0, 50); + 
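// The three leading booleans of indexRandom are forceRefresh, dummyDocuments and
// maybeFlush (parameter names per ESIntegTestCase), so refresh and flush
// behaviour is randomized across runs.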
indexRandom(randomBoolean(), false, randomBoolean(), IntStream.range(0, nbDocs) + .mapToObj(i -> client().prepareIndex(indexName, "_doc", String.valueOf(i)).setSource("num", i)).collect(toList())); + + assertAcked(client().admin().indices().prepareClose(indexName)); + assertIndexIsClosed(indexName); + + assertAcked(client().admin().indices().prepareOpen(indexName)); + assertHitCount(client().prepareSearch(indexName).setSize(0).get(), nbDocs); + } + + public void testCloseAlreadyClosedIndex() throws Exception { + final String indexName = randomAlphaOfLength(10).toLowerCase(Locale.ROOT); + createIndex(indexName); + + if (randomBoolean()) { + indexRandom(randomBoolean(), false, randomBoolean(), IntStream.range(0, randomIntBetween(1, 10)) + .mapToObj(i -> client().prepareIndex(indexName, "_doc", String.valueOf(i)).setSource("num", i)).collect(toList())); + } + // First close should be acked + assertAcked(client().admin().indices().prepareClose(indexName)); + assertIndexIsClosed(indexName); + + // Second close should be acked too + assertAcked(client().admin().indices().prepareClose(indexName)); + assertIndexIsClosed(indexName); + } + + public void testCloseUnassignedIndex() { + final String indexName = randomAlphaOfLength(10).toLowerCase(Locale.ROOT); + assertAcked(prepareCreate(indexName) + .setWaitForActiveShards(ActiveShardCount.NONE) + .setSettings(Settings.builder().put("index.routing.allocation.include._name", "nothing").build())); + + final ClusterState clusterState = client().admin().cluster().prepareState().get().getState(); + assertThat(clusterState.metaData().indices().get(indexName).getState(), is(IndexMetaData.State.OPEN)); + assertThat(clusterState.routingTable().allShards().stream().allMatch(ShardRouting::unassigned), is(true)); + + assertAcked(client().admin().indices().prepareClose(indexName)); + assertIndexIsClosed(indexName); + } + + public void testConcurrentClose() throws InterruptedException { + final String indexName = randomAlphaOfLength(10).toLowerCase(Locale.ROOT); + createIndex(indexName); + + final int nbDocs = randomIntBetween(10, 50); + indexRandom(randomBoolean(), false, randomBoolean(), IntStream.range(0, nbDocs) + .mapToObj(i -> client().prepareIndex(indexName, "_doc", String.valueOf(i)).setSource("num", i)).collect(toList())); + ensureYellowAndNoInitializingShards(indexName); + + final CountDownLatch startClosing = new CountDownLatch(1); + final Thread[] threads = new Thread[randomIntBetween(2, 5)]; + + for (int i = 0; i < threads.length; i++) { + threads[i] = new Thread(() -> { + try { + startClosing.await(); + } catch (InterruptedException e) { + throw new AssertionError(e); + } + try { + client().admin().indices().prepareClose(indexName).get(); + } catch (final Exception e) { + assertException(e, indexName); + } + }); + threads[i].start(); + } + + startClosing.countDown(); + for (Thread thread : threads) { + thread.join(); + } + assertIndexIsClosed(indexName); + } + + public void testCloseWhileIndexingDocuments() throws Exception { + final String indexName = randomAlphaOfLength(10).toLowerCase(Locale.ROOT); + createIndex(indexName); + + int nbDocs = 0; + try (BackgroundIndexer indexer = new BackgroundIndexer(indexName, "_doc", client())) { + indexer.setAssertNoFailuresOnStop(false); + + waitForDocs(randomIntBetween(10, 50), indexer); + assertAcked(client().admin().indices().prepareClose(indexName)); + indexer.stop(); + nbDocs += indexer.totalIndexedDocs(); + + final Throwable[] failures = indexer.getFailures(); + if (failures != null) { + for (Throwable 
failure : failures) {
+ assertException(failure, indexName);
+ }
+ }
+ }
+
+ assertIndexIsClosed(indexName);
+ assertAcked(client().admin().indices().prepareOpen(indexName));
+ assertHitCount(client().prepareSearch(indexName).setSize(0).get(), nbDocs);
+ }
+
+ public void testCloseWhileDeletingIndices() throws Exception {
+ final String[] indices = new String[randomIntBetween(3, 10)];
+ for (int i = 0; i < indices.length; i++) {
+ final String indexName = randomAlphaOfLength(10).toLowerCase(Locale.ROOT);
+ createIndex(indexName);
+ if (randomBoolean()) {
+ indexRandom(randomBoolean(), false, randomBoolean(), IntStream.range(0, 10)
+ .mapToObj(n -> client().prepareIndex(indexName, "_doc", String.valueOf(n)).setSource("num", n)).collect(toList()));
+ }
+ indices[i] = indexName;
+ }
+ assertThat(client().admin().cluster().prepareState().get().getState().metaData().indices().size(), equalTo(indices.length));
+
+ final List<Thread> threads = new ArrayList<>();
+ final CountDownLatch latch = new CountDownLatch(1);
+
+ for (final String indexToDelete : indices) {
+ threads.add(new Thread(() -> {
+ try {
+ latch.await();
+ } catch (InterruptedException e) {
+ throw new AssertionError(e);
+ }
+ try {
+ assertAcked(client().admin().indices().prepareDelete(indexToDelete));
+ } catch (final Exception e) {
+ assertException(e, indexToDelete);
+ }
+ }));
+ }
+ for (final String indexToClose : indices) {
+ threads.add(new Thread(() -> {
+ try {
+ latch.await();
+ } catch (InterruptedException e) {
+ throw new AssertionError(e);
+ }
+ try {
+ client().admin().indices().prepareClose(indexToClose).get();
+ } catch (final Exception e) {
+ assertException(e, indexToClose);
+ }
+ }));
+ }
+
+ for (Thread thread : threads) {
+ thread.start();
+ }
+ latch.countDown();
+ for (Thread thread : threads) {
+ thread.join();
+ }
+ }
+
+ public void testConcurrentClosesAndOpens() throws Exception {
+ final String indexName = randomAlphaOfLength(10).toLowerCase(Locale.ROOT);
+ createIndex(indexName);
+
+ final BackgroundIndexer indexer = new BackgroundIndexer(indexName, "_doc", client());
+ waitForDocs(1, indexer);
+
+ final CountDownLatch latch = new CountDownLatch(1);
+ final Runnable waitForLatch = () -> {
+ try {
+ latch.await();
+ } catch (final InterruptedException e) {
+ throw new AssertionError(e);
+ }
+ };
+
+ final List<Thread> threads = new ArrayList<>();
+ for (int i = 0; i < randomIntBetween(1, 3); i++) {
+ threads.add(new Thread(() -> {
+ try {
+ waitForLatch.run();
+ client().admin().indices().prepareClose(indexName).get();
+ } catch (final Exception e) {
+ throw new AssertionError(e);
+ }
+ }));
+ }
+ for (int i = 0; i < randomIntBetween(1, 3); i++) {
+ threads.add(new Thread(() -> {
+ try {
+ waitForLatch.run();
+ assertAcked(client().admin().indices().prepareOpen(indexName).get());
+ } catch (final Exception e) {
+ throw new AssertionError(e);
+ }
+ }));
+ }
+
+ for (Thread thread : threads) {
+ thread.start();
+ }
+ latch.countDown();
+ for (Thread thread : threads) {
+ thread.join();
+ }
+
+ indexer.setAssertNoFailuresOnStop(false);
+ indexer.stop();
+
+ final ClusterState clusterState = client().admin().cluster().prepareState().get().getState();
+ if (clusterState.metaData().indices().get(indexName).getState() == IndexMetaData.State.CLOSE) {
+ assertIndexIsClosed(indexName);
+ assertAcked(client().admin().indices().prepareOpen(indexName));
+ }
+ refresh(indexName);
+ assertIndexIsOpened(indexName);
+ assertHitCount(client().prepareSearch(indexName).setSize(0).get(), indexer.totalIndexedDocs());
+ }
+
+ static void
assertIndexIsClosed(final String... indices) { + final ClusterState clusterState = client().admin().cluster().prepareState().get().getState(); + for (String index : indices) { + assertThat(clusterState.metaData().indices().get(index).getState(), is(IndexMetaData.State.CLOSE)); + assertThat(clusterState.routingTable().index(index), nullValue()); + assertThat(clusterState.blocks().hasIndexBlock(index, MetaDataIndexStateService.INDEX_CLOSED_BLOCK), is(true)); + assertThat("Index " + index + " must have only 1 block with [id=" + MetaDataIndexStateService.INDEX_CLOSED_BLOCK_ID + "]", + clusterState.blocks().indices().getOrDefault(index, emptySet()).stream() + .filter(clusterBlock -> clusterBlock.id() == MetaDataIndexStateService.INDEX_CLOSED_BLOCK_ID).count(), equalTo(1L)); + } + } + + static void assertIndexIsOpened(final String... indices) { + final ClusterState clusterState = client().admin().cluster().prepareState().get().getState(); + for (String index : indices) { + assertThat(clusterState.metaData().indices().get(index).getState(), is(IndexMetaData.State.OPEN)); + assertThat(clusterState.routingTable().index(index), notNullValue()); + assertThat(clusterState.blocks().hasIndexBlock(index, MetaDataIndexStateService.INDEX_CLOSED_BLOCK), is(false)); + } + } + + static void assertException(final Throwable throwable, final String indexName) { + final Throwable t = ExceptionsHelper.unwrapCause(throwable); + if (t instanceof ClusterBlockException) { + ClusterBlockException clusterBlockException = (ClusterBlockException) t; + assertThat(clusterBlockException.blocks(), hasSize(1)); + assertTrue(clusterBlockException.blocks().stream().allMatch(b -> b.id() == MetaDataIndexStateService.INDEX_CLOSED_BLOCK_ID)); + } else if (t instanceof IndexClosedException) { + IndexClosedException indexClosedException = (IndexClosedException) t; + assertThat(indexClosedException.getIndex(), notNullValue()); + assertThat(indexClosedException.getIndex().getName(), equalTo(indexName)); + } else if (t instanceof IndexNotFoundException) { + IndexNotFoundException indexNotFoundException = (IndexNotFoundException) t; + assertThat(indexNotFoundException.getIndex(), notNullValue()); + assertThat(indexNotFoundException.getIndex().getName(), equalTo(indexName)); + } else { + fail("Unexpected exception: " + t); + } + } +} diff --git a/server/src/test/java/org/elasticsearch/indices/state/CloseWhileRelocatingShardsIT.java b/server/src/test/java/org/elasticsearch/indices/state/CloseWhileRelocatingShardsIT.java new file mode 100644 index 0000000000000..165360c35972d --- /dev/null +++ b/server/src/test/java/org/elasticsearch/indices/state/CloseWhileRelocatingShardsIT.java @@ -0,0 +1,196 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */
+package org.elasticsearch.indices.state;
+
+import org.elasticsearch.action.index.IndexResponse;
+import org.elasticsearch.action.support.master.AcknowledgedResponse;
+import org.elasticsearch.cluster.ClusterState;
+import org.elasticsearch.cluster.routing.IndexRoutingTable;
+import org.elasticsearch.cluster.routing.ShardRouting;
+import org.elasticsearch.cluster.routing.allocation.command.MoveAllocationCommand;
+import org.elasticsearch.cluster.routing.allocation.decider.EnableAllocationDecider;
+import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.common.util.concurrent.ConcurrentCollections;
+import org.elasticsearch.rest.RestStatus;
+import org.elasticsearch.test.BackgroundIndexer;
+import org.elasticsearch.test.ESIntegTestCase;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.concurrent.CountDownLatch;
+
+import static org.elasticsearch.cluster.routing.allocation.decider.ConcurrentRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_CLUSTER_CONCURRENT_REBALANCE_SETTING;
+import static org.elasticsearch.cluster.routing.allocation.decider.EnableAllocationDecider.CLUSTER_ROUTING_REBALANCE_ENABLE_SETTING;
+import static org.elasticsearch.cluster.routing.allocation.decider.ThrottlingAllocationDecider.CLUSTER_ROUTING_ALLOCATION_NODE_CONCURRENT_RECOVERIES_SETTING;
+import static org.elasticsearch.indices.state.CloseIndexIT.assertException;
+import static org.elasticsearch.indices.state.CloseIndexIT.assertIndexIsClosed;
+import static org.elasticsearch.indices.state.CloseIndexIT.assertIndexIsOpened;
+import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
+import static org.hamcrest.Matchers.greaterThan;
+
+@ESIntegTestCase.ClusterScope(minNumDataNodes = 2)
+public class CloseWhileRelocatingShardsIT extends ESIntegTestCase {
+
+ @Override
+ protected Settings nodeSettings(int nodeOrdinal) {
+ return Settings.builder()
+ .put(super.nodeSettings(nodeOrdinal))
+ .put(CLUSTER_ROUTING_ALLOCATION_NODE_CONCURRENT_RECOVERIES_SETTING.getKey(), 10)
+ .put(CLUSTER_ROUTING_ALLOCATION_CLUSTER_CONCURRENT_REBALANCE_SETTING.getKey(), -1)
+ .build();
+ }
+
+ @Override
+ protected int numberOfReplicas() {
+ return 1;
+ }
+
+ @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/37274")
+ public void testCloseWhileRelocatingShards() throws Exception {
+ final String[] indices = new String[randomIntBetween(1, 3)];
+ final Map<String, Long> docsPerIndex = new HashMap<>();
+
+ for (int i = 0; i < indices.length; i++) {
+ final String indexName = "index-" + i;
+ createIndex(indexName);
+
+ int nbDocs = 0;
+ if (randomBoolean()) {
+ nbDocs = randomIntBetween(1, 20);
+ for (int j = 0; j < nbDocs; j++) {
+ IndexResponse indexResponse = client().prepareIndex(indexName, "_doc").setSource("num", j).get();
+ assertEquals(RestStatus.CREATED, indexResponse.status());
+ }
+ }
+ docsPerIndex.put(indexName, (long) nbDocs);
+ indices[i] = indexName;
+ }
+
+ ensureGreen(indices);
+ assertAcked(client().admin().cluster().prepareUpdateSettings()
+ .setTransientSettings(Settings.builder()
+ .put(CLUSTER_ROUTING_REBALANCE_ENABLE_SETTING.getKey(), EnableAllocationDecider.Rebalance.NONE.toString())));
+
+ // start some concurrent indexing threads
+ final Map<String, BackgroundIndexer> indexers = new HashMap<>();
+ for (final String index : indices) {
+ if (randomBoolean()) {
+ final BackgroundIndexer indexer = new BackgroundIndexer(index, "_doc", client(), -1, scaledRandomIntBetween(1, 3));
+ waitForDocs(1,
indexer);
+ indexers.put(index, indexer);
+ }
+ }
+
+ final Set<String> acknowledgedCloses = ConcurrentCollections.newConcurrentSet();
+ final String newNode = internalCluster().startDataOnlyNode();
+ try {
+ final CountDownLatch latch = new CountDownLatch(1);
+ final List<Thread> threads = new ArrayList<>();
+
+ // start shards relocating threads
+ final ClusterState clusterState = client().admin().cluster().prepareState().get().getState();
+ for (final String indexToRelocate : indices) {
+ final IndexRoutingTable indexRoutingTable = clusterState.routingTable().index(indexToRelocate);
+ for (int i = 0; i < getNumShards(indexToRelocate).numPrimaries; i++) {
+ final int shardId = i;
+ ShardRouting primary = indexRoutingTable.shard(shardId).primaryShard();
+ assertTrue(primary.started());
+ ShardRouting replica = indexRoutingTable.shard(shardId).replicaShards().iterator().next();
+ assertTrue(replica.started());
+
+ final String currentNodeId = randomBoolean() ? primary.currentNodeId() : replica.currentNodeId();
+ assertNotNull(currentNodeId);
+
+ final Thread thread = new Thread(() -> {
+ try {
+ latch.await();
+ } catch (InterruptedException e) {
+ throw new AssertionError(e);
+ }
+ assertAcked(client().admin().cluster().prepareReroute()
+ .add(new MoveAllocationCommand(indexToRelocate, shardId, currentNodeId, newNode)));
+ });
+ threads.add(thread);
+ thread.start();
+ }
+ }
+
+ // start index closing threads
+ for (final String indexToClose : indices) {
+ final Thread thread = new Thread(() -> {
+ try {
+ latch.await();
+ } catch (InterruptedException e) {
+ throw new AssertionError(e);
+ }
+ AcknowledgedResponse closeResponse = client().admin().indices().prepareClose(indexToClose).get();
+ if (closeResponse.isAcknowledged()) {
+ assertTrue("Index closing should not be acknowledged twice", acknowledgedCloses.add(indexToClose));
+ }
+ });
+ threads.add(thread);
+ thread.start();
+ }
+
+ latch.countDown();
+ for (Thread thread : threads) {
+ thread.join();
+ }
+ for (Map.Entry<String, BackgroundIndexer> entry : indexers.entrySet()) {
+ final BackgroundIndexer indexer = entry.getValue();
+ indexer.setAssertNoFailuresOnStop(false);
+ indexer.stop();
+
+ final String indexName = entry.getKey();
+ docsPerIndex.computeIfPresent(indexName, (key, value) -> value + indexer.totalIndexedDocs());
+
+ final Throwable[] failures = indexer.getFailures();
+ if (failures != null) {
+ for (Throwable failure : failures) {
+ assertException(failure, indexName);
+ }
+ }
+ }
+ } finally {
+ assertAcked(client().admin().cluster().prepareUpdateSettings()
+ .setTransientSettings(Settings.builder().putNull(CLUSTER_ROUTING_REBALANCE_ENABLE_SETTING.getKey())));
+ }
+
+ for (String index : indices) {
+ if (acknowledgedCloses.contains(index)) {
+ assertIndexIsClosed(index);
+ } else {
+ assertIndexIsOpened(index);
+ }
+ }
+
+ assertThat("Consider that the test failed if no indices were successfully closed", acknowledgedCloses.size(), greaterThan(0));
+ assertAcked(client().admin().indices().prepareOpen("index-*"));
+ ensureGreen(indices);
+
+ for (String index : acknowledgedCloses) {
+ long docsCount = client().prepareSearch(index).setSize(0).get().getHits().getTotalHits().value;
+ assertEquals("Expected " + docsPerIndex.get(index) + " docs in index " + index + " but got " + docsCount
+ + " (close acknowledged=" + acknowledgedCloses.contains(index) + ")", (long) docsPerIndex.get(index), docsCount);
+ }
+ }
+}
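CloseIndexIT and CloseWhileRelocatingShardsIT above drive concurrency the same way: spawn threads that park on a shared CountDownLatch, release them all at once, then join. A small helper capturing that pattern (hypothetical ConcurrentRunner class, for illustration only; the tests inline it):

import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.CountDownLatch;

final class ConcurrentRunner {
    // Starts one thread per task, releases them simultaneously to maximize
    // interleaving, and waits for all of them to finish.
    static void runConcurrently(List<Runnable> tasks) throws InterruptedException {
        CountDownLatch start = new CountDownLatch(1);
        List<Thread> threads = new ArrayList<>();
        for (Runnable task : tasks) {
            Thread thread = new Thread(() -> {
                try {
                    start.await(); // park until every thread has been started
                } catch (InterruptedException e) {
                    throw new AssertionError(e);
                }
                task.run();
            });
            threads.add(thread);
            thread.start();
        }
        start.countDown();
        for (Thread thread : threads) {
            thread.join();
        }
    }
}

diff --git a/server/src/test/java/org/elasticsearch/indices/state/OpenCloseIndexIT.java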
b/server/src/test/java/org/elasticsearch/indices/state/OpenCloseIndexIT.java index 05b2ae1b9cffe..e9e9108f5e8f1 100644 --- a/server/src/test/java/org/elasticsearch/indices/state/OpenCloseIndexIT.java +++ b/server/src/test/java/org/elasticsearch/indices/state/OpenCloseIndexIT.java @@ -21,7 +21,6 @@ import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse; -import org.elasticsearch.action.admin.cluster.state.ClusterStateResponse; import org.elasticsearch.action.admin.indices.open.OpenIndexResponse; import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.search.SearchResponse; @@ -46,6 +45,8 @@ import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_BLOCKS_WRITE; import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_READ_ONLY; import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_READ_ONLY_ALLOW_DELETE; +import static org.elasticsearch.indices.state.CloseIndexIT.assertIndexIsClosed; +import static org.elasticsearch.indices.state.CloseIndexIT.assertIndexIsOpened; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertBlocked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; @@ -53,7 +54,6 @@ import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; -import static org.hamcrest.Matchers.notNullValue; public class OpenCloseIndexIT extends ESIntegTestCase { public void testSimpleCloseOpen() { @@ -72,13 +72,6 @@ public void testSimpleCloseOpen() { assertIndexIsOpened("test1"); } - public void testSimpleCloseMissingIndex() { - Client client = client(); - Exception e = expectThrows(IndexNotFoundException.class, () -> - client.admin().indices().prepareClose("test1").execute().actionGet()); - assertThat(e.getMessage(), is("no such index [test1]")); - } - public void testSimpleOpenMissingIndex() { Client client = client(); Exception e = expectThrows(IndexNotFoundException.class, () -> @@ -86,27 +79,6 @@ public void testSimpleOpenMissingIndex() { assertThat(e.getMessage(), is("no such index [test1]")); } - public void testCloseOneMissingIndex() { - Client client = client(); - createIndex("test1"); - ClusterHealthResponse healthResponse = client.admin().cluster().prepareHealth().setWaitForGreenStatus().execute().actionGet(); - assertThat(healthResponse.isTimedOut(), equalTo(false)); - Exception e = expectThrows(IndexNotFoundException.class, () -> - client.admin().indices().prepareClose("test1", "test2").execute().actionGet()); - assertThat(e.getMessage(), is("no such index [test2]")); - } - - public void testCloseOneMissingIndexIgnoreMissing() { - Client client = client(); - createIndex("test1"); - ClusterHealthResponse healthResponse = client.admin().cluster().prepareHealth().setWaitForGreenStatus().execute().actionGet(); - assertThat(healthResponse.isTimedOut(), equalTo(false)); - AcknowledgedResponse closeIndexResponse = client.admin().indices().prepareClose("test1", "test2") - .setIndicesOptions(IndicesOptions.lenientExpandOpen()).execute().actionGet(); - assertThat(closeIndexResponse.isAcknowledged(), equalTo(true)); - assertIndexIsClosed("test1"); - } - public void testOpenOneMissingIndex() { Client client = client(); createIndex("test1"); @@ -200,20 +172,6 @@ public void testCloseOpenAllWildcard() { 
assertIndexIsOpened("test1", "test2", "test3"); } - public void testCloseNoIndex() { - Client client = client(); - Exception e = expectThrows(ActionRequestValidationException.class, () -> - client.admin().indices().prepareClose().execute().actionGet()); - assertThat(e.getMessage(), containsString("index is missing")); - } - - public void testCloseNullIndex() { - Client client = client(); - Exception e = expectThrows(ActionRequestValidationException.class, () -> - client.admin().indices().prepareClose((String[])null).execute().actionGet()); - assertThat(e.getMessage(), containsString("index is missing")); - } - public void testOpenNoIndex() { Client client = client(); Exception e = expectThrows(ActionRequestValidationException.class, () -> @@ -241,23 +199,6 @@ public void testOpenAlreadyOpenedIndex() { assertIndexIsOpened("test1"); } - public void testCloseAlreadyClosedIndex() { - Client client = client(); - createIndex("test1"); - ClusterHealthResponse healthResponse = client.admin().cluster().prepareHealth().setWaitForGreenStatus().execute().actionGet(); - assertThat(healthResponse.isTimedOut(), equalTo(false)); - - //closing the index - AcknowledgedResponse closeIndexResponse = client.admin().indices().prepareClose("test1").execute().actionGet(); - assertThat(closeIndexResponse.isAcknowledged(), equalTo(true)); - assertIndexIsClosed("test1"); - - //no problem if we try to close an index that's already in close state - closeIndexResponse = client.admin().indices().prepareClose("test1").execute().actionGet(); - assertThat(closeIndexResponse.isAcknowledged(), equalTo(true)); - assertIndexIsClosed("test1"); - } - public void testSimpleCloseOpenAlias() { Client client = client(); createIndex("test1"); @@ -317,23 +258,6 @@ public void testOpenWaitingForActiveShardsFailed() throws Exception { ensureGreen("test"); } - private void assertIndexIsOpened(String... indices) { - checkIndexState(IndexMetaData.State.OPEN, indices); - } - - private void assertIndexIsClosed(String... indices) { - checkIndexState(IndexMetaData.State.CLOSE, indices); - } - - private void checkIndexState(IndexMetaData.State expectedState, String... indices) { - ClusterStateResponse clusterStateResponse = client().admin().cluster().prepareState().execute().actionGet(); - for (String index : indices) { - IndexMetaData indexMetaData = clusterStateResponse.getState().metaData().indices().get(index); - assertThat(indexMetaData, notNullValue()); - assertThat(indexMetaData.getState(), equalTo(expectedState)); - } - } - public void testOpenCloseWithDocs() throws IOException, ExecutionException, InterruptedException { String mapping = Strings.toString(XContentFactory.jsonBuilder(). startObject(). diff --git a/server/src/test/java/org/elasticsearch/indices/state/ReopenWhileClosingIT.java b/server/src/test/java/org/elasticsearch/indices/state/ReopenWhileClosingIT.java new file mode 100644 index 0000000000000..083c5ab1f5510 --- /dev/null +++ b/server/src/test/java/org/elasticsearch/indices/state/ReopenWhileClosingIT.java @@ -0,0 +1,172 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.indices.state;
+
+import org.elasticsearch.action.ActionFuture;
+import org.elasticsearch.action.admin.indices.close.TransportVerifyShardBeforeCloseAction;
+import org.elasticsearch.action.support.master.AcknowledgedResponse;
+import org.elasticsearch.cluster.ClusterState;
+import org.elasticsearch.cluster.metadata.IndexMetaData;
+import org.elasticsearch.cluster.node.DiscoveryNode;
+import org.elasticsearch.common.Glob;
+import org.elasticsearch.common.Strings;
+import org.elasticsearch.common.lease.Releasable;
+import org.elasticsearch.common.util.concurrent.RunOnce;
+import org.elasticsearch.plugins.Plugin;
+import org.elasticsearch.test.ESIntegTestCase;
+import org.elasticsearch.test.transport.MockTransportService;
+import org.elasticsearch.transport.TransportService;
+
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.List;
+import java.util.concurrent.CountDownLatch;
+import java.util.concurrent.TimeUnit;
+
+import static java.util.Collections.emptySet;
+import static java.util.Collections.singletonList;
+import static org.elasticsearch.cluster.metadata.MetaDataIndexStateService.INDEX_CLOSED_BLOCK_ID;
+import static org.elasticsearch.indices.state.CloseIndexIT.assertIndexIsClosed;
+import static org.elasticsearch.indices.state.CloseIndexIT.assertIndexIsOpened;
+import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
+import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.is;
+import static org.hamcrest.Matchers.notNullValue;
+
+@ESIntegTestCase.ClusterScope(scope = ESIntegTestCase.Scope.TEST, minNumDataNodes = 2)
+public class ReopenWhileClosingIT extends ESIntegTestCase {
+
+ @Override
+ protected Collection<Class<? extends Plugin>> nodePlugins() {
+ return singletonList(MockTransportService.TestPlugin.class);
+ }
+
+ @Override
+ protected int minimumNumberOfShards() {
+ return 2;
+ }
+
+ public void testReopenDuringClose() throws Exception {
+ final String indexName = "test";
+ createIndexWithDocs(indexName);
+
+ ensureYellowAndNoInitializingShards(indexName);
+
+ final CountDownLatch block = new CountDownLatch(1);
+ final Releasable releaseBlock = interceptVerifyShardBeforeCloseActions(indexName, block::countDown);
+
+ ActionFuture<AcknowledgedResponse> closeIndexResponse = client().admin().indices().prepareClose(indexName).execute();
+ assertTrue("Waiting for index to have a closing block", block.await(60, TimeUnit.SECONDS));
+ assertIndexIsBlocked(indexName);
+ assertFalse(closeIndexResponse.isDone());
+
+ assertAcked(client().admin().indices().prepareOpen(indexName));
+
+ releaseBlock.close();
+ assertFalse(closeIndexResponse.get().isAcknowledged());
+ assertIndexIsOpened(indexName);
+ }
+
+ public void testReopenDuringCloseOnMultipleIndices() throws Exception {
+ final List<String> indices = new ArrayList<>();
+ for (int i = 0; i < randomIntBetween(2, 10); i++) {
+ indices.add("index-" + i);
+ createIndexWithDocs(indices.get(i));
+ }
+
+ ensureYellowAndNoInitializingShards(indices.toArray(Strings.EMPTY_ARRAY));
+
+ final CountDownLatch block = new CountDownLatch(1);
+ final Releasable
releaseBlock = interceptVerifyShardBeforeCloseActions(randomFrom(indices), block::countDown);
+
+ ActionFuture<AcknowledgedResponse> closeIndexResponse = client().admin().indices().prepareClose("index-*").execute();
+ assertTrue("Waiting for index to have a closing block", block.await(60, TimeUnit.SECONDS));
+ assertFalse(closeIndexResponse.isDone());
+ indices.forEach(ReopenWhileClosingIT::assertIndexIsBlocked);
+
+ final List<String> reopenedIndices = randomSubsetOf(randomIntBetween(1, indices.size()), indices);
+ assertAcked(client().admin().indices().prepareOpen(reopenedIndices.toArray(Strings.EMPTY_ARRAY)));
+
+ releaseBlock.close();
+ assertFalse(closeIndexResponse.get().isAcknowledged());
+
+ indices.forEach(index -> {
+ if (reopenedIndices.contains(index)) {
+ assertIndexIsOpened(index);
+ } else {
+ assertIndexIsClosed(index);
+ }
+ });
+ }
+
+ private void createIndexWithDocs(final String indexName) {
+ createIndex(indexName);
+ final int nbDocs = scaledRandomIntBetween(1, 100);
+ for (int i = 0; i < nbDocs; i++) {
+ index(indexName, "_doc", String.valueOf(i), "num", i);
+ }
+ assertIndexIsOpened(indexName);
+ }
+
+ /**
+ * Intercepts and blocks the {@link TransportVerifyShardBeforeCloseAction} executed for the given index pattern.
+ */
+ private Releasable interceptVerifyShardBeforeCloseActions(final String indexPattern, final Runnable onIntercept) {
+ final MockTransportService mockTransportService = (MockTransportService) internalCluster()
+ .getInstance(TransportService.class, internalCluster().getMasterName());
+
+ final CountDownLatch release = new CountDownLatch(1);
+ for (DiscoveryNode node : internalCluster().clusterService().state().getNodes()) {
+ mockTransportService.addSendBehavior(internalCluster().getInstance(TransportService.class, node.getName()),
+ (connection, requestId, action, request, options) -> {
+ if (action.startsWith(TransportVerifyShardBeforeCloseAction.NAME)) {
+ if (request instanceof TransportVerifyShardBeforeCloseAction.ShardRequest) {
+ final String index = ((TransportVerifyShardBeforeCloseAction.ShardRequest) request).shardId().getIndexName();
+ if (Glob.globMatch(indexPattern, index)) {
+ logger.info("request {} intercepted for index {}", requestId, index);
+ onIntercept.run();
+ try {
+ release.await();
+ logger.info("request {} released for index {}", requestId, index);
+ } catch (final InterruptedException e) {
+ throw new AssertionError(e);
+ }
+ }
+ }
+
+ }
+ connection.sendRequest(requestId, action, request, options);
+ });
+ }
+ final RunOnce releaseOnce = new RunOnce(release::countDown);
+ return releaseOnce::run;
+ }
+
+ private static void assertIndexIsBlocked(final String...
indices) { + final ClusterState clusterState = client().admin().cluster().prepareState().get().getState(); + for (String index : indices) { + assertThat(clusterState.metaData().indices().get(index).getState(), is(IndexMetaData.State.OPEN)); + assertThat(clusterState.routingTable().index(index), notNullValue()); + assertThat("Index " + index + " must have only 1 block with [id=" + INDEX_CLOSED_BLOCK_ID + "]", + clusterState.blocks().indices().getOrDefault(index, emptySet()).stream() + .filter(clusterBlock -> clusterBlock.id() == INDEX_CLOSED_BLOCK_ID).count(), equalTo(1L)); + } + } +} diff --git a/server/src/test/java/org/elasticsearch/indices/state/SimpleIndexStateIT.java b/server/src/test/java/org/elasticsearch/indices/state/SimpleIndexStateIT.java index 59d04c767d809..1cc2d3e68e2ae 100644 --- a/server/src/test/java/org/elasticsearch/indices/state/SimpleIndexStateIT.java +++ b/server/src/test/java/org/elasticsearch/indices/state/SimpleIndexStateIT.java @@ -26,7 +26,6 @@ import org.elasticsearch.action.admin.indices.create.CreateIndexResponse; import org.elasticsearch.action.admin.indices.open.OpenIndexResponse; import org.elasticsearch.action.support.ActiveShardCount; -import org.elasticsearch.action.support.master.AcknowledgedResponse; import org.elasticsearch.cluster.health.ClusterHealthStatus; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.routing.ShardRoutingState; @@ -35,6 +34,7 @@ import org.elasticsearch.indices.IndexClosedException; import org.elasticsearch.test.ESIntegTestCase; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.nullValue; @@ -61,8 +61,7 @@ public void testSimpleOpenClose() { client().prepareIndex("test", "type1", "1").setSource("field1", "value1").get(); logger.info("--> closing test index..."); - AcknowledgedResponse closeIndexResponse = client().admin().indices().prepareClose("test").get(); - assertThat(closeIndexResponse.isAcknowledged(), equalTo(true)); + assertAcked(client().admin().indices().prepareClose("test")); stateResponse = client().admin().cluster().prepareState().get(); assertThat(stateResponse.getState().metaData().index("test").getState(), equalTo(IndexMetaData.State.CLOSE)); @@ -103,7 +102,7 @@ public void testFastCloseAfterCreateContinuesCreateAfterOpen() { assertThat(health.isTimedOut(), equalTo(false)); assertThat(health.getStatus(), equalTo(ClusterHealthStatus.RED)); - client().admin().indices().prepareClose("test").get(); + assertAcked(client().admin().indices().prepareClose("test")); logger.info("--> updating test index settings to allow allocation"); client().admin().indices().prepareUpdateSettings("test").setSettings(Settings.builder() diff --git a/server/src/test/java/org/elasticsearch/ingest/ConfigurationUtilsTests.java b/server/src/test/java/org/elasticsearch/ingest/ConfigurationUtilsTests.java index 9111658e49ca8..20f67fd10a36d 100644 --- a/server/src/test/java/org/elasticsearch/ingest/ConfigurationUtilsTests.java +++ b/server/src/test/java/org/elasticsearch/ingest/ConfigurationUtilsTests.java @@ -21,6 +21,7 @@ import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.script.ScriptService; +import org.elasticsearch.script.TemplateScript; import org.elasticsearch.test.ESTestCase; import org.junit.Before; @@ -36,7 +37,12 @@ import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.nullValue; import static org.hamcrest.Matchers.sameInstance; 
+import static org.mockito.Matchers.any;
+import static org.mockito.Matchers.anyString;
 import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.times;
+import static org.mockito.Mockito.verify;
+import static org.mockito.Mockito.when;
 
 public class ConfigurationUtilsTests extends ESTestCase {
 
@@ -137,9 +143,9 @@ public void testReadProcessors() throws Exception {
         unknownTaggedConfig = new HashMap<>();
         unknownTaggedConfig.put("tag", "my_unknown");
         config2.add(Collections.singletonMap("unknown_processor", unknownTaggedConfig));
-        Map<String, Object> secondUnknonwTaggedConfig = new HashMap<>();
-        secondUnknonwTaggedConfig.put("tag", "my_second_unknown");
-        config2.add(Collections.singletonMap("second_unknown_processor", secondUnknonwTaggedConfig));
+        Map<String, Object> secondUnknownTaggedConfig = new HashMap<>();
+        secondUnknownTaggedConfig.put("tag", "my_second_unknown");
+        config2.add(Collections.singletonMap("second_unknown_processor", secondUnknownTaggedConfig));
         e = expectThrows(
             ElasticsearchParseException.class,
             () -> ConfigurationUtils.readProcessorConfigs(config2, scriptService, registry)
@@ -181,4 +187,27 @@ public void testReadProcessorFromObjectOrMap() throws Exception {
         assertThat(ex.getMessage(), equalTo("property isn't a map, but of type [" + invalidConfig.getClass().getName() + "]"));
     }
 
+    public void testNoScriptCompilation() {
+        ScriptService scriptService = mock(ScriptService.class);
+        when(scriptService.isLangSupported(anyString())).thenReturn(true);
+        String propertyValue = randomAlphaOfLength(10);
+        TemplateScript.Factory result;
+        result = ConfigurationUtils.compileTemplate(randomAlphaOfLength(10), randomAlphaOfLength(10), randomAlphaOfLength(10),
+            propertyValue, scriptService);
+        assertThat(result.newInstance(null).execute(), equalTo(propertyValue));
+        verify(scriptService, times(0)).compile(any(), any());
+    }
+
+    public void testScriptShouldCompile() {
+        ScriptService scriptService = mock(ScriptService.class);
+        when(scriptService.isLangSupported(anyString())).thenReturn(true);
+        String propertyValue = "{{" + randomAlphaOfLength(10) + "}}";
+        String compiledValue = randomAlphaOfLength(10);
+        when(scriptService.compile(any(), any())).thenReturn(new TestTemplateService.MockTemplateScript.Factory(compiledValue));
+        TemplateScript.Factory result;
+        result = ConfigurationUtils.compileTemplate(randomAlphaOfLength(10), randomAlphaOfLength(10), randomAlphaOfLength(10),
+            propertyValue, scriptService);
+        assertThat(result.newInstance(null).execute(), equalTo(compiledValue));
+        verify(scriptService, times(1)).compile(any(), any());
+    }
 }
diff --git a/server/src/test/java/org/elasticsearch/ingest/ValueSourceTests.java b/server/src/test/java/org/elasticsearch/ingest/ValueSourceTests.java
index 72238d3b59656..37b9956119334 100644
--- a/server/src/test/java/org/elasticsearch/ingest/ValueSourceTests.java
+++ b/server/src/test/java/org/elasticsearch/ingest/ValueSourceTests.java
@@ -19,6 +19,7 @@
 
 package org.elasticsearch.ingest;
 
+import org.elasticsearch.script.ScriptService;
 import org.elasticsearch.test.ESTestCase;
 
 import java.util.ArrayList;
@@ -30,6 +31,12 @@
 import static org.hamcrest.Matchers.equalTo;
 import static org.hamcrest.Matchers.not;
 import static org.hamcrest.Matchers.sameInstance;
+import static org.mockito.Matchers.any;
+import static org.mockito.Matchers.anyString;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.times;
+import static org.mockito.Mockito.verify;
+import static org.mockito.Mockito.when;
 
 public class ValueSourceTests extends ESTestCase {
@@ -69,4 +76,24 @@ public void testCopyDoesNotChangeProvidedList() { assertThat(myPreciousList.size(), equalTo(1)); assertThat(myPreciousList.get(0), equalTo("value")); } + + public void testNoScriptCompilation() { + ScriptService scriptService = mock(ScriptService.class); + when(scriptService.isLangSupported(anyString())).thenReturn(true); + String propertyValue = randomAlphaOfLength(10); + ValueSource result = ValueSource.wrap(propertyValue, scriptService); + assertThat(result.copyAndResolve(null), equalTo(propertyValue)); + verify(scriptService, times(0)).compile(any(), any()); + } + + public void testScriptShouldCompile() { + ScriptService scriptService = mock(ScriptService.class); + when(scriptService.isLangSupported(anyString())).thenReturn(true); + String propertyValue = "{{" + randomAlphaOfLength(10) + "}}"; + String compiledValue = randomAlphaOfLength(10); + when(scriptService.compile(any(), any())).thenReturn(new TestTemplateService.MockTemplateScript.Factory(compiledValue)); + ValueSource result = ValueSource.wrap(propertyValue, scriptService); + assertThat(result.copyAndResolve(Collections.emptyMap()), equalTo(compiledValue)); + verify(scriptService, times(1)).compile(any(), any()); + } } diff --git a/server/src/test/java/org/elasticsearch/recovery/RelocationIT.java b/server/src/test/java/org/elasticsearch/recovery/RelocationIT.java index 62208a404885b..04fb8c08a9778 100644 --- a/server/src/test/java/org/elasticsearch/recovery/RelocationIT.java +++ b/server/src/test/java/org/elasticsearch/recovery/RelocationIT.java @@ -23,9 +23,12 @@ import com.carrotsearch.hppc.procedures.IntProcedure; import org.apache.lucene.index.IndexFileNames; import org.apache.lucene.util.English; +import org.elasticsearch.action.ActionFuture; import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse; +import org.elasticsearch.action.admin.cluster.reroute.ClusterRerouteResponse; import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.search.SearchResponse; +import org.elasticsearch.action.support.WriteRequest; import org.elasticsearch.client.Client; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.IndexMetaData; @@ -506,6 +509,97 @@ public void testIndexAndRelocateConcurrently() throws ExecutionException, Interr } + public void testRelocateWhileWaitingForRefresh() { + logger.info("--> starting [node1] ..."); + final String node1 = internalCluster().startNode(); + + logger.info("--> creating test index ..."); + prepareCreate("test", Settings.builder() + .put("index.number_of_shards", 1) + .put("index.number_of_replicas", 0) + .put("index.refresh_interval", -1) // we want to control refreshes + ).get(); + + logger.info("--> index 10 docs"); + for (int i = 0; i < 10; i++) { + client().prepareIndex("test", "type", Integer.toString(i)).setSource("field", "value" + i).execute().actionGet(); + } + logger.info("--> flush so we have an actual index"); + client().admin().indices().prepareFlush().execute().actionGet(); + logger.info("--> index more docs so we have something in the translog"); + for (int i = 10; i < 20; i++) { + client().prepareIndex("test", "type", Integer.toString(i)).setRefreshPolicy(WriteRequest.RefreshPolicy.WAIT_UNTIL) + .setSource("field", "value" + i).execute(); + } + + logger.info("--> start another node"); + final String node2 = internalCluster().startNode(); + ClusterHealthResponse clusterHealthResponse = client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID) + 
.setWaitForNodes("2").execute().actionGet();
+        assertThat(clusterHealthResponse.isTimedOut(), equalTo(false));
+
+        logger.info("--> relocate the shard from node1 to node2");
+        client().admin().cluster().prepareReroute()
+            .add(new MoveAllocationCommand("test", 0, node1, node2))
+            .execute().actionGet();
+
+        clusterHealthResponse = client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID)
+            .setWaitForNoRelocatingShards(true).setTimeout(ACCEPTABLE_RELOCATION_TIME).execute().actionGet();
+        assertThat(clusterHealthResponse.isTimedOut(), equalTo(false));
+
+        logger.info("--> verifying count");
+        client().admin().indices().prepareRefresh().execute().actionGet();
+        assertThat(client().prepareSearch("test").setSize(0).execute().actionGet().getHits().getTotalHits().value, equalTo(20L));
+    }
+
+    public void testRelocateWhileContinuouslyIndexingAndWaitingForRefresh() {
+        logger.info("--> starting [node1] ...");
+        final String node1 = internalCluster().startNode();
+
+        logger.info("--> creating test index ...");
+        prepareCreate("test", Settings.builder()
+            .put("index.number_of_shards", 1)
+            .put("index.number_of_replicas", 0)
+            .put("index.refresh_interval", -1) // we want to control refreshes
+        ).get();
+
+        logger.info("--> index 10 docs");
+        for (int i = 0; i < 10; i++) {
+            client().prepareIndex("test", "type", Integer.toString(i)).setSource("field", "value" + i).execute().actionGet();
+        }
+        logger.info("--> flush so we have an actual index");
+        client().admin().indices().prepareFlush().execute().actionGet();
+        logger.info("--> index more docs so we have something in the translog");
+        for (int i = 10; i < 20; i++) {
+            client().prepareIndex("test", "type", Integer.toString(i)).setRefreshPolicy(WriteRequest.RefreshPolicy.WAIT_UNTIL)
+                .setSource("field", "value" + i).execute();
+        }
+
+        logger.info("--> start another node");
+        final String node2 = internalCluster().startNode();
+        ClusterHealthResponse clusterHealthResponse = client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID)
+            .setWaitForNodes("2").execute().actionGet();
+        assertThat(clusterHealthResponse.isTimedOut(), equalTo(false));
+
+        logger.info("--> relocate the shard from node1 to node2");
+        ActionFuture<ClusterRerouteResponse> relocationListener = client().admin().cluster().prepareReroute()
+            .add(new MoveAllocationCommand("test", 0, node1, node2))
+            .execute();
+        logger.info("--> index 100 docs while relocating");
+        for (int i = 20; i < 120; i++) {
+            client().prepareIndex("test", "type", Integer.toString(i)).setRefreshPolicy(WriteRequest.RefreshPolicy.WAIT_UNTIL)
+                .setSource("field", "value" + i).execute();
+        }
+        relocationListener.actionGet();
+        clusterHealthResponse = client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID)
+            .setWaitForNoRelocatingShards(true).setTimeout(ACCEPTABLE_RELOCATION_TIME).execute().actionGet();
+        assertThat(clusterHealthResponse.isTimedOut(), equalTo(false));
+
+        logger.info("--> verifying count");
+        client().admin().indices().prepareRefresh().execute().actionGet();
+        assertThat(client().prepareSearch("test").setSize(0).execute().actionGet().getHits().getTotalHits().value, equalTo(120L));
+    }
+
     class RecoveryCorruption implements StubbableTransport.SendRequestBehavior {
 
         private final CountDownLatch corruptionCount;
diff --git a/server/src/test/java/org/elasticsearch/rest/BytesRestResponseTests.java b/server/src/test/java/org/elasticsearch/rest/BytesRestResponseTests.java
index a80c3b1bd4238..29a7944f58792 100644
---
a/server/src/test/java/org/elasticsearch/rest/BytesRestResponseTests.java +++ b/server/src/test/java/org/elasticsearch/rest/BytesRestResponseTests.java @@ -153,10 +153,14 @@ public void testConvert() throws IOException { new SearchShardTarget("node_1", new Index("foo", "_na_"), 1, null)); ShardSearchFailure failure1 = new ShardSearchFailure(new ParsingException(1, 2, "foobar", null), new SearchShardTarget("node_1", new Index("foo", "_na_"), 2, null)); - SearchPhaseExecutionException ex = new SearchPhaseExecutionException("search", "all shards failed", new ShardSearchFailure[] {failure, failure1}); + SearchPhaseExecutionException ex = new SearchPhaseExecutionException("search", "all shards failed", + new ShardSearchFailure[] {failure, failure1}); BytesRestResponse response = new BytesRestResponse(channel, new RemoteTransportException("foo", ex)); String text = response.content().utf8ToString(); - String expected = "{\"error\":{\"root_cause\":[{\"type\":\"parsing_exception\",\"reason\":\"foobar\",\"line\":1,\"col\":2}],\"type\":\"search_phase_execution_exception\",\"reason\":\"all shards failed\",\"phase\":\"search\",\"grouped\":true,\"failed_shards\":[{\"shard\":1,\"index\":\"foo\",\"node\":\"node_1\",\"reason\":{\"type\":\"parsing_exception\",\"reason\":\"foobar\",\"line\":1,\"col\":2}}]},\"status\":400}"; + String expected = "{\"error\":{\"root_cause\":[{\"type\":\"parsing_exception\",\"reason\":\"foobar\",\"line\":1,\"col\":2}]," + + "\"type\":\"search_phase_execution_exception\",\"reason\":\"all shards failed\",\"phase\":\"search\",\"grouped\":true," + + "\"failed_shards\":[{\"shard\":1,\"index\":\"foo\",\"node\":\"node_1\",\"reason\":{\"type\":\"parsing_exception\"," + + "\"reason\":\"foobar\",\"line\":1,\"col\":2}}]},\"status\":400}"; assertEquals(expected.trim(), text.trim()); String stackTrace = ExceptionsHelper.stackTrace(ex); assertTrue(stackTrace.contains("Caused by: ParsingException[foobar]")); diff --git a/server/src/test/java/org/elasticsearch/rest/action/admin/indices/RestGetFieldMappingActionTests.java b/server/src/test/java/org/elasticsearch/rest/action/admin/indices/RestGetFieldMappingActionTests.java new file mode 100644 index 0000000000000..dcf4237ae07bf --- /dev/null +++ b/server/src/test/java/org/elasticsearch/rest/action/admin/indices/RestGetFieldMappingActionTests.java @@ -0,0 +1,61 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */
+
+package org.elasticsearch.rest.action.admin.indices;
+
+import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.common.util.concurrent.ThreadContext;
+import org.elasticsearch.rest.RestRequest;
+import org.elasticsearch.rest.RestStatus;
+import org.elasticsearch.test.rest.FakeRestChannel;
+import org.elasticsearch.test.rest.FakeRestRequest;
+import org.elasticsearch.test.rest.RestActionTestCase;
+import org.junit.Before;
+
+import java.util.HashMap;
+import java.util.Map;
+
+import static org.elasticsearch.rest.BaseRestHandler.INCLUDE_TYPE_NAME_PARAMETER;
+
+public class RestGetFieldMappingActionTests extends RestActionTestCase {
+
+    @Before
+    public void setUpAction() {
+        new RestGetFieldMappingAction(Settings.EMPTY, controller());
+    }
+
+    public void testTypeInPath() {
+        // Test that specifying a type while setting include_type_name to false
+        // results in an illegal argument exception.
+        Map<String, String> params = new HashMap<>();
+        params.put(INCLUDE_TYPE_NAME_PARAMETER, "false");
+        RestRequest request = new FakeRestRequest.Builder(xContentRegistry())
+            .withMethod(RestRequest.Method.GET)
+            .withPath("some_index/some_type/_mapping/field/some_field")
+            .withParams(params)
+            .build();
+
+        FakeRestChannel channel = new FakeRestChannel(request, false, 1);
+        ThreadContext threadContext = new ThreadContext(Settings.EMPTY);
+        controller().dispatchRequest(request, channel, threadContext);
+
+        assertEquals(1, channel.errors().get());
+        assertEquals(RestStatus.BAD_REQUEST, channel.capturedResponse().status());
+    }
+}
diff --git a/server/src/test/java/org/elasticsearch/rest/action/admin/indices/RestGetIndicesActionTests.java b/server/src/test/java/org/elasticsearch/rest/action/admin/indices/RestGetIndicesActionTests.java
new file mode 100644
index 0000000000000..5f157cd298d4d
--- /dev/null
+++ b/server/src/test/java/org/elasticsearch/rest/action/admin/indices/RestGetIndicesActionTests.java
@@ -0,0 +1,61 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.rest.action.admin.indices;
+
+import org.elasticsearch.client.node.NodeClient;
+import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.rest.RestController;
+import org.elasticsearch.rest.RestRequest;
+import org.elasticsearch.test.ESTestCase;
+import org.elasticsearch.test.rest.FakeRestRequest;
+
+import java.io.IOException;
+import java.util.HashMap;
+import java.util.Map;
+
+import static org.elasticsearch.rest.BaseRestHandler.INCLUDE_TYPE_NAME_PARAMETER;
+import static org.mockito.Mockito.mock;
+
+public class RestGetIndicesActionTests extends ESTestCase {
+
+    /**
+     * Test that setting the "include_type_name" parameter raises a warning
+     */
+    public void testIncludeTypeNamesWarning() throws IOException {
+        Map<String, String> params = new HashMap<>();
+        params.put(INCLUDE_TYPE_NAME_PARAMETER, randomFrom("true", "false"));
+        RestRequest request = new FakeRestRequest.Builder(xContentRegistry())
+            .withMethod(RestRequest.Method.GET)
+            .withPath("/some_index")
+            .withParams(params)
+            .build();
+
+        RestGetIndicesAction handler = new RestGetIndicesAction(Settings.EMPTY, mock(RestController.class));
+        handler.prepareRequest(request, mock(NodeClient.class));
+        assertWarnings(RestGetIndicesAction.TYPES_DEPRECATION_MESSAGE);
+
+        // the same request without the parameter should pass without warning
+        request = new FakeRestRequest.Builder(xContentRegistry())
+            .withMethod(RestRequest.Method.GET)
+            .withPath("/some_index")
+            .build();
+        handler.prepareRequest(request, mock(NodeClient.class));
+    }
+}
diff --git a/server/src/test/java/org/elasticsearch/rest/action/admin/indices/RestPutIndexTemplateActionTests.java b/server/src/test/java/org/elasticsearch/rest/action/admin/indices/RestPutIndexTemplateActionTests.java
new file mode 100644
index 0000000000000..ac0eb8f0d81a6
--- /dev/null
+++ b/server/src/test/java/org/elasticsearch/rest/action/admin/indices/RestPutIndexTemplateActionTests.java
@@ -0,0 +1,81 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.rest.action.admin.indices;
+
+import org.elasticsearch.common.bytes.BytesReference;
+import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.common.xcontent.XContentBuilder;
+import org.elasticsearch.common.xcontent.XContentFactory;
+import org.elasticsearch.common.xcontent.XContentHelper;
+import org.elasticsearch.common.xcontent.XContentType;
+import org.elasticsearch.rest.RestRequest;
+import org.elasticsearch.test.rest.FakeRestRequest;
+import org.elasticsearch.test.rest.RestActionTestCase;
+import org.junit.Before;
+
+import java.io.IOException;
+import java.util.Map;
+
+public class RestPutIndexTemplateActionTests extends RestActionTestCase {
+    private RestPutIndexTemplateAction action;
+
+    @Before
+    public void setUpAction() {
+        action = new RestPutIndexTemplateAction(Settings.EMPTY, controller());
+    }
+
+    public void testPrepareTypelessRequest() throws IOException {
+        XContentBuilder content = XContentFactory.jsonBuilder().startObject()
+            .startObject("mappings")
+                .startObject("properties")
+                    .startObject("field").field("type", "keyword").endObject()
+                .endObject()
+            .endObject()
+            .startObject("aliases")
+                .startObject("read_alias").endObject()
+            .endObject()
+            .endObject();
+
+        RestRequest request = new FakeRestRequest.Builder(xContentRegistry())
+            .withMethod(RestRequest.Method.PUT)
+            .withPath("/_template/_some_template")
+            .withContent(BytesReference.bytes(content), XContentType.JSON)
+            .build();
+        boolean includeTypeName = false;
+        Map<String, Object> source = action.prepareRequestSource(request, includeTypeName);
+
+        XContentBuilder expectedContent = XContentFactory.jsonBuilder().startObject()
+            .startObject("mappings")
+                .startObject("_doc")
+                    .startObject("properties")
+                        .startObject("field").field("type", "keyword").endObject()
+                    .endObject()
+                .endObject()
+            .endObject()
+            .startObject("aliases")
+                .startObject("read_alias").endObject()
+            .endObject()
+            .endObject();
+        Map<String, Object> expectedContentAsMap = XContentHelper.convertToMap(
+            BytesReference.bytes(expectedContent), true, expectedContent.contentType()).v2();
+
+        assertEquals(expectedContentAsMap, source);
+    }
+}
diff --git a/server/src/test/java/org/elasticsearch/rest/action/document/RestBulkActionTests.java b/server/src/test/java/org/elasticsearch/rest/action/document/RestBulkActionTests.java
index 5011946914a50..b19c8c6412abc 100644
--- a/server/src/test/java/org/elasticsearch/rest/action/document/RestBulkActionTests.java
+++ b/server/src/test/java/org/elasticsearch/rest/action/document/RestBulkActionTests.java
@@ -51,7 +51,7 @@ public void testBulkPipelineUpsert() throws Exception {
         new RestBulkAction(settings(Version.CURRENT).build(), mock(RestController.class))
             .handleRequest(
                 new FakeRestRequest.Builder(
-                    xContentRegistry()).withPath("my_index/my_type/_bulk").withParams(params)
+                    xContentRegistry()).withPath("my_index/_bulk").withParams(params)
                     .withContent(
                         new BytesArray(
                             "{\"index\":{\"_id\":\"1\"}}\n" +
diff --git a/server/src/test/java/org/elasticsearch/routing/AliasRoutingIT.java b/server/src/test/java/org/elasticsearch/routing/AliasRoutingIT.java
index eceef54ccac8a..1919b7e6a9ae4 100644
--- a/server/src/test/java/org/elasticsearch/routing/AliasRoutingIT.java
+++ b/server/src/test/java/org/elasticsearch/routing/AliasRoutingIT.java
@@ -71,7 +71,12 @@ public void testAliasCrudRouting() throws Exception {
         .execute().actionGet();
         for (int i = 0; i < 5; i++) {
             assertThat(client().prepareGet("alias0", "type1", "1").execute().actionGet().isExists(), equalTo(true));
-
assertThat(client().prepareGet("alias0", "type1", "1").execute().actionGet().getSourceAsMap().get("field").toString(), equalTo("value2")); + assertThat(client().prepareGet("alias0", "type1", "1") + .execute() + .actionGet() + .getSourceAsMap() + .get("field") + .toString(), equalTo("value2")); } @@ -108,10 +113,10 @@ public void testAliasSearchRouting() throws Exception { createIndex("test"); ensureGreen(); assertAcked(admin().indices().prepareAliases() - .addAliasAction(AliasActions.add().index("test").alias("alias")) - .addAliasAction(AliasActions.add().index("test").alias("alias0").routing("0")) - .addAliasAction(AliasActions.add().index("test").alias("alias1").routing("1")) - .addAliasAction(AliasActions.add().index("test").alias("alias01").searchRouting("0,1"))); + .addAliasAction(AliasActions.add().index("test").alias("alias")) + .addAliasAction(AliasActions.add().index("test").alias("alias0").routing("0")) + .addAliasAction(AliasActions.add().index("test").alias("alias1").routing("1")) + .addAliasAction(AliasActions.add().index("test").alias("alias01").searchRouting("0,1"))); logger.info("--> indexing with id [1], and routing [0] using alias"); client().prepareIndex("alias0", "type1", "1").setSource("field", "value1").setRefreshPolicy(RefreshPolicy.IMMEDIATE).get(); @@ -126,23 +131,80 @@ public void testAliasSearchRouting() throws Exception { logger.info("--> search with no routing, should fine one"); for (int i = 0; i < 5; i++) { - assertThat(client().prepareSearch().setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().getTotalHits().value, equalTo(1L)); + assertThat(client().prepareSearch() + .setQuery(QueryBuilders.matchAllQuery()) + .execute() + .actionGet() + .getHits() + .getTotalHits().value, equalTo(1L)); } logger.info("--> search with wrong routing, should not find"); for (int i = 0; i < 5; i++) { - assertThat(client().prepareSearch().setRouting("1").setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().getTotalHits().value, equalTo(0L)); - assertThat(client().prepareSearch().setSize(0).setRouting("1").setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().getTotalHits().value, equalTo(0L)); - assertThat(client().prepareSearch("alias1").setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().getTotalHits().value, equalTo(0L)); - assertThat(client().prepareSearch("alias1").setSize(0).setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().getTotalHits().value, equalTo(0L)); + assertThat(client().prepareSearch() + .setRouting("1") + .setQuery(QueryBuilders.matchAllQuery()) + .execute() + .actionGet() + .getHits() + .getTotalHits().value, equalTo(0L)); + + assertThat(client().prepareSearch() + .setSize(0) + .setRouting("1") + .setQuery(QueryBuilders.matchAllQuery()) + .execute() + .actionGet() + .getHits() + .getTotalHits().value, equalTo(0L)); + + assertThat(client().prepareSearch("alias1") + .setQuery(QueryBuilders.matchAllQuery()) + .execute() + .actionGet() + .getHits() + .getTotalHits().value, equalTo(0L)); + + assertThat(client().prepareSearch("alias1") + .setSize(0) + .setQuery(QueryBuilders.matchAllQuery()) + .execute() + .actionGet() + .getHits() + .getTotalHits().value, equalTo(0L)); } logger.info("--> search with correct routing, should find"); for (int i = 0; i < 5; i++) { - assertThat(client().prepareSearch().setRouting("0").setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().getTotalHits().value, equalTo(1L)); - 
assertThat(client().prepareSearch().setSize(0).setRouting("0").setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().getTotalHits().value, equalTo(1L)); - assertThat(client().prepareSearch("alias0").setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().getTotalHits().value, equalTo(1L)); - assertThat(client().prepareSearch("alias0").setSize(0).setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().getTotalHits().value, equalTo(1L)); + + assertThat(client().prepareSearch() + .setRouting("0") + .setQuery(QueryBuilders.matchAllQuery()) + .execute() + .actionGet() + .getHits() + .getTotalHits().value, equalTo(1L)); + assertThat(client().prepareSearch() + .setSize(0) + .setRouting("0") + .setQuery(QueryBuilders.matchAllQuery()) + .execute() + .actionGet() + .getHits() + .getTotalHits().value, equalTo(1L)); + assertThat(client().prepareSearch("alias0") + .setQuery(QueryBuilders.matchAllQuery()) + .execute() + .actionGet() + .getHits() + .getTotalHits().value, equalTo(1L)); + assertThat(client().prepareSearch("alias0") + .setSize(0) + .setQuery(QueryBuilders.matchAllQuery()) + .execute() + .actionGet() + .getHits() + .getTotalHits().value, equalTo(1L)); } logger.info("--> indexing with id [2], and routing [1] using alias"); @@ -150,50 +212,166 @@ public void testAliasSearchRouting() throws Exception { logger.info("--> search with no routing, should fine two"); for (int i = 0; i < 5; i++) { - assertThat(client().prepareSearch().setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().getTotalHits().value, equalTo(2L)); - assertThat(client().prepareSearch().setSize(0).setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().getTotalHits().value, equalTo(2L)); + assertThat(client().prepareSearch() + .setQuery(QueryBuilders.matchAllQuery()) + .execute() + .actionGet() + .getHits() + .getTotalHits().value, equalTo(2L)); + assertThat(client().prepareSearch() + .setSize(0) + .setQuery(QueryBuilders.matchAllQuery()) + .execute() + .actionGet() + .getHits() + .getTotalHits().value, equalTo(2L)); } logger.info("--> search with 0 routing, should find one"); for (int i = 0; i < 5; i++) { - assertThat(client().prepareSearch().setRouting("0").setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().getTotalHits().value, equalTo(1L)); - assertThat(client().prepareSearch().setSize(0).setRouting("0").setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().getTotalHits().value, equalTo(1L)); - assertThat(client().prepareSearch("alias0").setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().getTotalHits().value, equalTo(1L)); - assertThat(client().prepareSearch("alias0").setSize(0).setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().getTotalHits().value, equalTo(1L)); + assertThat(client().prepareSearch() + .setRouting("0") + .setQuery(QueryBuilders.matchAllQuery()) + .execute() + .actionGet() + .getHits() + .getTotalHits().value, equalTo(1L)); + assertThat(client().prepareSearch() + .setSize(0) + .setRouting("0") + .setQuery(QueryBuilders.matchAllQuery()) + .execute() + .actionGet() + .getHits() + .getTotalHits().value, equalTo(1L)); + assertThat(client().prepareSearch("alias0") + .setQuery(QueryBuilders.matchAllQuery()) + .execute() + .actionGet() + .getHits() + .getTotalHits().value, equalTo(1L)); + assertThat(client().prepareSearch("alias0") + .setSize(0) + .setQuery(QueryBuilders.matchAllQuery()) + .execute() + .actionGet() + .getHits() + .getTotalHits().value, 
equalTo(1L)); } logger.info("--> search with 1 routing, should find one"); for (int i = 0; i < 5; i++) { - assertThat(client().prepareSearch().setRouting("1").setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().getTotalHits().value, equalTo(1L)); - assertThat(client().prepareSearch().setSize(0).setRouting("1").setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().getTotalHits().value, equalTo(1L)); - assertThat(client().prepareSearch("alias1").setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().getTotalHits().value, equalTo(1L)); - assertThat(client().prepareSearch("alias1").setSize(0).setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().getTotalHits().value, equalTo(1L)); + assertThat(client().prepareSearch() + .setRouting("1") + .setQuery(QueryBuilders.matchAllQuery()) + .execute() + .actionGet() + .getHits() + .getTotalHits().value, equalTo(1L)); + assertThat(client().prepareSearch() + .setSize(0) + .setRouting("1") + .setQuery(QueryBuilders.matchAllQuery()) + .execute() + .actionGet() + .getHits() + .getTotalHits().value, equalTo(1L)); + assertThat(client().prepareSearch("alias1") + .setQuery(QueryBuilders.matchAllQuery()) + .execute() + .actionGet() + .getHits() + .getTotalHits().value, equalTo(1L)); + assertThat(client().prepareSearch("alias1") + .setSize(0) + .setQuery(QueryBuilders.matchAllQuery()) + .execute() + .actionGet() + .getHits() + .getTotalHits().value, equalTo(1L)); } logger.info("--> search with 0,1 indexRoutings , should find two"); for (int i = 0; i < 5; i++) { - assertThat(client().prepareSearch().setRouting("0", "1").setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().getTotalHits().value, equalTo(2L)); - assertThat(client().prepareSearch().setSize(0).setRouting("0", "1").setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().getTotalHits().value, equalTo(2L)); - assertThat(client().prepareSearch("alias01").setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().getTotalHits().value, equalTo(2L)); - assertThat(client().prepareSearch("alias01").setSize(0).setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().getTotalHits().value, equalTo(2L)); + assertThat(client().prepareSearch() + .setRouting("0", "1") + .setQuery(QueryBuilders.matchAllQuery()) + .execute() + .actionGet() + .getHits() + .getTotalHits().value, equalTo(2L)); + assertThat(client().prepareSearch() + .setSize(0) + .setRouting("0", "1") + .setQuery(QueryBuilders.matchAllQuery()) + .execute() + .actionGet() + .getHits() + .getTotalHits().value, equalTo(2L)); + assertThat(client().prepareSearch("alias01") + .setQuery(QueryBuilders.matchAllQuery()) + .execute() + .actionGet() + .getHits() + .getTotalHits().value, equalTo(2L)); + assertThat(client().prepareSearch("alias01") + .setSize(0) + .setQuery(QueryBuilders.matchAllQuery()) + .execute() + .actionGet() + .getHits() + .getTotalHits().value, equalTo(2L)); } logger.info("--> search with two routing aliases , should find two"); for (int i = 0; i < 5; i++) { - assertThat(client().prepareSearch("alias0", "alias1").setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().getTotalHits().value, equalTo(2L)); - assertThat(client().prepareSearch("alias0", "alias1").setSize(0).setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().getTotalHits().value, equalTo(2L)); + assertThat(client().prepareSearch("alias0", "alias1") + .setQuery(QueryBuilders.matchAllQuery()) + .execute() + 
.actionGet() + .getHits() + .getTotalHits().value, equalTo(2L)); + assertThat(client().prepareSearch("alias0", "alias1") + .setSize(0) + .setQuery(QueryBuilders.matchAllQuery()) + .execute() + .actionGet() + .getHits() + .getTotalHits().value, equalTo(2L)); } logger.info("--> search with alias0, alias1 and alias01, should find two"); for (int i = 0; i < 5; i++) { - assertThat(client().prepareSearch("alias0", "alias1", "alias01").setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().getTotalHits().value, equalTo(2L)); - assertThat(client().prepareSearch("alias0", "alias1", "alias01").setSize(0).setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().getTotalHits().value, equalTo(2L)); + assertThat(client().prepareSearch("alias0", "alias1", "alias01") + .setQuery(QueryBuilders.matchAllQuery()) + .execute() + .actionGet() + .getHits() + .getTotalHits().value, equalTo(2L)); + assertThat(client().prepareSearch("alias0", "alias1", "alias01") + .setSize(0) + .setQuery(QueryBuilders.matchAllQuery()) + .execute() + .actionGet() + .getHits() + .getTotalHits().value, equalTo(2L)); } logger.info("--> search with test, alias0 and alias1, should find two"); for (int i = 0; i < 5; i++) { - assertThat(client().prepareSearch("test", "alias0", "alias1").setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().getTotalHits().value, equalTo(2L)); - assertThat(client().prepareSearch("test", "alias0", "alias1").setSize(0).setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().getTotalHits().value, equalTo(2L)); + assertThat(client().prepareSearch("test", "alias0", "alias1") + .setQuery(QueryBuilders.matchAllQuery()) + .execute() + .actionGet() + .getHits() + .getTotalHits().value, equalTo(2L)); + assertThat(client().prepareSearch("test", "alias0", "alias1") + .setSize(0) + .setQuery(QueryBuilders.matchAllQuery()) + .execute() + .actionGet() + .getHits() + .getTotalHits().value, equalTo(2L)); } } @@ -210,12 +388,12 @@ public void testAliasSearchRoutingWithTwoIndices() throws Exception { createIndex("test-b"); ensureGreen(); assertAcked(admin().indices().prepareAliases() - .addAliasAction(AliasActions.add().index("test-a").alias("alias-a0").routing("0")) - .addAliasAction(AliasActions.add().index("test-a").alias("alias-a1").routing("1")) - .addAliasAction(AliasActions.add().index("test-b").alias("alias-b0").routing("0")) - .addAliasAction(AliasActions.add().index("test-b").alias("alias-b1").routing("1")) - .addAliasAction(AliasActions.add().index("test-a").alias("alias-ab").searchRouting("0")) - .addAliasAction(AliasActions.add().index("test-b").alias("alias-ab").searchRouting("1"))); + .addAliasAction(AliasActions.add().index("test-a").alias("alias-a0").routing("0")) + .addAliasAction(AliasActions.add().index("test-a").alias("alias-a1").routing("1")) + .addAliasAction(AliasActions.add().index("test-b").alias("alias-b0").routing("0")) + .addAliasAction(AliasActions.add().index("test-b").alias("alias-b1").routing("1")) + .addAliasAction(AliasActions.add().index("test-a").alias("alias-ab").searchRouting("0")) + .addAliasAction(AliasActions.add().index("test-b").alias("alias-ab").searchRouting("1"))); ensureGreen(); // wait for events again to make sure we got the aliases on all nodes logger.info("--> indexing with id [1], and routing [0] using alias to test-a"); client().prepareIndex("alias-a0", "type1", "1").setSource("field", "value1").setRefreshPolicy(RefreshPolicy.IMMEDIATE).get(); @@ -242,20 +420,53 @@ public void 
testAliasSearchRoutingWithTwoIndices() throws Exception { logger.info("--> search with alias-a1,alias-b0, should not find"); for (int i = 0; i < 5; i++) { - assertThat(client().prepareSearch("alias-a1", "alias-b0").setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().getTotalHits().value, equalTo(0L)); - assertThat(client().prepareSearch("alias-a1", "alias-b0").setSize(0).setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().getTotalHits().value, equalTo(0L)); + assertThat(client().prepareSearch("alias-a1", "alias-b0") + .setQuery(QueryBuilders.matchAllQuery()) + .execute() + .actionGet() + .getHits() + .getTotalHits().value, equalTo(0L)); + assertThat(client().prepareSearch("alias-a1", "alias-b0") + .setSize(0) + .setQuery(QueryBuilders.matchAllQuery()) + .execute() + .actionGet() + .getHits() + .getTotalHits().value, equalTo(0L)); } logger.info("--> search with alias-ab, should find two"); for (int i = 0; i < 5; i++) { - assertThat(client().prepareSearch("alias-ab").setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().getTotalHits().value, equalTo(2L)); - assertThat(client().prepareSearch("alias-ab").setSize(0).setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().getTotalHits().value, equalTo(2L)); + assertThat(client().prepareSearch("alias-ab") + .setQuery(QueryBuilders.matchAllQuery()) + .execute() + .actionGet() + .getHits() + .getTotalHits().value, equalTo(2L)); + assertThat(client().prepareSearch("alias-ab") + .setSize(0) + .setQuery(QueryBuilders.matchAllQuery()) + .execute() + .actionGet() + .getHits() + .getTotalHits().value, equalTo(2L)); } logger.info("--> search with alias-a0,alias-b1 should find two"); for (int i = 0; i < 5; i++) { - assertThat(client().prepareSearch("alias-a0", "alias-b1").setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().getTotalHits().value, equalTo(2L)); - assertThat(client().prepareSearch("alias-a0", "alias-b1").setSize(0).setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().getTotalHits().value, equalTo(2L)); + assertThat(client().prepareSearch("alias-a0", "alias-b1") + .setQuery(QueryBuilders.matchAllQuery()) + .execute() + .actionGet() + .getHits() + .getTotalHits().value, equalTo(2L)); + assertThat(client().prepareSearch("alias-a0", "alias-b1") + .setSize(0) + .setQuery(QueryBuilders.matchAllQuery()) + .execute() + .actionGet() + .getHits() + .getTotalHits().value, equalTo(2L)); } } @@ -269,7 +480,7 @@ public void testAliasSearchRoutingWithConcreteAndAliasedIndices_issue2682() thro createIndex("index", "index_2"); ensureGreen(); assertAcked(admin().indices().prepareAliases() - .addAliasAction(AliasActions.add().index("index").alias("index_1").routing("1"))); + .addAliasAction(AliasActions.add().index("index").alias("index_1").routing("1"))); logger.info("--> indexing on index_1 which is an alias for index with routing [1]"); client().prepareIndex("index_1", "type1", "1").setSource("field", "value1").setRefreshPolicy(RefreshPolicy.IMMEDIATE).get(); @@ -279,7 +490,12 @@ public void testAliasSearchRoutingWithConcreteAndAliasedIndices_issue2682() thro logger.info("--> search all on index_* should find two"); for (int i = 0; i < 5; i++) { - assertThat(client().prepareSearch("index_*").setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().getTotalHits().value, equalTo(2L)); + assertThat(client().prepareSearch("index_*") + .setQuery(QueryBuilders.matchAllQuery()) + .execute() + .actionGet() + .getHits() + 
.getTotalHits().value, equalTo(2L)); } } @@ -294,14 +510,19 @@ public void testAliasSearchRoutingWithConcreteAndAliasedIndices_issue3268() thro createIndex("index", "index_2"); ensureGreen(); assertAcked(admin().indices().prepareAliases() - .addAliasAction(AliasActions.add().index("index").alias("index_1").routing("1"))); + .addAliasAction(AliasActions.add().index("index").alias("index_1").routing("1"))); logger.info("--> indexing on index_1 which is an alias for index with routing [1]"); client().prepareIndex("index_1", "type1", "1").setSource("field", "value1").setRefreshPolicy(RefreshPolicy.IMMEDIATE).get(); logger.info("--> indexing on index_2 which is a concrete index"); client().prepareIndex("index_2", "type2", "2").setSource("field", "value2").setRefreshPolicy(RefreshPolicy.IMMEDIATE).get(); - SearchResponse searchResponse = client().prepareSearch("index_*").setSearchType(SearchType.QUERY_THEN_FETCH).setSize(1).setQuery(QueryBuilders.matchAllQuery()).execute().actionGet(); + SearchResponse searchResponse = client().prepareSearch("index_*") + .setSearchType(SearchType.QUERY_THEN_FETCH) + .setSize(1) + .setQuery(QueryBuilders.matchAllQuery()) + .execute() + .actionGet(); logger.info("--> search all on index_* should find two"); assertThat(searchResponse.getHits().getTotalHits().value, equalTo(2L)); @@ -315,7 +536,7 @@ public void testIndexingAliasesOverTime() throws Exception { ensureGreen(); logger.info("--> creating alias with routing [3]"); assertAcked(admin().indices().prepareAliases() - .addAliasAction(AliasActions.add().index("test").alias("alias").routing("3"))); + .addAliasAction(AliasActions.add().index("test").alias("alias").routing("3"))); logger.info("--> indexing with id [0], and routing [3]"); client().prepareIndex("alias", "type1", "0").setSource("field", "value1").setRefreshPolicy(RefreshPolicy.IMMEDIATE).get(); @@ -324,23 +545,45 @@ public void testIndexingAliasesOverTime() throws Exception { logger.info("--> verifying get and search with routing, should find"); for (int i = 0; i < 5; i++) { assertThat(client().prepareGet("test", "type1", "0").setRouting("3").execute().actionGet().isExists(), equalTo(true)); - assertThat(client().prepareSearch("alias").setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().getTotalHits().value, equalTo(1L)); - assertThat(client().prepareSearch("alias").setSize(0).setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().getTotalHits().value, equalTo(1L)); + assertThat(client().prepareSearch("alias") + .setQuery(QueryBuilders.matchAllQuery()) + .execute() + .actionGet() + .getHits() + .getTotalHits().value, equalTo(1L)); + assertThat(client().prepareSearch("alias") + .setSize(0) + .setQuery(QueryBuilders.matchAllQuery()) + .execute() + .actionGet() + .getHits() + .getTotalHits().value, equalTo(1L)); } logger.info("--> creating alias with routing [4]"); assertAcked(admin().indices().prepareAliases() - .addAliasAction(AliasActions.add().index("test").alias("alias").routing("4"))); + .addAliasAction(AliasActions.add().index("test").alias("alias").routing("4"))); logger.info("--> verifying search with wrong routing should not find"); for (int i = 0; i < 5; i++) { - assertThat(client().prepareSearch("alias").setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().getTotalHits().value, equalTo(0L)); - assertThat(client().prepareSearch("alias").setSize(0).setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().getTotalHits().value, equalTo(0L)); + 
assertThat(client().prepareSearch("alias") + .setQuery(QueryBuilders.matchAllQuery()) + .execute() + .actionGet() + .getHits() + .getTotalHits().value, equalTo(0L)); + assertThat(client().prepareSearch("alias") + .setSize(0) + .setQuery(QueryBuilders.matchAllQuery()) + .execute() + .actionGet() + .getHits() + .getTotalHits().value, equalTo(0L)); } logger.info("--> creating alias with search routing [3,4] and index routing 4"); assertAcked(client().admin().indices().prepareAliases() - .addAliasAction(AliasActions.add().index("test").alias("alias").searchRouting("3,4").indexRouting("4"))); + .addAliasAction(AliasActions.add().index("test").alias("alias").searchRouting("3,4").indexRouting("4"))); logger.info("--> indexing with id [1], and routing [4]"); client().prepareIndex("alias", "type1", "1").setSource("field", "value2").setRefreshPolicy(RefreshPolicy.IMMEDIATE).get(); @@ -350,8 +593,19 @@ public void testIndexingAliasesOverTime() throws Exception { for (int i = 0; i < 5; i++) { assertThat(client().prepareGet("test", "type1", "0").setRouting("3").execute().actionGet().isExists(), equalTo(true)); assertThat(client().prepareGet("test", "type1", "1").setRouting("4").execute().actionGet().isExists(), equalTo(true)); - assertThat(client().prepareSearch("alias").setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().getTotalHits().value, equalTo(2L)); - assertThat(client().prepareSearch("alias").setSize(0).setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().getTotalHits().value, equalTo(2L)); + assertThat(client().prepareSearch("alias") + .setQuery(QueryBuilders.matchAllQuery()) + .execute() + .actionGet() + .getHits() + .getTotalHits().value, equalTo(2L)); + assertThat(client().prepareSearch("alias") + .setSize(0) + .setQuery(QueryBuilders.matchAllQuery()) + .execute() + .actionGet() + .getHits() + .getTotalHits().value, equalTo(2L)); } } diff --git a/server/src/test/java/org/elasticsearch/routing/SimpleRoutingIT.java b/server/src/test/java/org/elasticsearch/routing/SimpleRoutingIT.java index ad8024e76b4b2..b383a80296247 100644 --- a/server/src/test/java/org/elasticsearch/routing/SimpleRoutingIT.java +++ b/server/src/test/java/org/elasticsearch/routing/SimpleRoutingIT.java @@ -78,7 +78,10 @@ public void testSimpleCrudRouting() throws Exception { ensureGreen(); String routingValue = findNonMatchingRoutingValue("test", "1"); logger.info("--> indexing with id [1], and routing [{}]", routingValue); - client().prepareIndex("test", "type1", "1").setRouting(routingValue).setSource("field", "value1").setRefreshPolicy(RefreshPolicy.IMMEDIATE) + client().prepareIndex("test", "type1", "1") + .setRouting(routingValue) + .setSource("field", "value1") + .setRefreshPolicy(RefreshPolicy.IMMEDIATE) .get(); logger.info("--> verifying get with no routing, should not find anything"); for (int i = 0; i < 5; i++) { @@ -86,25 +89,40 @@ public void testSimpleCrudRouting() throws Exception { } logger.info("--> verifying get with routing, should find"); for (int i = 0; i < 5; i++) { - assertThat(client().prepareGet("test", "type1", "1").setRouting(routingValue).execute().actionGet().isExists(), equalTo(true)); + assertThat(client().prepareGet("test", "type1", "1") + .setRouting(routingValue) + .execute() + .actionGet() + .isExists(), equalTo(true)); } logger.info("--> deleting with no routing, should not delete anything"); client().prepareDelete("test", "type1", "1").setRefreshPolicy(RefreshPolicy.IMMEDIATE).get(); for (int i = 0; i < 5; i++) { 
assertThat(client().prepareGet("test", "type1", "1").execute().actionGet().isExists(), equalTo(false)); - assertThat(client().prepareGet("test", "type1", "1").setRouting(routingValue).execute().actionGet().isExists(), equalTo(true)); + assertThat(client().prepareGet("test", "type1", "1") + .setRouting(routingValue) + .execute() + .actionGet() + .isExists(), equalTo(true)); } logger.info("--> deleting with routing, should delete"); client().prepareDelete("test", "type1", "1").setRouting(routingValue).setRefreshPolicy(RefreshPolicy.IMMEDIATE).get(); for (int i = 0; i < 5; i++) { assertThat(client().prepareGet("test", "type1", "1").execute().actionGet().isExists(), equalTo(false)); - assertThat(client().prepareGet("test", "type1", "1").setRouting(routingValue).execute().actionGet().isExists(), equalTo(false)); + assertThat(client().prepareGet("test", "type1", "1") + .setRouting(routingValue) + .execute() + .actionGet() + .isExists(), equalTo(false)); } logger.info("--> indexing with id [1], and routing [0]"); - client().prepareIndex("test", "type1", "1").setRouting(routingValue).setSource("field", "value1").setRefreshPolicy(RefreshPolicy.IMMEDIATE) + client().prepareIndex("test", "type1", "1") + .setRouting(routingValue) + .setSource("field", "value1") + .setRefreshPolicy(RefreshPolicy.IMMEDIATE) .get(); logger.info("--> verifying get with no routing, should not find anything"); for (int i = 0; i < 5; i++) { @@ -112,7 +130,11 @@ public void testSimpleCrudRouting() throws Exception { } logger.info("--> verifying get with routing, should find"); for (int i = 0; i < 5; i++) { - assertThat(client().prepareGet("test", "type1", "1").setRouting(routingValue).execute().actionGet().isExists(), equalTo(true)); + assertThat(client().prepareGet("test", "type1", "1") + .setRouting(routingValue) + .execute() + .actionGet() + .isExists(), equalTo(true)); } } @@ -122,7 +144,10 @@ public void testSimpleSearchRouting() { String routingValue = findNonMatchingRoutingValue("test", "1"); logger.info("--> indexing with id [1], and routing [{}]", routingValue); - client().prepareIndex("test", "type1", "1").setRouting(routingValue).setSource("field", "value1").setRefreshPolicy(RefreshPolicy.IMMEDIATE) + client().prepareIndex("test", "type1", "1") + .setRouting(routingValue) + .setSource("field", "value1") + .setRefreshPolicy(RefreshPolicy.IMMEDIATE) .get(); logger.info("--> verifying get with no routing, should not find anything"); for (int i = 0; i < 5; i++) { @@ -130,65 +155,193 @@ public void testSimpleSearchRouting() { } logger.info("--> verifying get with routing, should find"); for (int i = 0; i < 5; i++) { - assertThat(client().prepareGet("test", "type1", "1").setRouting(routingValue).execute().actionGet().isExists(), equalTo(true)); + assertThat(client().prepareGet("test", "type1", "1") + .setRouting(routingValue) + .execute() + .actionGet() + .isExists(), equalTo(true)); } logger.info("--> search with no routing, should fine one"); for (int i = 0; i < 5; i++) { - assertThat(client().prepareSearch().setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().getTotalHits().value, equalTo(1L)); + assertThat(client().prepareSearch() + .setQuery(QueryBuilders.matchAllQuery()) + .execute() + .actionGet() + .getHits() + .getTotalHits() + .value, equalTo(1L)); } logger.info("--> search with wrong routing, should not find"); for (int i = 0; i < 5; i++) { - assertThat(client().prepareSearch().setRouting("1").setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().getTotalHits().value, 
equalTo(0L)); - assertThat(client().prepareSearch().setSize(0).setRouting("1").setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().getTotalHits().value, equalTo(0L)); + assertThat(client().prepareSearch() + .setRouting("1") + .setQuery(QueryBuilders.matchAllQuery()) + .execute() + .actionGet() + .getHits() + .getTotalHits() + .value, equalTo(0L)); + assertThat(client().prepareSearch() + .setSize(0) + .setRouting("1") + .setQuery(QueryBuilders.matchAllQuery()) + .execute() + .actionGet() + .getHits() + .getTotalHits() + .value, equalTo(0L)); } logger.info("--> search with correct routing, should find"); for (int i = 0; i < 5; i++) { - assertThat(client().prepareSearch().setRouting(routingValue).setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().getTotalHits().value, equalTo(1L)); - assertThat(client().prepareSearch().setSize(0).setRouting(routingValue).setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().getTotalHits().value, equalTo(1L)); + assertThat(client().prepareSearch() + .setRouting(routingValue) + .setQuery(QueryBuilders.matchAllQuery()) + .execute() + .actionGet() + .getHits() + .getTotalHits() + .value, equalTo(1L)); + assertThat(client().prepareSearch() + .setSize(0) + .setRouting(routingValue) + .setQuery(QueryBuilders.matchAllQuery()) + .execute() + .actionGet() + .getHits() + .getTotalHits() + .value, equalTo(1L)); } String secondRoutingValue = "1"; logger.info("--> indexing with id [{}], and routing [{}]", routingValue, secondRoutingValue); - client().prepareIndex("test", "type1", routingValue).setRouting(secondRoutingValue).setSource("field", "value1").setRefreshPolicy(RefreshPolicy.IMMEDIATE).get(); + client().prepareIndex("test", "type1", routingValue) + .setRouting(secondRoutingValue) + .setSource("field", "value1") + .setRefreshPolicy(RefreshPolicy.IMMEDIATE) + .get(); logger.info("--> search with no routing, should fine two"); for (int i = 0; i < 5; i++) { - assertThat(client().prepareSearch().setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().getTotalHits().value, equalTo(2L)); - assertThat(client().prepareSearch().setSize(0).setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().getTotalHits().value, equalTo(2L)); + assertThat(client().prepareSearch() + .setQuery(QueryBuilders.matchAllQuery()) + .execute() + .actionGet() + .getHits() + .getTotalHits() + .value, equalTo(2L)); + assertThat(client().prepareSearch() + .setSize(0) + .setQuery(QueryBuilders.matchAllQuery()) + .execute() + .actionGet() + .getHits() + .getTotalHits() + .value, equalTo(2L)); } logger.info("--> search with {} routing, should find one", routingValue); for (int i = 0; i < 5; i++) { - assertThat(client().prepareSearch().setRouting(routingValue).setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().getTotalHits().value, equalTo(1L)); - assertThat(client().prepareSearch().setSize(0).setRouting(routingValue).setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().getTotalHits().value, equalTo(1L)); + assertThat(client().prepareSearch() + .setRouting(routingValue) + .setQuery(QueryBuilders.matchAllQuery()) + .execute() + .actionGet() + .getHits() + .getTotalHits() + .value, equalTo(1L)); + assertThat(client().prepareSearch() + .setSize(0) + .setRouting(routingValue) + .setQuery(QueryBuilders.matchAllQuery()) + .execute() + .actionGet() + .getHits() + .getTotalHits() + .value, equalTo(1L)); } logger.info("--> search with {} routing, should find one", secondRoutingValue); 
for (int i = 0; i < 5; i++) { - assertThat(client().prepareSearch().setRouting("1").setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().getTotalHits().value, equalTo(1L)); - assertThat(client().prepareSearch().setSize(0).setRouting(secondRoutingValue).setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().getTotalHits().value, equalTo(1L)); + assertThat(client().prepareSearch() + .setRouting("1") + .setQuery(QueryBuilders.matchAllQuery()) + .execute() + .actionGet() + .getHits() + .getTotalHits() + .value, equalTo(1L)); + assertThat(client().prepareSearch() + .setSize(0) + .setRouting(secondRoutingValue) + .setQuery(QueryBuilders.matchAllQuery()) + .execute() + .actionGet() + .getHits() + .getTotalHits() + .value, equalTo(1L)); } logger.info("--> search with {},{} indexRoutings, should find two", routingValue, "1"); for (int i = 0; i < 5; i++) { - assertThat(client().prepareSearch().setRouting(routingValue, secondRoutingValue).setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().getTotalHits().value, equalTo(2L)); - assertThat(client().prepareSearch().setSize(0).setRouting(routingValue, secondRoutingValue).setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().getTotalHits().value, equalTo(2L)); + assertThat(client().prepareSearch() + .setRouting(routingValue, secondRoutingValue) + .setQuery(QueryBuilders.matchAllQuery()) + .execute() + .actionGet() + .getHits() + .getTotalHits() + .value, equalTo(2L)); + assertThat(client().prepareSearch() + .setSize(0) + .setRouting(routingValue, secondRoutingValue) + .setQuery(QueryBuilders.matchAllQuery()) + .execute() + .actionGet() + .getHits() + .getTotalHits() + .value, equalTo(2L)); } logger.info("--> search with {},{},{} indexRoutings, should find two", routingValue, secondRoutingValue, routingValue); for (int i = 0; i < 5; i++) { - assertThat(client().prepareSearch().setRouting(routingValue, secondRoutingValue, routingValue).setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().getTotalHits().value, equalTo(2L)); - assertThat(client().prepareSearch().setSize(0).setRouting(routingValue, secondRoutingValue,routingValue).setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().getTotalHits().value, equalTo(2L)); + assertThat(client().prepareSearch() + .setRouting(routingValue, secondRoutingValue, routingValue) + .setQuery(QueryBuilders.matchAllQuery()) + .execute() + .actionGet() + .getHits() + .getTotalHits() + .value, equalTo(2L)); + assertThat(client().prepareSearch() + .setSize(0) + .setRouting(routingValue, secondRoutingValue, routingValue) + .setQuery(QueryBuilders.matchAllQuery()) + .execute() + .actionGet() + .getHits() + .getTotalHits() + .value, equalTo(2L)); } } public void testRequiredRoutingCrudApis() throws Exception { - client().admin().indices().prepareCreate("test").addAlias(new Alias("alias")) - .addMapping("type1", XContentFactory.jsonBuilder().startObject().startObject("type1").startObject("_routing").field("required", true).endObject().endObject().endObject()) - .execute().actionGet(); + client().admin() + .indices() + .prepareCreate("test") + .addAlias(new Alias("alias")) + .addMapping("type1", XContentFactory.jsonBuilder() + .startObject() + .startObject("type1") + .startObject("_routing") + .field("required", true) + .endObject() + .endObject() + .endObject()) + .execute() + .actionGet(); ensureGreen(); String routingValue = findNonMatchingRoutingValue("test", "1"); @@ -207,7 +360,12 @@ public void
testRequiredRoutingCrudApis() throws Exception { logger.info("--> verifying get with routing, should find"); for (int i = 0; i < 5; i++) { - assertThat(client().prepareGet(indexOrAlias(), "type1", "1").setRouting(routingValue).execute().actionGet().isExists(), equalTo(true)); + assertThat(client() + .prepareGet(indexOrAlias(), "type1", "1") + .setRouting(routingValue) + .execute() + .actionGet() + .isExists(), equalTo(true)); } logger.info("--> deleting with no routing, should fail"); @@ -226,17 +384,28 @@ public void testRequiredRoutingCrudApis() throws Exception { assertThat(e.status(), equalTo(RestStatus.BAD_REQUEST)); assertThat(e.getMessage(), equalTo("routing is required for [test]/[type1]/[1]")); } - assertThat(client().prepareGet(indexOrAlias(), "type1", "1").setRouting(routingValue).execute().actionGet().isExists(), equalTo(true)); + assertThat(client() + .prepareGet(indexOrAlias(), "type1", "1") + .setRouting(routingValue) + .execute() + .actionGet() + .isExists(), equalTo(true)); } try { - client().prepareUpdate(indexOrAlias(), "type1", "1").setDoc(Requests.INDEX_CONTENT_TYPE, "field", "value2").execute().actionGet(); + client().prepareUpdate(indexOrAlias(), "type1", "1") + .setDoc(Requests.INDEX_CONTENT_TYPE, "field", "value2") + .execute() + .actionGet(); fail("update with missing routing when routing is required should fail"); - } catch(ElasticsearchException e) { + } catch (ElasticsearchException e) { assertThat(e.unwrapCause(), instanceOf(RoutingMissingException.class)); } - client().prepareUpdate(indexOrAlias(), "type1", "1").setRouting(routingValue).setDoc(Requests.INDEX_CONTENT_TYPE, "field", "value2").get(); + client().prepareUpdate(indexOrAlias(), "type1", "1") + .setRouting(routingValue) + .setDoc(Requests.INDEX_CONTENT_TYPE, "field", "value2") + .get(); client().admin().indices().prepareRefresh().execute().actionGet(); for (int i = 0; i < 5; i++) { @@ -262,21 +431,32 @@ public void testRequiredRoutingCrudApis() throws Exception { assertThat(e.status(), equalTo(RestStatus.BAD_REQUEST)); assertThat(e.getMessage(), equalTo("routing is required for [test]/[type1]/[1]")); } - assertThat(client().prepareGet(indexOrAlias(), "type1", "1").setRouting(routingValue).execute().actionGet().isExists(), equalTo(false)); + assertThat(client() + .prepareGet(indexOrAlias(), "type1", "1") + .setRouting(routingValue) + .execute() + .actionGet() + .isExists(), equalTo(false)); } } public void testRequiredRoutingBulk() throws Exception { client().admin().indices().prepareCreate("test") - .addAlias(new Alias("alias")) - .addMapping("type1", XContentFactory.jsonBuilder().startObject().startObject("type1") - .startObject("_routing").field("required", true).endObject() - .endObject().endObject()) - .execute().actionGet(); + .addAlias(new Alias("alias")) + .addMapping("type1", XContentFactory.jsonBuilder().startObject().startObject("type1") + .startObject("_routing").field("required", true).endObject() + .endObject().endObject()) + .execute().actionGet(); ensureGreen(); { - BulkResponse bulkResponse = client().prepareBulk().add(Requests.indexRequest(indexOrAlias()).type("type1").id("1") - .source(Requests.INDEX_CONTENT_TYPE, "field", "value")).execute().actionGet(); + BulkResponse bulkResponse = client() + .prepareBulk() + .add(Requests.indexRequest(indexOrAlias()) + .type("type1") + .id("1") + .source(Requests.INDEX_CONTENT_TYPE, "field", "value")) + .execute() + .actionGet(); assertThat(bulkResponse.getItems().length, equalTo(1)); assertThat(bulkResponse.hasFailures(), equalTo(true)); @@ 
-290,15 +470,22 @@ public void testRequiredRoutingBulk() throws Exception { } { - BulkResponse bulkResponse = client().prepareBulk().add(Requests.indexRequest(indexOrAlias()).type("type1").id("1").routing("0") - .source(Requests.INDEX_CONTENT_TYPE, "field", "value")).execute().actionGet(); + BulkResponse bulkResponse = client() + .prepareBulk() + .add(Requests.indexRequest(indexOrAlias()) + .type("type1") + .id("1") + .routing("0") + .source(Requests.INDEX_CONTENT_TYPE, "field", "value")) + .execute() + .actionGet(); assertThat(bulkResponse.hasFailures(), equalTo(false)); } { BulkResponse bulkResponse = client().prepareBulk().add(new UpdateRequest(indexOrAlias(), "type1", "1") .doc(Requests.INDEX_CONTENT_TYPE, "field", "value2")) - .execute().actionGet(); + .execute().actionGet(); assertThat(bulkResponse.getItems().length, equalTo(1)); assertThat(bulkResponse.hasFailures(), equalTo(true)); @@ -320,7 +507,7 @@ public void testRequiredRoutingBulk() throws Exception { { BulkResponse bulkResponse = client().prepareBulk().add(Requests.deleteRequest(indexOrAlias()).type("type1").id("1")) - .execute().actionGet(); + .execute().actionGet(); assertThat(bulkResponse.getItems().length, equalTo(1)); assertThat(bulkResponse.hasFailures(), equalTo(true)); @@ -335,7 +522,7 @@ public void testRequiredRoutingBulk() throws Exception { { BulkResponse bulkResponse = client().prepareBulk().add(Requests.deleteRequest(indexOrAlias()).type("type1").id("1") - .routing("0")).execute().actionGet(); + .routing("0")).execute().actionGet(); assertThat(bulkResponse.getItems().length, equalTo(1)); assertThat(bulkResponse.hasFailures(), equalTo(false)); } @@ -343,10 +530,20 @@ public void testRequiredRoutingBulk() throws Exception { public void testRequiredRoutingMappingVariousAPIs() throws Exception { - client().admin().indices().prepareCreate("test").addAlias(new Alias("alias")) - .addMapping("type1", XContentFactory.jsonBuilder().startObject().startObject("type1") - .startObject("_routing").field("required", true).endObject().endObject().endObject()) - .execute().actionGet(); + client().admin() + .indices() + .prepareCreate("test") + .addAlias(new Alias("alias")) + .addMapping("type1", XContentFactory.jsonBuilder() + .startObject() + .startObject("type1") + .startObject("_routing") + .field("required", true) + .endObject() + .endObject() + .endObject()) + .execute() + .actionGet(); ensureGreen(); String routingValue = findNonMatchingRoutingValue("test", "1"); logger.info("--> indexing with id [1], and routing [{}]", routingValue); @@ -356,7 +553,11 @@ public void testRequiredRoutingMappingVariousAPIs() throws Exception { .setRefreshPolicy(RefreshPolicy.IMMEDIATE).get(); logger.info("--> verifying get with id [1] with routing [0], should succeed"); - assertThat(client().prepareGet(indexOrAlias(), "type1", "1").setRouting(routingValue).execute().actionGet().isExists(), equalTo(true)); + assertThat(client().prepareGet(indexOrAlias(), "type1", "1") + .setRouting(routingValue) + .execute() + .actionGet() + .isExists(), equalTo(true)); logger.info("--> verifying get with id [1], with no routing, should fail"); try { @@ -368,8 +569,8 @@ public void testRequiredRoutingMappingVariousAPIs() throws Exception { logger.info("--> verifying explain with id [2], with routing [0], should succeed"); ExplainResponse explainResponse = client().prepareExplain(indexOrAlias(), "type1", "2") - .setQuery(QueryBuilders.matchAllQuery()) - .setRouting(routingValue).get(); + .setQuery(QueryBuilders.matchAllQuery()) + 
.setRouting(routingValue).get(); assertThat(explainResponse.isExists(), equalTo(true)); assertThat(explainResponse.isMatch(), equalTo(true)); @@ -383,7 +584,9 @@ public void testRequiredRoutingMappingVariousAPIs() throws Exception { } logger.info("--> verifying term vector with id [1], with routing [0], should succeed"); - TermVectorsResponse termVectorsResponse = client().prepareTermVectors(indexOrAlias(), "type1", "1").setRouting(routingValue).get(); + TermVectorsResponse termVectorsResponse = client().prepareTermVectors(indexOrAlias(), "type1", "1") + .setRouting(routingValue) + .get(); assertThat(termVectorsResponse.isExists(), equalTo(true)); assertThat(termVectorsResponse.getId(), equalTo("1")); @@ -395,7 +598,7 @@ public void testRequiredRoutingMappingVariousAPIs() throws Exception { } UpdateResponse updateResponse = client().prepareUpdate(indexOrAlias(), "type1", "1").setRouting(routingValue) - .setDoc(Requests.INDEX_CONTENT_TYPE, "field1", "value1").get(); + .setDoc(Requests.INDEX_CONTENT_TYPE, "field1", "value1").get(); assertThat(updateResponse.getId(), equalTo("1")); assertThat(updateResponse.getVersion(), equalTo(2L)); @@ -408,8 +611,8 @@ public void testRequiredRoutingMappingVariousAPIs() throws Exception { logger.info("--> verifying mget with ids [1,2], with routing [0], should succeed"); MultiGetResponse multiGetResponse = client().prepareMultiGet() - .add(new MultiGetRequest.Item(indexOrAlias(), "type1", "1").routing("0")) - .add(new MultiGetRequest.Item(indexOrAlias(), "type1", "2").routing("0")).get(); + .add(new MultiGetRequest.Item(indexOrAlias(), "type1", "1").routing("0")) + .add(new MultiGetRequest.Item(indexOrAlias(), "type1", "2").routing("0")).get(); assertThat(multiGetResponse.getResponses().length, equalTo(2)); assertThat(multiGetResponse.getResponses()[0].isFailed(), equalTo(false)); assertThat(multiGetResponse.getResponses()[0].getResponse().getId(), equalTo("1")); @@ -418,8 +621,8 @@ public void testRequiredRoutingMappingVariousAPIs() throws Exception { logger.info("--> verifying mget with ids [1,2], with no routing, should fail"); multiGetResponse = client().prepareMultiGet() - .add(new MultiGetRequest.Item(indexOrAlias(), "type1", "1")) - .add(new MultiGetRequest.Item(indexOrAlias(), "type1", "2")).get(); + .add(new MultiGetRequest.Item(indexOrAlias(), "type1", "1")) + .add(new MultiGetRequest.Item(indexOrAlias(), "type1", "2")).get(); assertThat(multiGetResponse.getResponses().length, equalTo(2)); assertThat(multiGetResponse.getResponses()[0].isFailed(), equalTo(true)); assertThat(multiGetResponse.getResponses()[0].getFailure().getId(), equalTo("1")); @@ -429,8 +632,11 @@ public void testRequiredRoutingMappingVariousAPIs() throws Exception { assertThat(multiGetResponse.getResponses()[1].getFailure().getMessage(), equalTo("routing is required for [test]/[type1]/[2]")); MultiTermVectorsResponse multiTermVectorsResponse = client().prepareMultiTermVectors() - .add(new TermVectorsRequest(indexOrAlias(), "type1", "1").routing(routingValue)) - .add(new TermVectorsRequest(indexOrAlias(), "type1", "2").routing(routingValue)).get(); + .add(new TermVectorsRequest(indexOrAlias(), "type1", "1") + .routing(routingValue)) + .add(new TermVectorsRequest(indexOrAlias(), "type1", "2") + .routing(routingValue)) + .get(); assertThat(multiTermVectorsResponse.getResponses().length, equalTo(2)); assertThat(multiTermVectorsResponse.getResponses()[0].getId(), equalTo("1")); assertThat(multiTermVectorsResponse.getResponses()[0].isFailed(), equalTo(false)); @@ -442,17 +648,21 @@ 
public void testRequiredRoutingMappingVariousAPIs() throws Exception { assertThat(multiTermVectorsResponse.getResponses()[1].getResponse().isExists(), equalTo(true)); multiTermVectorsResponse = client().prepareMultiTermVectors() - .add(new TermVectorsRequest(indexOrAlias(), "type1", "1")) - .add(new TermVectorsRequest(indexOrAlias(), "type1", "2")).get(); + .add(new TermVectorsRequest(indexOrAlias(), "type1", "1")) + .add(new TermVectorsRequest(indexOrAlias(), "type1", "2")).get(); assertThat(multiTermVectorsResponse.getResponses().length, equalTo(2)); assertThat(multiTermVectorsResponse.getResponses()[0].getId(), equalTo("1")); assertThat(multiTermVectorsResponse.getResponses()[0].isFailed(), equalTo(true)); - assertThat(multiTermVectorsResponse.getResponses()[0].getFailure().getCause().getMessage(), equalTo("routing is required for [test]/[type1]/[1]")); + assertThat(multiTermVectorsResponse.getResponses()[0].getFailure() + .getCause() + .getMessage(), equalTo("routing is required for [test]/[type1]/[1]")); assertThat(multiTermVectorsResponse.getResponses()[0].getResponse(), nullValue()); assertThat(multiTermVectorsResponse.getResponses()[1].getId(), equalTo("2")); assertThat(multiTermVectorsResponse.getResponses()[1].isFailed(), equalTo(true)); - assertThat(multiTermVectorsResponse.getResponses()[1].getResponse(),nullValue()); - assertThat(multiTermVectorsResponse.getResponses()[1].getFailure().getCause().getMessage(), equalTo("routing is required for [test]/[type1]/[2]")); + assertThat(multiTermVectorsResponse.getResponses()[1].getResponse(), nullValue()); + assertThat(multiTermVectorsResponse.getResponses()[1].getFailure() + .getCause() + .getMessage(), equalTo("routing is required for [test]/[type1]/[2]")); } private static String indexOrAlias() { diff --git a/server/src/test/java/org/elasticsearch/script/JodaCompatibleZonedDateTimeTests.java b/server/src/test/java/org/elasticsearch/script/JodaCompatibleZonedDateTimeTests.java index f01079d092fa7..4750ee36b0bd6 100644 --- a/server/src/test/java/org/elasticsearch/script/JodaCompatibleZonedDateTimeTests.java +++ b/server/src/test/java/org/elasticsearch/script/JodaCompatibleZonedDateTimeTests.java @@ -152,6 +152,10 @@ public void testYear() { assertThat(javaTime.getYear(), equalTo(jodaTime.getYear())); } + public void testZone() { + assertThat(javaTime.getZone().getId(), equalTo(jodaTime.getZone().getID())); + } + public void testMillis() { assertMethodDeprecation(() -> assertThat(javaTime.getMillis(), equalTo(jodaTime.getMillis())), "getMillis()", "toInstant().toEpochMilli()"); diff --git a/server/src/test/java/org/elasticsearch/script/ScriptServiceTests.java b/server/src/test/java/org/elasticsearch/script/ScriptServiceTests.java index 271007f99787d..f49bb70dc7ffc 100644 --- a/server/src/test/java/org/elasticsearch/script/ScriptServiceTests.java +++ b/server/src/test/java/org/elasticsearch/script/ScriptServiceTests.java @@ -152,10 +152,12 @@ private void assertException(String rate, Class clazz, Stri public void testNotSupportedDisableDynamicSetting() throws IOException { try { - buildScriptService(Settings.builder().put(ScriptService.DISABLE_DYNAMIC_SCRIPTING_SETTING, randomUnicodeOfLength(randomIntBetween(1, 10))).build()); + buildScriptService(Settings.builder().put( + ScriptService.DISABLE_DYNAMIC_SCRIPTING_SETTING, randomUnicodeOfLength(randomIntBetween(1, 10))).build()); fail("script service should have thrown exception due to non supported script.disable_dynamic setting"); } catch(IllegalArgumentException e) { - 
assertThat(e.getMessage(), containsString(ScriptService.DISABLE_DYNAMIC_SCRIPTING_SETTING + " is not a supported setting, replace with fine-grained script settings")); + assertThat(e.getMessage(), containsString(ScriptService.DISABLE_DYNAMIC_SCRIPTING_SETTING + + " is not a supported setting, replace with fine-grained script settings")); } } @@ -338,7 +340,8 @@ public void testMaxSizeLimit() throws Exception { private void assertCompileRejected(String lang, String script, ScriptType scriptType, ScriptContext scriptContext) { try { scriptService.compile(new Script(scriptType, lang, script, Collections.emptyMap()), scriptContext); - fail("compile should have been rejected for lang [" + lang + "], script_type [" + scriptType + "], scripted_op [" + scriptContext + "]"); + fail("compile should have been rejected for lang [" + lang + "], " + + "script_type [" + scriptType + "], scripted_op [" + scriptContext + "]"); } catch (IllegalArgumentException | IllegalStateException e) { // pass } diff --git a/server/src/test/java/org/elasticsearch/search/DefaultSearchContextTests.java b/server/src/test/java/org/elasticsearch/search/DefaultSearchContextTests.java index 4b86ff668c0de..189929171a5d1 100644 --- a/server/src/test/java/org/elasticsearch/search/DefaultSearchContextTests.java +++ b/server/src/test/java/org/elasticsearch/search/DefaultSearchContextTests.java @@ -26,6 +26,7 @@ import org.apache.lucene.search.Sort; import org.apache.lucene.store.Directory; import org.elasticsearch.Version; +import org.elasticsearch.action.OriginalIndices; import org.elasticsearch.action.search.SearchType; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.settings.Settings; @@ -110,10 +111,12 @@ public void testPreProcess() throws Exception { try (Directory dir = newDirectory(); RandomIndexWriter w = new RandomIndexWriter(random(), dir); IndexReader reader = w.getReader(); - Engine.Searcher searcher = new Engine.Searcher("test", new IndexSearcher(reader), reader::close)) { + Engine.Searcher searcher = new Engine.Searcher("test", new IndexSearcher(reader), reader)) { - DefaultSearchContext context1 = new DefaultSearchContext(1L, shardSearchRequest, null, searcher, null, indexService, - indexShard, bigArrays, null, timeout, null, null, Version.CURRENT); + SearchShardTarget target = new SearchShardTarget("node", shardId, null, OriginalIndices.NONE); + + DefaultSearchContext context1 = new DefaultSearchContext(1L, shardSearchRequest, target, searcher, null, indexService, + indexShard, bigArrays, null, timeout, null, Version.CURRENT); context1.from(300); // resultWindow greater than maxResultWindow and scrollContext is null @@ -153,8 +156,8 @@ public void testPreProcess() throws Exception { + "] index level setting.")); // rescore is null but sliceBuilder is not null - DefaultSearchContext context2 = new DefaultSearchContext(2L, shardSearchRequest, null, searcher, - null, indexService, indexShard, bigArrays, null, timeout, null, null, Version.CURRENT); + DefaultSearchContext context2 = new DefaultSearchContext(2L, shardSearchRequest, target, searcher, + null, indexService, indexShard, bigArrays, null, timeout, null, Version.CURRENT); SliceBuilder sliceBuilder = mock(SliceBuilder.class); int numSlices = maxSlicesPerScroll + randomIntBetween(1, 100); @@ -170,8 +173,8 @@ public void testPreProcess() throws Exception { when(shardSearchRequest.getAliasFilter()).thenReturn(AliasFilter.EMPTY); when(shardSearchRequest.indexBoost()).thenReturn(AbstractQueryBuilder.DEFAULT_BOOST); - 
DefaultSearchContext context3 = new DefaultSearchContext(3L, shardSearchRequest, null, searcher, null, - indexService, indexShard, bigArrays, null, timeout, null, null, Version.CURRENT); + DefaultSearchContext context3 = new DefaultSearchContext(3L, shardSearchRequest, target, searcher, null, + indexService, indexShard, bigArrays, null, timeout, null, Version.CURRENT); ParsedQuery parsedQuery = ParsedQuery.parsedMatchAllQuery(); context3.sliceBuilder(null).parsedQuery(parsedQuery).preProcess(false); assertEquals(context3.query(), context3.buildFilteredQuery(parsedQuery.query())); diff --git a/server/src/test/java/org/elasticsearch/search/SearchHitTests.java b/server/src/test/java/org/elasticsearch/search/SearchHitTests.java index 3ad39404afefb..fee55f1e22f23 100644 --- a/server/src/test/java/org/elasticsearch/search/SearchHitTests.java +++ b/server/src/test/java/org/elasticsearch/search/SearchHitTests.java @@ -21,13 +21,12 @@ import org.apache.lucene.search.Explanation; import org.apache.lucene.search.TotalHits; +import org.elasticsearch.Version; import org.elasticsearch.action.OriginalIndices; import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.document.DocumentField; -import org.elasticsearch.common.io.stream.BytesStreamOutput; -import org.elasticsearch.common.io.stream.InputStreamStreamInput; import org.elasticsearch.common.text.Text; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -42,9 +41,9 @@ import org.elasticsearch.search.fetch.subphase.highlight.HighlightFieldTests; import org.elasticsearch.test.AbstractStreamableTestCase; import org.elasticsearch.test.RandomObjects; +import org.elasticsearch.test.VersionUtils; import java.io.IOException; -import java.io.InputStream; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; @@ -214,7 +213,8 @@ public void testToXContent() throws IOException { public void testSerializeShardTarget() throws Exception { String clusterAlias = randomBoolean() ? 
null : "cluster_alias"; - SearchShardTarget target = new SearchShardTarget("_node_id", new Index("_index", "_na_"), 0, clusterAlias); + SearchShardTarget target = new SearchShardTarget("_node_id", new ShardId(new Index("_index", "_na_"), 0), + clusterAlias, OriginalIndices.NONE); Map innerHits = new HashMap<>(); SearchHit innerHit1 = new SearchHit(0, "_id", new Text("_type"), null); @@ -240,12 +240,10 @@ public void testSerializeShardTarget() throws Exception { SearchHits hits = new SearchHits(new SearchHit[]{hit1, hit2}, new TotalHits(2, TotalHits.Relation.EQUAL_TO), 1f); - - BytesStreamOutput output = new BytesStreamOutput(); - hits.writeTo(output); - InputStream input = output.bytes().streamInput(); - SearchHits results = SearchHits.readSearchHits(new InputStreamStreamInput(input)); - assertThat(results.getAt(0).getShard(), equalTo(target)); + Version version = VersionUtils.randomVersion(random()); + SearchHits results = copyStreamable(hits, getNamedWriteableRegistry(), SearchHits::new, version); + SearchShardTarget deserializedTarget = results.getAt(0).getShard(); + assertThat(deserializedTarget, equalTo(target)); assertThat(results.getAt(0).getInnerHits().get("1").getAt(0).getShard(), notNullValue()); assertThat(results.getAt(0).getInnerHits().get("1").getAt(0).getInnerHits().get("1").getAt(0).getShard(), notNullValue()); assertThat(results.getAt(0).getInnerHits().get("1").getAt(1).getShard(), notNullValue()); @@ -260,7 +258,6 @@ public void testSerializeShardTarget() throws Exception { } } } - assertThat(results.getAt(1).getShard(), equalTo(target)); } diff --git a/server/src/test/java/org/elasticsearch/search/SearchHitsTests.java b/server/src/test/java/org/elasticsearch/search/SearchHitsTests.java index 8623c2021bffd..396879e8f65bd 100644 --- a/server/src/test/java/org/elasticsearch/search/SearchHitsTests.java +++ b/server/src/test/java/org/elasticsearch/search/SearchHitsTests.java @@ -150,7 +150,7 @@ protected SearchHits mutateInstance(SearchHits instance) { if (instance.getCollapseValues() == null) { collapseValues = createCollapseValues(randomIntBetween(1, 5)); } else { - collapseValues = randomBoolean() ? createCollapseValues(instance.getCollapseValues().length) : null; + collapseValues = randomBoolean() ? 
createCollapseValues(instance.getCollapseValues().length + 1) : null; } return new SearchHits(instance.getHits(), instance.getTotalHits(), instance.getMaxScore(), instance.getSortFields(), instance.getCollapseField(), collapseValues); diff --git a/server/src/test/java/org/elasticsearch/search/SearchServiceTests.java b/server/src/test/java/org/elasticsearch/search/SearchServiceTests.java index 30598311ad574..894a4fa9d4ae3 100644 --- a/server/src/test/java/org/elasticsearch/search/SearchServiceTests.java +++ b/server/src/test/java/org/elasticsearch/search/SearchServiceTests.java @@ -23,12 +23,14 @@ import org.apache.lucene.store.AlreadyClosedException; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.OriginalIndices; import org.elasticsearch.action.index.IndexResponse; +import org.elasticsearch.action.search.ClearScrollRequest; import org.elasticsearch.action.search.SearchPhaseExecutionException; +import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.search.SearchTask; import org.elasticsearch.action.search.SearchType; -import org.elasticsearch.action.search.ClearScrollRequest; import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.action.support.WriteRequest; @@ -71,14 +73,16 @@ import org.elasticsearch.search.internal.AliasFilter; import org.elasticsearch.search.internal.SearchContext; import org.elasticsearch.search.internal.ShardSearchLocalRequest; +import org.elasticsearch.search.internal.ShardSearchTransportRequest; import org.elasticsearch.search.suggest.SuggestBuilder; import org.elasticsearch.test.ESSingleNodeTestCase; import java.io.IOException; import java.util.Collection; import java.util.Collections; -import java.util.List; import java.util.LinkedList; +import java.util.List; +import java.util.Locale; import java.util.Map; import java.util.concurrent.CountDownLatch; import java.util.concurrent.ExecutionException; @@ -114,7 +118,6 @@ public static class CustomScriptPlugin extends MockScriptPlugin { static final String DUMMY_SCRIPT = "dummyScript"; - @Override protected Map<String, Function<Map<String, Object>, Object>> pluginScripts() { return Collections.singletonMap(DUMMY_SCRIPT, vars -> "dummy"); @@ -151,7 +154,6 @@ public void onQueryPhase(SearchContext context, long tookInNanos) { } }); } - } @Override @@ -637,4 +639,28 @@ public void testCreateReduceContext() { reduceContext.consumeBucketsAndMaybeBreak(MultiBucketConsumerService.DEFAULT_MAX_BUCKETS + 1); } } + + public void testCreateSearchContext() throws IOException { + String index = randomAlphaOfLengthBetween(5, 10).toLowerCase(Locale.ROOT); + IndexService indexService = createIndex(index); + final SearchService service = getInstanceFromNode(SearchService.class); + ShardId shardId = new ShardId(indexService.index(), 0); + long nowInMillis = System.currentTimeMillis(); + String clusterAlias = randomBoolean() ?
null : randomAlphaOfLengthBetween(3, 10); + SearchRequest searchRequest = new SearchRequest(); + searchRequest.allowPartialSearchResults(randomBoolean()); + ShardSearchTransportRequest request = new ShardSearchTransportRequest(OriginalIndices.NONE, searchRequest, shardId, + indexService.numberOfShards(), AliasFilter.EMPTY, 1f, nowInMillis, clusterAlias, Strings.EMPTY_ARRAY); + DefaultSearchContext searchContext = service.createSearchContext(request, new TimeValue(System.currentTimeMillis())); + SearchShardTarget searchShardTarget = searchContext.shardTarget(); + QueryShardContext queryShardContext = searchContext.getQueryShardContext(); + String expectedIndexName = clusterAlias == null ? index : clusterAlias + ":" + index; + assertEquals(expectedIndexName, queryShardContext.getFullyQualifiedIndex().getName()); + assertEquals(expectedIndexName, searchShardTarget.getFullyQualifiedIndexName()); + assertEquals(clusterAlias, searchShardTarget.getClusterAlias()); + assertEquals(shardId, searchShardTarget.getShardId()); + assertSame(searchShardTarget, searchContext.dfsResult().getSearchShardTarget()); + assertSame(searchShardTarget, searchContext.queryResult().getSearchShardTarget()); + assertSame(searchShardTarget, searchContext.fetchResult().getSearchShardTarget()); + } } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/AggregationsTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/AggregationsTests.java index 097a3949fc27a..ef001b35feffb 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/AggregationsTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/AggregationsTests.java @@ -71,6 +71,7 @@ import org.elasticsearch.search.aggregations.metrics.InternalScriptedMetricTests; import org.elasticsearch.search.aggregations.metrics.InternalTopHitsTests; import org.elasticsearch.search.aggregations.metrics.InternalValueCountTests; +import org.elasticsearch.search.aggregations.metrics.InternalWeightedAvgTests; import org.elasticsearch.search.aggregations.pipeline.InternalSimpleValueTests; import org.elasticsearch.search.aggregations.pipeline.InternalBucketMetricValueTests; import org.elasticsearch.search.aggregations.pipeline.InternalPercentilesBucketTests; @@ -114,6 +115,7 @@ private static List> getAggsTests() { aggsTests.add(new InternalMinTests()); aggsTests.add(new InternalMaxTests()); aggsTests.add(new InternalAvgTests()); + aggsTests.add(new InternalWeightedAvgTests()); aggsTests.add(new InternalSumTests()); aggsTests.add(new InternalValueCountTests()); aggsTests.add(new InternalSimpleValueTests()); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/BucketUtilsTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/BucketUtilsTests.java index 35f3175f7cfe5..756bc14a498ed 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/BucketUtilsTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/BucketUtilsTests.java @@ -27,22 +27,14 @@ public class BucketUtilsTests extends ESTestCase { public void testBadInput() { IllegalArgumentException e = expectThrows(IllegalArgumentException.class, - () -> BucketUtils.suggestShardSideQueueSize(0, randomBoolean())); + () -> BucketUtils.suggestShardSideQueueSize(0)); assertEquals(e.getMessage(), "size must be positive, got 0"); } - public void testOptimizesSingleShard() { - for (int iter = 0; iter < 10; ++iter) { - final int size = randomIntBetween(1, Integer.MAX_VALUE); - 
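For context on the BucketUtils change threaded through these hunks: suggestShardSideQueueSize now takes only the requested size, so the shard-side queue is always over-sized, even for a single shard. A sketch of a heuristic that satisfies what these tests assert (positive input required, result at least size, no overflow near Integer.MAX_VALUE); the 1.5x-plus-10 factor here is an illustrative assumption, not a quote of the upstream constant:

public final class ShardSizeSketch {
    // Over-request from every shard so the globally reduced top-N stays accurate,
    // using long arithmetic so sizes near Integer.MAX_VALUE cannot overflow.
    static int suggestShardSideQueueSize(int size) {
        if (size <= 0) {
            throw new IllegalArgumentException("size must be positive, got " + size);
        }
        long estimate = (long) (size * 1.5) + 10;
        return (int) Math.min(Integer.MAX_VALUE, estimate);
    }

    public static void main(String[] args) {
        System.out.println(suggestShardSideQueueSize(10));                // 25
        System.out.println(suggestShardSideQueueSize(Integer.MAX_VALUE)); // clamped
    }
}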
assertEquals(size, BucketUtils.suggestShardSideQueueSize( size, true)); - } - } - public void testOverFlow() { for (int iter = 0; iter < 10; ++iter) { final int size = Integer.MAX_VALUE - randomInt(10); - final int numberOfShards = randomIntBetween(1, 10); - final int shardSize = BucketUtils.suggestShardSideQueueSize( size, numberOfShards == 1); + final int shardSize = BucketUtils.suggestShardSideQueueSize( size); assertThat(shardSize, greaterThanOrEqualTo(shardSize)); } } @@ -50,8 +42,7 @@ public void testOverFlow() { public void testShardSizeIsGreaterThanGlobalSize() { for (int iter = 0; iter < 10; ++iter) { final int size = randomIntBetween(1, Integer.MAX_VALUE); - final int numberOfShards = randomIntBetween(1, 10); - final int shardSize = BucketUtils.suggestShardSideQueueSize( size, numberOfShards == 1); + final int shardSize = BucketUtils.suggestShardSideQueueSize( size); assertThat(shardSize, greaterThanOrEqualTo(size)); } } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/DoubleTermsIT.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/DoubleTermsIT.java index a80aa4142ebe4..23842a3f9df55 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/DoubleTermsIT.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/DoubleTermsIT.java @@ -692,7 +692,7 @@ public void testSingleValuedFieldOrderedByNonMetricsOrMultiBucketSubAggregation( } } - public void testSingleValuedFieldOrderedByMultiValuedSubAggregationWithUknownMetric() throws Exception { + public void testSingleValuedFieldOrderedByMultiValuedSubAggregationWithUnknownMetric() throws Exception { for (String index : Arrays.asList("idx", "idx_unmapped")) { try { client().prepareSearch(index) diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/LongTermsIT.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/LongTermsIT.java index 481050acee498..6b704a6711ad9 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/LongTermsIT.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/LongTermsIT.java @@ -690,7 +690,7 @@ public void testSingleValuedFieldOrderedByNonMetricsOrMultiBucketSubAggregation( } } - public void testSingleValuedFieldOrderedByMultiValuedSubAggregationWithUknownMetric() throws Exception { + public void testSingleValuedFieldOrderedByMultiValuedSubAggregationWithUnknownMetric() throws Exception { for (String index : Arrays.asList("idx", "idx_unmapped")) { try { client().prepareSearch(index) diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/TermsShardMinDocCountIT.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/TermsShardMinDocCountIT.java index 0f685ded62c1c..a3311db11350f 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/TermsShardMinDocCountIT.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/TermsShardMinDocCountIT.java @@ -23,11 +23,11 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.index.query.QueryBuilders; +import org.elasticsearch.search.aggregations.BucketOrder; import org.elasticsearch.search.aggregations.bucket.filter.InternalFilter; import org.elasticsearch.search.aggregations.bucket.significant.SignificantTerms; import org.elasticsearch.search.aggregations.bucket.significant.SignificantTermsAggregatorFactory; import 
org.elasticsearch.search.aggregations.bucket.terms.Terms; -import org.elasticsearch.search.aggregations.BucketOrder; import org.elasticsearch.test.ESIntegTestCase; import java.util.ArrayList; @@ -35,17 +35,18 @@ import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_REPLICAS; import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_SHARDS; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse; import static org.elasticsearch.search.aggregations.AggregationBuilders.filter; import static org.elasticsearch.search.aggregations.AggregationBuilders.significantTerms; import static org.elasticsearch.search.aggregations.AggregationBuilders.terms; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse; import static org.hamcrest.Matchers.equalTo; public class TermsShardMinDocCountIT extends ESIntegTestCase { private static final String index = "someindex"; private static final String type = "testtype"; - public String randomExecutionHint() { + + private static String randomExecutionHint() { return randomBoolean() ? null : randomFrom(SignificantTermsAggregatorFactory.ExecutionMode.values()).toString(); } @@ -74,7 +75,7 @@ public void testShardMinDocCountSignificantTermsTest() throws Exception { SearchResponse response = client().prepareSearch(index) .addAggregation( (filter("inclass", QueryBuilders.termQuery("class", true))) - .subAggregation(significantTerms("mySignificantTerms").field("text").minDocCount(2).size(2) + .subAggregation(significantTerms("mySignificantTerms").field("text").minDocCount(2).size(2).shardSize(2) .executionHint(randomExecutionHint())) ) .get(); @@ -87,16 +88,14 @@ public void testShardMinDocCountSignificantTermsTest() throws Exception { response = client().prepareSearch(index) .addAggregation( (filter("inclass", QueryBuilders.termQuery("class", true))) - .subAggregation(significantTerms("mySignificantTerms").field("text").minDocCount(2) - .shardMinDocCount(2).size(2) - .executionHint(randomExecutionHint())) + .subAggregation(significantTerms("mySignificantTerms").field("text").minDocCount(2).shardSize(2) + .shardMinDocCount(2).size(2).executionHint(randomExecutionHint())) ) .get(); assertSearchResponse(response); filteredBucket = response.getAggregations().get("inclass"); sigterms = filteredBucket.getAggregations().get("mySignificantTerms"); assertThat(sigterms.getBuckets().size(), equalTo(2)); - } private void addTermsDocs(String term, int numInClass, int numNotInClass, List builders) { @@ -133,19 +132,18 @@ public void testShardMinDocCountTermsTest() throws Exception { // first, check that indeed when not setting the shardMinDocCount parameter 0 terms are returned SearchResponse response = client().prepareSearch(index) .addAggregation( - terms("myTerms").field("text").minDocCount(2).size(2).executionHint(randomExecutionHint()) - .order(BucketOrder.key(true)) + terms("myTerms").field("text").minDocCount(2).size(2).shardSize(2).executionHint(randomExecutionHint()) + .order(BucketOrder.key(true)) ) .get(); assertSearchResponse(response); Terms sigterms = response.getAggregations().get("myTerms"); assertThat(sigterms.getBuckets().size(), equalTo(0)); - response = client().prepareSearch(index) .addAggregation( - 
terms("myTerms").field("text").minDocCount(2).shardMinDocCount(2).size(2).executionHint(randomExecutionHint()) - .order(BucketOrder.key(true)) + terms("myTerms").field("text").minDocCount(2).shardMinDocCount(2).size(2).shardSize(2) + .executionHint(randomExecutionHint()).order(BucketOrder.key(true)) ) .get(); assertSearchResponse(response); @@ -154,11 +152,10 @@ public void testShardMinDocCountTermsTest() throws Exception { } - private void addTermsDocs(String term, int numDocs, List builders) { + private static void addTermsDocs(String term, int numDocs, List builders) { String sourceClass = "{\"text\": \"" + term + "\"}"; for (int i = 0; i < numDocs; i++) { builders.add(client().prepareIndex(index, type).setSource(sourceClass, XContentType.JSON)); } - } } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeValuesCollectorQueueTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeValuesCollectorQueueTests.java index 006b13e2c0b42..6516309de965f 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeValuesCollectorQueueTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeValuesCollectorQueueTests.java @@ -296,13 +296,16 @@ public void collect(int doc, long bucket) throws IOException { } } assertEquals(size, Math.min(queue.size(), expected.length - pos)); - int ptr = 0; - for (int slot : queue.getSortedSlot()) { - CompositeKey key = queue.toCompositeKey(slot); - assertThat(key, equalTo(expected[ptr++])); - last = key; - } + int ptr = pos + (queue.size() - 1); pos += queue.size(); + last = null; + while (queue.size() > pos) { + CompositeKey key = queue.toCompositeKey(queue.pop()); + if (last == null) { + last = key; + } + assertThat(key, equalTo(expected[ptr--])); + } } } reader.close(); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/composite/InternalCompositeTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/composite/InternalCompositeTests.java index f06472beb46e1..be9a760150427 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/composite/InternalCompositeTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/composite/InternalCompositeTests.java @@ -19,6 +19,7 @@ package org.elasticsearch.search.aggregations.bucket.composite; +import com.google.common.collect.Lists; import org.apache.lucene.util.BytesRef; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.time.DateFormatter; @@ -42,9 +43,14 @@ import java.util.Map; import java.util.TreeSet; import java.util.stream.Collectors; +import java.util.stream.IntStream; import static com.carrotsearch.randomizedtesting.RandomizedTest.randomAsciiLettersOfLengthBetween; +import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.greaterThan; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.lessThan; import static org.hamcrest.Matchers.lessThanOrEqualTo; public class InternalCompositeTests extends InternalMultiBucketAggregationTestCase { @@ -240,4 +246,137 @@ public void testReduceSame() throws IOException { assertThat(bucket.getDocCount(), equalTo(expectedBucket.getDocCount()*numSame)); } } + + public void testCompareCompositeKeyBiggerFieldName() { + InternalComposite.ArrayMap key1 = createMap( + 
Lists.newArrayList("field1", "field2"), + new Comparable[]{1, 2} + ); + InternalComposite.ArrayMap key2 = createMap( + Lists.newArrayList("field3", "field2"), + new Comparable[]{1, 2} + ); + assertThat(key1.compareTo(key2), lessThan(0)); + } + + public void testCompareCompositeKeySmallerFieldName() { + InternalComposite.ArrayMap key1 = createMap( + Lists.newArrayList("field3", "field2"), + new Comparable[]{1, 2} + ); + InternalComposite.ArrayMap key2 = createMap( + Lists.newArrayList("field1", "field2"), + new Comparable[]{1, 2} + ); + assertThat(key1.compareTo(key2), greaterThan(0)); + } + + public void testCompareCompositeKeyBiggerValue() { + InternalComposite.ArrayMap key1 = createMap( + Lists.newArrayList("field1", "field2"), + new Comparable[]{1, 2} + ); + InternalComposite.ArrayMap key2 = createMap( + Lists.newArrayList("field3", "field2"), + new Comparable[]{2, 3} + ); + assertThat(key1.compareTo(key2), lessThan(0)); + } + + public void testCompareCompositeKeySmallerValue() { + InternalComposite.ArrayMap key1 = createMap( + Lists.newArrayList("field3", "field2"), + new Comparable[]{1, 2} + ); + InternalComposite.ArrayMap key2 = createMap( + Lists.newArrayList("field1", "field2"), + new Comparable[]{2, 3} + ); + assertThat(key1.compareTo(key2), greaterThan(0)); + } + + public void testCompareCompositeKeyNullValueIsSmaller1() { + InternalComposite.ArrayMap key1 = createMap( + Lists.newArrayList("field1", "field2"), + new Comparable[]{null, 20} + ); + InternalComposite.ArrayMap key2 = createMap( + Lists.newArrayList("field1", "field2"), + new Comparable[]{1, 2} + ); + assertThat(key1.compareTo(key2), lessThan(0)); + } + + public void testCompareCompositeKeyNullValueIsSmaller2() { + InternalComposite.ArrayMap key1 = createMap( + Lists.newArrayList("field1", "field2"), + new Comparable[]{1, 2} + ); + InternalComposite.ArrayMap key2 = createMap( + Lists.newArrayList("field1", "field2"), + new Comparable[]{null, 20} + ); + assertThat(key1.compareTo(key2), greaterThan(0)); + } + + public void testCompareCompositeKeyMoreFieldsIsGreater() { + InternalComposite.ArrayMap key1 = createMap( + Lists.newArrayList("field1", "field2"), + new Comparable[]{1, 2} + ); + InternalComposite.ArrayMap key2 = createMap(Lists.newArrayList("field1", "field2", "field3"),new Comparable[]{1, 2, null}); + assertThat(key1.compareTo(key2), lessThan(0)); + } + + public void testCompareCompositeKeyLessFieldsIsLesser() { + InternalComposite.ArrayMap key1 = createMap( + Lists.newArrayList("field1", "field2", "field3"), + new Comparable[]{1, 2, null} + ); + InternalComposite.ArrayMap key2 = createMap(Lists.newArrayList("field1", "field2"),new Comparable[]{1, 2}); + assertThat(key1.compareTo(key2), greaterThan(0)); + } + + public void testCompareCompositeKeyEqual() { + InternalComposite.ArrayMap key1 = createMap( + Lists.newArrayList("field1", "field2", "field3"), + new Comparable[]{null, 1, 2} + ); + InternalComposite.ArrayMap key2 = createMap( + Lists.newArrayList("field1", "field2", "field3"), + new Comparable[]{null, 1, 2} + ); + assertThat(key1.compareTo(key1), equalTo(0)); + assertThat(key1.equals(key1), is(true)); + + assertThat(key1.compareTo(key2), equalTo(0)); + assertThat(key1.equals(key2), is(true)); + assertThat(key2.equals(key1), is(true)); + } + + public void testCompareCompositeKeyValuesHaveDifferentTypes() { + InternalComposite.ArrayMap key1 = createMap( + Lists.newArrayList("field1", "field2"), + new Comparable[]{1, 2} + ); + + InternalComposite.ArrayMap key2 = createMap( + Lists.newArrayList("field1", 
"field2"), + new Comparable[]{"1", 2} + ); + + ClassCastException exception = expectThrows(ClassCastException.class, () -> key1.compareTo(key2)); + assertThat(exception.getMessage(), + containsString("java.lang.String cannot be cast to")); + } + + private InternalComposite.ArrayMap createMap(List fields, Comparable[] values) { + List formats = IntStream.range(0, fields.size()) + .mapToObj(i -> DocValueFormat.RAW).collect(Collectors.toList()); + return new InternalComposite.ArrayMap( + fields, + formats, + values + ); + } } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/StringTermsIT.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/StringTermsIT.java index 274fb2d4ffd87..11eed6f90e739 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/StringTermsIT.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/StringTermsIT.java @@ -842,7 +842,7 @@ public void testSingleValuedFieldOrderedByNonMetricsOrMultiBucketSubAggregation( } } - public void testSingleValuedFieldOrderedByMultiValuedSubAggregationWithUknownMetric() throws Exception { + public void testSingleValuedFieldOrderedByMultiValuedSubAggregationWithUnknownMetric() throws Exception { for (String index : Arrays.asList("idx", "idx_unmapped")) { try { SearchResponse response = client() diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalWeightedAvgTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalWeightedAvgTests.java new file mode 100644 index 0000000000000..b74eac80496cf --- /dev/null +++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalWeightedAvgTests.java @@ -0,0 +1,114 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.search.aggregations.metrics; + +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import org.elasticsearch.common.io.stream.Writeable.Reader; +import org.elasticsearch.search.DocValueFormat; +import org.elasticsearch.search.aggregations.ParsedAggregation; +import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator; +import org.elasticsearch.test.InternalAggregationTestCase; + +public class InternalWeightedAvgTests extends InternalAggregationTestCase<InternalWeightedAvg> { + + @Override + protected InternalWeightedAvg createTestInstance( + String name, + List<PipelineAggregator> pipelineAggregators, + Map<String, Object> metaData + ) { + DocValueFormat formatter = randomNumericDocValueFormat(); + return new InternalWeightedAvg( + name, + randomDoubleBetween(0, 100000, true), + randomDoubleBetween(0, 100000, true), + formatter, pipelineAggregators, metaData); + } + + @Override + protected Reader<InternalWeightedAvg> instanceReader() { + return InternalWeightedAvg::new; + } + + @Override + protected void assertReduced(InternalWeightedAvg reduced, List<InternalWeightedAvg> inputs) { + double sum = 0; + double weight = 0; + for (InternalWeightedAvg in : inputs) { + sum += in.getSum(); + weight += in.getWeight(); + } + assertEquals(sum, reduced.getSum(), 0.0000001); + assertEquals(weight, reduced.getWeight(), 0.0000001); + assertEquals(sum / weight, reduced.getValue(), 0.0000001); + } + + @Override + protected void assertFromXContent(InternalWeightedAvg avg, ParsedAggregation parsedAggregation) { + ParsedWeightedAvg parsed = ((ParsedWeightedAvg) parsedAggregation); + assertEquals(avg.getValue(), parsed.getValue(), Double.MIN_VALUE); + // we don't print out VALUE_AS_STRING for avg.getWeight() == 0, so we cannot get the exact same value back + if (avg.getWeight() != 0) { + assertEquals(avg.getValueAsString(), parsed.getValueAsString()); + } + } + + @Override + protected InternalWeightedAvg mutateInstance(InternalWeightedAvg instance) { + String name = instance.getName(); + double sum = instance.getSum(); + double weight = instance.getWeight(); + DocValueFormat formatter = instance.getFormatter(); + List<PipelineAggregator> pipelineAggregators = instance.pipelineAggregators(); + Map<String, Object> metaData = instance.getMetaData(); + switch (between(0, 3)) { + case 0: + name += randomAlphaOfLength(5); + break; + case 1: + if (Double.isFinite(sum)) { + sum += between(1, 100); + } else { + sum = between(1, 100); + } + break; + case 2: + if (Double.isFinite(weight)) { + weight += between(1, 100); + } else { + weight = between(1, 100); + } + break; + case 3: + if (metaData == null) { + metaData = new HashMap<>(1); + } else { + metaData = new HashMap<>(instance.getMetaData()); + } + metaData.put(randomAlphaOfLength(15), randomInt()); + break; + default: + throw new AssertionError("Illegal randomisation branch"); + } + return new InternalWeightedAvg(name, sum, weight, formatter, pipelineAggregators, metaData); + } +} diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/BucketSortIT.java b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/BucketSortIT.java index 8618d5a34ba5c..7cb4371354c3b 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/BucketSortIT.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/BucketSortIT.java @@ -43,6 +43,7 @@ import static org.elasticsearch.search.aggregations.AggregationBuilders.avg; import static org.elasticsearch.search.aggregations.AggregationBuilders.dateHistogram; import static
org.elasticsearch.search.aggregations.AggregationBuilders.histogram; +import static org.elasticsearch.search.aggregations.AggregationBuilders.max; import static org.elasticsearch.search.aggregations.AggregationBuilders.terms; import static org.elasticsearch.search.aggregations.PipelineAggregatorBuilders.bucketSort; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse; @@ -191,6 +192,26 @@ public void testSortTermsOnKey() { } } + public void testSortTermsOnKeyWithSize() { + SearchResponse response = client().prepareSearch(INDEX) + .setSize(0) + .addAggregation(terms("foos").field(TERM_FIELD) + .subAggregation(bucketSort("bucketSort", Arrays.asList(new FieldSortBuilder("_key"))).size(3))) + .get(); + + assertSearchResponse(response); + + Terms terms = response.getAggregations().get("foos"); + assertThat(terms, notNullValue()); + List<? extends Terms.Bucket> termsBuckets = terms.getBuckets(); + assertEquals(3, termsBuckets.size()); + String previousKey = (String) termsBuckets.get(0).getKey(); + for (Terms.Bucket termBucket : termsBuckets) { + assertThat(previousKey, lessThanOrEqualTo((String) termBucket.getKey())); + previousKey = (String) termBucket.getKey(); + } + } + public void testSortTermsOnSubAggregation() { SearchResponse response = client().prepareSearch(INDEX) .setSize(0) @@ -231,6 +252,29 @@ public void testSortTermsOnSubAggregation() { } } + public void testSortTermsOnSubAggregationPreservesOrderOnEquals() { + SearchResponse response = client().prepareSearch(INDEX) + .setSize(0) + .addAggregation(terms("foos").field(TERM_FIELD) + .subAggregation(bucketSort("keyBucketSort", Arrays.asList(new FieldSortBuilder("_key")))) + .subAggregation(max("max").field("missingValue").missing(1)) + .subAggregation(bucketSort("maxBucketSort", Arrays.asList(new FieldSortBuilder("max"))))) + .get(); + + assertSearchResponse(response); + + Terms terms = response.getAggregations().get("foos"); + assertThat(terms, notNullValue()); + List<? extends Terms.Bucket> termsBuckets = terms.getBuckets(); + + // Since all max values are equal, we expect the order of keyBucketSort to have been preserved + String previousKey = (String) termsBuckets.get(0).getKey(); + for (Terms.Bucket termBucket : termsBuckets) { + assertThat(previousKey, lessThanOrEqualTo((String) termBucket.getKey())); + previousKey = (String) termBucket.getKey(); + } + } + public void testSortTermsOnCountWithSecondarySort() { SearchResponse response = client().prepareSearch(INDEX) .setSize(0) diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/support/ValuesSourceConfigTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/support/ValuesSourceConfigTests.java index b213ca785e234..ad41e97364655 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/support/ValuesSourceConfigTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/support/ValuesSourceConfigTests.java @@ -27,6 +27,7 @@ import org.elasticsearch.index.IndexService; import org.elasticsearch.index.engine.Engine.Searcher; import org.elasticsearch.index.fielddata.SortedBinaryDocValues; +import org.elasticsearch.index.mapper.TypeFieldMapper; import org.elasticsearch.index.query.QueryShardContext; import org.elasticsearch.test.ESSingleNodeTestCase; @@ -259,6 +260,16 @@ public void testUnmappedBoolean() throws Exception { } } + public void testTypeFieldDeprecation() { + IndexService indexService = createIndex("index", Settings.EMPTY, "type"); + try (Searcher searcher = indexService.getShard(0).acquireSearcher("test")) { +
QueryShardContext context = indexService.newQueryShardContext(0, searcher.reader(), () -> 42L, null); + + ValuesSourceConfig config = ValuesSourceConfig.resolve( + context, null, TypeFieldMapper.NAME, null, null, null, null); + assertWarnings(QueryShardContext.TYPES_DEPRECATION_MESSAGE); + } + } public void testFieldAlias() throws Exception { IndexService indexService = createIndex("index", Settings.EMPTY, "type", diff --git a/server/src/test/java/org/elasticsearch/search/fields/SearchFieldsIT.java b/server/src/test/java/org/elasticsearch/search/fields/SearchFieldsIT.java index 5f5f742bfd630..8a90ca0b8ca47 100644 --- a/server/src/test/java/org/elasticsearch/search/fields/SearchFieldsIT.java +++ b/server/src/test/java/org/elasticsearch/search/fields/SearchFieldsIT.java @@ -28,6 +28,7 @@ import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.collect.MapBuilder; import org.elasticsearch.common.document.DocumentField; +import org.elasticsearch.common.joda.Joda; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.time.DateFormatters; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -897,8 +898,9 @@ public void testDocValueFields() throws Exception { assertThat(searchResponse.getHits().getAt(0).getFields().get("long_field").getValue(), equalTo("4.0")); assertThat(searchResponse.getHits().getAt(0).getFields().get("float_field").getValue(), equalTo("5.0")); assertThat(searchResponse.getHits().getAt(0).getFields().get("double_field").getValue(), equalTo("6.0")); + // TODO: switch to java date formatter, but will require special casing java 8 as there is a bug with epoch formatting there assertThat(searchResponse.getHits().getAt(0).getFields().get("date_field").getValue(), - equalTo(DateFormatters.forPattern("epoch_millis").format(date))); + equalTo(Joda.forPattern("epoch_millis").format(date))); } public void testScriptFields() throws Exception { diff --git a/server/src/test/java/org/elasticsearch/search/geo/GeoShapeIntegrationIT.java b/server/src/test/java/org/elasticsearch/search/geo/GeoShapeIntegrationIT.java index b120b54687607..a7faa04017258 100644 --- a/server/src/test/java/org/elasticsearch/search/geo/GeoShapeIntegrationIT.java +++ b/server/src/test/java/org/elasticsearch/search/geo/GeoShapeIntegrationIT.java @@ -22,6 +22,7 @@ import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.routing.IndexShardRoutingTable; import org.elasticsearch.common.Strings; +import org.elasticsearch.common.geo.builders.PointBuilder; import org.elasticsearch.common.geo.builders.ShapeBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentType; @@ -158,6 +159,88 @@ public void testIndexShapeRouting() throws Exception { assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)); } + public void testIndexPolygonDateLine() throws Exception { + String mappingVector = "{\n" + + " \"properties\": {\n" + + " \"shape\": {\n" + + " \"type\": \"geo_shape\"\n" + + " }\n" + + " }\n" + + " }"; + + String mappingQuad = "{\n" + + " \"properties\": {\n" + + " \"shape\": {\n" + + " \"type\": \"geo_shape\",\n" + + " \"tree\": \"quadtree\"\n" + + " }\n" + + " }\n" + + " }"; + + + // create index + assertAcked(client().admin().indices().prepareCreate("vector").addMapping("doc", mappingVector, XContentType.JSON).get()); + ensureGreen(); + + assertAcked(client().admin().indices().prepareCreate("quad").addMapping("doc", mappingQuad, XContentType.JSON).get()); + 
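The testIndexPolygonDateLine setup continuing below indexes POLYGON((179 0, -179 0, -179 2, 179 2, 179 0)), a box only two degrees wide that crosses the antimeridian, into both the default ("vector") and quadtree ("quad") mappings. A toy model of the ambiguity those assertions pin down, independent of Lucene's actual polygon handling:

public final class DatelineSketch {
    // An edge from lon=179 to lon=-179 can be read as 358 degrees wide (an inverted
    // box) or as 2 degrees wide across the dateline; the assertions below expect the
    // shorter-arc reading, so lon=-179.75 falls inside and lon=90 falls outside.
    static double width(double lonFrom, double lonTo) {
        double direct = Math.abs(lonTo - lonFrom);
        return Math.min(direct, 360 - direct);
    }

    public static void main(String[] args) {
        System.out.println(width(179, -179)); // 2.0, not 358.0
    }
}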
ensureGreen(); + + String source = "{\n" + + " \"shape\" : \"POLYGON((179 0, -179 0, -179 2, 179 2, 179 0))\""+ + "}"; + + indexRandom(true, client().prepareIndex("quad", "doc", "0").setSource(source, XContentType.JSON)); + indexRandom(true, client().prepareIndex("vector", "doc", "0").setSource(source, XContentType.JSON)); + + SearchResponse searchResponse = client().prepareSearch("quad").setQuery( + geoShapeQuery("shape", new PointBuilder(-179.75, 1)) + ).get(); + + + assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)); + + searchResponse = client().prepareSearch("quad").setQuery( + geoShapeQuery("shape", new PointBuilder(90, 1)) + ).get(); + + assertThat(searchResponse.getHits().getTotalHits().value, equalTo(0L)); + + searchResponse = client().prepareSearch("quad").setQuery( + geoShapeQuery("shape", new PointBuilder(-180, 1)) + ).get(); + + assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)); + searchResponse = client().prepareSearch("quad").setQuery( + geoShapeQuery("shape", new PointBuilder(180, 1)) + ).get(); + + assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)); + + searchResponse = client().prepareSearch("vector").setQuery( + geoShapeQuery("shape", new PointBuilder(90, 1)) + ).get(); + + assertThat(searchResponse.getHits().getTotalHits().value, equalTo(0L)); + + searchResponse = client().prepareSearch("vector").setQuery( + geoShapeQuery("shape", new PointBuilder(-179.75, 1)) + ).get(); + + assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)); + + searchResponse = client().prepareSearch("vector").setQuery( + geoShapeQuery("shape", new PointBuilder(-180, 1)) + ).get(); + + assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)); + + searchResponse = client().prepareSearch("vector").setQuery( + geoShapeQuery("shape", new PointBuilder(180, 1)) + ).get(); + + assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)); + } + private String findNodeName(String index) { ClusterState state = client().admin().cluster().prepareState().get().getState(); IndexShardRoutingTable shard = state.getRoutingTable().index(index).shard(0); diff --git a/server/src/test/java/org/elasticsearch/search/geo/GeoShapeQueryTests.java b/server/src/test/java/org/elasticsearch/search/geo/GeoShapeQueryTests.java index a64f98df5a6eb..35beb10934e3d 100644 --- a/server/src/test/java/org/elasticsearch/search/geo/GeoShapeQueryTests.java +++ b/server/src/test/java/org/elasticsearch/search/geo/GeoShapeQueryTests.java @@ -195,7 +195,44 @@ public void testIndexedShapeReference() throws Exception { .endObject() .endObject()).setRefreshPolicy(IMMEDIATE).get(); - SearchResponse searchResponse = client().prepareSearch("test").setTypes("type1") + SearchResponse searchResponse = client().prepareSearch("test") + .setQuery(geoIntersectionQuery("location", "Big_Rectangle")) + .get(); + + assertSearchResponse(searchResponse); + assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)); + assertThat(searchResponse.getHits().getHits().length, equalTo(1)); + assertThat(searchResponse.getHits().getAt(0).getId(), equalTo("1")); + + searchResponse = client().prepareSearch("test") + .setQuery(geoShapeQuery("location", "Big_Rectangle")) + .get(); + + assertSearchResponse(searchResponse); + assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)); + assertThat(searchResponse.getHits().getHits().length, equalTo(1)); + assertThat(searchResponse.getHits().getAt(0).getId(), equalTo("1")); + } + + public void 
testIndexedShapeReferenceWithTypes() throws Exception { + String mapping = Strings.toString(createMapping()); + client().admin().indices().prepareCreate("test").addMapping("type1", mapping, XContentType.JSON).get(); + createIndex("shapes"); + ensureGreen(); + + EnvelopeBuilder shape = new EnvelopeBuilder(new Coordinate(-45, 45), new Coordinate(45, -45)); + + client().prepareIndex("shapes", "shape_type", "Big_Rectangle").setSource(jsonBuilder().startObject() + .field("shape", shape).endObject()).setRefreshPolicy(IMMEDIATE).get(); + client().prepareIndex("test", "type1", "1").setSource(jsonBuilder().startObject() + .field("name", "Document 1") + .startObject("location") + .field("type", "point") + .startArray("coordinates").value(-30).value(-30).endArray() + .endObject() + .endObject()).setRefreshPolicy(IMMEDIATE).get(); + + SearchResponse searchResponse = client().prepareSearch("test") .setQuery(geoIntersectionQuery("location", "Big_Rectangle", "shape_type")) .get(); @@ -225,8 +262,8 @@ public void testIndexedShapeReferenceSourceDisabled() throws Exception { client().prepareIndex("shapes", "shape_type", "Big_Rectangle").setSource(jsonBuilder().startObject() .field("shape", shape).endObject()).setRefreshPolicy(IMMEDIATE).get(); - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> client().prepareSearch("test").setTypes("type1") - .setQuery(geoIntersectionQuery("location", "Big_Rectangle", "shape_type")).get()); + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> client().prepareSearch("test") + .setQuery(geoIntersectionQuery("location", "Big_Rectangle")).get()); assertThat(e.getMessage(), containsString("source disabled")); } @@ -273,28 +310,28 @@ public void testShapeFetchingPath() throws Exception { .endArray().endArray() .endObject().endObject()).setRefreshPolicy(IMMEDIATE).get(); - GeoShapeQueryBuilder filter = QueryBuilders.geoShapeQuery("location", "1", "type").relation(ShapeRelation.INTERSECTS) + GeoShapeQueryBuilder filter = QueryBuilders.geoShapeQuery("location", "1").relation(ShapeRelation.INTERSECTS) .indexedShapeIndex("shapes") .indexedShapePath("location"); SearchResponse result = client().prepareSearch("test").setQuery(QueryBuilders.matchAllQuery()) .setPostFilter(filter).get(); assertSearchResponse(result); assertHitCount(result, 1); - filter = QueryBuilders.geoShapeQuery("location", "1", "type").relation(ShapeRelation.INTERSECTS) + filter = QueryBuilders.geoShapeQuery("location", "1").relation(ShapeRelation.INTERSECTS) .indexedShapeIndex("shapes") .indexedShapePath("1.location"); result = client().prepareSearch("test").setQuery(QueryBuilders.matchAllQuery()) .setPostFilter(filter).get(); assertSearchResponse(result); assertHitCount(result, 1); - filter = QueryBuilders.geoShapeQuery("location", "1", "type").relation(ShapeRelation.INTERSECTS) + filter = QueryBuilders.geoShapeQuery("location", "1").relation(ShapeRelation.INTERSECTS) .indexedShapeIndex("shapes") .indexedShapePath("1.2.location"); result = client().prepareSearch("test").setQuery(QueryBuilders.matchAllQuery()) .setPostFilter(filter).get(); assertSearchResponse(result); assertHitCount(result, 1); - filter = QueryBuilders.geoShapeQuery("location", "1", "type").relation(ShapeRelation.INTERSECTS) + filter = QueryBuilders.geoShapeQuery("location", "1").relation(ShapeRelation.INTERSECTS) .indexedShapeIndex("shapes") .indexedShapePath("1.2.3.location"); result = client().prepareSearch("test").setQuery(QueryBuilders.matchAllQuery()) @@ -303,25 +340,25 @@ public 
void testShapeFetchingPath() throws Exception { assertHitCount(result, 1); // now test the query variant - GeoShapeQueryBuilder query = QueryBuilders.geoShapeQuery("location", "1", "type") + GeoShapeQueryBuilder query = QueryBuilders.geoShapeQuery("location", "1") .indexedShapeIndex("shapes") .indexedShapePath("location"); result = client().prepareSearch("test").setQuery(query).get(); assertSearchResponse(result); assertHitCount(result, 1); - query = QueryBuilders.geoShapeQuery("location", "1", "type") + query = QueryBuilders.geoShapeQuery("location", "1") .indexedShapeIndex("shapes") .indexedShapePath("1.location"); result = client().prepareSearch("test").setQuery(query).get(); assertSearchResponse(result); assertHitCount(result, 1); - query = QueryBuilders.geoShapeQuery("location", "1", "type") + query = QueryBuilders.geoShapeQuery("location", "1") .indexedShapeIndex("shapes") .indexedShapePath("1.2.location"); result = client().prepareSearch("test").setQuery(query).get(); assertSearchResponse(result); assertHitCount(result, 1); - query = QueryBuilders.geoShapeQuery("location", "1", "type") + query = QueryBuilders.geoShapeQuery("location", "1") .indexedShapeIndex("shapes") .indexedShapePath("1.2.3.location"); result = client().prepareSearch("test").setQuery(query).get(); @@ -356,7 +393,7 @@ public void testQueryRandomGeoCollection() throws Exception { GeoShapeQueryBuilder geoShapeQueryBuilder = QueryBuilders.geoShapeQuery("location", filterShape); geoShapeQueryBuilder.relation(ShapeRelation.INTERSECTS); - SearchResponse result = client().prepareSearch("test").setTypes("type").setQuery(geoShapeQueryBuilder).get(); + SearchResponse result = client().prepareSearch("test").setQuery(geoShapeQueryBuilder).get(); assertSearchResponse(result); assertHitCount(result, 1); } @@ -405,7 +442,7 @@ public void testRandomGeoCollectionQuery() throws Exception { GeoShapeQueryBuilder geoShapeQueryBuilder = QueryBuilders.geoShapeQuery("location", queryCollection); geoShapeQueryBuilder.relation(ShapeRelation.INTERSECTS); - SearchResponse result = client().prepareSearch("test").setTypes("type").setQuery(geoShapeQueryBuilder).get(); + SearchResponse result = client().prepareSearch("test").setQuery(geoShapeQueryBuilder).get(); assertSearchResponse(result); assertTrue(result.getHits().getTotalHits().value > 0); } @@ -429,7 +466,7 @@ public void testPointQuery() throws Exception { GeoShapeQueryBuilder geoShapeQueryBuilder = QueryBuilders.geoShapeQuery("location", pb); geoShapeQueryBuilder.relation(ShapeRelation.INTERSECTS); - SearchResponse result = client().prepareSearch("test").setTypes("type").setQuery(geoShapeQueryBuilder).get(); + SearchResponse result = client().prepareSearch("test").setQuery(geoShapeQueryBuilder).get(); assertSearchResponse(result); assertHitCount(result, 1); } @@ -454,7 +491,7 @@ public void testContainsShapeQuery() throws Exception { ShapeBuilder filterShape = (gcb.getShapeAt(randomIntBetween(0, gcb.numShapes() - 1))); GeoShapeQueryBuilder filter = QueryBuilders.geoShapeQuery("location", filterShape) .relation(ShapeRelation.CONTAINS); - SearchResponse response = client().prepareSearch("test").setTypes("type").setQuery(QueryBuilders.matchAllQuery()) + SearchResponse response = client().prepareSearch("test").setQuery(QueryBuilders.matchAllQuery()) .setPostFilter(filter).get(); assertSearchResponse(response); @@ -478,7 +515,7 @@ public void testExistsQuery() throws Exception { client().prepareIndex("test", "type", "1").setSource(docSource).setRefreshPolicy(IMMEDIATE).get(); 
ExistsQueryBuilder eqb = QueryBuilders.existsQuery("location"); - SearchResponse result = client().prepareSearch("test").setTypes("type").setQuery(eqb).get(); + SearchResponse result = client().prepareSearch("test").setQuery(eqb).get(); assertSearchResponse(result); assertHitCount(result, 1); } @@ -520,7 +557,7 @@ public void testShapeFilterWithDefinedGeoCollection() throws Exception { new PolygonBuilder(new CoordinatesBuilder().coordinate(99.0, -1.0).coordinate(99.0, 3.0) .coordinate(103.0, 3.0).coordinate(103.0, -1.0) .coordinate(99.0, -1.0)))).relation(ShapeRelation.INTERSECTS); - SearchResponse result = client().prepareSearch("test").setTypes("type").setQuery(QueryBuilders.matchAllQuery()) + SearchResponse result = client().prepareSearch("test").setQuery(QueryBuilders.matchAllQuery()) .setPostFilter(filter).get(); assertSearchResponse(result); assertHitCount(result, 1); @@ -530,7 +567,7 @@ public void testShapeFilterWithDefinedGeoCollection() throws Exception { new PolygonBuilder(new CoordinatesBuilder().coordinate(199.0, -11.0).coordinate(199.0, 13.0) .coordinate(193.0, 13.0).coordinate(193.0, -11.0) .coordinate(199.0, -11.0)))).relation(ShapeRelation.INTERSECTS); - result = client().prepareSearch("test").setTypes("type").setQuery(QueryBuilders.matchAllQuery()) + result = client().prepareSearch("test").setQuery(QueryBuilders.matchAllQuery()) .setPostFilter(filter).get(); assertSearchResponse(result); assertHitCount(result, 0); @@ -541,13 +578,13 @@ public void testShapeFilterWithDefinedGeoCollection() throws Exception { new PolygonBuilder(new CoordinatesBuilder().coordinate(199.0, -11.0).coordinate(199.0, 13.0) .coordinate(193.0, 13.0).coordinate(193.0, -11.0) .coordinate(199.0, -11.0)))).relation(ShapeRelation.INTERSECTS); - result = client().prepareSearch("test").setTypes("type").setQuery(QueryBuilders.matchAllQuery()) + result = client().prepareSearch("test").setQuery(QueryBuilders.matchAllQuery()) .setPostFilter(filter).get(); assertSearchResponse(result); assertHitCount(result, 1); // no shape filter = QueryBuilders.geoShapeQuery("location", new GeometryCollectionBuilder()); - result = client().prepareSearch("test").setTypes("type").setQuery(QueryBuilders.matchAllQuery()) + result = client().prepareSearch("test").setQuery(QueryBuilders.matchAllQuery()) .setPostFilter(filter).get(); assertSearchResponse(result); assertHitCount(result, 0); diff --git a/server/src/test/java/org/elasticsearch/search/internal/ShardSearchTransportRequestTests.java b/server/src/test/java/org/elasticsearch/search/internal/ShardSearchTransportRequestTests.java index 21a4f099f5a32..da987a657260a 100644 --- a/server/src/test/java/org/elasticsearch/search/internal/ShardSearchTransportRequestTests.java +++ b/server/src/test/java/org/elasticsearch/search/internal/ShardSearchTransportRequestTests.java @@ -28,9 +28,6 @@ import org.elasticsearch.common.Nullable; import org.elasticsearch.common.Strings; import org.elasticsearch.common.compress.CompressedXContent; -import org.elasticsearch.common.io.stream.BytesStreamOutput; -import org.elasticsearch.common.io.stream.NamedWriteableAwareStreamInput; -import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.DeprecationHandler; import org.elasticsearch.common.xcontent.ToXContent; @@ -59,29 +56,26 @@ public class ShardSearchTransportRequestTests extends AbstractSearchTestCase { public void testSerialization() throws Exception { ShardSearchTransportRequest 
shardSearchTransportRequest = createShardSearchTransportRequest(); - try (BytesStreamOutput output = new BytesStreamOutput()) { - shardSearchTransportRequest.writeTo(output); - try (StreamInput in = new NamedWriteableAwareStreamInput(output.bytes().streamInput(), namedWriteableRegistry)) { - ShardSearchTransportRequest deserializedRequest = new ShardSearchTransportRequest(in); - assertEquals(deserializedRequest.scroll(), shardSearchTransportRequest.scroll()); - assertEquals(deserializedRequest.getAliasFilter(), shardSearchTransportRequest.getAliasFilter()); - assertArrayEquals(deserializedRequest.indices(), shardSearchTransportRequest.indices()); - assertArrayEquals(deserializedRequest.types(), shardSearchTransportRequest.types()); - assertEquals(deserializedRequest.indicesOptions(), shardSearchTransportRequest.indicesOptions()); - assertEquals(deserializedRequest.isProfile(), shardSearchTransportRequest.isProfile()); - assertEquals(deserializedRequest.nowInMillis(), shardSearchTransportRequest.nowInMillis()); - assertEquals(deserializedRequest.source(), shardSearchTransportRequest.source()); - assertEquals(deserializedRequest.searchType(), shardSearchTransportRequest.searchType()); - assertEquals(deserializedRequest.shardId(), shardSearchTransportRequest.shardId()); - assertEquals(deserializedRequest.numberOfShards(), shardSearchTransportRequest.numberOfShards()); - assertEquals(deserializedRequest.indexRoutings(), shardSearchTransportRequest.indexRoutings()); - assertEquals(deserializedRequest.preference(), shardSearchTransportRequest.preference()); - assertEquals(deserializedRequest.cacheKey(), shardSearchTransportRequest.cacheKey()); - assertNotSame(deserializedRequest, shardSearchTransportRequest); - assertEquals(deserializedRequest.getAliasFilter(), shardSearchTransportRequest.getAliasFilter()); - assertEquals(deserializedRequest.indexBoost(), shardSearchTransportRequest.indexBoost(), 0.0f); - } - } + ShardSearchTransportRequest deserializedRequest = + copyWriteable(shardSearchTransportRequest, namedWriteableRegistry, ShardSearchTransportRequest::new); + assertEquals(deserializedRequest.scroll(), shardSearchTransportRequest.scroll()); + assertEquals(deserializedRequest.getAliasFilter(), shardSearchTransportRequest.getAliasFilter()); + assertArrayEquals(deserializedRequest.indices(), shardSearchTransportRequest.indices()); + assertArrayEquals(deserializedRequest.types(), shardSearchTransportRequest.types()); + assertEquals(deserializedRequest.indicesOptions(), shardSearchTransportRequest.indicesOptions()); + assertEquals(deserializedRequest.isProfile(), shardSearchTransportRequest.isProfile()); + assertEquals(deserializedRequest.nowInMillis(), shardSearchTransportRequest.nowInMillis()); + assertEquals(deserializedRequest.source(), shardSearchTransportRequest.source()); + assertEquals(deserializedRequest.searchType(), shardSearchTransportRequest.searchType()); + assertEquals(deserializedRequest.shardId(), shardSearchTransportRequest.shardId()); + assertEquals(deserializedRequest.numberOfShards(), shardSearchTransportRequest.numberOfShards()); + assertArrayEquals(deserializedRequest.indexRoutings(), shardSearchTransportRequest.indexRoutings()); + assertEquals(deserializedRequest.preference(), shardSearchTransportRequest.preference()); + assertEquals(deserializedRequest.cacheKey(), shardSearchTransportRequest.cacheKey()); + assertNotSame(deserializedRequest, shardSearchTransportRequest); + assertEquals(deserializedRequest.getAliasFilter(), shardSearchTransportRequest.getAliasFilter()); + 
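// the cluster alias assertion below is new; createShardSearchTransportRequest now passes a random
+        // cluster alias instead of null, so the copyWriteable round trip covers that field as well
+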
assertEquals(deserializedRequest.indexBoost(), shardSearchTransportRequest.indexBoost(), 0.0f); + assertEquals(deserializedRequest.getClusterAlias(), shardSearchTransportRequest.getClusterAlias()); } private ShardSearchTransportRequest createShardSearchTransportRequest() throws IOException { @@ -97,7 +91,7 @@ private ShardSearchTransportRequest createShardSearchTransportRequest() throws I final String[] routings = generateRandomStringArray(5, 10, false, true); return new ShardSearchTransportRequest(new OriginalIndices(searchRequest), searchRequest, shardId, randomIntBetween(1, 100), filteringAliases, randomBoolean() ? 1.0f : randomFloat(), - Math.abs(randomLong()), null, routings); + Math.abs(randomLong()), randomAlphaOfLengthBetween(3, 10), routings); } public void testFilteringAliases() throws Exception { @@ -154,8 +148,7 @@ public static CompressedXContent filter(QueryBuilder filterBuilder) throws IOExc } private IndexMetaData remove(IndexMetaData indexMetaData, String alias) { - IndexMetaData build = IndexMetaData.builder(indexMetaData).removeAlias(alias).build(); - return build; + return IndexMetaData.builder(indexMetaData).removeAlias(alias).build(); } private IndexMetaData add(IndexMetaData indexMetaData, String alias, @Nullable CompressedXContent filter) { diff --git a/server/src/test/java/org/elasticsearch/search/morelikethis/MoreLikeThisIT.java b/server/src/test/java/org/elasticsearch/search/morelikethis/MoreLikeThisIT.java index 62684f811643d..2e29c7c5a3815 100644 --- a/server/src/test/java/org/elasticsearch/search/morelikethis/MoreLikeThisIT.java +++ b/server/src/test/java/org/elasticsearch/search/morelikethis/MoreLikeThisIT.java @@ -89,10 +89,34 @@ public void testSimpleMoreLikeThis() throws Exception { logger.info("Running moreLikeThis"); SearchResponse response = client().prepareSearch().setQuery( - new MoreLikeThisQueryBuilder(null, new Item[] {new Item("test", "type1", "1")}).minTermFreq(1).minDocFreq(1)).get(); + new MoreLikeThisQueryBuilder(null, new Item[] {new Item("test", "1")}).minTermFreq(1).minDocFreq(1)).get(); assertHitCount(response, 1L); } + public void testSimpleMoreLikeThisWithTypes() throws Exception { + logger.info("Creating index test"); + assertAcked(prepareCreate("test").addMapping("type1", + jsonBuilder().startObject().startObject("type1").startObject("properties") + .startObject("text").field("type", "text").endObject() + .endObject().endObject().endObject())); + + logger.info("Running Cluster Health"); + assertThat(ensureGreen(), equalTo(ClusterHealthStatus.GREEN)); + + logger.info("Indexing..."); + client().index(indexRequest("test").type("type1").id("1").source(jsonBuilder().startObject().field("text", "lucene").endObject())) + .actionGet(); + client().index(indexRequest("test").type("type1").id("2") + .source(jsonBuilder().startObject().field("text", "lucene release").endObject())).actionGet(); + client().admin().indices().refresh(refreshRequest()).actionGet(); + + logger.info("Running moreLikeThis"); + SearchResponse response = client().prepareSearch().setQuery( + new MoreLikeThisQueryBuilder(null, new Item[] {new Item("test", "type1", "1")}).minTermFreq(1).minDocFreq(1)).get(); + assertHitCount(response, 1L); + } + + //Issue #30148 public void testMoreLikeThisForZeroTokensInOneOfTheAnalyzedFields() throws Exception { CreateIndexRequestBuilder createIndexRequestBuilder = prepareCreate("test") @@ -116,7 +140,7 @@ public void testMoreLikeThisForZeroTokensInOneOfTheAnalyzedFields() throws Excep 
client().admin().indices().refresh(refreshRequest()).actionGet(); SearchResponse searchResponse = client().prepareSearch().setQuery( - moreLikeThisQuery(new String[]{"myField", "empty"}, null, new Item[]{new Item("test", "type", "1")}) + moreLikeThisQuery(new String[]{"myField", "empty"}, null, new Item[]{new Item("test", "1")}) .minTermFreq(1).minDocFreq(1) ).get(); @@ -142,7 +166,7 @@ public void testSimpleMoreLikeOnLongField() throws Exception { logger.info("Running moreLikeThis"); SearchResponse response = client().prepareSearch().setQuery( - new MoreLikeThisQueryBuilder(null, new Item[] {new Item("test", "type1", "1")}).minTermFreq(1).minDocFreq(1)).get(); + new MoreLikeThisQueryBuilder(null, new Item[] {new Item("test", "1")}).minTermFreq(1).minDocFreq(1)).get(); assertHitCount(response, 0L); } @@ -173,24 +197,24 @@ public void testMoreLikeThisWithAliases() throws Exception { logger.info("Running moreLikeThis on index"); SearchResponse response = client().prepareSearch().setQuery( - new MoreLikeThisQueryBuilder(null, new Item[] {new Item("test", "type1", "1")}).minTermFreq(1).minDocFreq(1)).get(); + new MoreLikeThisQueryBuilder(null, new Item[] {new Item("test", "1")}).minTermFreq(1).minDocFreq(1)).get(); assertHitCount(response, 2L); logger.info("Running moreLikeThis on beta shard"); response = client().prepareSearch("beta").setQuery( - new MoreLikeThisQueryBuilder(null, new Item[] {new Item("test", "type1", "1")}).minTermFreq(1).minDocFreq(1)).get(); + new MoreLikeThisQueryBuilder(null, new Item[] {new Item("test", "1")}).minTermFreq(1).minDocFreq(1)).get(); assertHitCount(response, 1L); assertThat(response.getHits().getAt(0).getId(), equalTo("3")); logger.info("Running moreLikeThis on release shard"); response = client().prepareSearch("release").setQuery( - new MoreLikeThisQueryBuilder(null, new Item[] {new Item("test", "type1", "1")}).minTermFreq(1).minDocFreq(1)).get(); + new MoreLikeThisQueryBuilder(null, new Item[] {new Item("test", "1")}).minTermFreq(1).minDocFreq(1)).get(); assertHitCount(response, 1L); assertThat(response.getHits().getAt(0).getId(), equalTo("2")); logger.info("Running moreLikeThis on alias with node client"); response = internalCluster().coordOnlyNodeClient().prepareSearch("beta").setQuery( - new MoreLikeThisQueryBuilder(null, new Item[] {new Item("test", "type1", "1")}).minTermFreq(1).minDocFreq(1)).get(); + new MoreLikeThisQueryBuilder(null, new Item[] {new Item("test", "1")}).minTermFreq(1).minDocFreq(1)).get(); assertHitCount(response, 1L); assertThat(response.getHits().getAt(0).getId(), equalTo("3")); } @@ -311,13 +335,13 @@ public void testNumericField() throws Exception { // Implicit list of fields -> ignore numeric fields SearchResponse searchResponse = client().prepareSearch().setQuery( - new MoreLikeThisQueryBuilder(null, new Item[] {new Item("test", "type", "1")}).minTermFreq(1).minDocFreq(1)).get(); + new MoreLikeThisQueryBuilder(null, new Item[] {new Item("test", "1")}).minTermFreq(1).minDocFreq(1)).get(); assertHitCount(searchResponse, 1L); // Explicit list of fields including numeric fields -> fail assertThrows(client().prepareSearch().setQuery( new MoreLikeThisQueryBuilder(new String[] {"string_value", "int_value"}, null, - new Item[] {new Item("test", "type", "1")}).minTermFreq(1).minDocFreq(1)), SearchPhaseExecutionException.class); + new Item[] {new Item("test", "1")}).minTermFreq(1).minDocFreq(1)), SearchPhaseExecutionException.class); // mlt query with no field -> No results (because _all is not enabled) searchResponse = 
client().prepareSearch().setQuery(moreLikeThisQuery(new String[] {"index"}).minTermFreq(1).minDocFreq(1)) @@ -417,7 +441,7 @@ public void testSimpleMoreLikeInclude() throws Exception { logger.info("Running More Like This with include true"); SearchResponse response = client().prepareSearch().setQuery( - new MoreLikeThisQueryBuilder(null, new Item[] {new Item("test", "type1", "1")}).minTermFreq(1).minDocFreq(1).include(true) + new MoreLikeThisQueryBuilder(null, new Item[] {new Item("test", "1")}).minTermFreq(1).minDocFreq(1).include(true) .minimumShouldMatch("0%")).get(); assertOrderedSearchHits(response, "1", "2"); @@ -428,7 +452,7 @@ public void testSimpleMoreLikeInclude() throws Exception { logger.info("Running More Like This with include false"); response = client().prepareSearch().setQuery( - new MoreLikeThisQueryBuilder(null, new Item[] {new Item("test", "type1", "1")}).minTermFreq(1).minDocFreq(1) + new MoreLikeThisQueryBuilder(null, new Item[] {new Item("test", "1")}).minTermFreq(1).minDocFreq(1) .minimumShouldMatch("0%")).get(); assertSearchHits(response, "2"); } @@ -673,7 +697,7 @@ public void testSelectFields() throws IOException, ExecutionException, Interrupt .field("text1", "elasticsearch") .endObject())); - MoreLikeThisQueryBuilder mltQuery = moreLikeThisQuery(new Item[] {new Item("test", "type1", "1")}) + MoreLikeThisQueryBuilder mltQuery = moreLikeThisQuery(new Item[] {new Item("test", "1")}) .minTermFreq(0) .minDocFreq(0) .include(true) @@ -683,7 +707,7 @@ public void testSelectFields() throws IOException, ExecutionException, Interrupt assertSearchResponse(response); assertHitCount(response, 2); - mltQuery = moreLikeThisQuery(new String[] {"text"}, null, new Item[] {new Item("test", "type1", "1")}) + mltQuery = moreLikeThisQuery(new String[] {"text"}, null, new Item[] {new Item("test", "1")}) .minTermFreq(0) .minDocFreq(0) .include(true) @@ -724,19 +748,19 @@ public void testWithMissingRouting() throws IOException { logger.info("Running moreLikeThis with one item without routing attribute"); SearchPhaseExecutionException exception = expectThrows(SearchPhaseExecutionException.class, () -> client().prepareSearch().setQuery(new MoreLikeThisQueryBuilder(null, new Item[]{ - new Item("test", "type1", "1") + new Item("test", "1") }).minTermFreq(1).minDocFreq(1)).get()); Throwable cause = exception.getCause(); assertThat(cause, instanceOf(RoutingMissingException.class)); - assertThat(cause.getMessage(), equalTo("routing is required for [test]/[type1]/[1]")); + assertThat(cause.getMessage(), equalTo("routing is required for [test]/[_doc]/[1]")); } { logger.info("Running moreLikeThis with one item with routing attribute and two items without routing attribute"); SearchPhaseExecutionException exception = expectThrows(SearchPhaseExecutionException.class, () -> client().prepareSearch().setQuery(new MoreLikeThisQueryBuilder(null, new Item[]{ - new Item("test", "type1", "1").routing("1"), + new Item("test", "1").routing("1"), new Item("test", "type1", "2"), new Item("test", "type1", "3") }).minTermFreq(1).minDocFreq(1)).get()); diff --git a/server/src/test/java/org/elasticsearch/search/query/QueryPhaseTests.java b/server/src/test/java/org/elasticsearch/search/query/QueryPhaseTests.java index 7e9c0153b728f..a321ff9c1a80a 100644 --- a/server/src/test/java/org/elasticsearch/search/query/QueryPhaseTests.java +++ b/server/src/test/java/org/elasticsearch/search/query/QueryPhaseTests.java @@ -57,6 +57,7 @@ import org.elasticsearch.index.shard.IndexShardTestCase; import 
org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.internal.ScrollContext; +import org.elasticsearch.search.internal.SearchContext; import org.elasticsearch.search.sort.SortAndFormats; import org.elasticsearch.test.TestSearchContext; @@ -453,7 +454,7 @@ public void testIndexSortingEarlyTermination() throws Exception { { contextSearcher = getAssertingEarlyTerminationSearcher(reader, 1); - context.trackTotalHits(false); + context.trackTotalHitsUpTo(SearchContext.TRACK_TOTAL_HITS_DISABLED); QueryPhase.execute(context, contextSearcher, checkCancelled -> {}); assertNull(context.queryResult().terminatedEarly()); assertThat(context.queryResult().topDocs().topDocs.scoreDocs.length, equalTo(1)); diff --git a/server/src/test/java/org/elasticsearch/search/suggest/completion/CategoryContextMappingTests.java b/server/src/test/java/org/elasticsearch/search/suggest/completion/CategoryContextMappingTests.java index 62db66eacdac9..812735092ba44 100644 --- a/server/src/test/java/org/elasticsearch/search/suggest/completion/CategoryContextMappingTests.java +++ b/server/src/test/java/org/elasticsearch/search/suggest/completion/CategoryContextMappingTests.java @@ -77,7 +77,7 @@ public void testIndexingWithNoContexts() throws Exception { DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser() .parse("type1", new CompressedXContent(mapping)); Mapper fieldMapper = defaultMapper.mappers().getMapper("completion"); - ParsedDocument parsedDocument = defaultMapper.parse(SourceToParse.source("test", "type1", "1", BytesReference + ParsedDocument parsedDocument = defaultMapper.parse(new SourceToParse("test", "type1", "1", BytesReference .bytes(jsonBuilder() .startObject() .startArray("completion") @@ -116,7 +116,7 @@ public void testIndexingWithSimpleContexts() throws Exception { DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser() .parse("type1", new CompressedXContent(mapping)); Mapper fieldMapper = defaultMapper.mappers().getMapper("completion"); - ParsedDocument parsedDocument = defaultMapper.parse(SourceToParse.source("test", "type1", "1", BytesReference + ParsedDocument parsedDocument = defaultMapper.parse(new SourceToParse("test", "type1", "1", BytesReference .bytes(jsonBuilder() .startObject() .startArray("completion") @@ -150,7 +150,7 @@ public void testIndexingWithSimpleNumberContexts() throws Exception { DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser() .parse("type1", new CompressedXContent(mapping)); Mapper fieldMapper = defaultMapper.mappers().getMapper("completion"); - ParsedDocument parsedDocument = defaultMapper.parse(SourceToParse.source("test", "type1", "1", BytesReference + ParsedDocument parsedDocument = defaultMapper.parse(new SourceToParse("test", "type1", "1", BytesReference .bytes(jsonBuilder() .startObject() .startArray("completion") @@ -184,7 +184,7 @@ public void testIndexingWithSimpleBooleanContexts() throws Exception { DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser() .parse("type1", new CompressedXContent(mapping)); Mapper fieldMapper = defaultMapper.mappers().getMapper("completion"); - ParsedDocument parsedDocument = defaultMapper.parse(SourceToParse.source("test", "type1", "1", BytesReference + ParsedDocument parsedDocument = defaultMapper.parse(new SourceToParse("test", "type1", "1", BytesReference .bytes(jsonBuilder() .startObject() .startArray("completion") @@ -231,7 +231,7 @@ public void 
testIndexingWithSimpleNULLContexts() throws Exception { .endObject(); Exception e = expectThrows(MapperParsingException.class, - () -> defaultMapper.parse(SourceToParse.source("test", "type1", "1", BytesReference.bytes(builder), XContentType.JSON))); + () -> defaultMapper.parse(new SourceToParse("test", "type1", "1", BytesReference.bytes(builder), XContentType.JSON))); assertEquals("contexts must be a string, number or boolean or a list of string, number or boolean, but was [VALUE_NULL]", e.getCause().getMessage()); } @@ -252,7 +252,7 @@ public void testIndexingWithContextList() throws Exception { DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser() .parse("type1", new CompressedXContent(mapping)); Mapper fieldMapper = defaultMapper.mappers().getMapper("completion"); - ParsedDocument parsedDocument = defaultMapper.parse(SourceToParse.source("test", "type1", "1", BytesReference + ParsedDocument parsedDocument = defaultMapper.parse(new SourceToParse("test", "type1", "1", BytesReference .bytes(jsonBuilder() .startObject() .startObject("completion") @@ -284,7 +284,7 @@ public void testIndexingWithMixedTypeContextList() throws Exception { DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser() .parse("type1", new CompressedXContent(mapping)); Mapper fieldMapper = defaultMapper.mappers().getMapper("completion"); - ParsedDocument parsedDocument = defaultMapper.parse(SourceToParse.source("test", "type1", "1", BytesReference + ParsedDocument parsedDocument = defaultMapper.parse(new SourceToParse("test", "type1", "1", BytesReference .bytes(jsonBuilder() .startObject() .startObject("completion") @@ -327,7 +327,7 @@ public void testIndexingWithMixedTypeContextListHavingNULL() throws Exception { .endObject(); Exception e = expectThrows(MapperParsingException.class, - () -> defaultMapper.parse(SourceToParse.source("test", "type1", "1", BytesReference.bytes(builder), XContentType.JSON))); + () -> defaultMapper.parse(new SourceToParse("test", "type1", "1", BytesReference.bytes(builder), XContentType.JSON))); assertEquals("context array must have string, number or boolean values, but was [VALUE_NULL]", e.getCause().getMessage()); } @@ -364,7 +364,7 @@ public void testIndexingWithMultipleContexts() throws Exception { .endObject() .endArray() .endObject(); - ParsedDocument parsedDocument = defaultMapper.parse(SourceToParse.source("test", "type1", "1", BytesReference.bytes(builder), + ParsedDocument parsedDocument = defaultMapper.parse(new SourceToParse("test", "type1", "1", BytesReference.bytes(builder), XContentType.JSON)); IndexableField[] fields = parsedDocument.rootDoc().getFields(fieldMapper.name()); assertContextSuggestFields(fields, 3); diff --git a/server/src/test/java/org/elasticsearch/search/suggest/completion/GeoContextMappingTests.java b/server/src/test/java/org/elasticsearch/search/suggest/completion/GeoContextMappingTests.java index a745384eb3edb..85e97d16e3b4f 100644 --- a/server/src/test/java/org/elasticsearch/search/suggest/completion/GeoContextMappingTests.java +++ b/server/src/test/java/org/elasticsearch/search/suggest/completion/GeoContextMappingTests.java @@ -63,7 +63,7 @@ public void testIndexingWithNoContexts() throws Exception { MapperService mapperService = createIndex("test", Settings.EMPTY, "type1", mapping).mapperService(); MappedFieldType completionFieldType = mapperService.fullName("completion"); - ParsedDocument parsedDocument = mapperService.documentMapper().parse(SourceToParse.source("test", "type1", "1", 
+ ParsedDocument parsedDocument = mapperService.documentMapper().parse(new SourceToParse("test", "type1", "1", BytesReference.bytes(jsonBuilder() .startObject() .startArray("completion") @@ -102,7 +102,7 @@ public void testIndexingWithSimpleContexts() throws Exception { MapperService mapperService = createIndex("test", Settings.EMPTY, "type1", mapping).mapperService(); MappedFieldType completionFieldType = mapperService.fullName("completion"); - ParsedDocument parsedDocument = mapperService.documentMapper().parse(SourceToParse.source("test", "type1", "1", + ParsedDocument parsedDocument = mapperService.documentMapper().parse(new SourceToParse("test", "type1", "1", BytesReference.bytes(jsonBuilder() .startObject() .startArray("completion") @@ -138,7 +138,7 @@ public void testIndexingWithContextList() throws Exception { MapperService mapperService = createIndex("test", Settings.EMPTY, "type1", mapping).mapperService(); MappedFieldType completionFieldType = mapperService.fullName("completion"); - ParsedDocument parsedDocument = mapperService.documentMapper().parse(SourceToParse.source("test", "type1", "1", + ParsedDocument parsedDocument = mapperService.documentMapper().parse(new SourceToParse("test", "type1", "1", BytesReference.bytes(jsonBuilder() .startObject() .startObject("completion") @@ -195,7 +195,7 @@ public void testIndexingWithMultipleContexts() throws Exception { .endObject() .endArray() .endObject(); - ParsedDocument parsedDocument = mapperService.documentMapper().parse(SourceToParse.source("test", "type1", "1", + ParsedDocument parsedDocument = mapperService.documentMapper().parse(new SourceToParse("test", "type1", "1", BytesReference.bytes(builder), XContentType.JSON)); IndexableField[] fields = parsedDocument.rootDoc().getFields(completionFieldType.name()); assertContextSuggestFields(fields, 3); diff --git a/server/src/test/java/org/elasticsearch/search/suggest/phrase/PhraseSuggestionBuilderTests.java b/server/src/test/java/org/elasticsearch/search/suggest/phrase/PhraseSuggestionBuilderTests.java index d54a20f66dbbd..bb579c5a743b9 100644 --- a/server/src/test/java/org/elasticsearch/search/suggest/phrase/PhraseSuggestionBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/search/suggest/phrase/PhraseSuggestionBuilderTests.java @@ -195,7 +195,7 @@ protected void assertSuggestionContext(PhraseSuggestionBuilder builder, Suggesti assertOptionalEquals(builder.confidence(), phraseSuggesterCtx.confidence(), PhraseSuggestionContext.DEFAULT_CONFIDENCE); assertOptionalEquals(builder.collatePrune(), phraseSuggesterCtx.collatePrune(), PhraseSuggestionContext.DEFAULT_COLLATE_PRUNE); assertEquals(builder.separator(), phraseSuggesterCtx.separator().utf8ToString()); - assertOptionalEquals(builder.realWordErrorLikelihood(), phraseSuggesterCtx.realworldErrorLikelyhood(), + assertOptionalEquals(builder.realWordErrorLikelihood(), phraseSuggesterCtx.realworldErrorLikelihood(), PhraseSuggestionContext.DEFAULT_RWE_ERRORLIKELIHOOD); assertOptionalEquals(builder.maxErrors(), phraseSuggesterCtx.maxErrors(), PhraseSuggestionContext.DEFAULT_MAX_ERRORS); assertOptionalEquals(builder.forceUnigrams(), phraseSuggesterCtx.getRequireUnigram(), diff --git a/server/src/test/java/org/elasticsearch/snapshots/DedicatedClusterSnapshotRestoreIT.java b/server/src/test/java/org/elasticsearch/snapshots/DedicatedClusterSnapshotRestoreIT.java index 74efe5f68ccfc..afdb14eaf6ba8 100644 --- a/server/src/test/java/org/elasticsearch/snapshots/DedicatedClusterSnapshotRestoreIT.java +++ 
b/server/src/test/java/org/elasticsearch/snapshots/DedicatedClusterSnapshotRestoreIT.java @@ -544,7 +544,7 @@ public void testRestoreIndexWithMissingShards() throws Exception { logger.info("--> start snapshot with default settings and closed index - should be blocked"); assertBlocked(client().admin().cluster().prepareCreateSnapshot("test-repo", "test-snap-1") .setIndices("test-idx-all", "test-idx-none", "test-idx-some", "test-idx-closed") - .setWaitForCompletion(true), MetaDataIndexStateService.INDEX_CLOSED_BLOCK); + .setWaitForCompletion(true), MetaDataIndexStateService.INDEX_CLOSED_BLOCK_ID); logger.info("--> start snapshot with default settings without a closed index - should fail"); @@ -603,7 +603,7 @@ public void testRestoreIndexWithMissingShards() throws Exception { equalTo(SnapshotState.PARTIAL)); } - assertAcked(client().admin().indices().prepareClose("test-idx-some", "test-idx-all").execute().actionGet()); + assertAcked(client().admin().indices().prepareClose("test-idx-some", "test-idx-all")); logger.info("--> restore incomplete snapshot - should fail"); assertThrows(client().admin().cluster().prepareRestoreSnapshot("test-repo", "test-snap-2").setRestoreGlobalState(false) @@ -1179,7 +1179,6 @@ public void testDataNodeRestartWithBusyMasterDuringSnapshot() throws Exception { .setType("mock").setSettings(Settings.builder() .put("location", randomRepoPath()) .put("compress", randomBoolean()) - .put("max_snapshot_bytes_per_sec", "1000b") .put("chunk_size", randomIntBetween(100, 1000), ByteSizeUnit.BYTES))); assertAcked(prepareCreate("test-idx", 0, Settings.builder() .put("number_of_shards", 5).put("number_of_replicas", 0))); @@ -1195,10 +1194,10 @@ public void testDataNodeRestartWithBusyMasterDuringSnapshot() throws Exception { flushAndRefresh(); final String dataNode = blockNodeWithIndex("test-repo", "test-idx"); logger.info("--> snapshot"); - client(internalCluster().getMasterName()).admin().cluster() - .prepareCreateSnapshot("test-repo", "test-snap").setWaitForCompletion(false).setIndices("test-idx").get(); ServiceDisruptionScheme disruption = new BusyMasterServiceDisruption(random(), Priority.HIGH); setDisruptionScheme(disruption); + client(internalCluster().getMasterName()).admin().cluster() + .prepareCreateSnapshot("test-repo", "test-snap").setWaitForCompletion(false).setIndices("test-idx").get(); disruption.startDisrupting(); logger.info("--> restarting data node, which should cause primary shards to be failed"); internalCluster().restartNode(dataNode, InternalTestCluster.EMPTY_CALLBACK); diff --git a/server/src/test/java/org/elasticsearch/snapshots/SharedClusterSnapshotRestoreIT.java b/server/src/test/java/org/elasticsearch/snapshots/SharedClusterSnapshotRestoreIT.java index fcb06acd60a33..1826064c97c78 100644 --- a/server/src/test/java/org/elasticsearch/snapshots/SharedClusterSnapshotRestoreIT.java +++ b/server/src/test/java/org/elasticsearch/snapshots/SharedClusterSnapshotRestoreIT.java @@ -445,7 +445,7 @@ public void testRestoreWithDifferentMappingsAndSettings() throws Exception { logger.info("--> assert that old settings are restored"); GetSettingsResponse getSettingsResponse = client.admin().indices().prepareGetSettings("test-idx").execute().actionGet(); - assertThat(getSettingsResponse.getSetting("test-idx", "index.refresh_interval"), equalTo("10000ms")); + assertThat(getSettingsResponse.getSetting("test-idx", "index.refresh_interval"), equalTo("10s")); } public void testEmptySnapshot() throws Exception { @@ -1562,7 +1562,7 @@ public void testSnapshotClosedIndex() throws 
Exception { logger.info("--> snapshot with closed index"); assertBlocked(client.admin().cluster().prepareCreateSnapshot("test-repo", "test-snap").setWaitForCompletion(true) - .setIndices("test-idx", "test-idx-closed"), MetaDataIndexStateService.INDEX_CLOSED_BLOCK); + .setIndices("test-idx", "test-idx-closed"), MetaDataIndexStateService.INDEX_CLOSED_BLOCK_ID); } public void testSnapshotSingleClosedIndex() throws Exception { @@ -1580,7 +1580,7 @@ public void testSnapshotSingleClosedIndex() throws Exception { logger.info("--> snapshot"); assertBlocked(client.admin().cluster().prepareCreateSnapshot("test-repo", "test-snap-1") - .setWaitForCompletion(true).setIndices("test-idx"), MetaDataIndexStateService.INDEX_CLOSED_BLOCK); + .setWaitForCompletion(true).setIndices("test-idx"), MetaDataIndexStateService.INDEX_CLOSED_BLOCK_ID); } public void testRenameOnRestore() throws Exception { @@ -3243,7 +3243,6 @@ public void testSnapshotSucceedsAfterSnapshotFailure() throws Exception { .put("location", repoPath) .put("random_control_io_exception_rate", randomIntBetween(5, 20) / 100f) // test that we can take a snapshot after a failed one, even if a partial index-N was written - .put("allow_atomic_operations", false) .put("random", randomAlphaOfLength(10)))); logger.info("--> indexing some data"); diff --git a/server/src/test/java/org/elasticsearch/snapshots/SnapshotsServiceTests.java b/server/src/test/java/org/elasticsearch/snapshots/SnapshotsServiceTests.java new file mode 100644 index 0000000000000..34160e901006f --- /dev/null +++ b/server/src/test/java/org/elasticsearch/snapshots/SnapshotsServiceTests.java @@ -0,0 +1,554 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.snapshots; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.elasticsearch.Version; +import org.elasticsearch.action.Action; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.admin.cluster.bootstrap.BootstrapConfiguration; +import org.elasticsearch.action.admin.cluster.repositories.put.PutRepositoryAction; +import org.elasticsearch.action.admin.cluster.repositories.put.TransportPutRepositoryAction; +import org.elasticsearch.action.admin.cluster.snapshots.create.CreateSnapshotAction; +import org.elasticsearch.action.admin.cluster.snapshots.create.TransportCreateSnapshotAction; +import org.elasticsearch.action.admin.indices.create.CreateIndexAction; +import org.elasticsearch.action.admin.indices.create.CreateIndexRequest; +import org.elasticsearch.action.admin.indices.create.TransportCreateIndexAction; +import org.elasticsearch.action.resync.TransportResyncReplicationAction; +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.ActiveShardCount; +import org.elasticsearch.action.support.TransportAction; +import org.elasticsearch.client.node.NodeClient; +import org.elasticsearch.cluster.ClusterModule; +import org.elasticsearch.cluster.ClusterName; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.ESAllocationTestCase; +import org.elasticsearch.cluster.NodeConnectionsService; +import org.elasticsearch.cluster.SnapshotsInProgress; +import org.elasticsearch.cluster.action.index.NodeMappingRefreshAction; +import org.elasticsearch.cluster.action.shard.ShardStateAction; +import org.elasticsearch.cluster.coordination.ClusterBootstrapService; +import org.elasticsearch.cluster.coordination.CoordinationState; +import org.elasticsearch.cluster.coordination.Coordinator; +import org.elasticsearch.cluster.coordination.CoordinatorTests; +import org.elasticsearch.cluster.coordination.DeterministicTaskQueue; +import org.elasticsearch.cluster.coordination.InMemoryPersistedState; +import org.elasticsearch.cluster.coordination.MockSinglePrioritizingExecutor; +import org.elasticsearch.cluster.metadata.AliasValidator; +import org.elasticsearch.cluster.metadata.IndexMetaData; +import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; +import org.elasticsearch.cluster.metadata.MetaDataCreateIndexService; +import org.elasticsearch.cluster.metadata.MetaDataMappingService; +import org.elasticsearch.cluster.node.DiscoveryNode; +import org.elasticsearch.cluster.node.DiscoveryNodes; +import org.elasticsearch.cluster.routing.RoutingService; +import org.elasticsearch.cluster.routing.allocation.AllocationService; +import org.elasticsearch.cluster.service.ClusterApplierService; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.cluster.service.MasterService; +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; +import org.elasticsearch.common.settings.ClusterSettings; +import org.elasticsearch.common.settings.IndexScopedSettings; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.util.BigArrays; +import org.elasticsearch.common.util.PageCacheRecycler; +import org.elasticsearch.common.util.concurrent.PrioritizedEsThreadPoolExecutor; +import org.elasticsearch.common.xcontent.NamedXContentRegistry; +import org.elasticsearch.env.Environment; +import org.elasticsearch.env.NodeEnvironment; +import org.elasticsearch.env.TestEnvironment; +import 
org.elasticsearch.gateway.MetaStateService; +import org.elasticsearch.index.analysis.AnalysisRegistry; +import org.elasticsearch.index.seqno.GlobalCheckpointSyncAction; +import org.elasticsearch.index.shard.PrimaryReplicaSyncer; +import org.elasticsearch.indices.IndicesService; +import org.elasticsearch.indices.breaker.NoneCircuitBreakerService; +import org.elasticsearch.indices.cluster.FakeThreadPoolMasterService; +import org.elasticsearch.indices.cluster.IndicesClusterStateService; +import org.elasticsearch.indices.flush.SyncedFlushService; +import org.elasticsearch.indices.mapper.MapperRegistry; +import org.elasticsearch.indices.recovery.PeerRecoverySourceService; +import org.elasticsearch.indices.recovery.PeerRecoveryTargetService; +import org.elasticsearch.indices.recovery.RecoverySettings; +import org.elasticsearch.plugins.MapperPlugin; +import org.elasticsearch.plugins.PluginsService; +import org.elasticsearch.repositories.RepositoriesService; +import org.elasticsearch.repositories.Repository; +import org.elasticsearch.repositories.fs.FsRepository; +import org.elasticsearch.script.ScriptService; +import org.elasticsearch.search.SearchService; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.test.disruption.DisruptableMockTransport; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.TransportService; +import org.junit.After; +import org.junit.Before; + +import java.io.IOException; +import java.io.UncheckedIOException; +import java.nio.file.Path; +import java.util.Collection; +import java.util.Collections; +import java.util.HashMap; +import java.util.LinkedHashMap; +import java.util.List; +import java.util.Map; +import java.util.Optional; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.atomic.AtomicBoolean; +import java.util.function.Predicate; +import java.util.function.Supplier; +import java.util.stream.Collectors; + +import static java.util.Collections.emptyMap; +import static java.util.Collections.emptySet; +import static org.elasticsearch.env.Environment.PATH_HOME_SETTING; +import static org.elasticsearch.node.Node.NODE_NAME_SETTING; +import static org.elasticsearch.transport.TransportService.HANDSHAKE_ACTION_NAME; +import static org.elasticsearch.transport.TransportService.NOOP_TRANSPORT_INTERCEPTOR; +import static org.hamcrest.Matchers.containsInAnyOrder; +import static org.hamcrest.Matchers.hasSize; +import static org.mockito.Mockito.mock; + +public class SnapshotsServiceTests extends ESTestCase { + + private DeterministicTaskQueue deterministicTaskQueue; + + private TestClusterNodes testClusterNodes; + + private Path tempDir; + + @Before + public void createServices() { + tempDir = createTempDir(); + deterministicTaskQueue = + new DeterministicTaskQueue(Settings.builder().put(NODE_NAME_SETTING.getKey(), "shared").build(), random()); + } + + @After + public void stopServices() { + testClusterNodes.nodes.values().forEach( + n -> { + n.indicesService.close(); + n.clusterService.close(); + n.indicesClusterStateService.close(); + n.nodeEnv.close(); + n.coordinator.close(); + } + ); + } + + public void testSuccessfulSnapshot() { + setupTestCluster(randomFrom(1, 3, 5), randomIntBetween(2, 10)); + + String repoName = "repo"; + String snapshotName = "snapshot"; + final String index = "test"; + + final int shards = randomIntBetween(1, 10); + + TestClusterNode masterNode = + testClusterNodes.currentMaster(testClusterNodes.nodes.values().iterator().next().clusterService.state()); + final AtomicBoolean 
createdSnapshot = new AtomicBoolean();
+        masterNode.client.admin().cluster().preparePutRepository(repoName)
+            .setType(FsRepository.TYPE).setSettings(Settings.builder().put("location", randomAlphaOfLength(10)))
+            .execute(
+                assertNoFailureListener(
+                    () -> masterNode.client.admin().indices().create(
+                        new CreateIndexRequest(index).waitForActiveShards(ActiveShardCount.ALL).settings(
+                            Settings.builder()
+                                .put(IndexMetaData.INDEX_NUMBER_OF_SHARDS_SETTING.getKey(), shards)
+                                .put(IndexMetaData.INDEX_NUMBER_OF_REPLICAS_SETTING.getKey(), 0)),
+                        assertNoFailureListener(
+                            () -> masterNode.client.admin().cluster().prepareCreateSnapshot(repoName, snapshotName)
+                                .execute(assertNoFailureListener(() -> createdSnapshot.set(true)))))));
+
+        deterministicTaskQueue.runAllRunnableTasks();
+
+        assertTrue(createdSnapshot.get());
+        SnapshotsInProgress finalSnapshotsInProgress = masterNode.clusterService.state().custom(SnapshotsInProgress.TYPE);
+        assertFalse(finalSnapshotsInProgress.entries().stream().anyMatch(entry -> entry.state().completed() == false));
+        final Repository repository = masterNode.repositoriesService.repository(repoName);
+        Collection<SnapshotId> snapshotIds = repository.getRepositoryData().getSnapshotIds();
+        assertThat(snapshotIds, hasSize(1));
+
+        final SnapshotInfo snapshotInfo = repository.getSnapshotInfo(snapshotIds.iterator().next());
+        assertEquals(SnapshotState.SUCCESS, snapshotInfo.state());
+        assertThat(snapshotInfo.indices(), containsInAnyOrder(index));
+        assertEquals(shards, snapshotInfo.successfulShards());
+        assertEquals(0, snapshotInfo.failedShards());
+    }
+
+    private void startCluster() {
+        final ClusterState initialClusterState =
+            new ClusterState.Builder(ClusterName.DEFAULT).nodes(testClusterNodes.randomDiscoveryNodes()).build();
+        testClusterNodes.nodes.values().forEach(testClusterNode -> testClusterNode.start(initialClusterState));
+
+        deterministicTaskQueue.advanceTime();
+        deterministicTaskQueue.runAllRunnableTasks();
+
+        final BootstrapConfiguration bootstrapConfiguration = new BootstrapConfiguration(
+            testClusterNodes.nodes.values().stream().filter(n -> n.node.isMasterNode())
+                .map(node -> new BootstrapConfiguration.NodeDescription(node.node))
+                .distinct()
+                .collect(Collectors.toList()));
+        testClusterNodes.nodes.values().stream().filter(n -> n.node.isMasterNode()).forEach(
+            testClusterNode -> testClusterNode.coordinator.setInitialConfiguration(bootstrapConfiguration)
+        );
+
+        runUntil(
+            () -> {
+                List<String> masterNodeIds = testClusterNodes.nodes.values().stream()
+                    .map(node -> node.clusterService.state().nodes().getMasterNodeId())
+                    .distinct().collect(Collectors.toList());
+                return masterNodeIds.size() == 1 && masterNodeIds.contains(null) == false;
+            },
+            TimeUnit.SECONDS.toMillis(30L)
+        );
+    }
+
+    private void runUntil(Supplier<Boolean> fulfilled, long timeout) {
+        final long start = deterministicTaskQueue.getCurrentTimeMillis();
+        while (timeout > deterministicTaskQueue.getCurrentTimeMillis() - start) {
+            deterministicTaskQueue.runAllRunnableTasks();
+            if (fulfilled.get()) {
+                return;
+            }
+            deterministicTaskQueue.advanceTime();
+        }
+        fail("Condition wasn't fulfilled.");
+    }
+
+    private void setupTestCluster(int masterNodes, int dataNodes) {
+        testClusterNodes = new TestClusterNodes(masterNodes, dataNodes);
+        startCluster();
+    }
+
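+    // all of the above runs on the shared DeterministicTaskQueue: runUntil and advanceTime drive the
+    // whole simulated cluster from the test thread, without real threads or wall-clock time
+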
+    private static <T> ActionListener<T> assertNoFailureListener(Runnable r) {
+        return new ActionListener<T>() {
+            @Override
+            public void onResponse(final T t) {
+                r.run();
+            }
+
+            @Override
+            public void onFailure(final Exception e) {
+                throw new AssertionError(e);
+            }
+        };
+    }
+
+    /**
+     * Creates an {@link Environment} with a node-specific path.home and a path.repo shared by all nodes
+     **/
+    private Environment createEnvironment(String nodeName) {
+        return TestEnvironment.newEnvironment(Settings.builder()
+            .put(NODE_NAME_SETTING.getKey(), nodeName)
+            .put(PATH_HOME_SETTING.getKey(), tempDir.resolve(nodeName).toAbsolutePath())
+            .put(Environment.PATH_REPO_SETTING.getKey(), tempDir.resolve("repo").toAbsolutePath())
+            .putList(ClusterBootstrapService.INITIAL_MASTER_NODES_SETTING.getKey(),
+                ClusterBootstrapService.INITIAL_MASTER_NODES_SETTING.get(Settings.EMPTY))
+            .build());
+    }
+
+    private TestClusterNode newMasterNode(String nodeName) throws IOException {
+        return newNode(nodeName, DiscoveryNode.Role.MASTER);
+    }
+
+    private TestClusterNode newDataNode(String nodeName) throws IOException {
+        return newNode(nodeName, DiscoveryNode.Role.DATA);
+    }
+
+    private TestClusterNode newNode(String nodeName, DiscoveryNode.Role role) throws IOException {
+        return new TestClusterNode(
+            new DiscoveryNode(nodeName, randomAlphaOfLength(10), buildNewFakeTransportAddress(), emptyMap(),
+                Collections.singleton(role), Version.CURRENT)
+        );
+    }
+
+    private static ClusterState stateForNode(ClusterState state, DiscoveryNode node) {
+        return ClusterState.builder(state).nodes(DiscoveryNodes.builder(state.nodes()).localNodeId(node.getId())).build();
+    }
+
+    private final class TestClusterNodes {
+
+        // LinkedHashMap so we have deterministic ordering when iterating over the map in tests
+        private final Map<String, TestClusterNode> nodes = new LinkedHashMap<>();
+
+        TestClusterNodes(int masterNodes, int dataNodes) {
+            for (int i = 0; i < masterNodes; ++i) {
+                nodes.computeIfAbsent("node" + i, nodeName -> {
+                    try {
+                        return SnapshotsServiceTests.this.newMasterNode(nodeName);
+                    } catch (IOException e) {
+                        throw new UncheckedIOException(e);
+                    }
+                });
+            }
+            for (int i = 0; i < dataNodes; ++i) {
+                nodes.computeIfAbsent("data-node" + i, nodeName -> {
+                    try {
+                        return SnapshotsServiceTests.this.newDataNode(nodeName);
+                    } catch (IOException e) {
+                        throw new UncheckedIOException(e);
+                    }
+                });
+            }
+        }
+
+        /**
+         * Builds a {@link DiscoveryNodes} instance containing all nodes in this test cluster; no master
+         * node is set on it.
+         * @return DiscoveryNodes
+         */
+        public DiscoveryNodes randomDiscoveryNodes() {
+            DiscoveryNodes.Builder builder = DiscoveryNodes.builder();
+            nodes.values().forEach(node -> builder.add(node.node));
+            return builder.build();
+        }
+
+ + /** + * Returns the {@link TestClusterNode} for the master node in the given {@link ClusterState}. + * @param state ClusterState + * @return Master Node + */ + public TestClusterNode currentMaster(ClusterState state) { + TestClusterNode master = nodes.get(state.nodes().getMasterNode().getName()); + assertNotNull(master); + assertTrue(master.node.isMasterNode()); + return master; + } + } + + private final class TestClusterNode { + + private final Logger logger = LogManager.getLogger(TestClusterNode.class); + + private final NamedWriteableRegistry namedWriteableRegistry = new NamedWriteableRegistry(ClusterModule.getNamedWriteables()); + + private final TransportService transportService; + + private final ClusterService clusterService; + + private final RepositoriesService repositoriesService; + + private final SnapshotsService snapshotsService; + + private final SnapshotShardsService snapshotShardsService; + + private final IndicesService indicesService; + + private final IndicesClusterStateService indicesClusterStateService; + + private final DiscoveryNode node; + + private final MasterService masterService; + + private final AllocationService allocationService; + + private final NodeClient client; + + private final NodeEnvironment nodeEnv; + + private final DisruptableMockTransport mockTransport; + + private final ThreadPool threadPool; + + private Coordinator coordinator; + + TestClusterNode(DiscoveryNode node) throws IOException { + this.node = node; + final Environment environment = createEnvironment(node.getName()); + masterService = new FakeThreadPoolMasterService(node.getName(), "test", deterministicTaskQueue::scheduleNow); + final Settings settings = environment.settings(); + final ClusterSettings clusterSettings = new ClusterSettings(settings, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS); + threadPool = deterministicTaskQueue.getThreadPool(); + clusterService = new ClusterService(settings, clusterSettings, masterService, + new ClusterApplierService(node.getName(), settings, clusterSettings, threadPool) { + @Override + protected PrioritizedEsThreadPoolExecutor createThreadPoolExecutor() { + return new MockSinglePrioritizingExecutor(node.getName(), deterministicTaskQueue); + } + }); + mockTransport = new DisruptableMockTransport(logger) { + @Override + protected DiscoveryNode getLocalNode() { + return node; + } + + @Override + protected ConnectionStatus getConnectionStatus(DiscoveryNode sender, DiscoveryNode destination) { + return ConnectionStatus.CONNECTED; + } + + @Override + protected Optional<DisruptableMockTransport> getDisruptedCapturingTransport(DiscoveryNode node, String action) { + final Predicate<TestClusterNode> matchesDestination; + if (action.equals(HANDSHAKE_ACTION_NAME)) { + matchesDestination = n -> n.transportService.getLocalNode().getAddress().equals(node.getAddress()); + } else { + matchesDestination = n -> n.transportService.getLocalNode().equals(node); + } + return testClusterNodes.nodes.values().stream().filter(matchesDestination).findAny().map(cn -> cn.mockTransport); + } + + @Override + protected void handle(DiscoveryNode sender, DiscoveryNode destination, String action, Runnable doDelivery) { + // the handshake must run inline because the caller blocks waiting on its result + final Runnable runnable = CoordinatorTests.onNode(destination, doDelivery); + if (action.equals(HANDSHAKE_ACTION_NAME)) { + runnable.run(); + } else { + deterministicTaskQueue.scheduleNow(runnable); + } + } + }; + transportService = mockTransport.createTransportService( + settings, deterministicTaskQueue.getThreadPool(runnable -> CoordinatorTests.onNode(node, runnable)), + NOOP_TRANSPORT_INTERCEPTOR, + a -> node, null, emptySet() + );
+ final IndexNameExpressionResolver indexNameExpressionResolver = new IndexNameExpressionResolver(); + repositoriesService = new RepositoriesService( + settings, clusterService, transportService, + Collections.singletonMap(FsRepository.TYPE, metaData -> { + final Repository repository = new FsRepository(metaData, environment, xContentRegistry()) { + @Override + protected void assertSnapshotOrGenericThread() { + // eliminate thread name check as we create repo in the test thread + } + }; + repository.start(); + return repository; + } + ), + emptyMap(), + threadPool + ); + snapshotsService = + new SnapshotsService(settings, clusterService, indexNameExpressionResolver, repositoriesService, threadPool); + nodeEnv = new NodeEnvironment(settings, environment); + final NamedXContentRegistry namedXContentRegistry = new NamedXContentRegistry(Collections.emptyList()); + final ScriptService scriptService = new ScriptService(settings, emptyMap(), emptyMap()); + client = new NodeClient(settings, threadPool); + allocationService = ESAllocationTestCase.createAllocationService(settings); + final IndexScopedSettings indexScopedSettings = + new IndexScopedSettings(settings, IndexScopedSettings.BUILT_IN_INDEX_SETTINGS); + indicesService = new IndicesService( + settings, + mock(PluginsService.class), + nodeEnv, + namedXContentRegistry, + new AnalysisRegistry(environment, emptyMap(), emptyMap(), emptyMap(), emptyMap(), emptyMap(), + emptyMap(), emptyMap(), emptyMap(), emptyMap()), + indexNameExpressionResolver, + new MapperRegistry(emptyMap(), emptyMap(), MapperPlugin.NOOP_FIELD_FILTER), + namedWriteableRegistry, + threadPool, + indexScopedSettings, + new NoneCircuitBreakerService(), + new BigArrays(new PageCacheRecycler(settings), null, "test"), + scriptService, + client, + new MetaStateService(nodeEnv, namedXContentRegistry), + Collections.emptyList(), + emptyMap() + ); + final RecoverySettings recoverySettings = new RecoverySettings(settings, clusterSettings); + final ActionFilters actionFilters = new ActionFilters(emptySet()); + snapshotShardsService = new SnapshotShardsService( + settings, clusterService, snapshotsService, threadPool, + transportService, indicesService, actionFilters, indexNameExpressionResolver); + final ShardStateAction shardStateAction = new ShardStateAction( + clusterService, transportService, allocationService, + new RoutingService(settings, clusterService, allocationService), + deterministicTaskQueue.getThreadPool() + ); + indicesClusterStateService = new IndicesClusterStateService( + settings, indicesService, clusterService, threadPool, + new PeerRecoveryTargetService( + deterministicTaskQueue.getThreadPool(), transportService, recoverySettings, + clusterService + ), + shardStateAction, + new NodeMappingRefreshAction(transportService, new MetaDataMappingService(clusterService, indicesService)), + repositoriesService, + mock(SearchService.class), + new SyncedFlushService(indicesService, clusterService, transportService, indexNameExpressionResolver), + new PeerRecoverySourceService(transportService, indicesService, recoverySettings), + snapshotShardsService, + new PrimaryReplicaSyncer( + transportService, + new TransportResyncReplicationAction( + settings, transportService, clusterService, indicesService, threadPool, + shardStateAction, actionFilters, indexNameExpressionResolver) + ), + new GlobalCheckpointSyncAction( + settings, transportService, clusterService, indicesService, threadPool, + shardStateAction, actionFilters, indexNameExpressionResolver) + ); + Map<Action, TransportAction> actions = new 
HashMap<>(); + actions.put(CreateIndexAction.INSTANCE, + new TransportCreateIndexAction( + transportService, clusterService, threadPool, + new MetaDataCreateIndexService(settings, clusterService, indicesService, + allocationService, new AliasValidator(), environment, indexScopedSettings, + threadPool, namedXContentRegistry, false), + actionFilters, indexNameExpressionResolver + )); + actions.put(PutRepositoryAction.INSTANCE, + new TransportPutRepositoryAction( + transportService, clusterService, repositoriesService, threadPool, + actionFilters, indexNameExpressionResolver + )); + actions.put(CreateSnapshotAction.INSTANCE, + new TransportCreateSnapshotAction( + transportService, clusterService, threadPool, + snapshotsService, actionFilters, indexNameExpressionResolver + )); + client.initialize(actions, () -> clusterService.localNode().getId(), transportService.getRemoteClusterService()); + } + + public void start(ClusterState initialState) { + transportService.start(); + transportService.acceptIncomingRequests(); + snapshotsService.start(); + snapshotShardsService.start(); + final CoordinationState.PersistedState persistedState = + new InMemoryPersistedState(0L, stateForNode(initialState, node)); + coordinator = new Coordinator(node.getName(), clusterService.getSettings(), + clusterService.getClusterSettings(), transportService, namedWriteableRegistry, + allocationService, masterService, () -> persistedState, + hostsResolver -> testClusterNodes.nodes.values().stream().filter(n -> n.node.isMasterNode()) + .map(n -> n.node.getAddress()).collect(Collectors.toList()), + clusterService.getClusterApplierService(), Collections.emptyList(), random()); + masterService.setClusterStatePublisher(coordinator); + coordinator.start(); + masterService.start(); + clusterService.getClusterApplierService().setNodeConnectionsService( + new NodeConnectionsService(clusterService.getSettings(), threadPool, transportService)); + clusterService.getClusterApplierService().start(); + indicesService.start(); + indicesClusterStateService.start(); + coordinator.startInitialJoin(); + } + } +} diff --git a/server/src/test/java/org/elasticsearch/snapshots/mockstore/MockRepository.java b/server/src/test/java/org/elasticsearch/snapshots/mockstore/MockRepository.java index 2b634385b29af..8a49324757f27 100644 --- a/server/src/test/java/org/elasticsearch/snapshots/mockstore/MockRepository.java +++ b/server/src/test/java/org/elasticsearch/snapshots/mockstore/MockRepository.java @@ -110,8 +110,6 @@ public long getFailureCount() { /** Allows blocking on writing the snapshot file at the end of snapshot creation to simulate a died master node */ private volatile boolean blockAndFailOnWriteSnapFile; - private volatile boolean allowAtomicOperations; - private volatile boolean blocked = false; public MockRepository(RepositoryMetaData metadata, Environment environment, @@ -127,7 +125,6 @@ public MockRepository(RepositoryMetaData metadata, Environment environment, blockAndFailOnWriteSnapFile = metadata.settings().getAsBoolean("block_on_snap", false); randomPrefix = metadata.settings().get("random", "default"); waitAfterUnblock = metadata.settings().getAsLong("wait_after_unblock", 0L); - allowAtomicOperations = metadata.settings().getAsBoolean("allow_atomic_operations", true); logger.info("starting mock repository with random prefix {}", randomPrefix); } @@ -361,25 +358,18 @@ public void writeBlob(String blobName, InputStream inputStream, long blobSize, b public void writeBlobAtomic(final String blobName, final InputStream inputStream, 
final long blobSize, final boolean failIfAlreadyExists) throws IOException { final Random random = RandomizedContext.current().getRandom(); - if (allowAtomicOperations && random.nextBoolean()) { - if ((delegate() instanceof FsBlobContainer) && (random.nextBoolean())) { - // Simulate a failure between the write and move operation in FsBlobContainer - final String tempBlobName = FsBlobContainer.tempBlobName(blobName); - super.writeBlob(tempBlobName, inputStream, blobSize, failIfAlreadyExists); - maybeIOExceptionOrBlock(blobName); - final FsBlobContainer fsBlobContainer = (FsBlobContainer) delegate(); - fsBlobContainer.moveBlobAtomic(tempBlobName, blobName, failIfAlreadyExists); - } else { - // Atomic write since it is potentially supported - // by the delegating blob container - maybeIOExceptionOrBlock(blobName); - super.writeBlobAtomic(blobName, inputStream, blobSize, failIfAlreadyExists); - } + if ((delegate() instanceof FsBlobContainer) && (random.nextBoolean())) { + // Simulate a failure between the write and move operation in FsBlobContainer + final String tempBlobName = FsBlobContainer.tempBlobName(blobName); + super.writeBlob(tempBlobName, inputStream, blobSize, failIfAlreadyExists); + maybeIOExceptionOrBlock(blobName); + final FsBlobContainer fsBlobContainer = (FsBlobContainer) delegate(); + fsBlobContainer.moveBlobAtomic(tempBlobName, blobName, failIfAlreadyExists); } else { - // Simulate a non-atomic write since many blob container - // implementations does not support atomic write + // Atomic write since it is potentially supported + // by the delegating blob container maybeIOExceptionOrBlock(blobName); - super.writeBlob(blobName, inputStream, blobSize, failIfAlreadyExists); + super.writeBlobAtomic(blobName, inputStream, blobSize, failIfAlreadyExists); } } } diff --git a/server/src/test/java/org/elasticsearch/transport/RemoteClusterAwareTests.java b/server/src/test/java/org/elasticsearch/transport/RemoteClusterAwareTests.java new file mode 100644 index 0000000000000..711528924a0d8 --- /dev/null +++ b/server/src/test/java/org/elasticsearch/transport/RemoteClusterAwareTests.java @@ -0,0 +1,40 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.transport; + +import org.elasticsearch.test.ESTestCase; + +public class RemoteClusterAwareTests extends ESTestCase { + + public void testBuildRemoteIndexName() { + { + String clusterAlias = randomAlphaOfLengthBetween(5, 10); + String index = randomAlphaOfLengthBetween(5, 10); + String remoteIndexName = RemoteClusterAware.buildRemoteIndexName(clusterAlias, index); + assertEquals(clusterAlias + ":" + index, remoteIndexName); + } + { + String index = randomAlphaOfLengthBetween(5, 10); + String clusterAlias = randomBoolean() ? 
null : RemoteClusterAware.LOCAL_CLUSTER_GROUP_KEY; + String remoteIndexName = RemoteClusterAware.buildRemoteIndexName(clusterAlias, index); + assertEquals(index, remoteIndexName); + } + } +} diff --git a/server/src/test/java/org/elasticsearch/transport/RemoteClusterServiceTests.java b/server/src/test/java/org/elasticsearch/transport/RemoteClusterServiceTests.java index 34dfc420133c0..d5671eec21961 100644 --- a/server/src/test/java/org/elasticsearch/transport/RemoteClusterServiceTests.java +++ b/server/src/test/java/org/elasticsearch/transport/RemoteClusterServiceTests.java @@ -60,6 +60,7 @@ import java.util.function.Supplier; import java.util.stream.Collectors; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.awaitLatch; import static org.hamcrest.CoreMatchers.containsString; import static org.hamcrest.CoreMatchers.instanceOf; import static org.hamcrest.Matchers.anyOf; @@ -742,7 +743,7 @@ public void testCollectSearchShards() throws Exception { AtomicReference<Exception> failure = new AtomicReference<>(); remoteClusterService.collectSearchShards(IndicesOptions.lenientExpandOpen(), null, null, remoteIndicesByCluster, new LatchedActionListener<>(ActionListener.wrap(response::set, failure::set), latch)); - assertTrue(latch.await(1, TimeUnit.SECONDS)); + awaitLatch(latch, 5, TimeUnit.SECONDS); assertNull(failure.get()); assertNotNull(response.get()); Map<String, ClusterSearchShardsResponse> map = response.get(); @@ -761,7 +762,7 @@ public void testCollectSearchShards() throws Exception { remoteClusterService.collectSearchShards(IndicesOptions.lenientExpandOpen(), "index_not_found", null, remoteIndicesByCluster, new LatchedActionListener<>(ActionListener.wrap(response::set, failure::set), latch)); - assertTrue(latch.await(2, TimeUnit.SECONDS)); + awaitLatch(latch, 5, TimeUnit.SECONDS); assertNull(response.get()); assertNotNull(failure.get()); assertThat(failure.get(), instanceOf(RemoteTransportException.class)); @@ -800,7 +801,7 @@ public void onNodeDisconnected(DiscoveryNode node) { AtomicReference<Exception> failure = new AtomicReference<>(); remoteClusterService.collectSearchShards(IndicesOptions.lenientExpandOpen(), null, null, remoteIndicesByCluster, new LatchedActionListener<>(ActionListener.wrap(response::set, failure::set), latch)); - assertTrue(latch.await(1, TimeUnit.SECONDS)); + awaitLatch(latch, 5, TimeUnit.SECONDS); assertNull(response.get()); assertNotNull(failure.get()); assertThat(failure.get(), instanceOf(RemoteTransportException.class)); @@ -818,7 +819,7 @@ public void onNodeDisconnected(DiscoveryNode node) { AtomicReference<Exception> failure = new AtomicReference<>(); remoteClusterService.collectSearchShards(IndicesOptions.lenientExpandOpen(), null, null, remoteIndicesByCluster, new LatchedActionListener<>(ActionListener.wrap(response::set, failure::set), latch)); - assertTrue(latch.await(1, TimeUnit.SECONDS)); + awaitLatch(latch, 5, TimeUnit.SECONDS); assertNull(failure.get()); assertNotNull(response.get()); Map<String, ClusterSearchShardsResponse> map = response.get(); @@ -837,7 +838,7 @@ public void onNodeDisconnected(DiscoveryNode node) { //give transport service enough time to realize that the node is down, and to notify the connection listeners //so that RemoteClusterConnection is left with no connected nodes, hence it will retry connecting next - assertTrue(disconnectedLatch.await(1, TimeUnit.SECONDS)); + assertTrue(disconnectedLatch.await(5, TimeUnit.SECONDS)); service.clearAllRules(); if (randomBoolean()) {
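These hunks swap bare latch waits for the awaitLatch helper added to ElasticsearchAssertions later in this patch, and raise the timeouts to 5 seconds. The practical difference on a timeout, as a minimal sketch:

    CountDownLatch latch = new CountDownLatch(1);
    // before: a timeout fails as a bare AssertionError with no message at all
    assertTrue(latch.await(1, TimeUnit.SECONDS));
    // after: a timeout fails with "expected latch to be counted down after 5s, but was not"
    awaitLatch(latch, 5, TimeUnit.SECONDS);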
@@ -854,7 +855,7 @@ public void onNodeDisconnected(DiscoveryNode node) { AtomicReference<Exception> failure = new AtomicReference<>(); remoteClusterService.collectSearchShards(IndicesOptions.lenientExpandOpen(), null, null, remoteIndicesByCluster, new LatchedActionListener<>(ActionListener.wrap(response::set, failure::set), latch)); - assertTrue(latch.await(1, TimeUnit.SECONDS)); + awaitLatch(latch, 5, TimeUnit.SECONDS); assertNull(failure.get()); assertNotNull(response.get()); Map<String, ClusterSearchShardsResponse> map = response.get(); @@ -863,7 +864,7 @@ public void onNodeDisconnected(DiscoveryNode node) { String clusterAlias = "remote" + i; assertTrue(map.containsKey(clusterAlias)); ClusterSearchShardsResponse shardsResponse = map.get(clusterAlias); - assertTrue(shardsResponse != ClusterSearchShardsResponse.EMPTY); + assertNotSame(ClusterSearchShardsResponse.EMPTY, shardsResponse); } } assertEquals(0, service.getConnectionManager().size()); diff --git a/test/build.gradle b/test/build.gradle index d0a3065e7c80a..8293835528880 100644 --- a/test/build.gradle +++ b/test/build.gradle @@ -17,8 +17,6 @@ * under the License. */ -import org.elasticsearch.gradle.precommit.PrecommitTasks - subprojects { // fixtures is just intermediate parent project if (name == 'fixtures') return diff --git a/test/framework/build.gradle b/test/framework/build.gradle index 8871c69b87acc..26a36852d378d 100644 --- a/test/framework/build.gradle +++ b/test/framework/build.gradle @@ -47,7 +47,7 @@ forbiddenApisMain { dependencyLicenses.enabled = false dependenciesInfo.enabled = false -thirdPartyAudit.excludes = [ +thirdPartyAudit.ignoreMissingClasses ( // classes are missing 'javax.servlet.ServletContextEvent', 'javax.servlet.ServletContextListener', @@ -61,8 +61,8 @@ thirdPartyAudit.excludes = [ 'org.apache.tools.ant.types.FileSet', 'org.easymock.EasyMock', 'org.easymock.IArgumentMatcher', - 'org.jmock.core.Constraint', -] + 'org.jmock.core.Constraint' +) task namingConventionsMain(type: org.elasticsearch.gradle.precommit.NamingConventionsTask) { checkForTestsInMain = true diff --git a/test/framework/src/main/java/org/elasticsearch/cluster/coordination/DeterministicTaskQueue.java b/test/framework/src/main/java/org/elasticsearch/cluster/coordination/DeterministicTaskQueue.java index 035ca6b05efd9..eb4e55853d5a1 100644 --- a/test/framework/src/main/java/org/elasticsearch/cluster/coordination/DeterministicTaskQueue.java +++ b/test/framework/src/main/java/org/elasticsearch/cluster/coordination/DeterministicTaskQueue.java @@ -405,7 +405,7 @@ public Object get(long timeout, TimeUnit unit) { @Override public Cancellable scheduleWithFixedDelay(Runnable command, TimeValue interval, String executor) { - throw new UnsupportedOperationException(); + return super.scheduleWithFixedDelay(command, interval, executor); } @Override @@ -430,7 +430,92 @@ public boolean awaitTermination(long timeout, TimeUnit unit) { @Override public ScheduledExecutorService scheduler() { - throw new UnsupportedOperationException(); + return new ScheduledExecutorService() { + @Override + public ScheduledFuture<?> schedule(Runnable command, long delay, TimeUnit unit) { + throw new UnsupportedOperationException(); + } + + @Override + public <V> ScheduledFuture<V> schedule(Callable<V> callable, long delay, TimeUnit unit) { + throw new UnsupportedOperationException(); + } + + @Override + public ScheduledFuture<?> scheduleAtFixedRate(Runnable command, long initialDelay, long period, TimeUnit unit) { + throw new UnsupportedOperationException(); + } + + @Override + public ScheduledFuture<?> scheduleWithFixedDelay(Runnable command, long initialDelay, long delay, TimeUnit unit) { + throw new UnsupportedOperationException(); + } + + @Override + 
public void shutdown() { + throw new UnsupportedOperationException(); + } + + @Override + public List<Runnable> shutdownNow() { + throw new UnsupportedOperationException(); + } + + @Override + public boolean isShutdown() { + throw new UnsupportedOperationException(); + } + + @Override + public boolean isTerminated() { + throw new UnsupportedOperationException(); + } + + @Override + public boolean awaitTermination(long timeout, TimeUnit unit) { + throw new UnsupportedOperationException(); + } + + @Override + public <T> Future<T> submit(Callable<T> task) { + throw new UnsupportedOperationException(); + } + + @Override + public <T> Future<T> submit(Runnable task, T result) { + throw new UnsupportedOperationException(); + } + + @Override + public Future<?> submit(Runnable task) { + throw new UnsupportedOperationException(); + } + + @Override + public <T> List<Future<T>> invokeAll(Collection<? extends Callable<T>> tasks) { + throw new UnsupportedOperationException(); + } + + @Override + public <T> List<Future<T>> invokeAll(Collection<? extends Callable<T>> tasks, long timeout, TimeUnit unit) { + throw new UnsupportedOperationException(); + } + + @Override + public <T> T invokeAny(Collection<? extends Callable<T>> tasks) { + throw new UnsupportedOperationException(); + } + + @Override + public <T> T invokeAny(Collection<? extends Callable<T>> tasks, long timeout, TimeUnit unit) { + throw new UnsupportedOperationException(); + } + + @Override + public void execute(Runnable command) { + throw new UnsupportedOperationException(); + } + }; } }; } diff --git a/test/framework/src/main/java/org/elasticsearch/cluster/coordination/MockSinglePrioritizingExecutor.java b/test/framework/src/main/java/org/elasticsearch/cluster/coordination/MockSinglePrioritizingExecutor.java new file mode 100644 index 0000000000000..cc21fef5f5559 --- /dev/null +++ b/test/framework/src/main/java/org/elasticsearch/cluster/coordination/MockSinglePrioritizingExecutor.java @@ -0,0 +1,58 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.cluster.coordination; + +import org.elasticsearch.common.util.concurrent.EsExecutors; +import org.elasticsearch.common.util.concurrent.PrioritizedEsThreadPoolExecutor; + +import java.util.concurrent.TimeUnit; + +/** + * Mock single threaded {@link PrioritizedEsThreadPoolExecutor} based on {@link DeterministicTaskQueue}, + * simulating the behaviour of an executor returned by {@link EsExecutors#newSinglePrioritizing}. 
+ */ +public class MockSinglePrioritizingExecutor extends PrioritizedEsThreadPoolExecutor { + + public MockSinglePrioritizingExecutor(String name, DeterministicTaskQueue deterministicTaskQueue) { + super(name, 0, 1, 0L, TimeUnit.MILLISECONDS, + r -> new Thread() { + @Override + public void start() { + deterministicTaskQueue.scheduleNow(() -> { + try { + r.run(); + } catch (KillWorkerError kwe) { + // hacks everywhere + } + }); + } + }, + deterministicTaskQueue.getThreadPool().getThreadContext(), deterministicTaskQueue.getThreadPool().scheduler()); + } + + @Override + protected void afterExecute(Runnable r, Throwable t) { + super.afterExecute(r, t); + // kill worker so that next one will be scheduled + throw new KillWorkerError(); + } + + private static final class KillWorkerError extends Error { + } +} diff --git a/test/framework/src/main/java/org/elasticsearch/index/engine/EngineTestCase.java b/test/framework/src/main/java/org/elasticsearch/index/engine/EngineTestCase.java index 711bbc1336c7c..58059cd30e382 100644 --- a/test/framework/src/main/java/org/elasticsearch/index/engine/EngineTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/index/engine/EngineTestCase.java @@ -84,6 +84,7 @@ import org.elasticsearch.index.mapper.VersionFieldMapper; import org.elasticsearch.index.seqno.LocalCheckpointTracker; import org.elasticsearch.index.seqno.ReplicationTracker; +import org.elasticsearch.index.seqno.RetentionLease; import org.elasticsearch.index.seqno.SequenceNumbers; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.index.store.Store; @@ -104,6 +105,7 @@ import java.nio.file.Path; import java.util.ArrayList; import java.util.Arrays; +import java.util.Collection; import java.util.Collections; import java.util.Comparator; import java.util.HashMap; @@ -115,6 +117,7 @@ import java.util.function.BiFunction; import java.util.function.Function; import java.util.function.LongSupplier; +import java.util.function.Supplier; import java.util.function.ToLongBiFunction; import java.util.stream.Collectors; @@ -224,7 +227,8 @@ public EngineConfig copy(EngineConfig config, LongSupplier globalCheckpointSuppl new CodecService(null, logger), config.getEventListener(), config.getQueryCache(), config.getQueryCachingPolicy(), config.getTranslogConfig(), config.getFlushMergesAfter(), config.getExternalRefreshListener(), Collections.emptyList(), config.getIndexSort(), - config.getCircuitBreakerService(), globalCheckpointSupplier, config.getPrimaryTermSupplier(), tombstoneDocSupplier()); + config.getCircuitBreakerService(), globalCheckpointSupplier, config.retentionLeasesSupplier(), + config.getPrimaryTermSupplier(), tombstoneDocSupplier()); } public EngineConfig copy(EngineConfig config, Analyzer analyzer) { @@ -233,8 +237,8 @@ public EngineConfig copy(EngineConfig config, Analyzer analyzer) { new CodecService(null, logger), config.getEventListener(), config.getQueryCache(), config.getQueryCachingPolicy(), config.getTranslogConfig(), config.getFlushMergesAfter(), config.getExternalRefreshListener(), Collections.emptyList(), config.getIndexSort(), - config.getCircuitBreakerService(), config.getGlobalCheckpointSupplier(), config.getPrimaryTermSupplier(), - config.getTombstoneDocSupplier()); + config.getCircuitBreakerService(), config.getGlobalCheckpointSupplier(), config.retentionLeasesSupplier(), + config.getPrimaryTermSupplier(), config.getTombstoneDocSupplier()); } public EngineConfig copy(EngineConfig config, MergePolicy mergePolicy) { @@ -243,8 +247,8 @@ public EngineConfig 
copy(EngineConfig config, MergePolicy mergePolicy) { new CodecService(null, logger), config.getEventListener(), config.getQueryCache(), config.getQueryCachingPolicy(), config.getTranslogConfig(), config.getFlushMergesAfter(), config.getExternalRefreshListener(), Collections.emptyList(), config.getIndexSort(), - config.getCircuitBreakerService(), config.getGlobalCheckpointSupplier(), config.getPrimaryTermSupplier(), - config.getTombstoneDocSupplier()); + config.getCircuitBreakerService(), config.getGlobalCheckpointSupplier(), config.retentionLeasesSupplier(), + config.getPrimaryTermSupplier(), config.getTombstoneDocSupplier()); } @Override @@ -332,7 +336,7 @@ public static CheckedBiFunction ne source.endObject(); } source.endObject(); - return nestedMapper.parse(SourceToParse.source("test", "type", docId, BytesReference.bytes(source), XContentType.JSON)); + return nestedMapper.parse(new SourceToParse("test", "type", docId, BytesReference.bytes(source), XContentType.JSON)); }; } @@ -581,7 +585,8 @@ public EngineConfig config(IndexSettings indexSettings, Store store, Path transl public EngineConfig config(IndexSettings indexSettings, Store store, Path translogPath, MergePolicy mergePolicy, ReferenceManager.RefreshListener externalRefreshListener, ReferenceManager.RefreshListener internalRefreshListener, - Sort indexSort, LongSupplier globalCheckpointSupplier, CircuitBreakerService breakerService) { + Sort indexSort, @Nullable final LongSupplier maybeGlobalCheckpointSupplier, + CircuitBreakerService breakerService) { IndexWriterConfig iwc = newIndexWriterConfig(); TranslogConfig translogConfig = new TranslogConfig(shardId, translogPath, indexSettings, BigArrays.NON_RECYCLING_INSTANCE); Engine.EventListener listener = new Engine.EventListener() { @@ -594,14 +599,22 @@ public void onFailedEngine(String reason, @Nullable Exception e) { externalRefreshListener == null ? emptyList() : Collections.singletonList(externalRefreshListener); final List<ReferenceManager.RefreshListener> intRefreshListenerList = internalRefreshListener == null ? emptyList() : Collections.singletonList(internalRefreshListener); + final LongSupplier globalCheckpointSupplier; + final Supplier<Collection<RetentionLease>> retentionLeasesSupplier; + if (maybeGlobalCheckpointSupplier == null) { + final ReplicationTracker replicationTracker = new ReplicationTracker( + shardId, allocationId.getId(), indexSettings, SequenceNumbers.NO_OPS_PERFORMED, update -> {}, () -> 0L); + globalCheckpointSupplier = replicationTracker; + retentionLeasesSupplier = replicationTracker::getRetentionLeases; + } else { + globalCheckpointSupplier = maybeGlobalCheckpointSupplier; + retentionLeasesSupplier = Collections::emptyList; + } EngineConfig config = new EngineConfig(shardId, allocationId.getId(), threadPool, indexSettings, null, store, mergePolicy, iwc.getAnalyzer(), iwc.getSimilarity(), new CodecService(null, logger), listener, IndexSearcher.getDefaultQueryCache(), IndexSearcher.getDefaultQueryCachingPolicy(), translogConfig, TimeValue.timeValueMinutes(5), extRefreshListenerList, intRefreshListenerList, indexSort, - breakerService, - globalCheckpointSupplier == null ? - new ReplicationTracker(shardId, allocationId.getId(), indexSettings, SequenceNumbers.NO_OPS_PERFORMED, update -> {}) : - globalCheckpointSupplier, primaryTerm::get, tombstoneDocSupplier()); + breakerService, globalCheckpointSupplier, retentionLeasesSupplier, primaryTerm::get, tombstoneDocSupplier()); return config; }
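The branch introduced above pairs the two suppliers deliberately: when the caller passes no checkpoint supplier, one ReplicationTracker backs both the global checkpoint (the ternary it replaces shows the tracker is assignable to LongSupplier) and the retention leases, so the two views cannot drift apart; callers that control the checkpoint get an empty-lease supplier instead. Condensed, with the names from the hunk:

    if (maybeGlobalCheckpointSupplier == null) {
        ReplicationTracker tracker = new ReplicationTracker(shardId, allocationId.getId(), indexSettings,
            SequenceNumbers.NO_OPS_PERFORMED, update -> {}, () -> 0L);
        globalCheckpointSupplier = tracker;                    // the tracker itself supplies the checkpoint
        retentionLeasesSupplier = tracker::getRetentionLeases; // leases come from the same tracker
    } else {
        globalCheckpointSupplier = maybeGlobalCheckpointSupplier;
        retentionLeasesSupplier = Collections::emptyList;      // caller-controlled checkpoint, no leases
    }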
diff --git a/test/framework/src/main/java/org/elasticsearch/index/engine/TranslogHandler.java b/test/framework/src/main/java/org/elasticsearch/index/engine/TranslogHandler.java index 830fcec3726a0..7090419e9f23d 100644 --- a/test/framework/src/main/java/org/elasticsearch/index/engine/TranslogHandler.java +++ b/test/framework/src/main/java/org/elasticsearch/index/engine/TranslogHandler.java @@ -31,6 +31,7 @@ import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.Mapping; import org.elasticsearch.index.mapper.RootObjectMapper; +import org.elasticsearch.index.mapper.SourceToParse; import org.elasticsearch.index.seqno.SequenceNumbers; import org.elasticsearch.index.shard.IndexShard; import org.elasticsearch.index.similarity.SimilarityService; @@ -45,7 +46,6 @@ import static java.util.Collections.emptyList; import static java.util.Collections.emptyMap; -import static org.elasticsearch.index.mapper.SourceToParse.source; public class TranslogHandler implements Engine.TranslogRecoveryRunner { @@ -122,9 +122,8 @@ private Engine.Operation convertToEngineOp(Translog.Operation operation, Engine. final String indexName = mapperService.index().getName(); final Engine.Index engineIndex = IndexShard.prepareIndex(docMapper(index.type()), mapperService.getIndexSettings().getIndexVersionCreated(), - source(indexName, index.type(), index.id(), index.source(), - XContentHelper.xContentType(index.source())) - .routing(index.routing()), index.seqNo(), index.primaryTerm(), + new SourceToParse(indexName, index.type(), index.id(), index.source(), XContentHelper.xContentType(index.source()), + index.routing()), index.seqNo(), index.primaryTerm(), index.version(), null, origin, index.getAutoGeneratedIdTimestamp(), true, SequenceNumbers.UNASSIGNED_SEQ_NO, 0); return engineIndex; case DELETE: diff --git a/test/framework/src/main/java/org/elasticsearch/index/shard/IndexShardTestCase.java b/test/framework/src/main/java/org/elasticsearch/index/shard/IndexShardTestCase.java index 0a007f2a18e2c..2e13cd6e66543 100644 --- a/test/framework/src/main/java/org/elasticsearch/index/shard/IndexShardTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/index/shard/IndexShardTestCase.java @@ -133,20 +133,28 @@ public void onRecoveryFailure(RecoveryState state, RecoveryFailedException e, bo @Override public void setUp() throws Exception { super.setUp(); - threadPool = new TestThreadPool(getClass().getName(), threadPoolSettings()); + threadPool = setUpThreadPool(); primaryTerm = randomIntBetween(1, 100); // use random but fixed term for creating shards failOnShardFailures(); } + protected ThreadPool setUpThreadPool() { + return new TestThreadPool(getClass().getName(), threadPoolSettings()); + } + @Override public void tearDown() throws Exception { try { - ThreadPool.terminate(threadPool, 30, TimeUnit.SECONDS); + tearDownThreadPool(); } finally { super.tearDown(); } } + protected void tearDownThreadPool() { + ThreadPool.terminate(threadPool, 30, TimeUnit.SECONDS); + } + /** * by default, tests will fail if any shard created by this class fails. 
Tests that cause failures by design * can call this method to ignore those failures @@ -697,8 +705,8 @@ protected Engine.IndexResult indexDoc(IndexShard shard, String type, String id, protected Engine.IndexResult indexDoc(IndexShard shard, String type, String id, String source, XContentType xContentType, String routing) throws IOException { - SourceToParse sourceToParse = SourceToParse.source(shard.shardId().getIndexName(), type, id, new BytesArray(source), xContentType); - sourceToParse.routing(routing); + SourceToParse sourceToParse = new SourceToParse( + shard.shardId().getIndexName(), type, id, new BytesArray(source), xContentType, routing); Engine.IndexResult result; if (shard.routingEntry().primary()) { result = shard.applyIndexOperationOnPrimary(Versions.MATCH_ANY, VersionType.INTERNAL, sourceToParse, diff --git a/test/framework/src/main/java/org/elasticsearch/search/RandomSearchRequestGenerator.java b/test/framework/src/main/java/org/elasticsearch/search/RandomSearchRequestGenerator.java index d534af5789448..5d96cd37b054d 100644 --- a/test/framework/src/main/java/org/elasticsearch/search/RandomSearchRequestGenerator.java +++ b/test/framework/src/main/java/org/elasticsearch/search/RandomSearchRequestGenerator.java @@ -39,6 +39,7 @@ import org.elasticsearch.search.collapse.CollapseBuilder; import org.elasticsearch.search.fetch.subphase.FetchSourceContext; import org.elasticsearch.search.fetch.subphase.highlight.HighlightBuilder; +import org.elasticsearch.search.internal.SearchContext; import org.elasticsearch.search.rescore.RescorerBuilder; import org.elasticsearch.search.searchafter.SearchAfterBuilder; import org.elasticsearch.search.slice.SliceBuilder; @@ -83,7 +84,17 @@ private RandomSearchRequestGenerator() {} * {@link #randomSearchSourceBuilder(Supplier, Supplier, Supplier, Supplier, Supplier)}. */ public static SearchRequest randomSearchRequest(Supplier randomSearchSourceBuilder) { - SearchRequest searchRequest = new SearchRequest(); + return randomSearchRequest(new SearchRequest(), randomSearchSourceBuilder); + } + + /** + * Set random fields to the provided search request. + * + * @param searchRequest the search request + * @param randomSearchSourceBuilder builds a random {@link SearchSourceBuilder}. You can use + * {@link #randomSearchSourceBuilder(Supplier, Supplier, Supplier, Supplier, Supplier)}. 
+ */ + public static SearchRequest randomSearchRequest(SearchRequest searchRequest, Supplier randomSearchSourceBuilder) { searchRequest.allowPartialSearchResults(true); if (randomBoolean()) { searchRequest.indices(generateRandomStringArray(10, 10, false, false)); @@ -147,7 +158,13 @@ public static SearchSourceBuilder randomSearchSourceBuilder( builder.terminateAfter(randomIntBetween(1, 100000)); } if (randomBoolean()) { - builder.trackTotalHits(randomBoolean()); + if (randomBoolean()) { + builder.trackTotalHits(randomBoolean()); + } else { + builder.trackTotalHitsUpTo( + randomIntBetween(SearchContext.TRACK_TOTAL_HITS_DISABLED, SearchContext.DEFAULT_TRACK_TOTAL_HITS_UP_TO) + ); + } } switch(randomInt(2)) { diff --git a/test/framework/src/main/java/org/elasticsearch/search/aggregations/BaseAggregationTestCase.java b/test/framework/src/main/java/org/elasticsearch/search/aggregations/BaseAggregationTestCase.java index ffd19d8e94d6e..66188e57f4337 100644 --- a/test/framework/src/main/java/org/elasticsearch/search/aggregations/BaseAggregationTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/search/aggregations/BaseAggregationTestCase.java @@ -213,7 +213,7 @@ protected void randomFieldOrScript(ValuesSourceAggregationBuilder factory, factory.script(mockScript("doc[" + field + "] + 1")); break; default: - throw new AssertionError("Unknow random operation [" + choice + "]"); + throw new AssertionError("Unknown random operation [" + choice + "]"); } } diff --git a/test/framework/src/main/java/org/elasticsearch/test/AbstractQueryTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/AbstractQueryTestCase.java index 40c23b9d5b3fb..a16f55e04d74a 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/AbstractQueryTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/AbstractQueryTestCase.java @@ -379,7 +379,7 @@ protected void assertParsedQuery(String queryAsString, QueryBuilder expectedQuer /** * Parses the query provided as bytes argument and compares it with the expected result provided as argument as a {@link QueryBuilder} */ - private static void assertParsedQuery(XContentParser parser, QueryBuilder expectedQuery) throws IOException { + private void assertParsedQuery(XContentParser parser, QueryBuilder expectedQuery) throws IOException { QueryBuilder newQuery = parseQuery(parser); assertNotSame(newQuery, expectedQuery); assertEquals(expectedQuery, newQuery); @@ -396,7 +396,7 @@ protected QueryBuilder parseQuery(String queryAsString) throws IOException { return parseQuery(parser); } - protected static QueryBuilder parseQuery(XContentParser parser) throws IOException { + protected QueryBuilder parseQuery(XContentParser parser) throws IOException { QueryBuilder parseInnerQueryBuilder = parseInnerQueryBuilder(parser); assertNull(parser.nextToken()); return parseInnerQueryBuilder; @@ -416,7 +416,7 @@ protected boolean builderGeneratesCacheableQueries() { public void testToQuery() throws IOException { for (int runs = 0; runs < NUMBER_OF_TESTQUERIES; runs++) { QueryShardContext context = createShardContext(); - assert context.isCachable(); + assert context.isCacheable(); context.setAllowUnmappedFields(true); QB firstQuery = createTestQueryBuilder(); QB controlQuery = copyQuery(firstQuery); @@ -426,12 +426,12 @@ public void testToQuery() throws IOException { * we first rewrite the query with a private context, then reset the context and then build the actual lucene query*/ QueryBuilder rewritten = rewriteQuery(firstQuery, new 
QueryShardContext(context)); Query firstLuceneQuery = rewritten.toQuery(context); - if (isCachable(firstQuery)) { + if (isCacheable(firstQuery)) { assertTrue("query was marked as not cacheable in the context but this test indicates it should be cacheable: " - + firstQuery.toString(), context.isCachable()); + + firstQuery.toString(), context.isCacheable()); } else { assertFalse("query was marked as cacheable in the context but this test indicates it should not be cacheable: " - + firstQuery.toString(), context.isCachable()); + + firstQuery.toString(), context.isCacheable()); } assertNotNull("toQuery should not return null", firstLuceneQuery); assertLuceneQuery(firstQuery, firstLuceneQuery, searchContext); @@ -476,7 +476,7 @@ private QueryBuilder rewriteQuery(QB queryBuilder, QueryRewriteContext rewriteCo return rewritten; } - protected boolean isCachable(QB queryBuilder) { + protected boolean isCacheable(QB queryBuilder) { return true; } diff --git a/test/framework/src/main/java/org/elasticsearch/test/BackgroundIndexer.java b/test/framework/src/main/java/org/elasticsearch/test/BackgroundIndexer.java index 198c02829b171..eabb05a537ca7 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/BackgroundIndexer.java +++ b/test/framework/src/main/java/org/elasticsearch/test/BackgroundIndexer.java @@ -20,12 +20,10 @@ import com.carrotsearch.randomizedtesting.RandomizedTest; import com.carrotsearch.randomizedtesting.generators.RandomNumbers; import com.carrotsearch.randomizedtesting.generators.RandomStrings; - import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.message.ParameterizedMessage; import org.apache.logging.log4j.util.Supplier; -import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.bulk.BulkItemResponse; import org.elasticsearch.action.bulk.BulkRequestBuilder; import org.elasticsearch.action.bulk.BulkResponse; @@ -63,6 +61,7 @@ public class BackgroundIndexer implements AutoCloseable { final Semaphore availableBudget = new Semaphore(0); final boolean useAutoGeneratedIDs; private final Set<String> ids = ConcurrentCollections.newConcurrentSet(); + private boolean assertNoFailuresOnStop = true; volatile int minFieldSize = 10; volatile int maxFieldSize = 140; @@ -163,13 +162,11 @@ public void run() { } BulkResponse bulkResponse = bulkRequest.get(); for (BulkItemResponse bulkItemResponse : bulkResponse) { - if (!bulkItemResponse.isFailed()) { + if (bulkItemResponse.isFailed() == false) { boolean add = ids.add(bulkItemResponse.getId()); assert add : "ID: " + bulkItemResponse.getId() + " already used"; } else { - throw new ElasticsearchException("bulk request failure, id: [" - + bulkItemResponse.getFailure().getId() + "] message: " - + bulkItemResponse.getFailure().getMessage()); + failures.add(bulkItemResponse.getFailure().getCause()); } } @@ -283,7 +280,9 @@ public void stop() throws InterruptedException { } stop.set(true); Assert.assertThat("timeout while waiting for indexing threads to stop", stopLatch.await(6, TimeUnit.MINUTES), equalTo(true)); - assertNoFailures(); + if (assertNoFailuresOnStop) { + assertNoFailures(); + } } public long totalIndexedDocs() { @@ -308,6 +307,10 @@ public void setMaxFieldSize(int fieldSize) { maxFieldSize = fieldSize; } + public void setAssertNoFailuresOnStop(final boolean assertNoFailuresOnStop) { + this.assertNoFailuresOnStop = assertNoFailuresOnStop; + } + @Override + public void close() throws Exception { + stop();
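With bulk failures now collected instead of thrown from the indexing threads, tests that provoke failures on purpose can opt out of the stop-time assertion. A hypothetical usage in a disruption-style test (constructor arguments are illustrative):

    try (BackgroundIndexer indexer = new BackgroundIndexer("index", "_doc", client(), -1)) {
        indexer.setAssertNoFailuresOnStop(false); // bulk failures are expected while the disruption is active
        // ... start the disruption, let the indexer run, then heal the cluster ...
    } // close() stops the indexer without failing the test on the collected failures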
diff --git a/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java index e17377b500d6b..6e3089a6d63e1 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java @@ -373,18 +373,24 @@ protected final boolean enableWarningsCheck() { protected final void beforeInternal() throws Exception { final Scope currentClusterScope = getCurrentClusterScope(); + Callable<Void> setup = () -> { + cluster().beforeTest(random(), getPerTestTransportClientRatio()); + cluster().wipe(excludeTemplates()); + randomIndexTemplate(); + return null; + }; switch (currentClusterScope) { case SUITE: assert SUITE_SEED != null : "Suite seed was not initialized"; currentCluster = buildAndPutCluster(currentClusterScope, SUITE_SEED); + RandomizedContext.current().runWithPrivateRandomness(SUITE_SEED, setup); break; case TEST: currentCluster = buildAndPutCluster(currentClusterScope, randomLong()); + setup.call(); break; } - cluster().beforeTest(random(), getPerTestTransportClientRatio()); - cluster().wipe(excludeTemplates()); - randomIndexTemplate(); + } private void printTestMessage(String message) { diff --git a/test/framework/src/main/java/org/elasticsearch/test/InternalAggregationTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/InternalAggregationTestCase.java index 59327121c9038..551110ca2520a 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/InternalAggregationTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/InternalAggregationTestCase.java @@ -114,11 +114,13 @@ import org.elasticsearch.search.aggregations.metrics.ParsedTDigestPercentiles; import org.elasticsearch.search.aggregations.metrics.ParsedTopHits; import org.elasticsearch.search.aggregations.metrics.ParsedValueCount; +import org.elasticsearch.search.aggregations.metrics.ParsedWeightedAvg; import org.elasticsearch.search.aggregations.metrics.ScriptedMetricAggregationBuilder; import org.elasticsearch.search.aggregations.metrics.StatsAggregationBuilder; import org.elasticsearch.search.aggregations.metrics.SumAggregationBuilder; import org.elasticsearch.search.aggregations.metrics.TopHitsAggregationBuilder; import org.elasticsearch.search.aggregations.metrics.ValueCountAggregationBuilder; +import org.elasticsearch.search.aggregations.metrics.WeightedAvgAggregationBuilder; import org.elasticsearch.search.aggregations.pipeline.DerivativePipelineAggregationBuilder; import org.elasticsearch.search.aggregations.pipeline.ExtendedStatsBucketPipelineAggregationBuilder; import org.elasticsearch.search.aggregations.pipeline.InternalBucketMetricValue; @@ -186,6 +188,7 @@ public abstract class InternalAggregationTestCase map.put(MaxAggregationBuilder.NAME, (p, c) -> ParsedMax.fromXContent(p, (String) c)); map.put(SumAggregationBuilder.NAME, (p, c) -> ParsedSum.fromXContent(p, (String) c)); map.put(AvgAggregationBuilder.NAME, (p, c) -> ParsedAvg.fromXContent(p, (String) c)); + map.put(WeightedAvgAggregationBuilder.NAME, (p, c) -> ParsedWeightedAvg.fromXContent(p, (String) c)); map.put(ValueCountAggregationBuilder.NAME, (p, c) -> ParsedValueCount.fromXContent(p, (String) c)); map.put(InternalSimpleValue.NAME, (p, c) -> ParsedSimpleValue.fromXContent(p, (String) c)); map.put(DerivativePipelineAggregationBuilder.NAME, (p, c) -> ParsedDerivative.fromXContent(p, (String) c)); diff --git 
a/test/framework/src/main/java/org/elasticsearch/test/TestSearchContext.java b/test/framework/src/main/java/org/elasticsearch/test/TestSearchContext.java index 9d03383561614..18edb5ec3790a 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/TestSearchContext.java +++ b/test/framework/src/main/java/org/elasticsearch/test/TestSearchContext.java @@ -83,7 +83,7 @@ public class TestSearchContext extends SearchContext { SearchTask task; SortAndFormats sort; boolean trackScores = false; - boolean trackTotalHits = true; + int trackTotalHitsUpTo = SearchContext.DEFAULT_TRACK_TOTAL_HITS_UP_TO; ContextIndexSearcher searcher; int size; @@ -364,14 +364,14 @@ public boolean trackScores() { } @Override - public SearchContext trackTotalHits(boolean trackTotalHits) { - this.trackTotalHits = trackTotalHits; + public SearchContext trackTotalHitsUpTo(int trackTotalHitsUpTo) { + this.trackTotalHitsUpTo = trackTotalHitsUpTo; return this; } @Override - public boolean trackTotalHits() { - return trackTotalHits; + public int trackTotalHitsUpTo() { + return trackTotalHitsUpTo; } @Override diff --git a/test/framework/src/main/java/org/elasticsearch/test/discovery/TestZenDiscovery.java b/test/framework/src/main/java/org/elasticsearch/test/discovery/TestZenDiscovery.java index 783dc6325c4a2..53be34c0b40c8 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/discovery/TestZenDiscovery.java +++ b/test/framework/src/main/java/org/elasticsearch/test/discovery/TestZenDiscovery.java @@ -81,7 +81,7 @@ public Map> getDiscoveryTypes(ThreadPool threadPool, return new Coordinator("test_node", fixedSettings, clusterSettings, transportService, namedWriteableRegistry, allocationService, masterService, () -> gatewayMetaState.getPersistedState(settings, (ClusterApplierService) clusterApplier), hostsProvider, - clusterApplier, new Random(Randomness.get().nextLong())); + clusterApplier, Collections.emptyList(), new Random(Randomness.get().nextLong())); } else { return new TestZenDiscovery(fixedSettings, threadPool, transportService, namedWriteableRegistry, masterService, clusterApplier, clusterSettings, hostsProvider, allocationService, gatewayMetaState); diff --git a/test/framework/src/main/java/org/elasticsearch/test/hamcrest/ElasticsearchAssertions.java b/test/framework/src/main/java/org/elasticsearch/test/hamcrest/ElasticsearchAssertions.java index 6005e7e6163f1..f18824b27e6c7 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/hamcrest/ElasticsearchAssertions.java +++ b/test/framework/src/main/java/org/elasticsearch/test/hamcrest/ElasticsearchAssertions.java @@ -47,6 +47,7 @@ import org.elasticsearch.cluster.metadata.IndexTemplateMetaData; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.xcontent.DeprecationHandler; import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.common.xcontent.ToXContent; @@ -72,6 +73,8 @@ import java.util.Locale; import java.util.Map; import java.util.Set; +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.TimeUnit; import java.util.function.Function; import static org.hamcrest.CoreMatchers.equalTo; @@ -129,7 +132,7 @@ public static void assertAcked(CreateIndexResponse response) { * @param builder the request builder */ public static void assertBlocked(ActionRequestBuilder builder) { - assertBlocked(builder, null); + assertBlocked(builder, (ClusterBlock) null); } /** @@ -155,9 +158,9 
@@ public static void assertBlocked(BroadcastResponse replicatedBroadcastResponse) * Executes the request and fails if the request has not been blocked by a specific {@link ClusterBlock}. * * @param builder the request builder - * @param expectedBlock the expected block + * @param expectedBlockId the expected block id */ - public static void assertBlocked(ActionRequestBuilder builder, ClusterBlock expectedBlock) { + public static void assertBlocked(final ActionRequestBuilder builder, @Nullable final Integer expectedBlockId) { try { builder.get(); fail("Request executed with success but a ClusterBlockException was expected"); @@ -165,19 +168,29 @@ public static void assertBlocked(ActionRequestBuilder builder, ClusterBlock expe assertThat(e.blocks().size(), greaterThan(0)); assertThat(e.status(), equalTo(RestStatus.FORBIDDEN)); - if (expectedBlock != null) { + if (expectedBlockId != null) { boolean found = false; for (ClusterBlock clusterBlock : e.blocks()) { - if (clusterBlock.id() == expectedBlock.id()) { + if (clusterBlock.id() == expectedBlockId) { found = true; break; } } - assertThat("Request should have been blocked by [" + expectedBlock + "] instead of " + e.blocks(), found, equalTo(true)); + assertThat("Request should have been blocked by [" + expectedBlockId + "] instead of " + e.blocks(), found, equalTo(true)); } } } + /** + * Executes the request and fails if the request has not been blocked by a specific {@link ClusterBlock}. + * + * @param builder the request builder + * @param expectedBlock the expected block + */ + public static void assertBlocked(final ActionRequestBuilder builder, @Nullable final ClusterBlock expectedBlock) { + assertBlocked(builder, expectedBlock != null ? expectedBlock.id() : null); + } + public static String formatShardStatus(BroadcastResponse response) { StringBuilder msg = new StringBuilder(); msg.append(" Total shards: ").append(response.getTotalShards()) @@ -683,6 +696,23 @@ public static void assertToXContentEquivalent(BytesReference expected, BytesRefe } } + /** + * Waits for a latch to count down and provides a useful error message if it does not. + * Often latches are awaited as assertTrue(latch.await(1, TimeUnit.SECONDS)); + * in case of a failure that just throws an assertion error without any further message. + * + * @param latch The latch to wait for + * @param timeout The value of the timeout + * @param unit The unit of the timeout + * @throws InterruptedException An exception if the waiting is interrupted + */ + public static void awaitLatch(CountDownLatch latch, long timeout, TimeUnit unit) throws InterruptedException { + TimeValue timeValue = new TimeValue(timeout, unit); + String message = String.format(Locale.ROOT, "expected latch to be counted down after %s, but was not", timeValue); + boolean isCountedDown = latch.await(timeout, unit); + assertThat(message, isCountedDown, is(true)); + } + /** * Compares two maps recursively, using arrays comparisons for byte[] through Arrays.equals(byte[], byte[]) */
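The Integer overload of assertBlocked lets a test assert on a block id without constructing the ClusterBlock itself, which helps when the block is created dynamically. Sketched usage (the index name and id value are illustrative only; 4 is the id conventionally used for the index-closed block):

    // fails unless the request is rejected by a ClusterBlockException carrying block id 4
    assertBlocked(client().admin().indices().prepareRefresh("closed-index"), 4);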
diff --git a/test/framework/src/test/java/org/elasticsearch/cluster/coordination/DeterministicTaskQueueTests.java b/test/framework/src/test/java/org/elasticsearch/cluster/coordination/DeterministicTaskQueueTests.java index f55ac081f11fa..c13c840377f5e 100644 --- a/test/framework/src/test/java/org/elasticsearch/cluster/coordination/DeterministicTaskQueueTests.java +++ b/test/framework/src/test/java/org/elasticsearch/cluster/coordination/DeterministicTaskQueueTests.java @@ -22,6 +22,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.threadpool.Scheduler; import org.elasticsearch.threadpool.ThreadPool; import java.util.ArrayList; @@ -30,6 +31,7 @@ import java.util.concurrent.ExecutorService; import java.util.concurrent.ScheduledFuture; import java.util.concurrent.atomic.AtomicBoolean; +import java.util.concurrent.atomic.AtomicInteger; import static org.elasticsearch.node.Node.NODE_NAME_SETTING; import static org.elasticsearch.threadpool.ThreadPool.Names.GENERIC; @@ -390,7 +392,37 @@ public void testDelayVariabilityAppliesToFutureTasks() { assertThat(elapsedTime, lessThanOrEqualTo(delayMillis + variabilityMillis)); } - private static DeterministicTaskQueue newTaskQueue() { + public void testThreadPoolSchedulesPeriodicFutureTasks() { + final DeterministicTaskQueue taskQueue = newTaskQueue(); + advanceToRandomTime(taskQueue); + final List<String> strings = new ArrayList<>(5); + + final ThreadPool threadPool = taskQueue.getThreadPool(); + final long intervalMillis = randomLongBetween(1, 100); + + final AtomicInteger counter = new AtomicInteger(0); + Scheduler.Cancellable cancellable = threadPool.scheduleWithFixedDelay( + () -> strings.add("periodic-" + counter.getAndIncrement()), TimeValue.timeValueMillis(intervalMillis), GENERIC); + assertFalse(taskQueue.hasRunnableTasks()); + assertTrue(taskQueue.hasDeferredTasks()); + + for (int i = 0; i < 3; ++i) { + taskQueue.advanceTime(); + assertTrue(taskQueue.hasRunnableTasks()); + taskQueue.runAllRunnableTasks(); + } + + assertThat(strings, contains("periodic-0", "periodic-1", "periodic-2")); + + cancellable.cancel(); + + taskQueue.advanceTime(); + taskQueue.runAllRunnableTasks(); + + assertThat(strings, contains("periodic-0", "periodic-1", "periodic-2")); + } + + static DeterministicTaskQueue newTaskQueue() { return newTaskQueue(random()); } diff --git a/test/framework/src/test/java/org/elasticsearch/cluster/coordination/MockSinglePrioritizingExecutorTests.java b/test/framework/src/test/java/org/elasticsearch/cluster/coordination/MockSinglePrioritizingExecutorTests.java new file mode 100644 index 0000000000000..427f0d32c1270 --- /dev/null +++ b/test/framework/src/test/java/org/elasticsearch/cluster/coordination/MockSinglePrioritizingExecutorTests.java @@ -0,0 +1,59 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +package org.elasticsearch.cluster.coordination; + +import org.elasticsearch.common.Priority; +import org.elasticsearch.common.util.concurrent.PrioritizedEsThreadPoolExecutor; +import org.elasticsearch.common.util.concurrent.PrioritizedRunnable; +import org.elasticsearch.test.ESTestCase; + +import java.util.concurrent.atomic.AtomicBoolean; + +public class MockSinglePrioritizingExecutorTests extends ESTestCase { + + public void testPrioritizedEsThreadPoolExecutor() { + final DeterministicTaskQueue taskQueue = DeterministicTaskQueueTests.newTaskQueue(); + final PrioritizedEsThreadPoolExecutor executor = new MockSinglePrioritizingExecutor("test", taskQueue); + final AtomicBoolean called1 = new AtomicBoolean(); + final AtomicBoolean called2 = new AtomicBoolean(); + executor.execute(new PrioritizedRunnable(Priority.NORMAL) { + @Override + public void run() { + assertTrue(called1.compareAndSet(false, true)); // check that this is only called once + } + + }); + executor.execute(new PrioritizedRunnable(Priority.HIGH) { + @Override + public void run() { + assertTrue(called2.compareAndSet(false, true)); // check that this is only called once + } + }); + assertFalse(called1.get()); + assertFalse(called2.get()); + taskQueue.runRandomTask(); + assertFalse(called1.get()); + assertTrue(called2.get()); + taskQueue.runRandomTask(); + assertTrue(called1.get()); + assertTrue(called2.get()); + taskQueue.runRandomTask(); + assertFalse(taskQueue.hasRunnableTasks()); + } +} diff --git a/test/framework/src/test/java/org/elasticsearch/test/test/InternalTestClusterTests.java b/test/framework/src/test/java/org/elasticsearch/test/test/InternalTestClusterTests.java index 52c599c89bac6..01768edc5b25c 100644 --- a/test/framework/src/test/java/org/elasticsearch/test/test/InternalTestClusterTests.java +++ b/test/framework/src/test/java/org/elasticsearch/test/test/InternalTestClusterTests.java @@ -56,13 +56,16 @@ import java.util.Set; import java.util.function.Function; import java.util.stream.Collectors; +import java.util.stream.IntStream; -import static org.elasticsearch.cluster.coordination.ClusterBootstrapService.INITIAL_MASTER_NODE_COUNT_SETTING; +import static org.elasticsearch.cluster.coordination.ClusterBootstrapService.INITIAL_MASTER_NODES_SETTING; import static org.elasticsearch.cluster.node.DiscoveryNode.Role.DATA; import static org.elasticsearch.cluster.node.DiscoveryNode.Role.INGEST; import static org.elasticsearch.cluster.node.DiscoveryNode.Role.MASTER; +import static org.elasticsearch.discovery.DiscoveryModule.DISCOVERY_HOSTS_PROVIDER_SETTING; import static org.elasticsearch.discovery.zen.ElectMasterService.DISCOVERY_ZEN_MINIMUM_MASTER_NODES_SETTING; -import static org.elasticsearch.test.discovery.TestZenDiscovery.USE_ZEN2; +import static org.elasticsearch.node.Node.NODE_MASTER_SETTING; +import static org.elasticsearch.node.Node.NODE_NAME_SETTING; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertFileExists; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertFileNotExists; import static org.hamcrest.Matchers.equalTo; @@ -187,6 +190,7 @@ public void testBeforeTest() throws Exception { final String clusterName1 = "shared1"; final String clusterName2 = "shared2"; String transportClient = getTestTransportType(); + final long bootstrapNodeSelectionSeed = randomLong(); NodeConfigurationSource nodeConfigurationSource = new NodeConfigurationSource() { @Override public Settings nodeSettings(int nodeOrdinal) { @@ -200,12 +204,19 @@ public Settings 
nodeSettings(int nodeOrdinal) { if (autoManageMinMasterNodes == false) { assert minNumDataNodes == maxNumDataNodes; assert masterNodes == false; - settings.put(DISCOVERY_ZEN_MINIMUM_MASTER_NODES_SETTING.getKey(), minNumDataNodes / 2 + 1) - .put(INITIAL_MASTER_NODE_COUNT_SETTING.getKey(), minNumDataNodes); + settings.put(DISCOVERY_ZEN_MINIMUM_MASTER_NODES_SETTING.getKey(), minNumDataNodes / 2 + 1); } return settings.build(); } + @Override + public List<Settings> addExtraClusterBootstrapSettings(List<Settings> allNodesSettings) { + if (autoManageMinMasterNodes) { + return allNodesSettings; + } + return addBootstrapConfiguration(new Random(bootstrapNodeSelectionSeed), allNodesSettings); + } + @Override public Path nodeConfigPath(int nodeOrdinal) { return null; @@ -255,6 +266,19 @@ public Settings transportClientSettings() { } } + private static List<Settings> addBootstrapConfiguration(Random random, List<Settings> allNodesSettings) { + final List<Settings> updatedSettings = new ArrayList<>(allNodesSettings); + final int bootstrapIndex = randomFrom(random, IntStream.range(0, updatedSettings.size()) + .filter(i -> NODE_MASTER_SETTING.get(allNodesSettings.get(i))).boxed().collect(Collectors.toList())); + final Settings settings = updatedSettings.get(bootstrapIndex); + assertFalse(INITIAL_MASTER_NODES_SETTING.exists(settings)); + assertTrue(NODE_MASTER_SETTING.get(settings)); + updatedSettings.set(bootstrapIndex, + Settings.builder().put(settings).putList(INITIAL_MASTER_NODES_SETTING.getKey(), allNodesSettings.stream() + .filter(NODE_MASTER_SETTING::get).map(NODE_NAME_SETTING::get).collect(Collectors.toList())).build()); + return updatedSettings; + } + public void testDataFolderAssignmentAndCleaning() throws IOException, InterruptedException { long clusterSeed = randomLong(); boolean masterNodes = randomBoolean(); @@ -272,7 +296,8 @@ public Settings nodeSettings(int nodeOrdinal) { NodeEnvironment.MAX_LOCAL_STORAGE_NODES_SETTING.getKey(), 2 + (masterNodes ?
InternalTestCluster.DEFAULT_HIGH_NUM_MASTER_NODES : 0) + maxNumDataNodes + numClientNodes) .put(NetworkModule.TRANSPORT_TYPE_KEY, getTestTransportType()) - .put(USE_ZEN2.getKey(), false) // full cluster restarts not yet supported + .putList(DISCOVERY_HOSTS_PROVIDER_SETTING.getKey(), "file") + .putList(SettingsBasedHostsProvider.DISCOVERY_ZEN_PING_UNICAST_HOSTS_SETTING.getKey()) .build(); } @@ -375,6 +400,9 @@ public void testDifferentRolesMaintainPathOnRestart() throws Exception { String transportClient = getTestTransportType(); InternalTestCluster cluster = new InternalTestCluster(randomLong(), baseDir, false, false, 0, 0, "test", new NodeConfigurationSource() { + + private boolean bootstrapConfigurationSet; + @Override public Settings nodeSettings(int nodeOrdinal) { return Settings.builder() @@ -384,10 +412,21 @@ public Settings nodeSettings(int nodeOrdinal) { // speedup join timeout as setting initial state timeout to 0 makes split // elections more likely .put(ZenDiscovery.JOIN_TIMEOUT_SETTING.getKey(), "3s") - .put(USE_ZEN2.getKey(), false) // full cluster restarts not yet supported + .putList(DISCOVERY_HOSTS_PROVIDER_SETTING.getKey(), "file") + .putList(SettingsBasedHostsProvider.DISCOVERY_ZEN_PING_UNICAST_HOSTS_SETTING.getKey()) .build(); } + @Override + public List<Settings> addExtraClusterBootstrapSettings(List<Settings> allNodesSettings) { + if (bootstrapConfigurationSet || allNodesSettings.stream().noneMatch(NODE_MASTER_SETTING::get)) { + return allNodesSettings; + } + + bootstrapConfigurationSet = true; + return addBootstrapConfiguration(random(), allNodesSettings); + } + @Override public Path nodeConfigPath(int nodeOrdinal) { return null; @@ -408,10 +447,11 @@ public Settings transportClientSettings() { } roles.add(role); } + final long masterCount = roles.stream().filter(role -> role == MASTER).count(); final Settings minMasterNodes = Settings.builder() - .put(DISCOVERY_ZEN_MINIMUM_MASTER_NODES_SETTING.getKey(), - roles.stream().filter(role -> role == MASTER).count() / 2 + 1 - ).build(); + .put(DISCOVERY_ZEN_MINIMUM_MASTER_NODES_SETTING.getKey(), masterCount / 2 + 1) + .build(); + try { Map<DiscoveryNode.Role, Set<String>> pathsPerRole = new HashMap<>(); for (int i = 0; i < numNodes; i++) { @@ -467,7 +507,8 @@ public Settings nodeSettings(int nodeOrdinal) { return Settings.builder() .put(NodeEnvironment.MAX_LOCAL_STORAGE_NODES_SETTING.getKey(), 2) .put(NetworkModule.TRANSPORT_TYPE_KEY, getTestTransportType()) - .put(USE_ZEN2.getKey(), false) // full cluster restarts not yet supported + .putList(DISCOVERY_HOSTS_PROVIDER_SETTING.getKey(), "file") + .putList(SettingsBasedHostsProvider.DISCOVERY_ZEN_PING_UNICAST_HOSTS_SETTING.getKey()) .build(); } diff --git a/test/framework/src/test/java/org/elasticsearch/test/test/SuiteScopeClusterIT.java b/test/framework/src/test/java/org/elasticsearch/test/test/SuiteScopeClusterIT.java index cd3c3b2032331..4e0623cd13448 100644 --- a/test/framework/src/test/java/org/elasticsearch/test/test/SuiteScopeClusterIT.java +++ b/test/framework/src/test/java/org/elasticsearch/test/test/SuiteScopeClusterIT.java @@ -41,7 +41,6 @@ public class SuiteScopeClusterIT extends ESIntegTestCase { @Test @SuppressForbidden(reason = "repeat is a feature here") @Repeat(iterations = 10, useConstantSeed = true) - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/36202") public void testReproducible() throws IOException { if (ITER++ == 0) { CLUSTER_SEED = cluster().seed(); diff --git a/test/logger-usage/build.gradle b/test/logger-usage/build.gradle index 6ab975fd42eba..8e374d2cf6af7 100644 ---
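To make the bootstrap plumbing concrete, a hedged sketch of what addBootstrapConfiguration effectively writes into the settings of the chosen master-eligible node; the node names are illustrative, and chosenNodeSettings stands in for that node's pre-existing settings.

final Settings bootstrapped = Settings.builder()
    .put(chosenNodeSettings) // hypothetical: the node's existing settings
    .putList(INITIAL_MASTER_NODES_SETTING.getKey(), "node_t0", "node_t2") // every master-eligible node name
    .build();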
a/test/logger-usage/build.gradle +++ b/test/logger-usage/build.gradle @@ -31,7 +31,7 @@ forbiddenApisMain { } jarHell.enabled = true // disabled by parent project -thirdPartyAudit.excludes = [ +thirdPartyAudit.ignoreMissingClasses ( // log4j 'org.osgi.framework.AdaptPermission', 'org.osgi.framework.AdminPermission', @@ -42,4 +42,4 @@ thirdPartyAudit.excludes = [ 'org.osgi.framework.SynchronousBundleListener', 'org.osgi.framework.wiring.BundleWire', 'org.osgi.framework.wiring.BundleWiring' -] \ No newline at end of file +) diff --git a/x-pack/build.gradle b/x-pack/build.gradle index e1c72c734798e..a0db6d9a78b4c 100644 --- a/x-pack/build.gradle +++ b/x-pack/build.gradle @@ -1,6 +1,4 @@ -import org.elasticsearch.gradle.BuildPlugin import org.elasticsearch.gradle.plugin.PluginBuildPlugin -import org.elasticsearch.gradle.Version import org.elasticsearch.gradle.precommit.LicenseHeadersTask Project xpackRootProject = project diff --git a/x-pack/docs/en/security/auditing/overview.asciidoc b/x-pack/docs/en/security/auditing/overview.asciidoc index 8248bcb082479..2bd66190fdb27 100644 --- a/x-pack/docs/en/security/auditing/overview.asciidoc +++ b/x-pack/docs/en/security/auditing/overview.asciidoc @@ -23,7 +23,7 @@ The {es} {security-features} provide two ways to persist audit logs: index. The audit index can reside on the same cluster, or a separate cluster. By default, only the `logfile` output is used when enabling auditing, -implicitly outputing to both `_audit.log` and `_access.log`. +implicitly outputting to both `_audit.log` and `_access.log`. To facilitate browsing and analyzing the events, you can also enable indexing by setting `xpack.security.audit.outputs` in `elasticsearch.yml`: diff --git a/x-pack/docs/en/security/authentication/configuring-pki-realm.asciidoc b/x-pack/docs/en/security/authentication/configuring-pki-realm.asciidoc index 184fc76209339..0554be2f87286 100644 --- a/x-pack/docs/en/security/authentication/configuring-pki-realm.asciidoc +++ b/x-pack/docs/en/security/authentication/configuring-pki-realm.asciidoc @@ -159,7 +159,7 @@ user: <1> <1> The name of a role. <2> The distinguished name (DN) of a PKI user. -The disinguished name for a PKI user follows X.500 naming conventions which +The distinguished name for a PKI user follows X.500 naming conventions which place the most specific fields (like `cn` or `uid`) at the beginning of the name, and the most general fields (like `o` or `dc`) at the end of the name. Some tools, such as _openssl_, may print out the subject name in a different diff --git a/x-pack/docs/en/security/authentication/configuring-saml-realm.asciidoc b/x-pack/docs/en/security/authentication/configuring-saml-realm.asciidoc index 35f79b16d4574..b0bdd67d2deef 100644 --- a/x-pack/docs/en/security/authentication/configuring-saml-realm.asciidoc +++ b/x-pack/docs/en/security/authentication/configuring-saml-realm.asciidoc @@ -218,12 +218,12 @@ When a user authenticates using SAML, they are identified to the {stack}, but this does not automatically grant them access to perform any actions or access any data. -Your SAML users cannot do anything until they are mapped to roles. See -{stack-ov}/saml-role-mapping.html[Configuring role mappings]. +Your SAML users cannot do anything until they are assigned roles. This can be done +through either the {stack-ov}/saml-role-mapping.html[role mapping API], or with +{stack-ov}/realm-chains.html#authorization_realms[authorization realms]. 
-NOTE: The SAML realm supports -{stack-ov}/realm-chains.html#authorization_realms[authorization realms] as an -alternative to role mapping. +NOTE: You cannot use {stack-ov}/mapping-roles.html#mapping-roles-file[role mapping files] +to grant roles to users authenticating via SAML. -- diff --git a/x-pack/docs/en/security/authentication/saml-guide.asciidoc b/x-pack/docs/en/security/authentication/saml-guide.asciidoc index 2c11050a74753..9201a5520f76a 100644 --- a/x-pack/docs/en/security/authentication/saml-guide.asciidoc +++ b/x-pack/docs/en/security/authentication/saml-guide.asciidoc @@ -251,7 +251,7 @@ additional names that can be used: `NameID` elements are an optional, but frequently provided, field within a SAML Assertion that the IdP may use to identify the Subject of that Assertion. In some cases the `NameID` will relate to the user's login - identifier (username) wihin the IdP, but in many cases they will be + identifier (username) within the IdP, but in many cases they will be internally generated identifiers that have no obvious meaning outside of the IdP. @@ -531,7 +531,7 @@ The path to the PEM formatted certificate file. e.g. `saml/saml-sign.crt` The path to the PEM formatted key file. e.g. `saml/saml-sign.key` `signing.secure_key_passphrase`:: -The passphrase for the key, if the file is encypted. This is a +The passphrase for the key, if the file is encrypted. This is a {ref}/secure-settings.html[secure setting] that must be set with the `elasticsearch-keystore` tool. @@ -545,7 +545,7 @@ The path to the PKCS#12 or JKS keystore. e.g. `saml/saml-sign.p12` The alias of the key within the keystore. e.g. `signing-key` `signing.keystore.secure_password`:: -The passphrase for the keystore, if the file is encypted. This is a +The passphrase for the keystore, if the file is encrypted. This is a {ref}/secure-settings.html[secure setting] that must be set with the `elasticsearch-keystore` tool. @@ -582,7 +582,7 @@ The path to the PEM formatted certificate file. e.g. `saml/saml-crypt.crt` The path to the PEM formatted key file. e.g. `saml/saml-crypt.key` `encryption.secure_key_passphrase`:: -The passphrase for the key, if the file is encypted. This is a +The passphrase for the key, if the file is encrypted. This is a {ref}/secure-settings.html[secure setting] that must be set with the `elasticsearch-keystore` tool. @@ -596,7 +596,7 @@ The path to the PKCS#12 or JKS keystore. e.g. `saml/saml-crypt.p12` The alias of the key within the keystore. e.g. `encryption-key` `encryption.keystore.secure_password`:: -The passphrase for the keystore, if the file is encypted. This is a +The passphrase for the keystore, if the file is encrypted. This is a {ref}/secure-settings.html[secure setting] that must be set with the `elasticsearch-keystore` tool. @@ -620,11 +620,14 @@ When a user authenticates using SAML, they are identified to the Elastic Stack, but this does not automatically grant them access to perform any actions or access any data. -Your SAML users cannot do anything until they are assigned roles. This is done +Your SAML users cannot do anything until they are assigned roles. This can be done through either the {ref}/security-api-put-role-mapping.html[add role mapping API], or with <>. +NOTE: You cannot use {stack-ov}/mapping-roles.html#mapping-roles-file[role mapping files] +to grant roles to users authenticating via SAML. 
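Since the docs now steer SAML users toward the role mapping API, a hedged sketch of the request body such a mapping takes; the mapping name, role, and realm are illustrative and mirror the kibana_user/saml1 example referenced below.

// PUT _xpack/security/role_mapping/saml-kibana
String roleMappingBody =
    "{ \"roles\": [ \"kibana_user\" ], \"enabled\": true,"
  + "  \"rules\": { \"field\": { \"realm.name\": \"saml1\" } } }";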
+ This is an example of a simple role mapping that grants the `kibana_user` role to any user who authenticates against the `saml1` realm: @@ -728,7 +731,7 @@ the certificates that {es} has been configured to use. SAML authentication in {kib} is also subject to the `xpack.security.sessionTimeout` setting that is described in the {kib} security -documentation, and you may wish to adjst this timeout to meet your local needs. +documentation, and you may wish to adjust this timeout to meet your local needs. The two additional settings that are required for SAML support are shown below: diff --git a/x-pack/docs/en/security/ccs-clients-integrations/http.asciidoc b/x-pack/docs/en/security/ccs-clients-integrations/http.asciidoc index 6b7ff26cbf3eb..ca22ceeebbe22 100644 --- a/x-pack/docs/en/security/ccs-clients-integrations/http.asciidoc +++ b/x-pack/docs/en/security/ccs-clients-integrations/http.asciidoc @@ -56,7 +56,7 @@ http://elasticsearch-py.readthedocs.org/en/master/#ssl-and-authentication[Python https://metacpan.org/pod/Search::Elasticsearch::Cxn::HTTPTiny#CONFIGURATION[Perl], http://www.elastic.co/guide/en/elasticsearch/client/php-api/current/_security.html[PHP], http://nest.azurewebsites.net/elasticsearch-net/security.html[.NET], -http://www.elastic.co/guide/en/elasticsearch/client/javascript-api/current/auth-reference.html[Javascript] +http://www.elastic.co/guide/en/elasticsearch/client/javascript-api/current/auth-reference.html[JavaScript] //// Groovy - TODO link diff --git a/x-pack/docs/en/watcher/actions.asciidoc b/x-pack/docs/en/watcher/actions.asciidoc index 7e527530f4291..67844290cd051 100644 --- a/x-pack/docs/en/watcher/actions.asciidoc +++ b/x-pack/docs/en/watcher/actions.asciidoc @@ -198,7 +198,7 @@ image::images/action-throttling.jpg[align="center"] When a watch is triggered, its condition determines whether or not to execute the watch actions. Within each action, you can also add a condition per action. These additional conditions enable a single alert to execute different actions depending -on a their respective conditions. The following watch would alway send an email, when +on their respective conditions. The following watch would always send an email, when hits are found from the input search, but only trigger the `notify_pager` action when there are more than 5 hits in the search result. diff --git a/x-pack/docs/en/watcher/customizing-watches.asciidoc b/x-pack/docs/en/watcher/customizing-watches.asciidoc index ea44b8aa231b7..27aa2baef04b7 100644 --- a/x-pack/docs/en/watcher/customizing-watches.asciidoc +++ b/x-pack/docs/en/watcher/customizing-watches.asciidoc @@ -49,7 +49,7 @@ initial payload. A <> input contains a `request` object that specifies the indices you want to search, the {ref}/search-request-search-type.html[search type], and the search request body. The `body` field of a search input is the same as -the body of an Elasticsearch `_search` request, making the full Elaticsearch +the body of an Elasticsearch `_search` request, making the full Elasticsearch Query DSL available for you to use.
For example, the following `search` input loads the latest VIX quote: diff --git a/x-pack/docs/en/watcher/release-notes.asciidoc b/x-pack/docs/en/watcher/release-notes.asciidoc index 627c45829d3e2..5875458a15433 100644 --- a/x-pack/docs/en/watcher/release-notes.asciidoc +++ b/x-pack/docs/en/watcher/release-notes.asciidoc @@ -121,7 +121,7 @@ March 30, 2016 .New Features * Added <> * Added support for adding <> - via HTTP requests and superceding and deprecating the usage of `attach_data` + via HTTP requests and superseding and deprecating the usage of `attach_data` in order to use this feature [float] @@ -143,7 +143,7 @@ February 2, 2016 February 2, 2016 .Enhancements -* Adds support for Elasticssearch 2.1.2 +* Adds support for Elasticsearch 2.1.2 [float] ==== 2.1.1 diff --git a/x-pack/plugin/ccr/build.gradle b/x-pack/plugin/ccr/build.gradle index 97a25d1110d89..b8ed9f55932cc 100644 --- a/x-pack/plugin/ccr/build.gradle +++ b/x-pack/plugin/ccr/build.gradle @@ -54,3 +54,9 @@ dependencyLicenses { run { plugin xpackModule('core') } + +testingConventions.naming { + IT { + baseClass "org.elasticsearch.xpack.CcrIntegTestCase" + } +} diff --git a/x-pack/plugin/ccr/qa/rest/build.gradle b/x-pack/plugin/ccr/qa/rest/build.gradle index c2ca1d499b610..38873262ed273 100644 --- a/x-pack/plugin/ccr/qa/rest/build.gradle +++ b/x-pack/plugin/ccr/qa/rest/build.gradle @@ -13,6 +13,10 @@ task restTest(type: RestIntegTestTask) { restTestCluster { distribution 'zip' + // Disable assertions in FollowingEngineAssertions, otherwise an AssertionError is thrown before + // indexing a document directly in a follower index. In a rest test we want to test the exception + // that is thrown in production when indexing a document directly in a follower index. + environment 'ES_JAVA_OPTS', '-da:org.elasticsearch.xpack.ccr.index.engine.FollowingEngineAssertions' setting 'xpack.ml.enabled', 'false' setting 'xpack.monitoring.enabled', 'false' setting 'xpack.security.enabled', 'true' diff --git a/x-pack/plugin/ccr/qa/rest/src/test/resources/rest-api-spec/test/ccr/follow_and_unfollow.yml b/x-pack/plugin/ccr/qa/rest/src/test/resources/rest-api-spec/test/ccr/follow_and_unfollow.yml index d50bc52bc3620..f73f5c6dfb2d3 100644 --- a/x-pack/plugin/ccr/qa/rest/src/test/resources/rest-api-spec/test/ccr/follow_and_unfollow.yml +++ b/x-pack/plugin/ccr/qa/rest/src/test/resources/rest-api-spec/test/ccr/follow_and_unfollow.yml @@ -52,7 +52,6 @@ - do: ccr.resume_follow: index: bar - body: {} - is_true: acknowledged - do: diff --git a/x-pack/plugin/ccr/qa/rest/src/test/resources/rest-api-spec/test/ccr/index_directly_into_follower_index.yml b/x-pack/plugin/ccr/qa/rest/src/test/resources/rest-api-spec/test/ccr/index_directly_into_follower_index.yml new file mode 100644 index 0000000000000..60c3b404b6f09 --- /dev/null +++ b/x-pack/plugin/ccr/qa/rest/src/test/resources/rest-api-spec/test/ccr/index_directly_into_follower_index.yml @@ -0,0 +1,66 @@ +--- +"Test indexing directly into a follower index": + - do: + cluster.state: {} + + - set: {master_node: master} + + - do: + nodes.info: {} + + - set: {nodes.$master.transport_address: local_ip} + + - do: + cluster.put_settings: + body: + transient: + cluster.remote.local.seeds: $local_ip + flat_settings: true + + - match: {transient: {cluster.remote.local.seeds: $local_ip}} + + - do: + indices.create: + index: foo + body: + settings: + index: + soft_deletes: + enabled: true + mappings: + doc: + properties: + field: + type: keyword + - is_true: acknowledged + + - do: + ccr.follow: + index: bar + body: +
remote_cluster: local + leader_index: foo + - is_true: follow_index_created + - is_true: follow_index_shards_acked + - is_true: index_following_started + + - do: + catch: forbidden + index: + index: bar + body: {} + + - do: + ccr.pause_follow: + index: bar + - is_true: acknowledged + + - do: + indices.close: + index: bar + - is_true: acknowledged + + - do: + ccr.unfollow: + index: bar + - is_true: acknowledged diff --git a/x-pack/plugin/ccr/qa/src/main/java/org/elasticsearch/xpack/ccr/ESCCRRestTestCase.java b/x-pack/plugin/ccr/qa/src/main/java/org/elasticsearch/xpack/ccr/ESCCRRestTestCase.java index 6c3b99d8da3ef..25fbef7ada73e 100644 --- a/x-pack/plugin/ccr/qa/src/main/java/org/elasticsearch/xpack/ccr/ESCCRRestTestCase.java +++ b/x-pack/plugin/ccr/qa/src/main/java/org/elasticsearch/xpack/ccr/ESCCRRestTestCase.java @@ -25,7 +25,7 @@ import java.util.Map; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; -import static org.elasticsearch.rest.action.search.RestSearchAction.TOTAL_HIT_AS_INT_PARAM; +import static org.elasticsearch.rest.action.search.RestSearchAction.TOTAL_HITS_AS_INT_PARAM; import static org.hamcrest.Matchers.endsWith; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThanOrEqualTo; @@ -99,7 +99,7 @@ protected static void verifyDocuments(final String index, request.addParameter("size", Integer.toString(expectedNumDocs)); request.addParameter("sort", "field:asc"); request.addParameter("q", query); - request.addParameter(TOTAL_HIT_AS_INT_PARAM, "true"); + request.addParameter(TOTAL_HITS_AS_INT_PARAM, "true"); Map response = toMap(client.performRequest(request)); int numDocs = (int) XContentMapValues.extractValue("hits.total", response); diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/AutoFollowCoordinator.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/AutoFollowCoordinator.java index 0a7900d004b7b..55e24abc86c26 100644 --- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/AutoFollowCoordinator.java +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/AutoFollowCoordinator.java @@ -246,6 +246,7 @@ public void clusterStateProcessed(String source, ClusterState oldState, ClusterS }; newAutoFollowers.put(remoteCluster, autoFollower); + LOGGER.info("starting auto follower for remote cluster [{}]", remoteCluster); autoFollower.start(); } @@ -256,9 +257,10 @@ public void clusterStateProcessed(String source, ClusterState oldState, ClusterS boolean exist = autoFollowMetadata.getPatterns().values().stream() .anyMatch(pattern -> pattern.getRemoteCluster().equals(remoteCluster)); if (exist == false) { + LOGGER.info("removing auto follower for remote cluster [{}]", remoteCluster); removedRemoteClusters.add(remoteCluster); } else if (autoFollower.remoteClusterConnectionMissing) { - LOGGER.info("Retrying auto follower [{}] after remote cluster connection was missing", remoteCluster); + LOGGER.info("retrying auto follower [{}] after remote cluster connection was missing", remoteCluster); autoFollower.remoteClusterConnectionMissing = false; autoFollower.start(); } diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/ShardFollowNodeTask.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/ShardFollowNodeTask.java index 13b12d4b96f2b..b1d6467168c90 100644 --- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/ShardFollowNodeTask.java +++ 
b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/ShardFollowNodeTask.java @@ -29,8 +29,7 @@ import org.elasticsearch.node.NodeClosedException; import org.elasticsearch.persistent.AllocatedPersistentTask; import org.elasticsearch.tasks.TaskId; -import org.elasticsearch.transport.NodeDisconnectedException; -import org.elasticsearch.transport.NodeNotConnectedException; +import org.elasticsearch.transport.ConnectTransportException; import org.elasticsearch.xpack.ccr.action.bulk.BulkShardOperationsResponse; import org.elasticsearch.xpack.core.ccr.ShardFollowNodeTaskStatus; @@ -448,7 +447,10 @@ static boolean shouldRetry(String remoteCluster, Exception e) { return true; } + // This is thrown when using a Client and its remote cluster alias went MIA String noSuchRemoteClusterMessage = "no such remote cluster: " + remoteCluster; + // This is thrown when creating a Client and the remote cluster does not exist: + String unknownClusterMessage = "unknown cluster alias [" + remoteCluster + "]"; final Throwable actual = ExceptionsHelper.unwrapCause(e); return actual instanceof ShardNotFoundException || actual instanceof IllegalIndexShardStateException || @@ -458,11 +460,11 @@ static boolean shouldRetry(String remoteCluster, Exception e) { actual instanceof ElasticsearchSecurityException || // If user does not have sufficient privileges actual instanceof ClusterBlockException || // If leader index is closed or no elected master actual instanceof IndexClosedException || // If follow index is closed - actual instanceof NodeDisconnectedException || - actual instanceof NodeNotConnectedException || + actual instanceof ConnectTransportException || actual instanceof NodeClosedException || (actual.getMessage() != null && actual.getMessage().contains("TransportService is closed")) || - (actual instanceof IllegalArgumentException && noSuchRemoteClusterMessage.equals(actual.getMessage())); + (actual instanceof IllegalArgumentException && (noSuchRemoteClusterMessage.equals(actual.getMessage()) || + unknownClusterMessage.equals(actual.getMessage()))); } // These methods are protected for testing purposes: diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/ShardFollowTasksExecutor.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/ShardFollowTasksExecutor.java index 14ec147a5366f..97308126ffb3f 100644 --- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/ShardFollowTasksExecutor.java +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/ShardFollowTasksExecutor.java @@ -94,12 +94,6 @@ protected AllocatedPersistentTask createTask(long id, String type, String action PersistentTasksCustomMetaData.PersistentTask taskInProgress, Map headers) { ShardFollowTask params = taskInProgress.getParams(); - final Client remoteClient; - if (params.getRemoteCluster() != null) { - remoteClient = wrapClient(client.getRemoteClusterClient(params.getRemoteCluster()), params.getHeaders()); - } else { - remoteClient = wrapClient(client, params.getHeaders()); - } Client followerClient = wrapClient(client, params.getHeaders()); BiConsumer scheduler = (delay, command) -> { try { @@ -123,8 +117,7 @@ protected void innerUpdateMapping(LongConsumer handler, Consumer erro Index followIndex = params.getFollowShardId().getIndex(); ClusterStateRequest clusterStateRequest = CcrRequests.metaDataRequest(leaderIndex.getName()); - - remoteClient.admin().cluster().state(clusterStateRequest, ActionListener.wrap(clusterStateResponse -> { + 
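A small sanity sketch of why collapsing the two node-level exceptions is safe; `node` stands in for any DiscoveryNode and is hypothetical.

// Both formerly-enumerated exceptions are subclasses of ConnectTransportException,
// so the single instanceof check in shouldRetry still covers them:
assert new NodeDisconnectedException(node, "some/action") instanceof ConnectTransportException;
assert new NodeNotConnectedException(node, "not connected") instanceof ConnectTransportException;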
CheckedConsumer onResponse = clusterStateResponse -> { IndexMetaData indexMetaData = clusterStateResponse.getState().metaData().getIndexSafe(leaderIndex); if (indexMetaData.getMappings().isEmpty()) { assert indexMetaData.getMappingVersion() == 1; @@ -140,7 +133,12 @@ protected void innerUpdateMapping(LongConsumer handler, Consumer erro followerClient.admin().indices().putMapping(putMappingRequest, ActionListener.wrap( putMappingResponse -> handler.accept(indexMetaData.getMappingVersion()), errorHandler)); - }, errorHandler)); + }; + try { + remoteClient(params).admin().cluster().state(clusterStateRequest, ActionListener.wrap(onResponse, errorHandler)); + } catch (Exception e) { + errorHandler.accept(e); + } } @Override @@ -181,7 +179,11 @@ protected void innerUpdateSettings(final LongConsumer finalHandler, final Consum } } }; - remoteClient.admin().cluster().state(clusterStateRequest, ActionListener.wrap(onResponse, errorHandler)); + try { + remoteClient(params).admin().cluster().state(clusterStateRequest, ActionListener.wrap(onResponse, errorHandler)); + } catch (Exception e) { + errorHandler.accept(e); + } } private void closeIndexUpdateSettingsAndOpenIndex(String followIndex, @@ -236,7 +238,7 @@ protected void innerSendShardChangesRequest(long from, int maxOperationCount, Co request.setMaxBatchSize(params.getMaxReadRequestSize()); request.setPollTimeout(params.getReadPollTimeout()); try { - remoteClient.execute(ShardChangesAction.INSTANCE, request, ActionListener.wrap(handler::accept, errorHandler)); + remoteClient(params).execute(ShardChangesAction.INSTANCE, request, ActionListener.wrap(handler::accept, errorHandler)); } catch (Exception e) { errorHandler.accept(e); } @@ -251,6 +253,10 @@ private String getLeaderShardHistoryUUID(ShardFollowTask params) { return recordedLeaderShardHistoryUUIDs[params.getLeaderShardId().id()]; } + private Client remoteClient(ShardFollowTask params) { + return wrapClient(client.getRemoteClusterClient(params.getRemoteCluster()), params.getHeaders()); + } + interface FollowerStatsInfoHandler { void accept(String followerHistoryUUID, long globalCheckpoint, long maxSeqNo); } diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportResumeFollowAction.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportResumeFollowAction.java index 4fc3efedd82c6..0a1a22215a04b 100644 --- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportResumeFollowAction.java +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportResumeFollowAction.java @@ -385,6 +385,7 @@ static String[] extractLeaderShardHistoryUUIDs(Map ccrIndexMetaD nonReplicatedSettings.add(IndexSettings.ALLOW_UNMAPPED); nonReplicatedSettings.add(IndexSettings.INDEX_SEARCH_IDLE_AFTER); nonReplicatedSettings.add(IndexSettings.INDEX_SOFT_DELETES_RETENTION_OPERATIONS_SETTING); + nonReplicatedSettings.add(IndexSettings.INDEX_SOFT_DELETES_RETENTION_LEASE_SETTING); nonReplicatedSettings.add(IndexSettings.MAX_SCRIPT_FIELDS_SETTING); nonReplicatedSettings.add(IndexSettings.MAX_REGEX_LENGTH_SETTING); nonReplicatedSettings.add(IndexSettings.MAX_TERMS_COUNT_SETTING); diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/index/engine/FollowingEngine.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/index/engine/FollowingEngine.java index c5728516426d8..b7086ed876db7 100644 --- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/index/engine/FollowingEngine.java +++ 
b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/index/engine/FollowingEngine.java @@ -16,6 +16,7 @@ import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.Query; import org.apache.lucene.search.TopDocs; +import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.common.lucene.Lucene; import org.elasticsearch.common.metrics.CounterMetric; import org.elasticsearch.index.VersionType; @@ -23,6 +24,7 @@ import org.elasticsearch.index.engine.InternalEngine; import org.elasticsearch.index.mapper.SeqNoFieldMapper; import org.elasticsearch.index.seqno.SequenceNumbers; +import org.elasticsearch.rest.RestStatus; import org.elasticsearch.xpack.ccr.CcrSettings; import java.io.IOException; @@ -56,17 +58,11 @@ private static EngineConfig validateEngineConfig(final EngineConfig engineConfig } private void preFlight(final Operation operation) { - /* - * We assert here so that this goes uncaught in unit tests and fails nodes in standalone tests (we want a harsh failure so that we - * do not have a situation where a shard fails and is recovered elsewhere and a test subsequently passes). We throw an exception so - * that we also prevent issues in production code. - */ - assert operation.seqNo() != SequenceNumbers.UNASSIGNED_SEQ_NO; + assert FollowingEngineAssertions.preFlight(operation); if (operation.seqNo() == SequenceNumbers.UNASSIGNED_SEQ_NO) { - throw new IllegalStateException("a following engine does not accept operations without an assigned sequence number"); + throw new ElasticsearchStatusException("a following engine does not accept operations without an assigned sequence number", + RestStatus.FORBIDDEN); } - assert (operation.origin() == Operation.Origin.PRIMARY) == (operation.versionType() == VersionType.EXTERNAL) : - "invalid version_type in a following engine; version_type=" + operation.versionType() + "origin=" + operation.origin(); } @Override @@ -133,8 +129,7 @@ public int fillSeqNoGaps(long primaryTerm) throws IOException { @Override protected boolean assertPrimaryIncomingSequenceNumber(final Operation.Origin origin, final long seqNo) { - // sequence number should be set when operation origin is primary - assert seqNo != SequenceNumbers.UNASSIGNED_SEQ_NO : "primary operations on a following index must have an assigned sequence number"; + assert FollowingEngineAssertions.assertPrimaryIncomingSequenceNumber(origin, seqNo); return true; } diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/index/engine/FollowingEngineAssertions.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/index/engine/FollowingEngineAssertions.java new file mode 100644 index 0000000000000..31b13270ba191 --- /dev/null +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/index/engine/FollowingEngineAssertions.java @@ -0,0 +1,37 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
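A hedged sketch of the behaviour change from a client's point of view; the helper names follow the CcrIntegTestCase conventions used elsewhere in this PR, and the index name is illustrative.

// Indexing directly into a follower index now surfaces as a 403 instead of
// tripping an engine-level IllegalStateException:
ElasticsearchStatusException e = expectThrows(ElasticsearchStatusException.class,
    () -> followerClient().prepareIndex("index2", "doc").setSource("{}", XContentType.JSON).get());
assertEquals(RestStatus.FORBIDDEN, e.status());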
+ */ +package org.elasticsearch.xpack.ccr.index.engine; + +import org.elasticsearch.index.VersionType; +import org.elasticsearch.index.engine.Engine; +import org.elasticsearch.index.seqno.SequenceNumbers; + +/** + * The preFlight and assertPrimaryIncomingSequenceNumber checks live in their own class + * so that tests which write directly into a follower index can disable just these + * assertions instead of all assertions in the FollowingEngine class. + */ +final class FollowingEngineAssertions { + + static boolean preFlight(final Engine.Operation operation) { + /* + * We assert here so that this goes uncaught in unit tests and fails nodes in standalone tests (we want a harsh failure so that we + * do not have a situation where a shard fails and is recovered elsewhere and a test subsequently passes). We throw an exception so + * that we also prevent issues in production code. + */ + assert operation.seqNo() != SequenceNumbers.UNASSIGNED_SEQ_NO; + assert (operation.origin() == Engine.Operation.Origin.PRIMARY) == (operation.versionType() == VersionType.EXTERNAL) : + "invalid version_type in a following engine; version_type=" + operation.versionType() + " origin=" + operation.origin(); + return true; + } + + static boolean assertPrimaryIncomingSequenceNumber(final Engine.Operation.Origin origin, final long seqNo) { + // sequence number should be set when operation origin is primary + assert seqNo != SequenceNumbers.UNASSIGNED_SEQ_NO : "primary operations on a following index must have an assigned sequence number"; + return true; + } + +} diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/rest/RestResumeFollowAction.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/rest/RestResumeFollowAction.java index 62b3f6323ab88..ce2eab52e0cab 100644 --- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/rest/RestResumeFollowAction.java +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/rest/RestResumeFollowAction.java @@ -37,8 +37,14 @@ protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient } static Request createRequest(RestRequest restRequest) throws IOException { - try (XContentParser parser = restRequest.contentOrSourceParamParser()) { - return Request.fromXContent(parser, restRequest.param("index")); + if (restRequest.hasContentOrSourceParam()) { + try (XContentParser parser = restRequest.contentOrSourceParamParser()) { + return Request.fromXContent(parser, restRequest.param("index")); + } + } else { + Request request = new Request(); + request.setFollowerIndex(restRequest.param("index")); + return request; + } } } diff --git a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/CcrIntegTestCase.java b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/CcrIntegTestCase.java index 01e51ea94f255..05b20050ee3c9 100644 --- a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/CcrIntegTestCase.java +++ b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/CcrIntegTestCase.java @@ -202,7 +202,7 @@ private NodeConfigurationSource createNodeConfigurationSource(String leaderSeedA builder.put(LicenseService.SELF_GENERATED_LICENSE_TYPE.getKey(), "trial"); // Let cluster state api return quickly in order to speed up auto follow tests: builder.put(CcrSettings.CCR_AUTO_FOLLOW_WAIT_FOR_METADATA_TIMEOUT.getKey(), TimeValue.timeValueMillis(100)); - if (leaderSeedAddress != null) { + if (configureRemoteClusterViaNodeSettings() && leaderSeedAddress != null) {
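For reference, the rest-test cluster in this PR's ccr qa build.gradle disables exactly this class of assertions so the production 403 path can be exercised end to end:

// JVM option applied to the test cluster (verbatim from the build.gradle hunk earlier in this diff):
// ES_JAVA_OPTS=-da:org.elasticsearch.xpack.ccr.index.engine.FollowingEngineAssertions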
builder.put("cluster.remote.leader_cluster.seeds", leaderSeedAddress); } return new NodeConfigurationSource() { @@ -247,6 +247,10 @@ protected boolean reuseClusters() { return true; } + protected boolean configureRemoteClusterViaNodeSettings() { + return true; + } + protected final Client leaderClient() { return clusterGroup.leaderCluster.client(); } diff --git a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/LocalIndexFollowingIT.java b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/LocalIndexFollowingIT.java index 9287d92cc0e99..f80fc0c07d9f4 100644 --- a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/LocalIndexFollowingIT.java +++ b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/LocalIndexFollowingIT.java @@ -92,6 +92,7 @@ public void testDoNotCreateFollowerIfLeaderDoesNotHaveSoftDeletes() throws Excep assertThat(client().admin().indices().prepareExists("follower-index").get().isExists(), equalTo(false)); } + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/37014") public void testRemoveRemoteConnection() throws Exception { PutAutoFollowPatternAction.Request request = new PutAutoFollowPatternAction.Request(); request.setName("my_pattern"); diff --git a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/RestartIndexFollowingIT.java b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/RestartIndexFollowingIT.java index d72eca17fdb1b..b8649aaa4320c 100644 --- a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/RestartIndexFollowingIT.java +++ b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/RestartIndexFollowingIT.java @@ -6,8 +6,11 @@ package org.elasticsearch.xpack.ccr; +import org.elasticsearch.action.admin.cluster.settings.ClusterUpdateSettingsRequest; +import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.index.IndexSettings; +import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.CcrIntegTestCase; import org.elasticsearch.xpack.core.ccr.action.PutFollowAction; @@ -24,12 +27,17 @@ protected int numberOfNodesPerCluster() { return 1; } + @Override + protected boolean configureRemoteClusterViaNodeSettings() { + return false; + } + public void testFollowIndex() throws Exception { final String leaderIndexSettings = getIndexSettings(1, 0, singletonMap(IndexSettings.INDEX_SOFT_DELETES_SETTING.getKey(), "true")); - singletonMap(IndexSettings.INDEX_SOFT_DELETES_SETTING.getKey(), "true"); assertAcked(leaderClient().admin().indices().prepareCreate("index1").setSource(leaderIndexSettings, XContentType.JSON)); ensureLeaderGreen("index1"); + setupRemoteCluster(); final PutFollowAction.Request followRequest = putFollow("index1", "index2"); followerClient().execute(PutFollowAction.INSTANCE, followRequest).get(); @@ -57,6 +65,28 @@ public void testFollowIndex() throws Exception { assertThat(followerClient().prepareSearch("index2").get().getHits().getTotalHits().value, equalTo(firstBatchNumDocs + secondBatchNumDocs)); }); + + getLeaderCluster().fullRestart(); + ensureLeaderGreen("index1"); + // Remote connection needs to be re-configured, because all the nodes in leader cluster have been restarted: + setupRemoteCluster(); + + final long thirdBatchNumDocs = randomIntBetween(2, 64); + for (int i = 0; i < thirdBatchNumDocs; i++) { + leaderClient().prepareIndex("index1", "doc").setSource("{}", XContentType.JSON).get(); + } + + assertBusy(() -> { + 
assertThat(followerClient().prepareSearch("index2").get().getHits().getTotalHits().value, + equalTo(firstBatchNumDocs + secondBatchNumDocs + thirdBatchNumDocs)); + }); + } + + private void setupRemoteCluster() { + ClusterUpdateSettingsRequest updateSettingsRequest = new ClusterUpdateSettingsRequest(); + String address = getLeaderCluster().getMasterNodeInstance(TransportService.class).boundAddress().publishAddress().toString(); + updateSettingsRequest.persistentSettings(Settings.builder().put("cluster.remote.leader_cluster.seeds", address)); + assertAcked(followerClient().admin().cluster().updateSettings(updateSettingsRequest).actionGet()); } } diff --git a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/index/engine/FollowEngineIndexShardTests.java b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/index/engine/FollowEngineIndexShardTests.java index 9428ca8978500..8d3c0c3b472aa 100644 --- a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/index/engine/FollowEngineIndexShardTests.java +++ b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/index/engine/FollowEngineIndexShardTests.java @@ -39,13 +39,13 @@ public void testDoNotFillGaps() throws Exception { long seqNo = -1; for (int i = 0; i < 8; i++) { final String id = Long.toString(i); - SourceToParse sourceToParse = SourceToParse.source(indexShard.shardId().getIndexName(), "_doc", id, + SourceToParse sourceToParse = new SourceToParse(indexShard.shardId().getIndexName(), "_doc", id, new BytesArray("{}"), XContentType.JSON); indexShard.applyIndexOperationOnReplica(++seqNo, 1, IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, false, sourceToParse); } long seqNoBeforeGap = seqNo; seqNo += 8; - SourceToParse sourceToParse = SourceToParse.source(indexShard.shardId().getIndexName(), "_doc", "9", + SourceToParse sourceToParse = new SourceToParse(indexShard.shardId().getIndexName(), "_doc", "9", new BytesArray("{}"), XContentType.JSON); indexShard.applyIndexOperationOnReplica(seqNo, 1, IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, false, sourceToParse); diff --git a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/index/engine/FollowingEngineTests.java b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/index/engine/FollowingEngineTests.java index 99998342b11e2..bccc5fed8364e 100644 --- a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/index/engine/FollowingEngineTests.java +++ b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/index/engine/FollowingEngineTests.java @@ -270,9 +270,9 @@ public void onFailedEngine(String reason, Exception e) { null, new NoneCircuitBreakerService(), globalCheckpoint::longValue, + Collections::emptyList, () -> primaryTerm.get(), - EngineTestCase.tombstoneDocSupplier() - ); + EngineTestCase.tombstoneDocSupplier()); } private static Store createStore( diff --git a/x-pack/plugin/core/build.gradle b/x-pack/plugin/core/build.gradle index 817d994fa8bc0..ed61fc9e3e703 100644 --- a/x-pack/plugin/core/build.gradle +++ b/x-pack/plugin/core/build.gradle @@ -1,9 +1,7 @@ import org.elasticsearch.gradle.MavenFilteringHack import java.nio.file.Files -import java.nio.file.Path import java.nio.file.Paths -import java.nio.file.StandardCopyOption import com.carrotsearch.gradle.junit4.RandomizedTestingTask; apply plugin: 'elasticsearch.esplugin' @@ -125,7 +123,7 @@ artifacts { testArtifacts testJar } -thirdPartyAudit.excludes = [ +thirdPartyAudit.ignoreMissingClasses ( //commons-logging optional dependencies 'org.apache.avalon.framework.logger.Logger', 
'org.apache.log.Hierarchy', @@ -133,7 +131,7 @@ thirdPartyAudit.excludes = [ //commons-logging provided dependencies 'javax.servlet.ServletContextEvent', 'javax.servlet.ServletContextListener' -] +) // xpack modules are installed in real clusters as the meta plugin, so // installing them as individual plugins for integ tests doesn't make sense, diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/License.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/License.java index b2130ac9f4b81..158c0eb7b2e63 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/License.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/License.java @@ -469,9 +469,17 @@ public static License fromXContent(XContentParser parser) throws IOException { } // not a license spec if (builder.signature != null) { - byte[] signatureBytes = Base64.getDecoder().decode(builder.signature); - ByteBuffer byteBuffer = ByteBuffer.wrap(signatureBytes); - int version = byteBuffer.getInt(); + int version; + // In case the signature is truncated/malformed we might end up with fewer than 4 bytes in the byteBuffer + // or with a string that cannot be base64 decoded. In either case return a more friendly error instead of + // just throwing the BufferUnderflowException or the IllegalArgumentException + try { + byte[] signatureBytes = Base64.getDecoder().decode(builder.signature); + ByteBuffer byteBuffer = ByteBuffer.wrap(signatureBytes); + version = byteBuffer.getInt(); + } catch (Exception e) { + throw new ElasticsearchException("malformed signature for license [" + builder.uid + "]", e); + } // we take the absolute version, because negative versions // mean that the license was generated by the cluster (see TrialLicense) // and positive version means that the license was signed diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackClientPlugin.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackClientPlugin.java index f40cf8eba57c6..d7abe9a1f0f03 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackClientPlugin.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackClientPlugin.java @@ -47,6 +47,7 @@ import org.elasticsearch.xpack.core.indexlifecycle.AllocateAction; import org.elasticsearch.xpack.core.indexlifecycle.DeleteAction; import org.elasticsearch.xpack.core.indexlifecycle.ForceMergeAction; +import org.elasticsearch.xpack.core.indexlifecycle.FreezeAction; import org.elasticsearch.xpack.core.indexlifecycle.IndexLifecycleFeatureSetUsage; import org.elasticsearch.xpack.core.indexlifecycle.IndexLifecycleMetadata; import org.elasticsearch.xpack.core.indexlifecycle.LifecycleAction; @@ -104,6 +105,7 @@ import org.elasticsearch.xpack.core.ml.action.PutDatafeedAction; import org.elasticsearch.xpack.core.ml.action.PutFilterAction; import org.elasticsearch.xpack.core.ml.action.PutJobAction; +import org.elasticsearch.xpack.core.ml.action.MlUpgradeAction; import org.elasticsearch.xpack.core.ml.action.RevertModelSnapshotAction; import org.elasticsearch.xpack.core.ml.action.StartDatafeedAction; import org.elasticsearch.xpack.core.ml.action.StopDatafeedAction; @@ -288,6 +290,7 @@ public List> getClientActions() { PostCalendarEventsAction.INSTANCE, PersistJobAction.INSTANCE, FindFileStructureAction.INSTANCE, + MlUpgradeAction.INSTANCE, // security ClearRealmCacheAction.INSTANCE, ClearRolesCacheAction.INSTANCE, @@ -423,7 +426,8 @@ public List getNamedWriteables() { new 
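To make the guarded failure mode concrete, a minimal sketch of what used to escape before this change; the truncated signature string is fabricated for illustration.

// "AAA=" base64-decodes to only two bytes, so reading an int underflows:
byte[] signatureBytes = Base64.getDecoder().decode("AAA=");
ByteBuffer byteBuffer = ByteBuffer.wrap(signatureBytes);
int version = byteBuffer.getInt(); // threw BufferUnderflowException prior to this change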
NamedWriteableRegistry.Entry(LifecycleAction.class, ReadOnlyAction.NAME, ReadOnlyAction::new), new NamedWriteableRegistry.Entry(LifecycleAction.class, RolloverAction.NAME, RolloverAction::new), new NamedWriteableRegistry.Entry(LifecycleAction.class, ShrinkAction.NAME, ShrinkAction::new), - new NamedWriteableRegistry.Entry(LifecycleAction.class, DeleteAction.NAME, DeleteAction::new) + new NamedWriteableRegistry.Entry(LifecycleAction.class, DeleteAction.NAME, DeleteAction::new), + new NamedWriteableRegistry.Entry(LifecycleAction.class, FreezeAction.NAME, FreezeAction::new) ); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackSettings.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackSettings.java index 111d8a9a68ca9..13cc4c121daf6 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackSettings.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackSettings.java @@ -139,7 +139,7 @@ private XPackSettings() { * Do not allow insecure hashing algorithms to be used for password hashing */ public static final Setting PASSWORD_HASHING_ALGORITHM = new Setting<>( - "xpack.security.authc.password_hashing.algorithm", "bcrypt", Function.identity(), (v, s) -> { + "xpack.security.authc.password_hashing.algorithm", "bcrypt", Function.identity(), v -> { if (Hasher.getAvailableAlgoStoredHash().contains(v.toLowerCase(Locale.ROOT)) == false) { throw new IllegalArgumentException("Invalid algorithm: " + v + ". Valid values for password hashing are " + Hasher.getAvailableAlgoStoredHash().toString()); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/TransportFreezeIndexAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/TransportFreezeIndexAction.java index 1feb84d9b9539..3031ec5b2a409 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/TransportFreezeIndexAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/TransportFreezeIndexAction.java @@ -5,11 +5,14 @@ */ package org.elasticsearch.xpack.core.action; +import org.apache.logging.log4j.message.ParameterizedMessage; import org.elasticsearch.ResourceNotFoundException; import org.elasticsearch.action.Action; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.IndicesRequest; +import org.elasticsearch.action.admin.indices.close.CloseIndexClusterStateUpdateRequest; +import org.elasticsearch.action.admin.indices.close.TransportCloseIndexAction; import org.elasticsearch.action.admin.indices.open.OpenIndexClusterStateUpdateRequest; import org.elasticsearch.action.admin.indices.open.OpenIndexResponse; import org.elasticsearch.action.support.ActionFilters; @@ -54,17 +57,20 @@ public final class TransportFreezeIndexAction extends private final DestructiveOperations destructiveOperations; private final MetaDataIndexStateService indexStateService; + private final TransportCloseIndexAction transportCloseIndexAction; @Inject public TransportFreezeIndexAction(MetaDataIndexStateService indexStateService, TransportService transportService, ClusterService clusterService, ThreadPool threadPool, ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver, - DestructiveOperations destructiveOperations) { + DestructiveOperations destructiveOperations, + TransportCloseIndexAction transportCloseIndexAction) { super(FreezeIndexAction.NAME, 
transportService, clusterService, threadPool, actionFilters, indexNameExpressionResolver, FreezeRequest::new); this.destructiveOperations = destructiveOperations; this.indexStateService = indexStateService; + this.transportCloseIndexAction = transportCloseIndexAction; } @Override protected String executor() { @@ -103,11 +109,44 @@ private Index[] resolveIndices(FreezeRequest request, ClusterState state) { @Override protected void masterOperation(FreezeRequest request, ClusterState state, ActionListener listener) { + throw new UnsupportedOperationException("The task parameter is required"); + } + + @Override + protected void masterOperation(Task task, TransportFreezeIndexAction.FreezeRequest request, ClusterState state, + ActionListener listener) throws Exception { final Index[] concreteIndices = resolveIndices(request, state); if (concreteIndices.length == 0) { listener.onResponse(new FreezeResponse(true, true)); return; } + + final CloseIndexClusterStateUpdateRequest closeRequest = new CloseIndexClusterStateUpdateRequest(task.getId()) + .ackTimeout(request.timeout()) + .masterNodeTimeout(request.masterNodeTimeout()) + .indices(concreteIndices); + + indexStateService.closeIndices(closeRequest, new ActionListener() { + @Override + public void onResponse(final AcknowledgedResponse response) { + if (response.isAcknowledged()) { + toggleFrozenSettings(concreteIndices, request, listener); + } else { + // TODO improve FreezeResponse so that it also reports failures from the close index API + listener.onResponse(new FreezeResponse(false, false)); + } + } + + @Override + public void onFailure(final Exception t) { + logger.debug(() -> new ParameterizedMessage("failed to close indices [{}]", (Object) concreteIndices), t); + listener.onFailure(t); + } + }); + } + + private void toggleFrozenSettings(final Index[] concreteIndices, final FreezeRequest request, + final ActionListener listener) { clusterService.submitStateUpdateTask("toggle-frozen-settings", new AckedClusterStateUpdateTask(Priority.URGENT, request, new ActionListener() { @Override @@ -136,14 +175,6 @@ public void onFailure(Exception e) { }) { @Override public ClusterState execute(ClusterState currentState) { - List toClose = new ArrayList<>(); - for (Index index : concreteIndices) { - IndexMetaData metaData = currentState.metaData().index(index); - if (metaData.getState() != IndexMetaData.State.CLOSE) { - toClose.add(index); - } - } - currentState = indexStateService.closeIndices(currentState, toClose.toArray(new Index[0]), toClose.toString()); final MetaData.Builder builder = MetaData.builder(currentState.metaData()); ClusterBlocks.Builder blocks = ClusterBlocks.builder().blocks(currentState.blocks()); for (Index index : concreteIndices) { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/AutoFollowStats.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/AutoFollowStats.java index e709959b279b6..600bd5fced3ae 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/AutoFollowStats.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/AutoFollowStats.java @@ -117,11 +117,13 @@ public AutoFollowStats(StreamInput in) throws IOException { numberOfFailedRemoteClusterStateRequests = in.readVLong(); numberOfSuccessfulFollowIndices = in.readVLong(); if (in.getVersion().onOrAfter(Version.V_6_7_0)) { - recentAutoFollowErrors = new TreeMap<>(in.readMap(StreamInput::readString, - in1 -> new Tuple<>(in1.readZLong(), in1.readException()))); + // note: the 
casts to the following Writeable.Reader instances are needed by some IDEs (e.g. Eclipse 4.8) as a compiler help + recentAutoFollowErrors = new TreeMap<>(in.readMap((Writeable.Reader<String>) StreamInput::readString, + (Writeable.Reader<Tuple<Long, ElasticsearchException>>) in1 -> new Tuple<>(in1.readZLong(), in1.readException()))); } else { - recentAutoFollowErrors = new TreeMap<>(in.readMap(StreamInput::readString, - in1 -> new Tuple<>(-1L, in1.readException()))); + // note: the casts to the following Writeable.Reader instances are needed by some IDEs (e.g. Eclipse 4.8) as a compiler help + recentAutoFollowErrors = new TreeMap<>(in.readMap((Writeable.Reader<String>) StreamInput::readString, + (Writeable.Reader<Tuple<Long, ElasticsearchException>>) in1 -> new Tuple<>(-1L, in1.readException()))); } if (in.getVersion().onOrAfter(Version.V_6_6_0)) { autoFollowedClusters = new TreeMap<>(in.readMap(StreamInput::readString, AutoFollowedCluster::new)); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/indexlifecycle/FreezeAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/indexlifecycle/FreezeAction.java new file mode 100644 index 0000000000000..63dbedadd4fe4 --- /dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/indexlifecycle/FreezeAction.java @@ -0,0 +1,94 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.core.indexlifecycle; + +import org.elasticsearch.client.Client; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.xcontent.ObjectParser; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.xpack.core.indexlifecycle.Step.StepKey; + +import java.io.IOException; +import java.util.Arrays; +import java.util.List; + +/** + * A {@link LifecycleAction} which freezes the index.
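+ * The action carries no options: the parser declares no fields and {@code toXContent} emits an empty + * object, so a policy configures it as an empty {@code "freeze": {}} entry (valid in the cold phase).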
+ */ +public class FreezeAction implements LifecycleAction { + public static final String NAME = "freeze"; + + private static final ObjectParser<FreezeAction, Void> PARSER = new ObjectParser<>(NAME, FreezeAction::new); + + public static FreezeAction parse(XContentParser parser) { + return PARSER.apply(parser, null); + } + + public FreezeAction() { + } + + public FreezeAction(StreamInput in) { + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + } + + @Override + public String getWriteableName() { + return NAME; + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + builder.endObject(); + return builder; + } + + @Override + public boolean isSafeAction() { + return true; + } + + @Override + public List<Step> toSteps(Client client, String phase, StepKey nextStepKey) { + StepKey freezeStepKey = new StepKey(phase, NAME, FreezeStep.NAME); + FreezeStep freezeStep = new FreezeStep(freezeStepKey, nextStepKey, client); + return Arrays.asList(freezeStep); + } + + @Override + public List<StepKey> toStepKeys(String phase) { + StepKey freezeStepKey = new StepKey(phase, NAME, FreezeStep.NAME); + return Arrays.asList(freezeStepKey); + } + + @Override + public int hashCode() { + return 1; + } + + @Override + public boolean equals(Object obj) { + if (obj == null) { + return false; + } + if (obj.getClass() != getClass()) { + return false; + } + return true; + } + + @Override + public String toString() { + return Strings.toString(this); + } + +} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/indexlifecycle/FreezeStep.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/indexlifecycle/FreezeStep.java new file mode 100644 index 0000000000000..523aad10a488a --- /dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/indexlifecycle/FreezeStep.java @@ -0,0 +1,30 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.core.indexlifecycle; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.client.Client; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.metadata.IndexMetaData; +import org.elasticsearch.xpack.core.action.TransportFreezeIndexAction; + +/** + * Freezes an index.
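+ * The step delegates to the freeze index action: {@code performAction} sends a {@code FreezeRequest} + * for the step's index and completes the listener with {@code true} once the response arrives, + * or propagates the failure to the listener otherwise.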
+ */ +public class FreezeStep extends AsyncActionStep { + public static final String NAME = "freeze"; + + public FreezeStep(StepKey key, StepKey nextStepKey, Client client) { + super(key, nextStepKey, client); + } + + @Override + public void performAction(IndexMetaData indexMetaData, ClusterState currentState, Listener listener) { + getClient().admin().indices().execute(TransportFreezeIndexAction.FreezeIndexAction.INSTANCE, + new TransportFreezeIndexAction.FreezeRequest(indexMetaData.getIndex().getName()), + ActionListener.wrap(response -> listener.onResponse(true), listener::onFailure)); + } +} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/indexlifecycle/TimeseriesLifecycleType.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/indexlifecycle/TimeseriesLifecycleType.java index 17c9eaf17c083..331a4f9c33aa0 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/indexlifecycle/TimeseriesLifecycleType.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/indexlifecycle/TimeseriesLifecycleType.java @@ -37,7 +37,7 @@ public class TimeseriesLifecycleType implements LifecycleType { static final List ORDERED_VALID_HOT_ACTIONS = Collections.singletonList(RolloverAction.NAME); static final List ORDERED_VALID_WARM_ACTIONS = Arrays.asList(ReadOnlyAction.NAME, AllocateAction.NAME, ShrinkAction.NAME, ForceMergeAction.NAME); - static final List ORDERED_VALID_COLD_ACTIONS = Arrays.asList(AllocateAction.NAME); + static final List ORDERED_VALID_COLD_ACTIONS = Arrays.asList(AllocateAction.NAME, FreezeAction.NAME); static final List ORDERED_VALID_DELETE_ACTIONS = Arrays.asList(DeleteAction.NAME); static final Set VALID_HOT_ACTIONS = Sets.newHashSet(ORDERED_VALID_HOT_ACTIONS); static final Set VALID_WARM_ACTIONS = Sets.newHashSet(ORDERED_VALID_WARM_ACTIONS); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/MlTasks.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/MlTasks.java index e78649d152296..b81a1f7d7b9c0 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/MlTasks.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/MlTasks.java @@ -55,6 +55,11 @@ public static PersistentTasksCustomMetaData.PersistentTask getDatafeedTask(St return tasks == null ? null : tasks.getTask(datafeedTaskId(datafeedId)); } + /** + * Note that the return value of this method does NOT take node relocations into account. + * Use {@link #getJobStateModifiedForReassignments} to return a value adjusted to the most + * appropriate value following relocations. 
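+ * For example, a task whose node has just failed still reports OPENED here even though + * no autodetect process exists any more; the reassignment-aware variant reports such + * a job as OPENING instead.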
+ */ public static JobState getJobState(String jobId, @Nullable PersistentTasksCustomMetaData tasks) { PersistentTasksCustomMetaData.PersistentTask task = getJobTask(jobId, tasks); if (task != null) { @@ -68,6 +73,36 @@ public static JobState getJobState(String jobId, @Nullable PersistentTasksCustom return JobState.CLOSED; } + public static JobState getJobStateModifiedForReassignments(String jobId, @Nullable PersistentTasksCustomMetaData tasks) { + return getJobStateModifiedForReassignments(getJobTask(jobId, tasks)); + } + + public static JobState getJobStateModifiedForReassignments(@Nullable PersistentTasksCustomMetaData.PersistentTask task) { + if (task == null) { + // A closed job has no persistent task + return JobState.CLOSED; + } + JobTaskState jobTaskState = (JobTaskState) task.getState(); + if (jobTaskState == null) { + return JobState.OPENING; + } + JobState jobState = jobTaskState.getState(); + if (jobTaskState.isStatusStale(task)) { + // the job is re-locating + if (jobState == JobState.CLOSING) { + // previous executor node failed while the job was closing - it won't + // be reopened on another node, so consider it CLOSED for most purposes + return JobState.CLOSED; + } + if (jobState != JobState.FAILED) { + // previous executor node failed and current executor node didn't + // have the chance to set job status to OPENING + return JobState.OPENING; + } + } + return jobState; + } + public static DatafeedState getDatafeedState(String datafeedId, @Nullable PersistentTasksCustomMetaData tasks) { PersistentTasksCustomMetaData.PersistentTask task = getDatafeedTask(datafeedId, tasks); if (task != null && task.getState() != null) { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/MlUpgradeAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/MlUpgradeAction.java new file mode 100644 index 0000000000000..404f15d4f6270 --- /dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/MlUpgradeAction.java @@ -0,0 +1,160 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.xpack.core.ml.action; + +import org.elasticsearch.action.Action; +import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.action.support.IndicesOptions; +import org.elasticsearch.action.support.master.AcknowledgedResponse; +import org.elasticsearch.action.support.master.MasterNodeReadOperationRequestBuilder; +import org.elasticsearch.action.support.master.MasterNodeReadRequest; +import org.elasticsearch.client.ElasticsearchClient; +import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.xcontent.ObjectParser; +import org.elasticsearch.common.xcontent.ToXContentObject; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.tasks.CancellableTask; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.tasks.TaskId; +import org.elasticsearch.xpack.core.ml.job.persistence.AnomalyDetectorsIndexFields; + +import java.io.IOException; +import java.util.Map; +import java.util.Objects; + + +public class MlUpgradeAction extends Action { + public static final MlUpgradeAction INSTANCE = new MlUpgradeAction(); + public static final String NAME = "cluster:admin/xpack/ml/upgrade"; + + private MlUpgradeAction() { + super(NAME); + } + + @Override + public AcknowledgedResponse newResponse() { + return new AcknowledgedResponse(); + } + + public static class Request extends MasterNodeReadRequest implements ToXContentObject { + + private static final ParseField REINDEX_BATCH_SIZE = new ParseField("reindex_batch_size"); + + public static ObjectParser PARSER = new ObjectParser<>("ml_upgrade", true, Request::new); + static { + PARSER.declareInt(Request::setReindexBatchSize, REINDEX_BATCH_SIZE); + } + + static final String INDEX = AnomalyDetectorsIndexFields.RESULTS_INDEX_PREFIX + "*"; + private int reindexBatchSize = 1000; + + /** + * Should this task store its result? + */ + private boolean shouldStoreResult; + + // for serialization + public Request() { + } + + public Request(StreamInput in) throws IOException { + super(in); + reindexBatchSize = in.readInt(); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + out.writeInt(reindexBatchSize); + } + + public String[] indices() { + return new String[]{INDEX}; + } + + public IndicesOptions indicesOptions() { + return IndicesOptions.strictExpandOpenAndForbidClosed(); + } + + /** + * Should this task store its result after it has finished? 
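+ * When set, the result of the upgrade is persisted on completion so that it can still be + * fetched through the task management API after the task has finished.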
+ */ + public Request setShouldStoreResult(boolean shouldStoreResult) { + this.shouldStoreResult = shouldStoreResult; + return this; + } + + @Override + public boolean getShouldStoreResult() { + return shouldStoreResult; + } + + public Request setReindexBatchSize(int reindexBatchSize) { + this.reindexBatchSize = reindexBatchSize; + return this; + } + + public int getReindexBatchSize() { + return reindexBatchSize; + } + + @Override + public ActionRequestValidationException validate() { + if (reindexBatchSize <= 0) { + ActionRequestValidationException validationException = new ActionRequestValidationException(); + validationException.addValidationError("["+ REINDEX_BATCH_SIZE.getPreferredName()+"] must be greater than 0."); + return validationException; + } + return null; + } + + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + + Request request = (Request) o; + return Objects.equals(reindexBatchSize, request.reindexBatchSize); + } + + @Override + public int hashCode() { + return Objects.hash(reindexBatchSize); + } + + @Override + public Task createTask(long id, String type, String action, TaskId parentTaskId, Map headers) { + return new CancellableTask(id, type, action, "ml-upgrade", parentTaskId, headers) { + @Override + public boolean shouldCancelChildrenOnCancellation() { + return true; + } + }; + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + builder.field(REINDEX_BATCH_SIZE.getPreferredName(), reindexBatchSize); + builder.endObject(); + return builder; + } + } + + public static class RequestBuilder extends MasterNodeReadOperationRequestBuilder { + + public RequestBuilder(ElasticsearchClient client) { + super(client, INSTANCE, new Request()); + } + } + +} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/datafeed/DatafeedConfig.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/datafeed/DatafeedConfig.java index 02f76c54a2739..3002a43e9d8c2 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/datafeed/DatafeedConfig.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/datafeed/DatafeedConfig.java @@ -124,7 +124,6 @@ public class DatafeedConfig extends AbstractDiffable implements public static final ParseField FREQUENCY = new ParseField("frequency"); public static final ParseField INDEXES = new ParseField("indexes"); public static final ParseField INDICES = new ParseField("indices"); - public static final ParseField TYPES = new ParseField("types"); public static final ParseField QUERY = new ParseField("query"); public static final ParseField SCROLL_SIZE = new ParseField("scroll_size"); public static final ParseField AGGREGATIONS = new ParseField("aggregations"); @@ -161,7 +160,6 @@ private static ObjectParser createParser(boolean ignoreUnknownFie parser.declareString(Builder::setJobId, Job.ID); parser.declareStringArray(Builder::setIndices, INDEXES); parser.declareStringArray(Builder::setIndices, INDICES); - parser.declareStringArray(Builder::setTypes, TYPES); parser.declareString((builder, val) -> builder.setQueryDelay(TimeValue.parseTimeValue(val, QUERY_DELAY.getPreferredName())), QUERY_DELAY); parser.declareString((builder, val) -> @@ -212,7 +210,6 @@ private static ObjectParser createParser(boolean ignoreUnknownFie private final TimeValue frequency; private final List indices; - private final 
List types; private final Map query; private final Map aggregations; private final List scriptFields; @@ -223,7 +220,7 @@ private static ObjectParser createParser(boolean ignoreUnknownFie private final CachedSupplier querySupplier; private final CachedSupplier aggSupplier; - private DatafeedConfig(String id, String jobId, TimeValue queryDelay, TimeValue frequency, List indices, List types, + private DatafeedConfig(String id, String jobId, TimeValue queryDelay, TimeValue frequency, List indices, Map query, Map aggregations, List scriptFields, Integer scrollSize, ChunkingConfig chunkingConfig, Map headers, DelayedDataCheckConfig delayedDataCheckConfig) { @@ -232,7 +229,6 @@ private DatafeedConfig(String id, String jobId, TimeValue queryDelay, TimeValue this.queryDelay = queryDelay; this.frequency = frequency; this.indices = indices == null ? null : Collections.unmodifiableList(indices); - this.types = types == null ? null : Collections.unmodifiableList(types); this.query = query == null ? null : Collections.unmodifiableMap(query); this.aggregations = aggregations == null ? null : Collections.unmodifiableMap(aggregations); this.scriptFields = scriptFields == null ? null : Collections.unmodifiableList(scriptFields); @@ -254,10 +250,11 @@ public DatafeedConfig(StreamInput in) throws IOException { } else { this.indices = null; } - if (in.readBoolean()) { - this.types = Collections.unmodifiableList(in.readList(StreamInput::readString)); - } else { - this.types = null; + // This consumes the list of types if there was one. + if (in.getVersion().before(Version.V_7_0_0)) { + if (in.readBoolean()) { + in.readList(StreamInput::readString); + } } if (in.getVersion().before(Version.V_6_6_0)) { this.query = QUERY_TRANSFORMER.toMap(in.readNamedWriteable(QueryBuilder.class)); @@ -325,10 +322,6 @@ public List getIndices() { return indices; } - public List getTypes() { - return types; - } - public Integer getScrollSize() { return scrollSize; } @@ -419,11 +412,11 @@ public void writeTo(StreamOutput out) throws IOException { } else { out.writeBoolean(false); } - if (types != null) { + // Write the now removed types to prior versions. 
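+ // (pre-7.0 readers unconditionally consume a boolean and, when it is true, a string list).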
+ // An empty list is expected + if (out.getVersion().before(Version.V_7_0_0)) { out.writeBoolean(true); - out.writeStringList(types); - } else { - out.writeBoolean(false); + out.writeStringList(Collections.emptyList()); } if (out.getVersion().before(Version.V_6_6_0)) { out.writeNamedWriteable(getParsedQuery()); @@ -464,7 +457,6 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws builder.field(FREQUENCY.getPreferredName(), frequency.getStringRep()); } builder.field(INDICES.getPreferredName(), indices); - builder.field(TYPES.getPreferredName(), types); builder.field(QUERY.getPreferredName(), query); if (aggregations != null) { builder.field(AGGREGATIONS.getPreferredName(), aggregations); @@ -512,7 +504,6 @@ public boolean equals(Object other) { && Objects.equals(this.frequency, that.frequency) && Objects.equals(this.queryDelay, that.queryDelay) && Objects.equals(this.indices, that.indices) - && Objects.equals(this.types, that.types) && Objects.equals(this.query, that.query) && Objects.equals(this.scrollSize, that.scrollSize) && Objects.equals(this.aggregations, that.aggregations) @@ -524,8 +515,8 @@ public boolean equals(Object other) { @Override public int hashCode() { - return Objects.hash(id, jobId, frequency, queryDelay, indices, types, query, scrollSize, aggregations, scriptFields, - chunkingConfig, headers, delayedDataCheckConfig); + return Objects.hash(id, jobId, frequency, queryDelay, indices, query, scrollSize, aggregations, scriptFields, chunkingConfig, + headers, delayedDataCheckConfig); } @Override @@ -591,7 +582,6 @@ public static class Builder { private TimeValue queryDelay; private TimeValue frequency; private List indices = Collections.emptyList(); - private List types = Collections.emptyList(); private Map query; private Map aggregations; private List scriptFields; @@ -618,7 +608,6 @@ public Builder(DatafeedConfig config) { this.queryDelay = config.queryDelay; this.frequency = config.frequency; this.indices = new ArrayList<>(config.indices); - this.types = new ArrayList<>(config.types); this.query = config.query == null ? null : new LinkedHashMap<>(config.query); this.aggregations = config.aggregations == null ? null : new LinkedHashMap<>(config.aggregations); this.scriptFields = config.scriptFields == null ? 
null : new ArrayList<>(config.scriptFields); @@ -648,10 +637,6 @@ public void setIndices(List indices) { this.indices = ExceptionsHelper.requireNonNull(indices, INDICES.getPreferredName()); } - public void setTypes(List types) { - this.types = ExceptionsHelper.requireNonNull(types, TYPES.getPreferredName()); - } - public void setQueryDelay(TimeValue queryDelay) { TimeUtils.checkNonNegativeMultiple(queryDelay, TimeUnit.MILLISECONDS, QUERY_DELAY); this.queryDelay = queryDelay; @@ -741,15 +726,12 @@ public DatafeedConfig build() { if (indices == null || indices.isEmpty() || indices.contains(null) || indices.contains("")) { throw invalidOptionValue(INDICES.getPreferredName(), indices); } - if (types == null || types.contains(null) || types.contains("")) { - throw invalidOptionValue(TYPES.getPreferredName(), types); - } validateScriptFields(); setDefaultChunkingConfig(); setDefaultQueryDelay(); - return new DatafeedConfig(id, jobId, queryDelay, frequency, indices, types, query, aggregations, scriptFields, scrollSize, + return new DatafeedConfig(id, jobId, queryDelay, frequency, indices, query, aggregations, scriptFields, scrollSize, chunkingConfig, headers, delayedDataCheckConfig); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/datafeed/DatafeedUpdate.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/datafeed/DatafeedUpdate.java index 62cce599735e8..8bc49d4598a21 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/datafeed/DatafeedUpdate.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/datafeed/DatafeedUpdate.java @@ -48,7 +48,6 @@ public class DatafeedUpdate implements Writeable, ToXContentObject { PARSER.declareString(Builder::setJobId, Job.ID); PARSER.declareStringArray(Builder::setIndices, DatafeedConfig.INDEXES); PARSER.declareStringArray(Builder::setIndices, DatafeedConfig.INDICES); - PARSER.declareStringArray(Builder::setTypes, DatafeedConfig.TYPES); PARSER.declareString((builder, val) -> builder.setQueryDelay( TimeValue.parseTimeValue(val, DatafeedConfig.QUERY_DELAY.getPreferredName())), DatafeedConfig.QUERY_DELAY); PARSER.declareString((builder, val) -> builder.setFrequency( @@ -79,7 +78,6 @@ public class DatafeedUpdate implements Writeable, ToXContentObject { private final TimeValue queryDelay; private final TimeValue frequency; private final List indices; - private final List types; private final QueryBuilder query; private final AggregatorFactories.Builder aggregations; private final List scriptFields; @@ -87,15 +85,14 @@ public class DatafeedUpdate implements Writeable, ToXContentObject { private final ChunkingConfig chunkingConfig; private final DelayedDataCheckConfig delayedDataCheckConfig; - private DatafeedUpdate(String id, String jobId, TimeValue queryDelay, TimeValue frequency, List indices, List types, - QueryBuilder query, AggregatorFactories.Builder aggregations, List scriptFields, - Integer scrollSize, ChunkingConfig chunkingConfig, DelayedDataCheckConfig delayedDataCheckConfig) { + private DatafeedUpdate(String id, String jobId, TimeValue queryDelay, TimeValue frequency, List indices, QueryBuilder query, + AggregatorFactories.Builder aggregations, List scriptFields, Integer scrollSize, + ChunkingConfig chunkingConfig, DelayedDataCheckConfig delayedDataCheckConfig) { this.id = id; this.jobId = jobId; this.queryDelay = queryDelay; this.frequency = frequency; this.indices = indices; - this.types = types; this.query = query; this.aggregations = aggregations; 
this.scriptFields = scriptFields; @@ -114,10 +111,11 @@ public DatafeedUpdate(StreamInput in) throws IOException { } else { this.indices = null; } - if (in.readBoolean()) { - this.types = in.readList(StreamInput::readString); - } else { - this.types = null; + // This consumes the list of types if there was one. + if (in.getVersion().before(Version.V_7_0_0)) { + if (in.readBoolean()) { + in.readList(StreamInput::readString); + } } this.query = in.readOptionalNamedWriteable(QueryBuilder.class); this.aggregations = in.readOptionalWriteable(AggregatorFactories.Builder::new); @@ -154,11 +152,11 @@ public void writeTo(StreamOutput out) throws IOException { } else { out.writeBoolean(false); } - if (types != null) { + // Write the now removed types to prior versions. + // An empty list is expected + if (out.getVersion().before(Version.V_7_0_0)) { out.writeBoolean(true); - out.writeStringList(types); - } else { - out.writeBoolean(false); + out.writeStringList(Collections.emptyList()); } out.writeOptionalNamedWriteable(query); out.writeOptionalWriteable(aggregations); @@ -187,7 +185,6 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws builder.field(DatafeedConfig.FREQUENCY.getPreferredName(), frequency.getStringRep()); } addOptionalField(builder, DatafeedConfig.INDICES, indices); - addOptionalField(builder, DatafeedConfig.TYPES, types); addOptionalField(builder, DatafeedConfig.QUERY, query); addOptionalField(builder, DatafeedConfig.AGGREGATIONS, aggregations); if (scriptFields != null) { @@ -226,10 +223,6 @@ List getIndices() { return indices; } - List getTypes() { - return types; - } - Integer getScrollSize() { return scrollSize; } @@ -291,9 +284,6 @@ public DatafeedConfig apply(DatafeedConfig datafeedConfig, Map h if (indices != null) { builder.setIndices(indices); } - if (types != null) { - builder.setTypes(types); - } if (query != null) { builder.setParsedQuery(query); } @@ -347,7 +337,6 @@ public boolean equals(Object other) { && Objects.equals(this.frequency, that.frequency) && Objects.equals(this.queryDelay, that.queryDelay) && Objects.equals(this.indices, that.indices) - && Objects.equals(this.types, that.types) && Objects.equals(this.query, that.query) && Objects.equals(this.scrollSize, that.scrollSize) && Objects.equals(this.aggregations, that.aggregations) @@ -358,8 +347,8 @@ public boolean equals(Object other) { @Override public int hashCode() { - return Objects.hash(id, jobId, frequency, queryDelay, indices, types, query, scrollSize, aggregations, scriptFields, - chunkingConfig, delayedDataCheckConfig); + return Objects.hash(id, jobId, frequency, queryDelay, indices, query, scrollSize, aggregations, scriptFields, chunkingConfig, + delayedDataCheckConfig); } @Override @@ -371,7 +360,6 @@ boolean isNoop(DatafeedConfig datafeed) { return (frequency == null || Objects.equals(frequency, datafeed.getFrequency())) && (queryDelay == null || Objects.equals(queryDelay, datafeed.getQueryDelay())) && (indices == null || Objects.equals(indices, datafeed.getIndices())) - && (types == null || Objects.equals(types, datafeed.getTypes())) && (query == null || Objects.equals(query, datafeed.getParsedQuery())) && (scrollSize == null || Objects.equals(scrollSize, datafeed.getQueryDelay())) && (aggregations == null || Objects.equals(aggregations, datafeed.getParsedAggregations())) @@ -387,7 +375,6 @@ public static class Builder { private TimeValue queryDelay; private TimeValue frequency; private List indices; - private List types; private QueryBuilder query; private 
AggregatorFactories.Builder aggregations; private List scriptFields; @@ -408,7 +395,6 @@ public Builder(DatafeedUpdate config) { this.queryDelay = config.queryDelay; this.frequency = config.frequency; this.indices = config.indices; - this.types = config.types; this.query = config.query; this.aggregations = config.aggregations; this.scriptFields = config.scriptFields; @@ -429,10 +415,6 @@ public void setIndices(List indices) { this.indices = indices; } - public void setTypes(List types) { - this.types = types; - } - public void setQueryDelay(TimeValue queryDelay) { this.queryDelay = queryDelay; } @@ -468,7 +450,7 @@ public void setChunkingConfig(ChunkingConfig chunkingConfig) { } public DatafeedUpdate build() { - return new DatafeedUpdate(id, jobId, queryDelay, frequency, indices, types, query, aggregations, scriptFields, scrollSize, + return new DatafeedUpdate(id, jobId, queryDelay, frequency, indices, query, aggregations, scriptFields, scrollSize, chunkingConfig, delayedDataCheckConfig); } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/AnalysisLimits.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/AnalysisLimits.java index 797df5892f82f..1e114ee0f7a46 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/AnalysisLimits.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/AnalysisLimits.java @@ -36,7 +36,7 @@ public class AnalysisLimits implements ToXContentObject, Writeable { * the old default value should be used. From 6.3 onwards, the value will always be explicit. */ public static final long DEFAULT_MODEL_MEMORY_LIMIT_MB = 1024L; - static final long PRE_6_1_DEFAULT_MODEL_MEMORY_LIMIT_MB = 4096L; + public static final long PRE_6_1_DEFAULT_MODEL_MEMORY_LIMIT_MB = 4096L; public static final long DEFAULT_CATEGORIZATION_EXAMPLES_LIMIT = 4; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/JobTaskState.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/JobTaskState.java index 2e6cc4b99c4bb..d979b897ad43a 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/JobTaskState.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/JobTaskState.java @@ -67,6 +67,17 @@ public JobState getState() { return state; } + /** + * The job state stores the allocation ID at the time it was last set. + * This method compares the allocation ID in the state with the allocation + * ID in the task. If the two are different then the task has been relocated + * to a different node after the last time the state was set. This in turn + * means that the state is not necessarily correct. For example, a job that + * has a state of OPENED but is stale must be considered to be OPENING, because + * it won't yet have a corresponding autodetect process. + * @param task The job task to check. + * @return Has the task been relocated to another node and not had its status set since then? 
+ */ public boolean isStatusStale(PersistentTask task) { return allocationId != task.getAllocationId(); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/persistence/AnomalyDetectorsIndex.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/persistence/AnomalyDetectorsIndex.java index 673e796ef7e1f..b9f887d2d49fc 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/persistence/AnomalyDetectorsIndex.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/persistence/AnomalyDetectorsIndex.java @@ -44,7 +44,15 @@ public static String resultsWriteAlias(String jobId) { * @return The index name */ public static String jobStateIndexName() { - return AnomalyDetectorsIndexFields.STATE_INDEX_NAME; + return AnomalyDetectorsIndexFields.STATE_INDEX_PREFIX; + } + + /** + * The name pattern to capture all .ml-state prefixed indices + * @return The .ml-state index pattern + */ + public static String jobStateIndexPattern() { + return AnomalyDetectorsIndexFields.STATE_INDEX_PREFIX + "*"; } /** diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/persistence/AnomalyDetectorsIndexFields.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/persistence/AnomalyDetectorsIndexFields.java index 527ba5dc1458b..96f21876223ce 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/persistence/AnomalyDetectorsIndexFields.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/persistence/AnomalyDetectorsIndexFields.java @@ -9,7 +9,7 @@ public final class AnomalyDetectorsIndexFields { public static final String CONFIG_INDEX = ".ml-config"; public static final String RESULTS_INDEX_PREFIX = ".ml-anomalies-"; - public static final String STATE_INDEX_NAME = ".ml-state"; + public static final String STATE_INDEX_PREFIX = ".ml-state"; public static final String RESULTS_INDEX_DEFAULT = "shared"; private AnomalyDetectorsIndexFields() {} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/persistence/ElasticsearchMappings.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/persistence/ElasticsearchMappings.java index b3d4155f30adc..fb0db771fa581 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/persistence/ElasticsearchMappings.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/persistence/ElasticsearchMappings.java @@ -325,9 +325,6 @@ public static void addDatafeedConfigFields(XContentBuilder builder) throws IOExc .startObject(DatafeedConfig.INDICES.getPreferredName()) .field(TYPE, KEYWORD) .endObject() - .startObject(DatafeedConfig.TYPES.getPreferredName()) - .field(TYPE, KEYWORD) - .endObject() .startObject(DatafeedConfig.QUERY.getPreferredName()) .field(ENABLED, false) .endObject() @@ -898,6 +895,8 @@ private static void addModelSnapshotMapping(XContentBuilder builder) throws IOEx builder.startObject(ModelSnapshot.QUANTILES.getPreferredName()) .field(ENABLED, false) + .endObject().startObject(ModelSnapshot.MIN_VERSION.getPreferredName()) + .field(TYPE, KEYWORD) .endObject() .startObject(ModelSnapshot.LATEST_RECORD_TIME.getPreferredName()) .field(TYPE, DATE) diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/results/ReservedFieldNames.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/results/ReservedFieldNames.java index 646671c2c3925..333b87b0c294f 
100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/results/ReservedFieldNames.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/results/ReservedFieldNames.java @@ -167,6 +167,7 @@ public final class ReservedFieldNames { ModelSnapshot.LATEST_RECORD_TIME.getPreferredName(), ModelSnapshot.LATEST_RESULT_TIME.getPreferredName(), ModelSnapshot.RETAIN.getPreferredName(), + ModelSnapshot.MIN_VERSION.getPreferredName(), Result.RESULT_TYPE.getPreferredName(), Result.TIMESTAMP.getPreferredName(), @@ -242,7 +243,6 @@ public final class ReservedFieldNames { DatafeedConfig.QUERY_DELAY.getPreferredName(), DatafeedConfig.FREQUENCY.getPreferredName(), DatafeedConfig.INDICES.getPreferredName(), - DatafeedConfig.TYPES.getPreferredName(), DatafeedConfig.QUERY.getPreferredName(), DatafeedConfig.SCROLL_SIZE.getPreferredName(), DatafeedConfig.AGGREGATIONS.getPreferredName(), diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/token/InvalidateTokenRequest.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/token/InvalidateTokenRequest.java index de3b73ec4afce..43348456d2306 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/token/InvalidateTokenRequest.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/token/InvalidateTokenRequest.java @@ -5,7 +5,6 @@ */ package org.elasticsearch.xpack.core.security.action.token; -import org.elasticsearch.Version; import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.common.Nullable; @@ -137,57 +136,19 @@ public void setUserName(String userName) { @Override public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); - if (out.getVersion().before(Version.V_7_0_0)) { - if (Strings.isNullOrEmpty(tokenString)) { - throw new IllegalArgumentException("token is required for versions < v6.6.0"); - } - out.writeString(tokenString); - } else { - out.writeOptionalString(tokenString); - } - if (out.getVersion().onOrAfter(Version.V_6_2_0)) { - if (out.getVersion().before(Version.V_7_0_0)) { - if (tokenType == null) { - throw new IllegalArgumentException("token type is not optional for versions > v6.2.0 and < v6.6.0"); - } - out.writeVInt(tokenType.ordinal()); - } else { - out.writeOptionalVInt(tokenType == null ? null : tokenType.ordinal()); - } - } else if (tokenType == Type.REFRESH_TOKEN) { - throw new IllegalArgumentException("refresh token invalidation cannot be serialized with version [" + out.getVersion() + "]"); - } - if (out.getVersion().onOrAfter(Version.V_7_0_0)) { - out.writeOptionalString(realmName); - out.writeOptionalString(userName); - } else if (realmName != null || userName != null) { - throw new IllegalArgumentException( - "realm or user token invalidation cannot be serialized with version [" + out.getVersion() + "]"); - } + out.writeOptionalString(tokenString); + out.writeOptionalVInt(tokenType == null ? 
null : tokenType.ordinal()); + out.writeOptionalString(realmName); + out.writeOptionalString(userName); } @Override public void readFrom(StreamInput in) throws IOException { super.readFrom(in); - if (in.getVersion().before(Version.V_7_0_0)) { - tokenString = in.readString(); - } else { - tokenString = in.readOptionalString(); - } - if (in.getVersion().onOrAfter(Version.V_6_2_0)) { - if (in.getVersion().before(Version.V_7_0_0)) { - int type = in.readVInt(); - tokenType = Type.values()[type]; - } else { - Integer type = in.readOptionalVInt(); - tokenType = type == null ? null : Type.values()[type]; - } - } else { - tokenType = Type.ACCESS_TOKEN; - } - if (in.getVersion().onOrAfter(Version.V_7_0_0)) { - realmName = in.readOptionalString(); - userName = in.readOptionalString(); - } + tokenString = in.readOptionalString(); + Integer type = in.readOptionalVInt(); + tokenType = type == null ? null : Type.values()[type]; + realmName = in.readOptionalString(); + userName = in.readOptionalString(); } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/token/InvalidateTokenResponse.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/token/InvalidateTokenResponse.java index 886caeac370fa..9f11c48c96aba 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/token/InvalidateTokenResponse.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/token/InvalidateTokenResponse.java @@ -5,7 +5,6 @@ */ package org.elasticsearch.xpack.core.security.action.token; -import org.elasticsearch.Version; import org.elasticsearch.action.ActionResponse; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -14,8 +13,6 @@ import org.elasticsearch.xpack.core.security.authc.support.TokensInvalidationResult; import java.io.IOException; -import java.util.Arrays; -import java.util.Collections; import java.util.Objects; /** @@ -35,35 +32,16 @@ public TokensInvalidationResult getResult() { return result; } - private boolean isCreated() { - return result.getInvalidatedTokens().size() > 0 - && result.getPreviouslyInvalidatedTokens().isEmpty() - && result.getErrors().isEmpty(); - } - @Override public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); - if (out.getVersion().before(Version.V_7_0_0)) { - out.writeBoolean(isCreated()); - } else { - result.writeTo(out); - } + result.writeTo(out); } @Override public void readFrom(StreamInput in) throws IOException { super.readFrom(in); - if (in.getVersion().before(Version.V_7_0_0)) { - final boolean created = in.readBoolean(); - if (created) { - result = new TokensInvalidationResult(Arrays.asList(""), Collections.emptyList(), Collections.emptyList(), 0); - } else { - result = new TokensInvalidationResult(Collections.emptyList(), Arrays.asList(""), Collections.emptyList(), 0); - } - } else { - result = new TokensInvalidationResult(in); - } + result = new TokensInvalidationResult(in); } @Override diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/DefaultAuthenticationFailureHandler.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/DefaultAuthenticationFailureHandler.java index 4f548f80dd283..f27d95dc868ab 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/DefaultAuthenticationFailureHandler.java +++ 
b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/DefaultAuthenticationFailureHandler.java @@ -99,12 +99,12 @@ public ElasticsearchSecurityException exceptionProcessingRequest(TransportMessag @Override public ElasticsearchSecurityException missingToken(RestRequest request, ThreadContext context) { - return createAuthenticationError("missing authentication token for REST request [{}]", null, request.uri()); + return createAuthenticationError("missing authentication credentials for REST request [{}]", null, request.uri()); } @Override public ElasticsearchSecurityException missingToken(TransportMessage message, String action, ThreadContext context) { - return createAuthenticationError("missing authentication token for action [{}]", null, action); + return createAuthenticationError("missing authentication credentials for action [{}]", null, action); } @Override diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/support/TokensInvalidationResult.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/support/TokensInvalidationResult.java index cfa83b63ed54f..117ef3316e1b9 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/support/TokensInvalidationResult.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/support/TokensInvalidationResult.java @@ -79,8 +79,6 @@ public int getAttemptCount() { @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject() - //Remove created after PR is backported to 6.x - .field("created", isCreated()) .field("invalidated_tokens", invalidatedTokens.size()) .field("previously_invalidated_tokens", previouslyInvalidatedTokens.size()) .field("error_count", errors.size()); @@ -104,10 +102,4 @@ public void writeTo(StreamOutput out) throws IOException { out.writeCollection(errors, StreamOutput::writeException); out.writeVInt(attemptCount); } - - private boolean isCreated() { - return this.getInvalidatedTokens().size() > 0 - && this.getPreviouslyInvalidatedTokens().isEmpty() - && this.getErrors().isEmpty(); - } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/IndexPrivilege.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/IndexPrivilege.java index d6549df5f9d5a..3a92c08704e41 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/IndexPrivilege.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/IndexPrivilege.java @@ -61,7 +61,7 @@ public final class IndexPrivilege extends Privilege { ClusterSearchShardsAction.NAME, TypesExistsAction.NAME, ValidateQueryAction.NAME + "*", GetSettingsAction.NAME, ExplainLifecycleAction.NAME); private static final Automaton MANAGE_FOLLOW_INDEX_AUTOMATON = patterns(PutFollowAction.NAME, UnfollowAction.NAME, - CloseIndexAction.NAME); + CloseIndexAction.NAME + "*"); private static final Automaton MANAGE_ILM_AUTOMATON = patterns("indices:admin/ilm/*"); public static final IndexPrivilege NONE = new IndexPrivilege("none", Automatons.EMPTY); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/SSLService.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/SSLService.java index 79ccaeaae1a9e..900d9468e2aa8 100644 --- 
a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/SSLService.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/SSLService.java @@ -52,6 +52,7 @@ import java.util.Objects; import java.util.Optional; import java.util.Set; +import java.util.function.Supplier; import java.util.stream.Collectors; /** @@ -201,9 +202,14 @@ SSLIOSessionStrategy sslIOSessionStrategy(SSLContext sslContext, String[] protoc * @return Never {@code null}. */ public SSLSocketFactory sslSocketFactory(SSLConfiguration configuration) { - SSLSocketFactory socketFactory = sslContext(configuration).getSocketFactory(); - return new SecuritySSLSocketFactory(socketFactory, configuration.supportedProtocols().toArray(Strings.EMPTY_ARRAY), - supportedCiphers(socketFactory.getSupportedCipherSuites(), configuration.cipherSuites(), false)); + final SSLContextHolder contextHolder = sslContextHolder(configuration); + SSLSocketFactory socketFactory = contextHolder.sslContext().getSocketFactory(); + final SecuritySSLSocketFactory securitySSLSocketFactory = new SecuritySSLSocketFactory( + () -> contextHolder.sslContext().getSocketFactory(), + configuration.supportedProtocols().toArray(Strings.EMPTY_ARRAY), + supportedCiphers(socketFactory.getSupportedCipherSuites(), configuration.cipherSuites(), false)); + contextHolder.addReloadListener(securitySSLSocketFactory::reload); + return securitySSLSocketFactory; } /** @@ -463,12 +469,15 @@ public Set getLoadedCertificates() throws GeneralSecurityExcept */ private static class SecuritySSLSocketFactory extends SSLSocketFactory { - private final SSLSocketFactory delegate; + private final Supplier delegateSupplier; private final String[] supportedProtocols; private final String[] ciphers; - SecuritySSLSocketFactory(SSLSocketFactory delegate, String[] supportedProtocols, String[] ciphers) { - this.delegate = delegate; + private volatile SSLSocketFactory delegate; + + SecuritySSLSocketFactory(Supplier delegateSupplier, String[] supportedProtocols, String[] ciphers) { + this.delegateSupplier = delegateSupplier; + this.delegate = this.delegateSupplier.get(); this.supportedProtocols = supportedProtocols; this.ciphers = ciphers; } @@ -525,6 +534,11 @@ public Socket createSocket(InetAddress address, int port, InetAddress localAddre return sslSocket; } + public void reload() { + final SSLSocketFactory newDelegate = delegateSupplier.get(); + this.delegate = newDelegate; + } + private void configureSSLSocket(SSLSocket socket) { SSLParameters parameters = new SSLParameters(ciphers, supportedProtocols); // we use the cipher suite order so that we can prefer the ciphers we set first in the list @@ -543,12 +557,14 @@ final class SSLContextHolder { private final KeyConfig keyConfig; private final TrustConfig trustConfig; private final SSLConfiguration sslConfiguration; + private final List reloadListeners; SSLContextHolder(SSLContext context, SSLConfiguration sslConfiguration) { this.context = context; this.sslConfiguration = sslConfiguration; this.keyConfig = sslConfiguration.keyConfig(); this.trustConfig = sslConfiguration.trustConfig(); + this.reloadListeners = new ArrayList<>(); } SSLContext sslContext() { @@ -559,6 +575,7 @@ synchronized void reload() { invalidateSessions(context.getClientSessionContext()); invalidateSessions(context.getServerSessionContext()); reloadSslContext(); + this.reloadListeners.forEach(Runnable::run); } private void reloadSslContext() { @@ -592,6 +609,10 @@ X509ExtendedTrustManager getEmptyTrustManager() throws GeneralSecurityException, 
trustManagerFactory.init(keyStore); return (X509ExtendedTrustManager) trustManagerFactory.getTrustManagers()[0]; } + + public void addReloadListener(Runnable listener) { + this.reloadListeners.add(listener); + } } /** diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/license/LicenseTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/license/LicenseTests.java new file mode 100644 index 0000000000000..e6cc8f2bd89f2 --- /dev/null +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/license/LicenseTests.java @@ -0,0 +1,118 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.license; + +import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.common.bytes.BytesArray; +import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.test.ESTestCase; + +import java.nio.BufferUnderflowException; +import java.nio.charset.StandardCharsets; + +import static org.hamcrest.CoreMatchers.containsString; +import static org.hamcrest.CoreMatchers.equalTo; +import static org.hamcrest.CoreMatchers.instanceOf; + +public class LicenseTests extends ESTestCase { + + public void testFromXContent() throws Exception { + + String licenseString = "{\"license\":" + + "{\"uid\":\"4056779d-b823-4c12-a9cb-efa4a8d8c422\"," + + "\"type\":\"gold\"," + + "\"issue_date_in_millis\":1546589020459," + + "\"expiry_date_in_millis\":1546596340459," + + "\"max_nodes\":5," + + "\"issued_to\":\"customer\"," + + "\"issuer\":\"elasticsearch\"," + + "\"signature\":\"AAAAAgAAAA34V2kfTJVtvdL2LttwAAABmFJ6NGRnbEM3WVQrZVQwNkdKQmR1VytlMTMyM1J0dTZ1WGwyY2ZCVFhqMGtJU2gzZ3pnNTVpOW" + + "F5Y1NaUkwyN2VsTEtCYnlZR2c5WWtjQ0phaDlhRjlDUXViUmUwMWhjSkE2TFcwSGdneTJHbUV4N2RHUWJxV20ybjRsZHRzV2xkN0ZmdDlYblJmNVcxMlBWeU81" + + "V1hLUm1EK0V1dmF3cFdlSGZzTU5SZE1qUmFra3JkS1hCanBWVmVTaFFwV3BVZERzeG9Sci9rYnlJK2toODZXY09tNmFHUVNUL3IyUHExV3VSTlBneWNJcFQ0bX" + + "l0cmhNNnRwbE1CWE4zWjJ5eGFuWFo0NGhsb3B5WFd1eTdYbFFWQkxFVFFPSlBERlB0eVVJYXVSZ0lsR2JpRS9rN1h4MSsvNUpOcGN6cU1NOHN1cHNtSTFIUGN1" + + "bWNGNEcxekhrblhNOXZ2VEQvYmRzQUFwbytUZEpRR3l6QU5oS2ZFSFdSbGxxNDZyZ0xvUHIwRjdBL2JqcnJnNGFlK09Cek9pYlJ5Umc9PQAAAQAth77fQLF7CC" + + "EL7wA6Z0/UuRm/weECcsjW/50kBnPLO8yEs+9/bPa5LSU0bF6byEXOVeO0ebUQfztpjulbXh8TrBDSG+6VdxGtohPo2IYPBaXzGs3LOOor6An/lhptxBWdwYmf" + + "bcp0m8mnXZh1vN9rmbTsZXnhBIoPTaRDwUBi3vJ3Ms3iLaEm4S8Slrfmtht2jUjgGZ2vAeZ9OHU2YsGtrSpz6f\"}"; + License license = License.fromSource(new BytesArray(licenseString.getBytes(StandardCharsets.UTF_8)), + XContentType.JSON); + assertThat(license.type(), equalTo("gold")); + assertThat(license.uid(), equalTo("4056779d-b823-4c12-a9cb-efa4a8d8c422")); + assertThat(license.issuer(), equalTo("elasticsearch")); + assertThat(license.issuedTo(), equalTo("customer")); + assertThat(license.expiryDate(), equalTo(1546596340459L)); + assertThat(license.issueDate(), equalTo(1546589020459L)); + } + + public void testNotEnoughBytesFromXContent() throws Exception { + + String licenseString = "{\"license\": " + + "{\"uid\":\"4056779d-b823-4c12-a9cb-efa4a8d8c422\"," + + "\"type\":\"gold\"," + + "\"issue_date_in_millis\":1546589020459," + + "\"expiry_date_in_millis\":1546596340459," + + "\"max_nodes\":5," + + "\"issued_to\":\"customer\"," + + "\"issuer\":\"elasticsearch\"," + + "\"signature\":\"AA\"}" + + "}"; + ElasticsearchException exception = + 
expectThrows(ElasticsearchException.class, + () -> { + License.fromSource(new BytesArray(licenseString.getBytes(StandardCharsets.UTF_8)), + XContentType.JSON); + }); + assertThat(exception.getMessage(), containsString("malformed signature for license [4056779d-b823-4c12-a9cb-efa4a8d8c422]")); + assertThat(exception.getCause(), instanceOf(BufferUnderflowException.class)); + } + + public void testMalformedSignatureFromXContent() throws Exception { + + String licenseString = "{\"license\": " + + "{\"uid\":\"4056779d-b823-4c12-a9cb-efa4a8d8c422\"," + + "\"type\":\"gold\"," + + "\"issue_date_in_millis\":1546589020459," + + "\"expiry_date_in_millis\":1546596340459," + + "\"max_nodes\":5," + + "\"issued_to\":\"customer\"," + + "\"issuer\":\"elasticsearch\"," + + "\"signature\":\"" + randomAlphaOfLength(10) + "\"}" + + "}"; + ElasticsearchException exception = + expectThrows(ElasticsearchException.class, + () -> { + License.fromSource(new BytesArray(licenseString.getBytes(StandardCharsets.UTF_8)), + XContentType.JSON); + }); + } + + public void testUnableToBase64DecodeFromXContent() throws Exception { + + String licenseString = "{\"license\":" + + "{\"uid\":\"4056779d-b823-4c12-a9cb-efa4a8d8c422\"," + + "\"type\":\"gold\"," + + "\"issue_date_in_millis\":1546589020459," + + "\"expiry_date_in_millis\":1546596340459," + + "\"max_nodes\":5," + + "\"issued_to\":\"customer\"," + + "\"issuer\":\"elasticsearch\"," + + "\"signature\":\"AAAAAgAAAA34V2kfTJVtvdL2LttwAAABmFJ6NGRnbEM3WVQrZVQwNkdKQmR1VytlMTMyM1J0dTZ1WGwyY2ZCVFhqMGtJU2gzZ3pnNTVpOW" + + "F5Y1NaUkwyN2VsTEtCYnlZR2c5WWtjQ0phaDlhRjlDUXViUmUwMWhjSkE2TFcwSGdneTJHbUV4N2RHUWJxV20ybjRsZHRzV2xkN0ZmdDlYblJmNVcxMlBWeU81" + + "V1hLUm1EK0V1dmF3cFdlSGZzTU5SZE1qUmFra3JkS1hCanBWVmVTaFFwV3BVZERzeG9Sci9rYnlJK2toODZXY09tNmFHUVNUL3IyUHExV3VSTlBneWNJcFQ0bX" + + "l0cmhNNnRwbE1CWE4zWjJ5eGFuWFo0NGhsb3B5WFd1eTdYbFFWQkxFVFFPSlBERlB0eVVJYXVSZ0lsR2JpRS9rN1h4MSsvNUpOcGN6cU1NOHN1cHNtSTFIUGN1" + + "bWNGNEcxekhrblhNOXZ2VEQvYmRzQUFwbytUZEpRR3l6QU5oS2ZFSFdSbGxxNDZyZ0xvUHIwRjdBL2JqcnJnNGFlK09Cek9pYlJ5Umc9PQAAAQAth77fQLF7CC" + + "EL7wA6Z0/UuRm/weECcsjW/50kBnPLO8yEs+9/bPa5LSU0bF6byEXOVeO0ebUQfztpjulbXh8TrBDSG+6VdxGtohPo2IYPBaXzGs3LOOor6An/lhptxBWdwYmf" + + "+xHAQ8tyvRqP5G+PRU7tiluEwR/eyHGZV2exdJNzmoGzdPSWwueBM5HK2GexORICH+UFI4cuGz444/hL2MMM1RdpVWQkT0SJ6D9x/VuSmHuYPdtX59Pp41LXvl" + + "bcp0m8mnXZh1vN9rmbTsZXnhBIoPTaRDwUBi3vJ3Ms3iLaEm4S8Slrfmtht2jUjgGZ2vAeZ9OHU2YsGtrSpz6fd\"}"; + ElasticsearchException exception = + expectThrows(ElasticsearchException.class, + () -> { + License.fromSource(new BytesArray(licenseString.getBytes(StandardCharsets.UTF_8)), + XContentType.JSON); + }); + assertThat(exception.getMessage(), containsString("malformed signature for license [4056779d-b823-4c12-a9cb-efa4a8d8c422]")); + assertThat(exception.getCause(), instanceOf(IllegalArgumentException.class)); + } +} diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/snapshots/SourceOnlySnapshotShardTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/snapshots/SourceOnlySnapshotShardTests.java index f1de51c95b6e9..28244b523e129 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/snapshots/SourceOnlySnapshotShardTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/snapshots/SourceOnlySnapshotShardTests.java @@ -46,6 +46,7 @@ import org.elasticsearch.index.fieldvisitor.FieldsVisitor; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.SeqNoFieldMapper; +import org.elasticsearch.index.mapper.SourceToParse; import 
org.elasticsearch.index.mapper.Uid; import org.elasticsearch.index.seqno.SeqNoStats; import org.elasticsearch.index.seqno.SequenceNumbers; @@ -67,8 +68,6 @@ import java.util.concurrent.Callable; import java.util.concurrent.ExecutionException; -import static org.elasticsearch.index.mapper.SourceToParse.source; - public class SourceOnlySnapshotShardTests extends IndexShardTestCase { public void testSourceIncomplete() throws IOException { @@ -293,9 +292,9 @@ public IndexShard reindex(DirectoryReader reader, MappingMetaData mapping) throw Uid uid = rootFieldsVisitor.uid(); BytesReference source = rootFieldsVisitor.source(); assert source != null : "_source is null but should have been filtered out at snapshot time"; - Engine.Result result = targetShard.applyIndexOperationOnPrimary(Versions.MATCH_ANY, VersionType.INTERNAL, source - (index, uid.type(), uid.id(), source, XContentHelper.xContentType(source)) - .routing(rootFieldsVisitor.routing()), SequenceNumbers.UNASSIGNED_SEQ_NO, 0, 1, false); + Engine.Result result = targetShard.applyIndexOperationOnPrimary(Versions.MATCH_ANY, VersionType.INTERNAL, + new SourceToParse(index, uid.type(), uid.id(), source, XContentHelper.xContentType(source), + rootFieldsVisitor.routing()), SequenceNumbers.UNASSIGNED_SEQ_NO, 0, 1, false); if (result.getResultType() != Engine.Result.Type.SUCCESS) { throw new IllegalStateException("failed applying post restore operation result: " + result .getResultType(), result.getFailure()); diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/indexlifecycle/FreezeActionTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/indexlifecycle/FreezeActionTests.java new file mode 100644 index 0000000000000..9ea68950f4cf5 --- /dev/null +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/indexlifecycle/FreezeActionTests.java @@ -0,0 +1,45 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.xpack.core.indexlifecycle; + +import org.elasticsearch.common.io.stream.Writeable.Reader; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.xpack.core.indexlifecycle.Step.StepKey; + +import java.io.IOException; +import java.util.List; + +public class FreezeActionTests extends AbstractActionTestCase { + + @Override + protected FreezeAction doParseInstance(XContentParser parser) throws IOException { + return FreezeAction.parse(parser); + } + + @Override + protected FreezeAction createTestInstance() { + return new FreezeAction(); + } + + @Override + protected Reader instanceReader() { + return FreezeAction::new; + } + + public void testToSteps() { + FreezeAction action = createTestInstance(); + String phase = randomAlphaOfLengthBetween(1, 10); + StepKey nextStepKey = new StepKey(randomAlphaOfLengthBetween(1, 10), randomAlphaOfLengthBetween(1, 10), + randomAlphaOfLengthBetween(1, 10)); + List steps = action.toSteps(null, phase, nextStepKey); + assertNotNull(steps); + assertEquals(1, steps.size()); + StepKey expectedFirstStepKey = new StepKey(phase, FreezeAction.NAME, FreezeStep.NAME); + FreezeStep firstStep = (FreezeStep) steps.get(0); + assertEquals(expectedFirstStepKey, firstStep.getKey()); + assertEquals(nextStepKey, firstStep.getNextStepKey()); + } +} diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/indexlifecycle/FreezeStepTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/indexlifecycle/FreezeStepTests.java new file mode 100644 index 0000000000000..94ca2c2635c63 --- /dev/null +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/indexlifecycle/FreezeStepTests.java @@ -0,0 +1,153 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.xpack.core.indexlifecycle; + + +import org.apache.lucene.util.SetOnce; +import org.elasticsearch.Version; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.support.master.AcknowledgedResponse; +import org.elasticsearch.client.AdminClient; +import org.elasticsearch.client.Client; +import org.elasticsearch.client.IndicesAdminClient; +import org.elasticsearch.cluster.metadata.IndexMetaData; +import org.elasticsearch.xpack.core.action.TransportFreezeIndexAction; +import org.elasticsearch.xpack.core.indexlifecycle.Step.StepKey; +import org.junit.Before; +import org.mockito.Mockito; +import org.mockito.invocation.InvocationOnMock; +import org.mockito.stubbing.Answer; + +import static org.hamcrest.Matchers.equalTo; + +public class FreezeStepTests extends AbstractStepTestCase { + + private Client client; + + @Before + public void setup() { + client = Mockito.mock(Client.class); + } + + @Override + public FreezeStep createRandomInstance() { + StepKey stepKey = randomStepKey(); + StepKey nextStepKey = randomStepKey(); + + return new FreezeStep(stepKey, nextStepKey, client); + } + + @Override + public FreezeStep mutateInstance(FreezeStep instance) { + StepKey key = instance.getKey(); + StepKey nextKey = instance.getNextStepKey(); + + switch (between(0, 1)) { + case 0: + key = new StepKey(key.getPhase(), key.getAction(), key.getName() + randomAlphaOfLength(5)); + break; + case 1: + nextKey = new StepKey(key.getPhase(), key.getAction(), key.getName() + randomAlphaOfLength(5)); + break; + default: + throw new AssertionError("Illegal randomisation branch"); + } + + return new FreezeStep(key, nextKey, instance.getClient()); + } + + @Override + public FreezeStep copyInstance(FreezeStep instance) { + return new FreezeStep(instance.getKey(), instance.getNextStepKey(), instance.getClient()); + } + + public void testIndexSurvives() { + assertTrue(createRandomInstance().indexSurvives()); + } + + public void testFreeze() { + IndexMetaData indexMetaData = IndexMetaData.builder(randomAlphaOfLength(10)).settings(settings(Version.CURRENT)) + .numberOfShards(randomIntBetween(1, 5)).numberOfReplicas(randomIntBetween(0, 5)).build(); + + AdminClient adminClient = Mockito.mock(AdminClient.class); + IndicesAdminClient indicesClient = Mockito.mock(IndicesAdminClient.class); + + Mockito.when(client.admin()).thenReturn(adminClient); + Mockito.when(adminClient.indices()).thenReturn(indicesClient); + Mockito.doAnswer(invocation -> { + assertSame(invocation.getArguments()[0], TransportFreezeIndexAction.FreezeIndexAction.INSTANCE); + TransportFreezeIndexAction.FreezeRequest request = (TransportFreezeIndexAction.FreezeRequest) invocation.getArguments()[1]; + @SuppressWarnings("unchecked") + ActionListener listener = (ActionListener) invocation.getArguments()[2]; + assertNotNull(request); + assertEquals(1, request.indices().length); + assertEquals(indexMetaData.getIndex().getName(), request.indices()[0]); + listener.onResponse(null); + return null; + }).when(indicesClient).execute(Mockito.any(), Mockito.any(), Mockito.any()); + + SetOnce actionCompleted = new SetOnce<>(); + + FreezeStep step = createRandomInstance(); + step.performAction(indexMetaData, null, new AsyncActionStep.Listener() { + @Override + public void onResponse(boolean complete) { + actionCompleted.set(complete); + } + + @Override + public void onFailure(Exception e) { + throw new AssertionError(e); + } + }); + + assertThat(actionCompleted.get(), equalTo(true)); + + Mockito.verify(client, 
Mockito.only()).admin(); + Mockito.verify(adminClient, Mockito.only()).indices(); + Mockito.verify(indicesClient, Mockito.only()).execute(Mockito.any(), Mockito.any(), Mockito.any()); + } + + public void testExceptionThrown() { + IndexMetaData indexMetaData = IndexMetaData.builder(randomAlphaOfLength(10)).settings(settings(Version.CURRENT)) + .numberOfShards(randomIntBetween(1, 5)).numberOfReplicas(randomIntBetween(0, 5)).build(); + Exception exception = new RuntimeException(); + + AdminClient adminClient = Mockito.mock(AdminClient.class); + IndicesAdminClient indicesClient = Mockito.mock(IndicesAdminClient.class); + + Mockito.when(client.admin()).thenReturn(adminClient); + Mockito.when(adminClient.indices()).thenReturn(indicesClient); + Mockito.doAnswer(new Answer() { + + @Override + public Void answer(InvocationOnMock invocation) throws Throwable { + @SuppressWarnings("unchecked") + ActionListener listener = (ActionListener) invocation.getArguments()[2]; + listener.onFailure(exception); + return null; + } + + }).when(indicesClient).execute(Mockito.any(), Mockito.any(), Mockito.any()); + + SetOnce exceptionThrown = new SetOnce<>(); + FreezeStep step = createRandomInstance(); + step.performAction(indexMetaData, null, new AsyncActionStep.Listener() { + @Override + public void onResponse(boolean complete) { + throw new AssertionError("Unexpected method call"); + } + + @Override + public void onFailure(Exception e) { + assertEquals(exception, e); + exceptionThrown.set(true); + } + }); + + assertThat(exceptionThrown.get(), equalTo(true)); + } +} diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/indexlifecycle/LifecyclePolicyMetadataTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/indexlifecycle/LifecyclePolicyMetadataTests.java index 5cb75e132ce92..a2ee5e3e9030d 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/indexlifecycle/LifecyclePolicyMetadataTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/indexlifecycle/LifecyclePolicyMetadataTests.java @@ -43,7 +43,8 @@ protected NamedWriteableRegistry getNamedWriteableRegistry() { new NamedWriteableRegistry.Entry(LifecycleAction.class, ForceMergeAction.NAME, ForceMergeAction::new), new NamedWriteableRegistry.Entry(LifecycleAction.class, ReadOnlyAction.NAME, ReadOnlyAction::new), new NamedWriteableRegistry.Entry(LifecycleAction.class, RolloverAction.NAME, RolloverAction::new), - new NamedWriteableRegistry.Entry(LifecycleAction.class, ShrinkAction.NAME, ShrinkAction::new) + new NamedWriteableRegistry.Entry(LifecycleAction.class, ShrinkAction.NAME, ShrinkAction::new), + new NamedWriteableRegistry.Entry(LifecycleAction.class, FreezeAction.NAME, FreezeAction::new) )); } @@ -58,7 +59,8 @@ protected NamedXContentRegistry xContentRegistry() { new NamedXContentRegistry.Entry(LifecycleAction.class, new ParseField(ForceMergeAction.NAME), ForceMergeAction::parse), new NamedXContentRegistry.Entry(LifecycleAction.class, new ParseField(ReadOnlyAction.NAME), ReadOnlyAction::parse), new NamedXContentRegistry.Entry(LifecycleAction.class, new ParseField(RolloverAction.NAME), RolloverAction::parse), - new NamedXContentRegistry.Entry(LifecycleAction.class, new ParseField(ShrinkAction.NAME), ShrinkAction::parse) + new NamedXContentRegistry.Entry(LifecycleAction.class, new ParseField(ShrinkAction.NAME), ShrinkAction::parse), + new NamedXContentRegistry.Entry(LifecycleAction.class, new ParseField(FreezeAction.NAME), FreezeAction::parse) )); return new 
NamedXContentRegistry(entries); } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/indexlifecycle/LifecyclePolicyTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/indexlifecycle/LifecyclePolicyTests.java index 9ee7ee8d0acd0..cb952420a408c 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/indexlifecycle/LifecyclePolicyTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/indexlifecycle/LifecyclePolicyTests.java @@ -52,7 +52,8 @@ protected NamedWriteableRegistry getNamedWriteableRegistry() { new NamedWriteableRegistry.Entry(LifecycleAction.class, ForceMergeAction.NAME, ForceMergeAction::new), new NamedWriteableRegistry.Entry(LifecycleAction.class, ReadOnlyAction.NAME, ReadOnlyAction::new), new NamedWriteableRegistry.Entry(LifecycleAction.class, RolloverAction.NAME, RolloverAction::new), - new NamedWriteableRegistry.Entry(LifecycleAction.class, ShrinkAction.NAME, ShrinkAction::new) + new NamedWriteableRegistry.Entry(LifecycleAction.class, ShrinkAction.NAME, ShrinkAction::new), + new NamedWriteableRegistry.Entry(LifecycleAction.class, FreezeAction.NAME, FreezeAction::new) )); } @@ -67,7 +68,8 @@ protected NamedXContentRegistry xContentRegistry() { new NamedXContentRegistry.Entry(LifecycleAction.class, new ParseField(ForceMergeAction.NAME), ForceMergeAction::parse), new NamedXContentRegistry.Entry(LifecycleAction.class, new ParseField(ReadOnlyAction.NAME), ReadOnlyAction::parse), new NamedXContentRegistry.Entry(LifecycleAction.class, new ParseField(RolloverAction.NAME), RolloverAction::parse), - new NamedXContentRegistry.Entry(LifecycleAction.class, new ParseField(ShrinkAction.NAME), ShrinkAction::parse) + new NamedXContentRegistry.Entry(LifecycleAction.class, new ParseField(ShrinkAction.NAME), ShrinkAction::parse), + new NamedXContentRegistry.Entry(LifecycleAction.class, new ParseField(FreezeAction.NAME), FreezeAction::parse) )); return new NamedXContentRegistry(entries); } @@ -112,6 +114,8 @@ public static LifecyclePolicy randomTimeseriesLifecyclePolicyWithAllPhases(@Null return RolloverActionTests.randomInstance(); case ShrinkAction.NAME: return ShrinkActionTests.randomInstance(); + case FreezeAction.NAME: + return new FreezeAction(); default: throw new IllegalArgumentException("invalid action [" + action + "]"); }}; @@ -158,6 +162,8 @@ public static LifecyclePolicy randomTimeseriesLifecyclePolicy(@Nullable String l return RolloverActionTests.randomInstance(); case ShrinkAction.NAME: return ShrinkActionTests.randomInstance(); + case FreezeAction.NAME: + return new FreezeAction(); default: throw new IllegalArgumentException("invalid action [" + action + "]"); }}; diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/indexlifecycle/TimeseriesLifecycleTypeTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/indexlifecycle/TimeseriesLifecycleTypeTests.java index 2f0c2f8d18b33..8b9a06fbcb2c6 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/indexlifecycle/TimeseriesLifecycleTypeTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/indexlifecycle/TimeseriesLifecycleTypeTests.java @@ -38,6 +38,7 @@ public class TimeseriesLifecycleTypeTests extends ESTestCase { private static final RolloverAction TEST_ROLLOVER_ACTION = new RolloverAction(new ByteSizeValue(1), null, null); private static final ShrinkAction TEST_SHRINK_ACTION = new ShrinkAction(1); private static final ReadOnlyAction TEST_READ_ONLY_ACTION = new 
ReadOnlyAction(); + private static final FreezeAction TEST_FREEZE_ACTION = new FreezeAction(); public void testValidatePhases() { boolean invalid = randomBoolean(); @@ -355,10 +356,11 @@ public void testGetNextActionName() { // Cold Phase assertNextActionName("cold", AllocateAction.NAME, null, new String[] { AllocateAction.NAME }); - assertNextActionName("cold", AllocateAction.NAME, null, new String[] {}); - assertNextActionName("cold", AllocateAction.NAME, null, new String[] {}); + assertNextActionName("cold", AllocateAction.NAME, FreezeAction.NAME, FreezeAction.NAME); + assertNextActionName("cold", FreezeAction.NAME, null); + assertNextActionName("cold", FreezeAction.NAME, null, AllocateAction.NAME); assertInvalidAction("cold", "foo", new String[] { AllocateAction.NAME }); assertInvalidAction("cold", DeleteAction.NAME, new String[] { AllocateAction.NAME }); @@ -415,6 +417,8 @@ private ConcurrentMap convertActionNamesToActions(Strin return new RolloverAction(ByteSizeValue.parseBytesSizeValue("0b", "test"), TimeValue.ZERO, 1L); case ShrinkAction.NAME: return new ShrinkAction(1); + case FreezeAction.NAME: + return new FreezeAction(); } return new DeleteAction(); }).collect(Collectors.toConcurrentMap(LifecycleAction::getWriteableName, Function.identity())); @@ -476,6 +480,8 @@ private LifecycleAction getTestAction(String actionName) { return TEST_ROLLOVER_ACTION; case ShrinkAction.NAME: return TEST_SHRINK_ACTION; + case FreezeAction.NAME: + return TEST_FREEZE_ACTION; default: throw new IllegalArgumentException("unsupported timeseries phase action [" + actionName + "]"); } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/indexlifecycle/action/PutLifecycleRequestTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/indexlifecycle/action/PutLifecycleRequestTests.java index 5df60a7333143..cb547d179d5d5 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/indexlifecycle/action/PutLifecycleRequestTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/indexlifecycle/action/PutLifecycleRequestTests.java @@ -14,6 +14,7 @@ import org.elasticsearch.xpack.core.indexlifecycle.AllocateAction; import org.elasticsearch.xpack.core.indexlifecycle.DeleteAction; import org.elasticsearch.xpack.core.indexlifecycle.ForceMergeAction; +import org.elasticsearch.xpack.core.indexlifecycle.FreezeAction; import org.elasticsearch.xpack.core.indexlifecycle.LifecycleAction; import org.elasticsearch.xpack.core.indexlifecycle.LifecyclePolicy; import org.elasticsearch.xpack.core.indexlifecycle.LifecyclePolicyTests; @@ -64,7 +65,8 @@ protected NamedWriteableRegistry getNamedWriteableRegistry() { new NamedWriteableRegistry.Entry(LifecycleAction.class, ForceMergeAction.NAME, ForceMergeAction::new), new NamedWriteableRegistry.Entry(LifecycleAction.class, ReadOnlyAction.NAME, ReadOnlyAction::new), new NamedWriteableRegistry.Entry(LifecycleAction.class, RolloverAction.NAME, RolloverAction::new), - new NamedWriteableRegistry.Entry(LifecycleAction.class, ShrinkAction.NAME, ShrinkAction::new) + new NamedWriteableRegistry.Entry(LifecycleAction.class, ShrinkAction.NAME, ShrinkAction::new), + new NamedWriteableRegistry.Entry(LifecycleAction.class, FreezeAction.NAME, FreezeAction::new) )); } @@ -79,7 +81,8 @@ protected NamedXContentRegistry xContentRegistry() { new NamedXContentRegistry.Entry(LifecycleAction.class, new ParseField(ForceMergeAction.NAME), ForceMergeAction::parse), new NamedXContentRegistry.Entry(LifecycleAction.class, new 
ParseField(ReadOnlyAction.NAME), ReadOnlyAction::parse), new NamedXContentRegistry.Entry(LifecycleAction.class, new ParseField(RolloverAction.NAME), RolloverAction::parse), - new NamedXContentRegistry.Entry(LifecycleAction.class, new ParseField(ShrinkAction.NAME), ShrinkAction::parse) + new NamedXContentRegistry.Entry(LifecycleAction.class, new ParseField(ShrinkAction.NAME), ShrinkAction::parse), + new NamedXContentRegistry.Entry(LifecycleAction.class, new ParseField(FreezeAction.NAME), FreezeAction::parse) )); return new NamedXContentRegistry(entries); } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/MlUpgradeRequestTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/MlUpgradeRequestTests.java new file mode 100644 index 0000000000000..227fc20ec9688 --- /dev/null +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/MlUpgradeRequestTests.java @@ -0,0 +1,28 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.core.ml.action; + +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.test.AbstractWireSerializingTestCase; + + +public class MlUpgradeRequestTests extends AbstractWireSerializingTestCase { + + @Override + protected MlUpgradeAction.Request createTestInstance() { + MlUpgradeAction.Request request = new MlUpgradeAction.Request(); + if (randomBoolean()) { + request.setReindexBatchSize(randomIntBetween(1, 10_000)); + } + return request; + } + + @Override + protected Writeable.Reader instanceReader() { + return MlUpgradeAction.Request::new; + } + +} diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/PutDatafeedActionRequestTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/PutDatafeedActionRequestTests.java index f01a360f909a0..d688ee9b5b90d 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/PutDatafeedActionRequestTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/PutDatafeedActionRequestTests.java @@ -31,7 +31,6 @@ public void setUpDatafeedId() { protected Request createTestInstance() { DatafeedConfig.Builder datafeedConfig = new DatafeedConfig.Builder(datafeedId, randomAlphaOfLength(10)); datafeedConfig.setIndices(Collections.singletonList(randomAlphaOfLength(10))); - datafeedConfig.setTypes(Collections.singletonList(randomAlphaOfLength(10))); return new Request(datafeedConfig.build()); } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/PutDatafeedActionResponseTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/PutDatafeedActionResponseTests.java index 13e00e27f68aa..ca650914fe146 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/PutDatafeedActionResponseTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/PutDatafeedActionResponseTests.java @@ -23,7 +23,6 @@ protected Response createTestInstance() { DatafeedConfig.Builder datafeedConfig = new DatafeedConfig.Builder( DatafeedConfigTests.randomValidDatafeedId(), randomAlphaOfLength(10)); datafeedConfig.setIndices(Arrays.asList(randomAlphaOfLength(10))); - 
datafeedConfig.setTypes(Arrays.asList(randomAlphaOfLength(10))); return new Response(datafeedConfig.build()); } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/datafeed/DatafeedConfigTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/datafeed/DatafeedConfigTests.java index 7975bd657f9f2..cb2f13e804253 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/datafeed/DatafeedConfigTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/datafeed/DatafeedConfigTests.java @@ -88,7 +88,6 @@ public static DatafeedConfig createRandomizedDatafeedConfig(String jobId, long b private static DatafeedConfig.Builder createRandomizedDatafeedConfigBuilder(String jobId, long bucketSpanMillis) { DatafeedConfig.Builder builder = new DatafeedConfig.Builder(randomValidDatafeedId(), jobId); builder.setIndices(randomStringList(1, 10)); - builder.setTypes(randomStringList(0, 10)); if (randomBoolean()) { builder.setParsedQuery(QueryBuilders.termQuery(randomAlphaOfLength(10), randomAlphaOfLength(10))); } @@ -396,7 +395,6 @@ public void testCheckValid_GivenNegativeScrollSize() { public void testBuild_GivenScriptFieldsAndAggregations() { DatafeedConfig.Builder datafeed = new DatafeedConfig.Builder("datafeed1", "job1"); datafeed.setIndices(Collections.singletonList("my_index")); - datafeed.setTypes(Collections.singletonList("my_type")); datafeed.setScriptFields(Collections.singletonList(new SearchSourceBuilder.ScriptField(randomAlphaOfLength(10), mockScript(randomAlphaOfLength(10)), randomBoolean()))); datafeed.setParsedAggregations(new AggregatorFactories.Builder().addAggregator(AggregationBuilders.avg("foo"))); @@ -409,7 +407,6 @@ public void testBuild_GivenScriptFieldsAndAggregations() { public void testHasAggregations_GivenNull() { DatafeedConfig.Builder builder = new DatafeedConfig.Builder("datafeed1", "job1"); builder.setIndices(Collections.singletonList("myIndex")); - builder.setTypes(Collections.singletonList("myType")); DatafeedConfig datafeedConfig = builder.build(); assertThat(datafeedConfig.hasAggregations(), is(false)); @@ -418,7 +415,6 @@ public void testHasAggregations_GivenNull() { public void testHasAggregations_NonEmpty() { DatafeedConfig.Builder builder = new DatafeedConfig.Builder("datafeed1", "job1"); builder.setIndices(Collections.singletonList("myIndex")); - builder.setTypes(Collections.singletonList("myType")); MaxAggregationBuilder maxTime = AggregationBuilders.max("time").field("time"); builder.setParsedAggregations(new AggregatorFactories.Builder().addAggregator( AggregationBuilders.dateHistogram("time").interval(300000).subAggregation(maxTime).field("time"))); @@ -430,7 +426,6 @@ public void testHasAggregations_NonEmpty() { public void testBuild_GivenEmptyAggregations() { DatafeedConfig.Builder builder = new DatafeedConfig.Builder("datafeed1", "job1"); builder.setIndices(Collections.singletonList("myIndex")); - builder.setTypes(Collections.singletonList("myType")); builder.setParsedAggregations(new AggregatorFactories.Builder()); ElasticsearchException e = expectThrows(ElasticsearchException.class, builder::build); @@ -441,7 +436,6 @@ public void testBuild_GivenEmptyAggregations() { public void testBuild_GivenHistogramWithDefaultInterval() { DatafeedConfig.Builder builder = new DatafeedConfig.Builder("datafeed1", "job1"); builder.setIndices(Collections.singletonList("myIndex")); - builder.setTypes(Collections.singletonList("myType")); MaxAggregationBuilder maxTime = 
AggregationBuilders.max("time").field("time"); builder.setParsedAggregations(new AggregatorFactories.Builder().addAggregator( AggregationBuilders.histogram("time").subAggregation(maxTime).field("time")) @@ -770,7 +764,6 @@ private static DatafeedConfig createDatafeedWithDateHistogram(Long interval) { private static DatafeedConfig.Builder createDatafeedBuilderWithDateHistogram(DateHistogramAggregationBuilder dateHistogram) { DatafeedConfig.Builder builder = new DatafeedConfig.Builder("datafeed1", "job1"); builder.setIndices(Collections.singletonList("myIndex")); - builder.setTypes(Collections.singletonList("myType")); AggregatorFactories.Builder aggs = new AggregatorFactories.Builder().addAggregator(dateHistogram); DatafeedConfig.validateAggregations(aggs); builder.setParsedAggregations(aggs); @@ -784,7 +777,7 @@ private static DatafeedConfig createDatafeedWithDateHistogram(DateHistogramAggre @Override protected DatafeedConfig mutateInstance(DatafeedConfig instance) throws IOException { DatafeedConfig.Builder builder = new DatafeedConfig.Builder(instance); - switch (between(0, 10)) { + switch (between(0, 9)) { case 0: builder.setId(instance.getId() + randomValidDatafeedId()); break; @@ -807,11 +800,6 @@ protected DatafeedConfig mutateInstance(DatafeedConfig instance) throws IOExcept builder.setIndices(indices); break; case 5: - List types = new ArrayList<>(instance.getTypes()); - types.add(randomAlphaOfLengthBetween(1, 20)); - builder.setTypes(types); - break; - case 6: BoolQueryBuilder query = new BoolQueryBuilder(); if (instance.getParsedQuery() != null) { query.must(instance.getParsedQuery()); @@ -819,7 +807,7 @@ protected DatafeedConfig mutateInstance(DatafeedConfig instance) throws IOExcept query.filter(new TermQueryBuilder(randomAlphaOfLengthBetween(1, 10), randomAlphaOfLengthBetween(1, 10))); builder.setParsedQuery(query); break; - case 7: + case 6: if (instance.hasAggregations()) { builder.setAggregations(null); } else { @@ -834,16 +822,16 @@ protected DatafeedConfig mutateInstance(DatafeedConfig instance) throws IOExcept } } break; - case 8: + case 7: ArrayList scriptFields = new ArrayList<>(instance.getScriptFields()); scriptFields.add(new ScriptField(randomAlphaOfLengthBetween(1, 10), new Script("foo"), true)); builder.setScriptFields(scriptFields); builder.setAggregations(null); break; - case 9: + case 8: builder.setScrollSize(instance.getScrollSize() + between(1, 100)); break; - case 10: + case 9: if (instance.getChunkingConfig() == null || instance.getChunkingConfig().getMode() == Mode.AUTO) { ChunkingConfig newChunkingConfig = ChunkingConfig.newManual(new TimeValue(randomNonNegativeLong())); builder.setChunkingConfig(newChunkingConfig); diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/datafeed/DatafeedUpdateTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/datafeed/DatafeedUpdateTests.java index bf61ed541aebb..302bfefc7c42a 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/datafeed/DatafeedUpdateTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/datafeed/DatafeedUpdateTests.java @@ -60,9 +60,6 @@ public static DatafeedUpdate createRandomized(String datafeedId, @Nullable Dataf if (randomBoolean()) { builder.setIndices(DatafeedConfigTests.randomStringList(1, 10)); } - if (randomBoolean()) { - builder.setTypes(DatafeedConfigTests.randomStringList(1, 10)); - } if (randomBoolean()) { builder.setQuery(QueryBuilders.termQuery(randomAlphaOfLength(10), 
randomAlphaOfLength(10))); } @@ -145,13 +142,11 @@ public void testApply_givenPartialUpdate() { public void testApply_givenFullUpdateNoAggregations() { DatafeedConfig.Builder datafeedBuilder = new DatafeedConfig.Builder("foo", "foo-feed"); datafeedBuilder.setIndices(Collections.singletonList("i_1")); - datafeedBuilder.setTypes(Collections.singletonList("t_1")); DatafeedConfig datafeed = datafeedBuilder.build(); DatafeedUpdate.Builder update = new DatafeedUpdate.Builder(datafeed.getId()); update.setJobId("bar"); update.setIndices(Collections.singletonList("i_2")); - update.setTypes(Collections.singletonList("t_2")); update.setQueryDelay(TimeValue.timeValueSeconds(42)); update.setFrequency(TimeValue.timeValueSeconds(142)); update.setQuery(QueryBuilders.termQuery("a", "b")); @@ -164,7 +159,6 @@ public void testApply_givenFullUpdateNoAggregations() { assertThat(updatedDatafeed.getJobId(), equalTo("bar")); assertThat(updatedDatafeed.getIndices(), equalTo(Collections.singletonList("i_2"))); - assertThat(updatedDatafeed.getTypes(), equalTo(Collections.singletonList("t_2"))); assertThat(updatedDatafeed.getQueryDelay(), equalTo(TimeValue.timeValueSeconds(42))); assertThat(updatedDatafeed.getFrequency(), equalTo(TimeValue.timeValueSeconds(142))); assertThat(updatedDatafeed.getParsedQuery(), equalTo(QueryBuilders.termQuery("a", "b"))); @@ -180,7 +174,6 @@ public void testApply_givenFullUpdateNoAggregations() { public void testApply_givenAggregations() { DatafeedConfig.Builder datafeedBuilder = new DatafeedConfig.Builder("foo", "foo-feed"); datafeedBuilder.setIndices(Collections.singletonList("i_1")); - datafeedBuilder.setTypes(Collections.singletonList("t_1")); DatafeedConfig datafeed = datafeedBuilder.build(); DatafeedUpdate.Builder update = new DatafeedUpdate.Builder(datafeed.getId()); @@ -191,7 +184,6 @@ public void testApply_givenAggregations() { DatafeedConfig updatedDatafeed = update.build().apply(datafeed, Collections.emptyMap()); assertThat(updatedDatafeed.getIndices(), equalTo(Collections.singletonList("i_1"))); - assertThat(updatedDatafeed.getTypes(), equalTo(Collections.singletonList("t_1"))); assertThat(updatedDatafeed.getParsedAggregations(), equalTo(new AggregatorFactories.Builder().addAggregator( AggregationBuilders.histogram("a").interval(300000).field("time").subAggregation(maxTime)))); @@ -219,7 +211,7 @@ public void testApply_GivenRandomUpdates_AssertImmutability() { @Override protected DatafeedUpdate mutateInstance(DatafeedUpdate instance) { DatafeedUpdate.Builder builder = new DatafeedUpdate.Builder(instance); - switch (between(0, 10)) { + switch (between(0, 9)) { case 0: builder.setId(instance.getId() + DatafeedConfigTests.randomValidDatafeedId()); break; @@ -251,16 +243,6 @@ protected DatafeedUpdate mutateInstance(DatafeedUpdate instance) { builder.setIndices(indices); break; case 5: - List types; - if (instance.getTypes() == null) { - types = new ArrayList<>(); - } else { - types = new ArrayList<>(instance.getTypes()); - } - types.add(randomAlphaOfLengthBetween(1, 20)); - builder.setTypes(types); - break; - case 6: BoolQueryBuilder query = new BoolQueryBuilder(); if (instance.getQuery() != null) { query.must(instance.getQuery()); @@ -268,7 +250,7 @@ protected DatafeedUpdate mutateInstance(DatafeedUpdate instance) { query.filter(new TermQueryBuilder(randomAlphaOfLengthBetween(1, 10), randomAlphaOfLengthBetween(1, 10))); builder.setQuery(query); break; - case 7: + case 6: if (instance.hasAggregations()) { builder.setAggregations(null); } else { @@ -282,20 +264,20 @@ protected 
DatafeedUpdate mutateInstance(DatafeedUpdate instance) { } } break; - case 8: + case 7: ArrayList scriptFields = new ArrayList<>(instance.getScriptFields()); scriptFields.add(new ScriptField(randomAlphaOfLengthBetween(1, 10), new Script("foo"), true)); builder.setScriptFields(scriptFields); builder.setAggregations(null); break; - case 9: + case 8: if (instance.getScrollSize() == null) { builder.setScrollSize(between(1, 100)); } else { builder.setScrollSize(instance.getScrollSize() + between(1, 100)); } break; - case 10: + case 9: if (instance.getChunkingConfig() == null || instance.getChunkingConfig().getMode() == Mode.AUTO) { ChunkingConfig newChunkingConfig = ChunkingConfig.newManual(new TimeValue(randomNonNegativeLong())); builder.setChunkingConfig(newChunkingConfig); diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/token/InvalidateTokenResponseTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/token/InvalidateTokenResponseTests.java index 1a59971ff9c60..c9c2e4706440e 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/token/InvalidateTokenResponseTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/token/InvalidateTokenResponseTests.java @@ -6,7 +6,6 @@ package org.elasticsearch.xpack.core.security.action.token; import org.elasticsearch.ElasticsearchException; -import org.elasticsearch.Version; import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.StreamInput; @@ -14,7 +13,6 @@ import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.test.VersionUtils; import org.elasticsearch.xpack.core.security.authc.support.TokensInvalidationResult; import java.io.IOException; @@ -65,48 +63,6 @@ public void testSerialization() throws IOException { } } - public void testSerializationToPre66Version() throws IOException{ - final Version version = VersionUtils.randomVersionBetween(random(), Version.V_6_2_0, Version.V_6_5_1); - TokensInvalidationResult result = new TokensInvalidationResult(Arrays.asList(generateRandomStringArray(20, 15, false, false)), - Arrays.asList(generateRandomStringArray(20, 15, false, false)), - Arrays.asList(new ElasticsearchException("foo", new IllegalArgumentException("this is an error message")), - new ElasticsearchException("bar", new IllegalArgumentException("this is an error message2"))), - randomIntBetween(0, 5)); - InvalidateTokenResponse response = new InvalidateTokenResponse(result); - try (BytesStreamOutput output = new BytesStreamOutput()) { - output.setVersion(version); - response.writeTo(output); - try (StreamInput input = output.bytes().streamInput()) { - // False as we have errors and previously invalidated tokens - assertThat(input.readBoolean(), equalTo(false)); - } - } - - result = new TokensInvalidationResult(Arrays.asList(generateRandomStringArray(20, 15, false, false)), - Arrays.asList(generateRandomStringArray(20, 15, false, false)), - Collections.emptyList(), randomIntBetween(0, 5)); - response = new InvalidateTokenResponse(result); - try (BytesStreamOutput output = new BytesStreamOutput()) { - output.setVersion(version); - response.writeTo(output); - try (StreamInput input = output.bytes().streamInput()) { - // False as we have previously invalidated tokens - 
assertThat(input.readBoolean(), equalTo(false)); - } - } - - result = new TokensInvalidationResult(Arrays.asList(generateRandomStringArray(20, 15, false, false)), - Collections.emptyList(), Collections.emptyList(), randomIntBetween(0, 5)); - response = new InvalidateTokenResponse(result); - try (BytesStreamOutput output = new BytesStreamOutput()) { - output.setVersion(version); - response.writeTo(output); - try (StreamInput input = output.bytes().streamInput()) { - assertThat(input.readBoolean(), equalTo(true)); - } - } - } - public void testToXContent() throws IOException { List<String> invalidatedTokens = Arrays.asList(generateRandomStringArray(20, 15, false)); List<String> previouslyInvalidatedTokens = Arrays.asList(generateRandomStringArray(20, 15, false)); @@ -118,7 +74,7 @@ public void testToXContent() throws IOException { XContentBuilder builder = XContentFactory.jsonBuilder(); response.toXContent(builder, ToXContent.EMPTY_PARAMS); assertThat(Strings.toString(builder), - equalTo("{\"created\":false," + + equalTo("{" + "\"invalidated_tokens\":" + invalidatedTokens.size() + "," + "\"previously_invalidated_tokens\":" + previouslyInvalidatedTokens.size() + "," + "\"error_count\":2," + diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authc/DefaultAuthenticationFailureHandlerTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authc/DefaultAuthenticationFailureHandlerTests.java index 07cdec4434a36..ae7798815731b 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authc/DefaultAuthenticationFailureHandlerTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authc/DefaultAuthenticationFailureHandlerTests.java @@ -13,7 +13,6 @@ import org.elasticsearch.rest.RestStatus; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.core.XPackField; -import org.mockito.Mockito; import java.util.Arrays; import java.util.Collections; @@ -26,6 +25,8 @@ import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.notNullValue; import static org.hamcrest.Matchers.sameInstance; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; public class DefaultAuthenticationFailureHandlerTests extends ESTestCase { @@ -33,17 +34,17 @@ public void testAuthenticationRequired() { final boolean testDefault = randomBoolean(); final String basicAuthScheme = "Basic realm=\"" + XPackField.SECURITY + "\" charset=\"UTF-8\""; final String bearerAuthScheme = "Bearer realm=\"" + XPackField.SECURITY + "\""; - final DefaultAuthenticationFailureHandler failuerHandler; + final DefaultAuthenticationFailureHandler failureHandler; if (testDefault) { - failuerHandler = new DefaultAuthenticationFailureHandler(Collections.emptyMap()); + failureHandler = new DefaultAuthenticationFailureHandler(Collections.emptyMap()); } else { - final Map<String, List<String>> failureResponeHeaders = new HashMap<>(); - failureResponeHeaders.put("WWW-Authenticate", Arrays.asList(basicAuthScheme, bearerAuthScheme)); - failuerHandler = new DefaultAuthenticationFailureHandler(failureResponeHeaders); + final Map<String, List<String>> failureResponseHeaders = new HashMap<>(); + failureResponseHeaders.put("WWW-Authenticate", Arrays.asList(basicAuthScheme, bearerAuthScheme)); + failureHandler = new DefaultAuthenticationFailureHandler(failureResponseHeaders); } - assertThat(failuerHandler, is(notNullValue())); + assertThat(failureHandler, is(notNullValue())); final ElasticsearchSecurityException ese = -
failuerHandler.authenticationRequired("someaction", new ThreadContext(Settings.builder().build())); + failureHandler.authenticationRequired("someaction", new ThreadContext(Settings.builder().build())); assertThat(ese, is(notNullValue())); assertThat(ese.getMessage(), equalTo("action [someaction] requires authentication")); assertThat(ese.getHeader("WWW-Authenticate"), is(notNullValue())); @@ -54,15 +55,25 @@ } } + public void testMissingToken() { + final DefaultAuthenticationFailureHandler handler = new DefaultAuthenticationFailureHandler(Collections.emptyMap()); + final RestRequest request = mock(RestRequest.class); + when(request.uri()).thenReturn("https://secret.es.shield.gov/"); + final ThreadContext threadContext = new ThreadContext(Settings.EMPTY); + final ElasticsearchSecurityException ese = handler.missingToken(request, threadContext); + assertThat(ese, is(notNullValue())); + assertThat(ese.getMessage(), equalTo("missing authentication credentials for REST request [https://secret.es.shield.gov/]")); + } + public void testExceptionProcessingRequest() { final String basicAuthScheme = "Basic realm=\"" + XPackField.SECURITY + "\" charset=\"UTF-8\""; final String bearerAuthScheme = "Bearer realm=\"" + XPackField.SECURITY + "\""; final String negotiateAuthScheme = randomFrom("Negotiate", "Negotiate Ijoijksdk"); - final Map<String, List<String>> failureResponeHeaders = new HashMap<>(); - failureResponeHeaders.put("WWW-Authenticate", Arrays.asList(basicAuthScheme, bearerAuthScheme, negotiateAuthScheme)); - final DefaultAuthenticationFailureHandler failuerHandler = new DefaultAuthenticationFailureHandler(failureResponeHeaders); + final Map<String, List<String>> failureResponseHeaders = new HashMap<>(); + failureResponseHeaders.put("WWW-Authenticate", Arrays.asList(basicAuthScheme, bearerAuthScheme, negotiateAuthScheme)); + final DefaultAuthenticationFailureHandler failureHandler = new DefaultAuthenticationFailureHandler(failureResponseHeaders); - assertThat(failuerHandler, is(notNullValue())); + assertThat(failureHandler, is(notNullValue())); final boolean causeIsElasticsearchSecurityException = randomBoolean(); final boolean causeIsEseAndUnauthorized = causeIsElasticsearchSecurityException && randomBoolean(); final ElasticsearchSecurityException eseCause = (causeIsEseAndUnauthorized) @@ -77,7 +88,7 @@ if (causeIsElasticsearchSecurityException) { if (causeIsEseAndUnauthorized) { - final ElasticsearchSecurityException ese = failuerHandler.exceptionProcessingRequest(Mockito.mock(RestRequest.class), cause, + final ElasticsearchSecurityException ese = failureHandler.exceptionProcessingRequest(mock(RestRequest.class), cause, new ThreadContext(Settings.builder().build())); assertThat(ese, is(notNullValue())); assertThat(ese.getHeader("WWW-Authenticate"), is(notNullValue())); @@ -93,11 +104,11 @@ } assertThat(ese.getMessage(), equalTo("unauthorized")); } else { - expectThrows(AssertionError.class, () -> failuerHandler.exceptionProcessingRequest(Mockito.mock(RestRequest.class), cause, + expectThrows(AssertionError.class, () -> failureHandler.exceptionProcessingRequest(mock(RestRequest.class), cause, new ThreadContext(Settings.builder().build()))); } } else { - final ElasticsearchSecurityException ese = failuerHandler.exceptionProcessingRequest(Mockito.mock(RestRequest.class), cause, + final ElasticsearchSecurityException ese = failureHandler.exceptionProcessingRequest(mock(RestRequest.class), cause, new
ThreadContext(Settings.builder().build())); assertThat(ese, is(notNullValue())); assertThat(ese.getHeader("WWW-Authenticate"), is(notNullValue())); @@ -117,7 +128,7 @@ public void testSortsWWWAuthenticateHeaderValues() { failureResponeHeaders.put("WWW-Authenticate", supportedSchemes); final DefaultAuthenticationFailureHandler failuerHandler = new DefaultAuthenticationFailureHandler(failureResponeHeaders); - final ElasticsearchSecurityException ese = failuerHandler.exceptionProcessingRequest(Mockito.mock(RestRequest.class), null, + final ElasticsearchSecurityException ese = failuerHandler.exceptionProcessingRequest(mock(RestRequest.class), null, new ThreadContext(Settings.builder().build())); assertThat(ese, is(notNullValue())); diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ssl/SSLConfigurationReloaderTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ssl/SSLConfigurationReloaderTests.java index 2290a34752819..cb9996ac90db5 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ssl/SSLConfigurationReloaderTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ssl/SSLConfigurationReloaderTests.java @@ -17,6 +17,7 @@ import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.http.MockResponse; import org.elasticsearch.test.http.MockWebServer; +import org.elasticsearch.test.junit.annotations.TestLogging; import org.elasticsearch.threadpool.TestThreadPool; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.watcher.ResourceWatcherService; @@ -55,6 +56,7 @@ /** * Unit tests for the reloading of SSL configuration */ +@TestLogging("org.elasticsearch.watcher:TRACE") public class SSLConfigurationReloaderTests extends ESTestCase { private ThreadPool threadPool; @@ -435,20 +437,20 @@ void reloadSSLContext(SSLConfiguration configuration) { assertThat(sslService.sslContextHolder(config).sslContext(), sameInstance(context)); } - private void validateSSLConfigurationIsReloaded(Settings settings, Environment env, - Consumer preChecks, - Runnable modificationFunction, - Consumer postChecks) - throws Exception { + private void validateSSLConfigurationIsReloaded(Settings settings, Environment env, Consumer preChecks, + Runnable modificationFunction, Consumer postChecks) throws Exception { final CountDownLatch reloadLatch = new CountDownLatch(1); final SSLService sslService = new SSLService(settings, env); final SSLConfiguration config = sslService.getSSLConfiguration("xpack.ssl"); new SSLConfigurationReloader(env, sslService, resourceWatcherService) { @Override void reloadSSLContext(SSLConfiguration configuration) { - super.reloadSSLContext(configuration); - reloadLatch.countDown(); + try { + super.reloadSSLContext(configuration); + } finally { + reloadLatch.countDown(); + } } }; // Baseline checks diff --git a/x-pack/plugin/ilm/qa/multi-node/src/test/java/org/elasticsearch/xpack/indexlifecycle/TimeSeriesLifecycleActionsIT.java b/x-pack/plugin/ilm/qa/multi-node/src/test/java/org/elasticsearch/xpack/indexlifecycle/TimeSeriesLifecycleActionsIT.java index 58300f36c2eb2..779a737c88279 100644 --- a/x-pack/plugin/ilm/qa/multi-node/src/test/java/org/elasticsearch/xpack/indexlifecycle/TimeSeriesLifecycleActionsIT.java +++ b/x-pack/plugin/ilm/qa/multi-node/src/test/java/org/elasticsearch/xpack/indexlifecycle/TimeSeriesLifecycleActionsIT.java @@ -19,11 +19,14 @@ import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentHelper; import 
org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.index.IndexSettings; +import org.elasticsearch.index.engine.FrozenEngine; import org.elasticsearch.test.rest.ESRestTestCase; import org.elasticsearch.xpack.core.indexlifecycle.AllocateAction; import org.elasticsearch.xpack.core.indexlifecycle.DeleteAction; import org.elasticsearch.xpack.core.indexlifecycle.ErrorStep; import org.elasticsearch.xpack.core.indexlifecycle.ForceMergeAction; +import org.elasticsearch.xpack.core.indexlifecycle.FreezeAction; import org.elasticsearch.xpack.core.indexlifecycle.LifecycleAction; import org.elasticsearch.xpack.core.indexlifecycle.LifecyclePolicy; import org.elasticsearch.xpack.core.indexlifecycle.LifecycleSettings; @@ -423,6 +426,20 @@ public void testShrinkAction() throws Exception { expectThrows(ResponseException.class, this::indexDocument); } + public void testFreezeAction() throws Exception { + createIndexWithSettings(index, Settings.builder().put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1) + .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0)); + createNewSingletonPolicy("cold", new FreezeAction()); + updatePolicy(index, policy); + assertBusy(() -> { + Map settings = getOnlyIndexSettings(index); + assertThat(getStepKeyForIndex(index), equalTo(TerminalPolicyStep.KEY)); + assertThat(settings.get(IndexMetaData.INDEX_BLOCKS_WRITE_SETTING.getKey()), equalTo("true")); + assertThat(settings.get(IndexSettings.INDEX_SEARCH_THROTTLED.getKey()), equalTo("true")); + assertThat(settings.get(FrozenEngine.INDEX_FROZEN.getKey()), equalTo("true")); + }); + } + @SuppressWarnings("unchecked") public void testNonexistentPolicy() throws Exception { String indexPrefix = randomAlphaOfLengthBetween(5,15).toLowerCase(Locale.ROOT); @@ -474,7 +491,6 @@ public void testNonexistentPolicy() throws Exception { assertEquals("policy [does_not_exist] does not exist", stepInfo.get("reason")); assertEquals("illegal_argument_exception", stepInfo.get("type")); }); - } public void testInvalidPolicyNames() throws UnsupportedEncodingException { diff --git a/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/indexlifecycle/IndexLifecycle.java b/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/indexlifecycle/IndexLifecycle.java index 27fb52fe2397f..a2ffac8412eb1 100644 --- a/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/indexlifecycle/IndexLifecycle.java +++ b/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/indexlifecycle/IndexLifecycle.java @@ -37,6 +37,7 @@ import org.elasticsearch.xpack.core.indexlifecycle.AllocateAction; import org.elasticsearch.xpack.core.indexlifecycle.DeleteAction; import org.elasticsearch.xpack.core.indexlifecycle.ForceMergeAction; +import org.elasticsearch.xpack.core.indexlifecycle.FreezeAction; import org.elasticsearch.xpack.core.indexlifecycle.IndexLifecycleMetadata; import org.elasticsearch.xpack.core.indexlifecycle.LifecycleAction; import org.elasticsearch.xpack.core.indexlifecycle.LifecycleSettings; @@ -157,7 +158,8 @@ public List getNa new NamedXContentRegistry.Entry(LifecycleAction.class, new ParseField(ReadOnlyAction.NAME), ReadOnlyAction::parse), new NamedXContentRegistry.Entry(LifecycleAction.class, new ParseField(RolloverAction.NAME), RolloverAction::parse), new NamedXContentRegistry.Entry(LifecycleAction.class, new ParseField(ShrinkAction.NAME), ShrinkAction::parse), - new NamedXContentRegistry.Entry(LifecycleAction.class, new ParseField(DeleteAction.NAME), DeleteAction::parse) + new NamedXContentRegistry.Entry(LifecycleAction.class, new 
ParseField(DeleteAction.NAME), DeleteAction::parse), + new NamedXContentRegistry.Entry(LifecycleAction.class, new ParseField(FreezeAction.NAME), FreezeAction::parse) ); } diff --git a/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/indexlifecycle/IndexLifecycleMetadataTests.java b/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/indexlifecycle/IndexLifecycleMetadataTests.java index 1c30ea841cb77..455f35ceae2d8 100644 --- a/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/indexlifecycle/IndexLifecycleMetadataTests.java +++ b/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/indexlifecycle/IndexLifecycleMetadataTests.java @@ -16,6 +16,7 @@ import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.xpack.core.indexlifecycle.FreezeAction; import org.elasticsearch.xpack.core.indexlifecycle.OperationMode; import org.elasticsearch.test.AbstractDiffableSerializationTestCase; import org.elasticsearch.xpack.core.indexlifecycle.AllocateAction; @@ -81,7 +82,8 @@ protected NamedWriteableRegistry getNamedWriteableRegistry() { new NamedWriteableRegistry.Entry(LifecycleAction.class, ForceMergeAction.NAME, ForceMergeAction::new), new NamedWriteableRegistry.Entry(LifecycleAction.class, ReadOnlyAction.NAME, ReadOnlyAction::new), new NamedWriteableRegistry.Entry(LifecycleAction.class, RolloverAction.NAME, RolloverAction::new), - new NamedWriteableRegistry.Entry(LifecycleAction.class, ShrinkAction.NAME, ShrinkAction::new) + new NamedWriteableRegistry.Entry(LifecycleAction.class, ShrinkAction.NAME, ShrinkAction::new), + new NamedWriteableRegistry.Entry(LifecycleAction.class, FreezeAction.NAME, FreezeAction::new) )); } @@ -96,7 +98,8 @@ protected NamedXContentRegistry xContentRegistry() { new NamedXContentRegistry.Entry(LifecycleAction.class, new ParseField(ForceMergeAction.NAME), ForceMergeAction::parse), new NamedXContentRegistry.Entry(LifecycleAction.class, new ParseField(ReadOnlyAction.NAME), ReadOnlyAction::parse), new NamedXContentRegistry.Entry(LifecycleAction.class, new ParseField(RolloverAction.NAME), RolloverAction::parse), - new NamedXContentRegistry.Entry(LifecycleAction.class, new ParseField(ShrinkAction.NAME), ShrinkAction::parse) + new NamedXContentRegistry.Entry(LifecycleAction.class, new ParseField(ShrinkAction.NAME), ShrinkAction::parse), + new NamedXContentRegistry.Entry(LifecycleAction.class, new ParseField(FreezeAction.NAME), FreezeAction::parse) )); return new NamedXContentRegistry(entries); } diff --git a/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/indexlifecycle/IndexLifecycleRunnerTests.java b/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/indexlifecycle/IndexLifecycleRunnerTests.java index 7693a752b28a1..f4ab15a30e880 100644 --- a/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/indexlifecycle/IndexLifecycleRunnerTests.java +++ b/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/indexlifecycle/IndexLifecycleRunnerTests.java @@ -71,6 +71,7 @@ import java.util.function.Function; import java.util.stream.Collectors; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.awaitLatch; import static org.elasticsearch.xpack.core.indexlifecycle.LifecycleExecutionState.ILM_CUSTOM_METADATA_KEY; import static org.elasticsearch.xpack.core.indexlifecycle.LifecyclePolicyTestsUtils.newTestLifecyclePolicy; import static org.hamcrest.Matchers.containsString; @@ -203,7 +204,7 @@ public void 
testRunStateChangePolicyWithNoNextStep() throws Exception { step.setLatch(latch); runner.runPolicyAfterStateChange(policyName, indexMetaData); - latch.await(5, TimeUnit.SECONDS); + awaitLatch(latch, 5, TimeUnit.SECONDS); ClusterState after = clusterService.state(); assertEquals(before, after); @@ -264,7 +265,7 @@ public void testRunStateChangePolicyWithNextStep() throws Exception { nextStep.setLatch(latch); runner.runPolicyAfterStateChange(policyName, indexMetaData); - assertTrue(latch.await(5, TimeUnit.SECONDS)); + awaitLatch(latch, 5, TimeUnit.SECONDS); // The cluster state can take a few extra milliseconds to update after the steps are executed assertBusy(() -> assertNotEquals(before, clusterService.state())); @@ -373,13 +374,13 @@ public void testRunStateChangePolicyWithAsyncActionNextStep() throws Exception { runner.runPolicyAfterStateChange(policyName, indexMetaData); // Wait for the cluster state action step - latch.await(5, TimeUnit.SECONDS); + awaitLatch(latch, 5, TimeUnit.SECONDS); CountDownLatch asyncLatch = new CountDownLatch(1); nextStep.setLatch(asyncLatch); // Wait for the async action step - asyncLatch.await(5, TimeUnit.SECONDS); + awaitLatch(asyncLatch, 5, TimeUnit.SECONDS); ClusterState after = clusterService.state(); assertNotEquals(before, after); @@ -440,7 +441,7 @@ public void testRunPeriodicStep() throws Exception { CountDownLatch latch = new CountDownLatch(1); step.setLatch(latch); runner.runPeriodicStep(policyName, indexMetaData); - latch.await(5, TimeUnit.SECONDS); + awaitLatch(latch, 5, TimeUnit.SECONDS); ClusterState after = clusterService.state(); diff --git a/x-pack/plugin/ml/cpp-snapshot/build.gradle b/x-pack/plugin/ml/cpp-snapshot/build.gradle index 1c35d9db6f321..e47566cc82cea 100644 --- a/x-pack/plugin/ml/cpp-snapshot/build.gradle +++ b/x-pack/plugin/ml/cpp-snapshot/build.gradle @@ -1,4 +1,3 @@ -import java.net.HttpURLConnection import org.elasticsearch.gradle.VersionProperties apply plugin: 'distribution' diff --git a/x-pack/plugin/ml/qa/basic-multi-node/src/test/java/org/elasticsearch/xpack/ml/integration/MlBasicMultiNodeIT.java b/x-pack/plugin/ml/qa/basic-multi-node/src/test/java/org/elasticsearch/xpack/ml/integration/MlBasicMultiNodeIT.java index 4d3ec8e2b2470..0cfa6f5ebf04e 100644 --- a/x-pack/plugin/ml/qa/basic-multi-node/src/test/java/org/elasticsearch/xpack/ml/integration/MlBasicMultiNodeIT.java +++ b/x-pack/plugin/ml/qa/basic-multi-node/src/test/java/org/elasticsearch/xpack/ml/integration/MlBasicMultiNodeIT.java @@ -265,7 +265,6 @@ private Response createDatafeed(String datafeedId, String jobId) throws Exceptio xContentBuilder.startObject(); xContentBuilder.field("job_id", jobId); xContentBuilder.array("indexes", "airline-data"); - xContentBuilder.array("types", "_doc"); xContentBuilder.endObject(); Request request = new Request("PUT", MachineLearning.BASE_PATH + "datafeeds/" + datafeedId); request.setJsonEntity(Strings.toString(xContentBuilder)); diff --git a/x-pack/plugin/ml/qa/ml-with-security/build.gradle b/x-pack/plugin/ml/qa/ml-with-security/build.gradle index abfed3fd878d0..6e0127f614c9a 100644 --- a/x-pack/plugin/ml/qa/ml-with-security/build.gradle +++ b/x-pack/plugin/ml/qa/ml-with-security/build.gradle @@ -93,7 +93,9 @@ integTestRunner { 'ml/validate/Test job config that is invalid only because of the job ID', 'ml/validate_detector/Test invalid detector', 'ml/delete_forecast/Test delete on _all forecasts not allow no forecasts', - 'ml/delete_forecast/Test delete forecast on missing forecast' + 'ml/delete_forecast/Test delete forecast on 
missing forecast', + 'ml/ml_upgrade/Upgrade results when there is nothing to upgrade', + 'ml/ml_upgrade/Upgrade results when there is nothing to upgrade not waiting for results' ].join(',') } diff --git a/x-pack/plugin/ml/qa/native-multi-node-tests/build.gradle b/x-pack/plugin/ml/qa/native-multi-node-tests/build.gradle index 59867283f0c84..188086afcf14a 100644 --- a/x-pack/plugin/ml/qa/native-multi-node-tests/build.gradle +++ b/x-pack/plugin/ml/qa/native-multi-node-tests/build.gradle @@ -1,5 +1,3 @@ -import org.elasticsearch.gradle.LoggedExec - apply plugin: 'elasticsearch.standalone-rest-test' apply plugin: 'elasticsearch.rest-test' diff --git a/x-pack/plugin/ml/qa/native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/CategorizationIT.java b/x-pack/plugin/ml/qa/native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/CategorizationIT.java index 9c19ffe639f7d..3d49e03321892 100644 --- a/x-pack/plugin/ml/qa/native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/CategorizationIT.java +++ b/x-pack/plugin/ml/qa/native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/CategorizationIT.java @@ -92,7 +92,6 @@ public void testBasicCategorization() throws Exception { String datafeedId = job.getId() + "-feed"; DatafeedConfig.Builder datafeedConfig = new DatafeedConfig.Builder(datafeedId, job.getId()); datafeedConfig.setIndices(Collections.singletonList(DATA_INDEX)); - datafeedConfig.setTypes(Collections.singletonList(DATA_TYPE)); DatafeedConfig datafeed = datafeedConfig.build(); registerDatafeed(datafeed); putDatafeed(datafeed); @@ -138,7 +137,6 @@ public void testCategorizationWithFilters() throws Exception { String datafeedId = job.getId() + "-feed"; DatafeedConfig.Builder datafeedConfig = new DatafeedConfig.Builder(datafeedId, job.getId()); datafeedConfig.setIndices(Collections.singletonList(DATA_INDEX)); - datafeedConfig.setTypes(Collections.singletonList(DATA_TYPE)); DatafeedConfig datafeed = datafeedConfig.build(); registerDatafeed(datafeed); putDatafeed(datafeed); diff --git a/x-pack/plugin/ml/qa/native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/DatafeedJobsRestIT.java b/x-pack/plugin/ml/qa/native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/DatafeedJobsRestIT.java index 69668f4543ee3..64d8f73c4a3e5 100644 --- a/x-pack/plugin/ml/qa/native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/DatafeedJobsRestIT.java +++ b/x-pack/plugin/ml/qa/native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/DatafeedJobsRestIT.java @@ -13,6 +13,7 @@ import org.elasticsearch.client.RestClient; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ThreadContext; +import org.elasticsearch.rest.action.document.RestBulkAction; import org.elasticsearch.test.SecuritySettingsSourceField; import org.elasticsearch.test.rest.ESRestTestCase; import org.elasticsearch.xpack.core.ml.integration.MlRestTestStateCleaner; @@ -1177,7 +1178,7 @@ DatafeedBuilder setChunkingTimespan(String timespan) { Response build() throws IOException { Request request = new Request("PUT", MachineLearning.BASE_PATH + "datafeeds/" + datafeedId); request.setJsonEntity("{" - + "\"job_id\": \"" + jobId + "\",\"indexes\":[\"" + index + "\"],\"types\":[\"" + type + "\"]" + + "\"job_id\": \"" + jobId + "\",\"indexes\":[\"" + index + "\"]" + (source ? ",\"_source\":true" : "") + (scriptedFields == null ? 
"" : ",\"script_fields\":" + scriptedFields) + (aggregations == null ? "" : ",\"aggs\":" + aggregations) @@ -1196,6 +1197,7 @@ private void bulkIndex(String bulk) throws IOException { bulkRequest.setJsonEntity(bulk); bulkRequest.addParameter("refresh", "true"); bulkRequest.addParameter("pretty", null); + bulkRequest.setOptions(expectWarnings(RestBulkAction.TYPES_DEPRECATION_MESSAGE)); String bulkResponse = EntityUtils.toString(client().performRequest(bulkRequest).getEntity()); assertThat(bulkResponse, not(containsString("\"errors\": false"))); } diff --git a/x-pack/plugin/ml/qa/native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/DeleteExpiredDataIT.java b/x-pack/plugin/ml/qa/native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/DeleteExpiredDataIT.java index 15086317f8eaa..f2ca43bf53c26 100644 --- a/x-pack/plugin/ml/qa/native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/DeleteExpiredDataIT.java +++ b/x-pack/plugin/ml/qa/native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/DeleteExpiredDataIT.java @@ -43,13 +43,12 @@ import static org.hamcrest.Matchers.greaterThan; import static org.hamcrest.Matchers.greaterThanOrEqualTo; import static org.hamcrest.Matchers.is; -import static org.hamcrest.Matchers.lessThan; import static org.hamcrest.Matchers.lessThanOrEqualTo; public class DeleteExpiredDataIT extends MlNativeAutodetectIntegTestCase { private static final String DATA_INDEX = "delete-expired-data-test-data"; - private static final String DATA_TYPE = "my_type"; + private static final String DATA_TYPE = "doc"; @Before public void setUpData() throws IOException { @@ -111,7 +110,6 @@ public void testDeleteExpiredData() throws Exception { String datafeedId = job.getId() + "-feed"; DatafeedConfig.Builder datafeedConfig = new DatafeedConfig.Builder(datafeedId, job.getId()); datafeedConfig.setIndices(Arrays.asList(DATA_INDEX)); - datafeedConfig.setTypes(Arrays.asList(DATA_TYPE)); DatafeedConfig datafeed = datafeedConfig.build(); registerDatafeed(datafeed); putDatafeed(datafeed); @@ -241,11 +239,15 @@ public void testDeleteExpiredData() throws Exception { } // Verify .ml-state doesn't contain unused state documents - SearchResponse stateDocsResponse = client().prepareSearch(AnomalyDetectorsIndex.jobStateIndexName()) + SearchResponse stateDocsResponse = client().prepareSearch(AnomalyDetectorsIndex.jobStateIndexPattern()) .setFetchSource(false) + .setTrackTotalHits(true) .setSize(10000) .get(); - assertThat(stateDocsResponse.getHits().getTotalHits().value, lessThan(10000L)); + + // Assert at least one state doc for each job + assertThat(stateDocsResponse.getHits().getTotalHits().value, greaterThanOrEqualTo(5L)); + for (SearchHit hit : stateDocsResponse.getHits().getHits()) { assertThat(hit.getId().startsWith("non_existing_job"), is(false)); } diff --git a/x-pack/plugin/ml/qa/native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/ForecastIT.java b/x-pack/plugin/ml/qa/native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/ForecastIT.java index c80ec7048204f..2d8c6a4128b8d 100644 --- a/x-pack/plugin/ml/qa/native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/ForecastIT.java +++ b/x-pack/plugin/ml/qa/native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/ForecastIT.java @@ -41,7 +41,6 @@ public void tearDownData() throws Exception { cleanUp(); } - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/36258") 
public void testSingleSeries() throws Exception { Detector.Builder detector = new Detector.Builder("mean", "value"); diff --git a/x-pack/plugin/ml/qa/native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/InterimResultsDeletedAfterReopeningJobIT.java b/x-pack/plugin/ml/qa/native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/InterimResultsDeletedAfterReopeningJobIT.java index 9570f8d2887d5..42ae65f1c9597 100644 --- a/x-pack/plugin/ml/qa/native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/InterimResultsDeletedAfterReopeningJobIT.java +++ b/x-pack/plugin/ml/qa/native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/InterimResultsDeletedAfterReopeningJobIT.java @@ -109,7 +109,7 @@ private static Map createRecord(long timestamp, String byFieldVa private void assertNoInterimResults(String jobId) { String indexName = AnomalyDetectorsIndex.jobResultsAliasedName(jobId); - SearchResponse search = client().prepareSearch(indexName).setTypes("result").setSize(1000) + SearchResponse search = client().prepareSearch(indexName).setSize(1000) .setQuery(QueryBuilders.termQuery("is_interim", true)).get(); assertThat(search.getHits().getTotalHits().value, equalTo(0L)); } diff --git a/x-pack/plugin/ml/qa/native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/MlJobIT.java b/x-pack/plugin/ml/qa/native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/MlJobIT.java index 4b0f9e7aac304..9f38791bb9f07 100644 --- a/x-pack/plugin/ml/qa/native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/MlJobIT.java +++ b/x-pack/plugin/ml/qa/native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/MlJobIT.java @@ -13,6 +13,7 @@ import org.elasticsearch.common.util.concurrent.ConcurrentCollections; import org.elasticsearch.common.util.concurrent.ConcurrentMapLong; import org.elasticsearch.common.util.concurrent.ThreadContext; +import org.elasticsearch.common.xcontent.support.XContentMapValues; import org.elasticsearch.test.SecuritySettingsSourceField; import org.elasticsearch.test.rest.ESRestTestCase; import org.elasticsearch.xpack.core.ml.integration.MlRestTestStateCleaner; @@ -22,7 +23,9 @@ import org.junit.After; import java.io.IOException; +import java.util.Collections; import java.util.Locale; +import java.util.Map; import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicReference; import java.util.regex.Matcher; @@ -111,6 +114,21 @@ public void testGetJobs_GivenMultipleJobs() throws Exception { assertThat(implicitAll, containsString("\"job_id\":\"given-multiple-jobs-job-3\"")); } + // tests the _xpack/usage endpoint + public void testUsage() throws IOException { + createFarequoteJob("job-1"); + createFarequoteJob("job-2"); + Map usage = entityAsMap(client().performRequest(new Request("GET", "_xpack/usage"))); + assertEquals(2, XContentMapValues.extractValue("ml.jobs._all.count", usage)); + assertEquals(2, XContentMapValues.extractValue("ml.jobs.closed.count", usage)); + Response openResponse = client().performRequest(new Request("POST", MachineLearning.BASE_PATH + "anomaly_detectors/job-1/_open")); + assertEquals(Collections.singletonMap("opened", true), entityAsMap(openResponse)); + usage = entityAsMap(client().performRequest(new Request("GET", "_xpack/usage"))); + assertEquals(2, XContentMapValues.extractValue("ml.jobs._all.count", usage)); + assertEquals(1, XContentMapValues.extractValue("ml.jobs.closed.count", 
usage)); + assertEquals(1, XContentMapValues.extractValue("ml.jobs.opened.count", usage)); + } + private Response createFarequoteJob(String jobId) throws IOException { Request request = new Request("PUT", MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId); request.setJsonEntity( diff --git a/x-pack/plugin/ml/qa/native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/MlNativeAutodetectIntegTestCase.java b/x-pack/plugin/ml/qa/native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/MlNativeAutodetectIntegTestCase.java index e824fa2917012..cd33e1d80769e 100644 --- a/x-pack/plugin/ml/qa/native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/MlNativeAutodetectIntegTestCase.java +++ b/x-pack/plugin/ml/qa/native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/MlNativeAutodetectIntegTestCase.java @@ -6,7 +6,6 @@ package org.elasticsearch.xpack.ml.integration; import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksRequest; -import org.elasticsearch.action.get.GetResponse; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.support.master.AcknowledgedResponse; import org.elasticsearch.client.Client; @@ -27,6 +26,7 @@ import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.common.xcontent.json.JsonXContent; import org.elasticsearch.index.query.QueryBuilders; +import org.elasticsearch.index.reindex.ReindexPlugin; import org.elasticsearch.persistent.PersistentTaskParams; import org.elasticsearch.persistent.PersistentTaskState; import org.elasticsearch.plugins.Plugin; @@ -121,7 +121,7 @@ protected Collection> nodePlugins() { @Override protected Collection> transportClientPlugins() { - return Arrays.asList(XPackClientPlugin.class, Netty4Plugin.class); + return Arrays.asList(XPackClientPlugin.class, Netty4Plugin.class, ReindexPlugin.class); } @Override @@ -348,17 +348,19 @@ protected void waitForecastToFinish(String jobId, String forecastId) throws Exce } protected ForecastRequestStats getForecastStats(String jobId, String forecastId) { - GetResponse getResponse = client().prepareGet() - .setIndex(AnomalyDetectorsIndex.jobResultsAliasedName(jobId)) - .setId(ForecastRequestStats.documentId(jobId, forecastId)) - .execute().actionGet(); + SearchResponse searchResponse = client().prepareSearch(AnomalyDetectorsIndex.jobResultsAliasedName(jobId)) + .setQuery(QueryBuilders.idsQuery().addIds(ForecastRequestStats.documentId(jobId, forecastId))) + .get(); - if (getResponse.isExists() == false) { + if (searchResponse.getHits().getHits().length == 0) { return null; } + + assertThat(searchResponse.getHits().getHits().length, equalTo(1)); + try (XContentParser parser = XContentFactory.xContent(XContentType.JSON).createParser( NamedXContentRegistry.EMPTY, DeprecationHandler.THROW_UNSUPPORTED_OPERATION, - getResponse.getSourceAsBytesRef().streamInput())) { + searchResponse.getHits().getHits()[0].getSourceRef().streamInput())) { return ForecastRequestStats.STRICT_PARSER.apply(parser, null); } catch (IOException e) { throw new IllegalStateException(e); @@ -398,7 +400,6 @@ protected long countForecastDocs(String jobId, String forecastId) { protected List getForecasts(String jobId, ForecastRequestStats forecastRequestStats) { List forecasts = new ArrayList<>(); - SearchResponse searchResponse = client().prepareSearch(AnomalyDetectorsIndex.jobResultsIndexPrefix() + "*") .setSize((int) forecastRequestStats.getRecordCount()) .setQuery(QueryBuilders.boolQuery() 
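A note on the getForecastStats() change above: the single-document GET is replaced by a search with an ids query, presumably because after the results-index upgrade a job's results alias can point at more than one backing index, and a document GET cannot be routed through a multi-index alias. A minimal sketch of the fetch-by-id-through-an-alias pattern, with placeholder alias and id values:

    // Search instead of GET: this works even when the alias covers several indices.
    SearchResponse response = client().prepareSearch("results-alias")    // placeholder alias
            .setQuery(QueryBuilders.idsQuery().addIds("doc-id"))         // placeholder id
            .get();
    if (response.getHits().getHits().length == 1) {
        // The source bytes can then be fed to an XContentParser, as the test base class does.
        BytesReference source = response.getHits().getHits()[0].getSourceRef();
    }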
diff --git a/x-pack/plugin/ml/qa/native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/MlUpgradeIT.java b/x-pack/plugin/ml/qa/native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/MlUpgradeIT.java new file mode 100644 index 0000000000000..a2a05ea1686fa --- /dev/null +++ b/x-pack/plugin/ml/qa/native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/MlUpgradeIT.java @@ -0,0 +1,378 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.ml.integration; + +import org.elasticsearch.Version; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.admin.cluster.state.ClusterStateRequest; +import org.elasticsearch.action.search.SearchResponse; +import org.elasticsearch.action.support.IndicesOptions; +import org.elasticsearch.action.support.PlainActionFuture; +import org.elasticsearch.action.support.master.AcknowledgedResponse; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; +import org.elasticsearch.common.collect.Tuple; +import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.index.query.QueryBuilders; +import org.elasticsearch.index.reindex.ReindexAction; +import org.elasticsearch.index.reindex.ReindexRequest; +import org.elasticsearch.test.ESIntegTestCase; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.xpack.core.ml.action.MlUpgradeAction; +import org.elasticsearch.xpack.core.ml.datafeed.DatafeedConfig; +import org.elasticsearch.xpack.core.ml.job.config.Job; +import org.elasticsearch.xpack.core.ml.job.config.JobState; +import org.elasticsearch.xpack.core.ml.job.persistence.AnomalyDetectorsIndex; +import org.elasticsearch.xpack.core.ml.job.persistence.AnomalyDetectorsIndexFields; +import org.elasticsearch.xpack.ml.ResultsIndexUpgradeService; +import org.junit.After; +import org.junit.Assert; + +import java.util.Arrays; +import java.util.Collections; +import java.util.List; + +import static org.elasticsearch.xpack.ml.support.BaseMlIntegTestCase.createDatafeedBuilder; +import static org.elasticsearch.xpack.ml.support.BaseMlIntegTestCase.createScheduledJob; +import static org.elasticsearch.xpack.ml.support.BaseMlIntegTestCase.indexDocs; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.greaterThan; +import static org.hamcrest.Matchers.instanceOf; +import static org.hamcrest.core.Is.is; + +public class MlUpgradeIT extends MlNativeAutodetectIntegTestCase { + + @After + public void cleanup() throws Exception { + cleanUp(); + } + + public void testMigrationWhenItIsNotNecessary() throws Exception { + String jobId1 = "no-migration-test1"; + String jobId2 = "no-migration-test2"; + String jobId3 = "no-migration-test3"; + + String dataIndex = createDataIndex().v2(); + List jobs = createJobsWithData(jobId1, jobId2, jobId3, dataIndex); + Job job1 = jobs.get(0); + Job job2 = jobs.get(1); + Job job3 = jobs.get(2); + + String job1Index = job1.getResultsIndexName(); + String job2Index = job2.getResultsIndexName(); + String job3Index = job3.getResultsIndexName(); + + assertThat(indexExists(job1Index), is(true)); + assertThat(indexExists(job2Index), is(true)); + assertThat(indexExists(job3Index), is(true)); + + 
long job1Total = getTotalDocCount(job1Index); + long job2Total = getTotalDocCount(job2Index); + long job3Total = getTotalDocCount(job3Index); + + AcknowledgedResponse resp = ESIntegTestCase.client().execute(MlUpgradeAction.INSTANCE, + new MlUpgradeAction.Request()).actionGet(); + assertThat(resp.isAcknowledged(), is(true)); + + // Migration should have done nothing + assertThat(indexExists(job1Index), is(true)); + assertThat(indexExists(job2Index), is(true)); + assertThat(indexExists(job3Index), is(true)); + + assertThat(getTotalDocCount(job1Index), equalTo(job1Total)); + assertThat(getTotalDocCount(job2Index), equalTo(job2Total)); + assertThat(getTotalDocCount(job3Index), equalTo(job3Total)); + + ClusterState state = admin().cluster().state(new ClusterStateRequest()).actionGet().getState(); + IndexNameExpressionResolver indexNameExpressionResolver = new IndexNameExpressionResolver(); + String[] indices = indexNameExpressionResolver.concreteIndexNames(state, + IndicesOptions.strictExpandOpenAndForbidClosed(), + AnomalyDetectorsIndex.jobResultsIndexPrefix() + "*"); + + // We should have two backing indices, as there is a shared index and a custom one + assertThat(indices.length, equalTo(2)); + } + + public void testMigration() throws Exception { + String jobId1 = "migration-test1"; + String jobId2 = "migration-test2"; + String jobId3 = "migration-test3"; + + String dataIndex = createDataIndex().v2(); + List jobs = createJobsWithData(jobId1, jobId2, jobId3, dataIndex); + Job job1 = jobs.get(0); + Job job2 = jobs.get(1); + Job job3 = jobs.get(2); + + String job1Index = job1.getResultsIndexName(); + String job2Index = job2.getResultsIndexName(); + String job3Index = job3.getResultsIndexName(); + + assertThat(indexExists(job1Index), is(true)); + assertThat(indexExists(job2Index), is(true)); + assertThat(indexExists(job3Index), is(true)); + + long job1Total = getJobResultsCount(job1.getId()); + long job2Total = getJobResultsCount(job2.getId()); + long job3Total = getJobResultsCount(job3.getId()); + + IndexNameExpressionResolver indexNameExpressionResolver = new IndexNameExpressionResolver(); + + ResultsIndexUpgradeService resultsIndexUpgradeService = new ResultsIndexUpgradeService(indexNameExpressionResolver, + ThreadPool.Names.SAME, + indexMetaData -> true); + + PlainActionFuture future = PlainActionFuture.newFuture(); + + resultsIndexUpgradeService.upgrade(ESIntegTestCase.client(), + new MlUpgradeAction.Request(), + ESIntegTestCase.client().admin().cluster().prepareState().get().getState(), + future); + + AcknowledgedResponse response = future.get(); + assertThat(response.isAcknowledged(), is(true)); + + assertThat(indexExists(job1Index), is(false)); + assertThat(indexExists(job2Index), is(false)); + assertThat(indexExists(job3Index), is(false)); + + ClusterState state = admin().cluster().state(new ClusterStateRequest()).actionGet().getState(); + String[] indices = indexNameExpressionResolver.concreteIndexNames(state, + IndicesOptions.strictExpandOpenAndForbidClosed(), + AnomalyDetectorsIndex.jobResultsIndexPrefix() + "*"); + + // We should have four backing indices, as there is a shared index and a custom one, and upgrading doubles the number of indices + Assert.assertThat(indices.length, equalTo(4)); + + refresh(indices); + assertThat(getJobResultsCount(job1.getId()), equalTo(job1Total)); + assertThat(getJobResultsCount(job2.getId()), equalTo(job2Total)); + assertThat(getJobResultsCount(job3.getId()), equalTo(job3Total)); + + + // We should still be able to write, and the aliases should allow reads
from the appropriate indices + postDataToJob(jobId1); + postDataToJob(jobId2); + postDataToJob(jobId3); + // We should also be able to create new jobs and old jobs should be unaffected. + String jobId4 = "migration-test4"; + Job job4 = createAndOpenJobAndStartDataFeedWithData(jobId4, dataIndex, false); + waitUntilJobIsClosed(jobId4); + + indices = indexNameExpressionResolver.concreteIndexNames(state, + IndicesOptions.strictExpandOpenAndForbidClosed(), + AnomalyDetectorsIndex.jobResultsIndexPrefix() + "*"); + refresh(indices); + + long newJob1Total = getJobResultsCount(job1.getId()); + assertThat(newJob1Total, greaterThan(job1Total)); + + long newJob2Total = getJobResultsCount(job2.getId()); + assertThat(newJob2Total, greaterThan(job2Total)); + + long newJob3Total = getJobResultsCount(job3.getId()); + assertThat(newJob3Total, greaterThan(job3Total)); + + assertThat(getJobResultsCount(jobId4), greaterThan(0L)); + assertThat(getJobResultsCount(jobId1), equalTo(newJob1Total)); + assertThat(getJobResultsCount(jobId2), equalTo(newJob2Total)); + assertThat(getJobResultsCount(jobId3), equalTo(newJob3Total)); + } + + // I think this test name could be a little bit longer.... + public void testMigrationWithManuallyCreatedIndexThatNeedsMigrating() throws Exception { + String jobId1 = "migration-failure-test1"; + String jobId2 = "migration-failure-test2"; + String jobId3 = "migration-failure-test3"; + + String dataIndex = createDataIndex().v2(); + List jobs = createJobsWithData(jobId1, jobId2, jobId3, dataIndex); + Job job1 = jobs.get(0); + Job job2 = jobs.get(1); + Job job3 = jobs.get(2); + + String job1Index = job1.getResultsIndexName(); + String job2Index = job2.getResultsIndexName(); + String job3Index = job3.getResultsIndexName(); + + // This index name should match one of the automatically created migration indices + String manuallyCreatedIndex = job1Index + "-" + Version.CURRENT.major; + client().admin().indices().prepareCreate(manuallyCreatedIndex).execute().actionGet(); + + IndexNameExpressionResolver indexNameExpressionResolver = new IndexNameExpressionResolver(); + + ResultsIndexUpgradeService resultsIndexUpgradeService = new ResultsIndexUpgradeService(indexNameExpressionResolver, + ThreadPool.Names.SAME, + indexMetaData -> true); // indicates that this manually created index needs migrating + + resultsIndexUpgradeService.upgrade(ESIntegTestCase.client(), + new MlUpgradeAction.Request(), + ESIntegTestCase.client().admin().cluster().prepareState().get().getState(), + ActionListener.wrap( + resp -> fail(), + exception -> { + assertThat(exception, instanceOf(IllegalStateException.class)); + assertThat(exception.getMessage(), + equalTo("Index [" + manuallyCreatedIndex + "] already exists and is not the current version.")); + } + )); + } + + public void testMigrationWithExistingIndexWithData() throws Exception { + String jobId1 = "partial-migration-test1"; + String jobId2 = "partial-migration-test2"; + String jobId3 = "partial-migration-test3"; + + String dataIndex = createDataIndex().v2(); + List jobs = createJobsWithData(jobId1, jobId2, jobId3, dataIndex); + Job job1 = jobs.get(0); + Job job2 = jobs.get(1); + Job job3 = jobs.get(2); + + String job1Index = job1.getResultsIndexName(); + String job2Index = job2.getResultsIndexName(); + String job3Index = job3.getResultsIndexName(); + + assertThat(indexExists(job1Index), is(true)); + assertThat(indexExists(job2Index), is(true)); + assertThat(indexExists(job3Index), is(true)); + + long job1Total = getJobResultsCount(job1.getId()); + long job2Total =
getJobResultsCount(job2.getId()); + long job3Total = getJobResultsCount(job3.getId()); + + // Let's manually create a READ index that already contains reindexed data + // Should still get aliased appropriately without any additional/duplicate data. + String alreadyMigratedIndex = job1Index + "-" + Version.CURRENT.major + "r"; + ReindexRequest reindexRequest = new ReindexRequest(); + reindexRequest.setSourceIndices(job1Index); + reindexRequest.setDestIndex(alreadyMigratedIndex); + client().execute(ReindexAction.INSTANCE, reindexRequest).actionGet(); + + // New write index as well, should still get aliased appropriately + String alreadyMigratedWriteIndex = job1Index + "-" + Version.CURRENT.major; + client().admin().indices().prepareCreate(alreadyMigratedWriteIndex).execute().actionGet(); + + IndexNameExpressionResolver indexNameExpressionResolver = new IndexNameExpressionResolver(); + + ResultsIndexUpgradeService resultsIndexUpgradeService = new ResultsIndexUpgradeService(indexNameExpressionResolver, + ThreadPool.Names.SAME, + // indicates that this manually created index is already migrated and should not be included in our migration steps + indexMetaData -> !(indexMetaData.getIndex().getName().equals(alreadyMigratedIndex) || + indexMetaData.getIndex().getName().equals(alreadyMigratedWriteIndex))); + + PlainActionFuture future = PlainActionFuture.newFuture(); + + resultsIndexUpgradeService.upgrade(ESIntegTestCase.client(), + new MlUpgradeAction.Request(), + ESIntegTestCase.client().admin().cluster().prepareState().get().getState(), + future); + + AcknowledgedResponse response = future.get(); + assertThat(response.isAcknowledged(), is(true)); + + assertThat(indexExists(job1Index), is(false)); + assertThat(indexExists(job2Index), is(false)); + assertThat(indexExists(job3Index), is(false)); + + ClusterState state = admin().cluster().state(new ClusterStateRequest()).actionGet().getState(); + String[] indices = indexNameExpressionResolver.concreteIndexNames(state, + IndicesOptions.strictExpandOpenAndForbidClosed(), + AnomalyDetectorsIndex.jobResultsIndexPrefix() + "*"); + + // We should have four backing indices, as there is a shared index and a custom one, and upgrading doubles the number of indices + Assert.assertThat(indices.length, equalTo(4)); + refresh(indices); + + assertThat(getJobResultsCount(job1.getId()), equalTo(job1Total)); + assertThat(getJobResultsCount(job2.getId()), equalTo(job2Total)); + assertThat(getJobResultsCount(job3.getId()), equalTo(job3Total)); + + // We should still be able to write, and the aliases should allow reads from the appropriate indices + postDataToJob(jobId1); + postDataToJob(jobId2); + postDataToJob(jobId3); + + refresh(indices); + + long newJob1Total = getJobResultsCount(job1.getId()); + assertThat(newJob1Total, greaterThan(job1Total)); + + long newJob2Total = getJobResultsCount(job2.getId()); + assertThat(newJob2Total, greaterThan(job2Total)); + + long newJob3Total = getJobResultsCount(job3.getId()); + assertThat(newJob3Total, greaterThan(job3Total)); + } + + private long getTotalDocCount(String indexName) { + SearchResponse searchResponse = ESIntegTestCase.client().prepareSearch(indexName) + .setSize(10_000) + .setTrackTotalHits(true) + .setQuery(QueryBuilders.matchAllQuery()) + .execute().actionGet(); + return searchResponse.getHits().getTotalHits().value; + } + + private long getJobResultsCount(String jobId) { + String index = AnomalyDetectorsIndexFields.RESULTS_INDEX_PREFIX + jobId; + return getTotalDocCount(index); + } + + private void postDataToJob(String jobId)
throws Exception { + openJob(jobId); + ESTestCase.assertBusy(() -> Assert.assertEquals(getJobStats(jobId).get(0).getState(), JobState.OPENED)); + startDatafeed(jobId + "-datafeed", 0L, System.currentTimeMillis()); + waitUntilJobIsClosed(jobId); + } + + private Job createAndOpenJobAndStartDataFeedWithData(String jobId, String dataIndex, boolean isCustom) throws Exception { + Job.Builder jobbuilder = createScheduledJob(jobId); + if (isCustom) { + jobbuilder.setResultsIndexName(jobId); + } + registerJob(jobbuilder); + + Job job = putJob(jobbuilder).getResponse(); + + openJob(job.getId()); + ESTestCase.assertBusy(() -> Assert.assertEquals(getJobStats(job.getId()).get(0).getState(), JobState.OPENED)); + + DatafeedConfig.Builder builder = createDatafeedBuilder(job.getId() + "-datafeed", + job.getId(), + Collections.singletonList(dataIndex)); + builder.setQueryDelay(TimeValue.timeValueSeconds(5)); + builder.setFrequency(TimeValue.timeValueSeconds(5)); + DatafeedConfig datafeedConfig = builder.build(); + registerDatafeed(datafeedConfig); + putDatafeed(datafeedConfig); + startDatafeed(datafeedConfig.getId(), 0L, System.currentTimeMillis()); + waitUntilJobIsClosed(jobId); + return job; + } + + private Tuple createDataIndex() { + ESIntegTestCase.client().admin().indices().prepareCreate("data-for-migration-1") + .addMapping("type", "time", "type=date") + .get(); + long numDocs = ESTestCase.randomIntBetween(32, 512); + long now = System.currentTimeMillis(); + long oneWeekAgo = now - 604800000; + long twoWeeksAgo = oneWeekAgo - 604800000; + indexDocs(logger, "data-for-migration-1", numDocs, twoWeeksAgo, oneWeekAgo); + return new Tuple<>(numDocs, "data-for-migration-1"); + } + + private List createJobsWithData(String sharedJobId1, String sharedJobId2, String customJobId, String dataIndex) throws Exception { + + Job job1 = createAndOpenJobAndStartDataFeedWithData(sharedJobId1, dataIndex, false); + Job job2 = createAndOpenJobAndStartDataFeedWithData(sharedJobId2, dataIndex, false); + Job job3 = createAndOpenJobAndStartDataFeedWithData(customJobId, dataIndex, true); + + return Arrays.asList(job1, job2, job3); + } +} diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearning.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearning.java index 7cb74c4df5eda..11d302470c708 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearning.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearning.java @@ -96,6 +96,7 @@ import org.elasticsearch.xpack.core.ml.action.PutDatafeedAction; import org.elasticsearch.xpack.core.ml.action.PutFilterAction; import org.elasticsearch.xpack.core.ml.action.PutJobAction; +import org.elasticsearch.xpack.core.ml.action.MlUpgradeAction; import org.elasticsearch.xpack.core.ml.action.RevertModelSnapshotAction; import org.elasticsearch.xpack.core.ml.action.StartDatafeedAction; import org.elasticsearch.xpack.core.ml.action.StopDatafeedAction; @@ -150,6 +151,7 @@ import org.elasticsearch.xpack.ml.action.TransportPutDatafeedAction; import org.elasticsearch.xpack.ml.action.TransportPutFilterAction; import org.elasticsearch.xpack.ml.action.TransportPutJobAction; +import org.elasticsearch.xpack.ml.action.TransportMlUpgradeAction; import org.elasticsearch.xpack.ml.action.TransportRevertModelSnapshotAction; import org.elasticsearch.xpack.ml.action.TransportStartDatafeedAction; import org.elasticsearch.xpack.ml.action.TransportStopDatafeedAction; @@ -165,6 +167,7 @@ import 
org.elasticsearch.xpack.ml.datafeed.DatafeedManager; import org.elasticsearch.xpack.ml.datafeed.persistence.DatafeedConfigProvider; import org.elasticsearch.xpack.ml.job.JobManager; +import org.elasticsearch.xpack.ml.job.JobManagerHolder; import org.elasticsearch.xpack.ml.job.UpdateJobProcessNotifier; import org.elasticsearch.xpack.ml.job.categorization.MlClassicTokenizer; import org.elasticsearch.xpack.ml.job.categorization.MlClassicTokenizerFactory; @@ -228,6 +231,7 @@ import org.elasticsearch.xpack.ml.rest.results.RestGetInfluencersAction; import org.elasticsearch.xpack.ml.rest.results.RestGetOverallBucketsAction; import org.elasticsearch.xpack.ml.rest.results.RestGetRecordsAction; +import org.elasticsearch.xpack.ml.rest.results.RestUpgradeMlAction; import org.elasticsearch.xpack.ml.rest.validate.RestValidateDetectorAction; import org.elasticsearch.xpack.ml.rest.validate.RestValidateJobConfigAction; @@ -375,7 +379,8 @@ public Collection createComponents(Client client, ClusterService cluster NamedXContentRegistry xContentRegistry, Environment environment, NodeEnvironment nodeEnvironment, NamedWriteableRegistry namedWriteableRegistry) { if (enabled == false || transportClientMode) { - return emptyList(); + // special holder for @link(MachineLearningFeatureSetUsage) which needs access to job manager, empty if ML is disabled + return Collections.singletonList(new JobManagerHolder()); } Auditor auditor = new Auditor(client, clusterService.getNodeName()); @@ -385,6 +390,9 @@ public Collection createComponents(Client client, ClusterService cluster UpdateJobProcessNotifier notifier = new UpdateJobProcessNotifier(client, clusterService, threadPool); JobManager jobManager = new JobManager(env, settings, jobResultsProvider, clusterService, auditor, threadPool, client, notifier); + // special holder for @link(MachineLearningFeatureSetUsage) which needs access to job manager if ML is enabled + JobManagerHolder jobManagerHolder = new JobManagerHolder(jobManager); + JobDataCountsPersister jobDataCountsPersister = new JobDataCountsPersister(client); JobResultsPersister jobResultsPersister = new JobResultsPersister(client); @@ -424,7 +432,7 @@ public Collection createComponents(Client client, ClusterService cluster DatafeedJobBuilder datafeedJobBuilder = new DatafeedJobBuilder(client, settings, xContentRegistry, auditor, System::currentTimeMillis); DatafeedManager datafeedManager = new DatafeedManager(threadPool, client, clusterService, datafeedJobBuilder, - System::currentTimeMillis, auditor); + System::currentTimeMillis, auditor, autodetectProcessManager); this.datafeedManager.set(datafeedManager); MlLifeCycleService mlLifeCycleService = new MlLifeCycleService(environment, clusterService, datafeedManager, autodetectProcessManager); @@ -443,6 +451,7 @@ public Collection createComponents(Client client, ClusterService cluster jobConfigProvider, datafeedConfigProvider, jobManager, + jobManagerHolder, autodetectProcessManager, new MlInitializationService(settings, threadPool, clusterService, client), jobDataCountsPersister, @@ -464,7 +473,7 @@ public List> getPersistentTasksExecutor(ClusterServic return Arrays.asList( new TransportOpenJobAction.OpenJobPersistentTasksExecutor(settings, clusterService, autodetectProcessManager.get(), memoryTracker.get(), client), - new TransportStartDatafeedAction.StartDatafeedPersistentTasksExecutor( datafeedManager.get()) + new TransportStartDatafeedAction.StartDatafeedPersistentTasksExecutor(datafeedManager.get()) ); } @@ -535,7 +544,8 @@ public List 
getRestHandlers(Settings settings, RestController restC new RestPutCalendarJobAction(settings, restController), new RestGetCalendarEventsAction(settings, restController), new RestPostCalendarEventAction(settings, restController), - new RestFindFileStructureAction(settings, restController) + new RestFindFileStructureAction(settings, restController), + new RestUpgradeMlAction(settings, restController) ); } @@ -593,7 +603,8 @@ public List getRestHandlers(Settings settings, RestController restC new ActionHandler<>(GetCalendarEventsAction.INSTANCE, TransportGetCalendarEventsAction.class), new ActionHandler<>(PostCalendarEventsAction.INSTANCE, TransportPostCalendarEventsAction.class), new ActionHandler<>(PersistJobAction.INSTANCE, TransportPersistJobAction.class), - new ActionHandler<>(FindFileStructureAction.INSTANCE, TransportFindFileStructureAction.class) + new ActionHandler<>(FindFileStructureAction.INSTANCE, TransportFindFileStructureAction.class), + new ActionHandler<>(MlUpgradeAction.INSTANCE, TransportMlUpgradeAction.class) ); } @Override @@ -691,7 +702,7 @@ public UnaryOperator> getIndexTemplateMetaDat try (XContentBuilder stateMapping = ElasticsearchMappings.stateMapping()) { IndexTemplateMetaData stateTemplate = IndexTemplateMetaData.builder(AnomalyDetectorsIndex.jobStateIndexName()) - .patterns(Collections.singletonList(AnomalyDetectorsIndex.jobStateIndexName())) + .patterns(Collections.singletonList(AnomalyDetectorsIndex.jobStateIndexPattern())) // TODO review these settings .settings(Settings.builder() .put(IndexMetaData.SETTING_AUTO_EXPAND_REPLICAS, "0-1") diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearningFeatureSet.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearningFeatureSet.java index 5f937609e8cc9..16a8e946e7abb 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearningFeatureSet.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearningFeatureSet.java @@ -25,12 +25,12 @@ import org.elasticsearch.xpack.core.XPackSettings; import org.elasticsearch.xpack.core.XPackField; import org.elasticsearch.xpack.core.ml.MachineLearningFeatureSetUsage; -import org.elasticsearch.xpack.core.ml.MlMetadata; import org.elasticsearch.xpack.core.ml.action.GetDatafeedsStatsAction; import org.elasticsearch.xpack.core.ml.action.GetJobsStatsAction; import org.elasticsearch.xpack.core.ml.datafeed.DatafeedState; import org.elasticsearch.xpack.core.ml.job.config.Job; import org.elasticsearch.xpack.core.ml.job.config.JobState; +import org.elasticsearch.xpack.ml.job.JobManagerHolder; import org.elasticsearch.xpack.ml.process.NativeController; import org.elasticsearch.xpack.ml.process.NativeControllerHolder; import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.ModelSizeStats; @@ -47,6 +47,7 @@ import java.util.Map; import java.util.Objects; import java.util.concurrent.TimeoutException; +import java.util.stream.Collectors; public class MachineLearningFeatureSet implements XPackFeatureSet { @@ -60,15 +61,17 @@ public class MachineLearningFeatureSet implements XPackFeatureSet { private final XPackLicenseState licenseState; private final ClusterService clusterService; private final Client client; + private final JobManagerHolder jobManagerHolder; private final Map nativeCodeInfo; @Inject public MachineLearningFeatureSet(Environment environment, ClusterService clusterService, Client client, - @Nullable XPackLicenseState licenseState) { + @Nullable XPackLicenseState licenseState, 
JobManagerHolder jobManagerHolder) { this.enabled = XPackSettings.MACHINE_LEARNING_ENABLED.get(environment.settings()); this.clusterService = Objects.requireNonNull(clusterService); this.client = Objects.requireNonNull(client); this.licenseState = licenseState; + this.jobManagerHolder = jobManagerHolder; Map nativeCodeInfo = NativeController.UNKNOWN_NATIVE_CODE_INFO; // Don't try to get the native code version if ML is disabled - it causes too much controversy // if ML has been disabled because of some OS incompatibility. Also don't try to get the native @@ -133,7 +136,7 @@ public Map nativeCodeInfo() { @Override public void usage(ActionListener listener) { ClusterState state = clusterService.state(); - new Retriever(client, MlMetadata.getMlMetadata(state), available(), enabled(), mlNodeCount(state)).execute(listener); + new Retriever(client, jobManagerHolder, available(), enabled(), mlNodeCount(state)).execute(listener); } private int mlNodeCount(final ClusterState clusterState) { @@ -153,16 +156,16 @@ private int mlNodeCount(final ClusterState clusterState) { public static class Retriever { private final Client client; - private final MlMetadata mlMetadata; + private final JobManagerHolder jobManagerHolder; private final boolean available; private final boolean enabled; private Map jobsUsage; private Map datafeedsUsage; private int nodeCount; - public Retriever(Client client, MlMetadata mlMetadata, boolean available, boolean enabled, int nodeCount) { + public Retriever(Client client, JobManagerHolder jobManagerHolder, boolean available, boolean enabled, int nodeCount) { this.client = Objects.requireNonNull(client); - this.mlMetadata = mlMetadata; + this.jobManagerHolder = jobManagerHolder; this.available = available; this.enabled = enabled; this.jobsUsage = new LinkedHashMap<>(); @@ -171,7 +174,8 @@ public Retriever(Client client, MlMetadata mlMetadata, boolean available, boolea } public void execute(ActionListener listener) { - if (enabled == false) { + // empty holder means either ML disabled or transport client mode + if (jobManagerHolder.isEmpty()) { listener.onResponse( new MachineLearningFeatureSetUsage(available, enabled, Collections.emptyMap(), Collections.emptyMap(), 0)); return; @@ -191,20 +195,19 @@ public void execute(ActionListener listener) { GetJobsStatsAction.Request jobStatsRequest = new GetJobsStatsAction.Request(MetaData.ALL); ActionListener jobStatsListener = ActionListener.wrap( response -> { - addJobsUsage(response); - GetDatafeedsStatsAction.Request datafeedStatsRequest = - new GetDatafeedsStatsAction.Request(GetDatafeedsStatsAction.ALL); - client.execute(GetDatafeedsStatsAction.INSTANCE, datafeedStatsRequest, - datafeedStatsListener); - }, - listener::onFailure - ); + jobManagerHolder.getJobManager().expandJobs(MetaData.ALL, true, ActionListener.wrap(jobs -> { + addJobsUsage(response, jobs.results()); + GetDatafeedsStatsAction.Request datafeedStatsRequest = new GetDatafeedsStatsAction.Request( + GetDatafeedsStatsAction.ALL); + client.execute(GetDatafeedsStatsAction.INSTANCE, datafeedStatsRequest, datafeedStatsListener); + }, listener::onFailure)); + }, listener::onFailure); // Step 0. 
Kick off the chain of callbacks by requesting jobs stats client.execute(GetJobsStatsAction.INSTANCE, jobStatsRequest, jobStatsListener); } - private void addJobsUsage(GetJobsStatsAction.Response response) { + private void addJobsUsage(GetJobsStatsAction.Response response, List jobs) { StatsAccumulator allJobsDetectorsStats = new StatsAccumulator(); StatsAccumulator allJobsModelSizeStats = new StatsAccumulator(); ForecastStats allJobsForecastStats = new ForecastStats(); @@ -214,11 +217,11 @@ private void addJobsUsage(GetJobsStatsAction.Response response) { Map modelSizeStatsByState = new HashMap<>(); Map forecastStatsByState = new HashMap<>(); - Map jobs = mlMetadata.getJobs(); List jobsStats = response.getResponse().results(); + Map jobMap = jobs.stream().collect(Collectors.toMap(Job::getId, item -> item)); for (GetJobsStatsAction.Response.JobStats jobStats : jobsStats) { ModelSizeStats modelSizeStats = jobStats.getModelSizeStats(); - int detectorsCount = jobs.get(jobStats.getJobId()).getAnalysisConfig() + int detectorsCount = jobMap.get(jobStats.getJobId()).getAnalysisConfig() .getDetectors().size(); double modelSize = modelSizeStats == null ? 0.0 : jobStats.getModelSizeStats().getModelBytes(); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MlConfigMigrator.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MlConfigMigrator.java index f2fe80f377649..bb3735f8aa3f1 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MlConfigMigrator.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MlConfigMigrator.java @@ -37,13 +37,14 @@ import org.elasticsearch.xpack.core.ml.MlMetadata; import org.elasticsearch.xpack.core.ml.MlTasks; import org.elasticsearch.xpack.core.ml.datafeed.DatafeedConfig; +import org.elasticsearch.xpack.core.ml.job.config.AnalysisLimits; import org.elasticsearch.xpack.core.ml.job.config.Job; import org.elasticsearch.xpack.core.ml.job.persistence.AnomalyDetectorsIndex; import org.elasticsearch.xpack.core.ml.job.persistence.ElasticsearchMappings; import org.elasticsearch.xpack.core.ml.utils.ToXContentParams; import org.elasticsearch.xpack.ml.datafeed.persistence.DatafeedConfigProvider; import org.elasticsearch.xpack.ml.job.persistence.JobConfigProvider; -import org.elasticsearch.xpack.ml.utils.ChainTaskExecutor; +import org.elasticsearch.xpack.ml.utils.VoidChainTaskExecutor; import java.io.IOException; import java.util.ArrayList; @@ -178,9 +179,9 @@ public void migrateConfigsWithoutTasks(ClusterState clusterState, ActionListener } private void migrateBatches(List batches, ActionListener listener) { - ChainTaskExecutor chainTaskExecutor = new ChainTaskExecutor(EsExecutors.newDirectExecutorService(), true); + VoidChainTaskExecutor voidChainTaskExecutor = new VoidChainTaskExecutor(EsExecutors.newDirectExecutorService(), true); for (JobsAndDatafeeds batch : batches) { - chainTaskExecutor.add(chainedListener -> writeConfigToIndex(batch.datafeedConfigs, batch.jobs, ActionListener.wrap( + voidChainTaskExecutor.add(chainedListener -> writeConfigToIndex(batch.datafeedConfigs, batch.jobs, ActionListener.wrap( failedDocumentIds -> { List successfulJobWrites = filterFailedJobConfigWrites(failedDocumentIds, batch.jobs); List successfulDatafeedWrites = @@ -190,7 +191,7 @@ private void migrateBatches(List batches, ActionListener listener.onResponse(true), listener::onFailure)); + voidChainTaskExecutor.execute(ActionListener.wrap(aVoids -> listener.onResponse(true), listener::onFailure)); } // Exposed for testing @@ 
-403,11 +404,23 @@ public static Job updateJobForMigration(Job job) { Map custom = job.getCustomSettings() == null ? new HashMap<>() : new HashMap<>(job.getCustomSettings()); custom.put(MIGRATED_FROM_VERSION, job.getJobVersion()); builder.setCustomSettings(custom); + // Increase the model memory limit for 6.1 - 6.3 jobs + Version jobVersion = job.getJobVersion(); + if (jobVersion != null && jobVersion.onOrAfter(Version.V_6_1_0) && jobVersion.before(Version.V_6_3_0)) { + // Increase model memory limit if < 512MB + if (job.getAnalysisLimits() != null && job.getAnalysisLimits().getModelMemoryLimit() != null && + job.getAnalysisLimits().getModelMemoryLimit() < 512L) { + long updatedModelMemoryLimit = (long) (job.getAnalysisLimits().getModelMemoryLimit() * 1.3); + AnalysisLimits limits = new AnalysisLimits(updatedModelMemoryLimit, + job.getAnalysisLimits().getCategorizationExamplesLimit()); + builder.setAnalysisLimits(limits); + } + } // Pre v5.5 (ml beta) jobs do not have a version. // These jobs cannot be opened, we rely on the missing version // to indicate this. // See TransportOpenJobAction.validate() - if (job.getJobVersion() != null) { + if (jobVersion != null) { builder.setJobVersion(Version.CURRENT); } return builder.build(); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/ResultsIndexUpgradeService.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/ResultsIndexUpgradeService.java new file mode 100644 index 0000000000000..ccbaed13feca0 --- /dev/null +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/ResultsIndexUpgradeService.java @@ -0,0 +1,513 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.xpack.ml; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.ElasticsearchTimeoutException; +import org.elasticsearch.ExceptionsHelper; +import org.elasticsearch.ResourceAlreadyExistsException; +import org.elasticsearch.Version; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.DocWriteRequest; +import org.elasticsearch.action.admin.indices.alias.Alias; +import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest; +import org.elasticsearch.action.admin.indices.create.CreateIndexRequest; +import org.elasticsearch.action.admin.indices.create.CreateIndexResponse; +import org.elasticsearch.action.admin.indices.delete.DeleteIndexRequest; +import org.elasticsearch.action.admin.indices.settings.put.UpdateSettingsRequest; +import org.elasticsearch.action.bulk.BulkItemResponse; +import org.elasticsearch.action.support.IndicesOptions; +import org.elasticsearch.action.support.master.AcknowledgedResponse; +import org.elasticsearch.client.Client; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.metadata.IndexMetaData; +import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; +import org.elasticsearch.cluster.metadata.MetaData; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.collect.Tuple; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.index.reindex.BulkByScrollResponse; +import org.elasticsearch.index.reindex.ReindexAction; +import org.elasticsearch.index.reindex.ReindexRequest; +import org.elasticsearch.index.reindex.ScrollableHitSource; +import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.xpack.core.ml.action.MlUpgradeAction; +import org.elasticsearch.xpack.core.ml.job.persistence.ElasticsearchMappings; +import org.elasticsearch.xpack.ml.utils.TypedChainTaskExecutor; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.function.Function; +import java.util.function.Predicate; +import java.util.stream.Collectors; + +import static org.elasticsearch.xpack.core.ClientHelper.ML_ORIGIN; +import static org.elasticsearch.xpack.core.ClientHelper.executeAsyncWithOrigin; + +/** + * ML Job results index upgrade service + */ +public class ResultsIndexUpgradeService { + + private static final Logger logger = LogManager.getLogger(ResultsIndexUpgradeService.class); + + // Adjust the following constants as necessary for various versions and backports. 
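A sketch of the naming scheme that the constants below drive (they feed the newWriteName/newReadName helpers at the bottom of this class and the index names expected by MlUpgradeIT): with a major version of 7, an old results index is split into a versioned write index and an "r"-suffixed read index. The index name here is illustrative only:

    String oldIndex = ".ml-anomalies-custom-foo";     // example name, for illustration
    String newWriteIndex = oldIndex + "-" + 7;        // ".ml-anomalies-custom-foo-7"
    String newReadIndex = oldIndex + "-" + 7 + "r";   // ".ml-anomalies-custom-foo-7r"
    // Write aliases move to the new write index, read aliases end up covering both new
    // indices, and the old index is deleted once reindexing into the read index succeeds.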
+ private static final int INDEX_VERSION = Version.CURRENT.major; + private static final Version MIN_REQUIRED_VERSION = Version.CURRENT.minimumCompatibilityVersion(); + + private final IndexNameExpressionResolver indexNameExpressionResolver; + private final Predicate shouldUpgrade; + private final String executor; + + /** + * Construct a new upgrade service + * + * @param indexNameExpressionResolver Index expression resolver for the request + * @param executor Where to execute client calls + * @param shouldUpgrade Given an index's IndexMetaData, indicates whether it should be upgraded; + * {@code true} indicates that it SHOULD be upgraded + */ + public ResultsIndexUpgradeService(IndexNameExpressionResolver indexNameExpressionResolver, + String executor, + Predicate shouldUpgrade) { + this.indexNameExpressionResolver = indexNameExpressionResolver; + this.shouldUpgrade = shouldUpgrade; + this.executor = executor; + } + + public static boolean wasIndexCreatedInCurrentMajorVersion(IndexMetaData indexMetaData) { + return indexMetaData.getCreationVersion().major == INDEX_VERSION; + } + + /** + * There are two reasons for these indices to exist: + * 1. The upgrade process has run before and either failed for some reason, or the end user is simply running it again. + * Either way, it should be ok to proceed as this action SHOULD be idempotent, + * unless the shouldUpgrade predicate is poorly formed + * 2. This index was created manually by the user. If the index was created manually and actually needs upgrading, then + * we consider the "new index" to be invalid as the passed predicate indicates that it still needs upgrading. + * + * @param metaData Cluster metadata + * @param newIndexName The index to check + * @param shouldUpgrade Whether the index should be upgraded + * @return {@code true} if the "new index" is valid + */ + private static boolean validNewIndex(MetaData metaData, String newIndexName, Predicate shouldUpgrade) { + return (metaData.hasIndex(newIndexName) && shouldUpgrade.test(metaData.index(newIndexName))) == false; + } + + private static void validateMinNodeVersion(ClusterState clusterState) { + if (clusterState.nodes().getMinNodeVersion().before(MIN_REQUIRED_VERSION)) { + throw new IllegalStateException("All nodes should have at least version [" + MIN_REQUIRED_VERSION + "] to upgrade"); + } + } + + // This method copies the behavior of the normal {index}/_upgrade rest response handler + private static Tuple getStatusAndCause(BulkByScrollResponse response) { + /* + * Return the highest numbered rest status under the assumption that higher numbered statuses are "more error" + * and thus more interesting to the user. + */ + RestStatus status = RestStatus.OK; + Throwable cause = null; + if (response.isTimedOut()) { + status = RestStatus.REQUEST_TIMEOUT; + cause = new ElasticsearchTimeoutException("Reindex request timed out"); + } + for (BulkItemResponse.Failure failure : response.getBulkFailures()) { + if (failure.getStatus().getStatus() > status.getStatus()) { + status = failure.getStatus(); + cause = failure.getCause(); + } + } + for (ScrollableHitSource.SearchFailure failure : response.getSearchFailures()) { + RestStatus failureStatus = ExceptionsHelper.status(failure.getReason()); + if (failureStatus.getStatus() > status.getStatus()) { + status = failureStatus; + cause = failure.getReason(); + } + } + return new Tuple<>(status, cause); + } + + /** + * Upgrade the indices given in the request.
+ * + * @param client The client to use when making calls + * @param request The upgrade request + * @param state The current cluster state + * @param listener The listener to alert when actions have completed + */ + public void upgrade(Client client, MlUpgradeAction.Request request, ClusterState state, + ActionListener listener) { + try { + validateMinNodeVersion(state); + String[] concreteIndices = indexNameExpressionResolver.concreteIndexNames(state, request.indicesOptions(), request.indices()); + MetaData metaData = state.getMetaData(); + + List indicesToUpgrade = Arrays.stream(concreteIndices) + .filter(indexName -> shouldUpgrade.test(metaData.index(indexName))) + .collect(Collectors.toList()); + + // All the internal indices are up to date + if (indicesToUpgrade.isEmpty()) { + listener.onResponse(new AcknowledgedResponse(true)); + return; + } + + IndexNameAndAliasProvider indexNameAndAliasProvider = new IndexNameAndAliasProvider(indicesToUpgrade, metaData); + Exception validationException = indexNameAndAliasProvider.validate(metaData, shouldUpgrade); + if (validationException != null) { + listener.onFailure(validationException); + return; + } + + // <7> Now that we have deleted the old indices, we are complete, alert the user + ActionListener deleteIndicesListener = ActionListener.wrap( + listener::onResponse, + error -> { + String msg = "Failed to delete old indices: " + Strings.collectionToCommaDelimitedString(indicesToUpgrade); + logger.error(msg, error); + listener.onFailure(new ElasticsearchException(msg, error)); + } + ); + + // <6> Now that aliases are moved, we need to delete the old indices + ActionListener readAliasListener = ActionListener.wrap( + resp -> deleteOldIndices(client, indicesToUpgrade, deleteIndicesListener), + error -> { + String msg = "Failed adjusting aliases from old indices to new."; + logger.error(msg, error); + listener.onFailure(new ElasticsearchException(msg, error)); + } + ); + + // <5> Documents are now reindexed, time to move read aliases + ActionListener reindexListener = ActionListener.wrap( + resp -> + // Need to make indices writable again so that the aliases can be removed from them + removeReadOnlyBlock(client, indicesToUpgrade, + ActionListener.wrap( + rrob -> adjustAliases(client, + indexNameAndAliasProvider.oldIndicesWithReadAliases(), + indexNameAndAliasProvider.newReadIndicesWithReadAliases(), + readAliasListener), + rrobFailure -> { + String msg = "Failed making old indices writable again so that aliases can be moved."; + logger.error(msg, rrobFailure); + listener.onFailure(new ElasticsearchException(msg, rrobFailure)); + }) + ), + error -> { + logger.error("Failed to reindex old read-only indices", error); + removeReadOnlyBlock(client, indicesToUpgrade, ActionListener.wrap( + empty -> listener.onFailure(error), + removeReadOnlyBlockError -> { + String msg = "Failed making old indices read/write again after failing to reindex: " + error.getMessage(); + logger.error(msg, removeReadOnlyBlockError); + listener.onFailure(new ElasticsearchException(msg, removeReadOnlyBlockError)); + } + )); + } + ); + + // <4> Old indices are now read-only, time to reindex + ActionListener readOnlyListener = ActionListener.wrap( + ack -> reindexOldReadIndicesToNewIndices(client, indexNameAndAliasProvider.needsReindex(), request, reindexListener), + listener::onFailure + ); + + // <3> Set old indices to read-only + ActionListener writeAliasesMovedListener = ActionListener.wrap( + resp -> setReadOnlyBlock(client, indicesToUpgrade, readOnlyListener), +
listener::onFailure + ); + + // <2> Move write index alias to new write indices + ActionListener createWriteIndicesAndSetReadAliasListener = ActionListener.wrap( + resp -> adjustAliases(client, + indexNameAndAliasProvider.oldIndicesWithWriteAliases(), + indexNameAndAliasProvider.newWriteIndicesWithWriteAliases(), + writeAliasesMovedListener), + listener::onFailure + ); + + // <1> Create the new write indices and set the read aliases to include them + createNewWriteIndicesIfNecessary(client, metaData, indexNameAndAliasProvider.newWriteIndices(), + ActionListener.wrap( + indicesCreated -> adjustAliases(client, + Collections.emptyMap(), + indexNameAndAliasProvider.newWriteIndicesWithReadAliases(), + createWriteIndicesAndSetReadAliasListener), + listener::onFailure + )); + + } catch (Exception e) { + listener.onFailure(e); + } + + } + + private void createNewWriteIndicesIfNecessary(Client client, + MetaData metaData, + Collection newWriteIndices, + ActionListener createIndexListener) { + TypedChainTaskExecutor chainTaskExecutor = + new TypedChainTaskExecutor<>( + client.threadPool().executor(executor), + (createIndexResponse -> true), // We always want to complete all our tasks + (exception -> + // Short circuit execution IF the exception is NOT a ResourceAlreadyExistsException + // This should be rare, as it requires the index to be created between our previous check and this exception + exception instanceof ResourceAlreadyExistsException == false + )); + newWriteIndices.forEach((index) -> { + // If the index already exists, don't try to create it + // We have already verified that IF this index exists, it does not require upgrading + // So, if it was created between that check and this one, we can assume it is the correct version as it was JUST created + if (metaData.hasIndex(index) == false) { + CreateIndexRequest request = new CreateIndexRequest(index); + chainTaskExecutor.add(listener -> + executeAsyncWithOrigin(client.threadPool().getThreadContext(), + ML_ORIGIN, + request, + listener, + client.admin().indices()::create)); + } + }); + + chainTaskExecutor.execute(ActionListener.wrap( + createIndexResponses -> createIndexListener.onResponse(true), + createIndexListener::onFailure + )); + } + + /** + * Makes the indices read-only if they are not read-only already + */ + private void setReadOnlyBlock(Client client, List indices, ActionListener listener) { + Settings settings = Settings.builder().put(IndexMetaData.INDEX_READ_ONLY_SETTING.getKey(), true).build(); + UpdateSettingsRequest request = new UpdateSettingsRequest(indices.toArray(new String[0])); + request.settings(settings); + executeAsyncWithOrigin(client.threadPool().getThreadContext(), + ML_ORIGIN, + request, + listener, + client.admin().indices()::updateSettings); + } + + private void removeReadOnlyBlock(Client client, List indices, + ActionListener listener) { + Settings settings = Settings.builder().put(IndexMetaData.INDEX_READ_ONLY_SETTING.getKey(), false).build(); + UpdateSettingsRequest request = new UpdateSettingsRequest(indices.toArray(new String[0])); + request.settings(settings); + executeAsyncWithOrigin(client.threadPool().getThreadContext(), + ML_ORIGIN, + request, + listener, + client.admin().indices()::updateSettings); + } + + private void reindexOldReadIndicesToNewIndices(Client client, + Map reindexIndices, + MlUpgradeAction.Request request, + ActionListener listener) { + TypedChainTaskExecutor chainTaskExecutor = + new TypedChainTaskExecutor<>( + client.threadPool().executor(executor), + (createIndexResponse)
-> { // If there are errors in the reindex, we should stop + Tuple status = getStatusAndCause(createIndexResponse); + return status.v1().equals(RestStatus.OK); + }, + (exception -> true)); // Short circuit and call onFailure for any exception + + List newIndices = new ArrayList<>(reindexIndices.size()); + reindexIndices.forEach((oldIndex, newIndex) -> { + ReindexRequest reindexRequest = new ReindexRequest(); + reindexRequest.setSourceBatchSize(request.getReindexBatchSize()); + reindexRequest.setSourceIndices(oldIndex); + reindexRequest.setDestIndex(newIndex); + reindexRequest.setSourceDocTypes(ElasticsearchMappings.DOC_TYPE); + reindexRequest.setDestDocType(ElasticsearchMappings.DOC_TYPE); + // Don't worry if these indices already exist, we validated settings.index.created.version earlier + reindexRequest.setAbortOnVersionConflict(false); + // If the document already exists in the new index, we don't want to update or overwrite it, as we are pulling from "old data" + reindexRequest.setDestOpType(DocWriteRequest.OpType.CREATE.getLowercase()); + newIndices.add(newIndex); + chainTaskExecutor.add(chainedListener -> + executeAsyncWithOrigin(client, + ML_ORIGIN, + ReindexAction.INSTANCE, + reindexRequest, + chainedListener)); + }); + + chainTaskExecutor.execute(ActionListener.wrap( + bulkScrollingResponses -> { + BulkByScrollResponse response = bulkScrollingResponses.get(bulkScrollingResponses.size() - 1); + Tuple status = getStatusAndCause(response); + if (status.v1().equals(RestStatus.OK)) { + listener.onResponse(true); + } else { + logger.error("Failed to reindex old results indices.", status.v2()); + listener.onFailure(new ElasticsearchException("Failed to reindex old results indices.", status.v2())); + } + }, + failure -> { + List createdIndices = newIndices.subList(0, chainTaskExecutor.getCollectedResponses().size()); + logger.error( + "Failed to reindex all old read indices.
Successfully reindexed: [" + + Strings.collectionToCommaDelimitedString(createdIndices) + "]", + failure); + listener.onFailure(failure); + } + )); + + } + + private void deleteOldIndices(Client client, + List oldIndices, + ActionListener deleteIndicesListener) { + DeleteIndexRequest request = new DeleteIndexRequest(oldIndices.toArray(new String[0])); + request.indicesOptions(IndicesOptions.LENIENT_EXPAND_OPEN); + executeAsyncWithOrigin(client.threadPool().getThreadContext(), + ML_ORIGIN, + request, + deleteIndicesListener, + client.admin().indices()::delete); + } + + private void adjustAliases(Client client, + Map> oldAliases, + Map> newAliases, + ActionListener indicesAliasListener) { + IndicesAliasesRequest indicesAliasesRequest = new IndicesAliasesRequest(); + oldAliases.forEach((oldIndex, aliases) -> + { + if (aliases.isEmpty() == false) { //if the aliases are empty, that means there are none to remove + indicesAliasesRequest.addAliasAction(IndicesAliasesRequest + .AliasActions + .remove() + .index(oldIndex) + .aliases(aliases.stream().map(Alias::name).toArray(String[]::new))); + } + } + ); + newAliases.forEach((newIndex, aliases) -> + aliases.forEach(alias -> { + IndicesAliasesRequest.AliasActions action = IndicesAliasesRequest.AliasActions.add().index(newIndex); + if (alias.filter() != null) { + action.filter(alias.filter()); + } + action.alias(alias.name()); + indicesAliasesRequest.addAliasAction(action); + }) + ); + executeAsyncWithOrigin(client.threadPool().getThreadContext(), + ML_ORIGIN, + indicesAliasesRequest, + indicesAliasListener, + client.admin().indices()::aliases); + } + + + private static class IndexNameAndAliasProvider { + + private final List oldIndices; + private final Map> writeAliases = new HashMap<>(); + private final Map> readAliases = new HashMap<>(); + + private IndexNameAndAliasProvider(List oldIndices, MetaData metaData) { + this.oldIndices = oldIndices; + oldIndices.forEach(index -> { + IndexMetaData indexMetaData = metaData.index(index); + List writes = new ArrayList<>(); + List reads = new ArrayList<>(); + indexMetaData.getAliases().forEach(aliasCursor -> { + Alias alias = new Alias(aliasCursor.value.alias()); + if (aliasCursor.value.filteringRequired()) { + alias.filter(aliasCursor.value.getFilter().string()); //Set the read alias jobId filter + } + if (alias.name().contains(".write-")) { + writes.add(alias); + } else { + reads.add(alias); + } + }); + + writeAliases.put(index, writes); + readAliases.put(index, reads); + }); + } + + private Exception validate(MetaData metaData, Predicate shouldUpgrade) { + for (String index : oldIndices) { + String newWriteName = newWriteName(index); + // If the "new" indices exist, either they were created from a previous run of the upgrade process or the end user + if (validNewIndex(metaData, newWriteName, shouldUpgrade) == false) { + return new IllegalStateException("Index [" + newWriteName + "] already exists and is not the current version."); + } + + String newReadName = newReadName(index); + if (validNewIndex(metaData, newReadName, shouldUpgrade) == false) { + return new IllegalStateException("Index [" + newReadName + "] already exists and is not the current version."); + } + } + return null; + } + + private String newReadName(String oldIndexName) { + return oldIndexName + "-" + INDEX_VERSION + "r"; + } + + private String newWriteName(String oldIndexName) { + return oldIndexName + "-" + INDEX_VERSION; + } + + private List newWriteIndices() { + return 
oldIndices.stream().map(this::newWriteName).collect(Collectors.toList()); + } + + private List readAliases(String oldIndex) { + return readAliases.get(oldIndex); + } + + private List writeAliases(String oldIndex) { + return writeAliases.get(oldIndex); + } + + private Map> newWriteIndicesWithReadAliases() { + return oldIndices.stream().collect(Collectors.toMap(this::newWriteName, this::readAliases)); + } + + private Map> oldIndicesWithWriteAliases() { + return writeAliases; + } + + private Map> newWriteIndicesWithWriteAliases() { + return oldIndices.stream().collect(Collectors.toMap(this::newWriteName, this::writeAliases)); + } + + private Map> oldIndicesWithReadAliases() { + return readAliases; + } + + private Map> newReadIndicesWithReadAliases() { + return oldIndices.stream().collect(Collectors.toMap(this::newReadName, this::readAliases)); + } + + private Map needsReindex() { + return oldIndices.stream().collect(Collectors.toMap(Function.identity(), this::newReadName)); + } + } +} diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteJobAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteJobAction.java index b186ea2184845..876f2cd1aaccd 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteJobAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteJobAction.java @@ -14,7 +14,6 @@ import org.elasticsearch.action.admin.indices.alias.get.GetAliasesResponse; import org.elasticsearch.action.admin.indices.delete.DeleteIndexRequest; import org.elasticsearch.action.bulk.BulkItemResponse; -import org.elasticsearch.action.bulk.BulkResponse; import org.elasticsearch.action.search.SearchAction; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.search.SearchResponse; @@ -387,7 +386,7 @@ private void deleteJobDocuments(ParentTaskAssigningClient parentTaskClient, Stri failureHandler); // Step 2. 
-        ActionListener<BulkResponse> deleteStateHandler = ActionListener.wrap(
+        ActionListener<BulkByScrollResponse> deleteStateHandler = ActionListener.wrap(
                 bulkResponse -> deleteQuantiles(parentTaskClient, jobId, deleteQuantilesHandler),
                 failureHandler);
@@ -397,7 +396,7 @@ private void deleteJobDocuments(ParentTaskAssigningClient parentTaskClient, Stri
     private void deleteQuantiles(ParentTaskAssigningClient parentTaskClient, String jobId, ActionListener<Boolean> finishedHandler) {
         // The quantiles type and doc ID changed in v5.5 so delete both the old and new format
-        DeleteByQueryRequest request = new DeleteByQueryRequest(AnomalyDetectorsIndex.jobStateIndexName());
+        DeleteByQueryRequest request = new DeleteByQueryRequest(AnomalyDetectorsIndex.jobStateIndexPattern());
         // Just use ID here, not type, as trying to delete different types spams the logs with an exception stack trace
         IdsQueryBuilder query = new IdsQueryBuilder().addIds(Quantiles.documentId(jobId));
         request.setQuery(query);
@@ -417,7 +416,7 @@ private void deleteQuantiles(ParentTaskAssigningClient parentTaskClient, String
         }));
     }
-    private void deleteModelState(ParentTaskAssigningClient parentTaskClient, String jobId, ActionListener<BulkResponse> listener) {
+    private void deleteModelState(ParentTaskAssigningClient parentTaskClient, String jobId, ActionListener<BulkByScrollResponse> listener) {
         GetModelSnapshotsAction.Request request = new GetModelSnapshotsAction.Request(jobId, null);
         request.setPageParams(new PageParams(0, MAX_SNAPSHOTS_TO_DELETE));
         executeAsyncWithOrigin(parentTaskClient, ML_ORIGIN, GetModelSnapshotsAction.INSTANCE, request, ActionListener.wrap(
@@ -432,7 +431,7 @@ private void deleteModelState(ParentTaskAssigningClient parentTaskClient, String
     private void deleteCategorizerState(ParentTaskAssigningClient parentTaskClient, String jobId, int docNum,
                                         ActionListener<Boolean> finishedHandler) {
         // The categorizer state type and doc ID changed in v5.5 so delete both the old and new format
-        DeleteByQueryRequest request = new DeleteByQueryRequest(AnomalyDetectorsIndex.jobStateIndexName());
+        DeleteByQueryRequest request = new DeleteByQueryRequest(AnomalyDetectorsIndex.jobStateIndexPattern());
         // Just use ID here, not type, as trying to delete different types spams the logs with an exception stack trace
         IdsQueryBuilder query = new IdsQueryBuilder().addIds(CategorizerState.documentId(jobId, docNum));
         request.setQuery(query);
diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteModelSnapshotAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteModelSnapshotAction.java
index 61592e4ddfa77..9904ecad0999f 100644
--- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteModelSnapshotAction.java
+++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteModelSnapshotAction.java
@@ -7,12 +7,12 @@
 import org.elasticsearch.ResourceNotFoundException;
 import org.elasticsearch.action.ActionListener;
-import org.elasticsearch.action.bulk.BulkResponse;
 import org.elasticsearch.action.support.ActionFilters;
 import org.elasticsearch.action.support.HandledTransportAction;
 import org.elasticsearch.action.support.master.AcknowledgedResponse;
 import org.elasticsearch.client.Client;
 import org.elasticsearch.common.inject.Inject;
+import org.elasticsearch.index.reindex.BulkByScrollResponse;
 import org.elasticsearch.tasks.Task;
 import org.elasticsearch.transport.TransportService;
 import org.elasticsearch.xpack.core.ml.action.DeleteModelSnapshotAction;
@@ -79,9 +79,9 @@ protected void doExecute(Task task, DeleteModelSnapshotAction.Request request,
         // Delete the snapshot and any associated state files
         JobDataDeleter deleter = new JobDataDeleter(client, request.getJobId());
         deleter.deleteModelSnapshots(Collections.singletonList(deleteCandidate),
-                new ActionListener<BulkResponse>() {
+                new ActionListener<BulkByScrollResponse>() {
                     @Override
-                    public void onResponse(BulkResponse bulkResponse) {
+                    public void onResponse(BulkByScrollResponse bulkResponse) {
                         String msg = Messages.getMessage(Messages.JOB_AUDIT_SNAPSHOT_DELETED, deleteCandidate.getSnapshotId(),
                                 deleteCandidate.getDescription());
diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportFinalizeJobExecutionAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportFinalizeJobExecutionAction.java
index d6c03d6c93fbf..9d76844121cbb 100644
--- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportFinalizeJobExecutionAction.java
+++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportFinalizeJobExecutionAction.java
@@ -26,7 +26,7 @@
 import org.elasticsearch.xpack.core.ml.job.persistence.AnomalyDetectorsIndex;
 import org.elasticsearch.xpack.core.ml.job.persistence.ElasticsearchMappings;
 import org.elasticsearch.xpack.ml.MachineLearning;
-import org.elasticsearch.xpack.ml.utils.ChainTaskExecutor;
+import org.elasticsearch.xpack.ml.utils.VoidChainTaskExecutor;
 import java.util.Collections;
 import java.util.Date;
@@ -65,7 +65,7 @@ protected void masterOperation(FinalizeJobExecutionAction.Request request, Clust
         String jobIdString = String.join(",", request.getJobIds());
         logger.debug("finalizing jobs [{}]", jobIdString);
-        ChainTaskExecutor chainTaskExecutor = new ChainTaskExecutor(threadPool.executor(
+        VoidChainTaskExecutor voidChainTaskExecutor = new VoidChainTaskExecutor(threadPool.executor(
                 MachineLearning.UTILITY_THREAD_POOL_NAME), true);
         Map<String, Object> update = Collections.singletonMap(Job.FINISHED_TIME.getPreferredName(), new Date());
@@ -77,7 +77,7 @@ protected void masterOperation(FinalizeJobExecutionAction.Request request, Clust
             updateRequest.doc(update);
             updateRequest.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE);
-            chainTaskExecutor.add(chainedListener -> {
+            voidChainTaskExecutor.add(chainedListener -> {
                 executeAsyncWithOrigin(client, ML_ORIGIN, UpdateAction.INSTANCE, updateRequest, ActionListener.wrap(
                         updateResponse -> chainedListener.onResponse(null),
                         chainedListener::onFailure
@@ -85,8 +85,8 @@ protected void masterOperation(FinalizeJobExecutionAction.Request request, Clust
             });
         }
-        chainTaskExecutor.execute(ActionListener.wrap(
-                aVoid -> {
+        voidChainTaskExecutor.execute(ActionListener.wrap(
+                aVoids -> {
                     logger.debug("finalized job [{}]", jobIdString);
                     listener.onResponse(new AcknowledgedResponse(true));
                 },
diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetJobsStatsAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetJobsStatsAction.java
index c9a6d717f319f..da24af0ca74fe 100644
--- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetJobsStatsAction.java
+++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetJobsStatsAction.java
@@ -37,6 +37,7 @@
 import java.time.Duration;
 import java.util.ArrayList;
 import java.util.Collections;
+import java.util.Comparator;
 import java.util.List;
 import java.util.Optional;
 import java.util.Set;
@@ -91,6 +92,7 @@ protected GetJobsStatsAction.Response
newResponse(GetJobsStatsAction.Request req for (QueryPage task : tasks) { stats.addAll(task.results()); } + Collections.sort(stats, Comparator.comparing(GetJobsStatsAction.Response.JobStats::getJobId)); return new GetJobsStatsAction.Response(taskOperationFailures, failedNodeExceptions, new QueryPage<>(stats, stats.size(), Job.RESULTS_FIELD)); } @@ -148,6 +150,7 @@ void gatherStatsForClosedJobs(GetJobsStatsAction.Request request, GetJobsStatsAc if (counter.decrementAndGet() == 0) { List results = response.getResponse().results(); results.addAll(jobStats.asList()); + Collections.sort(results, Comparator.comparing(GetJobsStatsAction.Response.JobStats::getJobId)); listener.onResponse(new GetJobsStatsAction.Response(response.getTaskFailures(), response.getNodeFailures(), new QueryPage<>(results, results.size(), Job.RESULTS_FIELD))); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportMlUpgradeAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportMlUpgradeAction.java new file mode 100644 index 0000000000000..2b676277aa690 --- /dev/null +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportMlUpgradeAction.java @@ -0,0 +1,79 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.ml.action; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.master.AcknowledgedResponse; +import org.elasticsearch.action.support.master.TransportMasterNodeReadAction; +import org.elasticsearch.client.Client; +import org.elasticsearch.client.ParentTaskAssigningClient; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.block.ClusterBlockException; +import org.elasticsearch.cluster.block.ClusterBlockLevel; +import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.tasks.TaskId; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xpack.core.ml.action.MlUpgradeAction; +import org.elasticsearch.xpack.ml.ResultsIndexUpgradeService; + +import static org.elasticsearch.xpack.ml.ResultsIndexUpgradeService.wasIndexCreatedInCurrentMajorVersion; + +public class TransportMlUpgradeAction + extends TransportMasterNodeReadAction { + + private final Client client; + private final ResultsIndexUpgradeService resultsIndexUpgradeService; + + @Inject + public TransportMlUpgradeAction(TransportService transportService, ClusterService clusterService, + ThreadPool threadPool, ActionFilters actionFilters, Client client, + IndexNameExpressionResolver indexNameExpressionResolver) { + super(MlUpgradeAction.NAME, transportService, clusterService, threadPool, + actionFilters, MlUpgradeAction.Request::new, indexNameExpressionResolver); + this.client = client; + this.resultsIndexUpgradeService = new ResultsIndexUpgradeService(indexNameExpressionResolver, + executor(), + indexMetadata -> wasIndexCreatedInCurrentMajorVersion(indexMetadata) == false); + } + + @Override + protected void masterOperation(Task task, MlUpgradeAction.Request request, ClusterState 
state, + ActionListener listener) { + TaskId taskId = new TaskId(clusterService.localNode().getId(), task.getId()); + ParentTaskAssigningClient parentAwareClient = new ParentTaskAssigningClient(client, taskId); + try { + resultsIndexUpgradeService.upgrade(parentAwareClient, request, state, listener); + } catch (Exception e) { + listener.onFailure(e); + } + } + + @Override + protected final void masterOperation(MlUpgradeAction.Request request, ClusterState state, + ActionListener listener) { + throw new UnsupportedOperationException("the task parameter is required"); + } + + @Override + protected String executor() { + return ThreadPool.Names.SAME; + } + + @Override + protected AcknowledgedResponse newResponse() { + return new AcknowledgedResponse(); + } + + @Override + protected ClusterBlockException checkBlock(MlUpgradeAction.Request request, ClusterState state) { + return state.blocks().globalBlockedException(ClusterBlockLevel.METADATA_READ); + } +} diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportOpenJobAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportOpenJobAction.java index 21f97cbb5dc99..b7b4fb3aad4c9 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportOpenJobAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportOpenJobAction.java @@ -16,6 +16,7 @@ import org.elasticsearch.action.admin.indices.mapping.put.PutMappingAction; import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest; import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.action.support.master.AcknowledgedResponse; import org.elasticsearch.action.support.master.TransportMasterNodeAction; import org.elasticsearch.client.Client; @@ -55,9 +56,6 @@ import org.elasticsearch.xpack.core.ml.MlTasks; import org.elasticsearch.xpack.core.ml.action.FinalizeJobExecutionAction; import org.elasticsearch.xpack.core.ml.action.OpenJobAction; -import org.elasticsearch.xpack.core.ml.action.PutJobAction; -import org.elasticsearch.xpack.core.ml.action.UpdateJobAction; -import org.elasticsearch.xpack.core.ml.job.config.AnalysisLimits; import org.elasticsearch.xpack.core.ml.job.config.DetectionRule; import org.elasticsearch.xpack.core.ml.job.config.Job; import org.elasticsearch.xpack.core.ml.job.config.JobState; @@ -228,31 +226,13 @@ static PersistentTasksCustomMetaData.Assignment selectLeastLoadedMlNode(String j Collection> assignedTasks = persistentTasks.findTasks( MlTasks.JOB_TASK_NAME, task -> node.getId().equals(task.getExecutorNode())); for (PersistentTasksCustomMetaData.PersistentTask assignedTask : assignedTasks) { - JobTaskState jobTaskState = (JobTaskState) assignedTask.getState(); - JobState jobState; - if (jobTaskState == null) { - // executor node didn't have the chance to set job status to OPENING - ++numberOfAllocatingJobs; - jobState = JobState.OPENING; - } else { - jobState = jobTaskState.getState(); - if (jobTaskState.isStatusStale(assignedTask)) { - // the job is re-locating - if (jobState == JobState.CLOSING) { - // previous executor node failed while the job was closing - it won't - // be reopened, so consider it CLOSED for resource usage purposes - jobState = JobState.CLOSED; - } else if (jobState != JobState.FAILED) { - // previous executor node failed and current executor node didn't - // have the chance to set job status to OPENING - ++numberOfAllocatingJobs; - jobState = 
JobState.OPENING; - } - } - } + JobState jobState = MlTasks.getJobStateModifiedForReassignments(assignedTask); if (jobState.isAnyOf(JobState.CLOSED, JobState.FAILED) == false) { // Don't count CLOSED or FAILED jobs, as they don't consume native memory ++numberOfAssignedJobs; + if (jobState == JobState.OPENING) { + ++numberOfAllocatingJobs; + } OpenJobAction.JobParams params = (OpenJobAction.JobParams) assignedTask.getParams(); Long jobMemoryRequirement = memoryTracker.getJobMemoryRequirement(params.getJobId()); if (jobMemoryRequirement == null) { @@ -389,13 +369,14 @@ static String nodeNameAndMlAttributes(DiscoveryNode node) { static String[] indicesOfInterest(String resultsIndex) { if (resultsIndex == null) { - return new String[]{AnomalyDetectorsIndex.jobStateIndexName(), MlMetaIndex.INDEX_NAME}; + return new String[]{AnomalyDetectorsIndex.jobStateIndexPattern(), MlMetaIndex.INDEX_NAME}; } - return new String[]{AnomalyDetectorsIndex.jobStateIndexName(), resultsIndex, MlMetaIndex.INDEX_NAME}; + return new String[]{AnomalyDetectorsIndex.jobStateIndexPattern(), resultsIndex, MlMetaIndex.INDEX_NAME}; } static List verifyIndicesPrimaryShardsAreActive(String resultsIndex, ClusterState clusterState) { - String[] indices = indicesOfInterest(resultsIndex); + IndexNameExpressionResolver resolver = new IndexNameExpressionResolver(); + String[] indices = resolver.concreteIndexNames(clusterState, IndicesOptions.lenientExpandOpen(), indicesOfInterest(resultsIndex)); List unavailableIndices = new ArrayList<>(indices.length); for (String index : indices) { // Indices are created on demand from templates. @@ -540,50 +521,16 @@ public void onFailure(Exception e) { ); // Tell the job tracker to refresh the memory requirement for this job and all other jobs that have persistent tasks - ActionListener jobUpdateListener = ActionListener.wrap( + ActionListener jobUpdateListener = ActionListener.wrap( response -> memoryTracker.refreshJobMemoryAndAllOthers(jobParams.getJobId(), memoryRequirementRefreshListener), listener::onFailure ); - // Increase the model memory limit for 6.1 - 6.3 jobs - ActionListener missingMappingsListener = ActionListener.wrap( - response -> { - Job job = jobParams.getJob(); - if (job != null) { - Version jobVersion = job.getJobVersion(); - if (jobVersion != null && - (jobVersion.onOrAfter(Version.V_6_1_0) && jobVersion.before(Version.V_6_3_0))) { - // Increase model memory limit if < 512MB - if (job.getAnalysisLimits() != null && job.getAnalysisLimits().getModelMemoryLimit() != null && - job.getAnalysisLimits().getModelMemoryLimit() < 512L) { - - long updatedModelMemoryLimit = (long) (job.getAnalysisLimits().getModelMemoryLimit() * 1.3); - AnalysisLimits limits = new AnalysisLimits(updatedModelMemoryLimit, - job.getAnalysisLimits().getCategorizationExamplesLimit()); - - JobUpdate update = new JobUpdate.Builder(job.getId()).setJobVersion(Version.CURRENT) - .setAnalysisLimits(limits).build(); - UpdateJobAction.Request updateRequest = UpdateJobAction.Request.internal(job.getId(), update); - executeAsyncWithOrigin(client, ML_ORIGIN, UpdateJobAction.INSTANCE, updateRequest, - jobUpdateListener); - } else { - jobUpdateListener.onResponse(null); - } - } - else { - jobUpdateListener.onResponse(null); - } - } else { - jobUpdateListener.onResponse(null); - } - }, listener::onFailure - ); - // Try adding state doc mapping ActionListener resultsPutMappingHandler = ActionListener.wrap( response -> { addDocMappingIfMissing(AnomalyDetectorsIndex.jobStateIndexName(), 
ElasticsearchMappings::stateMapping, - state, missingMappingsListener); + state, jobUpdateListener); }, listener::onFailure ); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/DatafeedJob.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/DatafeedJob.java index 91ce79b4c7583..35878f1199586 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/DatafeedJob.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/DatafeedJob.java @@ -31,7 +31,8 @@ import org.elasticsearch.xpack.core.ml.job.config.DataDescription; import org.elasticsearch.xpack.core.ml.job.messages.Messages; import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.DataCounts; -import org.elasticsearch.xpack.core.security.user.SystemUser; +import org.elasticsearch.xpack.core.ml.job.results.Bucket; +import org.elasticsearch.xpack.core.security.user.XPackUser; import org.elasticsearch.xpack.ml.datafeed.delayeddatacheck.DelayedDataDetector; import org.elasticsearch.xpack.ml.datafeed.delayeddatacheck.DelayedDataDetectorFactory.BucketWithMissingData; import org.elasticsearch.xpack.ml.datafeed.extractor.DataExtractorFactory; @@ -189,24 +190,27 @@ private void checkForMissingDataIfNecessary() { long totalRecordsMissing = missingDataBuckets.stream() .mapToLong(BucketWithMissingData::getMissingDocumentCount) .sum(); - Date endTime = missingDataBuckets.get(missingDataBuckets.size() - 1).getBucket().getTimestamp(); - Annotation annotation = createAnnotation(missingDataBuckets.get(0).getBucket().getTimestamp(), - endTime, - totalRecordsMissing); + Bucket lastBucket = missingDataBuckets.get(missingDataBuckets.size() - 1).getBucket(); + // Get the end of the last bucket and make it milliseconds + Date endTime = new Date((lastBucket.getEpoch() + lastBucket.getBucketSpan()) * 1000); + + String msg = Messages.getMessage(Messages.JOB_AUDIT_DATAFEED_MISSING_DATA, totalRecordsMissing, + XContentElasticsearchExtension.DEFAULT_DATE_PRINTER.print(lastBucket.getTimestamp().getTime())); + + Annotation annotation = createAnnotation(missingDataBuckets.get(0).getBucket().getTimestamp(), endTime, msg); // Have we an annotation that covers the same area with the same message? // Cannot use annotation.equals(other) as that checks createTime if (lastDataCheckAnnotation != null && annotation.getAnnotation().equals(lastDataCheckAnnotation.getAnnotation()) && annotation.getTimestamp().equals(lastDataCheckAnnotation.getTimestamp()) - && annotation.getEndTimestamp().equals(lastDataCheckAnnotation.getTimestamp())) { + && annotation.getEndTimestamp().equals(lastDataCheckAnnotation.getEndTimestamp())) { return; } // Creating a warning in addition to updating/creating our annotation. This allows the issue to be plainly visible // in the job list page. 
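// Aside, not part of the patch: a minimal, self-contained sketch of the
// duplicate-annotation check fixed in the hunk above. The corrected condition
// compares the message, the start timestamp AND the end timestamp; the old
// code mistakenly compared the new end timestamp against the previous START
// timestamp. AnnotationSketch and sameWindowAndMessage are hypothetical
// stand-ins, not the real Annotation API.
import java.util.Date;
import java.util.Objects;

final class AnnotationSketch {
    final String message; // audit/annotation text
    final Date start;     // timestamp of the first bucket with missing data
    final Date end;       // end of the last bucket with missing data

    AnnotationSketch(String message, Date start, Date end) {
        this.message = message;
        this.start = start;
        this.end = end;
    }

    // Skip re-indexing only when the message and BOTH window bounds are unchanged.
    boolean sameWindowAndMessage(AnnotationSketch other) {
        return other != null
            && Objects.equals(message, other.message)
            && Objects.equals(start, other.start)
            && Objects.equals(end, other.end); // was compared to other.start (the bug)
    }
}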
- auditor.warning(jobId, Messages.getMessage(Messages.JOB_AUDIT_DATAFEED_MISSING_DATA, totalRecordsMissing, - XContentElasticsearchExtension.DEFAULT_DATE_PRINTER.print(endTime.getTime()))); + auditor.warning(jobId, msg); if (lastDataCheckAnnotationId != null) { updateAnnotation(annotation); @@ -217,17 +221,16 @@ private void checkForMissingDataIfNecessary() { } } - private Annotation createAnnotation(Date startTime, Date endTime, long recordsMissing) { - String msg = Messages.getMessage(Messages.JOB_AUDIT_DATAFEED_MISSING_DATA, recordsMissing, - XContentElasticsearchExtension.DEFAULT_DATE_PRINTER.print(endTime.getTime())); + private Annotation createAnnotation(Date startTime, Date endTime, String msg) { + Date currentTime = new Date(currentTimeSupplier.get()); return new Annotation(msg, - new Date(currentTimeSupplier.get()), - SystemUser.NAME, + currentTime, + XPackUser.NAME, startTime, endTime, jobId, - null, - null, + currentTime, + XPackUser.NAME, "annotation"); } @@ -235,9 +238,11 @@ private String addAndSetDelayedDataAnnotation(Annotation annotation) { try (XContentBuilder xContentBuilder = annotation.toXContent(XContentFactory.jsonBuilder(), ToXContent.EMPTY_PARAMS)) { IndexRequest request = new IndexRequest(AnnotationIndex.WRITE_ALIAS_NAME); request.source(xContentBuilder); - IndexResponse response = client.index(request).actionGet(); - lastDataCheckAnnotation = annotation; - return response.getId(); + try (ThreadContext.StoredContext ignore = stashWithOrigin(client.threadPool().getThreadContext(), ML_ORIGIN)) { + IndexResponse response = client.index(request).actionGet(); + lastDataCheckAnnotation = annotation; + return response.getId(); + } } catch (IOException ex) { String errorMessage = "[" + jobId + "] failed to create annotation for delayed data checker."; LOGGER.error(errorMessage, ex); @@ -248,7 +253,7 @@ private String addAndSetDelayedDataAnnotation(Annotation annotation) { private void updateAnnotation(Annotation annotation) { Annotation updatedAnnotation = new Annotation(lastDataCheckAnnotation); - updatedAnnotation.setModifiedUsername(SystemUser.NAME); + updatedAnnotation.setModifiedUsername(XPackUser.NAME); updatedAnnotation.setModifiedTime(new Date(currentTimeSupplier.get())); updatedAnnotation.setAnnotation(annotation.getAnnotation()); updatedAnnotation.setTimestamp(annotation.getTimestamp()); @@ -257,8 +262,10 @@ private void updateAnnotation(Annotation annotation) { IndexRequest indexRequest = new IndexRequest(AnnotationIndex.WRITE_ALIAS_NAME); indexRequest.id(lastDataCheckAnnotationId); indexRequest.source(xContentBuilder); - client.index(indexRequest).actionGet(); - lastDataCheckAnnotation = updatedAnnotation; + try (ThreadContext.StoredContext ignore = stashWithOrigin(client.threadPool().getThreadContext(), ML_ORIGIN)) { + client.index(indexRequest).actionGet(); + lastDataCheckAnnotation = updatedAnnotation; + } } catch (IOException ex) { String errorMessage = "[" + jobId + "] failed to update annotation for delayed data checker."; LOGGER.error(errorMessage, ex); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/DatafeedManager.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/DatafeedManager.java index 4a0f3da060d02..e004a718b13fa 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/DatafeedManager.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/DatafeedManager.java @@ -27,9 +27,11 @@ import org.elasticsearch.xpack.core.ml.action.StartDatafeedAction; 
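// Aside, not part of the patch: the hunks above wrap the annotation index
// requests in stashWithOrigin(threadContext, ML_ORIGIN) so they run under the
// ML origin, with the caller's context restored on exit. Below is a
// self-contained analogue of that stash/restore pattern; OriginStash is a
// hypothetical stand-in for ThreadContext.StoredContext, not a real class.
final class OriginStash implements AutoCloseable {
    private static final ThreadLocal<String> ORIGIN = ThreadLocal.withInitial(() -> "user");
    private final String previous;

    OriginStash(String origin) {  // stash: remember the current origin, then override it
        this.previous = ORIGIN.get();
        ORIGIN.set(origin);
    }

    static String current() {
        return ORIGIN.get();
    }

    @Override
    public void close() {         // restore: runs even if the wrapped call throws
        ORIGIN.set(previous);
    }
}

class OriginStashExample {
    public static void main(String[] args) {
        try (OriginStash ignore = new OriginStash("ml")) {
            // requests issued here would carry the "ml" origin
            assert OriginStash.current().equals("ml");
        }
        assert OriginStash.current().equals("user"); // caller's context is back
    }
}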
import org.elasticsearch.xpack.core.ml.datafeed.DatafeedState; import org.elasticsearch.xpack.core.ml.job.config.JobState; +import org.elasticsearch.xpack.core.ml.job.config.JobTaskState; import org.elasticsearch.xpack.core.ml.job.messages.Messages; import org.elasticsearch.xpack.ml.MachineLearning; import org.elasticsearch.xpack.ml.action.TransportStartDatafeedAction; +import org.elasticsearch.xpack.ml.job.process.autodetect.AutodetectProcessManager; import org.elasticsearch.xpack.ml.notifications.Auditor; import java.util.ArrayList; @@ -62,16 +64,18 @@ public class DatafeedManager { private final ConcurrentMap runningDatafeedsOnThisNode = new ConcurrentHashMap<>(); private final DatafeedJobBuilder datafeedJobBuilder; private final TaskRunner taskRunner = new TaskRunner(); + private final AutodetectProcessManager autodetectProcessManager; private volatile boolean isolated; public DatafeedManager(ThreadPool threadPool, Client client, ClusterService clusterService, DatafeedJobBuilder datafeedJobBuilder, - Supplier currentTimeSupplier, Auditor auditor) { + Supplier currentTimeSupplier, Auditor auditor, AutodetectProcessManager autodetectProcessManager) { this.client = Objects.requireNonNull(client); this.clusterService = Objects.requireNonNull(clusterService); this.threadPool = threadPool; this.currentTimeSupplier = Objects.requireNonNull(currentTimeSupplier); this.auditor = Objects.requireNonNull(auditor); this.datafeedJobBuilder = Objects.requireNonNull(datafeedJobBuilder); + this.autodetectProcessManager = autodetectProcessManager; clusterService.addListener(taskRunner); } @@ -161,7 +165,7 @@ public void onFailure(Exception e) { protected void doRun() { Long next = null; try { - next = holder.executeLoopBack(startTime, endTime); + next = holder.executeLookBack(startTime, endTime); } catch (DatafeedJob.ExtractionProblemException e) { if (endTime == null) { next = e.nextDelayInMsSinceEpoch; @@ -253,7 +257,22 @@ private String getJobId(TransportStartDatafeedAction.DatafeedTask task) { } private JobState getJobState(PersistentTasksCustomMetaData tasks, TransportStartDatafeedAction.DatafeedTask datafeedTask) { - return MlTasks.getJobState(getJobId(datafeedTask), tasks); + return MlTasks.getJobStateModifiedForReassignments(getJobId(datafeedTask), tasks); + } + + private boolean jobHasOpenAutodetectCommunicator(PersistentTasksCustomMetaData tasks, + TransportStartDatafeedAction.DatafeedTask datafeedTask) { + PersistentTasksCustomMetaData.PersistentTask jobTask = MlTasks.getJobTask(getJobId(datafeedTask), tasks); + if (jobTask == null) { + return false; + } + + JobTaskState state = (JobTaskState) jobTask.getState(); + if (state == null || state.isStatusStale(jobTask)) { + return false; + } + + return autodetectProcessManager.hasOpenAutodetectCommunicator(jobTask.getAllocationId()); } private TimeValue computeNextDelay(long next) { @@ -272,7 +291,7 @@ public class Holder { private final TransportStartDatafeedAction.DatafeedTask task; private final long allocationId; private final String datafeedId; - // To ensure that we wait until loopback / realtime search has completed before we stop the datafeed + // To ensure that we wait until lookback / realtime search has completed before we stop the datafeed private final ReentrantLock datafeedJobLock = new ReentrantLock(true); private final DatafeedJob datafeedJob; private final boolean autoCloseJob; @@ -352,7 +371,7 @@ public void setRelocating() { isRelocating = true; } - private Long executeLoopBack(long startTime, Long endTime) throws Exception { + 
private Long executeLookBack(long startTime, Long endTime) throws Exception { datafeedJobLock.lock(); try { if (isRunning() && !isIsolated()) { @@ -446,7 +465,7 @@ private class TaskRunner implements ClusterStateListener { private void runWhenJobIsOpened(TransportStartDatafeedAction.DatafeedTask datafeedTask) { ClusterState clusterState = clusterService.state(); PersistentTasksCustomMetaData tasks = clusterState.getMetaData().custom(PersistentTasksCustomMetaData.TYPE); - if (getJobState(tasks, datafeedTask) == JobState.OPENED) { + if (getJobState(tasks, datafeedTask) == JobState.OPENED && jobHasOpenAutodetectCommunicator(tasks, datafeedTask)) { runTask(datafeedTask); } else { logger.info("Datafeed [{}] is waiting for job [{}] to be opened", @@ -485,10 +504,10 @@ public void clusterChanged(ClusterChangedEvent event) { continue; } JobState jobState = getJobState(currentTasks, datafeedTask); - if (jobState == JobState.OPENED) { - runTask(datafeedTask); - } else if (jobState == JobState.OPENING) { + if (jobState == JobState.OPENING || jobHasOpenAutodetectCommunicator(currentTasks, datafeedTask) == false) { remainingTasks.add(datafeedTask); + } else if (jobState == JobState.OPENED) { + runTask(datafeedTask); } else { logger.warn("Datafeed [{}] is stopping because job [{}] state is [{}]", datafeedTask.getDatafeedId(), getJobId(datafeedTask), jobState); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/aggregation/AggregationDataExtractor.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/aggregation/AggregationDataExtractor.java index 8705c1beee867..74f05851c7d8a 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/aggregation/AggregationDataExtractor.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/aggregation/AggregationDataExtractor.java @@ -24,11 +24,8 @@ class AggregationDataExtractor extends AbstractAggregationDataExtractor fields; final String[] indices; - final String[] types; final QueryBuilder query; final AggregatorFactories.Builder aggs; final long start; @@ -27,14 +26,13 @@ class AggregationDataExtractorContext { final boolean includeDocCount; final Map headers; - AggregationDataExtractorContext(String jobId, String timeField, Set fields, List indices, List types, - QueryBuilder query, AggregatorFactories.Builder aggs, long start, long end, boolean includeDocCount, + AggregationDataExtractorContext(String jobId, String timeField, Set fields, List indices, QueryBuilder query, + AggregatorFactories.Builder aggs, long start, long end, boolean includeDocCount, Map headers) { this.jobId = Objects.requireNonNull(jobId); this.timeField = Objects.requireNonNull(timeField); this.fields = Objects.requireNonNull(fields); this.indices = indices.toArray(new String[indices.size()]); - this.types = types.toArray(new String[types.size()]); this.query = Objects.requireNonNull(query); this.aggs = Objects.requireNonNull(aggs); this.start = start; diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/aggregation/AggregationDataExtractorFactory.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/aggregation/AggregationDataExtractorFactory.java index 376e9507dcb7c..12c4a47228f10 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/aggregation/AggregationDataExtractorFactory.java +++ 
b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/aggregation/AggregationDataExtractorFactory.java @@ -34,7 +34,6 @@ public DataExtractor newExtractor(long start, long end) { job.getDataDescription().getTimeField(), job.getAnalysisConfig().analysisFields(), datafeedConfig.getIndices(), - datafeedConfig.getTypes(), datafeedConfig.getParsedQuery(), datafeedConfig.getParsedAggregations(), Intervals.alignToCeil(start, histogramInterval), diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/aggregation/RollupDataExtractorFactory.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/aggregation/RollupDataExtractorFactory.java index f0ee22ce85eae..d5290611ab062 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/aggregation/RollupDataExtractorFactory.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/aggregation/RollupDataExtractorFactory.java @@ -56,7 +56,6 @@ public DataExtractor newExtractor(long start, long end) { job.getDataDescription().getTimeField(), job.getAnalysisConfig().analysisFields(), datafeedConfig.getIndices(), - datafeedConfig.getTypes(), datafeedConfig.getParsedQuery(), datafeedConfig.getParsedAggregations(), Intervals.alignToCeil(start, histogramInterval), diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/chunked/ChunkedDataExtractor.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/chunked/ChunkedDataExtractor.java index d9dd1c6e1ee13..618ae6ee9a30d 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/chunked/ChunkedDataExtractor.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/chunked/ChunkedDataExtractor.java @@ -191,7 +191,7 @@ private DataSummary buildDataSummary() throws IOException { } private DataSummary newScrolledDataSummary() throws IOException { - SearchRequestBuilder searchRequestBuilder = rangeSearchRequest().setTypes(context.types); + SearchRequestBuilder searchRequestBuilder = rangeSearchRequest(); SearchResponse response = executeSearchRequest(searchRequestBuilder); LOGGER.debug("[{}] Scrolling Data summary response was obtained", context.jobId); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/chunked/ChunkedDataExtractorContext.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/chunked/ChunkedDataExtractorContext.java index bb32b40f7cde3..1c1ac49ad2749 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/chunked/ChunkedDataExtractorContext.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/chunked/ChunkedDataExtractorContext.java @@ -23,7 +23,6 @@ interface TimeAligner { final String jobId; final String timeField; final String[] indices; - final String[] types; final QueryBuilder query; final int scrollSize; final long start; @@ -34,14 +33,12 @@ interface TimeAligner { final boolean hasAggregations; final Long histogramInterval; - ChunkedDataExtractorContext(String jobId, String timeField, List indices, List types, - QueryBuilder query, int scrollSize, long start, long end, @Nullable TimeValue chunkSpan, - TimeAligner timeAligner, Map headers, boolean hasAggregations, - @Nullable Long histogramInterval) { + ChunkedDataExtractorContext(String jobId, String timeField, List indices, QueryBuilder 
query, int scrollSize, long start, + long end, @Nullable TimeValue chunkSpan, TimeAligner timeAligner, Map headers, + boolean hasAggregations, @Nullable Long histogramInterval) { this.jobId = Objects.requireNonNull(jobId); this.timeField = Objects.requireNonNull(timeField); this.indices = indices.toArray(new String[indices.size()]); - this.types = types.toArray(new String[types.size()]); this.query = Objects.requireNonNull(query); this.scrollSize = scrollSize; this.start = start; diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/chunked/ChunkedDataExtractorFactory.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/chunked/ChunkedDataExtractorFactory.java index 68161507ed742..76a05e6b4d16a 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/chunked/ChunkedDataExtractorFactory.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/chunked/ChunkedDataExtractorFactory.java @@ -35,7 +35,6 @@ public DataExtractor newExtractor(long start, long end) { job.getId(), job.getDataDescription().getTimeField(), datafeedConfig.getIndices(), - datafeedConfig.getTypes(), datafeedConfig.getParsedQuery(), datafeedConfig.getScrollSize(), timeAligner.alignToCeil(start), diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/scroll/ScrollDataExtractor.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/scroll/ScrollDataExtractor.java index d890ce8a3fe74..5e6eb96637deb 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/scroll/ScrollDataExtractor.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/scroll/ScrollDataExtractor.java @@ -106,7 +106,6 @@ private SearchRequestBuilder buildSearchRequest(long start) { .setScroll(SCROLL_TIMEOUT) .addSort(context.extractedFields.timeField(), SortOrder.ASC) .setIndices(context.indices) - .setTypes(context.types) .setSize(context.scrollSize) .setQuery(ExtractorUtils.wrapInTimeRangeQuery( context.query, context.extractedFields.timeField(), start, context.end)); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/scroll/ScrollDataExtractorContext.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/scroll/ScrollDataExtractorContext.java index 08e693849ec05..ac05c13308674 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/scroll/ScrollDataExtractorContext.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/scroll/ScrollDataExtractorContext.java @@ -18,7 +18,6 @@ class ScrollDataExtractorContext { final String jobId; final TimeBasedExtractedFields extractedFields; final String[] indices; - final String[] types; final QueryBuilder query; final List scriptFields; final int scrollSize; @@ -26,13 +25,12 @@ class ScrollDataExtractorContext { final long end; final Map headers; - ScrollDataExtractorContext(String jobId, TimeBasedExtractedFields extractedFields, List indices, List types, - QueryBuilder query, List scriptFields, int scrollSize, - long start, long end, Map headers) { + ScrollDataExtractorContext(String jobId, TimeBasedExtractedFields extractedFields, List indices, QueryBuilder query, + List scriptFields, int scrollSize, long start, long end, + Map headers) { this.jobId = Objects.requireNonNull(jobId); this.extractedFields = 
Objects.requireNonNull(extractedFields); this.indices = indices.toArray(new String[indices.size()]); - this.types = types.toArray(new String[types.size()]); this.query = Objects.requireNonNull(query); this.scriptFields = Objects.requireNonNull(scriptFields); this.scrollSize = scrollSize; diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/scroll/ScrollDataExtractorFactory.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/scroll/ScrollDataExtractorFactory.java index 986387c2ed808..763d718bcc79e 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/scroll/ScrollDataExtractorFactory.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/scroll/ScrollDataExtractorFactory.java @@ -43,7 +43,6 @@ public DataExtractor newExtractor(long start, long end) { job.getId(), extractedFields, datafeedConfig.getIndices(), - datafeedConfig.getTypes(), datafeedConfig.getParsedQuery(), datafeedConfig.getScriptFields(), datafeedConfig.getScrollSize(), diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/JobManager.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/JobManager.java index 33047c1fca39a..53559aee4701b 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/JobManager.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/JobManager.java @@ -55,7 +55,7 @@ import org.elasticsearch.xpack.ml.job.persistence.JobResultsProvider; import org.elasticsearch.xpack.ml.job.process.autodetect.UpdateParams; import org.elasticsearch.xpack.ml.notifications.Auditor; -import org.elasticsearch.xpack.ml.utils.ChainTaskExecutor; +import org.elasticsearch.xpack.ml.utils.VoidChainTaskExecutor; import java.io.IOException; import java.util.ArrayList; @@ -397,16 +397,16 @@ private void postJobUpdate(UpdateJobAction.Request request, Job updatedJob, Acti } private void validate(Job job, JobUpdate jobUpdate, ActionListener handler) { - ChainTaskExecutor chainTaskExecutor = new ChainTaskExecutor(client.threadPool().executor( + VoidChainTaskExecutor voidChainTaskExecutor = new VoidChainTaskExecutor(client.threadPool().executor( MachineLearning.UTILITY_THREAD_POOL_NAME), true); - validateModelSnapshotIdUpdate(job, jobUpdate.getModelSnapshotId(), chainTaskExecutor); - validateAnalysisLimitsUpdate(job, jobUpdate.getAnalysisLimits(), chainTaskExecutor); - chainTaskExecutor.execute(handler); + validateModelSnapshotIdUpdate(job, jobUpdate.getModelSnapshotId(), voidChainTaskExecutor); + validateAnalysisLimitsUpdate(job, jobUpdate.getAnalysisLimits(), voidChainTaskExecutor); + voidChainTaskExecutor.execute(ActionListener.wrap(aVoids -> handler.onResponse(null), handler::onFailure)); } - private void validateModelSnapshotIdUpdate(Job job, String modelSnapshotId, ChainTaskExecutor chainTaskExecutor) { + private void validateModelSnapshotIdUpdate(Job job, String modelSnapshotId, VoidChainTaskExecutor voidChainTaskExecutor) { if (modelSnapshotId != null) { - chainTaskExecutor.add(listener -> { + voidChainTaskExecutor.add(listener -> { jobResultsProvider.getModelSnapshot(job.getId(), modelSnapshotId, newModelSnapshot -> { if (newModelSnapshot == null) { String message = Messages.getMessage(Messages.REST_NO_SUCH_MODEL_SNAPSHOT, modelSnapshotId, @@ -428,12 +428,12 @@ private void validateModelSnapshotIdUpdate(Job job, String modelSnapshotId, Chai } } - private void validateAnalysisLimitsUpdate(Job job, AnalysisLimits newLimits, 
ChainTaskExecutor chainTaskExecutor) { + private void validateAnalysisLimitsUpdate(Job job, AnalysisLimits newLimits, VoidChainTaskExecutor voidChainTaskExecutor) { if (newLimits == null || newLimits.getModelMemoryLimit() == null) { return; } Long newModelMemoryLimit = newLimits.getModelMemoryLimit(); - chainTaskExecutor.add(listener -> { + voidChainTaskExecutor.add(listener -> { if (isJobOpen(clusterService.state(), job.getId())) { listener.onFailure(ExceptionsHelper.badRequestException("Cannot update " + Job.ANALYSIS_LIMITS.getPreferredName() + " while the job is open")); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/JobManagerHolder.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/JobManagerHolder.java new file mode 100644 index 0000000000000..cf54f2852275d --- /dev/null +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/JobManagerHolder.java @@ -0,0 +1,46 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +package org.elasticsearch.xpack.ml.job; + +import org.elasticsearch.ElasticsearchException; + +public class JobManagerHolder { + + private final JobManager instance; + + /** + * Create an empty holder which also means that no job manager gets created. + */ + public JobManagerHolder() { + this.instance = null; + } + + /** + * Create a holder that allows lazy creation of a job manager. + * + */ + public JobManagerHolder(JobManager jobManager) { + this.instance = jobManager; + } + + public boolean isEmpty() { + return instance == null; + } + + /** + * Get the instance of the held JobManager. 
+ * + * @return job manager instance + * @throws ElasticsearchException if holder has been created with the empty constructor + */ + public JobManager getJobManager() { + if (instance == null) { + throw new ElasticsearchException("Tried to get job manager although Machine Learning is disabled"); + } + return instance; + } +} diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobDataDeleter.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobDataDeleter.java index b65feb68da056..c96388213c8c0 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobDataDeleter.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobDataDeleter.java @@ -8,26 +8,28 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.elasticsearch.action.ActionListener; -import org.elasticsearch.action.bulk.BulkAction; -import org.elasticsearch.action.bulk.BulkItemResponse; -import org.elasticsearch.action.bulk.BulkRequestBuilder; -import org.elasticsearch.action.bulk.BulkResponse; import org.elasticsearch.action.support.IndicesOptions; -import org.elasticsearch.action.support.WriteRequest; import org.elasticsearch.client.Client; +import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.index.query.ConstantScoreQueryBuilder; +import org.elasticsearch.index.query.IdsQueryBuilder; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.QueryBuilders; +import org.elasticsearch.index.reindex.BulkByScrollResponse; +import org.elasticsearch.index.reindex.BulkByScrollTask; import org.elasticsearch.index.reindex.DeleteByQueryAction; import org.elasticsearch.index.reindex.DeleteByQueryRequest; import org.elasticsearch.xpack.core.ml.job.persistence.AnomalyDetectorsIndex; -import org.elasticsearch.xpack.core.ml.job.persistence.ElasticsearchMappings; import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.ModelSnapshot; import org.elasticsearch.xpack.core.ml.job.results.Result; +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashSet; import java.util.List; import java.util.Objects; +import java.util.Set; import static org.elasticsearch.xpack.core.ClientHelper.ML_ORIGIN; import static org.elasticsearch.xpack.core.ClientHelper.executeAsyncWithOrigin; @@ -50,27 +52,34 @@ public JobDataDeleter(Client client, String jobId) { * * @param modelSnapshots the model snapshots to delete */ - public void deleteModelSnapshots(List modelSnapshots, ActionListener listener) { + public void deleteModelSnapshots(List modelSnapshots, ActionListener listener) { if (modelSnapshots.isEmpty()) { - listener.onResponse(new BulkResponse(new BulkItemResponse[0], 0L)); + listener.onResponse(new BulkByScrollResponse(TimeValue.ZERO, + new BulkByScrollTask.Status(Collections.emptyList(), null), + Collections.emptyList(), + Collections.emptyList(), + false)); return; } - String stateIndexName = AnomalyDetectorsIndex.jobStateIndexName(); + String stateIndexName = AnomalyDetectorsIndex.jobStateIndexPattern(); - BulkRequestBuilder bulkRequestBuilder = client.prepareBulk(); + List idsToDelete = new ArrayList<>(); + Set indices = new HashSet<>(); + indices.add(stateIndexName); for (ModelSnapshot modelSnapshot : modelSnapshots) { - for (String stateDocId : modelSnapshot.stateDocumentIds()) { - 
bulkRequestBuilder.add(client.prepareDelete(stateIndexName, ElasticsearchMappings.DOC_TYPE, stateDocId)); - } - - bulkRequestBuilder.add(client.prepareDelete(AnomalyDetectorsIndex.jobResultsAliasedName(modelSnapshot.getJobId()), - ElasticsearchMappings.DOC_TYPE, ModelSnapshot.documentId(modelSnapshot))); + idsToDelete.addAll(modelSnapshot.stateDocumentIds()); + idsToDelete.add(ModelSnapshot.documentId(modelSnapshot)); + indices.add(AnomalyDetectorsIndex.jobResultsAliasedName(modelSnapshot.getJobId())); } - bulkRequestBuilder.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE); + DeleteByQueryRequest deleteByQueryRequest = new DeleteByQueryRequest(indices.toArray(new String[0])) + .setRefresh(true) + .setIndicesOptions(IndicesOptions.lenientExpandOpen()) + .setQuery(new IdsQueryBuilder().addIds(idsToDelete.toArray(new String[0]))); + try { - executeAsyncWithOrigin(client, ML_ORIGIN, BulkAction.INSTANCE, bulkRequestBuilder.request(), listener); + executeAsyncWithOrigin(client, ML_ORIGIN, DeleteByQueryAction.INSTANCE, deleteByQueryRequest, listener); } catch (Exception e) { listener.onFailure(e); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobResultsPersister.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobResultsPersister.java index 32a52410f2d25..e57d85aefa72c 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobResultsPersister.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobResultsPersister.java @@ -305,7 +305,7 @@ public void commitResultWrites(String jobId) { * @param jobId The job Id * */ public void commitStateWrites(String jobId) { - String indexName = AnomalyDetectorsIndex.jobStateIndexName(); + String indexName = AnomalyDetectorsIndex.jobStateIndexPattern(); // Refresh should wait for Lucene to make the data searchable logger.trace("[{}] ES API CALL: refresh index {}", jobId, indexName); RefreshRequest refreshRequest = new RefreshRequest(indexName); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobResultsProvider.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobResultsProvider.java index cc75d48b81c0b..b942c49c14e73 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobResultsProvider.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobResultsProvider.java @@ -157,14 +157,14 @@ public JobResultsProvider(Client client, Settings settings) { */ public void checkForLeftOverDocuments(Job job, ActionListener listener) { - SearchRequestBuilder stateDocSearch = client.prepareSearch(AnomalyDetectorsIndex.jobStateIndexName()) + SearchRequestBuilder stateDocSearch = client.prepareSearch(AnomalyDetectorsIndex.jobStateIndexPattern()) .setQuery(QueryBuilders.idsQuery().addIds(CategorizerState.documentId(job.getId(), 1), CategorizerState.v54DocumentId(job.getId(), 1))) - .setIndicesOptions(IndicesOptions.lenientExpandOpen()); + .setIndicesOptions(IndicesOptions.strictExpand()); - SearchRequestBuilder quantilesDocSearch = client.prepareSearch(AnomalyDetectorsIndex.jobStateIndexName()) + SearchRequestBuilder quantilesDocSearch = client.prepareSearch(AnomalyDetectorsIndex.jobStateIndexPattern()) .setQuery(QueryBuilders.idsQuery().addIds(Quantiles.documentId(job.getId()), Quantiles.v54DocumentId(job.getId()))) - .setIndicesOptions(IndicesOptions.lenientExpandOpen()); + 
.setIndicesOptions(IndicesOptions.strictExpand()); String resultsIndexName = job.getResultsIndexName(); SearchRequestBuilder resultDocSearch = client.prepareSearch(resultsIndexName) @@ -396,7 +396,7 @@ public void getAutodetectParams(Job job, Consumer consumer, Co AutodetectParams.Builder paramsBuilder = new AutodetectParams.Builder(job.getId()); String resultsIndex = AnomalyDetectorsIndex.jobResultsAliasedName(jobId); - String stateIndex = AnomalyDetectorsIndex.jobStateIndexName(); + String stateIndex = AnomalyDetectorsIndex.jobStateIndexPattern(); MultiSearchRequestBuilder msearch = client.prepareMultiSearch() .add(createLatestDataCountsSearch(resultsIndex, jobId)) @@ -490,20 +490,6 @@ private T parseSearchHit(SearchHit hit, BiFunction } } - private T parseGetHit(GetResponse getResponse, BiFunction objectParser, - Consumer errorHandler) { - BytesReference source = getResponse.getSourceAsBytesRef(); - - try (InputStream stream = source.streamInput(); - XContentParser parser = XContentFactory.xContent(XContentType.JSON) - .createParser(NamedXContentRegistry.EMPTY, LoggingDeprecationHandler.INSTANCE, stream)) { - return objectParser.apply(parser, null); - } catch (IOException e) { - errorHandler.accept(new ElasticsearchParseException("failed to parse " + getResponse.getType(), e)); - return null; - } - } - /** * Search for buckets with the parameters in the {@link BucketsQueryBuilder} * Uses the internal client, so runs as the _xpack user @@ -957,19 +943,6 @@ private void searchSingleResult(String jobId, String resultDescription, S ), client::search); } - private void getResult(String jobId, String resultDescription, GetRequest get, BiFunction objectParser, - Consumer> handler, Consumer errorHandler, Supplier notFoundSupplier) { - - executeAsyncWithOrigin(client.threadPool().getThreadContext(), ML_ORIGIN, get, ActionListener.wrap(getDocResponse -> { - if (getDocResponse.isExists()) { - handler.accept(new Result<>(getDocResponse.getIndex(), parseGetHit(getDocResponse, objectParser, errorHandler))); - } else { - LOGGER.trace("No {} for job with id {}", resultDescription, jobId); - handler.accept(new Result<>(null, notFoundSupplier.get())); - } - }, errorHandler), client::get); - } - private SearchRequestBuilder createLatestModelSizeStatsSearch(String indexName) { return client.prepareSearch(indexName) .setSize(1) @@ -1115,11 +1088,14 @@ public void scheduledEvents(ScheduledEventsQueryBuilder query, ActionListener handler, Consumer errorHandler) { String indexName = AnomalyDetectorsIndex.jobResultsAliasedName(jobId); - GetRequest getRequest = new GetRequest(indexName, ElasticsearchMappings.DOC_TYPE, - ForecastRequestStats.documentId(jobId, forecastId)); - - getResult(jobId, ForecastRequestStats.RESULTS_FIELD.getPreferredName(), getRequest, ForecastRequestStats.LENIENT_PARSER, - result -> handler.accept(result.result), errorHandler, () -> null); + SearchRequestBuilder forecastSearch = client.prepareSearch(indexName) + .setQuery(QueryBuilders.idsQuery().addIds(ForecastRequestStats.documentId(jobId, forecastId))); + + searchSingleResult(jobId, + ForecastRequestStats.RESULTS_FIELD.getPreferredName(), + forecastSearch, + ForecastRequestStats.LENIENT_PARSER,result -> handler.accept(result.result), + errorHandler, () -> null); } public void getForecastStats(String jobId, Consumer handler, Consumer errorHandler) { diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/StateStreamer.java 
b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/StateStreamer.java index 9a2c6a4938b2f..3ed91412042c7 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/StateStreamer.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/StateStreamer.java @@ -9,10 +9,11 @@ import org.apache.logging.log4j.Logger; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.BytesRefIterator; -import org.elasticsearch.action.get.GetResponse; +import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.client.Client; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.util.concurrent.ThreadContext; +import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.xpack.core.ml.job.persistence.AnomalyDetectorsIndex; import org.elasticsearch.xpack.core.ml.job.persistence.ElasticsearchMappings; import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.CategorizerState; @@ -62,7 +63,7 @@ public void cancel() { * @param restoreStream the stream to write the state to */ public void restoreStateToStream(String jobId, ModelSnapshot modelSnapshot, OutputStream restoreStream) throws IOException { - String indexName = AnomalyDetectorsIndex.jobStateIndexName(); + String indexName = AnomalyDetectorsIndex.jobStateIndexPattern(); // First try to restore model state. for (String stateDocId : modelSnapshot.stateDocumentIds()) { @@ -73,13 +74,16 @@ public void restoreStateToStream(String jobId, ModelSnapshot modelSnapshot, Outp LOGGER.trace("ES API CALL: get ID {} from index {}", stateDocId, indexName); try (ThreadContext.StoredContext ignore = stashWithOrigin(client.threadPool().getThreadContext(), ML_ORIGIN)) { - GetResponse stateResponse = client.prepareGet(indexName, ElasticsearchMappings.DOC_TYPE, stateDocId).get(); - if (!stateResponse.isExists()) { + SearchResponse stateResponse = client.prepareSearch(indexName) + .setTypes(ElasticsearchMappings.DOC_TYPE) + .setSize(1) + .setQuery(QueryBuilders.idsQuery().addIds(stateDocId)).get(); + if (stateResponse.getHits().getHits().length == 0) { LOGGER.error("Expected {} documents for model state for {} snapshot {} but failed to find {}", modelSnapshot.getSnapshotDocCount(), jobId, modelSnapshot.getSnapshotId(), stateDocId); break; } - writeStateToStream(stateResponse.getSourceAsBytesRef(), restoreStream); + writeStateToStream(stateResponse.getHits().getAt(0).getSourceRef(), restoreStream); } } @@ -97,11 +101,14 @@ public void restoreStateToStream(String jobId, ModelSnapshot modelSnapshot, Outp LOGGER.trace("ES API CALL: get ID {} from index {}", docId, indexName); try (ThreadContext.StoredContext ignore = stashWithOrigin(client.threadPool().getThreadContext(), ML_ORIGIN)) { - GetResponse stateResponse = client.prepareGet(indexName, ElasticsearchMappings.DOC_TYPE, docId).get(); - if (!stateResponse.isExists()) { + SearchResponse stateResponse = client.prepareSearch(indexName) + .setTypes(ElasticsearchMappings.DOC_TYPE) + .setSize(1) + .setQuery(QueryBuilders.idsQuery().addIds(docId)).get(); + if (stateResponse.getHits().getHits().length == 0) { break; } - writeStateToStream(stateResponse.getSourceAsBytesRef(), restoreStream); + writeStateToStream(stateResponse.getHits().getAt(0).getSourceRef(), restoreStream); } } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/AutodetectProcessManager.java 
b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/AutodetectProcessManager.java index 251a2a5224ae9..32507df53cef7 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/AutodetectProcessManager.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/AutodetectProcessManager.java @@ -212,6 +212,13 @@ public void killAllProcessesOnThisNode() { */ public void persistJob(JobTask jobTask, Consumer handler) { AutodetectCommunicator communicator = getOpenAutodetectCommunicator(jobTask); + if (communicator == null) { + String message = String.format(Locale.ROOT, "Cannot persist because job [%s] does not have a corresponding autodetect process", + jobTask.getJobId()); + logger.debug(message); + handler.accept(ExceptionsHelper.conflictStatusException(message)); + return; + } communicator.persistJob((aVoid, e) -> handler.accept(e)); } @@ -239,7 +246,8 @@ public void processData(JobTask jobTask, AnalysisRegistry analysisRegistry, Inpu XContentType xContentType, DataLoadParams params, BiConsumer handler) { AutodetectCommunicator communicator = getOpenAutodetectCommunicator(jobTask); if (communicator == null) { - throw ExceptionsHelper.conflictStatusException("Cannot process data because job [" + jobTask.getJobId() + "] is not open"); + throw ExceptionsHelper.conflictStatusException("Cannot process data because job [" + jobTask.getJobId() + + "] does not have a corresponding autodetect process"); } communicator.writeToJob(input, analysisRegistry, xContentType, params, handler); } @@ -257,7 +265,8 @@ public void flushJob(JobTask jobTask, FlushJobParams params, ActionListener handler) { AutodetectCommunicator communicator = getOpenAutodetectCommunicator(jobTask); if (communicator == null) { - String message = "Cannot process update model debug config because job [" + jobTask.getJobId() + "] is not open"; + String message = "Cannot process update model debug config because job [" + jobTask.getJobId() + + "] does not have a corresponding autodetect process"; logger.debug(message); handler.accept(ExceptionsHelper.conflictStatusException(message)); return; @@ -663,6 +674,14 @@ private AutodetectCommunicator getOpenAutodetectCommunicator(JobTask jobTask) { return null; } + public boolean hasOpenAutodetectCommunicator(long jobAllocationId) { + ProcessContext processContext = processByAllocation.get(jobAllocationId); + if (processContext != null && processContext.getState() == ProcessContext.ProcessStateName.RUNNING) { + return processContext.getAutodetectCommunicator() != null; + } + return false; + } + public Optional jobOpenTime(JobTask jobTask) { AutodetectCommunicator communicator = getAutodetectCommunicator(jobTask); if (communicator == null) { diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/retention/UnusedStateRemover.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/retention/UnusedStateRemover.java index ea094dfe6b4f1..249d3761b5842 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/retention/UnusedStateRemover.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/retention/UnusedStateRemover.java @@ -8,21 +8,27 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.elasticsearch.action.ActionListener; -import org.elasticsearch.action.bulk.BulkRequestBuilder; -import org.elasticsearch.action.bulk.BulkResponse; -import org.elasticsearch.action.delete.DeleteRequest; 
+import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.client.Client; import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.Strings; +import org.elasticsearch.index.query.QueryBuilders; +import org.elasticsearch.index.reindex.DeleteByQueryAction; +import org.elasticsearch.index.reindex.DeleteByQueryRequest; import org.elasticsearch.xpack.core.ml.MlMetadata; +import org.elasticsearch.xpack.core.ml.job.config.Job; import org.elasticsearch.xpack.core.ml.job.persistence.AnomalyDetectorsIndex; import org.elasticsearch.xpack.core.ml.job.persistence.ElasticsearchMappings; import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.CategorizerState; import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.ModelState; import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.Quantiles; +import org.elasticsearch.xpack.ml.job.persistence.BatchedJobsIterator; import org.elasticsearch.xpack.ml.job.persistence.BatchedStateDocIdsIterator; +import java.util.ArrayList; import java.util.Arrays; import java.util.Deque; +import java.util.HashSet; import java.util.List; import java.util.Objects; import java.util.Set; @@ -48,9 +54,9 @@ public UnusedStateRemover(Client client, ClusterService clusterService) { @Override public void remove(ActionListener<Boolean> listener) { try { - BulkRequestBuilder deleteUnusedStateRequestBuilder = findUnusedStateDocs(); - if (deleteUnusedStateRequestBuilder.numberOfActions() > 0) { - executeDeleteUnusedStateDocs(deleteUnusedStateRequestBuilder, listener); + List<String> unusedStateDocIds = findUnusedStateDocIds(); + if (unusedStateDocIds.size() > 0) { + executeDeleteUnusedStateDocs(unusedStateDocIds, listener); } else { listener.onResponse(true); } @@ -59,10 +65,11 @@ public void remove(ActionListener<Boolean> listener) { } } - private BulkRequestBuilder findUnusedStateDocs() { + private List<String> findUnusedStateDocIds() { Set<String> jobIds = getJobIds(); - BulkRequestBuilder deleteUnusedStateRequestBuilder = client.prepareBulk(); - BatchedStateDocIdsIterator stateDocIdsIterator = new BatchedStateDocIdsIterator(client, AnomalyDetectorsIndex.jobStateIndexName()); + List<String> stateDocIdsToDelete = new ArrayList<>(); + BatchedStateDocIdsIterator stateDocIdsIterator = new BatchedStateDocIdsIterator(client, + AnomalyDetectorsIndex.jobStateIndexPattern()); while (stateDocIdsIterator.hasNext()) { Deque<String> stateDocIds = stateDocIdsIterator.next(); for (String stateDocId : stateDocIds) { @@ -72,39 +79,51 @@ private BulkRequestBuilder findUnusedStateDocs() { continue; } if (jobIds.contains(jobId) == false) { - deleteUnusedStateRequestBuilder.add(new DeleteRequest( - AnomalyDetectorsIndex.jobStateIndexName(), ElasticsearchMappings.DOC_TYPE, stateDocId)); + stateDocIdsToDelete.add(stateDocId); } } } - return deleteUnusedStateRequestBuilder; + return stateDocIdsToDelete; } private Set<String> getJobIds() { - return MlMetadata.getMlMetadata(clusterService.state()).getJobs().keySet(); + Set<String> jobIds = new HashSet<>(); + + // TODO Once at 8.0, we can stop searching for jobs in cluster state + // and remove cluster service as a member altogether.
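+ // Job configurations may live in two places at this point: the cluster state + // (jobs not yet migrated) and the config index (migrated jobs), so ids are + // collected from both sources; the HashSet collapses any duplicates.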
+ jobIds.addAll(MlMetadata.getMlMetadata(clusterService.state()).getJobs().keySet()); + + BatchedJobsIterator jobsIterator = new BatchedJobsIterator(client, AnomalyDetectorsIndex.configIndexName()); + while (jobsIterator.hasNext()) { + Deque jobs = jobsIterator.next(); + jobs.stream().map(Job.Builder::getId).forEach(jobIds::add); + } + return jobIds; } - private void executeDeleteUnusedStateDocs(BulkRequestBuilder deleteUnusedStateRequestBuilder, ActionListener listener) { + private void executeDeleteUnusedStateDocs(List unusedDocIds, ActionListener listener) { LOGGER.info("Found [{}] unused state documents; attempting to delete", - deleteUnusedStateRequestBuilder.numberOfActions()); - deleteUnusedStateRequestBuilder.execute(new ActionListener() { - @Override - public void onResponse(BulkResponse bulkItemResponses) { - if (bulkItemResponses.hasFailures()) { + unusedDocIds.size()); + DeleteByQueryRequest deleteByQueryRequest = new DeleteByQueryRequest(AnomalyDetectorsIndex.jobStateIndexPattern()) + .types(ElasticsearchMappings.DOC_TYPE) + .setIndicesOptions(IndicesOptions.lenientExpandOpen()) + .setQuery(QueryBuilders.idsQuery().addIds(unusedDocIds.toArray(new String[0]))); + client.execute(DeleteByQueryAction.INSTANCE, deleteByQueryRequest, ActionListener.wrap( + response -> { + if (response.getBulkFailures().size() > 0 || response.getSearchFailures().size() > 0) { LOGGER.error("Some unused state documents could not be deleted due to failures: {}", - bulkItemResponses.buildFailureMessage()); + Strings.collectionToCommaDelimitedString(response.getBulkFailures()) + + "," + Strings.collectionToCommaDelimitedString(response.getSearchFailures())); } else { LOGGER.info("Successfully deleted all unused state documents"); } listener.onResponse(true); - } - - @Override - public void onFailure(Exception e) { + }, + e -> { LOGGER.error("Error deleting unused model state documents: ", e); listener.onFailure(e); } - }); + )); } private static class JobIdExtractor { diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/process/MlMemoryTracker.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/process/MlMemoryTracker.java index 54a7400375fe9..441317bcbe207 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/process/MlMemoryTracker.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/process/MlMemoryTracker.java @@ -20,6 +20,7 @@ import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xpack.core.ml.MlTasks; import org.elasticsearch.xpack.core.ml.action.OpenJobAction; +import org.elasticsearch.xpack.core.ml.job.config.AnalysisLimits; import org.elasticsearch.xpack.core.ml.job.config.Job; import org.elasticsearch.xpack.ml.MachineLearning; import org.elasticsearch.xpack.ml.job.JobManager; @@ -269,15 +270,16 @@ public void refreshJobMemory(String jobId, ActionListener listener) { private void setJobMemoryToLimit(String jobId, ActionListener listener) { jobManager.getJob(jobId, ActionListener.wrap(job -> { - Long memoryLimitMb = job.getAnalysisLimits().getModelMemoryLimit(); - if (memoryLimitMb != null) { - Long memoryRequirementBytes = ByteSizeUnit.MB.toBytes(memoryLimitMb) + Job.PROCESS_MEMORY_OVERHEAD.getBytes(); - memoryRequirementByJob.put(jobId, memoryRequirementBytes); - listener.onResponse(memoryRequirementBytes); - } else { - memoryRequirementByJob.remove(jobId); - listener.onResponse(null); + Long memoryLimitMb = (job.getAnalysisLimits() != null) ? 
job.getAnalysisLimits().getModelMemoryLimit() : null; + // Although recent versions of the code enforce a non-null model_memory_limit + // when parsing, the job could have been streamed from an older version node in + // a mixed version cluster + if (memoryLimitMb == null) { + memoryLimitMb = AnalysisLimits.PRE_6_1_DEFAULT_MODEL_MEMORY_LIMIT_MB; } + Long memoryRequirementBytes = ByteSizeUnit.MB.toBytes(memoryLimitMb) + Job.PROCESS_MEMORY_OVERHEAD.getBytes(); + memoryRequirementByJob.put(jobId, memoryRequirementBytes); + listener.onResponse(memoryRequirementBytes); }, e -> { if (e instanceof ResourceNotFoundException) { // TODO: does this also happen if the .ml-config index exists but is unavailable? diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/results/RestUpgradeMlAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/results/RestUpgradeMlAction.java new file mode 100644 index 0000000000000..cad82ce325c27 --- /dev/null +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/results/RestUpgradeMlAction.java @@ -0,0 +1,76 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.ml.rest.results; + +import org.apache.logging.log4j.LogManager; +import org.elasticsearch.client.node.NodeClient; +import org.elasticsearch.common.logging.DeprecationLogger; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.BytesRestResponse; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.rest.action.RestToXContentListener; +import org.elasticsearch.tasks.LoggingTaskListener; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.xpack.core.ml.action.MlUpgradeAction; +import org.elasticsearch.xpack.ml.MachineLearning; + +import java.io.IOException; + +import static org.elasticsearch.rest.RestRequest.Method.POST; + +public class RestUpgradeMlAction extends BaseRestHandler { + + private static final DeprecationLogger deprecationLogger = + new DeprecationLogger(LogManager.getLogger(RestUpgradeMlAction.class)); + + public RestUpgradeMlAction(Settings settings, RestController controller) { + super(settings); + controller.registerWithDeprecatedHandler( + POST, + MachineLearning.BASE_PATH + "_upgrade", + this, + POST, + MachineLearning.PRE_V7_BASE_PATH + "_upgrade", + deprecationLogger); + } + + @Override + public String getName() { + return "xpack_ml_upgrade_action"; + } + + @Override + protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) throws IOException { + MlUpgradeAction.Request parsedRequest = new MlUpgradeAction.Request(); + if (restRequest.hasContent()) { + XContentParser parser = restRequest.contentParser(); + parsedRequest = MlUpgradeAction.Request.PARSER.apply(parser, null); + } + final MlUpgradeAction.Request upgradeRequest = parsedRequest; + + if (restRequest.paramAsBoolean("wait_for_completion", false)) { + return channel -> client.execute(MlUpgradeAction.INSTANCE, upgradeRequest, new RestToXContentListener<>(channel)); + } else { + 
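// Without wait_for_completion the action is submitted locally and a task id is + // returned immediately as {"task": "<node_id>:<task_id>"} (shape matching the + // response built below); progress can then be tracked via the tasks API, + // e.g. GET _tasks/<node_id>:<task_id>. +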
upgradeRequest.setShouldStoreResult(true); + + Task task = client.executeLocally(MlUpgradeAction.INSTANCE, upgradeRequest, LoggingTaskListener.instance()); + // Send task description id instead of waiting for the message + return channel -> { + try (XContentBuilder builder = channel.newBuilder()) { + builder.startObject(); + builder.field("task", client.getLocalNodeId() + ":" + task.getId()); + builder.endObject(); + channel.sendResponse(new BytesRestResponse(RestStatus.OK, builder)); + } + }; + } + } +} diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/utils/ChainTaskExecutor.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/utils/ChainTaskExecutor.java deleted file mode 100644 index 9a0ddb5dd4add..0000000000000 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/utils/ChainTaskExecutor.java +++ /dev/null @@ -1,60 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. - */ -package org.elasticsearch.xpack.ml.utils; - -import org.elasticsearch.action.ActionListener; -import org.elasticsearch.common.util.concurrent.AbstractRunnable; - -import java.util.LinkedList; -import java.util.Objects; -import java.util.concurrent.ExecutorService; - -/** - * A utility that allows chained (serial) execution of a number of tasks - * in async manner. - */ -public class ChainTaskExecutor { - - public interface ChainTask { - void run(ActionListener listener); - } - - private final ExecutorService executorService; - private final boolean shortCircuit; - private final LinkedList tasks = new LinkedList<>(); - - public ChainTaskExecutor(ExecutorService executorService, boolean shortCircuit) { - this.executorService = Objects.requireNonNull(executorService); - this.shortCircuit = shortCircuit; - } - - public synchronized void add(ChainTask task) { - tasks.add(task); - } - - public synchronized void execute(ActionListener listener) { - if (tasks.isEmpty()) { - listener.onResponse(null); - return; - } - ChainTask task = tasks.pop(); - executorService.execute(new AbstractRunnable() { - @Override - public void onFailure(Exception e) { - if (shortCircuit) { - listener.onFailure(e); - } else { - execute(listener); - } - } - - @Override - protected void doRun() { - task.run(ActionListener.wrap(nullValue -> execute(listener), this::onFailure)); - } - }); - } -} diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/utils/TypedChainTaskExecutor.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/utils/TypedChainTaskExecutor.java new file mode 100644 index 0000000000000..5af9c53649853 --- /dev/null +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/utils/TypedChainTaskExecutor.java @@ -0,0 +1,125 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.xpack.ml.utils; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.common.util.concurrent.AbstractRunnable; + +import java.util.ArrayList; +import java.util.Collections; +import java.util.LinkedList; +import java.util.List; +import java.util.Objects; +import java.util.concurrent.ExecutorService; +import java.util.function.Predicate; + +/** + * A utility that allows chained (serial) execution of a number of tasks + * in an async manner. + */ +public class TypedChainTaskExecutor<T> { + + public interface ChainTask<T> { + void run(ActionListener<T> listener); + } + + private final ExecutorService executorService; + private final LinkedList<ChainTask<T>> tasks = new LinkedList<>(); + private final Predicate<Exception> failureShortCircuitPredicate; + private final Predicate<T> continuationPredicate; + private final List<T> collectedResponses; + + /** + * Creates a new TypedChainTaskExecutor. + * Each chainedTask is executed in order serially and after each execution the continuationPredicate is tested. + * + * On failures the failureShortCircuitPredicate is tested. + * + * @param executorService The service where to execute the tasks + * @param continuationPredicate The predicate to test on whether to execute the next task or not. + * {@code true} means continue on to the next task. + * Must be able to handle null values. + * @param failureShortCircuitPredicate The predicate on whether to short circuit execution on a given exception. + * {@code true} means that no more tasks should execute and the listener::onFailure should be + * called. + */ + public TypedChainTaskExecutor(ExecutorService executorService, + Predicate<T> continuationPredicate, + Predicate<Exception> failureShortCircuitPredicate) { + this.executorService = Objects.requireNonNull(executorService); + this.continuationPredicate = continuationPredicate; + this.failureShortCircuitPredicate = failureShortCircuitPredicate; + this.collectedResponses = new ArrayList<>(); + } + + public synchronized void add(ChainTask<T> task) { + tasks.add(task); + } + + private synchronized void execute(T previousValue, ActionListener<List<T>> listener) { + collectedResponses.add(previousValue); + if (continuationPredicate.test(previousValue)) { + if (tasks.isEmpty()) { + listener.onResponse(Collections.unmodifiableList(new ArrayList<>(collectedResponses))); + return; + } + ChainTask<T> task = tasks.pop(); + executorService.execute(new AbstractRunnable() { + @Override + public void onFailure(Exception e) { + if (failureShortCircuitPredicate.test(e)) { + listener.onFailure(e); + } else { + execute(null, listener); + } + } + + @Override + protected void doRun() { + task.run(ActionListener.wrap(value -> execute(value, listener), this::onFailure)); + } + }); + } else { + listener.onResponse(Collections.unmodifiableList(new ArrayList<>(collectedResponses))); + } + } + + /** + * Execute all the chained tasks serially, notify the listener when completed + * + * @param listener The ActionListener to notify when all executions have been completed, + * or when no further tasks should be executed. + * The resulting list COULD contain null values depending on if execution is continued + * on exceptions or not.
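+ * + * A hypothetical usage sketch (the executorService and logger here are assumed + * to be provided by the caller; values are illustrative only): + * <pre>{@code + * TypedChainTaskExecutor<Integer> chain = + * new TypedChainTaskExecutor<>(executorService, Objects::nonNull, e -> true); + * chain.add(l -> l.onResponse(1)); + * chain.add(l -> l.onResponse(2)); + * chain.execute(ActionListener.wrap( + * results -> logger.info("collected {}", results), // [1, 2] + * e -> logger.error("chain failed", e))); + * }</pre>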
+ */ + public synchronized void execute(ActionListener<List<T>> listener) { + if (tasks.isEmpty()) { + listener.onResponse(Collections.emptyList()); + return; + } + collectedResponses.clear(); + ChainTask<T> task = tasks.pop(); + executorService.execute(new AbstractRunnable() { + @Override + public void onFailure(Exception e) { + if (failureShortCircuitPredicate.test(e)) { + listener.onFailure(e); + } else { + execute(null, listener); + } + } + + @Override + protected void doRun() { + task.run(ActionListener.wrap(value -> execute(value, listener), this::onFailure)); + } + }); + } + + public synchronized List<T> getCollectedResponses() { + return Collections.unmodifiableList(new ArrayList<>(collectedResponses)); + } +} diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/utils/VoidChainTaskExecutor.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/utils/VoidChainTaskExecutor.java new file mode 100644 index 0000000000000..8351c0a81aaf6 --- /dev/null +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/utils/VoidChainTaskExecutor.java @@ -0,0 +1,26 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.ml.utils; + +import java.util.concurrent.ExecutorService; +import java.util.function.Predicate; + +/** + * A utility that allows chained (serial) execution of a number of tasks + * in an async manner. + */ +public class VoidChainTaskExecutor extends TypedChainTaskExecutor<Void> { + + public VoidChainTaskExecutor(ExecutorService executorService, boolean shortCircuit) { + this(executorService, (a) -> true, (e) -> shortCircuit); + } + + VoidChainTaskExecutor(ExecutorService executorService, + Predicate<Void> continuationPredicate, + Predicate<Exception> failureShortCircuitPredicate) { + super(executorService, continuationPredicate, failureShortCircuitPredicate); + } +} diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/MachineLearningFeatureSetTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/MachineLearningFeatureSetTests.java index 4ac5ce45dc227..30471403754ab 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/MachineLearningFeatureSetTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/MachineLearningFeatureSetTests.java @@ -31,7 +31,6 @@ import org.elasticsearch.xpack.core.XPackField; import org.elasticsearch.xpack.core.ml.MachineLearningFeatureSetUsage; import org.elasticsearch.xpack.core.ml.MachineLearningField; -import org.elasticsearch.xpack.core.ml.MlMetadata; import org.elasticsearch.xpack.core.ml.action.GetDatafeedsStatsAction; import org.elasticsearch.xpack.core.ml.action.GetJobsStatsAction; import org.elasticsearch.xpack.core.ml.action.util.QueryPage; @@ -46,6 +45,8 @@ import org.elasticsearch.xpack.core.ml.stats.ForecastStats; import org.elasticsearch.xpack.core.ml.stats.ForecastStatsTests; import org.elasticsearch.xpack.core.watcher.support.xcontent.XContentSource; +import org.elasticsearch.xpack.ml.job.JobManager; +import org.elasticsearch.xpack.ml.job.JobManagerHolder; import org.junit.Before; import java.util.Arrays; @@ -62,6 +63,7 @@ import static org.hamcrest.Matchers.nullValue; import static org.hamcrest.core.Is.is; import static org.mockito.Matchers.any; +import static org.mockito.Matchers.eq; import static org.mockito.Matchers.same; import static
org.mockito.Mockito.doAnswer; import static org.mockito.Mockito.mock; @@ -72,6 +74,8 @@ public class MachineLearningFeatureSetTests extends ESTestCase { private Settings commonSettings; private ClusterService clusterService; private Client client; + private JobManager jobManager; + private JobManagerHolder jobManagerHolder; private XPackLicenseState licenseState; @Before @@ -82,7 +86,11 @@ public void init() { .build(); clusterService = mock(ClusterService.class); client = mock(Client.class); + jobManager = mock(JobManager.class); + jobManagerHolder = new JobManagerHolder(jobManager); licenseState = mock(XPackLicenseState.class); + ClusterState clusterState = new ClusterState.Builder(ClusterState.EMPTY_STATE).build(); + when(clusterService.state()).thenReturn(clusterState); givenJobs(Collections.emptyList(), Collections.emptyList()); givenDatafeeds(Collections.emptyList()); } @@ -104,7 +112,7 @@ public void testIsRunningOnMlPlatform() { public void testAvailable() throws Exception { MachineLearningFeatureSet featureSet = new MachineLearningFeatureSet(TestEnvironment.newEnvironment(commonSettings), clusterService, - client, licenseState); + client, licenseState, jobManagerHolder); boolean available = randomBoolean(); when(licenseState.isMachineLearningAllowed()).thenReturn(available); assertThat(featureSet.available(), is(available)); @@ -129,7 +137,7 @@ public void testEnabled() throws Exception { } boolean expected = enabled || useDefault; MachineLearningFeatureSet featureSet = new MachineLearningFeatureSet(TestEnvironment.newEnvironment(settings.build()), - clusterService, client, licenseState); + clusterService, client, licenseState, jobManagerHolder); assertThat(featureSet.enabled(), is(expected)); PlainActionFuture future = new PlainActionFuture<>(); featureSet.usage(future); @@ -163,7 +171,7 @@ public void testUsage() throws Exception { )); MachineLearningFeatureSet featureSet = new MachineLearningFeatureSet(TestEnvironment.newEnvironment(settings.build()), - clusterService, client, licenseState); + clusterService, client, licenseState, jobManagerHolder); PlainActionFuture future = new PlainActionFuture<>(); featureSet.usage(future); XPackFeatureSet.Usage mlUsage = future.get(); @@ -232,6 +240,28 @@ public void testUsage() throws Exception { } } + public void testUsageDisabledML() throws Exception { + when(licenseState.isMachineLearningAllowed()).thenReturn(true); + Settings.Builder settings = Settings.builder().put(commonSettings); + settings.put("xpack.ml.enabled", false); + + JobManagerHolder emptyJobManagerHolder = new JobManagerHolder(); + MachineLearningFeatureSet featureSet = new MachineLearningFeatureSet(TestEnvironment.newEnvironment(settings.build()), + clusterService, client, licenseState, emptyJobManagerHolder); + PlainActionFuture future = new PlainActionFuture<>(); + featureSet.usage(future); + XPackFeatureSet.Usage mlUsage = future.get(); + BytesStreamOutput out = new BytesStreamOutput(); + mlUsage.writeTo(out); + XPackFeatureSet.Usage serializedUsage = new MachineLearningFeatureSetUsage(out.bytes().streamInput()); + + for (XPackFeatureSet.Usage usage : Arrays.asList(mlUsage, serializedUsage)) { + assertThat(usage, is(notNullValue())); + assertThat(usage.name(), is(XPackField.MACHINE_LEARNING)); + assertThat(usage.enabled(), is(false)); + } + } + public void testNodeCount() throws Exception { when(licenseState.isMachineLearningAllowed()).thenReturn(true); int nodeCount = randomIntBetween(1, 3); @@ -239,7 +269,7 @@ public void testNodeCount() throws Exception { 
Settings.Builder settings = Settings.builder().put(commonSettings); settings.put("xpack.ml.enabled", true); MachineLearningFeatureSet featureSet = new MachineLearningFeatureSet(TestEnvironment.newEnvironment(settings.build()), - clusterService, client, licenseState); + clusterService, client, licenseState, jobManagerHolder); PlainActionFuture future = new PlainActionFuture<>(); featureSet.usage(future); @@ -282,7 +312,7 @@ public void testUsageGivenMlMetadataNotInstalled() throws Exception { when(clusterService.state()).thenReturn(ClusterState.EMPTY_STATE); MachineLearningFeatureSet featureSet = new MachineLearningFeatureSet(TestEnvironment.newEnvironment(settings.build()), - clusterService, client, licenseState); + clusterService, client, licenseState, jobManagerHolder); PlainActionFuture future = new PlainActionFuture<>(); featureSet.usage(future); XPackFeatureSet.Usage usage = future.get(); @@ -319,15 +349,14 @@ public void testUsageGivenMlMetadataNotInstalled() throws Exception { } private void givenJobs(List jobs, List jobsStats) { - MlMetadata.Builder mlMetadataBuilder = new MlMetadata.Builder(); - for (Job job : jobs) { - mlMetadataBuilder.putJob(job, false); - } - ClusterState clusterState = new ClusterState.Builder(ClusterState.EMPTY_STATE) - .metaData(new MetaData.Builder() - .putCustom(MlMetadata.TYPE, mlMetadataBuilder.build())) - .build(); - when(clusterService.state()).thenReturn(clusterState); + doAnswer(invocationOnMock -> { + @SuppressWarnings("unchecked") + ActionListener> jobListener = + (ActionListener>) invocationOnMock.getArguments()[2]; + jobListener.onResponse( + new QueryPage<>(jobs, jobs.size(), Job.RESULTS_FIELD)); + return Void.TYPE; + }).when(jobManager).expandJobs(eq(MetaData.ALL), eq(true), any(ActionListener.class)); doAnswer(invocationOnMock -> { ActionListener listener = diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/MlMetadataTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/MlMetadataTests.java index c7ca2ff805eba..2377bc5921eb2 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/MlMetadataTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/MlMetadataTests.java @@ -282,7 +282,6 @@ public void testUpdateDatafeed() { DatafeedConfig updatedDatafeed = updatedMetadata.getDatafeed(datafeedConfig1.getId()); assertThat(updatedDatafeed.getJobId(), equalTo(datafeedConfig1.getJobId())); assertThat(updatedDatafeed.getIndices(), equalTo(datafeedConfig1.getIndices())); - assertThat(updatedDatafeed.getTypes(), equalTo(datafeedConfig1.getTypes())); assertThat(updatedDatafeed.getScrollSize(), equalTo(5000)); } diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/action/TransportOpenJobActionTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/action/TransportOpenJobActionTests.java index cfb16254a9dde..04dfa5f27502d 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/action/TransportOpenJobActionTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/action/TransportOpenJobActionTests.java @@ -575,10 +575,16 @@ public void testJobTaskMatcherMatch() { } public static void addJobTask(String jobId, String nodeId, JobState jobState, PersistentTasksCustomMetaData.Builder builder) { + addJobTask(jobId, nodeId, jobState, builder, false); + } + + public static void addJobTask(String jobId, String nodeId, JobState jobState, PersistentTasksCustomMetaData.Builder builder, + boolean isStale) { 
builder.addTask(MlTasks.jobTaskId(jobId), MlTasks.JOB_TASK_NAME, new OpenJobAction.JobParams(jobId), - new Assignment(nodeId, "test assignment")); + new Assignment(nodeId, "test assignment")); if (jobState != null) { - builder.updateTaskState(MlTasks.jobTaskId(jobId), new JobTaskState(jobState, builder.getLastAllocationId())); + builder.updateTaskState(MlTasks.jobTaskId(jobId), + new JobTaskState(jobState, builder.getLastAllocationId() - (isStale ? 1 : 0))); } } diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/DatafeedJobTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/DatafeedJobTests.java index a1887f3c41814..2540ab8cde8ef 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/DatafeedJobTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/DatafeedJobTests.java @@ -30,7 +30,7 @@ import org.elasticsearch.xpack.core.ml.datafeed.extractor.DataExtractor; import org.elasticsearch.xpack.core.ml.job.messages.Messages; import org.elasticsearch.xpack.core.ml.job.results.Bucket; -import org.elasticsearch.xpack.core.security.user.SystemUser; +import org.elasticsearch.xpack.core.security.user.XPackUser; import org.elasticsearch.xpack.ml.datafeed.delayeddatacheck.DelayedDataDetector; import org.elasticsearch.xpack.ml.datafeed.delayeddatacheck.DelayedDataDetectorFactory.BucketWithMissingData; import org.elasticsearch.xpack.ml.datafeed.extractor.DataExtractorFactory; @@ -45,6 +45,7 @@ import java.io.IOException; import java.io.InputStream; import java.nio.charset.StandardCharsets; +import java.util.Arrays; import java.util.Collections; import java.util.Date; import java.util.List; @@ -226,6 +227,8 @@ public void testRealtimeRun() throws Exception { flushJobResponse = new FlushJobAction.Response(true, new Date(2000)); Bucket bucket = mock(Bucket.class); when(bucket.getTimestamp()).thenReturn(new Date(2000)); + when(bucket.getEpoch()).thenReturn(2L); + when(bucket.getBucketSpan()).thenReturn(4L); when(flushJobFuture.actionGet()).thenReturn(flushJobResponse); when(client.execute(same(FlushJobAction.INSTANCE), flushJobRequests.capture())).thenReturn(flushJobFuture); when(delayedDataDetector.detectMissingData(2000)) @@ -268,12 +271,12 @@ public void testRealtimeRun() throws Exception { Annotation expectedAnnotation = new Annotation(msg, new Date(currentTime), - SystemUser.NAME, - bucket.getTimestamp(), + XPackUser.NAME, bucket.getTimestamp(), + new Date((bucket.getEpoch() + bucket.getBucketSpan()) * 1000), jobId, - null, - null, + new Date(currentTime), + XPackUser.NAME, "annotation"); IndexRequest request = new IndexRequest(AnnotationIndex.WRITE_ALIAS_NAME); @@ -286,8 +289,13 @@ public void testRealtimeRun() throws Exception { assertThat(request.source(), equalTo(indexRequestArgumentCaptor.getValue().source())); // Execute a fourth time, this time we return a new delayedDataDetector response to verify annotation gets updated + Bucket bucket2 = mock(Bucket.class); + when(bucket2.getTimestamp()).thenReturn(new Date(6000)); + when(bucket2.getEpoch()).thenReturn(6L); + when(bucket2.getBucketSpan()).thenReturn(4L); when(delayedDataDetector.detectMissingData(2000)) - .thenReturn(Collections.singletonList(BucketWithMissingData.fromMissingAndBucket(15, bucket))); + .thenReturn(Arrays.asList(BucketWithMissingData.fromMissingAndBucket(10, bucket), + BucketWithMissingData.fromMissingAndBucket(5, bucket2))); currentTime = currentTime + DatafeedJob.MISSING_DATA_CHECK_INTERVAL_MS + 1; inputStream = new 
ByteArrayInputStream(contentBytes); when(dataExtractor.hasNext()).thenReturn(true).thenReturn(false); @@ -297,14 +305,15 @@ public void testRealtimeRun() throws Exception { msg = Messages.getMessage(Messages.JOB_AUDIT_DATAFEED_MISSING_DATA, 15, - XContentElasticsearchExtension.DEFAULT_DATE_PRINTER.print(2000)); + XContentElasticsearchExtension.DEFAULT_DATE_PRINTER.print(6000)); // What we expect the updated annotation to be indexed as IndexRequest indexRequest = new IndexRequest(AnnotationIndex.WRITE_ALIAS_NAME); indexRequest.id(annotationDocId); Annotation updatedAnnotation = new Annotation(expectedAnnotation); updatedAnnotation.setAnnotation(msg); updatedAnnotation.setModifiedTime(new Date(currentTime)); - updatedAnnotation.setModifiedUsername(SystemUser.NAME); + updatedAnnotation.setModifiedUsername(XPackUser.NAME); + updatedAnnotation.setEndTimestamp(new Date((bucket2.getEpoch() + bucket2.getBucketSpan()) * 1000)); try (XContentBuilder xContentBuilder = updatedAnnotation.toXContent(XContentFactory.jsonBuilder(), ToXContent.EMPTY_PARAMS)) { indexRequest.source(xContentBuilder); } @@ -317,6 +326,17 @@ public void testRealtimeRun() throws Exception { assertThat(indexRequest.source().utf8ToString(), equalTo(updateRequestArgumentCaptor.getValue().source().utf8ToString())); assertThat(updateRequestArgumentCaptor.getValue().opType(), equalTo(DocWriteRequest.OpType.INDEX)); + + // Execute a fifth time, no changes should occur as annotation is the same + currentTime = currentTime + DatafeedJob.MISSING_DATA_CHECK_INTERVAL_MS + 1; + inputStream = new ByteArrayInputStream(contentBytes); + when(dataExtractor.hasNext()).thenReturn(true).thenReturn(false); + when(dataExtractor.next()).thenReturn(Optional.of(inputStream)); + when(dataExtractorFactory.newExtractor(anyLong(), anyLong())).thenReturn(dataExtractor); + datafeedJob.runRealtime(); + + // We should not get 3 index requests for the annotations + verify(client, atMost(2)).index(any()); } public void testEmptyDataCountGivenlookback() throws Exception { diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/DatafeedJobValidatorTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/DatafeedJobValidatorTests.java index 4007671bbbc92..3c34c2e1d6790 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/DatafeedJobValidatorTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/DatafeedJobValidatorTests.java @@ -229,7 +229,6 @@ private static DatafeedConfig.Builder createValidDatafeedConfigWithAggs(double i private static DatafeedConfig.Builder createValidDatafeedConfig() { DatafeedConfig.Builder builder = new DatafeedConfig.Builder("my-datafeed", "my-job"); builder.setIndices(Collections.singletonList("myIndex")); - builder.setTypes(Collections.singletonList("myType")); return builder; } } diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/DatafeedManagerTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/DatafeedManagerTests.java index 54aa3ade8e1b9..858f7e0f7d1c9 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/DatafeedManagerTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/DatafeedManagerTests.java @@ -39,6 +39,7 @@ import org.elasticsearch.xpack.ml.action.TransportStartDatafeedAction.DatafeedTask; import org.elasticsearch.xpack.ml.action.TransportStartDatafeedActionTests; import 
org.elasticsearch.xpack.ml.job.persistence.MockClientBuilder; +import org.elasticsearch.xpack.ml.job.process.autodetect.AutodetectProcessManager; import org.elasticsearch.xpack.ml.notifications.Auditor; import org.junit.Before; import org.mockito.ArgumentCaptor; @@ -48,6 +49,7 @@ import java.util.Date; import java.util.concurrent.ExecutorService; import java.util.concurrent.ScheduledFuture; +import java.util.concurrent.atomic.AtomicBoolean; import java.util.function.Consumer; import static org.elasticsearch.xpack.ml.action.TransportOpenJobActionTests.addJobTask; @@ -74,13 +76,14 @@ public class DatafeedManagerTests extends ESTestCase { private long currentTime = 120000; private Auditor auditor; private ArgumentCaptor capturedClusterStateListener = ArgumentCaptor.forClass(ClusterStateListener.class); + private AtomicBoolean hasOpenAutodetectCommunicator; @Before @SuppressWarnings("unchecked") public void setUpTests() { Job.Builder job = createDatafeedJob().setCreateTime(new Date()); - PersistentTasksCustomMetaData.Builder tasksBuilder = PersistentTasksCustomMetaData.builder(); + PersistentTasksCustomMetaData.Builder tasksBuilder = PersistentTasksCustomMetaData.builder(); addJobTask(job.getId(), "node_id", JobState.OPENED, tasksBuilder); PersistentTasksCustomMetaData tasks = tasksBuilder.build(); DiscoveryNodes nodes = DiscoveryNodes.builder() @@ -128,7 +131,12 @@ public void setUpTests() { return null; }).when(datafeedJobBuilder).build(any(), any()); - datafeedManager = new DatafeedManager(threadPool, client, clusterService, datafeedJobBuilder, () -> currentTime, auditor); + hasOpenAutodetectCommunicator = new AtomicBoolean(true); + AutodetectProcessManager autodetectProcessManager = mock(AutodetectProcessManager.class); + doAnswer(invocation -> hasOpenAutodetectCommunicator.get()).when(autodetectProcessManager).hasOpenAutodetectCommunicator(anyLong()); + + datafeedManager = new DatafeedManager(threadPool, client, clusterService, datafeedJobBuilder, () -> currentTime, auditor, + autodetectProcessManager); verify(clusterService).addListener(capturedClusterStateListener.capture()); } @@ -222,7 +230,7 @@ public void testRealTime_GivenNonStoppingAnalysisProblem() throws Exception { assertThat(datafeedManager.isRunning(task.getAllocationId()), is(true)); } - public void testStart_GivenNewlyCreatedJobLoopBackAndRealtime() throws Exception { + public void testStart_GivenNewlyCreatedJobLookBackAndRealtime() throws Exception { when(datafeedJob.runLookBack(anyLong(), anyLong())).thenReturn(1L); when(datafeedJob.runRealtime()).thenReturn(1L); @@ -259,7 +267,7 @@ public void testDatafeedTaskWaitsUntilJobIsOpened() { // Verify datafeed has not started running yet as job is still opening verify(threadPool, never()).executor(MachineLearning.DATAFEED_THREAD_POOL_NAME); - tasksBuilder = PersistentTasksCustomMetaData.builder(); + tasksBuilder = PersistentTasksCustomMetaData.builder(); addJobTask("job_id", "node_id", JobState.OPENING, tasksBuilder); addJobTask("another_job", "node_id", JobState.OPENED, tasksBuilder); ClusterState.Builder anotherJobCs = ClusterState.builder(clusterService.state()) @@ -270,7 +278,7 @@ public void testDatafeedTaskWaitsUntilJobIsOpened() { // Still no run verify(threadPool, never()).executor(MachineLearning.DATAFEED_THREAD_POOL_NAME); - tasksBuilder = PersistentTasksCustomMetaData.builder(); + tasksBuilder = PersistentTasksCustomMetaData.builder(); addJobTask("job_id", "node_id", JobState.OPENED, tasksBuilder); ClusterState.Builder jobOpenedCs = 
ClusterState.builder(clusterService.state()) .metaData(new MetaData.Builder().putCustom(PersistentTasksCustomMetaData.TYPE, tasksBuilder.build())); @@ -278,12 +286,86 @@ public void testDatafeedTaskWaitsUntilJobIsOpened() { capturedClusterStateListener.getValue().clusterChanged( new ClusterChangedEvent("_source", jobOpenedCs.build(), anotherJobCs.build())); + // Now it should run as the job state changed to OPENED + verify(threadPool, times(1)).executor(MachineLearning.DATAFEED_THREAD_POOL_NAME); + } + + public void testDatafeedTaskWaitsUntilAutodetectCommunicatorIsOpen() { + + hasOpenAutodetectCommunicator.set(false); + + PersistentTasksCustomMetaData.Builder tasksBuilder = PersistentTasksCustomMetaData.builder(); + addJobTask("job_id", "node_id", JobState.OPENED, tasksBuilder); + ClusterState.Builder cs = ClusterState.builder(clusterService.state()) + .metaData(new MetaData.Builder().putCustom(PersistentTasksCustomMetaData.TYPE, tasksBuilder.build())); + when(clusterService.state()).thenReturn(cs.build()); + + Consumer handler = mockConsumer(); + DatafeedTask task = createDatafeedTask("datafeed_id", 0L, 60000L); + datafeedManager.run(task, handler); + + // Verify datafeed has not started running yet as job doesn't have an open autodetect communicator + verify(threadPool, never()).executor(MachineLearning.DATAFEED_THREAD_POOL_NAME); + + tasksBuilder = PersistentTasksCustomMetaData.builder(); + addJobTask("job_id", "node_id", JobState.OPENED, tasksBuilder); + addJobTask("another_job", "node_id", JobState.OPENED, tasksBuilder); + ClusterState.Builder anotherJobCs = ClusterState.builder(clusterService.state()) + .metaData(new MetaData.Builder().putCustom(PersistentTasksCustomMetaData.TYPE, tasksBuilder.build())); + + capturedClusterStateListener.getValue().clusterChanged(new ClusterChangedEvent("_source", anotherJobCs.build(), cs.build())); + + // Still no run + verify(threadPool, never()).executor(MachineLearning.DATAFEED_THREAD_POOL_NAME); + + hasOpenAutodetectCommunicator.set(true); + + capturedClusterStateListener.getValue().clusterChanged( + new ClusterChangedEvent("_source", cs.build(), anotherJobCs.build())); + + // Now it should run as the autodetect communicator is open + verify(threadPool, times(1)).executor(MachineLearning.DATAFEED_THREAD_POOL_NAME); + } + + public void testDatafeedTaskWaitsUntilJobIsNotStale() { + PersistentTasksCustomMetaData.Builder tasksBuilder = PersistentTasksCustomMetaData.builder(); + addJobTask("job_id", "node_id", JobState.OPENED, tasksBuilder, true); + ClusterState.Builder cs = ClusterState.builder(clusterService.state()) + .metaData(new MetaData.Builder().putCustom(PersistentTasksCustomMetaData.TYPE, tasksBuilder.build())); + when(clusterService.state()).thenReturn(cs.build()); + + Consumer handler = mockConsumer(); + DatafeedTask task = createDatafeedTask("datafeed_id", 0L, 60000L); + datafeedManager.run(task, handler); + + // Verify datafeed has not started running yet as job is stale (i.e. 
even though opened, it is part way through relocating) + verify(threadPool, never()).executor(MachineLearning.DATAFEED_THREAD_POOL_NAME); + + tasksBuilder = PersistentTasksCustomMetaData.builder(); + addJobTask("job_id", "node_id", JobState.OPENED, tasksBuilder, true); + addJobTask("another_job", "node_id", JobState.OPENED, tasksBuilder); + ClusterState.Builder anotherJobCs = ClusterState.builder(clusterService.state()) + .metaData(new MetaData.Builder().putCustom(PersistentTasksCustomMetaData.TYPE, tasksBuilder.build())); + + capturedClusterStateListener.getValue().clusterChanged(new ClusterChangedEvent("_source", anotherJobCs.build(), cs.build())); + + // Still no run + verify(threadPool, never()).executor(MachineLearning.DATAFEED_THREAD_POOL_NAME); + + tasksBuilder = PersistentTasksCustomMetaData.builder(); + addJobTask("job_id", "node_id", JobState.OPENED, tasksBuilder); + ClusterState.Builder jobOpenedCs = ClusterState.builder(clusterService.state()) + .metaData(new MetaData.Builder().putCustom(PersistentTasksCustomMetaData.TYPE, tasksBuilder.build())); + + capturedClusterStateListener.getValue().clusterChanged( + new ClusterChangedEvent("_source", jobOpenedCs.build(), anotherJobCs.build())); + // Now it should run as the job state changed to OPENED verify(threadPool, times(1)).executor(MachineLearning.DATAFEED_THREAD_POOL_NAME); } public void testDatafeedTaskStopsBecauseJobFailedWhileOpening() { - PersistentTasksCustomMetaData.Builder tasksBuilder = PersistentTasksCustomMetaData.builder(); + PersistentTasksCustomMetaData.Builder tasksBuilder = PersistentTasksCustomMetaData.builder(); addJobTask("job_id", "node_id", JobState.OPENING, tasksBuilder); ClusterState.Builder cs = ClusterState.builder(clusterService.state()) .metaData(new MetaData.Builder().putCustom(PersistentTasksCustomMetaData.TYPE, tasksBuilder.build())); @@ -296,7 +378,7 @@ public void testDatafeedTaskStopsBecauseJobFailedWhileOpening() { // Verify datafeed has not started running yet as job is still opening verify(threadPool, never()).executor(MachineLearning.DATAFEED_THREAD_POOL_NAME); - tasksBuilder = PersistentTasksCustomMetaData.builder(); + tasksBuilder = PersistentTasksCustomMetaData.builder(); addJobTask("job_id", "node_id", JobState.FAILED, tasksBuilder); ClusterState.Builder updatedCs = ClusterState.builder(clusterService.state()) .metaData(new MetaData.Builder().putCustom(PersistentTasksCustomMetaData.TYPE, tasksBuilder.build())); @@ -309,7 +391,7 @@ public void testDatafeedTaskStopsBecauseJobFailedWhileOpening() { } public void testDatafeedGetsStoppedWhileWaitingForJobToOpen() { - PersistentTasksCustomMetaData.Builder tasksBuilder = PersistentTasksCustomMetaData.builder(); + PersistentTasksCustomMetaData.Builder tasksBuilder = PersistentTasksCustomMetaData.builder(); addJobTask("job_id", "node_id", JobState.OPENING, tasksBuilder); ClusterState.Builder cs = ClusterState.builder(clusterService.state()) .metaData(new MetaData.Builder().putCustom(PersistentTasksCustomMetaData.TYPE, tasksBuilder.build())); @@ -326,7 +408,7 @@ public void testDatafeedGetsStoppedWhileWaitingForJobToOpen() { datafeedManager.stopDatafeed(task, "test", StopDatafeedAction.DEFAULT_TIMEOUT); // Update job state to opened - tasksBuilder = PersistentTasksCustomMetaData.builder(); + tasksBuilder = PersistentTasksCustomMetaData.builder(); addJobTask("job_id", "node_id", JobState.OPENED, tasksBuilder); ClusterState.Builder updatedCs = ClusterState.builder(clusterService.state()) .metaData(new
MetaData.Builder().putCustom(PersistentTasksCustomMetaData.TYPE, tasksBuilder.build())); @@ -340,7 +422,6 @@ public void testDatafeedGetsStoppedWhileWaitingForJobToOpen() { public static DatafeedConfig.Builder createDatafeedConfig(String datafeedId, String jobId) { DatafeedConfig.Builder datafeedConfig = new DatafeedConfig.Builder(datafeedId, jobId); datafeedConfig.setIndices(Collections.singletonList("myIndex")); - datafeedConfig.setTypes(Collections.singletonList("myType")); return datafeedConfig; } diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/delayeddatacheck/DelayedDataDetectorFactoryTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/delayeddatacheck/DelayedDataDetectorFactoryTests.java index 3b1ca4c3071e1..d776b720ed288 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/delayeddatacheck/DelayedDataDetectorFactoryTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/delayeddatacheck/DelayedDataDetectorFactoryTests.java @@ -89,7 +89,6 @@ private Job createJob(TimeValue bucketSpan) { private DatafeedConfig createDatafeed(boolean shouldDetectDelayedData, TimeValue delayedDatacheckWindow) { DatafeedConfig.Builder builder = new DatafeedConfig.Builder("id", "jobId"); builder.setIndices(Collections.singletonList("index1")); - builder.setTypes(Collections.singletonList("doc")); if (shouldDetectDelayedData) { builder.setDelayedDataCheckConfig(DelayedDataCheckConfig.enabledDelayedDataCheckConfig(delayedDatacheckWindow)); diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/aggregation/AggregationDataExtractorTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/aggregation/AggregationDataExtractorTests.java index 6561cfd56e23b..bb7d1d1729cd4 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/aggregation/AggregationDataExtractorTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/aggregation/AggregationDataExtractorTests.java @@ -51,7 +51,6 @@ public class AggregationDataExtractorTests extends ESTestCase { private String jobId; private String timeField; private Set fields; - private List types; private List indices; private QueryBuilder query; private AggregatorFactories.Builder aggs; @@ -84,7 +83,6 @@ public void setUpTests() { fields = new HashSet<>(); fields.addAll(Arrays.asList("time", "airline", "responsetime")); indices = Arrays.asList("index-1", "index-2"); - types = Arrays.asList("type-1", "type-2"); query = QueryBuilders.matchAllQuery(); aggs = new AggregatorFactories.Builder() .addAggregator(AggregationBuilders.histogram("time").field("time").interval(1000).subAggregation( @@ -267,7 +265,7 @@ public void testExtractionGivenInitSearchResponseEncounteredUnavailableShards() } private AggregationDataExtractorContext createContext(long start, long end) { - return new AggregationDataExtractorContext(jobId, timeField, fields, indices, types, query, aggs, start, end, true, + return new AggregationDataExtractorContext(jobId, timeField, fields, indices, query, aggs, start, end, true, Collections.emptyMap()); } diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/chunked/ChunkedDataExtractorTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/chunked/ChunkedDataExtractorTests.java index 20a5783dff7d2..c8e53dfcf7d3c 100644 --- 
a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/chunked/ChunkedDataExtractorTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/chunked/ChunkedDataExtractorTests.java @@ -49,7 +49,6 @@ public class ChunkedDataExtractorTests extends ESTestCase { private List capturedSearchRequests; private String jobId; private String timeField; - private List types; private List indices; private QueryBuilder query; private int scrollSize; @@ -86,7 +85,6 @@ public void setUpTests() { jobId = "test-job"; timeField = "time"; indices = Arrays.asList("index-1", "index-2"); - types = Arrays.asList("type-1", "type-2"); query = QueryBuilders.matchAllQuery(); scrollSize = 1000; chunkSpan = null; @@ -561,7 +559,7 @@ private ChunkedDataExtractorContext createContext(long start, long end) { } private ChunkedDataExtractorContext createContext(long start, long end, boolean hasAggregations, Long histogramInterval) { - return new ChunkedDataExtractorContext(jobId, timeField, indices, types, query, scrollSize, start, end, chunkSpan, + return new ChunkedDataExtractorContext(jobId, timeField, indices, query, scrollSize, start, end, chunkSpan, ChunkedDataExtractorFactory.newIdentityTimeAligner(), Collections.emptyMap(), hasAggregations, histogramInterval); } diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/fields/TimeBasedExtractedFieldsTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/fields/TimeBasedExtractedFieldsTests.java index 5e388afad282a..1fd6db3de566a 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/fields/TimeBasedExtractedFieldsTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/fields/TimeBasedExtractedFieldsTests.java @@ -120,7 +120,6 @@ public void testBuildGivenMixtureOfTypes() { DatafeedConfig.Builder datafeedBuilder = new DatafeedConfig.Builder("feed", jobBuilder.getId()); datafeedBuilder.setIndices(Collections.singletonList("foo")); - datafeedBuilder.setTypes(Collections.singletonList("doc")); datafeedBuilder.setScriptFields(Collections.singletonList(new SearchSourceBuilder.ScriptField("airport", null, false))); Map timeCaps = new HashMap<>(); @@ -200,7 +199,6 @@ public void testBuildGivenTimeFieldIsNotAggregatable() { DatafeedConfig.Builder datafeedBuilder = new DatafeedConfig.Builder("feed", jobBuilder.getId()); datafeedBuilder.setIndices(Collections.singletonList("foo")); - datafeedBuilder.setTypes(Collections.singletonList("doc")); Map timeCaps = new HashMap<>(); timeCaps.put("date", createFieldCaps(false)); @@ -220,7 +218,6 @@ public void testBuildGivenTimeFieldIsNotAggregatableInSomeIndices() { DatafeedConfig.Builder datafeedBuilder = new DatafeedConfig.Builder("feed", jobBuilder.getId()); datafeedBuilder.setIndices(Collections.singletonList("foo")); - datafeedBuilder.setTypes(Collections.singletonList("doc")); Map timeCaps = new HashMap<>(); timeCaps.put("date", createFieldCaps(true)); @@ -241,7 +238,6 @@ public void testBuildGivenFieldWithoutMappings() { DatafeedConfig.Builder datafeedBuilder = new DatafeedConfig.Builder("feed", jobBuilder.getId()); datafeedBuilder.setIndices(Collections.singletonList("foo")); - datafeedBuilder.setTypes(Collections.singletonList("doc")); Map timeCaps = new HashMap<>(); timeCaps.put("date", createFieldCaps(true)); diff --git 
a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/scroll/ScrollDataExtractorTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/scroll/ScrollDataExtractorTests.java index fbe721b6592bb..170c95d1cab6b 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/scroll/ScrollDataExtractorTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/scroll/ScrollDataExtractorTests.java @@ -65,7 +65,6 @@ public class ScrollDataExtractorTests extends ESTestCase { private ArgumentCaptor capturedClearScrollRequests; private String jobId; private TimeBasedExtractedFields extractedFields; - private List types; private List indices; private QueryBuilder query; private List scriptFields; @@ -134,7 +133,6 @@ public void setUpTests() { extractedFields = new TimeBasedExtractedFields(timeField, Arrays.asList(timeField, ExtractedField.newField("field_1", ExtractedField.ExtractionMethod.DOC_VALUE))); indices = Arrays.asList("index-1", "index-2"); - types = Arrays.asList("type-1", "type-2"); query = QueryBuilders.matchAllQuery(); scriptFields = Collections.emptyList(); scrollSize = 1000; @@ -422,7 +420,7 @@ public void testDomainSplitScriptField() throws IOException { List sFields = Arrays.asList(withoutSplit, withSplit); ScrollDataExtractorContext context = new ScrollDataExtractorContext(jobId, extractedFields, indices, - types, query, sFields, scrollSize, 1000, 2000, Collections.emptyMap()); + query, sFields, scrollSize, 1000, 2000, Collections.emptyMap()); TestDataExtractor extractor = new TestDataExtractor(context); @@ -467,7 +465,7 @@ public void testDomainSplitScriptField() throws IOException { } private ScrollDataExtractorContext createContext(long start, long end) { - return new ScrollDataExtractorContext(jobId, extractedFields, indices, types, query, scriptFields, scrollSize, start, end, + return new ScrollDataExtractorContext(jobId, extractedFields, indices, query, scriptFields, scrollSize, start, end, Collections.emptyMap()); } diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/filestructurefinder/TimeoutCheckerTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/filestructurefinder/TimeoutCheckerTests.java index 8518096d64478..ea581f663462f 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/filestructurefinder/TimeoutCheckerTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/filestructurefinder/TimeoutCheckerTests.java @@ -72,6 +72,9 @@ public void testWatchdog() { } finally { TimeoutChecker.watchdog.unregister(); } + } finally { + // ensure the interrupted flag is cleared to stop it making subsequent tests fail + Thread.interrupted(); } } @@ -89,6 +92,9 @@ public void testGrokCaptures() throws Exception { assertEquals("Aborting grok captures test during [should timeout] as it has taken longer than the timeout of [" + timeout + "]", e.getMessage()); }); + } finally { + // ensure the interrupted flag is cleared to stop it making subsequent tests fail + Thread.interrupted(); } } } diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/integration/JobResultsProviderIT.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/integration/JobResultsProviderIT.java index 3843181a0bc3c..02cc738477cfb 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/integration/JobResultsProviderIT.java +++ 
b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/integration/JobResultsProviderIT.java @@ -379,7 +379,7 @@ public void testGetAutodetectParams() throws Exception { Quantiles quantiles = new Quantiles(jobId, new Date(), "quantile-state"); indexQuantiles(quantiles); - client().admin().indices().prepareRefresh(MlMetaIndex.INDEX_NAME, AnomalyDetectorsIndex.jobStateIndexName(), + client().admin().indices().prepareRefresh(MlMetaIndex.INDEX_NAME, AnomalyDetectorsIndex.jobStateIndexPattern(), AnomalyDetectorsIndex.jobResultsAliasedName(jobId)).get(); diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/integration/MlConfigMigratorIT.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/integration/MlConfigMigratorIT.java index 87c0e4ac824ce..33b2484766669 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/integration/MlConfigMigratorIT.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/integration/MlConfigMigratorIT.java @@ -6,8 +6,8 @@ package org.elasticsearch.xpack.ml.integration; import org.elasticsearch.Version; -import org.elasticsearch.action.get.GetResponse; import org.elasticsearch.action.index.IndexResponse; +import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ClusterStateUpdateTask; @@ -28,6 +28,7 @@ import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.index.Index; +import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.xpack.core.ml.MlMetadata; import org.elasticsearch.xpack.core.ml.datafeed.DatafeedConfig; @@ -52,6 +53,7 @@ import static org.elasticsearch.xpack.core.ml.job.config.JobTests.buildJobBuilder; import static org.hamcrest.Matchers.empty; +import static org.hamcrest.Matchers.greaterThan; import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.is; import static org.mockito.Matchers.eq; @@ -308,12 +310,17 @@ public void testMigrateConfigsWithoutTasks_GivenMigrationIsDisabled() throws Int } public void assertSnapshot(MlMetadata expectedMlMetadata) throws IOException { - GetResponse getResponse = client() - .prepareGet(AnomalyDetectorsIndex.jobStateIndexName(), ElasticsearchMappings.DOC_TYPE, "ml-config").get(); + client().admin().indices().prepareRefresh(AnomalyDetectorsIndex.jobStateIndexPattern()).execute(); + SearchResponse searchResponse = client() + .prepareSearch(AnomalyDetectorsIndex.jobStateIndexPattern()) + .setTypes(ElasticsearchMappings.DOC_TYPE) + .setSize(1) + .setQuery(QueryBuilders.idsQuery().addIds("ml-config")) + .get(); - assertTrue(getResponse.isExists()); + assertThat(searchResponse.getHits().getHits().length, greaterThan(0)); - try (InputStream stream = getResponse.getSourceAsBytesRef().streamInput(); + try (InputStream stream = searchResponse.getHits().getAt(0).getSourceRef().streamInput(); XContentParser parser = XContentFactory.xContent(XContentType.JSON) .createParser(NamedXContentRegistry.EMPTY, LoggingDeprecationHandler.INSTANCE, stream)) { MlMetadata recoveredMeta = MlMetadata.LENIENT_PARSER.apply(parser, null).build(); diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/integration/NetworkDisruptionIT.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/integration/NetworkDisruptionIT.java index 4ab369120e019..3304963ae3570 100644 --- 
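In MlConfigMigratorIT above, the assertion switches from a document GET to a search because `jobStateIndexName()` became `jobStateIndexPattern()`: a GET needs one concrete index, while a pattern may resolve to several. A hedged sketch of the equivalent by-id lookup:

```java
// Refresh the pattern, then search by _id instead of GETting from a single index.
client().admin().indices().prepareRefresh(AnomalyDetectorsIndex.jobStateIndexPattern()).get();
SearchResponse searchResponse = client()
    .prepareSearch(AnomalyDetectorsIndex.jobStateIndexPattern())
    .setQuery(QueryBuilders.idsQuery().addIds("ml-config"))
    .setSize(1)
    .get();
assertThat(searchResponse.getHits().getHits().length, greaterThan(0));
```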
a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/integration/NetworkDisruptionIT.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/integration/NetworkDisruptionIT.java @@ -93,8 +93,9 @@ public void testJobRelocation() throws Exception { assertEquals(newJobNode, finalJobNode); // The job running on the original node should have been killed, and hence should not have persisted quantiles - SearchResponse searchResponse = client().prepareSearch(AnomalyDetectorsIndex.jobStateIndexName()) + SearchResponse searchResponse = client().prepareSearch(AnomalyDetectorsIndex.jobStateIndexPattern()) .setQuery(QueryBuilders.idsQuery().addIds(Quantiles.documentId(job.getId()))) + .setTrackTotalHits(true) .setIndicesOptions(IndicesOptions.lenientExpandOpen()).execute().actionGet(); assertEquals(0L, searchResponse.getHits().getTotalHits().value); @@ -103,8 +104,9 @@ public void testJobRelocation() throws Exception { assertTrue(closeJobResponse.isClosed()); // The relocated job was closed rather than killed, and hence should have persisted quantiles - searchResponse = client().prepareSearch(AnomalyDetectorsIndex.jobStateIndexName()) + searchResponse = client().prepareSearch(AnomalyDetectorsIndex.jobStateIndexPattern()) .setQuery(QueryBuilders.idsQuery().addIds(Quantiles.documentId(job.getId()))) + .setTrackTotalHits(true) .setIndicesOptions(IndicesOptions.lenientExpandOpen()).execute().actionGet(); assertEquals(1L, searchResponse.getHits().getTotalHits().value); } diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/persistence/JobResultsProviderTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/persistence/JobResultsProviderTests.java index c2bda603724d6..8532cfc4feac4 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/persistence/JobResultsProviderTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/persistence/JobResultsProviderTests.java @@ -11,7 +11,6 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.admin.indices.create.CreateIndexRequest; import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsResponse; -import org.elasticsearch.action.get.GetResponse; import org.elasticsearch.action.search.MultiSearchRequest; import org.elasticsearch.action.search.MultiSearchResponse; import org.elasticsearch.action.search.SearchRequest; @@ -834,13 +833,6 @@ private JobResultsProvider createProvider(Client client) { return new JobResultsProvider(client, Settings.EMPTY); } - private static GetResponse createGetResponse(boolean exists, Map source) throws IOException { - GetResponse getResponse = mock(GetResponse.class); - when(getResponse.isExists()).thenReturn(exists); - when(getResponse.getSourceAsBytesRef()).thenReturn(BytesReference.bytes(XContentFactory.jsonBuilder().map(source))); - return getResponse; - } - private static SearchResponse createSearchResponse(List> source) throws IOException { SearchResponse response = mock(SearchResponse.class); List list = new ArrayList<>(); diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/persistence/MockClientBuilder.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/persistence/MockClientBuilder.java index 627465f1d4f21..4a4284e2d1456 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/persistence/MockClientBuilder.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/persistence/MockClientBuilder.java @@ -271,6 +271,11 @@ 
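The `setTrackTotalHits(true)` calls added in NetworkDisruptionIT above go with the wider total-hits work in this change set: exact totals are no longer guaranteed by default, so tests that assert on `getTotalHits().value` must opt in. A hedged sketch of the pattern:

```java
// Opt in to exact total-hit tracking before asserting an exact count.
SearchResponse searchResponse = client().prepareSearch(AnomalyDetectorsIndex.jobStateIndexPattern())
    .setQuery(QueryBuilders.idsQuery().addIds(Quantiles.documentId(job.getId())))
    .setTrackTotalHits(true)
    .setIndicesOptions(IndicesOptions.lenientExpandOpen())
    .get();
assertEquals(0L, searchResponse.getHits().getTotalHits().value);
```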
public MockClientBuilder prepareSearch(String index, String type, int from, int return this; } + public MockClientBuilder prepareSearches(String index, SearchRequestBuilder first, SearchRequestBuilder... searches) { + when(client.prepareSearch(eq(index))).thenReturn(first, searches); + return this; + } + /** * Creates a {@link SearchResponse} with a {@link SearchHit} for each element of {@code docs} * @param indexName Index being searched diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/persistence/StateStreamerTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/persistence/StateStreamerTests.java index beae3959308e9..1629a8bcdbad5 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/persistence/StateStreamerTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/persistence/StateStreamerTests.java @@ -5,14 +5,20 @@ */ package org.elasticsearch.xpack.ml.job.persistence; -import org.elasticsearch.action.get.GetResponse; +import org.elasticsearch.action.search.SearchRequestBuilder; +import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.client.Client; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.index.query.QueryBuilder; +import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.mock.orig.Mockito; +import org.elasticsearch.search.SearchHit; +import org.elasticsearch.search.SearchHits; +import org.elasticsearch.search.sort.SortBuilder; +import org.elasticsearch.search.sort.SortOrder; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.core.ml.job.persistence.AnomalyDetectorsIndex; -import org.elasticsearch.xpack.core.ml.job.persistence.ElasticsearchMappings; import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.CategorizerState; import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.ModelSnapshot; import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.ModelState; @@ -21,9 +27,14 @@ import java.io.IOException; import java.io.OutputStream; import java.nio.charset.StandardCharsets; +import java.util.Collections; import java.util.HashMap; +import java.util.List; import java.util.Map; +import static org.mockito.Matchers.any; +import static org.mockito.Matchers.anyInt; +import static org.mockito.Matchers.eq; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; @@ -36,25 +47,25 @@ public void testRestoreStateToStream() throws Exception { String snapshotId = "123"; Map categorizerState = new HashMap<>(); categorizerState.put("catName", "catVal"); - GetResponse categorizerStateGetResponse1 = createGetResponse(true, categorizerState); - GetResponse categorizerStateGetResponse2 = createGetResponse(false, null); - Map modelState = new HashMap<>(); - modelState.put("modName", "modVal1"); - GetResponse modelStateGetResponse1 = createGetResponse(true, modelState); - modelState.put("modName", "modVal2"); - GetResponse modelStateGetResponse2 = createGetResponse(true, modelState); - - MockClientBuilder clientBuilder = new MockClientBuilder(CLUSTER_NAME).addClusterStatusYellowResponse() - .prepareGet(AnomalyDetectorsIndex.jobStateIndexName(), ElasticsearchMappings.DOC_TYPE, - CategorizerState.documentId(JOB_ID, 1), categorizerStateGetResponse1) - .prepareGet(AnomalyDetectorsIndex.jobStateIndexName(), ElasticsearchMappings.DOC_TYPE, - CategorizerState.documentId(JOB_ID, 2), 
categorizerStateGetResponse2) - .prepareGet(AnomalyDetectorsIndex.jobStateIndexName(), ElasticsearchMappings.DOC_TYPE, - ModelState.documentId(JOB_ID, snapshotId, 1), modelStateGetResponse1) - .prepareGet(AnomalyDetectorsIndex.jobStateIndexName(), ElasticsearchMappings.DOC_TYPE, - ModelState.documentId(JOB_ID, snapshotId, 2), modelStateGetResponse2); + Map modelState1 = new HashMap<>(); + modelState1.put("modName1", "modVal1"); + Map modelState2 = new HashMap<>(); + modelState2.put("modName2", "modVal2"); + SearchRequestBuilder builder1 = prepareSearchBuilder(createSearchResponse(Collections.singletonList(modelState1)), + QueryBuilders.idsQuery().addIds(ModelState.documentId(JOB_ID, snapshotId, 1))); + SearchRequestBuilder builder2 = prepareSearchBuilder(createSearchResponse(Collections.singletonList(modelState2)), + QueryBuilders.idsQuery().addIds(ModelState.documentId(JOB_ID, snapshotId, 2))); + SearchRequestBuilder builder3 = prepareSearchBuilder(createSearchResponse(Collections.singletonList(categorizerState)), + QueryBuilders.idsQuery().addIds(CategorizerState.documentId(JOB_ID, 1))); + SearchRequestBuilder builder4 = prepareSearchBuilder(createSearchResponse(Collections.emptyList()), + QueryBuilders.idsQuery().addIds(CategorizerState.documentId(JOB_ID, 2))); + + MockClientBuilder clientBuilder = new MockClientBuilder(CLUSTER_NAME) + .addClusterStatusYellowResponse() + .prepareSearches(AnomalyDetectorsIndex.jobStateIndexPattern(), builder1, builder2, builder3, builder4); + ModelSnapshot modelSnapshot = new ModelSnapshot.Builder(JOB_ID).setSnapshotId(snapshotId).setSnapshotDocCount(2).build(); ByteArrayOutputStream stream = new ByteArrayOutputStream(); @@ -64,8 +75,8 @@ public void testRestoreStateToStream() throws Exception { String[] restoreData = stream.toString(StandardCharsets.UTF_8.name()).split("\0"); assertEquals(3, restoreData.length); - assertEquals("{\"modName\":\"modVal1\"}", restoreData[0]); - assertEquals("{\"modName\":\"modVal2\"}", restoreData[1]); + assertEquals("{\"modName1\":\"modVal1\"}", restoreData[0]); + assertEquals("{\"modName2\":\"modVal2\"}", restoreData[1]); assertEquals("{\"catName\":\"catVal\"}", restoreData[2]); } @@ -80,10 +91,32 @@ public void testCancelBeforeRestoreWasCalled() throws IOException { Mockito.verifyNoMoreInteractions(outputStream); } - private static GetResponse createGetResponse(boolean exists, Map source) throws IOException { - GetResponse getResponse = mock(GetResponse.class); - when(getResponse.isExists()).thenReturn(exists); - when(getResponse.getSourceAsBytesRef()).thenReturn(BytesReference.bytes(XContentFactory.jsonBuilder().map(source))); - return getResponse; + private static SearchResponse createSearchResponse(List> source) throws IOException { + SearchResponse searchResponse = mock(SearchResponse.class); + SearchHit[] hits = new SearchHit[source.size()]; + int i = 0; + for (Map s : source) { + SearchHit hit = new SearchHit(1).sourceRef(BytesReference.bytes(XContentFactory.jsonBuilder().map(s))); + hits[i++] = hit; + } + SearchHits searchHits = new SearchHits(hits, null, (float)0.0); + when(searchResponse.getHits()).thenReturn(searchHits); + return searchResponse; + } + + private static SearchRequestBuilder prepareSearchBuilder(SearchResponse response, QueryBuilder queryBuilder) { + SearchRequestBuilder builder = mock(SearchRequestBuilder.class); + when(builder.setTypes(any())).thenReturn(builder); + when(builder.addSort(any(SortBuilder.class))).thenReturn(builder); + when(builder.setQuery(queryBuilder)).thenReturn(builder); + 
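// A hedged aside on the two Mockito idioms meeting in these helpers:
// 1) prepareSearches(...) above relies on varargs thenReturn(first, searches):
//    consecutive client.prepareSearch(index) calls return the stubbed builders
//    in order, one per expected search in the test.
// 2) each chained setter on this mocked SearchRequestBuilder is stubbed to
//    return the mock itself, so production code can keep its fluent
//    builder.setQuery(...).setSize(...).get() chain unchanged.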
when(builder.setPostFilter(any())).thenReturn(builder); + when(builder.setFrom(anyInt())).thenReturn(builder); + when(builder.setSize(anyInt())).thenReturn(builder); + when(builder.setFetchSource(eq(true))).thenReturn(builder); + when(builder.addDocValueField(any(String.class))).thenReturn(builder); + when(builder.addDocValueField(any(String.class), any(String.class))).thenReturn(builder); + when(builder.addSort(any(String.class), any(SortOrder.class))).thenReturn(builder); + when(builder.get()).thenReturn(response); + return builder; } -} \ No newline at end of file +} diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/autodetect/output/AutodetectStateProcessorTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/autodetect/output/AutodetectStateProcessorTests.java index e4fb5a7f07456..4f5477a75f863 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/autodetect/output/AutodetectStateProcessorTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/autodetect/output/AutodetectStateProcessorTests.java @@ -39,13 +39,13 @@ public class AutodetectStateProcessorTests extends ESTestCase { private static final String STATE_SAMPLE = "" - + "{\"index\": {\"_index\": \"test\", \"_type\": \"type1\", \"_id\": \"1\"}}\n" + + "{\"index\": {\"_index\": \"test\", \"_id\": \"1\"}}\n" + "{ \"field\" : \"value1\" }\n" + "\0" - + "{\"index\": {\"_index\": \"test\", \"_type\": \"type1\", \"_id\": \"2\"}}\n" + + "{\"index\": {\"_index\": \"test\", \"_id\": \"2\"}}\n" + "{ \"field\" : \"value2\" }\n" + "\0" - + "{\"index\": {\"_index\": \"test\", \"_type\": \"type1\", \"_id\": \"3\"}}\n" + + "{\"index\": {\"_index\": \"test\", \"_id\": \"3\"}}\n" + "{ \"field\" : \"value3\" }\n" + "\0"; @@ -118,7 +118,7 @@ public void testStateReadGivenConsecutiveSpacesFollowedByZeroByte() throws IOExc public void testLargeStateRead() throws Exception { StringBuilder builder = new StringBuilder(NUM_LARGE_DOCS * (LARGE_DOC_SIZE + 10)); // 10 for header and separators for (int docNum = 1; docNum <= NUM_LARGE_DOCS; ++docNum) { - builder.append("{\"index\":{\"_index\":\"header").append(docNum).append("\",\"_type\":\"type\"}}\n"); + builder.append("{\"index\":{\"_index\":\"header").append(docNum).append("\"}}\n"); for (int count = 0; count < (LARGE_DOC_SIZE / "data".length()); ++count) { builder.append("data"); } diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/process/MlMemoryTrackerTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/process/MlMemoryTrackerTests.java index 197fa469bed7c..3e54994ac043b 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/process/MlMemoryTrackerTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/process/MlMemoryTrackerTests.java @@ -123,9 +123,10 @@ public void testRefreshOne() { return null; }).when(jobResultsProvider).getEstablishedMemoryUsage(eq(jobId), any(), any(), any(Consumer.class), any()); - long modelMemoryLimitMb = 2; + boolean simulateVeryOldJob = randomBoolean(); + long recentJobModelMemoryLimitMb = 2; Job job = mock(Job.class); - when(job.getAnalysisLimits()).thenReturn(new AnalysisLimits(modelMemoryLimitMb, 4L)); + when(job.getAnalysisLimits()).thenReturn(simulateVeryOldJob ? 
null : new AnalysisLimits(recentJobModelMemoryLimitMb, 4L)); doAnswer(invocation -> { @SuppressWarnings("unchecked") ActionListener listener = (ActionListener) invocation.getArguments()[1]; @@ -141,7 +142,9 @@ public void testRefreshOne() { assertEquals(Long.valueOf(modelBytes + Job.PROCESS_MEMORY_OVERHEAD.getBytes()), memoryTracker.getJobMemoryRequirement(jobId)); } else { - assertEquals(Long.valueOf(ByteSizeUnit.MB.toBytes(modelMemoryLimitMb) + Job.PROCESS_MEMORY_OVERHEAD.getBytes()), + long expectedModelMemoryLimit = + simulateVeryOldJob ? AnalysisLimits.PRE_6_1_DEFAULT_MODEL_MEMORY_LIMIT_MB : recentJobModelMemoryLimitMb; + assertEquals(Long.valueOf(ByteSizeUnit.MB.toBytes(expectedModelMemoryLimit) + Job.PROCESS_MEMORY_OVERHEAD.getBytes()), memoryTracker.getJobMemoryRequirement(jobId)); } } else { diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/support/BaseMlIntegTestCase.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/support/BaseMlIntegTestCase.java index 3c07db3a9abd2..44cdd2434aec7 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/support/BaseMlIntegTestCase.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/support/BaseMlIntegTestCase.java @@ -199,7 +199,6 @@ public static DatafeedConfig.Builder createDatafeedBuilder(String datafeedId, St builder.setQueryDelay(TimeValue.timeValueSeconds(1)); builder.setFrequency(TimeValue.timeValueSeconds(1)); builder.setIndices(indices); - builder.setTypes(Collections.singletonList("type")); return builder; } diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/utils/ChainTaskExecutorTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/utils/VoidChainTaskExecutorTests.java similarity index 62% rename from x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/utils/ChainTaskExecutorTests.java rename to x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/utils/VoidChainTaskExecutorTests.java index 87b83852ff56c..44bf4cf75aa13 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/utils/ChainTaskExecutorTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/utils/VoidChainTaskExecutorTests.java @@ -19,7 +19,7 @@ import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.equalTo; -public class ChainTaskExecutorTests extends ESTestCase { +public class VoidChainTaskExecutorTests extends ESTestCase { private final ThreadPool threadPool = new TestThreadPool(getClass().getName()); private final CountDownLatch latch = new CountDownLatch(1); @@ -36,18 +36,18 @@ public void tearDown() throws Exception { public void testExecute() throws InterruptedException { final List strings = new ArrayList<>(); - ActionListener finalListener = createBlockingListener(() -> strings.add("last"), e -> fail()); - ChainTaskExecutor chainTaskExecutor = new ChainTaskExecutor(threadPool.generic(), false); - chainTaskExecutor.add(listener -> { + ActionListener> finalListener = createBlockingListener(() -> strings.add("last"), e -> fail()); + VoidChainTaskExecutor voidChainTaskExecutor = new VoidChainTaskExecutor(threadPool.generic(), false); + voidChainTaskExecutor.add(listener -> { strings.add("first"); listener.onResponse(null); }); - chainTaskExecutor.add(listener -> { + voidChainTaskExecutor.add(listener -> { strings.add("second"); listener.onResponse(null); }); - chainTaskExecutor.execute(finalListener); + voidChainTaskExecutor.execute(finalListener); latch.await(); @@ -56,22 +56,22 @@ public void 
testExecute() throws InterruptedException { public void testExecute_GivenSingleFailureAndShortCircuit() throws InterruptedException { final List strings = new ArrayList<>(); - ActionListener finalListener = createBlockingListener(() -> fail(), + ActionListener> finalListener = createBlockingListener(() -> fail(), e -> assertThat(e.getMessage(), equalTo("some error"))); - ChainTaskExecutor chainTaskExecutor = new ChainTaskExecutor(threadPool.generic(), true); - chainTaskExecutor.add(listener -> { + VoidChainTaskExecutor voidChainTaskExecutor = new VoidChainTaskExecutor(threadPool.generic(), true); + voidChainTaskExecutor.add(listener -> { strings.add("before"); listener.onResponse(null); }); - chainTaskExecutor.add(listener -> { + voidChainTaskExecutor.add(listener -> { throw new RuntimeException("some error"); }); - chainTaskExecutor.add(listener -> { + voidChainTaskExecutor.add(listener -> { strings.add("after"); listener.onResponse(null); }); - chainTaskExecutor.execute(finalListener); + voidChainTaskExecutor.execute(finalListener); latch.await(); @@ -80,21 +80,21 @@ public void testExecute_GivenSingleFailureAndShortCircuit() throws InterruptedEx public void testExecute_GivenMultipleFailuresAndShortCircuit() throws InterruptedException { final List strings = new ArrayList<>(); - ActionListener finalListener = createBlockingListener(() -> fail(), + ActionListener> finalListener = createBlockingListener(() -> fail(), e -> assertThat(e.getMessage(), equalTo("some error 1"))); - ChainTaskExecutor chainTaskExecutor = new ChainTaskExecutor(threadPool.generic(), true); - chainTaskExecutor.add(listener -> { + VoidChainTaskExecutor voidChainTaskExecutor = new VoidChainTaskExecutor(threadPool.generic(), true); + voidChainTaskExecutor.add(listener -> { strings.add("before"); listener.onResponse(null); }); - chainTaskExecutor.add(listener -> { + voidChainTaskExecutor.add(listener -> { throw new RuntimeException("some error 1"); }); - chainTaskExecutor.add(listener -> { + voidChainTaskExecutor.add(listener -> { throw new RuntimeException("some error 2"); }); - chainTaskExecutor.execute(finalListener); + voidChainTaskExecutor.execute(finalListener); latch.await(); @@ -103,21 +103,21 @@ public void testExecute_GivenMultipleFailuresAndShortCircuit() throws Interrupte public void testExecute_GivenFailureAndNoShortCircuit() throws InterruptedException { final List strings = new ArrayList<>(); - ActionListener finalListener = createBlockingListener(() -> strings.add("last"), e -> fail()); - ChainTaskExecutor chainTaskExecutor = new ChainTaskExecutor(threadPool.generic(), false); - chainTaskExecutor.add(listener -> { + ActionListener> finalListener = createBlockingListener(() -> strings.add("last"), e -> fail()); + VoidChainTaskExecutor voidChainTaskExecutor = new VoidChainTaskExecutor(threadPool.generic(), false); + voidChainTaskExecutor.add(listener -> { strings.add("before"); listener.onResponse(null); }); - chainTaskExecutor.add(listener -> { + voidChainTaskExecutor.add(listener -> { throw new RuntimeException("some error"); }); - chainTaskExecutor.add(listener -> { + voidChainTaskExecutor.add(listener -> { strings.add("after"); listener.onResponse(null); }); - chainTaskExecutor.execute(finalListener); + voidChainTaskExecutor.execute(finalListener); latch.await(); @@ -126,17 +126,17 @@ public void testExecute_GivenFailureAndNoShortCircuit() throws InterruptedExcept public void testExecute_GivenNoTasksAdded() throws InterruptedException { final List strings = new ArrayList<>(); - ActionListener 
finalListener = createBlockingListener(() -> strings.add("last"), e -> fail()); - ChainTaskExecutor chainTaskExecutor = new ChainTaskExecutor(threadPool.generic(), false); + ActionListener> finalListener = createBlockingListener(() -> strings.add("last"), e -> fail()); + VoidChainTaskExecutor voidChainTaskExecutor = new VoidChainTaskExecutor(threadPool.generic(), false); - chainTaskExecutor.execute(finalListener); + voidChainTaskExecutor.execute(finalListener); latch.await(); assertThat(strings, contains("last")); } - private ActionListener createBlockingListener(Runnable runnable, Consumer errorHandler) { + private ActionListener> createBlockingListener(Runnable runnable, Consumer errorHandler) { return ActionListener.wrap(nullValue -> { runnable.run(); latch.countDown(); @@ -145,4 +145,4 @@ private ActionListener createBlockingListener(Runnable runnable, Consumer< latch.countDown(); }); } -} \ No newline at end of file +} diff --git a/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/exporter/Exporter.java b/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/exporter/Exporter.java index 85a6da15177f9..34c069adb2a02 100644 --- a/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/exporter/Exporter.java +++ b/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/exporter/Exporter.java @@ -25,11 +25,11 @@ public abstract class Exporter implements AutoCloseable { private static final Setting.AffixSetting ENABLED_SETTING = Setting.affixKeySetting("xpack.monitoring.exporters.","enabled", - (key) -> Setting.boolSetting(key, true, Property.Dynamic, Property.NodeScope)); + key -> Setting.boolSetting(key, true, Property.Dynamic, Property.NodeScope)); private static final Setting.AffixSetting TYPE_SETTING = Setting.affixKeySetting("xpack.monitoring.exporters.","type", - (key) -> Setting.simpleString(key, (v, s) -> { + key -> Setting.simpleString(key, v -> { switch (v) { case "": case "http": @@ -47,13 +47,13 @@ public abstract class Exporter implements AutoCloseable { */ public static final Setting.AffixSetting USE_INGEST_PIPELINE_SETTING = Setting.affixKeySetting("xpack.monitoring.exporters.","use_ingest", - (key) -> Setting.boolSetting(key, true, Property.Dynamic, Property.NodeScope)); + key -> Setting.boolSetting(key, true, Property.Dynamic, Property.NodeScope)); /** * Every {@code Exporter} allows users to explicitly disable cluster alerts. */ public static final Setting.AffixSetting CLUSTER_ALERTS_MANAGEMENT_SETTING = Setting.affixKeySetting("xpack.monitoring.exporters.", "cluster_alerts.management.enabled", - (key) -> Setting.boolSetting(key, true, Property.Dynamic, Property.NodeScope)); + key -> Setting.boolSetting(key, true, Property.Dynamic, Property.NodeScope)); /** * Every {@code Exporter} allows users to explicitly disable specific cluster alerts. *
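The rename from ChainTaskExecutor to VoidChainTaskExecutor above also changes the listener type: the final listener now receives the accumulated `List<Void>` rather than a bare `Void`. A hedged usage sketch based on those tests; `doStepOne`, `doStepTwo`, `onAllTasksDone` and `handleFailure` are hypothetical:

```java
VoidChainTaskExecutor executor = new VoidChainTaskExecutor(threadPool.generic(), true /* short-circuit */);
executor.add(listener -> {
    doStepOne();
    listener.onResponse(null);
});
executor.add(listener -> {
    doStepTwo();
    listener.onResponse(null);
});
// With short-circuit enabled, the first failing task skips the rest and the
// final listener sees onFailure; otherwise it gets the List<Void> of results.
executor.execute(ActionListener.wrap(results -> onAllTasksDone(), e -> handleFailure(e)));
```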

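Each `Setting.affixKeySetting` in this Exporter file defines a whole family of per-exporter settings. A hedged illustration of how one affix setting resolves for a hypothetical exporter named `my_local`:

```java
// Matches keys like xpack.monitoring.exporters.my_local.enabled
// ("my_local" is a hypothetical exporter name).
Setting.AffixSetting<Boolean> enabled =
    Setting.affixKeySetting("xpack.monitoring.exporters.", "enabled",
        key -> Setting.boolSetting(key, true, Property.Dynamic, Property.NodeScope));
boolean enabledForMyLocal = enabled.getConcreteSettingForNamespace("my_local").get(settings);
```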
@@ -61,14 +61,14 @@ public abstract class Exporter implements AutoCloseable { */ public static final Setting.AffixSetting> CLUSTER_ALERTS_BLACKLIST_SETTING = Setting .affixKeySetting("xpack.monitoring.exporters.", "cluster_alerts.management.blacklist", - (key) -> Setting.listSetting(key, Collections.emptyList(), Function.identity(), Property.Dynamic, Property.NodeScope)); + key -> Setting.listSetting(key, Collections.emptyList(), Function.identity(), Property.Dynamic, Property.NodeScope)); /** * Every {@code Exporter} allows users to use a different index time format. */ private static final Setting.AffixSetting INDEX_NAME_TIME_FORMAT_SETTING = Setting.affixKeySetting("xpack.monitoring.exporters.","index.name.time_format", - (key) -> Setting.simpleString(key, Property.Dynamic, Property.NodeScope)); + key -> Setting.simpleString(key, Property.Dynamic, Property.NodeScope)); private static final String INDEX_FORMAT = "YYYY.MM.dd"; diff --git a/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/exporter/local/LocalExporter.java b/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/exporter/local/LocalExporter.java index 293a8a5e325e2..e18485fc237da 100644 --- a/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/exporter/local/LocalExporter.java +++ b/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/exporter/local/LocalExporter.java @@ -251,7 +251,7 @@ private boolean setupIfElectedMaster(final ClusterState clusterState, final Map< final boolean clusterStateChange) { // we are on the elected master // Check that there is nothing that could block metadata updates - if (clusterState.blocks().hasGlobalBlock(ClusterBlockLevel.METADATA_WRITE)) { + if (clusterState.blocks().hasGlobalBlockWithLevel(ClusterBlockLevel.METADATA_WRITE)) { logger.debug("waiting until metadata writes are unblocked"); return false; } diff --git a/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/action/MonitoringBulkRequestTests.java b/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/action/MonitoringBulkRequestTests.java index dc5cad7c94fd4..a8ab2960194f2 100644 --- a/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/action/MonitoringBulkRequestTests.java +++ b/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/action/MonitoringBulkRequestTests.java @@ -14,6 +14,7 @@ import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.rest.action.document.RestBulkAction; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.RandomObjects; import org.elasticsearch.xpack.core.monitoring.MonitoredSystem; @@ -141,6 +142,8 @@ public void testAddRequestContent() throws IOException { assertThat(bulkDoc.getXContentType(), equalTo(xContentType)); ++count; } + //This test's JSON contains outdated references to types + assertWarnings(RestBulkAction.TYPES_DEPRECATION_MESSAGE); } public void testAddRequestContentWithEmptySource() throws IOException { @@ -188,6 +191,8 @@ public void testAddRequestContentWithEmptySource() throws IOException { ); assertThat(e.getMessage(), containsString("source is missing for monitoring document [][doc][" + nbDocs + "]")); + //This test's JSON contains outdated references to types + assertWarnings(RestBulkAction.TYPES_DEPRECATION_MESSAGE); } public void 
testAddRequestContentWithUnrecognizedIndexName() throws IOException { @@ -225,6 +230,9 @@ public void testAddRequestContentWithUnrecognizedIndexName() throws IOException ); assertThat(e.getMessage(), containsString("unrecognized index name [" + indexName + "]")); + //This test's JSON contains outdated references to types + assertWarnings(RestBulkAction.TYPES_DEPRECATION_MESSAGE); + } public void testSerialization() throws IOException { diff --git a/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/integration/MonitoringIT.java b/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/integration/MonitoringIT.java index dd45c3e06db5d..555f2659113fd 100644 --- a/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/integration/MonitoringIT.java +++ b/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/integration/MonitoringIT.java @@ -26,6 +26,7 @@ import org.elasticsearch.license.License; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.rest.action.document.RestBulkAction; import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.SearchHits; import org.elasticsearch.search.collapse.CollapseBuilder; @@ -180,6 +181,7 @@ public void testMonitoringBulk() throws Exception { assertMonitoringDoc(toMap(hit), system, "test", interval); } }); + assertWarnings(RestBulkAction.TYPES_DEPRECATION_MESSAGE); } /** diff --git a/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/rest/action/RestMonitoringBulkActionTests.java b/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/rest/action/RestMonitoringBulkActionTests.java index 7a4427c9f0fdc..10fc10e3f973d 100644 --- a/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/rest/action/RestMonitoringBulkActionTests.java +++ b/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/rest/action/RestMonitoringBulkActionTests.java @@ -23,6 +23,7 @@ import org.elasticsearch.rest.RestResponse; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.rest.action.RestBuilderListener; +import org.elasticsearch.rest.action.document.RestBulkAction; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.rest.FakeRestRequest; import org.elasticsearch.xpack.core.XPackClient; @@ -121,6 +122,8 @@ public void testNoErrors() throws Exception { assertThat(restResponse.status(), is(RestStatus.OK)); assertThat(restResponse.content().utf8ToString(), is("{\"took\":" + response.getTookInMillis() + ",\"ignored\":false,\"errors\":false}")); + //This test's JSON contains outdated references to types + assertWarnings(RestBulkAction.TYPES_DEPRECATION_MESSAGE); } public void testNoErrorsButIgnored() throws Exception { @@ -131,6 +134,8 @@ public void testNoErrorsButIgnored() throws Exception { assertThat(restResponse.status(), is(RestStatus.OK)); assertThat(restResponse.content().utf8ToString(), is("{\"took\":" + response.getTookInMillis() + ",\"ignored\":true,\"errors\":false}")); + //This test's JSON contains outdated references to types + assertWarnings(RestBulkAction.TYPES_DEPRECATION_MESSAGE); } public void testWithErrors() throws Exception { @@ -150,6 +155,8 @@ public void testWithErrors() throws Exception { assertThat(restResponse.status(), is(RestStatus.INTERNAL_SERVER_ERROR)); assertThat(restResponse.content().utf8ToString(), is("{\"took\":" + response.getTookInMillis() + 
",\"ignored\":false,\"errors\":true,\"error\":" + errorJson + "}")); + //This test's JSON contains outdated references to types + assertWarnings(RestBulkAction.TYPES_DEPRECATION_MESSAGE); } /** diff --git a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/rest/RestRollupSearchAction.java b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/rest/RestRollupSearchAction.java index a027bf06f4095..1d9960e711f61 100644 --- a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/rest/RestRollupSearchAction.java +++ b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/rest/RestRollupSearchAction.java @@ -25,7 +25,7 @@ public class RestRollupSearchAction extends BaseRestHandler { private static final Set RESPONSE_PARAMS = Collections.unmodifiableSet(new HashSet<>(Arrays.asList( RestSearchAction.TYPED_KEYS_PARAM, - RestSearchAction.TOTAL_HIT_AS_INT_PARAM))); + RestSearchAction.TOTAL_HITS_AS_INT_PARAM))); public RestRollupSearchAction(Settings settings, RestController controller) { super(settings); @@ -40,6 +40,7 @@ protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient SearchRequest searchRequest = new SearchRequest(); restRequest.withContentOrSourceParamParserOrNull(parser -> RestSearchAction.parseSearchRequest(searchRequest, restRequest, parser, size -> searchRequest.source().size(size))); + RestSearchAction.checkRestTotalHits(restRequest, searchRequest); return channel -> client.execute(RollupSearchAction.INSTANCE, searchRequest, new RestToXContentListener<>(channel)); } diff --git a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/config/ConfigTests.java b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/config/ConfigTests.java index 86891eda669fa..bd8a0b19f8250 100644 --- a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/config/ConfigTests.java +++ b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/config/ConfigTests.java @@ -83,7 +83,7 @@ public void testDefaultTimeZone() { assertThat(config.getTimeZone(), equalTo(DateTimeZone.UTC.getID())); } - public void testUnkownTimeZone() { + public void testUnknownTimeZone() { Exception e = expectThrows(IllegalArgumentException.class, () -> new DateHistogramGroupConfig("foo", DateHistogramInterval.HOUR, null, "FOO")); assertThat(e.getMessage(), equalTo("The datetime zone id 'FOO' is not recognised")); diff --git a/x-pack/plugin/security/build.gradle b/x-pack/plugin/security/build.gradle index 740cb7f3c3a56..afc39d5df5010 100644 --- a/x-pack/plugin/security/build.gradle +++ b/x-pack/plugin/security/build.gradle @@ -164,7 +164,8 @@ forbiddenApisMain { } // classes are missing, e.g. com.ibm.icu.lang.UCharacter -thirdPartyAudit.excludes = [ +thirdPartyAudit { + ignoreMissingClasses ( // SAML dependencies // [missing classes] Some cli utilities that we don't use depend on these missing JCommander classes 'com.beust.jcommander.JCommander', @@ -256,7 +257,10 @@ thirdPartyAudit.excludes = [ 'net.sf.ehcache.Ehcache', 'net.sf.ehcache.Element', // [missing classes] SLF4j includes an optional class that depends on an extension class (!) 
- 'org.slf4j.ext.EventData', + 'org.slf4j.ext.EventData' + ) + + ignoreViolations ( // Guava uses internal java api: sun.misc.Unsafe 'com.google.common.cache.Striped64', 'com.google.common.cache.Striped64$1', @@ -265,16 +269,17 @@ thirdPartyAudit.excludes = [ 'com.google.common.primitives.UnsignedBytes$LexicographicalComparatorHolder$UnsafeComparator$1', 'com.google.common.util.concurrent.AbstractFuture$UnsafeAtomicHelper', 'com.google.common.util.concurrent.AbstractFuture$UnsafeAtomicHelper$1', -] + ) +} if (project.runtimeJavaVersion > JavaVersion.VERSION_1_8) { - thirdPartyAudit.excludes += [ + thirdPartyAudit.ignoreMissingClasses( 'javax.xml.bind.JAXBContext', 'javax.xml.bind.JAXBElement', 'javax.xml.bind.JAXBException', 'javax.xml.bind.Unmarshaller', - 'javax.xml.bind.UnmarshallerHandler', - ]; + 'javax.xml.bind.UnmarshallerHandler' + ) } run { diff --git a/x-pack/plugin/security/cli/build.gradle b/x-pack/plugin/security/cli/build.gradle index 8515b538bd562..1c684809a3203 100644 --- a/x-pack/plugin/security/cli/build.gradle +++ b/x-pack/plugin/security/cli/build.gradle @@ -24,6 +24,7 @@ dependencyLicenses { if (project.inFipsJvm) { unitTest.enabled = false + testingConventions.enabled = false // Forbiden APIs non-portable checks fail because bouncy castle classes being used from the FIPS JDK since those are // not part of the Java specification - all of this is as designed, so we have to relax this check for FIPS. tasks.withType(CheckForbiddenApis) { @@ -32,4 +33,5 @@ if (project.inFipsJvm) { // FIPS JVM includes many classes from bouncycastle which count as jar hell for the third party audit, // rather than provide a long list of exclusions, disable the check on FIPS. thirdPartyAudit.enabled = false + } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java index ad9f1d7aa948c..714da7cf11c35 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java @@ -463,6 +463,7 @@ Collection createComponents(Client client, ThreadPool threadPool, Cluste authcService.set(new AuthenticationService(settings, realms, auditTrailService, failureHandler, threadPool, anonymousUser, tokenService)); components.add(authcService.get()); + securityIndex.get().addIndexStateListener(authcService.get()::onSecurityIndexStateChange); final NativePrivilegeStore privilegeStore = new NativePrivilegeStore(settings, client, securityIndex.get()); components.add(privilegeStore); diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/realm/TransportClearRealmCacheAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/realm/TransportClearRealmCacheAction.java index 6db95e822101e..b4ee8b677c13b 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/realm/TransportClearRealmCacheAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/realm/TransportClearRealmCacheAction.java @@ -17,6 +17,7 @@ import org.elasticsearch.xpack.core.security.action.realm.ClearRealmCacheRequest; import org.elasticsearch.xpack.core.security.action.realm.ClearRealmCacheResponse; import org.elasticsearch.xpack.core.security.authc.Realm; +import org.elasticsearch.xpack.security.authc.AuthenticationService; import 
org.elasticsearch.xpack.security.authc.Realms; import org.elasticsearch.xpack.security.authc.support.CachingRealm; @@ -26,14 +27,16 @@ public class TransportClearRealmCacheAction extends TransportNodesAction { private final Realms realms; + private final AuthenticationService authenticationService; @Inject public TransportClearRealmCacheAction(ThreadPool threadPool, ClusterService clusterService, TransportService transportService, - ActionFilters actionFilters, Realms realms) { + ActionFilters actionFilters, Realms realms, AuthenticationService authenticationService) { super(ClearRealmCacheAction.NAME, threadPool, clusterService, transportService, actionFilters, ClearRealmCacheRequest::new, ClearRealmCacheRequest.Node::new, ThreadPool.Names.MANAGEMENT, ClearRealmCacheResponse.Node.class); this.realms = realms; + this.authenticationService = authenticationService; } @Override @@ -68,9 +71,23 @@ protected ClearRealmCacheResponse.Node nodeOperation(ClearRealmCacheRequest.Node } clearCache(realm, nodeRequest.getUsernames()); } + clearAuthenticationServiceCache(nodeRequest.getUsernames()); return new ClearRealmCacheResponse.Node(clusterService.localNode()); } + private void clearAuthenticationServiceCache(String[] usernames) { + // this is heavy handed since we could also take realm into account but that would add + // complexity since we would need to iterate over the cache under a lock to remove all + // entries that referenced the specific realm + if (usernames != null && usernames.length != 0) { + for (String username : usernames) { + authenticationService.expire(username); + } + } else { + authenticationService.expireAll(); + } + } + private void clearCache(Realm realm, String[] usernames) { if (!(realm instanceof CachingRealm)) { return; diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/AuthenticationService.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/AuthenticationService.java index ef04f3d22f854..a6d5cb50a76d6 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/AuthenticationService.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/AuthenticationService.java @@ -13,9 +13,13 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ContextPreservingActionListener; import org.elasticsearch.common.Nullable; +import org.elasticsearch.common.cache.Cache; +import org.elasticsearch.common.cache.CacheBuilder; import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.settings.Setting; +import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.node.Node; import org.elasticsearch.rest.RestRequest; @@ -38,14 +42,21 @@ import org.elasticsearch.xpack.security.audit.AuditTrailService; import org.elasticsearch.xpack.security.audit.AuditUtil; import org.elasticsearch.xpack.security.authc.support.RealmUserLookup; +import org.elasticsearch.xpack.security.support.SecurityIndexManager; +import java.util.ArrayList; +import java.util.Collections; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import java.util.Objects; +import java.util.concurrent.atomic.AtomicLong; import java.util.function.BiConsumer; import java.util.function.Consumer; +import static 
org.elasticsearch.xpack.security.support.SecurityIndexManager.isIndexDeleted; +import static org.elasticsearch.xpack.security.support.SecurityIndexManager.isMoveFromRedToNonRed; + /** * An authentication service that delegates the authentication process to its configured {@link Realm realms}. * This service also supports request level caching of authenticated users (i.e. once a user authenticated @@ -53,6 +64,12 @@ */ public class AuthenticationService { + static final Setting SUCCESS_AUTH_CACHE_ENABLED = + Setting.boolSetting("xpack.security.authc.success_cache.enabled", true, Property.NodeScope); + private static final Setting SUCCESS_AUTH_CACHE_MAX_SIZE = + Setting.intSetting("xpack.security.authc.success_cache.size", 10000, Property.NodeScope); + private static final Setting SUCCESS_AUTH_CACHE_EXPIRE_AFTER_ACCESS = + Setting.timeSetting("xpack.security.authc.success_cache.expire_after_access", TimeValue.timeValueHours(1L), Property.NodeScope); private static final Logger logger = LogManager.getLogger(AuthenticationService.class); private final Realms realms; @@ -62,6 +79,8 @@ public class AuthenticationService { private final String nodeName; private final AnonymousUser anonymousUser; private final TokenService tokenService; + private final Cache lastSuccessfulAuthCache; + private final AtomicLong numInvalidation = new AtomicLong(); private final boolean runAsEnabled; private final boolean isAnonymousUserEnabled; @@ -77,6 +96,14 @@ public AuthenticationService(Settings settings, Realms realms, AuditTrailService this.runAsEnabled = AuthenticationServiceField.RUN_AS_ENABLED.get(settings); this.isAnonymousUserEnabled = AnonymousUser.isAnonymousEnabled(settings); this.tokenService = tokenService; + if (SUCCESS_AUTH_CACHE_ENABLED.get(settings)) { + this.lastSuccessfulAuthCache = CacheBuilder.builder() + .setMaximumWeight(Integer.toUnsignedLong(SUCCESS_AUTH_CACHE_MAX_SIZE.get(settings))) + .setExpireAfterAccess(SUCCESS_AUTH_CACHE_EXPIRE_AFTER_ACCESS.get(settings)) + .build(); + } else { + this.lastSuccessfulAuthCache = null; + } } /** @@ -120,6 +147,28 @@ public void authenticate(String action, TransportMessage message, new Authenticator(action, message, null, listener).authenticateToken(token); } + public void expire(String principal) { + if (lastSuccessfulAuthCache != null) { + numInvalidation.incrementAndGet(); + lastSuccessfulAuthCache.invalidate(principal); + } + } + + public void expireAll() { + if (lastSuccessfulAuthCache != null) { + numInvalidation.incrementAndGet(); + lastSuccessfulAuthCache.invalidateAll(); + } + } + + public void onSecurityIndexStateChange(SecurityIndexManager.State previousState, SecurityIndexManager.State currentState) { + if (lastSuccessfulAuthCache != null) { + if (isMoveFromRedToNonRed(previousState, currentState) || isIndexDeleted(previousState, currentState)) { + expireAll(); + } + } + } + // pkg private method for testing Authenticator createAuthenticator(RestRequest request, ActionListener listener) { return new Authenticator(request, listener); @@ -130,6 +179,11 @@ Authenticator createAuthenticator(String action, TransportMessage message, User return new Authenticator(action, message, fallbackUser, listener); } + // pkg private method for testing + long getNumInvalidation() { + return numInvalidation.get(); + } + /** * This class is responsible for taking a request and executing the authentication. The authentication is executed in an asynchronous * fashion in order to avoid blocking calls on a network thread. 
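The three `SUCCESS_AUTH_CACHE_*` settings above wire up an optional principal-to-realm cache. A hedged sketch of the construction, using the default values those settings declare:

```java
// Built only when xpack.security.authc.success_cache.enabled is true.
Cache<String, Realm> lastSuccessfulAuthCache = CacheBuilder.<String, Realm>builder()
    .setMaximumWeight(10_000L)                           // success_cache.size default
    .setExpireAfterAccess(TimeValue.timeValueHours(1L))  // expire_after_access default
    .build();
```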
This class also performs the auditing necessary around authentication @@ -263,7 +317,8 @@ private void consumeToken(AuthenticationToken token) { handleNullToken(); } else { authenticationToken = token; - final List realmsList = realms.asList(); + final List realmsList = getRealmList(authenticationToken.principal()); + final long startInvalidation = numInvalidation.get(); final Map> messages = new LinkedHashMap<>(); final BiConsumer> realmAuthenticatingConsumer = (realm, userListener) -> { if (realm.supports(authenticationToken)) { @@ -273,6 +328,9 @@ private void consumeToken(AuthenticationToken token) { // user was authenticated, populate the authenticated by information authenticatedBy = new RealmRef(realm.name(), realm.type(), nodeName); authenticationResult = result; + if (lastSuccessfulAuthCache != null && startInvalidation == numInvalidation.get()) { + lastSuccessfulAuthCache.put(authenticationToken.principal(), realm); + } userListener.onResponse(result.getUser()); } else { // the user was not authenticated, call this so we can audit the correct event @@ -313,6 +371,27 @@ private void consumeToken(AuthenticationToken token) { } } + private List getRealmList(String principal) { + final List defaultOrderedRealms = realms.asList(); + if (lastSuccessfulAuthCache != null) { + final Realm lastSuccess = lastSuccessfulAuthCache.get(principal); + if (lastSuccess != null) { + final int index = defaultOrderedRealms.indexOf(lastSuccess); + if (index > 0) { + final List smartOrder = new ArrayList<>(defaultOrderedRealms.size()); + smartOrder.add(lastSuccess); + for (int i = 1; i < defaultOrderedRealms.size(); i++) { + if (i != index) { + smartOrder.add(defaultOrderedRealms.get(i)); + } + } + return Collections.unmodifiableList(smartOrder); + } + } + } + return defaultOrderedRealms; + } + /** * Handles failed extraction of an authentication token. 
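One reasoning step worth spelling out from consumeToken above: the `numInvalidation` counter read before authentication guards the cache write against a concurrent `expire()`/`expireAll()`. A hedged reduction of that logic:

```java
final long startInvalidation = numInvalidation.get();
// ... realms authenticate asynchronously ...
// Cache the winning realm only if no invalidation ran in the meantime, so an
// expired entry cannot be resurrected by a stale in-flight write.
if (lastSuccessfulAuthCache != null && startInvalidation == numInvalidation.get()) {
    lastSuccessfulAuthCache.put(authenticationToken.principal(), realm);
}
```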
This can happen in a few different scenarios: * @@ -391,7 +470,8 @@ private void consumeUser(User user, Map> message * names of users that exist using a timing attack */ private void lookupRunAsUser(final User user, String runAsUsername, Consumer userConsumer) { - final RealmUserLookup lookup = new RealmUserLookup(realms.asList(), threadContext); + final RealmUserLookup lookup = new RealmUserLookup(getRealmList(runAsUsername), threadContext); + final long startInvalidationNum = numInvalidation.get(); lookup.lookup(runAsUsername, ActionListener.wrap(tuple -> { if (tuple == null) { // the user does not exist, but we still create a User object, which will later be rejected by authz @@ -400,6 +480,11 @@ private void lookupRunAsUser(final User user, String runAsUsername, Consumer realm); + } userConsumer.accept(new User(foundUser, user)); } }, exception -> listener.onFailure(request.exceptionProcessingRequest(exception, authenticationToken)))); @@ -602,5 +687,8 @@ public String toString() { public static void addSettings(List> settings) { settings.add(AuthenticationServiceField.RUN_AS_ENABLED); + settings.add(SUCCESS_AUTH_CACHE_ENABLED); + settings.add(SUCCESS_AUTH_CACHE_MAX_SIZE); + settings.add(SUCCESS_AUTH_CACHE_EXPIRE_AFTER_ACCESS); } } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/ExpiredTokenRemover.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/ExpiredTokenRemover.java index 9b1015a5849ee..c51c712506dc0 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/ExpiredTokenRemover.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/ExpiredTokenRemover.java @@ -33,7 +33,9 @@ import static org.elasticsearch.xpack.core.ClientHelper.executeAsyncWithOrigin; /** - * Responsible for cleaning the invalidated tokens from the invalidated tokens index. + * Responsible for cleaning the invalidated and expired tokens from the security index. 
+ * The document gets deleted if it was created more than 24 hours ago, since 24 hours is + * the maximum lifetime of a refresh token. */ final class ExpiredTokenRemover extends AbstractRunnable { private static final Logger logger = LogManager.getLogger(ExpiredTokenRemover.class); @@ -57,10 +59,8 @@ public void doRun() { final Instant now = Instant.now(); expiredDbq .setQuery(QueryBuilders.boolQuery() - .filter(QueryBuilders.termsQuery("doc_type", TokenService.INVALIDATED_TOKEN_DOC_TYPE, "token")) - .filter(QueryBuilders.boolQuery() - .should(QueryBuilders.rangeQuery("expiration_time").lte(now.toEpochMilli())) - .should(QueryBuilders.rangeQuery("creation_time").lte(now.minus(24L, ChronoUnit.HOURS).toEpochMilli())))); + .filter(QueryBuilders.termsQuery("doc_type", "token")) + .filter(QueryBuilders.rangeQuery("creation_time").lte(now.minus(24L, ChronoUnit.HOURS).toEpochMilli()))); logger.trace(() -> new ParameterizedMessage("Removing old tokens: [{}]", Strings.toString(expiredDbq))); executeAsyncWithOrigin(client, SECURITY_ORIGIN, DeleteByQueryAction.INSTANCE, expiredDbq, ActionListener.wrap(r -> { diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/TokenService.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/TokenService.java index 15d3e75842615..52c1081367451 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/TokenService.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/TokenService.java @@ -19,14 +19,10 @@ import org.elasticsearch.action.DocWriteRequest.OpType; import org.elasticsearch.action.DocWriteResponse; import org.elasticsearch.action.bulk.BulkItemResponse; -import org.elasticsearch.action.bulk.BulkRequest; import org.elasticsearch.action.bulk.BulkRequestBuilder; import org.elasticsearch.action.bulk.BulkResponse; import org.elasticsearch.action.get.GetRequest; import org.elasticsearch.action.get.GetResponse; -import org.elasticsearch.action.get.MultiGetItemResponse; -import org.elasticsearch.action.get.MultiGetRequest; -import org.elasticsearch.action.get.MultiGetResponse; import org.elasticsearch.action.index.IndexAction; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.search.SearchRequest; @@ -165,7 +161,8 @@ public final class TokenService { public static final Setting<TimeValue> DELETE_TIMEOUT = Setting.timeSetting("xpack.security.authc.token.delete.timeout", TimeValue.MINUS_ONE, Property.NodeScope); - static final String INVALIDATED_TOKEN_DOC_TYPE = "invalidated-token"; + private static final String TOKEN_DOC_TYPE = "token"; + private static final String TOKEN_DOC_ID_PREFIX = TOKEN_DOC_TYPE + "_"; static final int MINIMUM_BYTES = VERSION_BYTES + SALT_BYTES + IV_BYTES + 1; private static final int MINIMUM_BASE64_BYTES = Double.valueOf(Math.ceil((4 * MINIMUM_BYTES) / 3)).intValue(); private static final int MAX_RETRY_ATTEMPTS = 5; @@ -245,7 +242,7 @@ public void createUserToken(Authentication authentication, Authentication origin try (XContentBuilder builder = XContentFactory.jsonBuilder()) { builder.startObject(); - builder.field("doc_type", "token"); + builder.field("doc_type", TOKEN_DOC_TYPE); builder.field("creation_time", created.toEpochMilli()); if (includeRefreshToken) { builder.startObject("refresh_token") @@ -293,15 +290,13 @@ void getAndValidateToken(ThreadContext ctx, ActionListener<UserToken> listener) listener.onResponse(null); } else { try { - decodeAndValidateToken(token, ActionListener.wrap(listener::onResponse, e ->
{ - if (e instanceof IOException) { - // could happen with a token that is not ours - logger.debug("invalid token", e); - listener.onResponse(null); + decodeToken(token, ActionListener.wrap(userToken -> { + if (userToken != null) { + checkIfTokenIsValid(userToken, listener); } else { - listener.onFailure(e); + listener.onResponse(null); } - })); + }, listener::onFailure)); } catch (IOException e) { // could happen with a token that is not ours logger.debug("invalid token", e); @@ -331,22 +326,6 @@ public void getAuthenticationAndMetaData(String token, ActionListener listener) throws IOException { - decodeToken(token, ActionListener.wrap(userToken -> { - if (userToken != null) { - Instant currentTime = clock.instant(); - if (currentTime.isAfter(userToken.getExpirationTime())) { - // token expired - listener.onFailure(traceLog("decode token", token, expiredTokenException())); - } else { - checkIfTokenIsRevoked(userToken, listener); - } - } else { - listener.onResponse(null); - } - }, listener::onFailure)); - } - /* * Asynchronously decodes the string representation of a {@link UserToken}. The process for * this is asynchronous as we may need to compute a key, which can be computationally expensive @@ -373,55 +352,51 @@ void decodeToken(String token, ActionListener listener) throws IOExce try { final byte[] iv = in.readByteArray(); final Cipher cipher = getDecryptionCipher(iv, decodeKey, version, decodedSalt); - if (version.onOrAfter(Version.V_6_2_0)) { - // we only have the id and need to get the token from the doc! - decryptTokenId(in, cipher, version, ActionListener.wrap(tokenId -> { - if (securityIndex.isAvailable() == false) { - logger.warn("failed to get token [{}] since index is not available", tokenId); - listener.onResponse(null); - } else { - securityIndex.checkIndexVersionThenExecute( - ex -> listener.onFailure(traceLog("prepare security index", tokenId, ex)), - () -> { - final GetRequest getRequest = client.prepareGet(SecurityIndexManager.SECURITY_INDEX_NAME, TYPE, - getTokenDocumentId(tokenId)).request(); - Consumer onFailure = ex -> listener.onFailure(traceLog("decode token", tokenId, ex)); - executeAsyncWithOrigin(client.threadPool().getThreadContext(), SECURITY_ORIGIN, getRequest, - ActionListener.wrap(response -> { - if (response.isExists()) { - Map accessTokenSource = - (Map) response.getSource().get("access_token"); - if (accessTokenSource == null) { - onFailure.accept(new IllegalStateException( - "token document is missing the access_token field")); - } else if (accessTokenSource.containsKey("user_token") == false) { - onFailure.accept(new IllegalStateException( - "token document is missing the user_token field")); - } else { - Map userTokenSource = - (Map) accessTokenSource.get("user_token"); - listener.onResponse(UserToken.fromSourceMap(userTokenSource)); - } - } else { - onFailure.accept( - new IllegalStateException("token document is missing and must be present")); - } - }, e -> { - // if the index or the shard is not there / available we assume that - // the token is not valid - if (isShardNotAvailableException(e)) { - logger.warn("failed to get token [{}] since index is not available", tokenId); - listener.onResponse(null); + decryptTokenId(in, cipher, version, ActionListener.wrap(tokenId -> { + if (securityIndex.isAvailable() == false) { + logger.warn("failed to get token [{}] since index is not available", tokenId); + listener.onResponse(null); + } else { + securityIndex.checkIndexVersionThenExecute( + ex -> listener.onFailure(traceLog("prepare security index", 
tokenId, ex)), + () -> { + final GetRequest getRequest = client.prepareGet(SecurityIndexManager.SECURITY_INDEX_NAME, TYPE, + getTokenDocumentId(tokenId)).request(); + Consumer onFailure = ex -> listener.onFailure(traceLog("decode token", tokenId, ex)); + executeAsyncWithOrigin(client.threadPool().getThreadContext(), SECURITY_ORIGIN, getRequest, + ActionListener.wrap(response -> { + if (response.isExists()) { + Map accessTokenSource = + (Map) response.getSource().get("access_token"); + if (accessTokenSource == null) { + onFailure.accept(new IllegalStateException( + "token document is missing the access_token field")); + } else if (accessTokenSource.containsKey("user_token") == false) { + onFailure.accept(new IllegalStateException( + "token document is missing the user_token field")); } else { - logger.error(new ParameterizedMessage("failed to get token [{}]", tokenId), e); - listener.onFailure(e); + Map userTokenSource = + (Map) accessTokenSource.get("user_token"); + listener.onResponse(UserToken.fromSourceMap(userTokenSource)); } - }), client::get); - }); - }}, listener::onFailure)); - } else { - decryptToken(in, cipher, version, listener); - } + } else { + onFailure.accept( + new IllegalStateException("token document is missing and must be present")); + } + }, e -> { + // if the index or the shard is not there / available we assume that + // the token is not valid + if (isShardNotAvailableException(e)) { + logger.warn("failed to get token [{}] since index is not available", tokenId); + listener.onResponse(null); + } else { + logger.error(new ParameterizedMessage("failed to get token [{}]", tokenId), e); + listener.onFailure(e); + } + }), client::get); + }); + } + }, listener::onFailure)); } catch (GeneralSecurityException e) { // could happen with a token that is not ours logger.warn("invalid token", e); @@ -456,14 +431,6 @@ private void getKeyAsync(BytesKey decodedSalt, KeyAndCache keyAndCache, ActionLi } } - private static void decryptToken(StreamInput in, Cipher cipher, Version version, ActionListener listener) throws - IOException { - try (CipherInputStream cis = new CipherInputStream(in, cipher); StreamInput decryptedInput = new InputStreamStreamInput(cis)) { - decryptedInput.setVersion(version); - listener.onResponse(new UserToken(decryptedInput)); - } - } - private static void decryptTokenId(StreamInput in, Cipher cipher, Version version, ActionListener listener) throws IOException { try (CipherInputStream cis = new CipherInputStream(in, cipher); StreamInput decryptedInput = new InputStreamStreamInput(cis)) { decryptedInput.setVersion(version); @@ -473,10 +440,7 @@ private static void decryptTokenId(StreamInput in, Cipher cipher, Version versio /** * This method performs the steps necessary to invalidate a token so that it may no longer be - * used. The process of invalidation involves a step that is needed for backwards compatibility - * with versions prior to 6.2.0; this step records an entry to indicate that a token with a - * given id has been expired. The second step is to record the invalidation for tokens that - * have been created on versions on or after 6.2; this step involves performing an update to + * used. 
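
The GET handling above distils to a small amount of map unpacking once the token document arrives. A self-contained sketch of that step, with plain JDK maps standing in for the GetResponse source (field names are the ones read in the hunk):

```java
import java.util.Map;

// A missing access_token or user_token field is treated as an illegal state
// of the security index, not as an invalid token presented by a client.
final class TokenSourceSketch {
    @SuppressWarnings("unchecked")
    static Map<String, Object> userTokenSource(Map<String, Object> source) {
        Map<String, Object> accessToken = (Map<String, Object>) source.get("access_token");
        if (accessToken == null) {
            throw new IllegalStateException("token document is missing the access_token field");
        }
        Object userToken = accessToken.get("user_token");
        if (userToken == null) {
            throw new IllegalStateException("token document is missing the user_token field");
        }
        return (Map<String, Object>) userToken;
    }
}
```
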
The process of invalidation involves performing an update to * the token document and setting the invalidated field to true */ public void invalidateAccessToken(String tokenString, ActionListener listener) { @@ -491,9 +455,8 @@ public void invalidateAccessToken(String tokenString, ActionListener accessTokenIds, ActionListener listener) { maybeStartTokenRemover(); - final long expirationEpochMilli = getExpirationTime().toEpochMilli(); // Invalidate the refresh tokens first so that they cannot be used to get new // access tokens while we invalidate the access tokens we currently know about indexInvalidation(accessTokenIds, ActionListener.wrap(result -> - indexBwcInvalidation(accessTokenIds, listener, new AtomicInteger(result.getAttemptCount()), - expirationEpochMilli, result), + indexInvalidation(accessTokenIds, listener, new AtomicInteger(result.getAttemptCount()), + "access_token", result), listener::onFailure), new AtomicInteger(0), "refresh_token", null); } - /** - * Performs the actual bwc invalidation of a collection of tokens and then kicks off the new invalidation method. - * - * @param tokenIds the collection of token ids or token document ids that should be invalidated - * @param listener the listener to notify upon completion - * @param attemptCount the number of attempts to invalidate that have already been tried - * @param expirationEpochMilli the expiration time as milliseconds since the epoch - * @param previousResult if this not the initial attempt for invalidation, it contains the result of invalidating - * tokens up to the point of the retry. This result is added to the result of the current attempt - */ - private void indexBwcInvalidation(Collection tokenIds, ActionListener listener, - AtomicInteger attemptCount, long expirationEpochMilli, - @Nullable TokensInvalidationResult previousResult) { - - if (tokenIds.isEmpty()) { - logger.warn("No tokens provided for invalidation"); - listener.onFailure(invalidGrantException("No tokens provided for invalidation")); - } else if (attemptCount.get() > MAX_RETRY_ATTEMPTS) { - logger.warn("Failed to invalidate [{}] tokens after [{}] attempts", tokenIds.size(), - attemptCount.get()); - listener.onFailure(invalidGrantException("failed to invalidate tokens")); - } else { - BulkRequestBuilder bulkRequestBuilder = client.prepareBulk(); - for (String tokenId : tokenIds) { - final String invalidatedTokenId = getInvalidatedTokenDocumentId(tokenId); - IndexRequest indexRequest = client.prepareIndex(SecurityIndexManager.SECURITY_INDEX_NAME, TYPE, invalidatedTokenId) - .setOpType(OpType.CREATE) - .setSource("doc_type", INVALIDATED_TOKEN_DOC_TYPE, "expiration_time", expirationEpochMilli) - .request(); - bulkRequestBuilder.add(indexRequest); - } - bulkRequestBuilder.setRefreshPolicy(RefreshPolicy.WAIT_UNTIL); - final BulkRequest bulkRequest = bulkRequestBuilder.request(); - securityIndex.prepareIndexIfNeededThenExecute(ex -> listener.onFailure(traceLog("prepare security index", ex)), - () -> executeAsyncWithOrigin(client.threadPool().getThreadContext(), SECURITY_ORIGIN, bulkRequest, - ActionListener.wrap(bulkResponse -> { - List retryTokenIds = new ArrayList<>(); - for (BulkItemResponse bulkItemResponse : bulkResponse.getItems()) { - if (bulkItemResponse.isFailed()) { - Throwable cause = bulkItemResponse.getFailure().getCause(); - logger.error(cause.getMessage()); - traceLog("(bwc) invalidate tokens", cause); - if (isShardNotAvailableException(cause)) { - 
retryTokenIds.add(getTokenIdFromInvalidatedTokenDocumentId(bulkItemResponse.getFailure().getId())); - } else if ((cause instanceof VersionConflictEngineException) == false){ - // We don't handle VersionConflictEngineException, the ticket has been invalidated - listener.onFailure(bulkItemResponse.getFailure().getCause()); - } - } - } - if (retryTokenIds.isEmpty() == false) { - attemptCount.incrementAndGet(); - indexBwcInvalidation(retryTokenIds, listener, attemptCount, expirationEpochMilli, previousResult); - } - indexInvalidation(tokenIds, listener, attemptCount, "access_token", previousResult); - }, e -> { - Throwable cause = ExceptionsHelper.unwrapCause(e); - traceLog("(bwc) invalidate tokens", cause); - if (isShardNotAvailableException(cause)) { - attemptCount.incrementAndGet(); - indexBwcInvalidation(tokenIds, listener, attemptCount, expirationEpochMilli, previousResult); - } else { - listener.onFailure(e); - } - }), - client::bulk)); - } - } - /** * Performs the actual invalidation of a collection of tokens * @@ -777,7 +669,7 @@ private void findTokenFromRefreshToken(String refreshToken, ActionListener parseTokensFromDocument(Map sou } } - private static String getInvalidatedTokenDocumentId(UserToken userToken) { - return getInvalidatedTokenDocumentId(userToken.getId()); - } - - private static String getInvalidatedTokenDocumentId(String id) { - return INVALIDATED_TOKEN_DOC_TYPE + "_" + id; - } - private static String getTokenDocumentId(UserToken userToken) { return getTokenDocumentId(userToken.getId()); } private static String getTokenDocumentId(String id) { - return "token_" + id; + return TOKEN_DOC_ID_PREFIX + id; } private static String getTokenIdFromDocumentId(String docId) { - if (docId.startsWith("token_") == false) { + if (docId.startsWith(TOKEN_DOC_ID_PREFIX) == false) { throw new IllegalStateException("TokenDocument ID [" + docId + "] has unexpected value"); } else { - return docId.substring("token_".length()); - } - } - - private static String getTokenIdFromInvalidatedTokenDocumentId(String docId) { - final String invalidatedTokenDocPrefix = INVALIDATED_TOKEN_DOC_TYPE + "_"; - if (docId.startsWith(invalidatedTokenDocPrefix) == false) { - throw new IllegalStateException("InvalidatedTokenDocument ID [" + docId + "] has unexpected value"); - } else { - return docId.substring(invalidatedTokenDocPrefix.length()); + return docId.substring(TOKEN_DOC_ID_PREFIX.length()); } } @@ -1136,70 +1011,52 @@ private void ensureEnabled() { } /** - * Checks if the token has been stored as a revoked token to ensure we do not allow tokens that - * have been explicitly cleared. + * Checks if the access token has been explicitly invalidated */ - private void checkIfTokenIsRevoked(UserToken userToken, ActionListener listener) { - if (securityIndex.indexExists() == false) { - // index doesn't exist so the token is considered valid. it is important to note that - // we do not use isAvailable as the lack of a shard being available is not equivalent - // to the index not existing in the case of revocation checking. 
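
The document-id helpers that remain are worth seeing on their own: a single "token_" prefix scheme now covers every token document, which is what allows the "invalidated-token_" helpers above to be deleted. Restated from the hunk:

```java
// One prefix for all token documents; an id that does not carry it indicates
// a corrupted or foreign document rather than a client error.
final class TokenDocIdSketch {
    private static final String TOKEN_DOC_TYPE = "token";
    private static final String TOKEN_DOC_ID_PREFIX = TOKEN_DOC_TYPE + "_";

    static String getTokenDocumentId(String id) {
        return TOKEN_DOC_ID_PREFIX + id;
    }

    static String getTokenIdFromDocumentId(String docId) {
        if (docId.startsWith(TOKEN_DOC_ID_PREFIX) == false) {
            throw new IllegalStateException("TokenDocument ID [" + docId + "] has unexpected value");
        }
        return docId.substring(TOKEN_DOC_ID_PREFIX.length());
    }
}
```
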
- listener.onResponse(userToken); + private void checkIfTokenIsValid(UserToken userToken, ActionListener listener) { + Instant currentTime = clock.instant(); + if (currentTime.isAfter(userToken.getExpirationTime())) { + listener.onFailure(traceLog("validate token", userToken.getId(), expiredTokenException())); + } else if (securityIndex.indexExists() == false) { + // index doesn't exist so the token is considered invalid as we cannot verify its validity + logger.warn("failed to validate token [{}] since the security index doesn't exist", userToken.getId()); + listener.onResponse(null); } else { securityIndex.checkIndexVersionThenExecute(listener::onFailure, () -> { - MultiGetRequest mGetRequest = client.prepareMultiGet() - .add(SecurityIndexManager.SECURITY_INDEX_NAME, TYPE, getInvalidatedTokenDocumentId(userToken)) - .add(SecurityIndexManager.SECURITY_INDEX_NAME, TYPE, getTokenDocumentId(userToken)) - .request(); + final GetRequest getRequest = client.prepareGet(SecurityIndexManager.SECURITY_INDEX_NAME, TYPE, + getTokenDocumentId(userToken)).request(); Consumer onFailure = ex -> listener.onFailure(traceLog("check token state", userToken.getId(), ex)); - executeAsyncWithOrigin(client.threadPool().getThreadContext(), SECURITY_ORIGIN, - mGetRequest, - new ActionListener() { - - @Override - public void onResponse(MultiGetResponse response) { - MultiGetItemResponse[] itemResponse = response.getResponses(); - if (itemResponse[0].isFailed()) { - onFailure(itemResponse[0].getFailure().getFailure()); - } else if (itemResponse[0].getResponse().isExists()) { + executeAsyncWithOrigin(client.threadPool().getThreadContext(), SECURITY_ORIGIN, getRequest, + ActionListener.wrap(response -> { + if (response.isExists()) { + Map source = response.getSource(); + Map accessTokenSource = (Map) source.get("access_token"); + if (accessTokenSource == null) { + onFailure.accept(new IllegalStateException("token document is missing access_token field")); + } else { + Boolean invalidated = (Boolean) accessTokenSource.get("invalidated"); + if (invalidated == null) { + onFailure.accept(new IllegalStateException("token document is missing invalidated field")); + } else if (invalidated) { onFailure.accept(expiredTokenException()); - } else if (itemResponse[1].isFailed()) { - onFailure(itemResponse[1].getFailure().getFailure()); - } else if (itemResponse[1].getResponse().isExists()) { - Map source = itemResponse[1].getResponse().getSource(); - Map accessTokenSource = (Map) source.get("access_token"); - if (accessTokenSource == null) { - onFailure.accept(new IllegalStateException("token document is missing access_token field")); - } else { - Boolean invalidated = (Boolean) accessTokenSource.get("invalidated"); - if (invalidated == null) { - onFailure.accept(new IllegalStateException("token document is missing invalidated field")); - } else if (invalidated) { - onFailure.accept(expiredTokenException()); - } else { - listener.onResponse(userToken); - } - } - } else if (userToken.getVersion().onOrAfter(Version.V_6_2_0)) { - onFailure.accept(new IllegalStateException("token document is missing and must be present")); } else { listener.onResponse(userToken); } } - - @Override - public void onFailure(Exception e) { - // if the index or the shard is not there / available we assume that - // the token is not valid - if (isShardNotAvailableException(e)) { - logger.warn("failed to get token [{}] since index is not available", userToken.getId()); - listener.onResponse(null); - } else { - logger.error(new ParameterizedMessage("failed 
to get token [{}]", userToken.getId()), e); - listener.onFailure(e); - } - } - }, client::multiGet); + } else { + onFailure.accept(new IllegalStateException("token document is missing and must be present")); + } + }, e -> { + // if the index or the shard is not there / available we assume that + // the token is not valid + if (isShardNotAvailableException(e)) { + logger.warn("failed to get token [{}] since index is not available", userToken.getId()); + listener.onResponse(null); + } else { + logger.error(new ParameterizedMessage("failed to get token [{}]", userToken.getId()), e); + listener.onFailure(e); + } + }), client::get); }); } } @@ -1209,10 +1066,6 @@ public TimeValue getExpirationDelay() { return expirationDelay; } - private Instant getExpirationTime() { - return getExpirationTime(clock.instant()); - } - private Instant getExpirationTime(Instant now) { return now.plusSeconds(expirationDelay.getSeconds()); } @@ -1258,11 +1111,7 @@ public String getUserTokenString(UserToken userToken) throws IOException, Genera new CipherOutputStream(out, getEncryptionCipher(initializationVector, keyAndCache, userToken.getVersion())); StreamOutput encryptedStreamOutput = new OutputStreamStreamOutput(encryptedOutput)) { encryptedStreamOutput.setVersion(userToken.getVersion()); - if (userToken.getVersion().onOrAfter(Version.V_6_2_0)) { - encryptedStreamOutput.writeString(userToken.getId()); - } else { - userToken.writeTo(encryptedStreamOutput); - } + encryptedStreamOutput.writeString(userToken.getId()); encryptedStreamOutput.close(); return new String(os.toByteArray(), StandardCharsets.UTF_8); } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/CachingUsernamePasswordRealm.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/CachingUsernamePasswordRealm.java index faec4b8c66511..fed4e1fb13ee6 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/CachingUsernamePasswordRealm.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/CachingUsernamePasswordRealm.java @@ -6,6 +6,7 @@ package org.elasticsearch.xpack.security.authc.support; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.common.Nullable; import org.elasticsearch.common.cache.Cache; import org.elasticsearch.common.cache.CacheBuilder; import org.elasticsearch.common.settings.SecureString; @@ -29,7 +30,7 @@ public abstract class CachingUsernamePasswordRealm extends UsernamePasswordRealm implements CachingRealm { - private final Cache> cache; + private final Cache> cache; private final ThreadPool threadPool; private final boolean authenticationEnabled; final Hasher cacheHasher; @@ -40,7 +41,7 @@ protected CachingUsernamePasswordRealm(RealmConfig config, ThreadPool threadPool this.threadPool = threadPool; final TimeValue ttl = this.config.getSetting(CachingUsernamePasswordRealmSettings.CACHE_TTL_SETTING); if (ttl.getNanos() > 0) { - cache = CacheBuilder.>builder() + cache = CacheBuilder.>builder() .setExpireAfterWrite(ttl) .setMaximumWeight(this.config.getSetting(CachingUsernamePasswordRealmSettings.CACHE_MAX_USERS_SETTING)) .build(); @@ -122,58 +123,61 @@ private void authenticateWithCache(UsernamePasswordToken token, ActionListener listenableCacheEntry = cache.computeIfAbsent(token.principal(), k -> { + final ListenableFuture listenableCacheEntry = cache.computeIfAbsent(token.principal(), k -> { authenticationInCache.set(false); return new 
ListenableFuture<>(); }); if (authenticationInCache.get()) { // there is a cached or an inflight authenticate request - listenableCacheEntry.addListener(ActionListener.wrap(authenticatedUserWithHash -> { - if (authenticatedUserWithHash != null && authenticatedUserWithHash.verify(token.credentials())) { - // cached credential hash matches the credential hash for this forestalled request - handleCachedAuthentication(authenticatedUserWithHash.user, ActionListener.wrap(cacheResult -> { - if (cacheResult.isAuthenticated()) { - logger.debug("realm [{}] authenticated user [{}], with roles [{}]", - name(), token.principal(), cacheResult.getUser().roles()); - } else { - logger.debug("realm [{}] authenticated user [{}] from cache, but then failed [{}]", - name(), token.principal(), cacheResult.getMessage()); - } - listener.onResponse(cacheResult); - }, listener::onFailure)); + listenableCacheEntry.addListener(ActionListener.wrap(cachedResult -> { + final boolean credsMatch = cachedResult.verify(token.credentials()); + if (cachedResult.authenticationResult.isAuthenticated()) { + if (credsMatch) { + // cached credential hash matches the credential hash for this forestalled request + handleCachedAuthentication(cachedResult.user, ActionListener.wrap(cacheResult -> { + if (cacheResult.isAuthenticated()) { + logger.debug("realm [{}] authenticated user [{}], with roles [{}]", + name(), token.principal(), cacheResult.getUser().roles()); + } else { + logger.debug("realm [{}] authenticated user [{}] from cache, but then failed [{}]", + name(), token.principal(), cacheResult.getMessage()); + } + listener.onResponse(cacheResult); + }, listener::onFailure)); + } else { + // its credential hash does not match the + // hash of the credential for this forestalled request. + // clear cache and try to reach the authentication source again because password + // might have changed there and the local cached hash got stale + cache.invalidate(token.principal(), listenableCacheEntry); + authenticateWithCache(token, listener); + } + } else if (credsMatch) { + // not authenticated but instead of hammering reuse the result. a new + // request will trigger a retried auth + listener.onResponse(cachedResult.authenticationResult); } else { - // The inflight request has failed or its credential hash does not match the - // hash of the credential for this forestalled request. 
- // clear cache and try to reach the authentication source again because password - // might have changed there and the local cached hash got stale cache.invalidate(token.principal(), listenableCacheEntry); authenticateWithCache(token, listener); } - }, e -> { - // the inflight request failed, so try again, but first (always) make sure cache - // is cleared of the failed authentication - cache.invalidate(token.principal(), listenableCacheEntry); - authenticateWithCache(token, listener); - }), threadPool.executor(ThreadPool.Names.GENERIC), threadPool.getThreadContext()); + }, listener::onFailure), threadPool.executor(ThreadPool.Names.GENERIC), threadPool.getThreadContext()); } else { // attempt authentication against the authentication source doAuthenticate(token, ActionListener.wrap(authResult -> { - if (authResult.isAuthenticated() && authResult.getUser().enabled()) { - // compute the credential hash of this successful authentication request - final UserWithHash userWithHash = new UserWithHash(authResult.getUser(), token.credentials(), cacheHasher); - // notify any forestalled request listeners; they will not reach to the - // authentication request and instead will use this hash for comparison - listenableCacheEntry.onResponse(userWithHash); - } else { - // notify any forestalled request listeners; they will retry the request - listenableCacheEntry.onResponse(null); + if (authResult.isAuthenticated() == false || authResult.getUser().enabled() == false) { + // a new request should trigger a new authentication + cache.invalidate(token.principal(), listenableCacheEntry); } - // notify the listener of the inflight authentication request; this request is not retried + // notify any forestalled request listeners; they will not reach to the + // authentication request and instead will use this result if they contain + // the same credentials + listenableCacheEntry.onResponse(new CachedResult(authResult, cacheHasher, authResult.getUser(), token.credentials())); listener.onResponse(authResult); }, e -> { - // notify any staved off listeners; they will retry the request + cache.invalidate(token.principal(), listenableCacheEntry); + // notify any staved off listeners; they will propagate this error listenableCacheEntry.onFailure(e); - // notify the listener of the inflight authentication request; this request is not retried + // notify the listener of the inflight authentication request listener.onFailure(e); })); } @@ -225,25 +229,21 @@ private void lookupWithCache(String username, ActionListener listener) { assert cache != null; try { final AtomicBoolean lookupInCache = new AtomicBoolean(true); - final ListenableFuture listenableCacheEntry = cache.computeIfAbsent(username, key -> { + final ListenableFuture listenableCacheEntry = cache.computeIfAbsent(username, key -> { lookupInCache.set(false); return new ListenableFuture<>(); }); if (false == lookupInCache.get()) { // attempt lookup against the user directory doLookupUser(username, ActionListener.wrap(user -> { - if (user != null) { - // user found - final UserWithHash userWithHash = new UserWithHash(user, null, null); - // notify forestalled request listeners - listenableCacheEntry.onResponse(userWithHash); - } else { + final CachedResult result = new CachedResult(AuthenticationResult.notHandled(), cacheHasher, user, null); + if (user == null) { // user not found, invalidate cache so that subsequent requests are forwarded to // the user directory cache.invalidate(username, listenableCacheEntry); - // notify forestalled request listeners - 
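
The branching above on a cache hit is easiest to read as a small decision function over two booleans: whether the cached result was an authenticated one, and whether the cached credential hash matches this request's password. A sketch with illustrative names (the real code inlines these branches):

```java
final class CacheHitDecisionSketch {
    enum Action { SERVE_FROM_CACHE, RETURN_CACHED_FAILURE, INVALIDATE_AND_RETRY }

    static Action onCacheHit(boolean cachedAuthenticated, boolean credsMatch) {
        if (cachedAuthenticated) {
            return credsMatch
                ? Action.SERVE_FROM_CACHE        // same user, same password: reuse the result
                : Action.INVALIDATE_AND_RETRY;   // password may have changed upstream; re-authenticate
        }
        return credsMatch
            ? Action.RETURN_CACHED_FAILURE       // identical bad credentials: don't hammer the realm
            : Action.INVALIDATE_AND_RETRY;       // different credentials deserve a fresh attempt
    }
}
```
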
listenableCacheEntry.onResponse(null); } + // notify forestalled request listeners + listenableCacheEntry.onResponse(result); }, e -> { // the next request should be forwarded, not halted by a failed lookup attempt cache.invalidate(username, listenableCacheEntry); @@ -251,9 +251,9 @@ private void lookupWithCache(String username, ActionListener listener) { listenableCacheEntry.onFailure(e); })); } - listenableCacheEntry.addListener(ActionListener.wrap(userWithHash -> { - if (userWithHash != null) { - listener.onResponse(userWithHash.user); + listenableCacheEntry.addListener(ActionListener.wrap(cachedResult -> { + if (cachedResult.user != null) { + listener.onResponse(cachedResult.user); } else { listener.onResponse(null); } @@ -265,16 +265,21 @@ private void lookupWithCache(String username, ActionListener listener) { protected abstract void doLookupUser(String username, ActionListener listener); - private static class UserWithHash { - final User user; - final char[] hash; + private static class CachedResult { + private final AuthenticationResult authenticationResult; + private final User user; + private final char[] hash; - UserWithHash(User user, SecureString password, Hasher hasher) { - this.user = Objects.requireNonNull(user); + private CachedResult(AuthenticationResult result, Hasher hasher, @Nullable User user, @Nullable SecureString password) { + this.authenticationResult = Objects.requireNonNull(result); + if (authenticationResult.isAuthenticated() && user == null) { + throw new IllegalArgumentException("authentication cannot be successful with a null user"); + } + this.user = user; this.hash = password == null ? null : hasher.hash(password); } - boolean verify(SecureString password) { + private boolean verify(SecureString password) { return hash != null && Hasher.verifyHash(password, hash); } } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/nio/SSLChannelContext.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/nio/SSLChannelContext.java index c83bd16ca95e1..b5d5db2166c1f 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/nio/SSLChannelContext.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/nio/SSLChannelContext.java @@ -5,6 +5,7 @@ */ package org.elasticsearch.xpack.security.transport.nio; +import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.core.internal.io.IOUtils; import org.elasticsearch.nio.FlushOperation; import org.elasticsearch.nio.InboundChannelBuffer; @@ -16,6 +17,7 @@ import javax.net.ssl.SSLEngine; import java.io.IOException; +import java.util.concurrent.TimeUnit; import java.util.function.BiConsumer; import java.util.function.Consumer; import java.util.function.Predicate; @@ -28,7 +30,11 @@ */ public final class SSLChannelContext extends SocketChannelContext { + private static final long CLOSE_TIMEOUT_NANOS = new TimeValue(10, TimeUnit.SECONDS).nanos(); + private static final Runnable DEFAULT_TIMEOUT_CANCELLER = () -> {}; + private final SSLDriver sslDriver; + private Runnable closeTimeoutCanceller = DEFAULT_TIMEOUT_CANCELLER; SSLChannelContext(NioSocketChannel channel, NioSelector selector, Consumer exceptionHandler, SSLDriver sslDriver, ReadWriteHandler readWriteHandler, InboundChannelBuffer channelBuffer) { @@ -53,6 +59,8 @@ public void queueWriteOperation(WriteOperation writeOperation) { getSelector().assertOnSelectorThread(); if (writeOperation instanceof CloseNotifyOperation) { 
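
Stepping back to the realm-cache hunk just above: CachedResult is the pivot of that change, because unlike the removed UserWithHash it can also represent a failed authentication. A compact stand-in version, with String in place of User, SecureString, and Hasher:

```java
import java.util.Objects;

// Caching unsuccessful results is what lets a burst of requests carrying the
// same bad credentials share one upstream authentication attempt.
final class CachedResultSketch {
    final boolean authenticated;
    final String user;  // may be null for a failed authentication
    final String hash;  // null when no credentials were presented (lookups)

    CachedResultSketch(boolean authenticated, String user, String credentialHash) {
        if (authenticated && user == null) {
            throw new IllegalArgumentException("authentication cannot be successful with a null user");
        }
        this.authenticated = authenticated;
        this.user = user;
        this.hash = credentialHash;
    }

    boolean verify(String credentialHash) {
        // stand-in for Hasher.verifyHash in the real class
        return hash != null && Objects.equals(hash, credentialHash);
    }
}
```
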
sslDriver.initiateClose(); + long relativeNanos = CLOSE_TIMEOUT_NANOS + System.nanoTime(); + closeTimeoutCanceller = getSelector().getTaskScheduler().scheduleAtRelativeTime(this::channelCloseTimeout, relativeNanos); } else { super.queueWriteOperation(writeOperation); } @@ -161,6 +169,7 @@ public void closeChannel() { public void closeFromSelector() throws IOException { getSelector().assertOnSelectorThread(); if (channel.isOpen()) { + closeTimeoutCanceller.run(); IOUtils.close(super::closeFromSelector, sslDriver::close); } } @@ -169,6 +178,12 @@ public SSLEngine getSSLEngine() { return sslDriver.getSSLEngine(); } + private void channelCloseTimeout() { + closeTimeoutCanceller = DEFAULT_TIMEOUT_CANCELLER; + setCloseNow(); + getSelector().queueChannelClose(channel); + } + private static class CloseNotifyOperation implements WriteOperation { private static final BiConsumer LISTENER = (v, t) -> {}; diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/integration/PermissionPrecedenceTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/integration/PermissionPrecedenceTests.java index 24ef13c0b07f7..c07491dc86314 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/integration/PermissionPrecedenceTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/integration/PermissionPrecedenceTests.java @@ -91,7 +91,7 @@ public void testDifferentCombinationsOfIndices() throws Exception { .filterWithHeader(Collections.singletonMap(UsernamePasswordToken.BASIC_AUTH_HEADER, basicAuthHeaderValue(transportClientUsername(), transportClientPassword()))) .admin().indices().preparePutTemplate("template1") - .setTemplate("test_*") + .setPatterns(Collections.singletonList("test_*")) .get(); assertAcked(putResponse); @@ -105,7 +105,7 @@ public void testDifferentCombinationsOfIndices() throws Exception { Map auth = Collections.singletonMap(UsernamePasswordToken.BASIC_AUTH_HEADER, basicAuthHeaderValue("user", transportClientPassword())); assertThrowsAuthorizationException(client.filterWithHeader(auth).admin().indices().preparePutTemplate("template1") - .setTemplate("test_*")::get, PutIndexTemplateAction.NAME, "user"); + .setPatterns(Collections.singletonList("test_*"))::get, PutIndexTemplateAction.NAME, "user"); Map headers = Collections.singletonMap(UsernamePasswordToken.BASIC_AUTH_HEADER, basicAuthHeaderValue("user", new SecureString("test123"))); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/saml/TransportSamlInvalidateSessionActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/saml/TransportSamlInvalidateSessionActionTests.java index 5a4c8f3bde824..66485f0505c5f 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/saml/TransportSamlInvalidateSessionActionTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/saml/TransportSamlInvalidateSessionActionTests.java @@ -310,32 +310,26 @@ public void testInvalidateCorrectTokensFromLogoutRequest() throws Exception { assertThat(((TermQueryBuilder) filter1.get(1)).fieldName(), equalTo("refresh_token.token")); assertThat(((TermQueryBuilder) filter1.get(1)).value(), equalTo(tokenToInvalidate1.v2())); - assertThat(bulkRequests.size(), equalTo(6)); // 4 updates (refresh-token + access-token) plus 2 indexes (bwc-invalidate * 2) + assertThat(bulkRequests.size(), equalTo(4)); // 4 updates (refresh-token + access-token) // Invalidate refresh token 1 
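
The SSLChannelContext hunk above guards the TLS close_notify exchange with a timer, so a peer that never answers the close cannot leak the channel. The same race, sketched with a JDK scheduler standing in for the selector's task scheduler:

```java
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.ScheduledFuture;
import java.util.concurrent.TimeUnit;

// When a close_notify is initiated, arm a 10-second timer that force-closes
// the channel; the orderly close path disarms it if it completes first.
final class CloseTimeoutSketch {
    private static final long CLOSE_TIMEOUT_NANOS = TimeUnit.SECONDS.toNanos(10);
    private final ScheduledExecutorService scheduler = Executors.newSingleThreadScheduledExecutor();
    private volatile Runnable closeTimeoutCanceller = () -> {}; // default: nothing to cancel

    void initiateClose(Runnable forceClose) {
        ScheduledFuture<?> timeout =
            scheduler.schedule(forceClose, CLOSE_TIMEOUT_NANOS, TimeUnit.NANOSECONDS);
        closeTimeoutCanceller = () -> timeout.cancel(false);
    }

    void closeFromSelector() {
        closeTimeoutCanceller.run(); // orderly close won the race; disarm the timer
    }
}
```
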
assertThat(bulkRequests.get(0).requests().get(0), instanceOf(UpdateRequest.class)); assertThat(bulkRequests.get(0).requests().get(0).id(), equalTo("token_" + tokenToInvalidate1.v1().getId())); UpdateRequest updateRequest1 = (UpdateRequest) bulkRequests.get(0).requests().get(0); assertThat(updateRequest1.toString().contains("refresh_token"), equalTo(true)); - // BWC incalidate access token 1 - assertThat(bulkRequests.get(1).requests().get(0), instanceOf(IndexRequest.class)); - assertThat(bulkRequests.get(1).requests().get(0).id(), equalTo("invalidated-token_" + tokenToInvalidate1.v1().getId())); // Invalidate access token 1 - assertThat(bulkRequests.get(2).requests().get(0), instanceOf(UpdateRequest.class)); - assertThat(bulkRequests.get(2).requests().get(0).id(), equalTo("token_" + tokenToInvalidate1.v1().getId())); - UpdateRequest updateRequest2 = (UpdateRequest) bulkRequests.get(2).requests().get(0); + assertThat(bulkRequests.get(1).requests().get(0), instanceOf(UpdateRequest.class)); + assertThat(bulkRequests.get(1).requests().get(0).id(), equalTo("token_" + tokenToInvalidate1.v1().getId())); + UpdateRequest updateRequest2 = (UpdateRequest) bulkRequests.get(1).requests().get(0); assertThat(updateRequest2.toString().contains("access_token"), equalTo(true)); // Invalidate refresh token 2 - assertThat(bulkRequests.get(3).requests().get(0), instanceOf(UpdateRequest.class)); - assertThat(bulkRequests.get(3).requests().get(0).id(), equalTo("token_" + tokenToInvalidate2.v1().getId())); - UpdateRequest updateRequest3 = (UpdateRequest) bulkRequests.get(3).requests().get(0); + assertThat(bulkRequests.get(2).requests().get(0), instanceOf(UpdateRequest.class)); + assertThat(bulkRequests.get(2).requests().get(0).id(), equalTo("token_" + tokenToInvalidate2.v1().getId())); + UpdateRequest updateRequest3 = (UpdateRequest) bulkRequests.get(2).requests().get(0); assertThat(updateRequest3.toString().contains("refresh_token"), equalTo(true)); - // BWC incalidate access token 2 - assertThat(bulkRequests.get(4).requests().get(0), instanceOf(IndexRequest.class)); - assertThat(bulkRequests.get(4).requests().get(0).id(), equalTo("invalidated-token_" + tokenToInvalidate2.v1().getId())); // Invalidate access token 2 - assertThat(bulkRequests.get(5).requests().get(0), instanceOf(UpdateRequest.class)); - assertThat(bulkRequests.get(5).requests().get(0).id(), equalTo("token_" + tokenToInvalidate2.v1().getId())); - UpdateRequest updateRequest4 = (UpdateRequest) bulkRequests.get(5).requests().get(0); + assertThat(bulkRequests.get(3).requests().get(0), instanceOf(UpdateRequest.class)); + assertThat(bulkRequests.get(3).requests().get(0).id(), equalTo("token_" + tokenToInvalidate2.v1().getId())); + UpdateRequest updateRequest4 = (UpdateRequest) bulkRequests.get(3).requests().get(0); assertThat(updateRequest4.toString().contains("access_token"), equalTo(true)); } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/saml/TransportSamlLogoutActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/saml/TransportSamlLogoutActionTests.java index 7dec105e1ee80..085df140f3ecb 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/saml/TransportSamlLogoutActionTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/saml/TransportSamlLogoutActionTests.java @@ -241,7 +241,7 @@ public void testLogoutInvalidatesToken() throws Exception { final PlainActionFuture> future = new 
PlainActionFuture<>(); tokenService.createUserToken(authentication, authentication, future, tokenMetaData, true); final UserToken userToken = future.actionGet().v1(); - mockGetTokenFromId(userToken, client); + mockGetTokenFromId(userToken, false, client); final String tokenString = tokenService.getUserTokenString(userToken); final SamlLogoutRequest request = new SamlLogoutRequest(); @@ -256,17 +256,13 @@ public void testLogoutInvalidatesToken() throws Exception { assertThat(indexRequest1, notNullValue()); assertThat(indexRequest1.id(), startsWith("token")); - assertThat(bulkRequests.size(), equalTo(2)); - final BulkRequest bulkRequest1 = bulkRequests.get(0); - assertThat(bulkRequest1.requests().size(), equalTo(1)); - assertThat(bulkRequest1.requests().get(0), instanceOf(IndexRequest.class)); - assertThat(bulkRequest1.requests().get(0).id(), startsWith("invalidated-token_")); + assertThat(bulkRequests.size(), equalTo(1)); - final BulkRequest bulkRequest2 = bulkRequests.get(1); - assertThat(bulkRequest2.requests().size(), equalTo(1)); - assertThat(bulkRequest2.requests().get(0), instanceOf(UpdateRequest.class)); - assertThat(bulkRequest2.requests().get(0).id(), startsWith("token_")); - assertThat(bulkRequest2.requests().get(0).toString(), containsString("\"access_token\":{\"invalidated\":true")); + final BulkRequest bulkRequest = bulkRequests.get(0); + assertThat(bulkRequest.requests().size(), equalTo(1)); + assertThat(bulkRequest.requests().get(0), instanceOf(UpdateRequest.class)); + assertThat(bulkRequest.requests().get(0).id(), startsWith("token_")); + assertThat(bulkRequest.requests().get(0).toString(), containsString("\"access_token\":{\"invalidated\":true")); } } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/AuthenticationServiceTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/AuthenticationServiceTests.java index 5e1fe282bdcf6..397c68c1b72ed 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/AuthenticationServiceTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/AuthenticationServiceTests.java @@ -12,12 +12,8 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.get.GetAction; import org.elasticsearch.action.get.GetRequestBuilder; -import org.elasticsearch.action.get.GetResponse; import org.elasticsearch.action.get.MultiGetAction; -import org.elasticsearch.action.get.MultiGetItemResponse; -import org.elasticsearch.action.get.MultiGetRequest; import org.elasticsearch.action.get.MultiGetRequestBuilder; -import org.elasticsearch.action.get.MultiGetResponse; import org.elasticsearch.action.index.IndexAction; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.index.IndexRequestBuilder; @@ -26,9 +22,9 @@ import org.elasticsearch.action.update.UpdateAction; import org.elasticsearch.action.update.UpdateRequestBuilder; import org.elasticsearch.client.Client; +import org.elasticsearch.cluster.health.ClusterHealthStatus; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.SuppressForbidden; -import org.elasticsearch.common.collect.MapBuilder; import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.StreamInput; @@ -88,7 +84,6 @@ import static org.elasticsearch.test.SecurityTestsUtils.assertAuthenticationException; import static 
org.elasticsearch.xpack.core.security.support.Exceptions.authenticationError; -import static org.elasticsearch.xpack.security.authc.TokenServiceTests.mockCheckTokenInvalidationFromId; import static org.elasticsearch.xpack.security.authc.TokenServiceTests.mockGetTokenFromId; import static org.hamcrest.Matchers.arrayContaining; import static org.hamcrest.Matchers.contains; @@ -109,6 +104,7 @@ import static org.mockito.Mockito.mock; import static org.mockito.Mockito.never; import static org.mockito.Mockito.reset; +import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.verifyNoMoreInteractions; import static org.mockito.Mockito.verifyZeroInteractions; @@ -139,6 +135,7 @@ public class AuthenticationServiceTests extends ESTestCase { @SuppressForbidden(reason = "Allow accessing localhost") public void init() throws Exception { token = mock(AuthenticationToken.class); + when(token.principal()).thenReturn(randomAlphaOfLength(5)); message = new InternalMessage(); remoteAddress = new InetSocketAddress(InetAddress.getLocalHost(), 100); message.remoteAddress(new TransportAddress(remoteAddress)); @@ -233,7 +230,7 @@ public void testTokenMissing() throws Exception { }); ElasticsearchSecurityException e = expectThrows(ElasticsearchSecurityException.class, () -> future.actionGet()); - assertThat(e.getMessage(), containsString("missing authentication token")); + assertThat(e.getMessage(), containsString("missing authentication credentials")); verify(auditTrail).anonymousAccessDenied(reqId, "_action", message); verifyNoMoreInteractions(auditTrail); } @@ -264,6 +261,134 @@ public void testAuthenticateBothSupportSecondSucceeds() throws Exception { verify(auditTrail).authenticationFailed(reqId, firstRealm.name(), token, "_action", message); } + public void testAuthenticateSmartRealmOrdering() { + User user = new User("_username", "r1"); + when(firstRealm.supports(token)).thenReturn(true); + mockAuthenticate(firstRealm, token, null); + when(secondRealm.supports(token)).thenReturn(true); + mockAuthenticate(secondRealm, token, user); + when(secondRealm.token(threadContext)).thenReturn(token); + final String reqId = AuditUtil.getOrGenerateRequestId(threadContext); + + final AtomicBoolean completed = new AtomicBoolean(false); + service.authenticate("_action", message, (User)null, ActionListener.wrap(result -> { + assertThat(result, notNullValue()); + assertThat(result.getUser(), is(user)); + assertThat(result.getLookedUpBy(), is(nullValue())); + assertThat(result.getAuthenticatedBy(), is(notNullValue())); // TODO implement equals + assertThreadContextContainsAuthentication(result); + setCompletedToTrue(completed); + }, this::logAndFail)); + assertTrue(completed.get()); + + completed.set(false); + service.authenticate("_action", message, (User)null, ActionListener.wrap(result -> { + assertThat(result, notNullValue()); + assertThat(result.getUser(), is(user)); + assertThat(result.getLookedUpBy(), is(nullValue())); + assertThat(result.getAuthenticatedBy(), is(notNullValue())); // TODO implement equals + assertThreadContextContainsAuthentication(result); + setCompletedToTrue(completed); + }, this::logAndFail)); + verify(auditTrail).authenticationFailed(reqId, firstRealm.name(), token, "_action", message); + verify(auditTrail, times(2)).authenticationSuccess(reqId, secondRealm.name(), user, "_action", message); + verify(firstRealm, times(2)).name(); // used above one time + verify(secondRealm, times(3)).name(); // used above one time + verify(secondRealm, 
times(2)).type(); // used to create realm ref + verify(firstRealm, times(2)).token(threadContext); + verify(secondRealm, times(2)).token(threadContext); + verify(firstRealm).supports(token); + verify(secondRealm, times(2)).supports(token); + verify(firstRealm).authenticate(eq(token), any(ActionListener.class)); + verify(secondRealm, times(2)).authenticate(eq(token), any(ActionListener.class)); + verifyNoMoreInteractions(auditTrail, firstRealm, secondRealm); + } + + public void testCacheClearOnSecurityIndexChange() { + long expectedInvalidation = 0L; + assertEquals(expectedInvalidation, service.getNumInvalidation()); + + // existing to no longer present + SecurityIndexManager.State previousState = dummyState(randomFrom(ClusterHealthStatus.GREEN, ClusterHealthStatus.YELLOW)); + SecurityIndexManager.State currentState = dummyState(null); + service.onSecurityIndexStateChange(previousState, currentState); + assertEquals(++expectedInvalidation, service.getNumInvalidation()); + + // doesn't exist to exists + previousState = dummyState(null); + currentState = dummyState(randomFrom(ClusterHealthStatus.GREEN, ClusterHealthStatus.YELLOW)); + service.onSecurityIndexStateChange(previousState, currentState); + assertEquals(++expectedInvalidation, service.getNumInvalidation()); + + // green or yellow to red + previousState = dummyState(randomFrom(ClusterHealthStatus.GREEN, ClusterHealthStatus.YELLOW)); + currentState = dummyState(ClusterHealthStatus.RED); + service.onSecurityIndexStateChange(previousState, currentState); + assertEquals(expectedInvalidation, service.getNumInvalidation()); + + // red to non red + previousState = dummyState(ClusterHealthStatus.RED); + currentState = dummyState(randomFrom(ClusterHealthStatus.GREEN, ClusterHealthStatus.YELLOW)); + service.onSecurityIndexStateChange(previousState, currentState); + assertEquals(++expectedInvalidation, service.getNumInvalidation()); + + // green to yellow or yellow to green + previousState = dummyState(randomFrom(ClusterHealthStatus.GREEN, ClusterHealthStatus.YELLOW)); + currentState = dummyState(previousState.indexStatus == ClusterHealthStatus.GREEN ? 
+ ClusterHealthStatus.YELLOW : ClusterHealthStatus.GREEN); + service.onSecurityIndexStateChange(previousState, currentState); + assertEquals(expectedInvalidation, service.getNumInvalidation()); + } + + public void testAuthenticateSmartRealmOrderingDisabled() { + final Settings settings = Settings.builder() + .put(AuthenticationService.SUCCESS_AUTH_CACHE_ENABLED.getKey(), false) + .build(); + service = new AuthenticationService(settings, realms, auditTrail, + new DefaultAuthenticationFailureHandler(Collections.emptyMap()), threadPool, new AnonymousUser(Settings.EMPTY), + tokenService); + User user = new User("_username", "r1"); + when(firstRealm.supports(token)).thenReturn(true); + mockAuthenticate(firstRealm, token, null); + when(secondRealm.supports(token)).thenReturn(true); + mockAuthenticate(secondRealm, token, user); + when(secondRealm.token(threadContext)).thenReturn(token); + final String reqId = AuditUtil.getOrGenerateRequestId(threadContext); + + final AtomicBoolean completed = new AtomicBoolean(false); + service.authenticate("_action", message, (User)null, ActionListener.wrap(result -> { + assertThat(result, notNullValue()); + assertThat(result.getUser(), is(user)); + assertThat(result.getLookedUpBy(), is(nullValue())); + assertThat(result.getAuthenticatedBy(), is(notNullValue())); // TODO implement equals + assertThreadContextContainsAuthentication(result); + setCompletedToTrue(completed); + }, this::logAndFail)); + assertTrue(completed.get()); + + completed.set(false); + service.authenticate("_action", message, (User)null, ActionListener.wrap(result -> { + assertThat(result, notNullValue()); + assertThat(result.getUser(), is(user)); + assertThat(result.getLookedUpBy(), is(nullValue())); + assertThat(result.getAuthenticatedBy(), is(notNullValue())); // TODO implement equals + assertThreadContextContainsAuthentication(result); + setCompletedToTrue(completed); + }, this::logAndFail)); + verify(auditTrail, times(2)).authenticationFailed(reqId, firstRealm.name(), token, "_action", message); + verify(auditTrail, times(2)).authenticationSuccess(reqId, secondRealm.name(), user, "_action", message); + verify(firstRealm, times(3)).name(); // used above one time + verify(secondRealm, times(3)).name(); // used above one time + verify(secondRealm, times(2)).type(); // used to create realm ref + verify(firstRealm, times(2)).token(threadContext); + verify(secondRealm, times(2)).token(threadContext); + verify(firstRealm, times(2)).supports(token); + verify(secondRealm, times(2)).supports(token); + verify(firstRealm, times(2)).authenticate(eq(token), any(ActionListener.class)); + verify(secondRealm, times(2)).authenticate(eq(token), any(ActionListener.class)); + verifyNoMoreInteractions(auditTrail, firstRealm, secondRealm); + } + public void testAuthenticateFirstNotSupportingSecondSucceeds() throws Exception { User user = new User("_username", "r1"); when(firstRealm.supports(token)).thenReturn(false); @@ -620,6 +745,7 @@ public void testRealmTokenThrowingExceptionRest() throws Exception { public void testRealmSupportsMethodThrowingException() throws Exception { AuthenticationToken token = mock(AuthenticationToken.class); + when(token.principal()).thenReturn(randomAlphaOfLength(5)); when(secondRealm.token(threadContext)).thenReturn(token); when(secondRealm.supports(token)).thenThrow(authenticationError("realm doesn't like supports")); final String reqId = AuditUtil.getOrGenerateRequestId(threadContext); @@ -634,6 +760,7 @@ public void testRealmSupportsMethodThrowingException() throws Exception { 
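
testCacheClearOnSecurityIndexChange above pins down exactly when the authentication service drops its successful-realm cache. The rule the assertions imply, expressed as a predicate (a sketch inferred from the test, not the service implementation; null stands for "index does not exist", as in dummyState(null)):

```java
import org.elasticsearch.cluster.health.ClusterHealthStatus;

// Invalidate when the security index appears or disappears, or when it
// recovers from RED; GREEN<->YELLOW transitions keep the cache intact.
final class CacheInvalidationRuleSketch {
    static boolean shouldInvalidate(ClusterHealthStatus previous, ClusterHealthStatus current) {
        boolean existenceChanged = (previous == null) != (current == null);
        boolean recoveredFromRed = previous == ClusterHealthStatus.RED
            && current != null && current != ClusterHealthStatus.RED;
        return existenceChanged || recoveredFromRed;
    }
}
```
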
public void testRealmSupportsMethodThrowingExceptionRest() throws Exception { AuthenticationToken token = mock(AuthenticationToken.class); + when(token.principal()).thenReturn(randomAlphaOfLength(5)); when(secondRealm.token(threadContext)).thenReturn(token); when(secondRealm.supports(token)).thenThrow(authenticationError("realm doesn't like supports")); try { @@ -649,6 +776,7 @@ public void testRealmSupportsMethodThrowingExceptionRest() throws Exception { public void testRealmAuthenticateTerminatingAuthenticationProcess() throws Exception { final String reqId = AuditUtil.getOrGenerateRequestId(threadContext); final AuthenticationToken token = mock(AuthenticationToken.class); + when(token.principal()).thenReturn(randomAlphaOfLength(5)); when(secondRealm.token(threadContext)).thenReturn(token); when(secondRealm.supports(token)).thenReturn(true); final boolean terminateWithNoException = rarely(); @@ -690,6 +818,7 @@ public void testRealmAuthenticateTerminatingAuthenticationProcess() throws Excep public void testRealmAuthenticateThrowingException() throws Exception { AuthenticationToken token = mock(AuthenticationToken.class); + when(token.principal()).thenReturn(randomAlphaOfLength(5)); when(secondRealm.token(threadContext)).thenReturn(token); when(secondRealm.supports(token)).thenReturn(true); doThrow(authenticationError("realm doesn't like authenticate")) @@ -706,6 +835,7 @@ public void testRealmAuthenticateThrowingException() throws Exception { public void testRealmAuthenticateThrowingExceptionRest() throws Exception { AuthenticationToken token = mock(AuthenticationToken.class); + when(token.principal()).thenReturn(randomAlphaOfLength(5)); when(secondRealm.token(threadContext)).thenReturn(token); when(secondRealm.supports(token)).thenReturn(true); doThrow(authenticationError("realm doesn't like authenticate")) @@ -722,6 +852,7 @@ public void testRealmAuthenticateThrowingExceptionRest() throws Exception { public void testRealmLookupThrowingException() throws Exception { AuthenticationToken token = mock(AuthenticationToken.class); + when(token.principal()).thenReturn(randomAlphaOfLength(5)); threadContext.putHeader(AuthenticationServiceField.RUN_AS_USER_HEADER, "run_as"); when(secondRealm.token(threadContext)).thenReturn(token); when(secondRealm.supports(token)).thenReturn(true); @@ -742,6 +873,7 @@ public void testRealmLookupThrowingException() throws Exception { public void testRealmLookupThrowingExceptionRest() throws Exception { AuthenticationToken token = mock(AuthenticationToken.class); + when(token.principal()).thenReturn(randomAlphaOfLength(5)); threadContext.putHeader(AuthenticationServiceField.RUN_AS_USER_HEADER, "run_as"); when(secondRealm.token(threadContext)).thenReturn(token); when(secondRealm.supports(token)).thenReturn(true); @@ -761,6 +893,7 @@ public void testRealmLookupThrowingExceptionRest() throws Exception { public void testRunAsLookupSameRealm() throws Exception { AuthenticationToken token = mock(AuthenticationToken.class); + when(token.principal()).thenReturn(randomAlphaOfLength(5)); threadContext.putHeader(AuthenticationServiceField.RUN_AS_USER_HEADER, "run_as"); when(secondRealm.token(threadContext)).thenReturn(token); when(secondRealm.supports(token)).thenReturn(true); @@ -809,6 +942,7 @@ public void testRunAsLookupSameRealm() throws Exception { @SuppressWarnings("unchecked") public void testRunAsLookupDifferentRealm() throws Exception { AuthenticationToken token = mock(AuthenticationToken.class); + when(token.principal()).thenReturn(randomAlphaOfLength(5)); 
threadContext.putHeader(AuthenticationServiceField.RUN_AS_USER_HEADER, "run_as"); when(secondRealm.token(threadContext)).thenReturn(token); when(secondRealm.supports(token)).thenReturn(true); @@ -845,6 +979,7 @@ public void testRunAsLookupDifferentRealm() throws Exception { public void testRunAsWithEmptyRunAsUsernameRest() throws Exception { AuthenticationToken token = mock(AuthenticationToken.class); + when(token.principal()).thenReturn(randomAlphaOfLength(5)); User user = new User("lookup user", new String[]{"user"}); threadContext.putHeader(AuthenticationServiceField.RUN_AS_USER_HEADER, ""); when(secondRealm.token(threadContext)).thenReturn(token); @@ -863,6 +998,7 @@ public void testRunAsWithEmptyRunAsUsernameRest() throws Exception { public void testRunAsWithEmptyRunAsUsername() throws Exception { AuthenticationToken token = mock(AuthenticationToken.class); + when(token.principal()).thenReturn(randomAlphaOfLength(5)); User user = new User("lookup user", new String[]{"user"}); threadContext.putHeader(AuthenticationServiceField.RUN_AS_USER_HEADER, ""); final String reqId = AuditUtil.getOrGenerateRequestId(threadContext); @@ -882,6 +1018,7 @@ public void testRunAsWithEmptyRunAsUsername() throws Exception { @SuppressWarnings("unchecked") public void testAuthenticateTransportDisabledRunAsUser() throws Exception { AuthenticationToken token = mock(AuthenticationToken.class); + when(token.principal()).thenReturn(randomAlphaOfLength(5)); threadContext.putHeader(AuthenticationServiceField.RUN_AS_USER_HEADER, "run_as"); final String reqId = AuditUtil.getOrGenerateRequestId(threadContext); when(secondRealm.token(threadContext)).thenReturn(token); @@ -903,6 +1040,7 @@ public void testAuthenticateTransportDisabledRunAsUser() throws Exception { public void testAuthenticateRestDisabledRunAsUser() throws Exception { AuthenticationToken token = mock(AuthenticationToken.class); + when(token.principal()).thenReturn(randomAlphaOfLength(5)); threadContext.putHeader(AuthenticationServiceField.RUN_AS_USER_HEADER, "run_as"); when(secondRealm.token(threadContext)).thenReturn(token); when(secondRealm.supports(token)).thenReturn(true); @@ -934,8 +1072,7 @@ public void testAuthenticateWithToken() throws Exception { } String token = tokenService.getUserTokenString(tokenFuture.get().v1()); when(client.prepareMultiGet()).thenReturn(new MultiGetRequestBuilder(client, MultiGetAction.INSTANCE)); - mockGetTokenFromId(tokenFuture.get().v1(), client); - mockCheckTokenInvalidationFromId(tokenFuture.get().v1(), client); + mockGetTokenFromId(tokenFuture.get().v1(), false, client); when(securityIndex.isAvailable()).thenReturn(true); when(securityIndex.indexExists()).thenReturn(true); try (ThreadContext.StoredContext ignore = threadContext.stashContext()) { @@ -1017,32 +1154,7 @@ public void testExpiredToken() throws Exception { tokenService.createUserToken(expected, originatingAuth, tokenFuture, Collections.emptyMap(), true); } String token = tokenService.getUserTokenString(tokenFuture.get().v1()); - mockGetTokenFromId(tokenFuture.get().v1(), client); - when(client.prepareMultiGet()).thenReturn(new MultiGetRequestBuilder(client, MultiGetAction.INSTANCE)); - doAnswer(invocationOnMock -> { - ActionListener listener = (ActionListener) invocationOnMock.getArguments()[1]; - MultiGetResponse response = mock(MultiGetResponse.class); - MultiGetItemResponse[] responses = new MultiGetItemResponse[2]; - when(response.getResponses()).thenReturn(responses); - - final boolean newExpired = randomBoolean(); - GetResponse oldGetResponse = 
mock(GetResponse.class); - when(oldGetResponse.isExists()).thenReturn(newExpired == false); - responses[0] = new MultiGetItemResponse(oldGetResponse, null); - - GetResponse getResponse = mock(GetResponse.class); - responses[1] = new MultiGetItemResponse(getResponse, null); - when(getResponse.isExists()).thenReturn(newExpired); - if (newExpired) { - Map source = MapBuilder.newMapBuilder() - .put("access_token", Collections.singletonMap("invalidated", true)) - .immutableMap(); - when(getResponse.getSource()).thenReturn(source); - } - listener.onResponse(response); - return Void.TYPE; - }).when(client).multiGet(any(MultiGetRequest.class), any(ActionListener.class)); - + mockGetTokenFromId(tokenFuture.get().v1(), true, client); doAnswer(invocationOnMock -> { ((Runnable) invocationOnMock.getArguments()[1]).run(); return null; @@ -1149,4 +1261,8 @@ private void logAndFail(Exception e) { private void setCompletedToTrue(AtomicBoolean completed) { assertTrue(completed.compareAndSet(false, true)); } + + private SecurityIndexManager.State dummyState(ClusterHealthStatus indexStatus) { + return new SecurityIndexManager.State(true, true, true, true, null, indexStatus); + } } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/TokenAuthIntegTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/TokenAuthIntegTests.java index 968c17f556b9b..61ea4ef967224 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/TokenAuthIntegTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/TokenAuthIntegTests.java @@ -54,7 +54,7 @@ public Settings nodeSettings(int nodeOrdinal) { return Settings.builder() .put(super.nodeSettings(nodeOrdinal)) // crank up the deletion interval and set timeout for delete requests - .put(TokenService.DELETE_INTERVAL.getKey(), TimeValue.timeValueSeconds(1L)) + .put(TokenService.DELETE_INTERVAL.getKey(), TimeValue.timeValueMillis(200L)) .put(TokenService.DELETE_TIMEOUT.getKey(), TimeValue.timeValueSeconds(5L)) .put(XPackSettings.TOKEN_SERVICE_ENABLED_SETTING.getKey(), true) .build(); @@ -151,7 +151,7 @@ public void testExpiredTokensDeletedAfterExpiration() throws Exception { assertBusy(() -> { SearchResponse searchResponse = client.prepareSearch(SecurityIndexManager.SECURITY_INDEX_NAME) .setSource(SearchSourceBuilder.searchSource() - .query(QueryBuilders.termQuery("doc_type", TokenService.INVALIDATED_TOKEN_DOC_TYPE))) + .query(QueryBuilders.termQuery("doc_type", "token"))) .setSize(1) .setTerminateAfter(1) .get(); @@ -159,11 +159,11 @@ public void testExpiredTokensDeletedAfterExpiration() throws Exception { docId.set(searchResponse.getHits().getAt(0).getId()); }); - // hack doc to modify the time to the day before - Instant dayBefore = created.minus(1L, ChronoUnit.DAYS); - assertTrue(Instant.now().isAfter(dayBefore)); + // hack doc to modify the creation time to the day before + Instant yesterday = created.minus(36L, ChronoUnit.HOURS); + assertTrue(Instant.now().isAfter(yesterday)); client.prepareUpdate(SecurityIndexManager.SECURITY_INDEX_NAME, "doc", docId.get()) - .setDoc("expiration_time", dayBefore.toEpochMilli()) + .setDoc("creation_time", yesterday.toEpochMilli()) .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE) .get(); @@ -183,8 +183,7 @@ public void testExpiredTokensDeletedAfterExpiration() throws Exception { client.admin().indices().prepareRefresh(SecurityIndexManager.SECURITY_INDEX_NAME).get(); SearchResponse searchResponse = 
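
The integration test above cannot wait 24 real hours for the remover, so it rewrites the stored document instead. The trick, restated from the hunk (client, created, and docId come from the surrounding test):

```java
// Backdate creation_time by 36 hours so the document is safely behind the
// remover's 24-hour cutoff, and refresh immediately so the delete-by-query
// issued by ExpiredTokenRemover can see the change.
Instant yesterday = created.minus(36L, ChronoUnit.HOURS);
client.prepareUpdate(SecurityIndexManager.SECURITY_INDEX_NAME, "doc", docId.get())
    .setDoc("creation_time", yesterday.toEpochMilli())
    .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE)
    .get();
```
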
client.prepareSearch(SecurityIndexManager.SECURITY_INDEX_NAME) .setSource(SearchSourceBuilder.searchSource() - .query(QueryBuilders.termQuery("doc_type", TokenService.INVALIDATED_TOKEN_DOC_TYPE))) - .setSize(0) + .query(QueryBuilders.termQuery("doc_type", "token"))) .setTerminateAfter(1) .get(); assertThat(searchResponse.getHits().getTotalHits().value, equalTo(0L)); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/TokenServiceTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/TokenServiceTests.java index 286f07667eca6..47770288b1b66 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/TokenServiceTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/TokenServiceTests.java @@ -12,11 +12,6 @@ import org.elasticsearch.action.get.GetRequest; import org.elasticsearch.action.get.GetRequestBuilder; import org.elasticsearch.action.get.GetResponse; -import org.elasticsearch.action.get.MultiGetAction; -import org.elasticsearch.action.get.MultiGetItemResponse; -import org.elasticsearch.action.get.MultiGetRequest; -import org.elasticsearch.action.get.MultiGetRequestBuilder; -import org.elasticsearch.action.get.MultiGetResponse; import org.elasticsearch.action.index.IndexAction; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.index.IndexRequestBuilder; @@ -27,7 +22,6 @@ import org.elasticsearch.client.Client; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.collect.MapBuilder; import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; @@ -104,23 +98,6 @@ public void setupClient() { .setId((String) invocationOnMock.getArguments()[2]); return builder; }).when(client).prepareGet(anyString(), anyString(), anyString()); - when(client.prepareMultiGet()).thenReturn(new MultiGetRequestBuilder(client, MultiGetAction.INSTANCE)); - doAnswer(invocationOnMock -> { - ActionListener listener = (ActionListener) invocationOnMock.getArguments()[1]; - MultiGetResponse response = mock(MultiGetResponse.class); - MultiGetItemResponse[] responses = new MultiGetItemResponse[2]; - when(response.getResponses()).thenReturn(responses); - - GetResponse oldGetResponse = mock(GetResponse.class); - when(oldGetResponse.isExists()).thenReturn(false); - responses[0] = new MultiGetItemResponse(oldGetResponse, null); - - GetResponse getResponse = mock(GetResponse.class); - responses[1] = new MultiGetItemResponse(getResponse, null); - when(getResponse.isExists()).thenReturn(false); - listener.onResponse(response); - return Void.TYPE; - }).when(client).multiGet(any(MultiGetRequest.class), any(ActionListener.class)); when(client.prepareIndex(any(String.class), any(String.class), any(String.class))) .thenReturn(new IndexRequestBuilder(client, IndexAction.INSTANCE)); when(client.prepareUpdate(any(String.class), any(String.class), any(String.class))) @@ -168,8 +145,7 @@ public void testAttachAndGetToken() throws Exception { tokenService.createUserToken(authentication, authentication, tokenFuture, Collections.emptyMap(), true); final UserToken token = tokenFuture.get().v1(); assertNotNull(token); - mockGetTokenFromId(token); - mockCheckTokenInvalidationFromId(token); + mockGetTokenFromId(token, false); ThreadContext requestContext = new ThreadContext(Settings.EMPTY); 
diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/TokenServiceTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/TokenServiceTests.java
index 286f07667eca6..47770288b1b66 100644
--- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/TokenServiceTests.java
+++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/TokenServiceTests.java
@@ -12,11 +12,6 @@
 import org.elasticsearch.action.get.GetRequest;
 import org.elasticsearch.action.get.GetRequestBuilder;
 import org.elasticsearch.action.get.GetResponse;
-import org.elasticsearch.action.get.MultiGetAction;
-import org.elasticsearch.action.get.MultiGetItemResponse;
-import org.elasticsearch.action.get.MultiGetRequest;
-import org.elasticsearch.action.get.MultiGetRequestBuilder;
-import org.elasticsearch.action.get.MultiGetResponse;
 import org.elasticsearch.action.index.IndexAction;
 import org.elasticsearch.action.index.IndexRequest;
 import org.elasticsearch.action.index.IndexRequestBuilder;
@@ -27,7 +22,6 @@
 import org.elasticsearch.client.Client;
 import org.elasticsearch.cluster.service.ClusterService;
 import org.elasticsearch.common.Strings;
-import org.elasticsearch.common.collect.MapBuilder;
 import org.elasticsearch.common.collect.Tuple;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.unit.TimeValue;
@@ -104,23 +98,6 @@ public void setupClient() {
                 .setId((String) invocationOnMock.getArguments()[2]);
             return builder;
         }).when(client).prepareGet(anyString(), anyString(), anyString());
-        when(client.prepareMultiGet()).thenReturn(new MultiGetRequestBuilder(client, MultiGetAction.INSTANCE));
-        doAnswer(invocationOnMock -> {
-            ActionListener listener = (ActionListener) invocationOnMock.getArguments()[1];
-            MultiGetResponse response = mock(MultiGetResponse.class);
-            MultiGetItemResponse[] responses = new MultiGetItemResponse[2];
-            when(response.getResponses()).thenReturn(responses);
-
-            GetResponse oldGetResponse = mock(GetResponse.class);
-            when(oldGetResponse.isExists()).thenReturn(false);
-            responses[0] = new MultiGetItemResponse(oldGetResponse, null);
-
-            GetResponse getResponse = mock(GetResponse.class);
-            responses[1] = new MultiGetItemResponse(getResponse, null);
-            when(getResponse.isExists()).thenReturn(false);
-            listener.onResponse(response);
-            return Void.TYPE;
-        }).when(client).multiGet(any(MultiGetRequest.class), any(ActionListener.class));
         when(client.prepareIndex(any(String.class), any(String.class), any(String.class)))
             .thenReturn(new IndexRequestBuilder(client, IndexAction.INSTANCE));
         when(client.prepareUpdate(any(String.class), any(String.class), any(String.class)))
@@ -168,8 +145,7 @@ public void testAttachAndGetToken() throws Exception {
         tokenService.createUserToken(authentication, authentication, tokenFuture, Collections.emptyMap(), true);
         final UserToken token = tokenFuture.get().v1();
         assertNotNull(token);
-        mockGetTokenFromId(token);
-        mockCheckTokenInvalidationFromId(token);
+        mockGetTokenFromId(token, false);

         ThreadContext requestContext = new ThreadContext(Settings.EMPTY);
         requestContext.putHeader("Authorization", randomFrom("Bearer ", "BEARER ", "bearer ") + tokenService.getUserTokenString(token));
@@ -215,8 +191,7 @@ public void testRotateKey() throws Exception {
         tokenService.createUserToken(authentication, authentication, tokenFuture, Collections.emptyMap(), true);
         final UserToken token = tokenFuture.get().v1();
         assertNotNull(token);
-        mockGetTokenFromId(token);
-        mockCheckTokenInvalidationFromId(token);
+        mockGetTokenFromId(token, false);

         ThreadContext requestContext = new ThreadContext(Settings.EMPTY);
         requestContext.putHeader("Authorization", "Bearer " + tokenService.getUserTokenString(token));
@@ -244,7 +219,7 @@ public void testRotateKey() throws Exception {
         requestContext = new ThreadContext(Settings.EMPTY);
         requestContext.putHeader("Authorization", "Bearer " + tokenService.getUserTokenString(newToken));
-        mockGetTokenFromId(newToken);
+        mockGetTokenFromId(newToken, false);

         try (ThreadContext.StoredContext ignore = requestContext.newStoredContext(true)) {
             PlainActionFuture future = new PlainActionFuture<>();
@@ -275,8 +250,7 @@ public void testKeyExchange() throws Exception {
         tokenService.createUserToken(authentication, authentication, tokenFuture, Collections.emptyMap(), true);
         final UserToken token = tokenFuture.get().v1();
         assertNotNull(token);
-        mockGetTokenFromId(token);
-        mockCheckTokenInvalidationFromId(token);
+        mockGetTokenFromId(token, false);

         ThreadContext requestContext = new ThreadContext(Settings.EMPTY);
         requestContext.putHeader("Authorization", "Bearer " + tokenService.getUserTokenString(token));
@@ -306,8 +280,7 @@ public void testPruneKeys() throws Exception {
         tokenService.createUserToken(authentication, authentication, tokenFuture, Collections.emptyMap(), true);
         final UserToken token = tokenFuture.get().v1();
         assertNotNull(token);
-        mockGetTokenFromId(token);
-        mockCheckTokenInvalidationFromId(token);
+        mockGetTokenFromId(token, false);

         ThreadContext requestContext = new ThreadContext(Settings.EMPTY);
         requestContext.putHeader("Authorization", "Bearer " + tokenService.getUserTokenString(token));
@@ -351,7 +324,7 @@ public void testPruneKeys() throws Exception {
         requestContext = new ThreadContext(Settings.EMPTY);
         requestContext.putHeader("Authorization", "Bearer " + tokenService.getUserTokenString(newToken));
-        mockGetTokenFromId(newToken);
+        mockGetTokenFromId(newToken, false);
         try (ThreadContext.StoredContext ignore = requestContext.newStoredContext(true)) {
             PlainActionFuture future = new PlainActionFuture<>();
             tokenService.getAndValidateToken(requestContext, future);
@@ -368,8 +341,7 @@ public void testPassphraseWorks() throws Exception {
         tokenService.createUserToken(authentication, authentication, tokenFuture, Collections.emptyMap(), true);
         final UserToken token = tokenFuture.get().v1();
         assertNotNull(token);
-        mockGetTokenFromId(token);
-        mockCheckTokenInvalidationFromId(token);
+        mockGetTokenFromId(token, false);

         ThreadContext requestContext = new ThreadContext(Settings.EMPTY);
         requestContext.putHeader("Authorization", "Bearer " + tokenService.getUserTokenString(token));
@@ -413,33 +385,10 @@ public void testInvalidatedToken() throws Exception {
         tokenService.createUserToken(authentication, authentication, tokenFuture, Collections.emptyMap(), true);
         final UserToken token = tokenFuture.get().v1();
         assertNotNull(token);
-        doAnswer(invocationOnMock -> {
-            ActionListener listener = (ActionListener) invocationOnMock.getArguments()[1];
-            MultiGetResponse response = mock(MultiGetResponse.class);
-            MultiGetItemResponse[] responses = new MultiGetItemResponse[2];
-            when(response.getResponses()).thenReturn(responses);
-
-            final boolean newExpired = randomBoolean();
-            GetResponse oldGetResponse = mock(GetResponse.class);
-            when(oldGetResponse.isExists()).thenReturn(newExpired == false);
-            responses[0] = new MultiGetItemResponse(oldGetResponse, null);
-
-            GetResponse getResponse = mock(GetResponse.class);
-            responses[1] = new MultiGetItemResponse(getResponse, null);
-            when(getResponse.isExists()).thenReturn(newExpired);
-            if (newExpired) {
-                Map source = MapBuilder.newMapBuilder()
-                    .put("access_token", Collections.singletonMap("invalidated", true))
-                    .immutableMap();
-                when(getResponse.getSource()).thenReturn(source);
-            }
-            listener.onResponse(response);
-            return Void.TYPE;
-        }).when(client).multiGet(any(MultiGetRequest.class), any(ActionListener.class));
+        mockGetTokenFromId(token, true);

         ThreadContext requestContext = new ThreadContext(Settings.EMPTY);
         requestContext.putHeader("Authorization", "Bearer " + tokenService.getUserTokenString(token));
-        mockGetTokenFromId(token);

         try (ThreadContext.StoredContext ignore = requestContext.newStoredContext(true)) {
             PlainActionFuture future = new PlainActionFuture<>();
@@ -466,8 +415,7 @@ public void testTokenExpiry() throws Exception {
         PlainActionFuture<Tuple<UserToken, String>> tokenFuture = new PlainActionFuture<>();
         tokenService.createUserToken(authentication, authentication, tokenFuture, Collections.emptyMap(), true);
         final UserToken token = tokenFuture.get().v1();
-        mockGetTokenFromId(token);
-        mockCheckTokenInvalidationFromId(token);
+        mockGetTokenFromId(token, false);

         ThreadContext requestContext = new ThreadContext(Settings.EMPTY);
         requestContext.putHeader("Authorization", "Bearer " + tokenService.getUserTokenString(token));
@@ -577,7 +525,7 @@ public void testIndexNotAvailable() throws Exception {
         tokenService.createUserToken(authentication, authentication, tokenFuture, Collections.emptyMap(), true);
         final UserToken token = tokenFuture.get().v1();
         assertNotNull(token);
-        mockGetTokenFromId(token);
+        //mockGetTokenFromId(token, false);

         ThreadContext requestContext = new ThreadContext(Settings.EMPTY);
         requestContext.putHeader("Authorization", "Bearer " + tokenService.getUserTokenString(token));
@@ -586,7 +534,7 @@
             ActionListener listener = (ActionListener) invocationOnMock.getArguments()[1];
             listener.onFailure(new NoShardAvailableActionException(new ShardId(new Index("foo", "uuid"), 0), "shard oh shard"));
             return Void.TYPE;
-        }).when(client).multiGet(any(MultiGetRequest.class), any(ActionListener.class));
+        }).when(client).get(any(GetRequest.class), any(ActionListener.class));

         try (ThreadContext.StoredContext ignore = requestContext.newStoredContext(true)) {
             PlainActionFuture future = new PlainActionFuture<>();
@@ -606,19 +554,19 @@
             when(securityIndex.isAvailable()).thenReturn(true);
             when(securityIndex.indexExists()).thenReturn(true);
-            mockCheckTokenInvalidationFromId(token);
+            mockGetTokenFromId(token, false);
             future = new PlainActionFuture<>();
             tokenService.getAndValidateToken(requestContext, future);
             assertEquals(token.getAuthentication(), future.get().getAuthentication());
         }
     }

-    public void testGetAuthenticationWorksWithExpiredToken() throws Exception {
+    public void testGetAuthenticationWorksWithExpiredUserToken() throws Exception {
         TokenService tokenService = new TokenService(tokenServiceEnabledSettings, Clock.systemUTC(), client, securityIndex, clusterService);
         Authentication authentication = new Authentication(new User("joe", "admin"), new RealmRef("native_realm", "native", "node1"), null);
         UserToken expired = new UserToken(authentication, Instant.now().minus(3L, ChronoUnit.DAYS));
-        mockGetTokenFromId(expired);
+        mockGetTokenFromId(expired, false);
         String userTokenString = tokenService.getUserTokenString(expired);
         PlainActionFuture<Tuple<Authentication, Map<String, Object>>> authFuture = new PlainActionFuture<>();
         tokenService.getAuthenticationAndMetaData(userTokenString, authFuture);
@@ -626,62 +574,30 @@
         assertEquals(authentication, retrievedAuth);
     }

-    private void mockGetTokenFromId(UserToken userToken) {
-        mockGetTokenFromId(userToken, client);
-    }
-
-    public static void mockGetTokenFromId(UserToken userToken, Client client) {
-        doAnswer(invocationOnMock -> {
-            GetRequest getRequest = (GetRequest) invocationOnMock.getArguments()[0];
-            ActionListener getResponseListener = (ActionListener) invocationOnMock.getArguments()[1];
-            GetResponse getResponse = mock(GetResponse.class);
-            if (userToken.getId().equals(getRequest.id().replace("token_", ""))) {
-                when(getResponse.isExists()).thenReturn(true);
-                Map sourceMap = new HashMap<>();
-                try (XContentBuilder builder = XContentBuilder.builder(XContentType.JSON.xContent())) {
-                    userToken.toXContent(builder, ToXContent.EMPTY_PARAMS);
-                    sourceMap.put("access_token",
-                        Collections.singletonMap("user_token",
-                            XContentHelper.convertToMap(XContentType.JSON.xContent(), Strings.toString(builder), false)));
-                }
-                when(getResponse.getSource()).thenReturn(sourceMap);
-            }
-            getResponseListener.onResponse(getResponse);
-            return Void.TYPE;
-        }).when(client).get(any(GetRequest.class), any(ActionListener.class));
-    }
-
-    private void mockCheckTokenInvalidationFromId(UserToken userToken) {
-        mockCheckTokenInvalidationFromId(userToken, client);
+    private void mockGetTokenFromId(UserToken userToken, boolean isExpired) {
+        mockGetTokenFromId(userToken, isExpired, client);
     }

-    public static void mockCheckTokenInvalidationFromId(UserToken userToken, Client client) {
+    public static void mockGetTokenFromId(UserToken userToken, boolean isExpired, Client client) {
         doAnswer(invocationOnMock -> {
-            MultiGetRequest request = (MultiGetRequest) invocationOnMock.getArguments()[0];
-            ActionListener listener = (ActionListener) invocationOnMock.getArguments()[1];
-            MultiGetResponse response = mock(MultiGetResponse.class);
-            MultiGetItemResponse[] responses = new MultiGetItemResponse[2];
-            when(response.getResponses()).thenReturn(responses);
-            GetResponse legacyResponse = mock(GetResponse.class);
-            responses[0] = new MultiGetItemResponse(legacyResponse, null);
-            when(legacyResponse.isExists()).thenReturn(false);
-            GetResponse tokenResponse = mock(GetResponse.class);
-            if (userToken.getId().equals(request.getItems().get(1).id().replace("token_", ""))) {
-                when(tokenResponse.isExists()).thenReturn(true);
+            GetRequest request = (GetRequest) invocationOnMock.getArguments()[0];
+            ActionListener listener = (ActionListener) invocationOnMock.getArguments()[1];
+            GetResponse response = mock(GetResponse.class);
+            if (userToken.getId().equals(request.id().replace("token_", ""))) {
+                when(response.isExists()).thenReturn(true);
                 Map sourceMap = new HashMap<>();
                 try (XContentBuilder builder = XContentBuilder.builder(XContentType.JSON.xContent())) {
                     userToken.toXContent(builder, ToXContent.EMPTY_PARAMS);
                     Map accessTokenMap = new HashMap<>();
                     accessTokenMap.put("user_token",
                         XContentHelper.convertToMap(XContentType.JSON.xContent(), Strings.toString(builder), false));
-                    accessTokenMap.put("invalidated", false);
+                    accessTokenMap.put("invalidated", isExpired);
                     sourceMap.put("access_token", accessTokenMap);
                 }
-                when(tokenResponse.getSource()).thenReturn(sourceMap);
+                when(response.getSource()).thenReturn(sourceMap);
             }
-            responses[1] = new MultiGetItemResponse(tokenResponse, null);
             listener.onResponse(response);
             return Void.TYPE;
-        }).when(client).multiGet(any(MultiGetRequest.class), any(ActionListener.class));
+        }).when(client).get(any(GetRequest.class), any(ActionListener.class));
    }
 }
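The new `mockGetTokenFromId(UserToken, boolean, Client)` helper above is an instance of a general Mockito pattern: a callback-style API returns nothing to stub, so the test answers the listener by hand inside `doAnswer`. A stripped-down sketch under assumed toy interfaces (`AsyncClient` and `Listener` are illustrative, not the PR's types):

```java
// Stub an async callback API with Mockito: complete the listener manually
// instead of returning a value from the mocked method.
import static org.mockito.Mockito.any;
import static org.mockito.Mockito.doAnswer;
import static org.mockito.Mockito.mock;

interface Listener<T> { void onResponse(T response); }
interface AsyncClient { void get(String id, Listener<String> listener); }

public class DoAnswerSketch {
    public static AsyncClient stubbedClient(String knownId, String payload) {
        AsyncClient client = mock(AsyncClient.class);
        doAnswer(invocation -> {
            String id = (String) invocation.getArguments()[0];
            @SuppressWarnings("unchecked")
            Listener<String> listener = (Listener<String>) invocation.getArguments()[1];
            // Answer the callback synchronously, mirroring mockGetTokenFromId:
            // a matching id gets a populated response, anything else gets null.
            listener.onResponse(knownId.equals(id) ? payload : null);
            return null;
        }).when(client).get(any(String.class), any());
        return client;
    }
}
```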
diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ldap/LdapSessionFactoryTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ldap/LdapSessionFactoryTests.java
index c0c0a08a59a56..106eaa8932629 100644
--- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ldap/LdapSessionFactoryTests.java
+++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ldap/LdapSessionFactoryTests.java
@@ -13,19 +13,25 @@
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.util.concurrent.ThreadContext;
 import org.elasticsearch.common.util.concurrent.UncategorizedExecutionException;
+import org.elasticsearch.env.Environment;
 import org.elasticsearch.env.TestEnvironment;
 import org.elasticsearch.threadpool.TestThreadPool;
 import org.elasticsearch.threadpool.ThreadPool;
+import org.elasticsearch.watcher.ResourceWatcherService;
 import org.elasticsearch.xpack.core.security.authc.RealmConfig;
 import org.elasticsearch.xpack.core.security.authc.RealmSettings;
 import org.elasticsearch.xpack.core.security.authc.ldap.support.LdapSearchScope;
 import org.elasticsearch.xpack.core.security.authc.ldap.support.SessionFactorySettings;
+import org.elasticsearch.xpack.core.ssl.SSLConfigurationReloader;
 import org.elasticsearch.xpack.core.ssl.SSLService;
 import org.elasticsearch.xpack.security.authc.ldap.support.LdapSession;
 import org.elasticsearch.xpack.security.authc.ldap.support.LdapTestCase;
 import org.junit.After;
 import org.junit.Before;

+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.nio.file.StandardCopyOption;
 import java.util.List;
 import java.util.concurrent.ExecutionException;

@@ -38,10 +44,23 @@ public class LdapSessionFactoryTests extends LdapTestCase {
     private Settings globalSettings;
     private SSLService sslService;
     private ThreadPool threadPool;
+    private Path ldapCaPath;
+
+    @Override
+    protected boolean openLdapsPort() {
+        // Support LDAPS, because it's used in some tests
+        return true;
+    }

     @Before
     public void setup() throws Exception {
-        globalSettings = Settings.builder().put("path.home", createTempDir()).build();
+        final Path origCa = getDataPath("/org/elasticsearch/xpack/security/authc/ldap/support/ldap-ca.crt");
+        ldapCaPath = createTempFile();
+        Files.copy(origCa, ldapCaPath, StandardCopyOption.REPLACE_EXISTING);
+        globalSettings = Settings.builder()
+            .put("path.home", createTempDir())
+            .putList(RealmSettings.realmSslPrefix(REALM_IDENTIFIER) + "certificate_authorities", ldapCaPath.toString())
+            .build();
         sslService = new SSLService(globalSettings, TestEnvironment.newEnvironment(globalSettings));
         threadPool = new TestThreadPool("LdapSessionFactoryTests thread pool");
     }
@@ -53,7 +72,8 @@ public void shutdown() throws InterruptedException {

     public void testBindWithReadTimeout() throws Exception {
         InMemoryDirectoryServer ldapServer = randomFrom(ldapServers);
-        String ldapUrl = new LDAPURL("ldap", "localhost", ldapServer.getListenPort(), null, null, null, null).toString();
+        String protocol = randomFrom("ldap", "ldaps");
+        String ldapUrl = new LDAPURL(protocol, "localhost", ldapServer.getListenPort(protocol), null, null, null, null).toString();
         String groupSearchBase = "o=sevenSeas";
         String userTemplates = "cn={0},ou=people,o=sevenSeas";

@@ -203,4 +223,53 @@ public void testGroupLookupBase() throws Exception {
             assertThat(groups, contains("cn=HMS Lydia,ou=crews,ou=groups,o=sevenSeas"));
         }
     }
+
+    /**
+     * This test connects to the in-memory LDAP server over SSL using two different CA certificates.
+     * One certificate is valid, the other is not.
+     * The path to the certificate never changes, but the contents are copied in place.
+     * If the realm's CA path is monitored for changes and the underlying SSL context is reloaded, then we will get two
+     * different outcomes (one failure, one success) depending on which file content is in place.
+     */
+    public void testSslTrustIsReloaded() throws Exception {
+        InMemoryDirectoryServer ldapServer = randomFrom(ldapServers);
+        String ldapUrl = new LDAPURL("ldaps", "localhost", ldapServer.getListenPort("ldaps"), null, null, null, null).toString();
+        String groupSearchBase = "o=sevenSeas";
+        String userTemplates = "cn={0},ou=people,o=sevenSeas";
+
+        Settings settings = Settings.builder()
+            .put(globalSettings)
+            .put(buildLdapSettings(ldapUrl, userTemplates, groupSearchBase, LdapSearchScope.SUB_TREE))
+            .build();
+
+        final Path realCa = getDataPath("/org/elasticsearch/xpack/security/authc/ldap/support/ldap-ca.crt");
+        final Path fakeCa = getDataPath("/org/elasticsearch/xpack/security/authc/ldap/support/smb_ca.crt");
+
+        final Environment environment = TestEnvironment.newEnvironment(settings);
+        RealmConfig config = new RealmConfig(REALM_IDENTIFIER, settings,
+            environment, new ThreadContext(settings));
+        LdapSessionFactory sessionFactory = new LdapSessionFactory(config, sslService, threadPool);
+        String user = "Horatio Hornblower";
+        SecureString userPass = new SecureString("pass");
+
+        final ResourceWatcherService resourceWatcher = new ResourceWatcherService(settings, threadPool);
+        new SSLConfigurationReloader(environment, sslService, resourceWatcher);
+
+        Files.copy(fakeCa, ldapCaPath, StandardCopyOption.REPLACE_EXISTING);
+        resourceWatcher.notifyNow(ResourceWatcherService.Frequency.HIGH);
+
+        UncategorizedExecutionException e =
+            expectThrows(UncategorizedExecutionException.class, () -> session(sessionFactory, user, userPass));
+        assertThat(e.getCause(), instanceOf(ExecutionException.class));
+        assertThat(e.getCause().getCause(), instanceOf(LDAPException.class));
+        assertThat(e.getCause().getCause().getMessage(), containsString("SSLPeerUnverifiedException"));
+
+        Files.copy(realCa, ldapCaPath, StandardCopyOption.REPLACE_EXISTING);
+        resourceWatcher.notifyNow(ResourceWatcherService.Frequency.HIGH);
+
+        final LdapSession session = session(sessionFactory, user, userPass);
+        assertThat(session.userDn(), is("cn=Horatio Hornblower,ou=people,o=sevenSeas"));
+
+        session.close();
+    }
 }
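The trick that makes `testSslTrustIsReloaded` work is that only the bytes behind the configured CA path ever change, never the path itself. A minimal sketch of that in-place swap, assuming any watched path:

```java
// Replace the contents of a watched file without changing its path. The
// watcher sees a modification (not a delete/create), so registrations
// against the path stay valid and the SSL reloader can pick up new trust.
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.StandardCopyOption;

public final class InPlaceSwap {
    public static void swap(Path source, Path watched) throws IOException {
        Files.copy(source, watched, StandardCopyOption.REPLACE_EXISTING);
    }
}
```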
diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ldap/support/LdapTestCase.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ldap/support/LdapTestCase.java
index bba13e9ec2cac..2c0b2f7716650 100644
--- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ldap/support/LdapTestCase.java
+++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ldap/support/LdapTestCase.java
@@ -6,6 +6,8 @@
 package org.elasticsearch.xpack.security.authc.ldap.support;

 import com.unboundid.ldap.listener.InMemoryDirectoryServer;
+import com.unboundid.ldap.listener.InMemoryDirectoryServerConfig;
+import com.unboundid.ldap.listener.InMemoryListenerConfig;
 import com.unboundid.ldap.sdk.Attribute;
 import com.unboundid.ldap.sdk.LDAPConnection;
 import com.unboundid.ldap.sdk.LDAPConnectionPool;
@@ -30,6 +32,7 @@
 import org.elasticsearch.xpack.core.security.authc.ldap.support.LdapSearchScope;
 import org.elasticsearch.xpack.core.security.authc.ldap.support.SessionFactorySettings;
 import org.elasticsearch.xpack.core.security.authc.support.DnRoleMapperSettings;
+import org.elasticsearch.xpack.core.ssl.CertParsingUtils;
 import org.elasticsearch.xpack.core.ssl.SSLConfigurationSettings;
 import org.elasticsearch.xpack.core.ssl.VerificationMode;
 import org.elasticsearch.xpack.security.authc.support.DnRoleMapper;
@@ -37,7 +40,14 @@
 import org.junit.Before;
 import org.junit.BeforeClass;

+import javax.net.ssl.KeyManager;
+import javax.net.ssl.KeyManagerFactory;
+import javax.net.ssl.SSLContext;
+import javax.net.ssl.SSLServerSocketFactory;
+import javax.net.ssl.SSLSocketFactory;
+import javax.net.ssl.X509ExtendedKeyManager;
 import java.security.AccessController;
+import java.security.KeyStore;
 import java.security.PrivilegedAction;
 import java.security.PrivilegedExceptionAction;
 import java.util.ArrayList;
@@ -64,7 +74,25 @@ public static void setNumberOfLdapServers() {
     public void startLdap() throws Exception {
         ldapServers = new InMemoryDirectoryServer[numberOfLdapServers];
         for (int i = 0; i < numberOfLdapServers; i++) {
-            InMemoryDirectoryServer ldapServer = new InMemoryDirectoryServer("o=sevenSeas");
+            InMemoryDirectoryServerConfig serverConfig = new InMemoryDirectoryServerConfig("o=sevenSeas");
+            List listeners = new ArrayList<>(2);
+            listeners.add(InMemoryListenerConfig.createLDAPConfig("ldap"));
+            if (openLdapsPort()) {
+                final char[] ldapPassword = "ldap-password".toCharArray();
+                final KeyStore ks = CertParsingUtils.getKeyStoreFromPEM(
+                    getDataPath("/org/elasticsearch/xpack/security/authc/ldap/support/ldap-test-case.crt"),
+                    getDataPath("/org/elasticsearch/xpack/security/authc/ldap/support/ldap-test-case.key"),
+                    ldapPassword
+                );
+                X509ExtendedKeyManager keyManager = CertParsingUtils.keyManager(ks, ldapPassword, KeyManagerFactory.getDefaultAlgorithm());
+                final SSLContext context = SSLContext.getInstance("TLSv1.2");
+                context.init(new KeyManager[] { keyManager }, null, null);
+                SSLServerSocketFactory serverSocketFactory = context.getServerSocketFactory();
+                SSLSocketFactory clientSocketFactory = context.getSocketFactory();
+                listeners.add(InMemoryListenerConfig.createLDAPSConfig("ldaps", null, 0, serverSocketFactory, clientSocketFactory));
+            }
+            serverConfig.setListenerConfigs(listeners);
+            InMemoryDirectoryServer ldapServer = new InMemoryDirectoryServer(serverConfig);
             ldapServer.add("o=sevenSeas", new Attribute("dc", "UnboundID"),
                 new Attribute("objectClass", "top", "domain", "extensibleObject"));
             ldapServer.importFromLDIF(false,
@@ -78,6 +106,10 @@ public void startLdap() throws Exception {
         }
     }

+    protected boolean openLdapsPort() {
+        return false;
+    }
+
     @After
     public void stopLdap() throws Exception {
         for (int i = 0; i < numberOfLdapServers; i++) {
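`LdapTestCase` builds the LDAPS key material from PEM files via the x-pack `CertParsingUtils` helpers. For comparison, here is a plain-JDK sketch that reaches an equivalent server-side `SSLContext` from a PKCS#12 bundle; the file name and password are hypothetical, not part of this PR:

```java
// Build an SSLContext for a test server from a PKCS#12 keystore using only
// standard JSSE APIs (no x-pack helpers). Key managers only: the server
// authenticates itself; default trust managers suffice for the test client.
import java.io.InputStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.security.KeyStore;
import javax.net.ssl.KeyManagerFactory;
import javax.net.ssl.SSLContext;

public final class LdapsContextSketch {
    public static SSLContext serverContext(Path p12, char[] password) throws Exception {
        KeyStore keyStore = KeyStore.getInstance("PKCS12");
        try (InputStream in = Files.newInputStream(p12)) {
            keyStore.load(in, password); // e.g. "ldap-test-case.p12" / "ldap-password"
        }
        KeyManagerFactory kmf = KeyManagerFactory.getInstance(KeyManagerFactory.getDefaultAlgorithm());
        kmf.init(keyStore, password);
        SSLContext context = SSLContext.getInstance("TLSv1.2");
        context.init(kmf.getKeyManagers(), null, null);
        return context;
    }
}
```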
diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/support/CachingUsernamePasswordRealmTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/support/CachingUsernamePasswordRealmTests.java
index 5bce054b42f62..2fed720e23c09 100644
--- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/support/CachingUsernamePasswordRealmTests.java
+++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/support/CachingUsernamePasswordRealmTests.java
@@ -58,13 +58,13 @@ public void setup() {
     }

     @After
-    public void stop() throws InterruptedException {
+    public void stop() {
         if (threadPool != null) {
             terminate(threadPool);
         }
     }

-    public void testCacheSettings() throws Exception {
+    public void testCacheSettings() {
         String cachingHashAlgo = Hasher.values()[randomIntBetween(0, Hasher.values().length - 1)].name().toLowerCase(Locale.ROOT);
         int maxUsers = randomIntBetween(10, 100);
         TimeValue ttl = TimeValue.timeValueMinutes(randomIntBetween(10, 20));
@@ -352,7 +352,7 @@ private void sleepUntil(long until) throws InterruptedException {
         }
     }

-    public void testAuthenticateContract() throws Exception {
+    public void testAuthenticateContract() {
         Realm realm = new FailingAuthenticationRealm(globalSettings, threadPool);
         PlainActionFuture future = new PlainActionFuture<>();
         realm.authenticate(new UsernamePasswordToken("user", new SecureString("pass")), future);
@@ -366,7 +366,7 @@ public void testAuthenticateContract() throws Exception {
         assertThat(e.getMessage(), containsString("whatever exception"));
     }

-    public void testLookupContract() throws Exception {
+    public void testLookupContract() {
         Realm realm = new FailingAuthenticationRealm(globalSettings, threadPool);
         PlainActionFuture future = new PlainActionFuture<>();
         realm.lookupUser("user", future);
@@ -380,7 +380,7 @@ public void testLookupContract() throws Exception {
         assertThat(e.getMessage(), containsString("lookup exception"));
     }

-    public void testReturnDifferentObjectFromCache() throws Exception {
+    public void testReturnDifferentObjectFromCache() {
         final AtomicReference userArg = new AtomicReference<>();
         final AtomicReference result = new AtomicReference<>();
         Realm realm = new AlwaysAuthenticateCachingRealm(globalSettings, threadPool) {
@@ -473,6 +473,89 @@ protected void doLookupUser(String username, ActionListener listener) {
         assertEquals(1, authCounter.get());
     }

+    public void testUnauthenticatedResultPropagatesWithSameCreds() throws Exception {
+        final String username = "username";
+        final SecureString password = SecuritySettingsSourceField.TEST_PASSWORD_SECURE_STRING;
+        final AtomicInteger authCounter = new AtomicInteger(0);
+        final Hasher pwdHasher = Hasher.resolve(randomFrom("pbkdf2", "pbkdf2_1000", "bcrypt", "bcrypt9"));
+        final String passwordHash = new String(pwdHasher.hash(password));
+        RealmConfig config = new RealmConfig(new RealmConfig.RealmIdentifier("caching", "test_realm"), globalSettings,
+            TestEnvironment.newEnvironment(globalSettings), new ThreadContext(Settings.EMPTY));
+
+        final int numberOfProcessors = Runtime.getRuntime().availableProcessors();
+        final int numberOfThreads = scaledRandomIntBetween((numberOfProcessors + 1) / 2, numberOfProcessors * 3);
+        List threads = new ArrayList<>(numberOfThreads);
+        final SecureString credsToUse = new SecureString(randomAlphaOfLength(12).toCharArray());
+
+        // We use several latches here: `latch` ensures all threads have been started before
+        // they begin to execute. `authWaitLatch` ensures every thread is waiting on the
+        // listener before we authenticate; otherwise we may hit a race where we authenticate
+        // while one of the threads is not yet waiting. Finally, `completedLatch` signals that
+        // each thread received a response.
+        final CountDownLatch latch = new CountDownLatch(1 + numberOfThreads);
+        final CountDownLatch authWaitLatch = new CountDownLatch(numberOfThreads);
+        final CountDownLatch completedLatch = new CountDownLatch(numberOfThreads);
+        final CachingUsernamePasswordRealm realm = new CachingUsernamePasswordRealm(config, threadPool) {
+            @Override
+            protected void doAuthenticate(UsernamePasswordToken token, ActionListener listener) {
+                authCounter.incrementAndGet();
+                authWaitLatch.countDown();
+                try {
+                    authWaitLatch.await();
+                } catch (InterruptedException e) {
+                    logger.info("authentication was interrupted", e);
+                    Thread.currentThread().interrupt();
+                }
+                // do something slow
+                if (pwdHasher.verify(token.credentials(), passwordHash.toCharArray())) {
+                    listener.onFailure(new IllegalStateException("password auth should never succeed"));
+                } else {
+                    listener.onResponse(AuthenticationResult.unsuccessful("password verification failed", null));
+                }
+            }
+
+            @Override
+            protected void doLookupUser(String username, ActionListener listener) {
+                listener.onFailure(new UnsupportedOperationException("this method should not be called"));
+            }
+        };
+        for (int i = 0; i < numberOfThreads; i++) {
+            threads.add(new Thread(() -> {
+                try {
+                    latch.countDown();
+                    latch.await();
+                    final UsernamePasswordToken token = new UsernamePasswordToken(username, credsToUse);
+
+                    realm.authenticate(token, ActionListener.wrap((result) -> {
+                        if (result.isAuthenticated()) {
+                            completedLatch.countDown();
+                            throw new IllegalStateException("invalid password led to an authenticated result: " + result);
+                        }
+                        assertThat(result.getMessage(), containsString("password verification failed"));
+                        completedLatch.countDown();
+                    }, (e) -> {
+                        logger.error("caught exception", e);
+                        completedLatch.countDown();
+                        fail("unexpected exception - " + e);
+                    }));
+                    authWaitLatch.countDown();
+                } catch (InterruptedException e) {
+                    logger.error("thread was interrupted", e);
+                    Thread.currentThread().interrupt();
+                }
+            }));
+        }
+
+        for (Thread thread : threads) {
+            thread.start();
+        }
+        latch.countDown();
+        for (Thread thread : threads) {
+            thread.join();
+        }
+        completedLatch.await();
+        assertEquals(1, authCounter.get());
+    }
+
     public void testCacheConcurrency() throws Exception {
         final String username = "username";
         final SecureString password = SecuritySettingsSourceField.TEST_PASSWORD_SECURE_STRING;
@@ -704,27 +787,4 @@ protected void doLookupUser(String username, ActionListener listener) {
             listener.onResponse(new User(username, new String[]{"lookupRole1", "lookupRole2"}));
         }
     }
-
-    static class LookupNotSupportedRealm extends CachingUsernamePasswordRealm {
-
-        public final AtomicInteger authInvocationCounter = new AtomicInteger(0);
-        public final AtomicInteger lookupInvocationCounter = new AtomicInteger(0);
-
-        LookupNotSupportedRealm(Settings globalSettings, ThreadPool threadPool) {
-            super(new RealmConfig(new RealmConfig.RealmIdentifier("caching", "lookup-notsupported-test"), globalSettings,
-                TestEnvironment.newEnvironment(globalSettings), threadPool.getThreadContext()), threadPool);
-        }
-
-        @Override
-        protected void doAuthenticate(UsernamePasswordToken token, ActionListener listener) {
-            authInvocationCounter.incrementAndGet();
-            listener.onResponse(AuthenticationResult.success(new User(token.principal(), new String[]{"testRole1", "testRole2"})));
-        }
-
-        @Override
-        protected void doLookupUser(String username, ActionListener listener) {
-            lookupInvocationCounter.incrementAndGet();
-            listener.onFailure(new UnsupportedOperationException("don't call lookup if lookup isn't supported!!!"));
-        }
-    }
 }
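The three latches in `testUnauthenticatedResultPropagatesWithSameCreds` are the load-bearing part of that test. A generic sketch of the same choreography, with illustrative names and no realm involved:

```java
// A start gate so no worker runs early, a rendezvous inside the critical
// section so all workers arrive before any proceeds, and a completion latch
// so the test asserts only after every worker has reported back.
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.atomic.AtomicInteger;

public final class LatchChoreography {
    public static int runWorkers(int numberOfThreads) throws InterruptedException {
        CountDownLatch startGate = new CountDownLatch(1);
        CountDownLatch rendezvous = new CountDownLatch(numberOfThreads);
        CountDownLatch completed = new CountDownLatch(numberOfThreads);
        AtomicInteger workDone = new AtomicInteger();

        for (int i = 0; i < numberOfThreads; i++) {
            new Thread(() -> {
                try {
                    startGate.await();          // nobody runs until the test says go
                    rendezvous.countDown();     // announce arrival...
                    rendezvous.await();         // ...and wait for every peer
                    workDone.incrementAndGet(); // the contended section
                } catch (InterruptedException e) {
                    Thread.currentThread().interrupt();
                } finally {
                    completed.countDown();      // always signal, even on failure
                }
            }).start();
        }
        startGate.countDown();
        completed.await();
        return workDone.get();
    }
}
```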
diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/support/TokensInvalidationResultTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/support/TokensInvalidationResultTests.java
index 06c9411d0bc1f..f180e356b767c 100644
--- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/support/TokensInvalidationResultTests.java
+++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/support/TokensInvalidationResultTests.java
@@ -32,8 +32,7 @@ public void testToXcontent() throws Exception{
         result.toXContent(builder, ToXContent.EMPTY_PARAMS);
         assertThat(Strings.toString(builder),
             equalTo(
-                "{\"created\":false," +
-                "\"invalidated_tokens\":2," +
+                "{\"invalidated_tokens\":2," +
                 "\"previously_invalidated_tokens\":2," +
                 "\"error_count\":2," +
                 "\"error_details\":[" +
@@ -64,8 +63,7 @@ public void testToXcontentWithNoErrors() throws Exception{
         result.toXContent(builder, ToXContent.EMPTY_PARAMS);
         assertThat(Strings.toString(builder),
             equalTo(
-                "{\"created\":true," +
-                "\"invalidated_tokens\":2," +
+                "{\"invalidated_tokens\":2," +
                 "\"previously_invalidated_tokens\":0," +
                 "\"error_count\":0" +
                 "}"));
diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/nio/SSLChannelContextTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/nio/SSLChannelContextTests.java
index 4fdfb196d034e..0870124022850 100644
--- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/nio/SSLChannelContextTests.java
+++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/nio/SSLChannelContextTests.java
@@ -12,6 +12,7 @@
 import org.elasticsearch.nio.InboundChannelBuffer;
 import org.elasticsearch.nio.NioSelector;
 import org.elasticsearch.nio.NioSocketChannel;
+import org.elasticsearch.nio.TaskScheduler;
 import org.elasticsearch.nio.WriteOperation;
 import org.elasticsearch.test.ESTestCase;
 import org.junit.Before;
@@ -26,9 +27,11 @@
 import java.util.function.Consumer;

 import static org.mockito.Matchers.any;
+import static org.mockito.Matchers.anyLong;
 import static org.mockito.Matchers.same;
 import static org.mockito.Mockito.doAnswer;
 import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.never;
 import static org.mockito.Mockito.times;
 import static org.mockito.Mockito.verify;
 import static org.mockito.Mockito.when;
@@ -41,6 +44,7 @@ public class SSLChannelContextTests extends ESTestCase {
     private SSLChannelContext context;
     private InboundChannelBuffer channelBuffer;
     private NioSelector selector;
+    private TaskScheduler nioTimer;
     private BiConsumer listener;
     private Consumer exceptionHandler;
     private SSLDriver sslDriver;
@@ -56,6 +60,7 @@ public void init() {
         messageLength = randomInt(96) + 20;

         selector = mock(NioSelector.class);
+        nioTimer = mock(TaskScheduler.class);
         listener = mock(BiConsumer.class);
         channel = mock(NioSocketChannel.class);
         rawChannel = mock(SocketChannel.class);
@@ -66,6 +71,7 @@
         context = new SSLChannelContext(channel, selector, exceptionHandler, sslDriver, readWriteHandler, channelBuffer);

         when(selector.isOnCurrentThread()).thenReturn(true);
+        when(selector.getTaskScheduler()).thenReturn(nioTimer);
         when(sslDriver.getNetworkReadBuffer()).thenReturn(readBuffer);
         when(sslDriver.getNetworkWriteBuffer()).thenReturn(writeBuffer);
         ByteBuffer buffer = ByteBuffer.allocate(1 << 14);
@@ -334,6 +340,44 @@ public void testReadyToCloseIfDriverIndicateClosed() {
         assertTrue(context.selectorShouldClose());
     }

+    public void testCloseTimeout() {
+        context.closeChannel();
+
+        ArgumentCaptor captor = ArgumentCaptor.forClass(WriteOperation.class);
+        verify(selector).writeToChannel(captor.capture());
+
+        ArgumentCaptor taskCaptor = ArgumentCaptor.forClass(Runnable.class);
+        Runnable cancellable = mock(Runnable.class);
+        when(nioTimer.scheduleAtRelativeTime(taskCaptor.capture(), anyLong())).thenReturn(cancellable);
+        context.queueWriteOperation(captor.getValue());
+        verify(nioTimer).scheduleAtRelativeTime(taskCaptor.capture(), anyLong());
+        assertFalse(context.selectorShouldClose());
+        taskCaptor.getValue().run();
+        assertTrue(context.selectorShouldClose());
+        verify(selector).queueChannelClose(channel);
+        verify(cancellable, never()).run();
+    }
+
+    @SuppressWarnings("unchecked")
+    public void testCloseTimeoutIsCancelledOnClose() throws IOException {
+        try (SocketChannel realChannel = SocketChannel.open()) {
+            when(channel.getRawChannel()).thenReturn(realChannel);
+            TestReadWriteHandler readWriteHandler = new TestReadWriteHandler(readConsumer);
+            context = new SSLChannelContext(channel, selector, exceptionHandler, sslDriver, readWriteHandler, channelBuffer);
+            context.closeChannel();
+            ArgumentCaptor captor = ArgumentCaptor.forClass(WriteOperation.class);
+            verify(selector).writeToChannel(captor.capture());
+            ArgumentCaptor taskCaptor = ArgumentCaptor.forClass(Runnable.class);
+            Runnable cancellable = mock(Runnable.class);
+            when(nioTimer.scheduleAtRelativeTime(taskCaptor.capture(), anyLong())).thenReturn(cancellable);
+            context.queueWriteOperation(captor.getValue());
+
+            when(channel.isOpen()).thenReturn(true);
+            context.closeFromSelector();
+            verify(cancellable).run();
+        }
+    }
+
     public void testInitiateCloseFromDifferentThreadSchedulesCloseNotify() {
         when(selector.isOnCurrentThread()).thenReturn(false, true);
         context.closeChannel();
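Note how `testCloseTimeout` never waits on a real clock: it captures the `Runnable` handed to the mocked `TaskScheduler` and runs it directly to simulate the timeout firing. A generic sketch of that captor trick, with an illustrative `Scheduler` interface standing in for the nio classes:

```java
// Capture the task given to a mocked scheduler, then run it by hand --
// deterministic "timer fired" behaviour with no sleeping.
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.verify;
import org.mockito.ArgumentCaptor;
import org.mockito.Mockito;

interface Scheduler { void schedule(Runnable task, long relativeNanos); }

public class TimerCaptorSketch {
    public static void simulateTimeout() {
        Scheduler scheduler = mock(Scheduler.class);

        // Code under test would normally do this when a close is queued:
        scheduler.schedule(() -> System.out.println("close forced"), 1_000_000L);

        ArgumentCaptor<Runnable> task = ArgumentCaptor.forClass(Runnable.class);
        verify(scheduler).schedule(task.capture(), Mockito.anyLong());
        task.getValue().run(); // the "timeout" fires exactly when the test wants
    }
}
```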
diff --git a/x-pack/plugin/security/src/test/resources/org/elasticsearch/xpack/security/authc/ldap/support/ldap-ca.crt b/x-pack/plugin/security/src/test/resources/org/elasticsearch/xpack/security/authc/ldap/support/ldap-ca.crt
new file mode 100644
index 0000000000000..3083656002253
--- /dev/null
+++ b/x-pack/plugin/security/src/test/resources/org/elasticsearch/xpack/security/authc/ldap/support/ldap-ca.crt
@@ -0,0 +1,20 @@
+-----BEGIN CERTIFICATE-----
+MIIDSjCCAjKgAwIBAgIVAM5ozCjWHrKyM5Yf/WzUJg/ei3YMMA0GCSqGSIb3DQEB
+CwUAMDQxMjAwBgNVBAMTKUVsYXN0aWMgQ2VydGlmaWNhdGUgVG9vbCBBdXRvZ2Vu
+ZXJhdGVkIENBMB4XDTE4MTIyMTA3NDUyOFoXDTQ2MDUwNzA3NDUyOFowNDEyMDAG
+A1UEAxMpRWxhc3RpYyBDZXJ0aWZpY2F0ZSBUb29sIEF1dG9nZW5lcmF0ZWQgQ0Ew
+ggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQC3QoZjXdPIwfa6y8YehqSF
+4yxeFW+mDQi6soNqGJGjCJj7IN630Gcx4/4smL32mLVk6RwGxS555z+FP3Gt/NLf
+mMc4GWwCAl+l+tAhBPuMtACZwNrKINRP/DdIaVKIKlQWh6bu7WJoyJriyUqN9US6
+ki54G/wfY7NoKhvWoj4zKbQg5lW5XADd2EFgoz9wkRrB2UzyuUYbBmJye/dnZnXn
+mo4Cgwd6kQ/8+VMzcxDFa6jh2TXmb3zIyShe0fiYShPNicScGLhxXHbWhczl3Fy9
+E+x4ksZrqsb6c0WAOHCsgmLZwd5h2lk4+WPg5tf7Va1uZ5ETH8CMFRsbrVb2L93t
+AgMBAAGjUzBRMB0GA1UdDgQWBBROJaHRWe17um5rqqYn10aqedr55DAfBgNVHSME
+GDAWgBROJaHRWe17um5rqqYn10aqedr55DAPBgNVHRMBAf8EBTADAQH/MA0GCSqG
+SIb3DQEBCwUAA4IBAQAm6l1TKmTh5HcwRhBpYWJSQOTq1CVKPGUBvdUCKQGFPmcE
+S5NYf4zvJisuUAB5ycUCPno4nWQ3jEXfITsysg0WypRWuZf61P4th6RKtzm4nP6a
+G+n1CtjIUN5mp0NTgBUeOL0aIXAPuWdQaVx9Q8JAV4N/w9B9n0LPvQ6j/ZtltvXE
+s6WyQTnSX6wAuxk0qxePszI2ZICeukp85Q3XjXOFTODbmT3rbANpKFJaaH7jBYqV
+XHVo38zVx4UBGnZVAs0MH2pcGp1hWpq2p/cXjxi4IaGofKt9/CbUgTAFJnEkrSRP
+2C5LrbRaaj1zECnwVmTnx1L9j/g7Ti83P+kdi7rI
+-----END CERTIFICATE-----
diff --git a/x-pack/plugin/security/src/test/resources/org/elasticsearch/xpack/security/authc/ldap/support/ldap-ca.key b/x-pack/plugin/security/src/test/resources/org/elasticsearch/xpack/security/authc/ldap/support/ldap-ca.key
new file mode 100644
index 0000000000000..c7e7f6cb5c128
--- /dev/null
+++ b/x-pack/plugin/security/src/test/resources/org/elasticsearch/xpack/security/authc/ldap/support/ldap-ca.key
@@ -0,0 +1,27 @@
+-----BEGIN RSA PRIVATE KEY-----
+MIIEpAIBAAKCAQEAt0KGY13TyMH2usvGHoakheMsXhVvpg0IurKDahiRowiY+yDe
+t9BnMeP+LJi99pi1ZOkcBsUueec/hT9xrfzS35jHOBlsAgJfpfrQIQT7jLQAmcDa
+yiDUT/w3SGlSiCpUFoem7u1iaMia4slKjfVEupIueBv8H2OzaCob1qI+Mym0IOZV
+uVwA3dhBYKM/cJEawdlM8rlGGwZicnv3Z2Z155qOAoMHepEP/PlTM3MQxWuo4dk1
+5m98yMkoXtH4mEoTzYnEnBi4cVx21oXM5dxcvRPseJLGa6rG+nNFgDhwrIJi2cHe
+YdpZOPlj4ObX+1WtbmeREx/AjBUbG61W9i/d7QIDAQABAoIBABic5LO/zEvwURTx
+fWBoMPyScEkKk/43Te7VPvUm65h79R/1YDRL1zBKML8InKrcA7DT5iG6pe1Vc6CP
+ztLRW/kP6eHM+EakzvfZ4c4tfyN8oYAE+N2g3yMG+t3M13rWRIjqGy+HzmnIV5UR
+9+NtB5gPPhJ/n7MPju70iNyg2b3BJ0LozgboT+/7UGJAHTnla83BTJ5prVwrBVhX
+eCLokYnfttI6swoZci84qONIAhRt5WSE98XZZa8ESmJzJ2vV2dAHaWXQTRKlRsQ8
+1FKkhbU60+L5eFMokvrHquWmYGjdE8Kow+NDTx00AdANKTWBwUjYj4kA0qR3yK6g
+1Ny7PbkCgYEA/KPMqSJi/2cq9gDiVIL1LPGxfMEUID9+1yAwZ1sDH1+iCKkYYqCF
+0miy1mm+H3SFKcstZd0+Ck2TvRqFHPb7g7PNH5oMZfSsxEsmGFr+TS/ZpMUKuXJ8
+68A6oRwWyycdwgTVdbG4iBYGv9Vs8tE3VdhcEvQHAloVrEad+dwn0o8CgYEAubJ/
+RHnvyl6PCT+/Ek7ZIYyIEkgl0swajOkaR6z4lewK4cfkh0djAjpSd43xDKR9Rk3N
+8viyiXIilvzw1sX5ag/QCAiSYANHPmVX5PQ+jWCnqam4PBJXCSQoEtCjEelIkqVx
+Tusjb0gzhwRZ7IhS2Gl+A/rnxnnS16PdEAp/VcMCgYEA6LDNbfKYD/kr3o0N6Rz9
+SLoL6YXETbdt0iJ5sphnFdx1V1i3dw+2cgewwD+At2QQyl+ynqHZ5I9zRbdJZ1Ys
+bi+K/FJcnQNwpRM6MTCODPXHljVOHWRPnqvc1EsUy2Rpyiu9l7tq5Ry0drfSswrz
+1oOCuoo8cnQahiQ8mMenfg0CgYAKBC3HNMiRYt5WQmD9DNG7dIgWbFvV7fp0pVIs
+kZDrDUtc+FpETb5ybVDrb/WTl//F3gaA15dRwJ1LBaO8Afu9E9NFy6iRkzuxiufd
+yqrhF1iT1zq/ysF1FcUvlp9lJO8sMc5V0msb4ooc+0gacRP+5lnMvyjnVMThqs4O
+wnIx3wKBgQCUSKcmfp4ROe8PHOZ4pC6MTo/fPAjBIav3Yd+PcJ6yodtk8/DVkQww
+ssvGP6TuBhOXQdfeLd6PHclMQGMMe2cRdCYJNWUF1LC5ae9Il+NZjjZsHNXjPaun
+/gHCDOI0oh0Wu7j8/QtCxIO8+6GJyAOUE3f/amqpUa+U60mqPzS99A==
+-----END RSA PRIVATE KEY-----
diff --git a/x-pack/plugin/security/src/test/resources/org/elasticsearch/xpack/security/authc/ldap/support/ldap-test-case.crt b/x-pack/plugin/security/src/test/resources/org/elasticsearch/xpack/security/authc/ldap/support/ldap-test-case.crt
new file mode 100644
index 0000000000000..b291aaa9362de
--- /dev/null
+++ b/x-pack/plugin/security/src/test/resources/org/elasticsearch/xpack/security/authc/ldap/support/ldap-test-case.crt
@@ -0,0 +1,20 @@
+-----BEGIN CERTIFICATE-----
+MIIDRTCCAi2gAwIBAgIVAJpxxIbXWyvdd6/rIFXPgWe6fyvTMA0GCSqGSIb3DQEB
+CwUAMDQxMjAwBgNVBAMTKUVsYXN0aWMgQ2VydGlmaWNhdGUgVG9vbCBBdXRvZ2Vu
+ZXJhdGVkIENBMB4XDTE4MTIyMTA3NDY1NVoXDTQ2MDUwNzA3NDY1NVowGTEXMBUG
+A1UEAxMObGRhcC10ZXN0LWNhc2UwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEK
+AoIBAQCswbyZDtaghZXsPhs1+lqCnq5HmRT2P6Drrs9bJlABeql29IhzdHOGLr+l
+TMhKOUpHuphgC31qbf/GvQLS65qdOzTjNfLv93+Jj0gp4S7Q6eRZvn1ihUgzECHa
+zTYwIlzVs4sFPm5i2fQbDK7W6zQm+h9r6GjCYj01OeIAe7rbRI9Ar+svuHGfZnaQ
+HzLZlfYkkM2bCaXBgKWVwmEUmwMW+IMOPCrVm+gk1MDbGnu9KtY/LqrJcddsqOdk
+K8qJ0Lpchg3zlP4qIzbmWRyTUIy1USbcazjuC/vMmN4fr/Xr0Jrhi4Rw8l2LGdyA
+8qnqtKYTqMzo3uv1ESlER8EAZDUbAgMBAAGjaTBnMB0GA1UdDgQWBBQaiCDScfBa
+jHOSk04XOymffbLBxTAfBgNVHSMEGDAWgBROJaHRWe17um5rqqYn10aqedr55DAa
+BgNVHREEEzARgglsb2NhbGhvc3SHBH8AAAEwCQYDVR0TBAIwADANBgkqhkiG9w0B
+AQsFAAOCAQEAXBovNqVg+VQ1LR0PfEMpbgbQlekky8qY2y1tz7J0ntGepAq+Np6n
+7J9En6ty1ELZUvgPUCF2btQqZbv8uyHz/C+rojKC5xzHN5qbZ31o5/0I/kNase1Z
+NbXuNJe3wAXuz+Mj5rtuOGZvlFsbtocuoydVYOclfqjUXcoZtqCcRamSvye7vGl2
+CHPqDi0uK8d75nE9Jrnmz/BNNV7CjPg636PJmCUrLL21+t69ZFL1eGAFtLBmmjcw
+cMkyv9bJirjZbjt/9UB+fW9XzV3RVLAzfrIHtToupXmWc4+hTOnlbKfFwqB9fa7Y
+XcCfGrZoJg9di1HbJrSJmv5QgRTM+/zkrA==
+-----END CERTIFICATE-----
diff --git a/x-pack/plugin/security/src/test/resources/org/elasticsearch/xpack/security/authc/ldap/support/ldap-test-case.key b/x-pack/plugin/security/src/test/resources/org/elasticsearch/xpack/security/authc/ldap/support/ldap-test-case.key
new file mode 100644
index 0000000000000..dfa7e7038e91b
--- /dev/null
+++ b/x-pack/plugin/security/src/test/resources/org/elasticsearch/xpack/security/authc/ldap/support/ldap-test-case.key
@@ -0,0 +1,30 @@
+-----BEGIN RSA PRIVATE KEY-----
+Proc-Type: 4,ENCRYPTED
+DEK-Info: DES-EDE3-CBC,F7442C1F07A3A829
+
+jIINPsJbBILgZPf/MXVO5FPESwscOXnP9hQ6RpiQnxs3OlFWHqcuc0xw8jUeTGkS
+TJP/01p5PsqxA+keUAShgPGHHvLGZjKiIv/INAWk6Blm3Ic8hRYeTGfvJztSC+gk
+tFOj2vBJpSkTwdb9nk4vfGNYvhx4PZOrNqOFMXlgxQfioE/vWwHHHpx52niABQeh
+rAJKqW84oGmJl0cOxGkIblSgt6roQ0W+YzAQCTlpPLide1sW1daWRQgh4J+nwGLl
+WhoiGDTZxwmiOI0CgSSiaFwb4gmX8oyZq5MIVvN3QyQCAJYhiL+gJ4iNBE0mBZKW
+qF+5+8xerEPQ94Psox6PYMv/nJKSxv4yU57dx3Qp9qK0vJQTTK/T6sECom8gJ1XG
+yr/ZzeN3iAECK6rYnG+GGePN4iyBncBHGztbGzjcmPujbMQ96bpWAChNngOR9TUx
+sUufIwDR7Bw+Bwi7ybRqNkB+OAlzs8ioiMSQho5gR2YkVG6uFGYeIv5v67jXk62i
+kPiNLDBSpHRUa2CAwnlYmvDLo6VyDCVhkglPERFki9YYv9akAXExQ0+8UKXA43gH
+KXhqByiv4fw6h9q85T/n7CHXqsiDmrsHrlwun4a9ge5cHPnFrBsCZXcw0ZVEFtpQ
+VR1e0ELMBpdgLND8HtX95a/f1JRgX7AllG/egie3dfMeMUHEKVKJDRof2PVIuYqg
+hYFlLZXowHWvlYVwbEEgeR07n2TC9sD1UmEOHG1s9UV1iv0iPgJObTX8V1wC0lDx
+hq5TXMk3BAApHQZlOhuNLCabw6vOT1ijVoriWjpTxMhr9twYWo1lQ7QdHFft8HO8
+Ut0z7IPSTYvgj3IcE3CUrPqRYtqRimR4VFbafUZFM1UwiF7Qca5tkxAiGZWzzEYk
+hRZYaCwAruexbEVJx87a8TxV34h7gAviNeFSSzTeNVG3VXZlvGSWY23AlVZn5tFE
+fekxFB20T93u1XWKS8k5He0D0Pb39nuJrBOkZv+c0e5daBkAw3QMM2lYIM5iGJd9
+UehpzOLBR1qLmILL7k8dscebJ5HPKxGdBDE1PMdwkujydUvBSyenssWMosCglvl4
+Nso1kckc09FGUL9JRNFuhzrC0eP2+kzRVJ7upwr5SBHgOPpy99q0ugkzkSYDGf4c
+OpBOiAjbF8xwK1O+tV+yl3B8JMBEQEdePvjZ3WcqL6aYIaakUaSMKHa0zYS3pOFi
+zOR0Y2d69KhbSckTgh71gXpbT0ym1EUhpALbQskJ7StL/hU4AguWwOYpfuAYVvHt
+3BxMTZHqZaQCEMQFQWUIt7ZgtUoe2Lab1gx+q+gpUYaoCGRSYa0+H4cSQvIgiIsh
+9LU+deFLu6jF3lWlwQO5ZVxM0K95SwTj2eBpkYuonDIs7tUW7LIksM+8sxW+DCiA
+fBkFMdcOkiFV9oSTWI1HqpiePSJTOmHPZvYVqJ/ZDNBk6xSfPOH04TYNMEJqDf7W
+KQ2BSFXSiRxi4RniYyYzFYuFM/Fo4V3CTIGX2r4R8Jfb9t4Nn2Db6+r7RknDaR5k
+nhHQXwB+hbKNQSJlc2nMpG4MbwSO1axWio3yELRSCN+yFP6z8cobIw==
+-----END RSA PRIVATE KEY-----
diff --git a/x-pack/plugin/sql/build.gradle b/x-pack/plugin/sql/build.gradle
index 3cde541d304d5..f5dc3175d4162 100644
--- a/x-pack/plugin/sql/build.gradle
+++ b/x-pack/plugin/sql/build.gradle
@@ -1,3 +1,5 @@
+import com.carrotsearch.gradle.junit4.RandomizedTestingTask
+
 evaluationDependsOn(xpackModule('core'))

 apply plugin: 'elasticsearch.esplugin'
@@ -18,6 +20,12 @@ archivesBaseName = 'x-pack-sql'
 // All integration tests live in qa modules
 integTest.enabled = false

+task internalClusterTest(type: RandomizedTestingTask,
+  group: JavaBasePlugin.VERIFICATION_GROUP
+) {
+  include '**/*IT.class'
+}
+
 dependencies {
   // "org.elasticsearch.plugin:x-pack-core:${version}" doesn't work with idea because the testArtifacts are also here
   compileOnly project(path: xpackModule('core'), configuration: 'default')
diff --git a/x-pack/plugin/sql/jdbc/src/test/java/org/elasticsearch/xpack/sql/jdbc/JdbcPreparedStatementTests.java b/x-pack/plugin/sql/jdbc/src/test/java/org/elasticsearch/xpack/sql/jdbc/JdbcPreparedStatementTests.java
index ef674d30da965..50143f729370f 100644
--- a/x-pack/plugin/sql/jdbc/src/test/java/org/elasticsearch/xpack/sql/jdbc/JdbcPreparedStatementTests.java
+++ b/x-pack/plugin/sql/jdbc/src/test/java/org/elasticsearch/xpack/sql/jdbc/JdbcPreparedStatementTests.java
@@ -5,10 +5,8 @@
  */
 package org.elasticsearch.xpack.sql.jdbc;

+import org.elasticsearch.common.logging.LoggerMessageFormat;
 import org.elasticsearch.test.ESTestCase;
-import org.elasticsearch.xpack.sql.jdbc.EsType;
-import org.elasticsearch.xpack.sql.jdbc.JdbcConfiguration;
-import org.elasticsearch.xpack.sql.jdbc.JdbcPreparedStatement;

 import java.net.URL;
 import java.nio.charset.StandardCharsets;
@@ -287,12 +285,12 @@ public void testThrownExceptionsWhenSettingFloatValues() throws SQLException {
         Float floatNotInt = 5_155_000_000f;

         sqle = expectThrows(SQLException.class, () -> jps.setObject(1, floatNotInt, Types.INTEGER));
-        assertEquals(String.format(Locale.ROOT, "Numeric %s out of range",
-            Long.toString(Math.round(floatNotInt.doubleValue()))), sqle.getMessage());
+        assertEquals(LoggerMessageFormat.format("Numeric {} out of range",
+            Math.round(floatNotInt.doubleValue())), sqle.getMessage());

         sqle = expectThrows(SQLException.class, () -> jps.setObject(1, floatNotInt, Types.SMALLINT));
-        assertEquals(String.format(Locale.ROOT, "Numeric %s out of range",
-            Long.toString(Math.round(floatNotInt.doubleValue()))), sqle.getMessage());
+        assertEquals(LoggerMessageFormat.format("Numeric {} out of range",
+            Math.round(floatNotInt.doubleValue())), sqle.getMessage());
     }

     public void testSettingDoubleValues() throws SQLException {
@@ -328,8 +326,8 @@ public void testThrownExceptionsWhenSettingDoubleValues() throws SQLException {
         Double doubleNotInt = 5_155_000_000d;

         sqle = expectThrows(SQLException.class, () -> jps.setObject(1, doubleNotInt, Types.INTEGER));
-        assertEquals(String.format(Locale.ROOT, "Numeric %s out of range",
-            Long.toString(((Number) doubleNotInt).longValue())), sqle.getMessage());
+        assertEquals(LoggerMessageFormat.format("Numeric {} out of range",
+            ((Number) doubleNotInt).longValue()), sqle.getMessage());
     }

     public void testUnsupportedClasses() throws SQLException {
diff --git a/x-pack/plugin/sql/qa/build.gradle b/x-pack/plugin/sql/qa/build.gradle
index 8b27119cc7e4e..abbbd6e9663f3 100644
--- a/x-pack/plugin/sql/qa/build.gradle
+++ b/x-pack/plugin/sql/qa/build.gradle
@@ -31,7 +31,7 @@ forbiddenApisMain {
   replaceSignatureFiles 'es-all-signatures', 'es-test-signatures'
 }

-thirdPartyAudit.excludes = [
+thirdPartyAudit.ignoreMissingClasses (
   // jLine's optional dependencies
   'org.apache.sshd.client.SshClient',
   'org.apache.sshd.client.auth.keyboard.UserInteraction',
@@ -72,7 +72,7 @@
   'org.fusesource.jansi.internal.Kernel32',
   'org.fusesource.jansi.internal.WindowsSupport',
   'org.mozilla.universalchardet.UniversalDetector',
-]
+)

 subprojects {
   apply plugin: 'elasticsearch.standalone-rest-test'
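Reviewer note on the `String.format` to `LoggerMessageFormat` swap above: the brace placeholders are locale-independent, so the assertions no longer need `Locale.ROOT` or an explicit `Long.toString`. A small sketch of the equivalence (assumes the Elasticsearch core jar on the classpath for `LoggerMessageFormat`):

```java
// Both calls produce "Numeric 5155000000 out of range"; the {}-style form
// needs no locale pinning and no manual number-to-string conversion.
import java.util.Locale;
import org.elasticsearch.common.logging.LoggerMessageFormat;

public class MessageFormatSketch {
    public static void main(String[] args) {
        long value = Math.round(5_155_000_000d); // same magnitude the test uses

        String viaStringFormat = String.format(Locale.ROOT, "Numeric %s out of range", Long.toString(value));
        String viaLoggerFormat = LoggerMessageFormat.format("Numeric {} out of range", value);

        System.out.println(viaStringFormat.equals(viaLoggerFormat)); // true
    }
}
```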
diff --git a/x-pack/plugin/sql/qa/multi-node/src/test/java/org/elasticsearch/xpack/sql/qa/multi_node/RestSqlMultinodeIT.java b/x-pack/plugin/sql/qa/multi-node/src/test/java/org/elasticsearch/xpack/sql/qa/multi_node/RestSqlMultinodeIT.java
index 2a4dd79001d1c..95066306cc2c2 100644
--- a/x-pack/plugin/sql/qa/multi-node/src/test/java/org/elasticsearch/xpack/sql/qa/multi_node/RestSqlMultinodeIT.java
+++ b/x-pack/plugin/sql/qa/multi-node/src/test/java/org/elasticsearch/xpack/sql/qa/multi_node/RestSqlMultinodeIT.java
@@ -83,7 +83,7 @@ public void testIndexOnWrongNode() throws IOException {
     }

     private void createTestData(int documents) throws UnsupportedCharsetException, IOException {
-        Request request = new Request("PUT", "/test/test/_bulk");
+        Request request = new Request("PUT", "/test/_bulk");
         request.addParameter("refresh", "true");

         StringBuilder bulk = new StringBuilder();
diff --git a/x-pack/plugin/sql/qa/security/build.gradle b/x-pack/plugin/sql/qa/security/build.gradle
index 9e6cc4eab2352..d239518df81fe 100644
--- a/x-pack/plugin/sql/qa/security/build.gradle
+++ b/x-pack/plugin/sql/qa/security/build.gradle
@@ -6,6 +6,9 @@
 Project mainProject = project

 group = "${group}.x-pack.qa.sql.security"

+// Tests are pushed down to subprojects and will be checked there.
+testingConventions.enabled = false
+
 subprojects {
   // Use resources from the parent project in subprojects
   sourceSets {
diff --git a/x-pack/plugin/sql/qa/security/src/test/java/org/elasticsearch/xpack/sql/qa/security/SqlSecurityTestCase.java b/x-pack/plugin/sql/qa/security/src/test/java/org/elasticsearch/xpack/sql/qa/security/SqlSecurityTestCase.java
index 1dc765e242a2e..053fe0c4775dc 100644
--- a/x-pack/plugin/sql/qa/security/src/test/java/org/elasticsearch/xpack/sql/qa/security/SqlSecurityTestCase.java
+++ b/x-pack/plugin/sql/qa/security/src/test/java/org/elasticsearch/xpack/sql/qa/security/SqlSecurityTestCase.java
@@ -141,11 +141,11 @@ public void oneTimeSetup() throws Exception {
         request.addParameter("refresh", "true");

         StringBuilder bulk = new StringBuilder();
-        bulk.append("{\"index\":{\"_index\": \"test\", \"_type\": \"doc\", \"_id\":\"1\"}\n");
+        bulk.append("{\"index\":{\"_index\": \"test\", \"_id\":\"1\"}\n");
         bulk.append("{\"a\": 1, \"b\": 2, \"c\": 3}\n");
-        bulk.append("{\"index\":{\"_index\": \"test\", \"_type\": \"doc\", \"_id\":\"2\"}\n");
+        bulk.append("{\"index\":{\"_index\": \"test\", \"_id\":\"2\"}\n");
         bulk.append("{\"a\": 4, \"b\": 5, \"c\": 6}\n");
-        bulk.append("{\"index\":{\"_index\": \"bort\", \"_type\": \"doc\", \"_id\":\"1\"}\n");
+        bulk.append("{\"index\":{\"_index\": \"bort\", \"_id\":\"1\"}\n");
         bulk.append("{\"a\": \"test\"}\n");
         request.setJsonEntity(bulk.toString());
         client().performRequest(request);
diff --git a/x-pack/plugin/sql/qa/security/src/test/java/org/elasticsearch/xpack/sql/qa/security/UserFunctionIT.java b/x-pack/plugin/sql/qa/security/src/test/java/org/elasticsearch/xpack/sql/qa/security/UserFunctionIT.java
index 5d37e38bc4cd6..639ffd17e34f3 100644
--- a/x-pack/plugin/sql/qa/security/src/test/java/org/elasticsearch/xpack/sql/qa/security/UserFunctionIT.java
+++ b/x-pack/plugin/sql/qa/security/src/test/java/org/elasticsearch/xpack/sql/qa/security/UserFunctionIT.java
@@ -203,7 +203,7 @@ private static Map toMap(Response response) throws IOException {
     }

     private void index(String... docs) throws IOException {
-        Request request = new Request("POST", "/test/test/_bulk");
+        Request request = new Request("POST", "/test/_bulk");
         request.addParameter("refresh", "true");
         StringBuilder bulk = new StringBuilder();
         for (String doc : docs) {
diff --git a/x-pack/plugin/sql/qa/single-node/src/test/java/org/elasticsearch/xpack/sql/qa/single_node/JdbcShardFailureIT.java b/x-pack/plugin/sql/qa/single-node/src/test/java/org/elasticsearch/xpack/sql/qa/single_node/JdbcShardFailureIT.java
index 487dc5f717348..f7c490cc8b490 100644
--- a/x-pack/plugin/sql/qa/single-node/src/test/java/org/elasticsearch/xpack/sql/qa/single_node/JdbcShardFailureIT.java
+++ b/x-pack/plugin/sql/qa/single-node/src/test/java/org/elasticsearch/xpack/sql/qa/single_node/JdbcShardFailureIT.java
@@ -31,7 +31,7 @@ public void createTestIndex() throws IOException {
         createTest2.addParameter("timeout", "100ms");
         client().performRequest(createTest2);

-        Request request = new Request("PUT", "/test1/doc/_bulk");
+        Request request = new Request("PUT", "/test1/_bulk");
         request.addParameter("refresh", "true");
         StringBuilder bulk = new StringBuilder();
         for (int i = 0; i < 20; i++) {
diff --git a/x-pack/plugin/sql/qa/single-node/src/test/java/org/elasticsearch/xpack/sql/qa/single_node/RestSqlIT.java b/x-pack/plugin/sql/qa/single-node/src/test/java/org/elasticsearch/xpack/sql/qa/single_node/RestSqlIT.java
index 6bbd564e0b4cc..78971d60e8ca3 100644
--- a/x-pack/plugin/sql/qa/single-node/src/test/java/org/elasticsearch/xpack/sql/qa/single_node/RestSqlIT.java
+++ b/x-pack/plugin/sql/qa/single-node/src/test/java/org/elasticsearch/xpack/sql/qa/single_node/RestSqlIT.java
@@ -5,11 +5,46 @@
  */
 package org.elasticsearch.xpack.sql.qa.single_node;

+import org.apache.http.entity.ContentType;
+import org.apache.http.entity.StringEntity;
 import org.elasticsearch.xpack.sql.qa.rest.RestSqlTestCase;

+import java.io.IOException;
+
+import static org.hamcrest.Matchers.containsString;
+
 /**
  * Integration test for the rest sql action. The one that speaks json directly to a
  * user rather than to the JDBC driver or CLI.
  */
 public class RestSqlIT extends RestSqlTestCase {
+
+    public void testErrorMessageForTranslatingQueryWithWhereEvaluatingToFalse() throws IOException {
+        index("{\"foo\":1}");
+        expectBadRequest(() -> runSql(
+            new StringEntity("{\"query\":\"SELECT * FROM test WHERE foo = 1 AND foo = 2\"}",
+                ContentType.APPLICATION_JSON), "/translate/"),
+            containsString("Cannot generate a query DSL for an SQL query that either its WHERE clause evaluates " +
+                "to FALSE or doesn't operate on a table (missing a FROM clause), sql statement: " +
+                "[SELECT * FROM test WHERE foo = 1 AND foo = 2]"));
+    }
+
+    public void testErrorMessageForTranslatingQueryWithLocalExecution() throws IOException {
+        index("{\"foo\":1}");
+        expectBadRequest(() -> runSql(
+            new StringEntity("{\"query\":\"SELECT SIN(PI())\"}",
+                ContentType.APPLICATION_JSON), "/translate/"),
+            containsString("Cannot generate a query DSL for an SQL query that either its WHERE clause evaluates " +
+                "to FALSE or doesn't operate on a table (missing a FROM clause), sql statement: [SELECT SIN(PI())]"));
+    }
+
+    public void testErrorMessageForTranslatingSQLCommandStatement() throws IOException {
+        index("{\"foo\":1}");
+        expectBadRequest(() -> runSql(
+            new StringEntity("{\"query\":\"SHOW FUNCTIONS\"}",
+                ContentType.APPLICATION_JSON), "/translate/"),
+            containsString("Cannot generate a query DSL for a special SQL command " +
+                "(e.g.: DESCRIBE, SHOW), sql statement: [SHOW FUNCTIONS]"));
+    }
 }
diff --git a/x-pack/plugin/sql/qa/src/main/java/org/elasticsearch/xpack/sql/qa/cli/FetchSizeTestCase.java b/x-pack/plugin/sql/qa/src/main/java/org/elasticsearch/xpack/sql/qa/cli/FetchSizeTestCase.java
index afc4c302995d5..84f74bcbac137 100644
--- a/x-pack/plugin/sql/qa/src/main/java/org/elasticsearch/xpack/sql/qa/cli/FetchSizeTestCase.java
+++ b/x-pack/plugin/sql/qa/src/main/java/org/elasticsearch/xpack/sql/qa/cli/FetchSizeTestCase.java
@@ -16,7 +16,7 @@
  */
 public abstract class FetchSizeTestCase extends CliIntegrationTestCase {
     public void testSelect() throws IOException {
-        Request request = new Request("PUT", "/test/doc/_bulk");
+        Request request = new Request("PUT", "/test/_bulk");
         request.addParameter("refresh", "true");
         StringBuilder bulk = new StringBuilder();
         for (int i = 0; i < 20; i++) {
diff --git a/x-pack/plugin/sql/qa/src/main/java/org/elasticsearch/xpack/sql/qa/jdbc/CsvSpecTestCase.java b/x-pack/plugin/sql/qa/src/main/java/org/elasticsearch/xpack/sql/qa/jdbc/CsvSpecTestCase.java
index abf56cee9c766..d8b6375e7ca96 100644
--- a/x-pack/plugin/sql/qa/src/main/java/org/elasticsearch/xpack/sql/qa/jdbc/CsvSpecTestCase.java
+++ b/x-pack/plugin/sql/qa/src/main/java/org/elasticsearch/xpack/sql/qa/jdbc/CsvSpecTestCase.java
@@ -42,6 +42,7 @@ public static List readScriptSpec() throws Exception {
         tests.addAll(readScriptSpec("/nested.csv-spec", parser));
         tests.addAll(readScriptSpec("/functions.csv-spec", parser));
         tests.addAll(readScriptSpec("/math.csv-spec", parser));
+        tests.addAll(readScriptSpec("/field-alias.csv-spec", parser));
         return tests;
     }
diff --git a/x-pack/plugin/sql/qa/src/main/java/org/elasticsearch/xpack/sql/qa/jdbc/DataLoader.java b/x-pack/plugin/sql/qa/src/main/java/org/elasticsearch/xpack/sql/qa/jdbc/DataLoader.java
index 53669f9de0eb9..efa39ec517eb2 100644
--- a/x-pack/plugin/sql/qa/src/main/java/org/elasticsearch/xpack/sql/qa/jdbc/DataLoader.java
+++ b/x-pack/plugin/sql/qa/src/main/java/org/elasticsearch/xpack/sql/qa/jdbc/DataLoader.java
@@ -83,7 +83,7 @@ private static void loadEmpDatasetIntoEs(RestClient client, String index, String
         createIndex.endObject();
         createIndex.startObject("mappings");
         {
-            createIndex.startObject("emp");
+            createIndex.startObject("_doc");
             {
                 createIndex.startObject("properties");
                 {
@@ -105,6 +105,10 @@ private static void loadEmpDatasetIntoEs(RestClient client, String index, String
                     if (extraFields) {
                         createIndex.startObject("extra_gender").field("type", "keyword").endObject();
+                        createIndex.startObject("extra.info.gender")
+                            .field("type", "alias")
+                            .field("path", "gender")
+                            .endObject();
                     }

                     createIndex.startObject("birth_date").field("type", "date").endObject();
@@ -153,7 +157,7 @@ private static void loadEmpDatasetIntoEs(RestClient client, String index, String
             list.add(dep);
         });

-        request = new Request("POST", "/" + index + "/emp/_bulk?refresh=wait_for");
+        request = new Request("POST", "/" + index + "/_bulk?refresh=wait_for");
         request.addParameter("refresh", "true");
         StringBuilder bulk = new StringBuilder();
         csvToLines(fileName, (titles, fields) -> {
@@ -228,7 +232,7 @@ protected static void loadLogsDatasetIntoEs(RestClient client, String index, Str
         request.setJsonEntity(Strings.toString(createIndex));
         client.performRequest(request);

-        request = new Request("POST", "/" + index + "/_doc/_bulk?refresh=wait_for");
+        request = new Request("POST", "/" + index + "/_bulk?refresh=wait_for");
         request.addParameter("refresh", "true");
         StringBuilder bulk = new StringBuilder();
         csvToLines(filename, (titles, fields) -> {
@@ -259,7 +263,7 @@ protected static void loadLibDatasetIntoEs(RestClient client, String index) thro
         createIndex.endObject();
         createIndex.startObject("mappings");
         {
-            createIndex.startObject("book");
+            createIndex.startObject("_doc");
             {
                 createIndex.startObject("properties");
                 {
@@ -276,7 +280,7 @@ protected static void loadLibDatasetIntoEs(RestClient client, String index) thro
         request.setJsonEntity(Strings.toString(createIndex));
         client.performRequest(request);

-        request = new Request("POST", "/" + index + "/book/_bulk?refresh=wait_for");
+        request = new Request("POST", "/" + index + "/_bulk?refresh=wait_for");
        request.addParameter("refresh", "true");
        StringBuilder bulk = new StringBuilder();
        csvToLines("library", (titles, fields) -> {
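The new alias field is the piece that the added `field-alias.csv-spec` exercises: reads against `extra.info.gender` resolve to `gender`. A hedged sketch of just the mapping fragment this builder code emits (assumes the ES test classpath for `XContentBuilder`; the `keyword` target type is illustrative):

```java
// Build the "alias" mapping fragment with XContentBuilder, as DataLoader does.
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.json.JsonXContent;

public class AliasMappingSketch {
    public static String aliasMapping() throws Exception {
        XContentBuilder mapping = JsonXContent.contentBuilder().startObject();
        {
            mapping.startObject("properties");
            {
                mapping.startObject("gender").field("type", "keyword").endObject();
                // alias field: queries and fetches resolve to the "path" target
                mapping.startObject("extra.info.gender")
                    .field("type", "alias")
                    .field("path", "gender")
                    .endObject();
            }
            mapping.endObject();
        }
        mapping.endObject();
        // {"properties":{"gender":{"type":"keyword"},
        //   "extra.info.gender":{"type":"alias","path":"gender"}}}
        return Strings.toString(mapping);
    }
}
```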
a/x-pack/plugin/sql/qa/src/main/java/org/elasticsearch/xpack/sql/qa/jdbc/JdbcTestUtils.java +++ b/x-pack/plugin/sql/qa/src/main/java/org/elasticsearch/xpack/sql/qa/jdbc/JdbcTestUtils.java @@ -8,6 +8,7 @@ import org.apache.logging.log4j.Logger; import org.elasticsearch.xpack.sql.action.CliFormatter; import org.elasticsearch.xpack.sql.proto.ColumnInfo; +import org.elasticsearch.xpack.sql.proto.StringUtils; import java.sql.ResultSet; import java.sql.ResultSetMetaData; @@ -134,7 +135,7 @@ public static void logLikeCLI(ResultSet rs, Logger logger) throws SQLException { logger.info("\n" + formatter.formatWithHeader(cols, data)); } - public static ZonedDateTime of(long millis) { - return ZonedDateTime.ofInstant(Instant.ofEpochMilli(millis), UTC); + public static String of(long millis) { + return StringUtils.toString(ZonedDateTime.ofInstant(Instant.ofEpochMilli(millis), UTC)); } } \ No newline at end of file diff --git a/x-pack/plugin/sql/qa/src/main/java/org/elasticsearch/xpack/sql/qa/rest/RestSqlTestCase.java b/x-pack/plugin/sql/qa/src/main/java/org/elasticsearch/xpack/sql/qa/rest/RestSqlTestCase.java index f575c5ef408aa..21faaf17c4ea1 100644 --- a/x-pack/plugin/sql/qa/src/main/java/org/elasticsearch/xpack/sql/qa/rest/RestSqlTestCase.java +++ b/x-pack/plugin/sql/qa/src/main/java/org/elasticsearch/xpack/sql/qa/rest/RestSqlTestCase.java @@ -6,7 +6,6 @@ package org.elasticsearch.xpack.sql.qa.rest; import com.fasterxml.jackson.core.io.JsonStringEncoder; - import org.apache.http.HttpEntity; import org.apache.http.entity.ContentType; import org.apache.http.entity.StringEntity; @@ -76,7 +75,7 @@ public void testBasicQuery() throws IOException { } public void testNextPage() throws IOException { - Request request = new Request("POST", "/test/test/_bulk"); + Request request = new Request("POST", "/test/_bulk"); request.addParameter("refresh", "true"); String mode = randomMode(); StringBuilder bulk = new StringBuilder(); @@ -142,7 +141,7 @@ public void testTimeZone() throws IOException { } public void testScoreWithFieldNamedScore() throws IOException { - Request request = new Request("POST", "/test/test/_bulk"); + Request request = new Request("POST", "/test/_bulk"); request.addParameter("refresh", "true"); String mode = randomMode(); StringBuilder bulk = new StringBuilder(); @@ -281,7 +280,7 @@ public void testSelectScoreInScalar() throws Exception { containsString("line 1:12: [SCORE()] cannot be an argument to a function")); } - private void expectBadRequest(CheckedSupplier<Map<String, Object>, Exception> code, Matcher<String> errorMessageMatcher) { + protected void expectBadRequest(CheckedSupplier<Map<String, Object>, Exception> code, Matcher<String> errorMessageMatcher) { try { Map<String, Object> result = code.get(); fail("expected ResponseException but got " + result); @@ -310,7 +309,7 @@ private Map<String, Object> runSql(String mode, String sql, String suffix) throw return runSql(new StringEntity("{\"query\":\"" + sql + "\"" + mode(mode) + "}", ContentType.APPLICATION_JSON), suffix); } - private Map<String, Object> runSql(HttpEntity sql, String suffix) throws IOException { + protected Map<String, Object> runSql(HttpEntity sql, String suffix) throws IOException { Request request = new Request("POST", "/_sql" + suffix); request.addParameter("error_trace", "true"); // Helps with debugging in case something crazy happens on the server. request.addParameter("pretty", "true"); // Improves error reporting readability @@ -719,8 +718,8 @@ public static String mode(String mode) { return Strings.isEmpty(mode) ? StringUtils.EMPTY : ",\"mode\":\"" + mode + "\""; } - private void index(String...
docs) throws IOException { - Request request = new Request("POST", "/test/_doc/_bulk"); + protected void index(String... docs) throws IOException { + Request request = new Request("POST", "/test/_bulk"); request.addParameter("refresh", "true"); StringBuilder bulk = new StringBuilder(); for (String doc : docs) { diff --git a/x-pack/plugin/sql/qa/src/main/resources/agg.csv-spec b/x-pack/plugin/sql/qa/src/main/resources/agg.csv-spec index f9576c7b859a6..bdb94321b76d5 100644 --- a/x-pack/plugin/sql/qa/src/main/resources/agg.csv-spec +++ b/x-pack/plugin/sql/qa/src/main/resources/agg.csv-spec @@ -309,4 +309,59 @@ SELECT HISTOGRAM(emp_no % 100, 10) AS h, COUNT(*) as c FROM test_emp GROUP BY h 20 |10 10 |10 0 |10 -; \ No newline at end of file +; + +countAll +schema::all_names:l|c:l +SELECT COUNT(ALL first_name) all_names, COUNT(*) c FROM test_emp; + + all_names | c +---------------+--------------- +90 |100 +; + +countAllCountTypesWithHaving +schema::ln:l|dln:l|fn:l|dfn:l|ccc:l +SELECT COUNT(last_name) ln, COUNT(distinct last_name) dln, COUNT(first_name) fn, COUNT(distinct first_name) dfn, COUNT(*) ccc FROM test_emp GROUP BY gender HAVING dln>5 AND ln>32 AND dfn>1 AND fn>1 AND ccc>5; + + ln | dln | fn | dfn | ccc +---------------+-------------+---------------+------------+------------- +33 |32 |32 |32 |33 +57 |54 |48 |48 |57 +; + +aggCountEqualityFalse +schema::areEqual:b|ln:l|dln:l +SELECT COUNT(last_name)=COUNT(DISTINCT last_name) AS areEqual, COUNT(last_name) ln, COUNT(DISTINCT last_name) dln FROM test_emp; + + areEqual | ln | dln +---------------+---------------+--------------- +false |100 |96 +; + +aggCountEqualityTrue +schema::areEqual:b|fn:l|dfn:l +SELECT COUNT(first_name)=COUNT(DISTINCT first_name) AS areEqual, COUNT(first_name) fn, COUNT(DISTINCT first_name) dfn FROM test_emp; + + areEqual | fn | dfn +---------------+---------------+--------------- +true |90 |90 +; + +aggCountAllEquality +schema::areEqual:b|afn:l +SELECT COUNT(first_name)=COUNT(ALL first_name) AS areEqual, COUNT(ALL first_name) afn FROM test_emp; + + areEqual | afn +---------------+--------------- +true |90 +; + +aggCountAllDifferentFields +schema::areEqual:b|afn:l|aln:l +SELECT COUNT(ALL last_name)=COUNT(ALL first_name) AS areEqual, COUNT(ALL first_name) afn, COUNT(ALL last_name) aln FROM test_emp; + + areEqual | afn | aln +---------------+---------------+--------------- +false |90 |100 +; diff --git a/x-pack/plugin/sql/qa/src/main/resources/agg.sql-spec b/x-pack/plugin/sql/qa/src/main/resources/agg.sql-spec index 149e23f771349..21dd7bf530e3d 100644 --- a/x-pack/plugin/sql/qa/src/main/resources/agg.sql-spec +++ b/x-pack/plugin/sql/qa/src/main/resources/agg.sql-spec @@ -110,6 +110,8 @@ aggCountWithAlias SELECT gender g, COUNT(*) c FROM "test_emp" GROUP BY g ORDER BY gender; countDistinct SELECT COUNT(DISTINCT hire_date) AS count FROM test_emp; +countDistinctAndCountSimpleWithAlias +SELECT COUNT(*) cnt, COUNT(DISTINCT first_name) as names, gender FROM test_emp GROUP BY gender ORDER BY gender; aggCountAliasAndWhereClauseMultiGroupBy SELECT gender g, languages l, COUNT(*) c FROM "test_emp" WHERE emp_no < 10020 GROUP BY gender, languages ORDER BY gender, languages; @@ -121,6 +123,8 @@ aggCountWithAliasMultiGroupBy SELECT gender g, languages l, COUNT(*) c FROM "test_emp" GROUP BY g, l ORDER BY gender, languages; aggCountWithAliasMultiGroupByDifferentOrder SELECT gender g, languages l, COUNT(*) c FROM "test_emp" GROUP BY g, l ORDER BY languages ASC, gender DESC; +aggCountDistinctWithAliasAndGroupBy +SELECT COUNT(*) cnt, 
COUNT(DISTINCT first_name) as names, gender FROM test_emp GROUP BY gender ORDER BY gender; @@ -161,12 +165,20 @@ aggCountStarAndHavingBetween SELECT gender g, COUNT(*) c FROM "test_emp" GROUP BY g HAVING c BETWEEN 10 AND 70 ORDER BY gender ASC; aggCountStarAndHavingBetweenWithLimit SELECT gender g, COUNT(*) c FROM "test_emp" GROUP BY g HAVING c BETWEEN 10 AND 70 ORDER BY gender LIMIT 1; +aggCountDistinctAndHavingBetweenWithLimit +SELECT gender g, COUNT(DISTINCT first_name) c FROM "test_emp" GROUP BY g HAVING c BETWEEN 40 AND 50 ORDER BY gender LIMIT 1; aggCountOnColumnAndHavingOnAliasAndFunction SELECT gender g, COUNT(gender) c FROM "test_emp" GROUP BY g HAVING c > 10 AND COUNT(gender) < 70 ORDER BY gender; aggCountOnColumnAndHavingOnAliasAndFunctionWildcard -> COUNT(*/1) vs COUNT(gender) SELECT gender g, COUNT(gender) c FROM "test_emp" GROUP BY g HAVING c > 10 AND COUNT(*) < 70 ORDER BY gender; aggCountOnColumnAndHavingOnAliasAndFunctionConstant SELECT gender g, COUNT(gender) c FROM "test_emp" GROUP BY g HAVING c > 10 AND COUNT(1) < 70 ORDER BY gender; +aggDistinctCountWithAliasAndHaving +SELECT COUNT(*) c, COUNT(DISTINCT first_name) AS names, gender FROM test_emp GROUP BY gender HAVING names > 40 ORDER BY gender; +aggDistinctCountWithFunctionWildcardAndHaving +SELECT COUNT(*) c, COUNT(DISTINCT first_name) AS names, gender FROM test_emp GROUP BY gender HAVING names < 50 AND c < 50 ORDER BY gender; +aggDistinctCountWithFunctionWildcardAndFunctionConstantAndHaving +SELECT COUNT(*) c, COUNT(DISTINCT first_name) AS names, COUNT(123) AS c123, gender FROM test_emp GROUP BY gender HAVING names < 50 AND c < 50 AND c123 < 50 ORDER BY gender; aggCountAndHavingMultiGroupBy SELECT gender g, languages l, COUNT(*) c FROM "test_emp" GROUP BY g, l HAVING COUNT(*) > 10 ORDER BY gender, l; @@ -195,7 +207,21 @@ aggCountOnColumnAndHavingOnAliasAndFunctionWildcardMultiGroupBy -> COUNT(*/1) vs SELECT gender g, languages l, COUNT(gender) c FROM "test_emp" GROUP BY g, l HAVING c > 10 AND COUNT(*) < 70 ORDER BY gender, languages; aggCountOnColumnAndHavingOnAliasAndFunctionConstantMultiGroupBy SELECT gender g, languages l, COUNT(gender) c FROM "test_emp" GROUP BY g, l HAVING c > 10 AND COUNT(1) < 70 ORDER BY gender, languages; - +aggCountOnDistinctColumnAndHavingOnAliasAndFunctionConstantMultiGroupBy +SELECT gender g, languages l, COUNT(DISTINCT last_name) c FROM "test_emp" GROUP BY g, l HAVING c > 5 AND COUNT(1) < 70 ORDER BY gender, languages; + +aggCount +SELECT COUNT(last_name) c FROM test_emp; +aggCountAndCountDistinct +SELECT COUNT(last_name) c, COUNT(DISTINCT last_name) distinct_names FROM test_emp; +aggCountAndCountDistinctWithHaving +SELECT COUNT(last_name) c, COUNT(DISTINCT last_name) distinct_names, gender FROM test_emp GROUP BY gender HAVING distinct_names > 10 ORDER BY gender; +aggCountMultiComparisonWithHaving +SELECT COUNT(last_name) ln, COUNT(distinct last_name) dln, COUNT(first_name) fn, COUNT(distinct first_name) dfn, COUNT(*) ccc FROM test_emp GROUP BY gender HAVING dln>5 AND ln>32 AND dfn>1 AND fn>1 AND ccc>5 ORDER BY gender DESC; +aggCountMultiComparisonWithHavingAndNullGrouping +SELECT gender, COUNT(last_name) ln, COUNT(distinct last_name) dln, COUNT(first_name) fn, COUNT(distinct first_name) dfn, COUNT(*) ccc FROM test_emp GROUP BY gender HAVING dln>1 AND ln>1 AND dfn>1 AND fn>1 AND ccc>1 ORDER BY gender DESC; +aggCountWithHavingAndWhere +SELECT COUNT(last_name) c, COUNT(DISTINCT last_name) distinct_names, gender FROM test_emp WHERE salary > 65000 GROUP BY gender HAVING distinct_names > 10 
ORDER BY gender; // MIN aggMinImplicit diff --git a/x-pack/plugin/sql/qa/src/main/resources/alias.csv-spec b/x-pack/plugin/sql/qa/src/main/resources/alias.csv-spec index e87aaecf6f332..fe8e6e5da4e63 100644 --- a/x-pack/plugin/sql/qa/src/main/resources/alias.csv-spec +++ b/x-pack/plugin/sql/qa/src/main/resources/alias.csv-spec @@ -36,6 +36,9 @@ dep.dep_name.keyword|VARCHAR |KEYWORD dep.from_date |TIMESTAMP |DATE dep.to_date |TIMESTAMP |DATE emp_no |INTEGER |INTEGER +extra |STRUCT |OBJECT +extra.info |STRUCT |OBJECT +extra.info.gender |VARCHAR |KEYWORD extra_gender |VARCHAR |KEYWORD extra_no |INTEGER |INTEGER first_name |VARCHAR |TEXT @@ -45,7 +48,7 @@ hire_date |TIMESTAMP |DATE languages |TINYINT |BYTE last_name |VARCHAR |TEXT last_name.keyword |VARCHAR |KEYWORD -salary |INTEGER |INTEGER +salary |INTEGER |INTEGER ; describePattern @@ -61,6 +64,9 @@ dep.dep_name.keyword|VARCHAR |KEYWORD dep.from_date |TIMESTAMP |DATE dep.to_date |TIMESTAMP |DATE emp_no |INTEGER |INTEGER +extra |STRUCT |OBJECT +extra.info |STRUCT |OBJECT +extra.info.gender |VARCHAR |KEYWORD extra_gender |VARCHAR |KEYWORD extra_no |INTEGER |INTEGER first_name |VARCHAR |TEXT @@ -70,7 +76,7 @@ hire_date |TIMESTAMP |DATE languages |TINYINT |BYTE last_name |VARCHAR |TEXT last_name.keyword |VARCHAR |KEYWORD -salary |INTEGER |INTEGER +salary |INTEGER |INTEGER ; showAlias diff --git a/x-pack/plugin/sql/qa/src/main/resources/command.csv-spec b/x-pack/plugin/sql/qa/src/main/resources/command.csv-spec index 7c9c98f6d0446..c52a5f807bde1 100644 --- a/x-pack/plugin/sql/qa/src/main/resources/command.csv-spec +++ b/x-pack/plugin/sql/qa/src/main/resources/command.csv-spec @@ -236,6 +236,9 @@ dep.dep_name.keyword|VARCHAR |KEYWORD dep.from_date |TIMESTAMP |DATE dep.to_date |TIMESTAMP |DATE emp_no |INTEGER |INTEGER +extra |STRUCT |OBJECT +extra.info |STRUCT |OBJECT +extra.info.gender |VARCHAR |KEYWORD extra_gender |VARCHAR |KEYWORD extra_no |INTEGER |INTEGER first_name |VARCHAR |TEXT @@ -261,6 +264,9 @@ dep.dep_name.keyword|VARCHAR |KEYWORD dep.from_date |TIMESTAMP |DATE dep.to_date |TIMESTAMP |DATE emp_no |INTEGER |INTEGER +extra |STRUCT |OBJECT +extra.info |STRUCT |OBJECT +extra.info.gender |VARCHAR |KEYWORD extra_gender |VARCHAR |KEYWORD extra_no |INTEGER |INTEGER first_name |VARCHAR |TEXT @@ -270,7 +276,7 @@ hire_date |TIMESTAMP |DATE languages |TINYINT |BYTE last_name |VARCHAR |TEXT last_name.keyword |VARCHAR |KEYWORD -salary |INTEGER |INTEGER +salary |INTEGER |INTEGER ; describeSimpleIdentifier diff --git a/x-pack/plugin/sql/qa/src/main/resources/datetime.csv-spec b/x-pack/plugin/sql/qa/src/main/resources/datetime.csv-spec index d966c8c822ea8..5e51ae69bf396 100644 --- a/x-pack/plugin/sql/qa/src/main/resources/datetime.csv-spec +++ b/x-pack/plugin/sql/qa/src/main/resources/datetime.csv-spec @@ -125,6 +125,40 @@ SELECT WEEK(birth_date) week, birth_date FROM test_emp WHERE WEEK(birth_date) > // Aggregate // +castedDateTimeWithGroupBy1 +SELECT CONVERT(birth_date, DOUBLE) AS date FROM test_emp GROUP BY date ORDER BY date LIMIT 10; + + date:d +--------------- +null +-5.631552E8 +-5.586624E8 +-5.56416E8 +-5.539104E8 +-5.517504E8 +-5.492448E8 +-5.406912E8 +-5.371488E8 +-5.359392E8 +; + +castedDateTimeWithGroupBy2 +SELECT CAST(hire_date AS INTEGER) AS date FROM test_emp GROUP BY date ORDER BY date LIMIT 10; + + date:i +--------------- +477532800 +478051200 +484790400 +489715200 +495763200 +498096000 +498614400 +501206400 +501292800 +501379200 +; + dateTimeAggByIsoDayOfWeekWithFilter SELECT IDOW(birth_date) day, DAY_NAME(birth_date) name, COUNT(*) c 
FROM test_emp WHERE IDOW(birth_date) < 6 GROUP BY day, name ORDER BY day desc; diff --git a/x-pack/plugin/sql/qa/src/main/resources/docs.csv-spec b/x-pack/plugin/sql/qa/src/main/resources/docs.csv-spec index af16675ab8138..2903292b1adff 100644 --- a/x-pack/plugin/sql/qa/src/main/resources/docs.csv-spec +++ b/x-pack/plugin/sql/qa/src/main/resources/docs.csv-spec @@ -977,14 +977,24 @@ SELECT COUNT(*) AS count FROM emp; // end::aggCountStar ; +aggCountAll +// tag::aggCountAll +SELECT COUNT(ALL last_name) AS count_all, COUNT(DISTINCT last_name) count_distinct FROM emp; + + count_all | count_distinct +---------------+------------------ +100 |96 +// end::aggCountAll +; + aggCountDistinct // tag::aggCountDistinct -SELECT COUNT(DISTINCT hire_date) AS hires FROM emp; +SELECT COUNT(DISTINCT hire_date) unique_hires, COUNT(hire_date) AS hires FROM emp; - hires ---------------- -99 + unique_hires | hires +----------------+--------------- +99 |100 // end::aggCountDistinct ; @@ -2130,3 +2140,30 @@ SELECT NOW() AS result; 2018-12-12T14:48:52.448Z // end::nowFunction ; + +//////////// +// Next two queries need to have the same output, as they should be equivalent. +// They are used in the "SQL Limitations" page. +//////////// +limitationSubSelect +// tag::limitationSubSelect +SELECT * FROM (SELECT first_name, last_name FROM emp WHERE last_name NOT LIKE '%a%') WHERE first_name LIKE 'A%'; + + first_name | last_name +---------------+--------------- +Anneke |Preusig +Anoosh |Peyn +Arumugam |Ossenbruggen +// end::limitationSubSelect +; + +limitationSubSelect +// tag::limitationSubSelectRewritten +SELECT first_name, last_name FROM emp WHERE last_name NOT LIKE '%a%' AND first_name LIKE 'A%'; +// end::limitationSubSelectRewritten + first_name | last_name +---------------+--------------- +Anneke |Preusig +Anoosh |Peyn +Arumugam |Ossenbruggen +; diff --git a/x-pack/plugin/sql/qa/src/main/resources/field-alias.csv-spec b/x-pack/plugin/sql/qa/src/main/resources/field-alias.csv-spec new file mode 100644 index 0000000000000..977c0e8309567 --- /dev/null +++ b/x-pack/plugin/sql/qa/src/main/resources/field-alias.csv-spec @@ -0,0 +1,129 @@ +// +// Tests testing field alias (introduced in ES 6.4) +// + +// filtering + +filterEquals +SELECT extra.info.gender gender FROM "test_emp_copy" WHERE gender = 'M' LIMIT 5; + + gender +--------------- +M +M +M +M +M + +; + +filterNotEquals +SELECT extra.info.gender gender FROM "test_emp_copy" WHERE gender <> 'M' ORDER BY gender LIMIT 5; + + gender +--------------- +F +F +F +F +F +; + +aggWithNullFilter +SELECT COUNT(*) count FROM test_emp_copy WHERE extra.info.gender IS NULL; + + count:l +--------------- +10 +; + +functionOverAlias +SELECT BIT_LENGTH(extra.info.gender) bit FROM test_emp_copy ORDER BY extra.info.gender LIMIT 1; + + bit +--------------- +8 +; + + +singlePercentileWithoutComma +SELECT extra.info.gender AS gender, PERCENTILE(emp_no, 97) p1 FROM test_emp_copy GROUP BY extra.info.gender; + +gender:s | p1:d +null |10019.0 +F |10099.51 +M |10095.789999999999 +; + +singlePercentileWithComma +SELECT extra.info.gender AS gender, PERCENTILE(emp_no, 97.76) p1 FROM test_emp_copy GROUP BY extra.info.gender; + +gender:s | p1:d +null |10019.0 +F |10099.7608 +M |10096.2232 +; + +multiplePercentilesOneWithCommaOneWithout +SELECT extra.info.gender AS gender, PERCENTILE(emp_no, 92.45) p1, PERCENTILE(emp_no, 91) p2 FROM test_emp_copy GROUP BY extra.info.gender; + +gender:s | p1:d | p2:d +null |10018.745 |10018.599999999999 +F |10098.0085 |10096.119999999999 +M |10091.393 |10090.37 +; + 
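The specs in field-alias.csv-spec run against test_emp_copy, which only resolves extra.info.gender because DataLoader (above) now registers it as an alias field pointing at the concrete gender keyword field. A minimal sketch of that mapping fragment, in the same XContentBuilder style DataLoader uses (the variable name and the stripped-down property set are assumptions):

    import org.elasticsearch.common.xcontent.XContentBuilder;
    import org.elasticsearch.common.xcontent.json.JsonXContent;

    // Emits: {"properties":{"gender":{"type":"keyword"},
    //         "extra.info.gender":{"type":"alias","path":"gender"}}}
    XContentBuilder mapping = JsonXContent.contentBuilder().startObject();
    mapping.startObject("properties");
    {
        mapping.startObject("gender").field("type", "keyword").endObject();
        // the alias stores nothing itself; it resolves to `gender` at query time
        mapping.startObject("extra.info.gender")
               .field("type", "alias")
               .field("path", "gender")
               .endObject();
    }
    mapping.endObject().endObject();

Documents with no gender value surface as null through the alias, which is what the aggWithNullFilter spec above relies on.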
+multiplePercentilesWithoutComma +SELECT extra.info.gender AS gender, PERCENTILE(emp_no, 91) p1, PERCENTILE(emp_no, 89) p2 FROM test_emp_copy GROUP BY extra.info.gender; + +gender:s | p1:d | p2:d +null |10018.599999999999 |10018.4 +F |10096.119999999999 |10093.74 +M |10090.37 |10086.92 +; + +multiplePercentilesWithComma +SELECT extra.info.gender AS gender, PERCENTILE(emp_no, 85.7) p1, PERCENTILE(emp_no, 94.3) p2 FROM test_emp_copy GROUP BY extra.info.gender; + +gender:s | p1:d | p2:d +null |10018.070000000002 |10018.929999999998 +F |10091.343 |10098.619 +M |10084.349 |10093.502 +; + +percentileRank +SELECT extra.info.gender AS gender, PERCENTILE_RANK(emp_no, 10025) rank FROM test_emp_copy GROUP BY extra.info.gender; + +gender:s | rank:d +null |100.0 +F |17.424242424242426 +M |15.350877192982457 +; + +multiplePercentileRanks +SELECT extra.info.gender AS gender, PERCENTILE_RANK(emp_no, 10030.0) rank1, PERCENTILE_RANK(emp_no, 10025) rank2 FROM test_emp_copy GROUP BY extra.info.gender; + +gender:s | rank1:d | rank2:d +null |100.0 |100.0 +F |21.445221445221442 |17.424242424242426 +M |21.929824561403507 |15.350877192982457 +; + +multiplePercentilesAndPercentileRank +SELECT extra.info.gender AS gender, PERCENTILE(emp_no, 97.76) p1, PERCENTILE(emp_no, 93.3) p2, PERCENTILE_RANK(emp_no, 10025) rank FROM test_emp_copy GROUP BY extra.info.gender; + +gender:s | p1:d | p2:d | rank:d +null |10019.0 |10018.83 |100.0 +F |10099.7608 |10098.289 |17.424242424242426 +M |10096.2232 |10092.362 |15.350877192982457 +; + +kurtosisAndSkewnessGroup +SELECT extra.info.gender AS gender, KURTOSIS(salary) k, SKEWNESS(salary) s FROM test_emp_copy GROUP BY extra.info.gender; + +gender:s | k:d | s:d + +null |2.2215791166941923 |-0.03373126000214023 +F |1.7873117044424276 |0.05504995122217512 +M |2.280646181070106 |0.44302407229580243 +; diff --git a/x-pack/plugin/sql/sql-action/build.gradle b/x-pack/plugin/sql/sql-action/build.gradle index 9cf62723ea61e..86a028186f441 100644 --- a/x-pack/plugin/sql/sql-action/build.gradle +++ b/x-pack/plugin/sql/sql-action/build.gradle @@ -42,7 +42,7 @@ dependencyLicenses { ignoreSha 'elasticsearch-core' } -thirdPartyAudit.excludes = [ +thirdPartyAudit.ignoreMissingClasses ( 'com.fasterxml.jackson.dataformat.yaml.YAMLFactory', 'com.fasterxml.jackson.dataformat.yaml.YAMLMapper', @@ -138,4 +138,4 @@ thirdPartyAudit.excludes = [ 'org.zeromq.ZMQ$Context', 'org.zeromq.ZMQ$Socket', 'org.zeromq.ZMQ' -] +) diff --git a/x-pack/plugin/sql/sql-action/licenses/lucene-core-8.0.0-snapshot-774e9aefbc.jar.sha1 b/x-pack/plugin/sql/sql-action/licenses/lucene-core-8.0.0-snapshot-774e9aefbc.jar.sha1 deleted file mode 100644 index 2708b818d44cb..0000000000000 --- a/x-pack/plugin/sql/sql-action/licenses/lucene-core-8.0.0-snapshot-774e9aefbc.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -49b3ac44b6749a7ebf0c2e41a81e7910133d2fcc \ No newline at end of file diff --git a/x-pack/plugin/sql/sql-action/licenses/lucene-core-8.0.0-snapshot-a1c6e642aa.jar.sha1 b/x-pack/plugin/sql/sql-action/licenses/lucene-core-8.0.0-snapshot-a1c6e642aa.jar.sha1 new file mode 100644 index 0000000000000..efbb9ada534a5 --- /dev/null +++ b/x-pack/plugin/sql/sql-action/licenses/lucene-core-8.0.0-snapshot-a1c6e642aa.jar.sha1 @@ -0,0 +1 @@ +ad32720fe677becb93a26692338b63754613aa50 \ No newline at end of file diff --git a/x-pack/plugin/sql/sql-cli/build.gradle b/x-pack/plugin/sql/sql-cli/build.gradle index dbeeda5ee2d3c..be7fe189cf033 100644 --- a/x-pack/plugin/sql/sql-cli/build.gradle +++ b/x-pack/plugin/sql/sql-cli/build.gradle @@ -1,5 +1,3 @@ -import 
de.thetaphi.forbiddenapis.gradle.CheckForbiddenApis - /* * This project is named sql-cli because it is in the "org.elasticsearch.plugin" * group and it'd be super confusing for it to just be called "cli" there. @@ -80,7 +78,7 @@ forbiddenApisMain { signaturesFiles += files('src/forbidden/cli-signatures.txt') } -thirdPartyAudit.excludes = [ +thirdPartyAudit.ignoreMissingClasses ( // jLine's optional dependencies 'org.apache.sshd.client.SshClient', 'org.apache.sshd.client.auth.keyboard.UserInteraction', @@ -109,7 +107,7 @@ thirdPartyAudit.excludes = [ 'org.mozilla.universalchardet.UniversalDetector', 'org.fusesource.jansi.internal.Kernel32$FOCUS_EVENT_RECORD', 'org.fusesource.jansi.internal.Kernel32$MOUSE_EVENT_RECORD', -] +) task runcli { description = 'Run the CLI and connect to elasticsearch running on 9200' diff --git a/x-pack/plugin/sql/sql-client/src/main/java/org/elasticsearch/xpack/sql/client/ConnectionConfiguration.java b/x-pack/plugin/sql/sql-client/src/main/java/org/elasticsearch/xpack/sql/client/ConnectionConfiguration.java index c50d1da820edb..6096f5baf865d 100644 --- a/x-pack/plugin/sql/sql-client/src/main/java/org/elasticsearch/xpack/sql/client/ConnectionConfiguration.java +++ b/x-pack/plugin/sql/sql-client/src/main/java/org/elasticsearch/xpack/sql/client/ConnectionConfiguration.java @@ -161,7 +161,7 @@ private static String isKnownProperty(String propertyName, Collection kn if (knownOptions.contains(propertyName)) { return null; } - return "Unknown parameter [" + propertyName + "] ; did you mean " + StringUtils.findSimiliar(propertyName, knownOptions); + return "Unknown parameter [" + propertyName + "] ; did you mean " + StringUtils.findSimilar(propertyName, knownOptions); } protected T parseValue(String key, String value, Function parser) { diff --git a/x-pack/plugin/sql/sql-client/src/main/java/org/elasticsearch/xpack/sql/client/SslConfig.java b/x-pack/plugin/sql/sql-client/src/main/java/org/elasticsearch/xpack/sql/client/SslConfig.java index 9e89f7b848c47..1b19c385db4d1 100644 --- a/x-pack/plugin/sql/sql-client/src/main/java/org/elasticsearch/xpack/sql/client/SslConfig.java +++ b/x-pack/plugin/sql/sql-client/src/main/java/org/elasticsearch/xpack/sql/client/SslConfig.java @@ -121,19 +121,19 @@ private KeyManager[] loadKeyManagers() throws GeneralSecurityException, IOExcept } - private KeyStore loadKeyStore(String location, char[] pass, String keyStoreType) throws GeneralSecurityException, IOException { + private KeyStore loadKeyStore(String source, char[] pass, String keyStoreType) throws GeneralSecurityException, IOException { KeyStore keyStore = KeyStore.getInstance(keyStoreType); - Path path = Paths.get(location); + Path path = Paths.get(source); if (!Files.exists(path)) { throw new ClientException( - "Expected to find keystore file at [" + location + "] but was unable to. Make sure you have specified a valid URI."); + "Expected to find keystore file at [" + source + "] but was unable to. 
Make sure you have specified a valid URI."); } - try (InputStream in = Files.newInputStream(Paths.get(location), StandardOpenOption.READ)) { + try (InputStream in = Files.newInputStream(Paths.get(source), StandardOpenOption.READ)) { keyStore.load(in, pass); } catch (Exception ex) { - throw new ClientException("Cannot open keystore [" + location + "] - " + ex.getMessage(), ex); + throw new ClientException("Cannot open keystore [" + source + "] - " + ex.getMessage(), ex); } finally { } @@ -174,6 +174,7 @@ public boolean equals(Object obj) { && Objects.equals(truststoreType, other.truststoreType); } + @Override public int hashCode() { return getClass().hashCode(); } diff --git a/x-pack/plugin/sql/sql-client/src/main/java/org/elasticsearch/xpack/sql/client/StringUtils.java b/x-pack/plugin/sql/sql-client/src/main/java/org/elasticsearch/xpack/sql/client/StringUtils.java index e4e5bf4d98517..1539302242b27 100644 --- a/x-pack/plugin/sql/sql-client/src/main/java/org/elasticsearch/xpack/sql/client/StringUtils.java +++ b/x-pack/plugin/sql/sql-client/src/main/java/org/elasticsearch/xpack/sql/client/StringUtils.java @@ -262,7 +262,7 @@ else if (m == 0) { return -1; } - public static List<String> findSimiliar(CharSequence match, Collection<String> potential) { + public static List<String> findSimilar(CharSequence match, Collection<String> potential) { List<String> list = new ArrayList<>(3); // 1 switches or 1 extra char diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/analysis/AnalysisException.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/analysis/AnalysisException.java index 5e9224979f831..262d62814e164 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/analysis/AnalysisException.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/analysis/AnalysisException.java @@ -10,7 +10,7 @@ import org.elasticsearch.xpack.sql.tree.Location; import org.elasticsearch.xpack.sql.tree.Node; -import java.util.Locale; +import static org.elasticsearch.common.logging.LoggerMessageFormat.format; public class AnalysisException extends ClientSqlException { @@ -21,8 +21,8 @@ public AnalysisException(Node<?> source, String message, Object...
args) { super(message, args); Location loc = Location.EMPTY; - if (source != null && source.location() != null) { - loc = source.location(); + if (source != null && source.source() != null) { + loc = source.source().source(); } this.line = loc.getLineNumber(); this.column = loc.getColumnNumber(); @@ -32,8 +32,8 @@ public AnalysisException(Node source, String message, Throwable cause) { super(message, cause); Location loc = Location.EMPTY; - if (source != null && source.location() != null) { - loc = source.location(); + if (source != null && source.source() != null) { + loc = source.source().source(); } this.line = loc.getLineNumber(); this.column = loc.getColumnNumber(); @@ -54,6 +54,6 @@ public RestStatus status() { @Override public String getMessage() { - return String.format(Locale.ROOT, "line %s:%s: %s", getLineNumber(), getColumnNumber(), super.getMessage()); + return format("line {}:{}: {}", getLineNumber(), getColumnNumber(), super.getMessage()); } } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/analysis/analyzer/Analyzer.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/analysis/analyzer/Analyzer.java index 2b1aa42277ea1..090fea80b14c4 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/analysis/analyzer/Analyzer.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/analysis/analyzer/Analyzer.java @@ -28,6 +28,7 @@ import org.elasticsearch.xpack.sql.expression.function.FunctionRegistry; import org.elasticsearch.xpack.sql.expression.function.Functions; import org.elasticsearch.xpack.sql.expression.function.UnresolvedFunction; +import org.elasticsearch.xpack.sql.expression.function.aggregate.Count; import org.elasticsearch.xpack.sql.expression.function.scalar.Cast; import org.elasticsearch.xpack.sql.expression.predicate.operator.arithmetic.ArithmeticOperation; import org.elasticsearch.xpack.sql.plan.TableIdentifier; @@ -173,7 +174,7 @@ private static Attribute resolveAgainstList(UnresolvedAttribute u, Collection subQ SubQueryAlias subQueryAlias = subQueries.get(ur.table().index()); if (subQueryAlias != null) { if (ur.alias() != null) { - return new SubQueryAlias(ur.location(), subQueryAlias, ur.alias()); + return new SubQueryAlias(ur.source(), subQueryAlias, ur.alias()); } return subQueryAlias; } @@ -282,15 +283,15 @@ private class ResolveTable extends AnalyzeRule { protected LogicalPlan rule(UnresolvedRelation plan) { TableIdentifier table = plan.table(); if (indexResolution.isValid() == false) { - return plan.unresolvedMessage().equals(indexResolution.toString()) ? plan : new UnresolvedRelation(plan.location(), + return plan.unresolvedMessage().equals(indexResolution.toString()) ? 
plan : new UnresolvedRelation(plan.source(), plan.table(), plan.alias(), indexResolution.toString()); } assert indexResolution.matches(table.index()); - LogicalPlan logicalPlan = new EsRelation(plan.location(), indexResolution.get()); - SubQueryAlias sa = new SubQueryAlias(plan.location(), logicalPlan, table.index()); + LogicalPlan logicalPlan = new EsRelation(plan.source(), indexResolution.get()); + SubQueryAlias sa = new SubQueryAlias(plan.source(), logicalPlan, table.index()); if (plan.alias() != null) { - sa = new SubQueryAlias(plan.location(), sa, plan.alias()); + sa = new SubQueryAlias(plan.source(), sa, plan.alias()); } return sa; @@ -311,13 +312,13 @@ protected LogicalPlan rule(LogicalPlan plan) { if (plan instanceof Project) { Project p = (Project) plan; if (hasStar(p.projections())) { - return new Project(p.location(), p.child(), expandProjections(p.projections(), p.child())); + return new Project(p.source(), p.child(), expandProjections(p.projections(), p.child())); } } else if (plan instanceof Aggregate) { Aggregate a = (Aggregate) plan; if (hasStar(a.aggregates())) { - return new Aggregate(a.location(), a.child(), a.groupings(), + return new Aggregate(a.source(), a.child(), a.groupings(), expandProjections(a.aggregates(), a.child())); } // if the grouping is unresolved but the aggs are, use the latter to resolve the former @@ -339,7 +340,7 @@ else if (plan instanceof Aggregate) { newGroupings.add(grouping); } - return changed ? new Aggregate(a.location(), a.child(), newGroupings, a.aggregates()) : a; + return changed ? new Aggregate(a.source(), a.child(), newGroupings, a.aggregates()) : a; } } @@ -347,7 +348,7 @@ else if (plan instanceof Join) { Join j = (Join) plan; if (!j.duplicatesResolved()) { LogicalPlan deduped = dedupRight(j.left(), j.right()); - return new Join(j.location(), j.left(), deduped, j.type(), j.condition()); + return new Join(j.source(), j.left(), deduped, j.type(), j.condition()); } } // try resolving the order expression (the children are resolved as this point) @@ -357,7 +358,7 @@ else if (plan instanceof OrderBy) { List resolvedOrder = o.order().stream() .map(or -> resolveExpression(or, o.child())) .collect(toList()); - return new OrderBy(o.location(), o.child(), resolvedOrder); + return new OrderBy(o.source(), o.child(), resolvedOrder); } } @@ -442,12 +443,12 @@ else if (q.resolved() == false) { } if (q.qualifier() != null) { if (Objects.equals(q.qualifiedName(), fa.qualifiedPath())) { - expanded.add(fa.withLocation(attr.location())); + expanded.add(fa.withLocation(attr.source())); } } else { // use the path only to match non-compound types if (Objects.equals(q.name(), fa.path())) { - expanded.add(fa.withLocation(attr.location())); + expanded.add(fa.withLocation(attr.source())); } } } @@ -516,7 +517,7 @@ protected LogicalPlan rule(LogicalPlan plan) { if (ordinal != null) { changed = true; if (ordinal > 0 && ordinal <= max) { - newOrder.add(new Order(order.location(), orderBy.child().output().get(ordinal - 1), order.direction(), + newOrder.add(new Order(order.source(), orderBy.child().output().get(ordinal - 1), order.direction(), order.nullsPosition())); } else { @@ -528,7 +529,7 @@ protected LogicalPlan rule(LogicalPlan plan) { } } - return changed ? new OrderBy(orderBy.location(), orderBy.child(), newOrder) : orderBy; + return changed ? new OrderBy(orderBy.source(), orderBy.child(), newOrder) : orderBy; } if (plan instanceof Aggregate) { @@ -565,7 +566,7 @@ protected LogicalPlan rule(LogicalPlan plan) { } } - return changed ? 
new Aggregate(agg.location(), agg.child(), newGroupings, aggregates) : agg; + return changed ? new Aggregate(agg.source(), agg.child(), newGroupings, aggregates) : agg; } return plan; @@ -612,7 +613,7 @@ protected LogicalPlan rule(LogicalPlan plan) { .collect(toList())); - AttributeSet missing = resolvedRefs.substract(o.child().outputSet()); + AttributeSet missing = resolvedRefs.subtract(o.child().outputSet()); if (!missing.isEmpty()) { // Add missing attributes but project them away afterwards @@ -629,15 +630,15 @@ protected LogicalPlan rule(LogicalPlan plan) { newOrders.add(order.equals(transformed) ? order : transformed); } - return o.order().equals(newOrders) ? o : new OrderBy(o.location(), o.child(), newOrders); + return o.order().equals(newOrders) ? o : new OrderBy(o.source(), o.child(), newOrders); } // everything worked - return new Project(o.location(), new OrderBy(o.location(), newChild, maybeResolved), o.child().output()); + return new Project(o.source(), new OrderBy(o.source(), newChild, maybeResolved), o.child().output()); } if (!maybeResolved.equals(o.order())) { - return new OrderBy(o.location(), o.child(), maybeResolved); + return new OrderBy(o.source(), o.child(), maybeResolved); } } @@ -648,7 +649,7 @@ protected LogicalPlan rule(LogicalPlan plan) { .filter(Expression::resolved) .collect(toList())); - AttributeSet missing = resolvedRefs.substract(f.child().outputSet()); + AttributeSet missing = resolvedRefs.subtract(f.child().outputSet()); if (!missing.isEmpty()) { // Again, add missing attributes and project them away @@ -661,14 +662,14 @@ protected LogicalPlan rule(LogicalPlan plan) { Expression transformed = f.condition().transformUp(ua -> resolveMetadataToMessage(ua, failedAttrs, "filter"), UnresolvedAttribute.class); - return f.condition().equals(transformed) ? f : new Filter(f.location(), f.child(), transformed); + return f.condition().equals(transformed) ? 
f : new Filter(f.source(), f.child(), transformed); } - return new Project(f.location(), new Filter(f.location(), newChild, maybeResolved), f.child().output()); + return new Project(f.source(), new Filter(f.source(), newChild, maybeResolved), f.child().output()); } if (!maybeResolved.equals(f.condition())) { - return new Filter(f.location(), f.child(), maybeResolved); + return new Filter(f.source(), f.child(), maybeResolved); } } @@ -695,8 +696,8 @@ private static LogicalPlan propagateMissing(LogicalPlan plan, AttributeSet missi if (plan instanceof Project) { Project p = (Project) plan; - AttributeSet diff = missing.substract(p.child().outputSet()); - return new Project(p.location(), propagateMissing(p.child(), diff, failed), combine(p.projections(), missing)); + AttributeSet diff = missing.subtract(p.child().outputSet()); + return new Project(p.source(), propagateMissing(p.child(), diff, failed), combine(p.projections(), missing)); } if (plan instanceof Aggregate) { @@ -707,7 +708,7 @@ private static LogicalPlan propagateMissing(LogicalPlan plan, AttributeSet missi if (!Expressions.anyMatch(a.groupings(), m::semanticEquals)) { if (m instanceof Attribute) { // pass failure information to help the verifier - m = new UnresolvedAttribute(m.location(), m.name(), m.qualifier(), null, null, + m = new UnresolvedAttribute(m.source(), m.name(), m.qualifier(), null, null, new AggGroupingFailure(Expressions.names(a.groupings()))); } failed.add(m); @@ -717,7 +718,7 @@ private static LogicalPlan propagateMissing(LogicalPlan plan, AttributeSet missi if (!failed.isEmpty()) { return plan; } - return new Aggregate(a.location(), a.child(), a.groupings(), combine(a.aggregates(), missing)); + return new Aggregate(a.source(), a.child(), a.groupings(), combine(a.aggregates(), missing)); } // LeafPlans are tables and BinaryPlans are joins so pushing can only happen on unary @@ -770,7 +771,15 @@ private Expression collectResolvedAndReplace(Expression e, Map list = getList(seen, fName); for (Function seenFunction : list) { if (seenFunction != f && f.arguments().equals(seenFunction.arguments())) { - return seenFunction; + // Special check for COUNT: an already seen COUNT function will be returned only if its DISTINCT property + // matches the one from the unresolved function to be checked. + if (seenFunction instanceof Count) { + if (seenFunction.equals(f)){ + return seenFunction; + } + } else { + return seenFunction; + } } } list.add(f); @@ -808,7 +817,15 @@ protected LogicalPlan resolve(LogicalPlan plan, Map> seen if (!list.isEmpty()) { for (Function seenFunction : list) { if (uf.arguments().equals(seenFunction.arguments())) { - return seenFunction; + // Special check for COUNT: an already seen COUNT function will be returned only if its DISTINCT property + // matches the one from the unresolved function to be checked. 
+ if (seenFunction instanceof Count) { + if (uf.sameAs((Count) seenFunction)) { + return seenFunction; + } + } else { + return seenFunction; + } } } } @@ -845,14 +862,14 @@ protected LogicalPlan rule(LogicalPlan plan) { if (plan instanceof Project) { Project p = (Project) plan; if (p.childrenResolved() && hasUnresolvedAliases(p.projections())) { - return new Project(p.location(), p.child(), assignAliases(p.projections())); + return new Project(p.source(), p.child(), assignAliases(p.projections())); } return p; } if (plan instanceof Aggregate) { Aggregate a = (Aggregate) plan; if (a.childrenResolved() && hasUnresolvedAliases(a.aggregates())) { - return new Aggregate(a.location(), a.child(), a.groupings(), assignAliases(a.aggregates())); + return new Aggregate(a.source(), a.child(), a.groupings(), assignAliases(a.aggregates())); } return a; } @@ -879,11 +896,11 @@ private List assignAliases(List expr if (child instanceof Cast) { Cast c = (Cast) child; if (c.field() instanceof NamedExpression) { - return new Alias(c.location(), ((NamedExpression) c.field()).name(), c); + return new Alias(c.source(), ((NamedExpression) c.field()).name(), c); } } //TODO: maybe add something closer to SQL - return new Alias(child.location(), child.toString(), child); + return new Alias(child.source(), child.toString(), child); }, UnresolvedAlias.class); newExpr.add(expr.equals(transformed) ? expr : transformed); } @@ -900,7 +917,7 @@ private static class ProjectedAggregations extends AnalyzeRule { @Override protected LogicalPlan rule(Project p) { if (containsAggregate(p.projections())) { - return new Aggregate(p.location(), p.child(), emptyList(), p.projections()); + return new Aggregate(p.source(), p.child(), emptyList(), p.projections()); } return p; } @@ -934,8 +951,8 @@ protected LogicalPlan rule(LogicalPlan plan) { // so try resolving the condition in one go through a 'dummy' aggregate if (!condition.resolved()) { // that's why try to resolve the condition - Aggregate tryResolvingCondition = new Aggregate(agg.location(), agg.child(), agg.groupings(), - combine(agg.aggregates(), new Alias(f.location(), ".having", condition))); + Aggregate tryResolvingCondition = new Aggregate(agg.source(), agg.child(), agg.groupings(), + combine(agg.aggregates(), new Alias(f.source(), ".having", condition))); tryResolvingCondition = (Aggregate) analyze(tryResolvingCondition, false); @@ -953,14 +970,14 @@ protected LogicalPlan rule(LogicalPlan plan) { missing = findMissingAggregate(agg, condition); if (!missing.isEmpty()) { - Aggregate newAgg = new Aggregate(agg.location(), agg.child(), agg.groupings(), + Aggregate newAgg = new Aggregate(agg.source(), agg.child(), agg.groupings(), combine(agg.aggregates(), missing)); - Filter newFilter = new Filter(f.location(), newAgg, condition); + Filter newFilter = new Filter(f.source(), newAgg, condition); // preserve old output - return new Project(f.location(), newFilter, f.output()); + return new Project(f.source(), newFilter, f.output()); } - return new Filter(f.location(), f.child(), condition); + return new Filter(f.source(), f.child(), condition); } return plan; } @@ -1054,8 +1071,8 @@ private Expression implicitCast(Expression e) { if (common == null) { return e; } - left = l == common ? left : new Cast(left.location(), left, common); - right = r == common ? right : new Cast(right.location(), right, common); + left = l == common ? left : new Cast(left.source(), left, common); + right = r == common ? 
right : new Cast(right.source(), right, common); return e.replaceChildren(Arrays.asList(left, right)); } } @@ -1086,14 +1103,14 @@ public static class CleanAliases extends AnalyzeRule { protected LogicalPlan rule(LogicalPlan plan) { if (plan instanceof Project) { Project p = (Project) plan; - return new Project(p.location(), p.child(), cleanExpressions(p.projections())); + return new Project(p.source(), p.child(), cleanExpressions(p.projections())); } if (plan instanceof Aggregate) { Aggregate a = (Aggregate) plan; // clean group expressions List cleanedGroups = a.groupings().stream().map(CleanAliases::trimAliases).collect(toList()); - return new Aggregate(a.location(), a.child(), cleanedGroups, cleanExpressions(a.aggregates())); + return new Aggregate(a.source(), a.child(), cleanedGroups, cleanExpressions(a.aggregates())); } return plan.transformExpressionsOnly(e -> { @@ -1111,7 +1128,7 @@ private List cleanExpressions(List a public static Expression trimNonTopLevelAliases(Expression e) { if (e instanceof Alias) { Alias a = (Alias) e; - return new Alias(a.location(), a.name(), a.qualifier(), trimAliases(a.child()), a.id()); + return new Alias(a.source(), a.name(), a.qualifier(), trimAliases(a.child()), a.id()); } return trimAliases(e); } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/analysis/analyzer/VerificationException.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/analysis/analyzer/VerificationException.java index 86af4815894b8..e7aa0b36f1482 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/analysis/analyzer/VerificationException.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/analysis/analyzer/VerificationException.java @@ -5,14 +5,14 @@ */ package org.elasticsearch.xpack.sql.analysis.analyzer; -import java.util.Collection; -import java.util.stream.Collectors; - import org.elasticsearch.xpack.sql.analysis.AnalysisException; import org.elasticsearch.xpack.sql.analysis.analyzer.Verifier.Failure; import org.elasticsearch.xpack.sql.tree.Location; import org.elasticsearch.xpack.sql.util.StringUtils; +import java.util.Collection; +import java.util.stream.Collectors; + public class VerificationException extends AnalysisException { @@ -27,7 +27,7 @@ protected VerificationException(Collection sources) { public String getMessage() { return failures.stream() .map(f -> { - Location l = f.source().location(); + Location l = f.source().source().source(); return "line " + l.getLineNumber() + ":" + l.getColumnNumber() + ": " + f.message(); }) .collect(Collectors.joining(StringUtils.NEW_LINE, "Found " + failures.size() + " problem(s)\n", StringUtils.EMPTY)); diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/analysis/analyzer/Verifier.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/analysis/analyzer/Verifier.java index 189509e95114c..f1043b2196703 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/analysis/analyzer/Verifier.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/analysis/analyzer/Verifier.java @@ -169,7 +169,7 @@ Collection verify(LogicalPlan plan) { if (!ua.customMessage()) { boolean useQualifier = ua.qualifier() != null; List potentialMatches = new ArrayList<>(); - for (Attribute a : p.intputSet()) { + for (Attribute a : p.inputSet()) { String nameCandidate = useQualifier ? 
a.qualifiedName() : a.name(); // add only primitives (object types would only result in another error) if ((a.dataType() != DataType.UNSUPPORTED) && a.dataType().isPrimitive()) { diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/analysis/index/IndexResolver.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/analysis/index/IndexResolver.java index 8d7d6b5bbee43..b3fdb4d1170fd 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/analysis/index/IndexResolver.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/analysis/index/IndexResolver.java @@ -26,7 +26,6 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.collect.ImmutableOpenMap; import org.elasticsearch.index.IndexNotFoundException; -import org.elasticsearch.xpack.sql.SqlIllegalArgumentException; import org.elasticsearch.xpack.sql.type.DataType; import org.elasticsearch.xpack.sql.type.DateEsField; import org.elasticsearch.xpack.sql.type.EsField; @@ -64,7 +63,7 @@ public enum IndexType { INDEX("BASE TABLE"), ALIAS("ALIAS"), // value for user types unrecognized - UNKNOWN("UKNOWN"); + UNKNOWN("UNKNOWN"); public static final EnumSet VALID = EnumSet.of(INDEX, ALIAS); @@ -351,12 +350,18 @@ private static EsField createField(String fieldName, Map map = globalCaps.get(parentName); + Function fieldFunction; + + // lack of parent implies the field is an alias if (map == null) { - throw new SqlIllegalArgumentException("Cannot find field {}; this is likely a bug", parentName); + // as such, create the field manually + fieldFunction = s -> createField(s, DataType.OBJECT.name(), new TreeMap<>(), false); + } else { + FieldCapabilities parentCap = map.values().iterator().next(); + fieldFunction = s -> createField(s, parentCap.getType(), new TreeMap<>(), parentCap.isAggregatable()); } - FieldCapabilities parentCap = map.values().iterator().next(); - parent = createField(parentName, globalCaps, hierarchicalMapping, flattedMapping, - s -> createField(s, parentCap.getType(), new TreeMap<>(), parentCap.isAggregatable())); + + parent = createField(parentName, globalCaps, hierarchicalMapping, flattedMapping, fieldFunction); } parentProps = parent.getProperties(); } @@ -368,7 +373,7 @@ private static EsField createField(String fieldName, Map props, boolean isAggregateable) { DataType esType = DataType.fromTypeName(typeName); switch (esType) { diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/PlanExecutor.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/PlanExecutor.java index 59044c98055d7..b4c7e063db092 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/PlanExecutor.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/PlanExecutor.java @@ -15,7 +15,9 @@ import org.elasticsearch.xpack.sql.execution.search.SourceGenerator; import org.elasticsearch.xpack.sql.expression.function.FunctionRegistry; import org.elasticsearch.xpack.sql.optimizer.Optimizer; +import org.elasticsearch.xpack.sql.plan.physical.CommandExec; import org.elasticsearch.xpack.sql.plan.physical.EsQueryExec; +import org.elasticsearch.xpack.sql.plan.physical.LocalExec; import org.elasticsearch.xpack.sql.planner.Planner; import org.elasticsearch.xpack.sql.planner.PlanningException; import org.elasticsearch.xpack.sql.proto.SqlTypedParamValue; @@ -66,8 +68,15 @@ public void searchSource(Configuration cfg, String sql, List if (exec instanceof EsQueryExec) { EsQueryExec e = 
(EsQueryExec) exec; listener.onResponse(SourceGenerator.sourceBuilder(e.queryContainer(), cfg.filter(), cfg.pageSize())); + } else if (exec instanceof LocalExec) { + listener.onFailure(new PlanningException("Cannot generate a query DSL for an SQL query that either " + + "its WHERE clause evaluates to FALSE or doesn't operate on a table (missing a FROM clause), sql statement: [{}]", + sql)); + } else if (exec instanceof CommandExec) { + listener.onFailure(new PlanningException("Cannot generate a query DSL for a special SQL command " + + "(e.g.: DESCRIBE, SHOW), sql statement: [{}]", sql)); } else { - listener.onFailure(new PlanningException("Cannot generate a query DSL for {}", sql)); + listener.onFailure(new PlanningException("Cannot generate a query DSL, sql statement: [{}]", sql)); } }, listener::onFailure)); } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/Querier.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/Querier.java index 14d7fa57fffab..16a6a4135b472 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/Querier.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/Querier.java @@ -281,7 +281,7 @@ private BucketExtractor createExtractor(FieldExtraction ref, BucketExtractor tot // wrap only agg inputs proc = proc.transformDown(l -> { BucketExtractor be = createExtractor(l.context(), totalCount); - return new AggExtractorInput(l.location(), l.expression(), l.action(), be); + return new AggExtractorInput(l.source(), l.expression(), l.action(), be); }, AggPathInput.class); return new ComputingExtractor(proc.asProcessor()); @@ -364,7 +364,7 @@ private HitExtractor createExtractor(FieldExtraction ref) { throw new SqlIllegalArgumentException("Multi-level nested fields [{}] not supported yet", hitNames); } - return new HitExtractorInput(l.location(), l.expression(), he); + return new HitExtractorInput(l.source(), l.expression(), he); }, ReferenceInput.class); String hitName = null; if (hitNames.size() == 1) { diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/extractor/MetricAggExtractor.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/extractor/MetricAggExtractor.java index fc3f6753525d4..e24bf4d0adaa5 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/extractor/MetricAggExtractor.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/extractor/MetricAggExtractor.java @@ -9,6 +9,7 @@ import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.search.aggregations.InternalAggregation; import org.elasticsearch.search.aggregations.bucket.MultiBucketsAggregation.Bucket; +import org.elasticsearch.search.aggregations.bucket.filter.InternalFilter; import org.elasticsearch.search.aggregations.matrix.stats.MatrixStats; import org.elasticsearch.search.aggregations.metrics.InternalNumericMetricsAggregation; import org.elasticsearch.search.aggregations.metrics.InternalNumericMetricsAggregation.SingleValue; @@ -83,6 +84,9 @@ public Object extract(Bucket bucket) { // throw new SqlIllegalArgumentException("Invalid innerKey {} specified for aggregation {}", innerKey, name); //} return ((InternalNumericMetricsAggregation.MultiValue) agg).value(property); + } else if (agg instanceof InternalFilter) { + // COUNT(expr) and COUNT(ALL expr) uses this type of aggregation to account for non-null 
values only + return ((InternalFilter) agg).getDocCount(); } Object v = agg.getProperty(property); diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/Alias.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/Alias.java index fb3c0290f3148..576bd233ac622 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/Alias.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/Alias.java @@ -7,7 +7,7 @@ import org.elasticsearch.xpack.sql.SqlIllegalArgumentException; import org.elasticsearch.xpack.sql.expression.gen.script.ScriptTemplate; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; import org.elasticsearch.xpack.sql.tree.NodeInfo; import org.elasticsearch.xpack.sql.type.DataType; import org.elasticsearch.xpack.sql.type.EsField; @@ -36,20 +36,20 @@ public class Alias extends NamedExpression { */ private Attribute lazyAttribute; - public Alias(Location location, String name, Expression child) { - this(location, name, null, child, null); + public Alias(Source source, String name, Expression child) { + this(source, name, null, child, null); } - public Alias(Location location, String name, String qualifier, Expression child) { - this(location, name, qualifier, child, null); + public Alias(Source source, String name, String qualifier, Expression child) { + this(source, name, qualifier, child, null); } - public Alias(Location location, String name, String qualifier, Expression child, ExpressionId id) { - this(location, name, qualifier, child, id, false); + public Alias(Source source, String name, String qualifier, Expression child, ExpressionId id) { + this(source, name, qualifier, child, id, false); } - public Alias(Location location, String name, String qualifier, Expression child, ExpressionId id, boolean synthetic) { - super(location, name, singletonList(child), id, synthetic); + public Alias(Source source, String name, String qualifier, Expression child, ExpressionId id, boolean synthetic) { + super(source, name, singletonList(child), id, synthetic); this.child = child; this.qualifier = qualifier; } @@ -64,7 +64,7 @@ public Expression replaceChildren(List newChildren) { if (newChildren.size() != 1) { throw new IllegalArgumentException("expected [1] child but received [" + newChildren.size() + "]"); } - return new Alias(location(), name(), qualifier, newChildren.get(0), id(), synthetic()); + return new Alias(source(), name(), qualifier, newChildren.get(0), id(), synthetic()); } public Expression child() { @@ -76,7 +76,7 @@ public String qualifier() { } @Override - public boolean nullable() { + public Nullability nullable() { return child.nullable(); } @@ -104,21 +104,21 @@ private Attribute createAttribute() { Attribute attr = Expressions.attribute(c); if (attr != null) { - return attr.clone(location(), name(), qualifier, child.nullable(), id(), synthetic()); + return attr.clone(source(), name(), qualifier, child.nullable(), id(), synthetic()); } else { // TODO: WE need to fix this fake Field - return new FieldAttribute(location(), null, name(), + return new FieldAttribute(source(), null, name(), new EsField(name(), child.dataType(), Collections.emptyMap(), true), qualifier, child.nullable(), id(), synthetic()); } } - return new UnresolvedAttribute(location(), name(), qualifier); + return new UnresolvedAttribute(source(), name(), qualifier); } @Override public String toString() { return child + " AS " + name() + "#" + id(); } -} \ No 
newline at end of file +} diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/Attribute.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/Attribute.java index 3be4b02795465..2f8b6633249d0 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/Attribute.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/Attribute.java @@ -7,8 +7,8 @@ import org.elasticsearch.xpack.sql.SqlIllegalArgumentException; import org.elasticsearch.xpack.sql.expression.gen.script.ScriptTemplate; -import org.elasticsearch.xpack.sql.tree.Location; import org.elasticsearch.xpack.sql.tree.NodeInfo; +import org.elasticsearch.xpack.sql.tree.Source; import java.util.List; import java.util.Objects; @@ -42,20 +42,20 @@ public abstract class Attribute extends NamedExpression { private final String qualifier; // can the attr be null - typically used in JOINs - private final boolean nullable; + private final Nullability nullability; - public Attribute(Location location, String name, String qualifier, ExpressionId id) { - this(location, name, qualifier, true, id); + public Attribute(Source source, String name, String qualifier, ExpressionId id) { + this(source, name, qualifier, Nullability.TRUE, id); } - public Attribute(Location location, String name, String qualifier, boolean nullable, ExpressionId id) { - this(location, name, qualifier, nullable, id, false); + public Attribute(Source source, String name, String qualifier, Nullability nullability, ExpressionId id) { + this(source, name, qualifier, nullability, id, false); } - public Attribute(Location location, String name, String qualifier, boolean nullable, ExpressionId id, boolean synthetic) { - super(location, name, emptyList(), id, synthetic); + public Attribute(Source source, String name, String qualifier, Nullability nullability, ExpressionId id, boolean synthetic) { + super(source, name, emptyList(), id, synthetic); this.qualifier = qualifier; - this.nullable = nullable; + this.nullability = nullability; } @Override @@ -77,8 +77,8 @@ public String qualifiedName() { } @Override - public boolean nullable() { - return nullable; + public Nullability nullable() { + return nullability; } @Override @@ -86,19 +86,19 @@ public AttributeSet references() { return new AttributeSet(this); } - public Attribute withLocation(Location location) { - return Objects.equals(location(), location) ? this : clone(location, name(), qualifier(), nullable(), id(), synthetic()); + public Attribute withLocation(Source source) { + return Objects.equals(source(), source) ? this : clone(source, name(), qualifier(), nullable(), id(), synthetic()); } public Attribute withQualifier(String qualifier) { - return Objects.equals(qualifier(), qualifier) ? this : clone(location(), name(), qualifier, nullable(), id(), synthetic()); + return Objects.equals(qualifier(), qualifier) ? this : clone(source(), name(), qualifier, nullable(), id(), synthetic()); } - public Attribute withNullability(boolean nullable) { - return Objects.equals(nullable(), nullable) ? this : clone(location(), name(), qualifier(), nullable, id(), synthetic()); + public Attribute withNullability(Nullability nullability) { + return Objects.equals(nullable(), nullability) ? 
this : clone(source(), name(), qualifier(), nullability, id(), synthetic()); } - protected abstract Attribute clone(Location location, String name, String qualifier, boolean nullable, ExpressionId id, + protected abstract Attribute clone(Source source, String name, String qualifier, Nullability nullability, ExpressionId id, boolean synthetic); @Override @@ -123,7 +123,7 @@ public boolean semanticEquals(Expression other) { @Override public int hashCode() { - return Objects.hash(super.hashCode(), qualifier, nullable); + return Objects.hash(super.hashCode(), qualifier, nullability); } @Override @@ -131,7 +131,7 @@ public boolean equals(Object obj) { if (super.equals(obj)) { Attribute other = (Attribute) obj; return Objects.equals(qualifier, other.qualifier) - && Objects.equals(nullable, other.nullable); + && Objects.equals(nullability, other.nullability); } return false; @@ -143,4 +143,4 @@ public String toString() { } protected abstract String label(); -} \ No newline at end of file +} diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/AttributeMap.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/AttributeMap.java index 57dc8f6152e99..b5d137617722e 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/AttributeMap.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/AttributeMap.java @@ -175,7 +175,7 @@ void addAll(AttributeMap other) { delegate.putAll(other.delegate); } - public AttributeMap substract(AttributeMap other) { + public AttributeMap subtract(AttributeMap other) { AttributeMap diff = new AttributeMap<>(); for (Entry entry : this.delegate.entrySet()) { if (!other.delegate.containsKey(entry.getKey())) { diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/AttributeSet.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/AttributeSet.java index 5d4065e5f3654..af290371dafd3 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/AttributeSet.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/AttributeSet.java @@ -57,8 +57,8 @@ void addAll(AttributeSet other) { delegate.addAll(other.delegate); } - public AttributeSet substract(AttributeSet other) { - return new AttributeSet(delegate.substract(other.delegate)); + public AttributeSet subtract(AttributeSet other) { + return new AttributeSet(delegate.subtract(other.delegate)); } public AttributeSet intersect(AttributeSet other) { diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/Exists.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/Exists.java index 6cffd46fbc86d..2363b52316c2f 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/Exists.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/Exists.java @@ -6,18 +6,18 @@ package org.elasticsearch.xpack.sql.expression; import org.elasticsearch.xpack.sql.plan.logical.LogicalPlan; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; import org.elasticsearch.xpack.sql.tree.NodeInfo; import org.elasticsearch.xpack.sql.type.DataType; public class Exists extends SubQueryExpression { - public Exists(Location location, LogicalPlan query) { - this(location, query, null); + public Exists(Source source, LogicalPlan query) { + this(source, query, null); } - public Exists(Location location, LogicalPlan 
query, ExpressionId id) { - super(location, query, id); + public Exists(Source source, LogicalPlan query, ExpressionId id) { + super(source, query, id); } @Override @@ -27,7 +27,7 @@ protected NodeInfo info() { @Override protected SubQueryExpression clone(LogicalPlan newQuery) { - return new Exists(location(), newQuery); + return new Exists(source(), newQuery); } @Override @@ -36,7 +36,7 @@ public DataType dataType() { } @Override - public boolean nullable() { - return false; + public Nullability nullable() { + return Nullability.FALSE; } } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/Expression.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/Expression.java index 204ea8b11023e..d421d2b01c098 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/Expression.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/Expression.java @@ -8,7 +8,7 @@ import org.elasticsearch.xpack.sql.SqlIllegalArgumentException; import org.elasticsearch.xpack.sql.capabilities.Resolvable; import org.elasticsearch.xpack.sql.capabilities.Resolvables; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; import org.elasticsearch.xpack.sql.tree.Node; import org.elasticsearch.xpack.sql.type.DataType; import org.elasticsearch.xpack.sql.util.StringUtils; @@ -65,8 +65,8 @@ public String message() { private Boolean lazyChildrenResolved = null; private Expression lazyCanonical = null; - public Expression(Location location, List children) { - super(location, children); + public Expression(Source source, List children) { + super(source, children); } // whether the expression can be evaluated statically (folded) or not @@ -78,8 +78,7 @@ public Object fold() { throw new SqlIllegalArgumentException("Should not fold expression"); } - // whether the expression becomes null if at least one param/input is null - public abstract boolean nullable(); + public abstract Nullability nullable(); // the references/inputs/leaves of the expression tree public AttributeSet references() { diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/Expressions.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/Expressions.java index 58b559d93a2be..967ce6cc6a6fa 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/Expressions.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/Expressions.java @@ -35,7 +35,7 @@ public enum ParamOrdinal { private Expressions() {} public static NamedExpression wrapAsNamed(Expression exp) { - return exp instanceof NamedExpression ? (NamedExpression) exp : new Alias(exp.location(), exp.nodeName(), exp); + return exp instanceof NamedExpression ? 
(NamedExpression) exp : new Alias(exp.source(), exp.nodeName(), exp); } public static List asAttributes(List named) { @@ -79,13 +79,8 @@ public static boolean match(List exps, Predicate exps) { - for (Expression exp : exps) { - if (exp.nullable()) { - return true; - } - } - return false; + public static Nullability nullable(List exps) { + return Nullability.and(exps.stream().map(Expression::nullable).toArray(Nullability[]::new)); } public static boolean foldable(List exps) { diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/FieldAttribute.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/FieldAttribute.java index d5b472c2fab03..832af029df315 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/FieldAttribute.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/FieldAttribute.java @@ -6,8 +6,8 @@ package org.elasticsearch.xpack.sql.expression; import org.elasticsearch.common.Strings; -import org.elasticsearch.xpack.sql.tree.Location; import org.elasticsearch.xpack.sql.tree.NodeInfo; +import org.elasticsearch.xpack.sql.tree.Source; import org.elasticsearch.xpack.sql.type.DataType; import org.elasticsearch.xpack.sql.type.EsField; import org.elasticsearch.xpack.sql.util.StringUtils; @@ -29,17 +29,17 @@ public class FieldAttribute extends TypedAttribute { private final String path; private final EsField field; - public FieldAttribute(Location location, String name, EsField field) { - this(location, null, name, field); + public FieldAttribute(Source source, String name, EsField field) { + this(source, null, name, field); } - public FieldAttribute(Location location, FieldAttribute parent, String name, EsField field) { - this(location, parent, name, field, null, true, null, false); + public FieldAttribute(Source source, FieldAttribute parent, String name, EsField field) { + this(source, parent, name, field, null, Nullability.TRUE, null, false); } - public FieldAttribute(Location location, FieldAttribute parent, String name, EsField field, String qualifier, - boolean nullable, ExpressionId id, boolean synthetic) { - super(location, name, field.getDataType(), qualifier, nullable, id, synthetic); + public FieldAttribute(Source source, FieldAttribute parent, String name, EsField field, String qualifier, + Nullability nullability, ExpressionId id, boolean synthetic) { + super(source, name, field.getDataType(), qualifier, nullability, id, synthetic); this.path = parent != null ? parent.name() : StringUtils.EMPTY; this.parent = parent; this.field = field; @@ -93,18 +93,19 @@ public FieldAttribute exactAttribute() { } private FieldAttribute innerField(EsField type) { - return new FieldAttribute(location(), this, name() + "." + type.getName(), type, qualifier(), nullable(), id(), synthetic()); + return new FieldAttribute(source(), this, name() + "." + type.getName(), type, qualifier(), nullable(), id(), synthetic()); } @Override protected Expression canonicalize() { - return new FieldAttribute(location(), null, "", field, null, true, id(), false); + return new FieldAttribute(source(), null, "", field, null, Nullability.TRUE, id(), false); } @Override - protected Attribute clone(Location location, String name, String qualifier, boolean nullable, ExpressionId id, boolean synthetic) { + protected Attribute clone(Source source, String name, String qualifier, Nullability nullability, + ExpressionId id, boolean synthetic) { FieldAttribute qualifiedParent = parent != null ? 
(FieldAttribute) parent.withQualifier(qualifier) : null; - return new FieldAttribute(location, qualifiedParent, name, field, qualifier, nullable, id, synthetic); + return new FieldAttribute(source, qualifiedParent, name, field, qualifier, nullability, id, synthetic); } @Override diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/LeafExpression.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/LeafExpression.java index 49325ce1c8175..16646452a31ad 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/LeafExpression.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/LeafExpression.java @@ -5,7 +5,7 @@ */ package org.elasticsearch.xpack.sql.expression; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; import java.util.List; @@ -13,8 +13,8 @@ public abstract class LeafExpression extends Expression { - protected LeafExpression(Location location) { - super(location, emptyList()); + protected LeafExpression(Source source) { + super(source, emptyList()); } @Override diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/Literal.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/Literal.java index 09a8bd08917c9..1d029f3133039 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/Literal.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/Literal.java @@ -8,8 +8,8 @@ import org.elasticsearch.xpack.sql.SqlIllegalArgumentException; import org.elasticsearch.xpack.sql.expression.gen.script.Params; import org.elasticsearch.xpack.sql.expression.gen.script.ScriptTemplate; -import org.elasticsearch.xpack.sql.tree.Location; import org.elasticsearch.xpack.sql.tree.NodeInfo; +import org.elasticsearch.xpack.sql.tree.Source; import org.elasticsearch.xpack.sql.type.DataType; import org.elasticsearch.xpack.sql.type.DataTypeConversion; import org.elasticsearch.xpack.sql.type.DataTypes; @@ -24,19 +24,19 @@ */ public class Literal extends NamedExpression { - public static final Literal TRUE = Literal.of(Location.EMPTY, Boolean.TRUE); - public static final Literal FALSE = Literal.of(Location.EMPTY, Boolean.FALSE); - public static final Literal NULL = Literal.of(Location.EMPTY, null); + public static final Literal TRUE = Literal.of(Source.EMPTY, Boolean.TRUE); + public static final Literal FALSE = Literal.of(Source.EMPTY, Boolean.FALSE); + public static final Literal NULL = Literal.of(Source.EMPTY, null); private final Object value; private final DataType dataType; - public Literal(Location location, Object value, DataType dataType) { - this(location, null, value, dataType); + public Literal(Source source, Object value, DataType dataType) { + this(source, null, value, dataType); } - public Literal(Location location, String name, Object value, DataType dataType) { - super(location, name == null ? String.valueOf(value) : name, emptyList(), null); + public Literal(Source source, String name, Object value, DataType dataType) { + super(source, name == null ? String.valueOf(value) : name, emptyList(), null); this.dataType = dataType; this.value = DataTypeConversion.convert(value, dataType); } @@ -56,8 +56,8 @@ public boolean foldable() { } @Override - public boolean nullable() { - return value == null; + public Nullability nullable() { + return value == null ? 
Nullability.TRUE : Nullability.FALSE; } @Override @@ -77,7 +77,7 @@ public Object fold() { @Override public Attribute toAttribute() { - return new LiteralAttribute(location(), name(), null, false, id(), false, dataType, this); + return new LiteralAttribute(source(), name(), null, Nullability.FALSE, id(), false, dataType, this); } @Override @@ -98,7 +98,7 @@ public AttributeSet references() { @Override protected Expression canonicalize() { String s = String.valueOf(value); - return name().equals(s) ? this : Literal.of(location(), value); + return name().equals(s) ? this : Literal.of(source(), value); } @Override @@ -129,11 +129,11 @@ public String toString() { /** * Utility method for creating 'in-line' Literals (out of values instead of expressions). */ - public static Literal of(Location loc, Object value) { + public static Literal of(Source source, Object value) { if (value instanceof Literal) { return (Literal) value; } - return new Literal(loc, value, DataTypes.fromJava(value)); + return new Literal(source, value, DataTypes.fromJava(value)); } /** @@ -161,11 +161,11 @@ public static Literal of(String name, Expression foldable) { if (name == null) { name = foldable instanceof NamedExpression ? ((NamedExpression) foldable).name() : String.valueOf(fold); } - return new Literal(foldable.location(), name, fold, foldable.dataType()); + return new Literal(foldable.source(), name, fold, foldable.dataType()); } public static Literal of(Expression source, Object value) { String name = source instanceof NamedExpression ? ((NamedExpression) source).name() : String.valueOf(value); - return new Literal(source.location(), name, value, source.dataType()); + return new Literal(source.source(), name, value, source.dataType()); } -} \ No newline at end of file +} diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/LiteralAttribute.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/LiteralAttribute.java index ef3db576b6671..6463520cf83ae 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/LiteralAttribute.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/LiteralAttribute.java @@ -6,17 +6,17 @@ package org.elasticsearch.xpack.sql.expression; import org.elasticsearch.xpack.sql.expression.gen.pipeline.Pipe; -import org.elasticsearch.xpack.sql.tree.Location; import org.elasticsearch.xpack.sql.tree.NodeInfo; +import org.elasticsearch.xpack.sql.tree.Source; import org.elasticsearch.xpack.sql.type.DataType; public class LiteralAttribute extends TypedAttribute { private final Literal literal; - public LiteralAttribute(Location location, String name, String qualifier, boolean nullable, ExpressionId id, boolean synthetic, - DataType dataType, Literal literal) { - super(location, name, dataType, qualifier, nullable, id, synthetic); + public LiteralAttribute(Source source, String name, String qualifier, Nullability nullability, ExpressionId id, boolean synthetic, + DataType dataType, Literal literal) { + super(source, name, dataType, qualifier, nullability, id, synthetic); this.literal = literal; } @@ -27,9 +27,9 @@ protected NodeInfo info() { } @Override - protected LiteralAttribute clone(Location location, String name, String qualifier, boolean nullable, + protected LiteralAttribute clone(Source source, String name, String qualifier, Nullability nullability, ExpressionId id, boolean synthetic) { - return new LiteralAttribute(location, name, qualifier, nullable, id, synthetic, dataType(), 
literal); + return new LiteralAttribute(source, name, qualifier, nullability, id, synthetic, dataType(), literal); } @Override @@ -41,4 +41,4 @@ protected String label() { public Pipe asPipe() { return literal.asPipe(); } -} \ No newline at end of file +} diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/NamedExpression.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/NamedExpression.java index 1cd4510079b92..90b46b29ccf7f 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/NamedExpression.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/NamedExpression.java @@ -9,7 +9,7 @@ import org.elasticsearch.xpack.sql.expression.gen.pipeline.ConstantInput; import org.elasticsearch.xpack.sql.expression.gen.pipeline.Pipe; import org.elasticsearch.xpack.sql.expression.gen.script.ScriptTemplate; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; import java.util.List; import java.util.Objects; @@ -26,12 +26,12 @@ public abstract class NamedExpression extends Expression { private Pipe lazyPipe = null; - public NamedExpression(Location location, String name, List children, ExpressionId id) { - this(location, name, children, id, false); + public NamedExpression(Source source, String name, List children, ExpressionId id) { + this(source, name, children, id, false); } - public NamedExpression(Location location, String name, List children, ExpressionId id, boolean synthetic) { - super(location, children); + public NamedExpression(Source source, String name, List children, ExpressionId id, boolean synthetic) { + super(source, children); this.name = name; this.id = id == null ? new ExpressionId() : id; this.synthetic = synthetic; @@ -53,14 +53,14 @@ public boolean synthetic() { public Pipe asPipe() { if (lazyPipe == null) { - lazyPipe = foldable() ? new ConstantInput(location(), this, fold()) : makePipe(); + lazyPipe = foldable() ? new ConstantInput(source(), this, fold()) : makePipe(); } return lazyPipe; } protected Pipe makePipe() { - return new AttributeInput(location(), this, toAttribute()); + return new AttributeInput(source(), this, toAttribute()); } public abstract ScriptTemplate asScript(); diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/Nullability.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/Nullability.java new file mode 100644 index 0000000000000..ac46bdf2aa92b --- /dev/null +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/Nullability.java @@ -0,0 +1,38 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.sql.expression; + +public enum Nullability { + TRUE, // Whether the expression can become null + FALSE, // The expression can never become null + UNKNOWN; // Cannot determine if the expression supports possible null folding + + /** + * Return the logical AND of a list of {@code Nullability} + *
+     * <pre>
+     *  UNKNOWN AND TRUE/FALSE/UNKNOWN = UNKNOWN
+     *  FALSE AND FALSE = FALSE
+     *  TRUE AND FALSE/TRUE = TRUE
+     * </pre>
+ */ + public static Nullability and(Nullability... nullabilities) { + Nullability value = null; + for (Nullability n: nullabilities) { + switch (n) { + case UNKNOWN: + return UNKNOWN; + case TRUE: + value = TRUE; + break; + case FALSE: + if (value == null) { + value = FALSE; + } + } + } + return value != null ? value : FALSE; + } +} diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/Order.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/Order.java index 0b62f7035ed3d..6a57c3275d4d1 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/Order.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/Order.java @@ -5,7 +5,7 @@ */ package org.elasticsearch.xpack.sql.expression; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; import org.elasticsearch.xpack.sql.tree.NodeInfo; import org.elasticsearch.xpack.sql.type.DataType; @@ -28,8 +28,8 @@ public enum NullsPosition { private final OrderDirection direction; private final NullsPosition nulls; - public Order(Location location, Expression child, OrderDirection direction, NullsPosition nulls) { - super(location, singletonList(child)); + public Order(Source source, Expression child, OrderDirection direction, NullsPosition nulls) { + super(source, singletonList(child)); this.child = child; this.direction = direction; this.nulls = nulls == null ? (direction == OrderDirection.DESC ? NullsPosition.FIRST : NullsPosition.LAST) : nulls; @@ -41,8 +41,8 @@ protected NodeInfo info() { } @Override - public boolean nullable() { - return false; + public Nullability nullable() { + return Nullability.FALSE; } @Override @@ -55,7 +55,7 @@ public Order replaceChildren(List newChildren) { if (newChildren.size() != 1) { throw new IllegalArgumentException("expected [1] child but received [" + newChildren.size() + "]"); } - return new Order(location(), newChildren.get(0), direction, nulls); + return new Order(source(), newChildren.get(0), direction, nulls); } public Expression child() { @@ -95,4 +95,4 @@ public boolean equals(Object obj) { && Objects.equals(nulls, other.nulls) && Objects.equals(child, other.child); } -} \ No newline at end of file +} diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/ScalarSubquery.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/ScalarSubquery.java index 76906a714d0cb..84693cdc79dfc 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/ScalarSubquery.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/ScalarSubquery.java @@ -6,18 +6,18 @@ package org.elasticsearch.xpack.sql.expression; import org.elasticsearch.xpack.sql.plan.logical.LogicalPlan; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; import org.elasticsearch.xpack.sql.tree.NodeInfo; import org.elasticsearch.xpack.sql.type.DataType; public class ScalarSubquery extends SubQueryExpression { - public ScalarSubquery(Location location, LogicalPlan query) { - this(location, query, null); + public ScalarSubquery(Source source, LogicalPlan query) { + this(source, query, null); } - public ScalarSubquery(Location location, LogicalPlan query, ExpressionId id) { - super(location, query, id); + public ScalarSubquery(Source source, LogicalPlan query, ExpressionId id) { + super(source, query, id); } @Override @@ -27,7 +27,7 @@ protected NodeInfo info() { 
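The `and` method just added implements a three-valued conjunction over nullability: `UNKNOWN` short-circuits everything, `TRUE` overrides `FALSE`, and `FALSE` results only when every input is `FALSE`. It is also what the reworked `Expressions.nullable(List<Expression>)` relies on to roll up the nullability of a child list. A self-contained sketch of the same truth table, mirroring the enum added above (the `main` harness is illustrative only, not part of the patch):

```java
// Standalone copy of the Nullability semantics introduced in this patch.
public enum Nullability {
    TRUE,    // the expression can become null
    FALSE,   // the expression can never become null
    UNKNOWN; // cannot be determined statically

    public static Nullability and(Nullability... nullabilities) {
        Nullability value = null;
        for (Nullability n : nullabilities) {
            switch (n) {
                case UNKNOWN:
                    return UNKNOWN;         // UNKNOWN dominates every other value
                case TRUE:
                    value = TRUE;           // TRUE wins over FALSE
                    break;
                case FALSE:
                    if (value == null) {
                        value = FALSE;      // FALSE holds only until something stronger shows up
                    }
            }
        }
        return value != null ? value : FALSE; // an empty argument list folds to FALSE
    }

    public static void main(String[] args) {
        System.out.println(and(TRUE, FALSE));   // TRUE
        System.out.println(and(FALSE, FALSE));  // FALSE
        System.out.println(and(TRUE, UNKNOWN)); // UNKNOWN
    }
}
```

Despite the name, under the "can this become null" reading this behaves as a worst-case combination: if any input may be null, the whole expression may be null, which is exactly what `Function.nullable()` needs when delegating to its children.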
@Override protected ScalarSubquery clone(LogicalPlan newQuery) { - return new ScalarSubquery(location(), newQuery); + return new ScalarSubquery(source(), newQuery); } @Override @@ -36,7 +36,7 @@ public DataType dataType() { } @Override - public boolean nullable() { - return true; + public Nullability nullable() { + return Nullability.TRUE; } } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/SubQueryExpression.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/SubQueryExpression.java index 1a046724a34ec..17ec60b6e6935 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/SubQueryExpression.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/SubQueryExpression.java @@ -6,7 +6,7 @@ package org.elasticsearch.xpack.sql.expression; import org.elasticsearch.xpack.sql.plan.logical.LogicalPlan; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; import java.util.Collections; import java.util.List; @@ -17,12 +17,12 @@ public abstract class SubQueryExpression extends Expression { private final LogicalPlan query; private final ExpressionId id; - public SubQueryExpression(Location location, LogicalPlan query) { - this(location, query, null); + public SubQueryExpression(Source source, LogicalPlan query) { + this(source, query, null); } - public SubQueryExpression(Location location, LogicalPlan query, ExpressionId id) { - super(location, Collections.emptyList()); + public SubQueryExpression(Source source, LogicalPlan query, ExpressionId id) { + super(source, Collections.emptyList()); this.query = query; this.id = id == null ? new ExpressionId() : id; } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/TypedAttribute.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/TypedAttribute.java index 1c8034f059a81..414ff330bda8f 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/TypedAttribute.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/TypedAttribute.java @@ -5,7 +5,7 @@ */ package org.elasticsearch.xpack.sql.expression; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; import org.elasticsearch.xpack.sql.type.DataType; import java.util.Objects; @@ -14,13 +14,9 @@ public abstract class TypedAttribute extends Attribute { private final DataType dataType; - protected TypedAttribute(Location location, String name, DataType dataType) { - this(location, name, dataType, null, true, null, false); - } - - protected TypedAttribute(Location location, String name, DataType dataType, String qualifier, boolean nullable, + protected TypedAttribute(Source source, String name, DataType dataType, String qualifier, Nullability nullability, ExpressionId id, boolean synthetic) { - super(location, name, qualifier, nullable, id, synthetic); + super(source, name, qualifier, nullability, id, synthetic); this.dataType = dataType; } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/UnaryExpression.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/UnaryExpression.java index 1dab263f0a4ee..69cb510704288 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/UnaryExpression.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/UnaryExpression.java @@ -5,7 +5,7 @@ */ package 
org.elasticsearch.xpack.sql.expression; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; import org.elasticsearch.xpack.sql.type.DataType; import java.util.List; @@ -17,8 +17,8 @@ public abstract class UnaryExpression extends Expression { private final Expression child; - protected UnaryExpression(Location location, Expression child) { - super(location, singletonList(child)); + protected UnaryExpression(Source source, Expression child) { + super(source, singletonList(child)); this.child = child; } @@ -41,7 +41,7 @@ public boolean foldable() { } @Override - public boolean nullable() { + public Nullability nullable() { return child.nullable(); } @@ -73,4 +73,4 @@ public boolean equals(Object obj) { UnaryExpression other = (UnaryExpression) obj; return Objects.equals(child, other.child); } -} \ No newline at end of file +} diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/UnresolvedAlias.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/UnresolvedAlias.java index eaa6aeb5afa13..178c4d896959f 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/UnresolvedAlias.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/UnresolvedAlias.java @@ -6,7 +6,7 @@ package org.elasticsearch.xpack.sql.expression; import org.elasticsearch.xpack.sql.capabilities.UnresolvedException; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; import org.elasticsearch.xpack.sql.tree.NodeInfo; import java.util.Objects; @@ -18,8 +18,8 @@ public class UnresolvedAlias extends UnresolvedNamedExpression { private final Expression child; - public UnresolvedAlias(Location location, Expression child) { - super(location, singletonList(child)); + public UnresolvedAlias(Source source, Expression child) { + super(source, singletonList(child)); this.child = child; } @@ -33,7 +33,7 @@ public Expression replaceChildren(List newChildren) { if (newChildren.size() != 1) { throw new IllegalArgumentException("expected [1] child but received [" + newChildren.size() + "]"); } - return new UnresolvedAlias(location(), newChildren.get(0)); + return new UnresolvedAlias(source(), newChildren.get(0)); } public Expression child() { @@ -46,7 +46,7 @@ public String unresolvedMessage() { } @Override - public boolean nullable() { + public Nullability nullable() { throw new UnresolvedException("nullable", this); } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/UnresolvedAttribute.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/UnresolvedAttribute.java index 0a31b74d636ee..476c69fea0951 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/UnresolvedAttribute.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/UnresolvedAttribute.java @@ -7,8 +7,8 @@ import org.elasticsearch.xpack.sql.capabilities.Unresolvable; import org.elasticsearch.xpack.sql.capabilities.UnresolvedException; -import org.elasticsearch.xpack.sql.tree.Location; import org.elasticsearch.xpack.sql.tree.NodeInfo; +import org.elasticsearch.xpack.sql.tree.Source; import org.elasticsearch.xpack.sql.type.DataType; import org.elasticsearch.xpack.sql.util.CollectionUtils; @@ -25,21 +25,21 @@ public class UnresolvedAttribute extends Attribute implements Unresolvable { private final boolean customMessage; private final Object resolutionMetadata; - public 
UnresolvedAttribute(Location location, String name) { - this(location, name, null); + public UnresolvedAttribute(Source source, String name) { + this(source, name, null); } - public UnresolvedAttribute(Location location, String name, String qualifier) { - this(location, name, qualifier, null); + public UnresolvedAttribute(Source source, String name, String qualifier) { + this(source, name, qualifier, null); } - public UnresolvedAttribute(Location location, String name, String qualifier, String unresolvedMessage) { - this(location, name, qualifier, null, unresolvedMessage, null); + public UnresolvedAttribute(Source source, String name, String qualifier, String unresolvedMessage) { + this(source, name, qualifier, null, unresolvedMessage, null); } - public UnresolvedAttribute(Location location, String name, String qualifier, ExpressionId id, String unresolvedMessage, + public UnresolvedAttribute(Source source, String name, String qualifier, ExpressionId id, String unresolvedMessage, Object resolutionMetadata) { - super(location, name, qualifier, id); + super(source, name, qualifier, id); this.customMessage = unresolvedMessage != null; this.unresolvedMsg = unresolvedMessage == null ? errorMessage(qualifiedName(), null) : unresolvedMessage; this.resolutionMetadata = resolutionMetadata; @@ -65,12 +65,13 @@ public boolean resolved() { } @Override - protected Attribute clone(Location location, String name, String qualifier, boolean nullable, ExpressionId id, boolean synthetic) { + protected Attribute clone(Source source, String name, String qualifier, Nullability nullability, + ExpressionId id, boolean synthetic) { return this; } public UnresolvedAttribute withUnresolvedMessage(String unresolvedMsg) { - return new UnresolvedAttribute(location(), name(), qualifier(), id(), unresolvedMsg, resolutionMetadata()); + return new UnresolvedAttribute(source(), name(), qualifier(), id(), unresolvedMsg, resolutionMetadata()); } @Override diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/UnresolvedNamedExpression.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/UnresolvedNamedExpression.java index a36e534fe3f9a..4d68d32f37434 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/UnresolvedNamedExpression.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/UnresolvedNamedExpression.java @@ -8,15 +8,15 @@ import org.elasticsearch.xpack.sql.capabilities.Unresolvable; import org.elasticsearch.xpack.sql.capabilities.UnresolvedException; import org.elasticsearch.xpack.sql.expression.gen.script.ScriptTemplate; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; import org.elasticsearch.xpack.sql.type.DataType; import java.util.List; abstract class UnresolvedNamedExpression extends NamedExpression implements Unresolvable { - UnresolvedNamedExpression(Location location, List children) { - super(location, "", children, new ExpressionId()); + UnresolvedNamedExpression(Source source, List children) { + super(source, "", children, new ExpressionId()); } @Override diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/UnresolvedStar.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/UnresolvedStar.java index c9ef08eab5aee..4b5a6dfa53758 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/UnresolvedStar.java +++ 
b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/UnresolvedStar.java @@ -6,7 +6,7 @@ package org.elasticsearch.xpack.sql.expression; import org.elasticsearch.xpack.sql.capabilities.UnresolvedException; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; import org.elasticsearch.xpack.sql.tree.NodeInfo; import java.util.Objects; @@ -19,8 +19,8 @@ public class UnresolvedStar extends UnresolvedNamedExpression { // typically used for nested fields or inner/dotted fields private final UnresolvedAttribute qualifier; - public UnresolvedStar(Location location, UnresolvedAttribute qualifier) { - super(location, emptyList()); + public UnresolvedStar(Source source, UnresolvedAttribute qualifier) { + super(source, emptyList()); this.qualifier = qualifier; } @@ -35,7 +35,7 @@ public Expression replaceChildren(List newChildren) { } @Override - public boolean nullable() { + public Nullability nullable() { throw new UnresolvedException("nullable", this); } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/Function.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/Function.java index 000a9c097c98f..f63145f6a25b5 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/Function.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/Function.java @@ -9,7 +9,8 @@ import org.elasticsearch.xpack.sql.expression.ExpressionId; import org.elasticsearch.xpack.sql.expression.Expressions; import org.elasticsearch.xpack.sql.expression.NamedExpression; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.expression.Nullability; +import org.elasticsearch.xpack.sql.tree.Source; import org.elasticsearch.xpack.sql.util.StringUtils; import java.util.List; @@ -23,14 +24,14 @@ public abstract class Function extends NamedExpression { private final String functionName, name; - protected Function(Location location, List children) { - this(location, children, null, false); + protected Function(Source source, List children) { + this(source, children, null, false); } // TODO: Functions supporting distinct should add a dedicated constructor Location, List, boolean - protected Function(Location location, List children, ExpressionId id, boolean synthetic) { + protected Function(Source source, List children, ExpressionId id, boolean synthetic) { // cannot detect name yet so override the name - super(location, null, children, id, synthetic); + super(source, null, children, id, synthetic); functionName = StringUtils.camelCaseToUnderscore(getClass().getSimpleName()); name = functionName() + functionArgs(); } @@ -45,7 +46,7 @@ public String name() { } @Override - public boolean nullable() { + public Nullability nullable() { return Expressions.nullable(children()); } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/FunctionAttribute.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/FunctionAttribute.java index e61549c543c28..36ff097bdae8f 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/FunctionAttribute.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/FunctionAttribute.java @@ -6,8 +6,9 @@ package org.elasticsearch.xpack.sql.expression.function; import org.elasticsearch.xpack.sql.expression.ExpressionId; +import 
org.elasticsearch.xpack.sql.expression.Nullability; import org.elasticsearch.xpack.sql.expression.TypedAttribute; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; import org.elasticsearch.xpack.sql.type.DataType; import java.util.Objects; @@ -16,9 +17,9 @@ public abstract class FunctionAttribute extends TypedAttribute { private final String functionId; - protected FunctionAttribute(Location location, String name, DataType dataType, String qualifier, boolean nullable, ExpressionId id, - boolean synthetic, String functionId) { - super(location, name, dataType, qualifier, nullable, id, synthetic); + protected FunctionAttribute(Source source, String name, DataType dataType, String qualifier, Nullability nullability, + ExpressionId id, boolean synthetic, String functionId) { + super(source, name, dataType, qualifier, nullability, id, synthetic); this.functionId = functionId; } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/FunctionRegistry.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/FunctionRegistry.java index d6faf167322ad..060808e4cccd0 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/FunctionRegistry.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/FunctionRegistry.java @@ -96,7 +96,7 @@ import org.elasticsearch.xpack.sql.expression.predicate.operator.arithmetic.Mod; import org.elasticsearch.xpack.sql.parser.ParsingException; import org.elasticsearch.xpack.sql.session.Configuration; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; import org.elasticsearch.xpack.sql.type.DataType; import org.elasticsearch.xpack.sql.util.Check; @@ -305,15 +305,15 @@ public Collection listFunctions(String pattern) { * is not aware of time zone and does not support {@code DISTINCT}. */ static FunctionDefinition def(Class function, - java.util.function.Function ctorRef, String... names) { - FunctionBuilder builder = (location, children, distinct, cfg) -> { + java.util.function.Function ctorRef, String... names) { + FunctionBuilder builder = (source, children, distinct, cfg) -> { if (false == children.isEmpty()) { throw new IllegalArgumentException("expects no arguments"); } if (distinct) { throw new IllegalArgumentException("does not support DISTINCT yet it was specified"); } - return ctorRef.apply(location); + return ctorRef.apply(source); }; return def(function, builder, false, names); } @@ -326,20 +326,20 @@ static FunctionDefinition def(Class function, @SuppressWarnings("overloads") static FunctionDefinition def(Class function, ConfigurationAwareFunctionBuilder ctorRef, String... 
names) { - FunctionBuilder builder = (location, children, distinct, cfg) -> { + FunctionBuilder builder = (source, children, distinct, cfg) -> { if (false == children.isEmpty()) { throw new IllegalArgumentException("expects no arguments"); } if (distinct) { throw new IllegalArgumentException("does not support DISTINCT yet it was specified"); } - return ctorRef.build(location, cfg); + return ctorRef.build(source, cfg); }; return def(function, builder, false, names); } interface ConfigurationAwareFunctionBuilder { - T build(Location location, Configuration configuration); + T build(Source source, Configuration configuration); } /** @@ -350,7 +350,7 @@ interface ConfigurationAwareFunctionBuilder { @SuppressWarnings("overloads") static FunctionDefinition def(Class function, UnaryConfigurationAwareFunctionBuilder ctorRef, String... names) { - FunctionBuilder builder = (location, children, distinct, cfg) -> { + FunctionBuilder builder = (source, children, distinct, cfg) -> { if (children.size() > 1) { throw new IllegalArgumentException("expects exactly one argument"); } @@ -358,13 +358,13 @@ static FunctionDefinition def(Class function, throw new IllegalArgumentException("does not support DISTINCT yet it was specified"); } Expression ex = children.size() == 1 ? children.get(0) : null; - return ctorRef.build(location, ex, cfg); + return ctorRef.build(source, ex, cfg); }; return def(function, builder, false, names); } interface UnaryConfigurationAwareFunctionBuilder { - T build(Location location, Expression exp, Configuration configuration); + T build(Source source, Expression exp, Configuration configuration); } @@ -374,15 +374,15 @@ interface UnaryConfigurationAwareFunctionBuilder { */ @SuppressWarnings("overloads") // These are ambiguous if you aren't using ctor references but we always do static FunctionDefinition def(Class function, - BiFunction ctorRef, String... names) { - FunctionBuilder builder = (location, children, distinct, cfg) -> { + BiFunction ctorRef, String... names) { + FunctionBuilder builder = (source, children, distinct, cfg) -> { if (children.size() != 1) { throw new IllegalArgumentException("expects exactly one argument"); } if (distinct) { throw new IllegalArgumentException("does not support DISTINCT yet it was specified"); } - return ctorRef.apply(location, children.get(0)); + return ctorRef.apply(source, children.get(0)); }; return def(function, builder, false, names); } @@ -394,17 +394,17 @@ static FunctionDefinition def(Class function, @SuppressWarnings("overloads") // These are ambiguous if you aren't using ctor references but we always do static FunctionDefinition def(Class function, MultiFunctionBuilder ctorRef, String... names) { - FunctionBuilder builder = (location, children, distinct, cfg) -> { + FunctionBuilder builder = (source, children, distinct, cfg) -> { if (distinct) { throw new IllegalArgumentException("does not support DISTINCT yet it was specified"); } - return ctorRef.build(location, children); + return ctorRef.build(source, children); }; return def(function, builder, false, names); } interface MultiFunctionBuilder { - T build(Location location, List children); + T build(Source source, List children); } /** @@ -414,17 +414,17 @@ interface MultiFunctionBuilder { @SuppressWarnings("overloads") // These are ambiguous if you aren't using ctor references but we always do static FunctionDefinition def(Class function, DistinctAwareUnaryFunctionBuilder ctorRef, String... 
names) { - FunctionBuilder builder = (location, children, distinct, cfg) -> { + FunctionBuilder builder = (source, children, distinct, cfg) -> { if (children.size() != 1) { throw new IllegalArgumentException("expects exactly one argument"); } - return ctorRef.build(location, children.get(0), distinct); + return ctorRef.build(source, children.get(0), distinct); }; return def(function, builder, false, names); } interface DistinctAwareUnaryFunctionBuilder { - T build(Location location, Expression target, boolean distinct); + T build(Source source, Expression target, boolean distinct); } /** @@ -434,20 +434,20 @@ interface DistinctAwareUnaryFunctionBuilder { @SuppressWarnings("overloads") // These are ambiguous if you aren't using ctor references but we always do static FunctionDefinition def(Class function, DatetimeUnaryFunctionBuilder ctorRef, String... names) { - FunctionBuilder builder = (location, children, distinct, cfg) -> { + FunctionBuilder builder = (source, children, distinct, cfg) -> { if (children.size() != 1) { throw new IllegalArgumentException("expects exactly one argument"); } if (distinct) { throw new IllegalArgumentException("does not support DISTINCT yet it was specified"); } - return ctorRef.build(location, children.get(0), cfg.zoneId()); + return ctorRef.build(source, children.get(0), cfg.zoneId()); }; return def(function, builder, true, names); } interface DatetimeUnaryFunctionBuilder { - T build(Location location, Expression target, ZoneId zi); + T build(Source source, Expression target, ZoneId zi); } /** @@ -456,20 +456,20 @@ interface DatetimeUnaryFunctionBuilder { */ @SuppressWarnings("overloads") // These are ambiguous if you aren't using ctor references but we always do static FunctionDefinition def(Class function, DatetimeBinaryFunctionBuilder ctorRef, String... names) { - FunctionBuilder builder = (location, children, distinct, cfg) -> { + FunctionBuilder builder = (source, children, distinct, cfg) -> { if (children.size() != 2) { throw new IllegalArgumentException("expects exactly two arguments"); } if (distinct) { throw new IllegalArgumentException("does not support DISTINCT yet it was specified"); } - return ctorRef.build(location, children.get(0), children.get(1), cfg.zoneId()); + return ctorRef.build(source, children.get(0), children.get(1), cfg.zoneId()); }; return def(function, builder, false, names); } interface DatetimeBinaryFunctionBuilder { - T build(Location location, Expression lhs, Expression rhs, ZoneId zi); + T build(Source source, Expression lhs, Expression rhs, ZoneId zi); } /** @@ -479,7 +479,7 @@ interface DatetimeBinaryFunctionBuilder { @SuppressWarnings("overloads") // These are ambiguous if you aren't using ctor references but we always do static FunctionDefinition def(Class function, BinaryFunctionBuilder ctorRef, String... names) { - FunctionBuilder builder = (location, children, distinct, cfg) -> { + FunctionBuilder builder = (source, children, distinct, cfg) -> { boolean isBinaryOptionalParamFunction = function.isAssignableFrom(Round.class) || function.isAssignableFrom(Truncate.class); if (isBinaryOptionalParamFunction && (children.size() > 2 || children.size() < 1)) { throw new IllegalArgumentException("expects one or two arguments"); @@ -490,13 +490,13 @@ static FunctionDefinition def(Class function, if (distinct) { throw new IllegalArgumentException("does not support DISTINCT yet it was specified"); } - return ctorRef.build(location, children.get(0), children.size() == 2 ? 
children.get(1) : null); + return ctorRef.build(source, children.get(0), children.size() == 2 ? children.get(1) : null); }; return def(function, builder, false, names); } interface BinaryFunctionBuilder { - T build(Location location, Expression lhs, Expression rhs); + T build(Source source, Expression lhs, Expression rhs); } /** @@ -512,23 +512,22 @@ private static FunctionDefinition def(Class function, Functi List aliases = Arrays.asList(names).subList(1, names.length); FunctionDefinition.Builder realBuilder = (uf, distinct, cfg) -> { try { - return builder.build(uf.location(), uf.children(), distinct, cfg); + return builder.build(uf.source(), uf.children(), distinct, cfg); } catch (IllegalArgumentException e) { - throw new ParsingException("error building [" + primaryName + "]: " + e.getMessage(), e, - uf.location().getLineNumber(), uf.location().getColumnNumber()); + throw new ParsingException(uf.source(), "error building [" + primaryName + "]: " + e.getMessage(), e); } }; return new FunctionDefinition(primaryName, unmodifiableList(aliases), function, datetime, realBuilder); } private interface FunctionBuilder { - Function build(Location location, List children, boolean distinct, Configuration cfg); + Function build(Source source, List children, boolean distinct, Configuration cfg); } @SuppressWarnings("overloads") // These are ambiguous if you aren't using ctor references but we always do static FunctionDefinition def(Class function, ThreeParametersFunctionBuilder ctorRef, String... names) { - FunctionBuilder builder = (location, children, distinct, cfg) -> { + FunctionBuilder builder = (source, children, distinct, cfg) -> { boolean isLocateFunction = function.isAssignableFrom(Locate.class); if (isLocateFunction && (children.size() > 3 || children.size() < 2)) { throw new IllegalArgumentException("expects two or three arguments"); @@ -538,32 +537,32 @@ static FunctionDefinition def(Class function, if (distinct) { throw new IllegalArgumentException("does not support DISTINCT yet it was specified"); } - return ctorRef.build(location, children.get(0), children.get(1), children.size() == 3 ? children.get(2) : null); + return ctorRef.build(source, children.get(0), children.get(1), children.size() == 3 ? children.get(2) : null); }; return def(function, builder, false, names); } interface ThreeParametersFunctionBuilder { - T build(Location location, Expression source, Expression exp1, Expression exp2); + T build(Source source, Expression src, Expression exp1, Expression exp2); } @SuppressWarnings("overloads") // These are ambiguous if you aren't using ctor references but we always do static FunctionDefinition def(Class function, FourParametersFunctionBuilder ctorRef, String... 
names) { - FunctionBuilder builder = (location, children, distinct, cfg) -> { + FunctionBuilder builder = (source, children, distinct, cfg) -> { if (children.size() != 4) { throw new IllegalArgumentException("expects exactly four arguments"); } if (distinct) { throw new IllegalArgumentException("does not support DISTINCT yet it was specified"); } - return ctorRef.build(location, children.get(0), children.get(1), children.get(2), children.get(3)); + return ctorRef.build(source, children.get(0), children.get(1), children.get(2), children.get(3)); }; return def(function, builder, false, names); } interface FourParametersFunctionBuilder { - T build(Location location, Expression source, Expression exp1, Expression exp2, Expression exp3); + T build(Source source, Expression src, Expression exp1, Expression exp2, Expression exp3); } /** @@ -576,12 +575,12 @@ interface FourParametersFunctionBuilder { private static FunctionDefinition def(Class function, CastFunctionBuilder ctorRef, String... names) { - FunctionBuilder builder = (location, children, distinct, cfg) -> - ctorRef.build(location, children.get(0), children.get(0).dataType()); + FunctionBuilder builder = (source, children, distinct, cfg) -> + ctorRef.build(source, children.get(0), children.get(0).dataType()); return def(function, builder, false, names); } private interface CastFunctionBuilder { - T build(Location location, Expression expression, DataType dataType); + T build(Source source, Expression expression, DataType dataType); } } \ No newline at end of file diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/Score.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/Score.java index e165d4388a06a..23363ff6cbddd 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/Score.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/Score.java @@ -10,7 +10,7 @@ import org.elasticsearch.xpack.sql.expression.Expression; import org.elasticsearch.xpack.sql.expression.gen.pipeline.Pipe; import org.elasticsearch.xpack.sql.expression.gen.script.ScriptTemplate; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; import org.elasticsearch.xpack.sql.tree.NodeInfo; import org.elasticsearch.xpack.sql.type.DataType; @@ -24,8 +24,8 @@ * with other function. 
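Every `def(...)` overload in `FunctionRegistry` follows the same shape after this change: wrap a constructor reference in a `FunctionBuilder` lambda that validates the argument count and the `DISTINCT` flag, then invoke the constructor with the `Source` (previously the `Location`) threaded through. A cut-down sketch of that pattern; `Source`, `Expression`, `Function` and `unaryDef` here are simplified stand-ins, not the real SQL plugin classes:

```java
import java.util.List;
import java.util.function.BiFunction;

// Toy stand-ins for the SQL plugin types, just to show the builder pattern.
class RegistrySketch {
    static final class Source {}
    interface Expression {}
    interface Function {}

    // Mirrors FunctionBuilder: create a function from a source position and its children.
    interface FunctionBuilder {
        Function build(Source source, List<Expression> children, boolean distinct);
    }

    // Mirrors the unary def(...) overload: validate, then delegate to the ctor reference.
    static FunctionBuilder unaryDef(BiFunction<Source, Expression, Function> ctorRef) {
        return (source, children, distinct) -> {
            if (children.size() != 1) {
                throw new IllegalArgumentException("expects exactly one argument");
            }
            if (distinct) {
                throw new IllegalArgumentException("does not support DISTINCT yet it was specified");
            }
            return ctorRef.apply(source, children.get(0));
        };
    }
}
```

In the real registry the builder additionally receives the session `Configuration`, and any `IllegalArgumentException` is rewrapped as a `ParsingException` that now carries the `Source` directly instead of extracting line and column numbers from it, as the hunk above shows.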
*/ public class Score extends Function { - public Score(Location location) { - super(location, emptyList()); + public Score(Source source) { + super(source, emptyList()); } @Override @@ -45,7 +45,7 @@ public DataType dataType() { @Override public Attribute toAttribute() { - return new ScoreAttribute(location()); + return new ScoreAttribute(source()); } @Override @@ -54,12 +54,12 @@ public boolean equals(Object obj) { return false; } Score other = (Score) obj; - return location().equals(other.location()); + return source().equals(other.source()); } @Override public int hashCode() { - return location().hashCode(); + return source().hashCode(); } @Override diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/ScoreAttribute.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/ScoreAttribute.java index 622f0755ef54b..bcd0aab16c637 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/ScoreAttribute.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/ScoreAttribute.java @@ -7,12 +7,15 @@ import org.elasticsearch.xpack.sql.expression.Attribute; import org.elasticsearch.xpack.sql.expression.ExpressionId; +import org.elasticsearch.xpack.sql.expression.Nullability; import org.elasticsearch.xpack.sql.expression.gen.pipeline.Pipe; import org.elasticsearch.xpack.sql.expression.gen.pipeline.ScorePipe; -import org.elasticsearch.xpack.sql.tree.Location; import org.elasticsearch.xpack.sql.tree.NodeInfo; +import org.elasticsearch.xpack.sql.tree.Source; import org.elasticsearch.xpack.sql.type.DataType; +import static org.elasticsearch.xpack.sql.expression.Nullability.FALSE; + /** * {@link Attribute} that represents Elasticsearch's {@code _score}. */ @@ -20,16 +23,16 @@ public class ScoreAttribute extends FunctionAttribute { /** * Constructor for normal use. 
*/ - public ScoreAttribute(Location location) { - this(location, "SCORE()", DataType.FLOAT, null, false, null, false); + public ScoreAttribute(Source source) { + this(source, "SCORE()", DataType.FLOAT, null, FALSE, null, false); } /** * Constructor for {@link #clone()} */ - private ScoreAttribute(Location location, String name, DataType dataType, String qualifier, boolean nullable, ExpressionId id, + private ScoreAttribute(Source source, String name, DataType dataType, String qualifier, Nullability nullability, ExpressionId id, boolean synthetic) { - super(location, name, dataType, qualifier, nullable, id, synthetic, "SCORE"); + super(source, name, dataType, qualifier, nullability, id, synthetic, "SCORE"); } @Override @@ -38,13 +41,14 @@ protected NodeInfo info() { } @Override - protected Attribute clone(Location location, String name, String qualifier, boolean nullable, ExpressionId id, boolean synthetic) { - return new ScoreAttribute(location, name, dataType(), qualifier, nullable, id, synthetic); + protected Attribute clone(Source source, String name, String qualifier, Nullability nullability, + ExpressionId id, boolean synthetic) { + return new ScoreAttribute(source, name, dataType(), qualifier, nullability, id, synthetic); } @Override protected Pipe makePipe() { - return new ScorePipe(location(), this); + return new ScorePipe(source(), this); } @Override diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/UnresolvedFunction.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/UnresolvedFunction.java index fa1be78a594fb..82b4d95a9b47b 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/UnresolvedFunction.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/UnresolvedFunction.java @@ -10,10 +10,12 @@ import org.elasticsearch.xpack.sql.expression.Attribute; import org.elasticsearch.xpack.sql.expression.Expression; import org.elasticsearch.xpack.sql.expression.Literal; +import org.elasticsearch.xpack.sql.expression.Nullability; +import org.elasticsearch.xpack.sql.expression.function.aggregate.Count; import org.elasticsearch.xpack.sql.expression.gen.script.ScriptTemplate; import org.elasticsearch.xpack.sql.session.Configuration; -import org.elasticsearch.xpack.sql.tree.Location; import org.elasticsearch.xpack.sql.tree.NodeInfo; +import org.elasticsearch.xpack.sql.tree.Source; import org.elasticsearch.xpack.sql.type.DataType; import org.elasticsearch.xpack.sql.util.StringUtils; @@ -40,8 +42,8 @@ public class UnresolvedFunction extends Function implements Unresolvable { */ private final boolean analyzed; - public UnresolvedFunction(Location location, String name, ResolutionType resolutionType, List children) { - this(location, name, resolutionType, children, false, null); + public UnresolvedFunction(Source source, String name, ResolutionType resolutionType, List children) { + this(source, name, resolutionType, children, false, null); } /** @@ -49,9 +51,9 @@ public UnresolvedFunction(Location location, String name, ResolutionType resolut * 'did you mean') instead of the default one. 
* @see #withMessage(String) */ - UnresolvedFunction(Location location, String name, ResolutionType resolutionType, List children, + UnresolvedFunction(Source source, String name, ResolutionType resolutionType, List children, boolean analyzed, String unresolvedMessage) { - super(location, children); + super(source, children); this.name = name; this.resolutionType = resolutionType; this.analyzed = analyzed; @@ -66,11 +68,11 @@ protected NodeInfo info() { @Override public Expression replaceChildren(List newChildren) { - return new UnresolvedFunction(location(), name, resolutionType, newChildren, analyzed, unresolvedMsg); + return new UnresolvedFunction(source(), name, resolutionType, newChildren, analyzed, unresolvedMsg); } public UnresolvedFunction withMessage(String message) { - return new UnresolvedFunction(location(), name(), resolutionType, children(), true, message); + return new UnresolvedFunction(source(), name(), resolutionType, children(), true, message); } public UnresolvedFunction preprocessStar() { @@ -128,6 +130,14 @@ ResolutionType resolutionType() { public boolean analyzed() { return analyzed; } + + public boolean sameAs(Count count) { + if (this.resolutionType == ResolutionType.DISTINCT && count.distinct() + || this.resolutionType == ResolutionType.STANDARD && count.distinct() == false) { + return true; + } + return false; + } @Override public DataType dataType() { @@ -135,7 +145,7 @@ public DataType dataType() { } @Override - public boolean nullable() { + public Nullability nullable() { throw new UnresolvedException("nullable", this); } @@ -191,8 +201,8 @@ public UnresolvedFunction preprocessStar(UnresolvedFunction uf) { // TODO: might be removed // dedicated count optimization if (uf.name.toUpperCase(Locale.ROOT).equals("COUNT")) { - return new UnresolvedFunction(uf.location(), uf.name(), uf.resolutionType, - singletonList(Literal.of(uf.arguments().get(0).location(), Integer.valueOf(1)))); + return new UnresolvedFunction(uf.source(), uf.name(), uf.resolutionType, + singletonList(Literal.of(uf.arguments().get(0).source(), Integer.valueOf(1)))); } return uf; } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/aggregate/AggregateFunction.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/aggregate/AggregateFunction.java index e85824beaff6f..3b646a68d21c7 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/aggregate/AggregateFunction.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/aggregate/AggregateFunction.java @@ -11,7 +11,7 @@ import org.elasticsearch.xpack.sql.expression.gen.pipeline.AggNameInput; import org.elasticsearch.xpack.sql.expression.gen.pipeline.Pipe; import org.elasticsearch.xpack.sql.expression.gen.script.ScriptTemplate; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; import org.elasticsearch.xpack.sql.util.CollectionUtils; import java.util.List; @@ -30,12 +30,12 @@ public abstract class AggregateFunction extends Function { private AggregateFunctionAttribute lazyAttribute; - protected AggregateFunction(Location location, Expression field) { - this(location, field, emptyList()); + protected AggregateFunction(Source source, Expression field) { + this(source, field, emptyList()); } - protected AggregateFunction(Location location, Expression field, List parameters) { - super(location, CollectionUtils.combine(singletonList(field), parameters)); + 
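/*
 * Two mechanical changes thread through every class in this diff: the Location type
 * becomes Source (with location() accessors turning into source()), and the boolean
 * "nullable" flag becomes a dedicated Nullability value. Only the TRUE and FALSE
 * constants are visible in these hunks; a minimal sketch of such a type, where the
 * third constant is an assumption rather than something shown in the diff:
 *
 *     public enum Nullability {
 *         TRUE,    // the expression can evaluate to null
 *         FALSE,   // the expression never evaluates to null
 *         UNKNOWN  // not statically decidable (assumed)
 *     }
 *
 * UnresolvedFunction.nullable() above keeps throwing UnresolvedException: before the
 * function is resolved there is no sensible nullability to report.
 */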
protected AggregateFunction(Source source, Expression field, List parameters) { + super(source, CollectionUtils.combine(singletonList(field), parameters)); this.field = field; this.parameters = parameters; } @@ -52,7 +52,7 @@ public List parameters() { public AggregateFunctionAttribute toAttribute() { if (lazyAttribute == null) { // this is highly correlated with QueryFolder$FoldAggregate#addFunction (regarding the function name within the querydsl) - lazyAttribute = new AggregateFunctionAttribute(location(), name(), dataType(), id(), functionId(), null); + lazyAttribute = new AggregateFunctionAttribute(source(), name(), dataType(), id(), functionId(), null); } return lazyAttribute; } @@ -60,7 +60,7 @@ public AggregateFunctionAttribute toAttribute() { @Override protected Pipe makePipe() { // unresolved AggNameInput (should always get replaced by the folder) - return new AggNameInput(location(), this, name()); + return new AggNameInput(source(), this, name()); } @Override diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/aggregate/AggregateFunctionAttribute.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/aggregate/AggregateFunctionAttribute.java index ab0ae9bfe8b1e..b50b268844c7b 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/aggregate/AggregateFunctionAttribute.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/aggregate/AggregateFunctionAttribute.java @@ -8,9 +8,10 @@ import org.elasticsearch.xpack.sql.expression.Attribute; import org.elasticsearch.xpack.sql.expression.Expression; import org.elasticsearch.xpack.sql.expression.ExpressionId; +import org.elasticsearch.xpack.sql.expression.Nullability; import org.elasticsearch.xpack.sql.expression.function.FunctionAttribute; -import org.elasticsearch.xpack.sql.tree.Location; import org.elasticsearch.xpack.sql.tree.NodeInfo; +import org.elasticsearch.xpack.sql.tree.Source; import org.elasticsearch.xpack.sql.type.DataType; import java.util.Objects; @@ -19,14 +20,14 @@ public class AggregateFunctionAttribute extends FunctionAttribute { private final String propertyPath; - AggregateFunctionAttribute(Location location, String name, DataType dataType, ExpressionId id, + AggregateFunctionAttribute(Source source, String name, DataType dataType, ExpressionId id, String functionId, String propertyPath) { - this(location, name, dataType, null, false, id, false, functionId, propertyPath); + this(source, name, dataType, null, Nullability.FALSE, id, false, functionId, propertyPath); } - public AggregateFunctionAttribute(Location location, String name, DataType dataType, String qualifier, - boolean nullable, ExpressionId id, boolean synthetic, String functionId, String propertyPath) { - super(location, name, dataType, qualifier, nullable, id, synthetic, functionId); + public AggregateFunctionAttribute(Source source, String name, DataType dataType, String qualifier, + Nullability nullability, ExpressionId id, boolean synthetic, String functionId, String propertyPath) { + super(source, name, dataType, qualifier, nullability, id, synthetic, functionId); this.propertyPath = propertyPath; } @@ -42,18 +43,18 @@ public String propertyPath() { @Override protected Expression canonicalize() { - return new AggregateFunctionAttribute(location(), "", dataType(), null, true, id(), false, "", null); + return new AggregateFunctionAttribute(source(), "", dataType(), null, Nullability.TRUE, id(), false, "", 
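/*
 * Note on canonicalize() here: it deliberately blanks the identity-carrying pieces of
 * the attribute -- empty name, no qualifier, Nullability.TRUE, empty functionId, no
 * property path -- so that attributes differing only in those respects reduce to the
 * same canonical form; effectively only the ExpressionId and DataType survive.
 */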
null); } @Override - protected Attribute clone(Location location, String name, String qualifier, boolean nullable, ExpressionId id, boolean synthetic) { + protected Attribute clone(Source source, String name, String qualifier, Nullability nullability, ExpressionId id, boolean synthetic) { // this is highly correlated with QueryFolder$FoldAggregate#addFunction (regarding the function name within the querydsl) // that is the functionId is actually derived from the expression id to easily track it across contexts - return new AggregateFunctionAttribute(location, name, dataType(), qualifier, nullable, id, synthetic, functionId(), propertyPath); + return new AggregateFunctionAttribute(source, name, dataType(), qualifier, nullability, id, synthetic, functionId(), propertyPath); } public AggregateFunctionAttribute withFunctionId(String functionId, String propertyPath) { - return new AggregateFunctionAttribute(location(), name(), dataType(), qualifier(), nullable(), + return new AggregateFunctionAttribute(source(), name(), dataType(), qualifier(), nullable(), id(), synthetic(), functionId, propertyPath); } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/aggregate/Avg.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/aggregate/Avg.java index 0dfed0d5c0057..b74be4810ed60 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/aggregate/Avg.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/aggregate/Avg.java @@ -6,7 +6,7 @@ package org.elasticsearch.xpack.sql.expression.function.aggregate; import org.elasticsearch.xpack.sql.expression.Expression; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; import org.elasticsearch.xpack.sql.tree.NodeInfo; import org.elasticsearch.xpack.sql.type.DataType; @@ -17,8 +17,8 @@ */ public class Avg extends NumericAggregate implements EnclosedAgg { - public Avg(Location location, Expression field) { - super(location, field); + public Avg(Source source, Expression field) { + super(source, field); } @Override @@ -31,7 +31,7 @@ public Avg replaceChildren(List newChildren) { if (newChildren.size() != 1) { throw new IllegalArgumentException("expected [1] child but received [" + newChildren.size() + "]"); } - return new Avg(location(), newChildren.get(0)); + return new Avg(source(), newChildren.get(0)); } @Override diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/aggregate/CompoundNumericAggregate.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/aggregate/CompoundNumericAggregate.java index 3ff2ae0c44e09..9585b55ce3b8d 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/aggregate/CompoundNumericAggregate.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/aggregate/CompoundNumericAggregate.java @@ -6,7 +6,7 @@ package org.elasticsearch.xpack.sql.expression.function.aggregate; import org.elasticsearch.xpack.sql.expression.Expression; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; import java.util.List; @@ -14,11 +14,11 @@ // and thus cannot be used directly in SQL and are mainly for internal use public abstract class CompoundNumericAggregate extends NumericAggregate { - CompoundNumericAggregate(Location location, Expression field, List arguments) { - 
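/*
 * As the class comment above notes, compound aggregates (STATS, EXTENDED_STATS,
 * MATRIX_STATS, ...) expose several values at once and are internal-only; a single
 * metric is extracted from them through InnerAggregate, whose diff appears further
 * down. A hedged construction sketch using only constructors visible in this diff
 * (the salary field expression is hypothetical):
 *
 *     Avg avg = new Avg(source, salary);        // an EnclosedAgg
 *     Stats stats = new Stats(source, salary);  // compound, internal-only
 *     InnerAggregate folded = new InnerAggregate(avg, stats);
 *     // folded.toAttribute() reads the inner metric via aggMetricValue(functionId(), innerId)
 */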
super(location, field, arguments); + CompoundNumericAggregate(Source source, Expression field, List arguments) { + super(source, field, arguments); } - CompoundNumericAggregate(Location location, Expression field) { - super(location, field); + CompoundNumericAggregate(Source source, Expression field) { + super(source, field); } } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/aggregate/Count.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/aggregate/Count.java index 3653f84be29b6..429b4e7ba0774 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/aggregate/Count.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/aggregate/Count.java @@ -5,14 +5,16 @@ */ package org.elasticsearch.xpack.sql.expression.function.aggregate; -import java.util.List; - import org.elasticsearch.xpack.sql.expression.Expression; +import org.elasticsearch.xpack.sql.expression.Literal; import org.elasticsearch.xpack.sql.expression.NamedExpression; -import org.elasticsearch.xpack.sql.tree.Location; import org.elasticsearch.xpack.sql.tree.NodeInfo; +import org.elasticsearch.xpack.sql.tree.Source; import org.elasticsearch.xpack.sql.type.DataType; +import java.util.List; +import java.util.Objects; + /** * Count the number of documents matched ({@code COUNT}) * OR count the number of distinct values @@ -22,8 +24,8 @@ public class Count extends AggregateFunction { private final boolean distinct; - public Count(Location location, Expression field, boolean distinct) { - super(location, field); + public Count(Source source, Expression field, boolean distinct) { + super(source, field); this.distinct = distinct; } @@ -37,7 +39,7 @@ public Count replaceChildren(List newChildren) { if (newChildren.size() != 1) { throw new IllegalArgumentException("expected [1] child but received [" + newChildren.size() + "]"); } - return new Count(location(), newChildren.get(0), distinct); + return new Count(source(), newChildren.get(0), distinct); } public boolean distinct() { @@ -53,14 +55,48 @@ public DataType dataType() { public String functionId() { String functionId = id().toString(); // if count works against a given expression, use its id (to identify the group) - if (field() instanceof NamedExpression) { + // in case of COUNT DISTINCT don't use the expression id to avoid possible duplicate IDs when COUNT and COUNT DISTINCT are used + // in the same query + if (!distinct() && field() instanceof NamedExpression) { functionId = ((NamedExpression) field()).id().toString(); } return functionId; } + @Override + public String name() { + if (distinct()) { + StringBuilder sb = new StringBuilder(super.name()); + sb.insert(sb.indexOf("(") + 1, "DISTINCT "); + return sb.toString(); + } + return super.name(); + } + @Override public AggregateFunctionAttribute toAttribute() { - return new AggregateFunctionAttribute(location(), name(), dataType(), id(), functionId(), "_count"); + // COUNT(*) gets its value from the parent aggregation on which _count is called + if (field() instanceof Literal) { + return new AggregateFunctionAttribute(source(), name(), dataType(), id(), functionId(), "_count"); + } + // COUNT(column) gets its value from a sibling aggregation (an exists filter agg) by calling its id and then _count on it + if (!distinct()) { + return new AggregateFunctionAttribute(source(), name(), dataType(), id(), functionId(), functionId() + "._count"); + } + return super.toAttribute(); + } + +
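/*
 * To illustrate the name() override added above: for a distinct count, "DISTINCT " is
 * spliced into the base name right after the opening parenthesis. A self-contained
 * sketch of that string manipulation (hypothetical helper mirroring the logic above):
 *
 *     static String distinctName(String baseName) {
 *         StringBuilder sb = new StringBuilder(baseName);
 *         sb.insert(sb.indexOf("(") + 1, "DISTINCT ");
 *         return sb.toString();
 *     }
 *     // distinctName("COUNT(a)") returns "COUNT(DISTINCT a)"
 *
 * The toAttribute() branches above encode where each variant reads its value from:
 * COUNT(*) uses the enclosing bucket's doc count ("_count"), plain COUNT(column)
 * reads the doc count of a sibling exists-filter aggregation ("<functionId>._count"),
 * and COUNT(DISTINCT column) falls through to the default aggregate attribute.
 */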
@Override + public boolean equals(Object obj) { + if (false == super.equals(obj)) { + return false; + } + Count other = (Count) obj; + return Objects.equals(other.distinct(), distinct()); + } + + @Override + public int hashCode() { + return Objects.hash(super.hashCode(), distinct()); } } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/aggregate/ExtendedStats.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/aggregate/ExtendedStats.java index 3f0555cd57751..a1245104858f3 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/aggregate/ExtendedStats.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/aggregate/ExtendedStats.java @@ -7,13 +7,13 @@ import java.util.List; import org.elasticsearch.xpack.sql.expression.Expression; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; import org.elasticsearch.xpack.sql.tree.NodeInfo; public class ExtendedStats extends CompoundNumericAggregate { - public ExtendedStats(Location location, Expression field) { - super(location, field); + public ExtendedStats(Source source, Expression field) { + super(source, field); } @Override @@ -26,6 +26,6 @@ public ExtendedStats replaceChildren(List newChildren) { if (newChildren.size() != 1) { throw new IllegalArgumentException("expected [1] child but received [" + newChildren.size() + "]"); } - return new ExtendedStats(location(), newChildren.get(0)); + return new ExtendedStats(source(), newChildren.get(0)); } } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/aggregate/InnerAggregate.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/aggregate/InnerAggregate.java index b7fa7cfe3e5d2..2fdcf6658804c 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/aggregate/InnerAggregate.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/aggregate/InnerAggregate.java @@ -7,7 +7,7 @@ import org.elasticsearch.xpack.sql.expression.Expression; import org.elasticsearch.xpack.sql.expression.function.Function; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; import org.elasticsearch.xpack.sql.tree.NodeInfo; import org.elasticsearch.xpack.sql.type.DataType; @@ -22,11 +22,11 @@ public class InnerAggregate extends AggregateFunction { private final Expression innerKey; public InnerAggregate(AggregateFunction inner, CompoundNumericAggregate outer) { - this(inner.location(), inner, outer, null); + this(inner.source(), inner, outer, null); } - public InnerAggregate(Location location, AggregateFunction inner, CompoundNumericAggregate outer, Expression innerKey) { - super(location, outer.field(), outer.arguments()); + public InnerAggregate(Source source, AggregateFunction inner, CompoundNumericAggregate outer, Expression innerKey) { + super(source, outer.field(), outer.arguments()); this.inner = inner; this.outer = outer; this.innerId = ((EnclosedAgg) inner).innerName(); @@ -76,7 +76,7 @@ public String functionId() { @Override public AggregateFunctionAttribute toAttribute() { // this is highly correlated with QueryFolder$FoldAggregate#addFunction (regarding the function name within the querydsl) - return new AggregateFunctionAttribute(location(), name(), dataType(), outer.id(), functionId(), + return new 
AggregateFunctionAttribute(source(), name(), dataType(), outer.id(), functionId(), aggMetricValue(functionId(), innerId)); } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/aggregate/Kurtosis.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/aggregate/Kurtosis.java index 5d46fa68f2bed..29b09187b5dea 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/aggregate/Kurtosis.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/aggregate/Kurtosis.java @@ -7,13 +7,13 @@ import java.util.List; import org.elasticsearch.xpack.sql.expression.Expression; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; import org.elasticsearch.xpack.sql.tree.NodeInfo; public class Kurtosis extends NumericAggregate implements MatrixStatsEnclosed { - public Kurtosis(Location location, Expression field) { - super(location, field); + public Kurtosis(Source source, Expression field) { + super(source, field); } @Override @@ -26,7 +26,7 @@ public Kurtosis replaceChildren(List newChildren) { if (newChildren.size() != 1) { throw new IllegalArgumentException("expected [1] child but received [" + newChildren.size() + "]"); } - return new Kurtosis(location(), newChildren.get(0)); + return new Kurtosis(source(), newChildren.get(0)); } @Override diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/aggregate/MatrixStats.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/aggregate/MatrixStats.java index fa7697b55c363..fb4d86501dbf1 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/aggregate/MatrixStats.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/aggregate/MatrixStats.java @@ -7,13 +7,13 @@ import java.util.List; import org.elasticsearch.xpack.sql.expression.Expression; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; import org.elasticsearch.xpack.sql.tree.NodeInfo; public class MatrixStats extends CompoundNumericAggregate { - public MatrixStats(Location location, Expression field) { - super(location, field); + public MatrixStats(Source source, Expression field) { + super(source, field); } @Override @@ -26,6 +26,6 @@ public MatrixStats replaceChildren(List newChildren) { if (newChildren.size() != 1) { throw new IllegalArgumentException("expected [1] child but received [" + newChildren.size() + "]"); } - return new MatrixStats(location(), newChildren.get(0)); + return new MatrixStats(source(), newChildren.get(0)); } } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/aggregate/Max.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/aggregate/Max.java index 9c6b1374f0777..5df73793677ed 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/aggregate/Max.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/aggregate/Max.java @@ -8,7 +8,7 @@ import org.elasticsearch.xpack.sql.expression.Expression; import org.elasticsearch.xpack.sql.expression.Expressions; import org.elasticsearch.xpack.sql.expression.Expressions.ParamOrdinal; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; import 
org.elasticsearch.xpack.sql.tree.NodeInfo; import org.elasticsearch.xpack.sql.type.DataType; @@ -19,8 +19,8 @@ */ public class Max extends NumericAggregate implements EnclosedAgg { - public Max(Location location, Expression field) { - super(location, field); + public Max(Source source, Expression field) { + super(source, field); } @Override @@ -30,7 +30,7 @@ protected NodeInfo info() { @Override public Max replaceChildren(List newChildren) { - return new Max(location(), newChildren.get(0)); + return new Max(source(), newChildren.get(0)); } @Override diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/aggregate/Min.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/aggregate/Min.java index e0b68999d64a1..09ee16619cd80 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/aggregate/Min.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/aggregate/Min.java @@ -8,7 +8,7 @@ import org.elasticsearch.xpack.sql.expression.Expression; import org.elasticsearch.xpack.sql.expression.Expressions; import org.elasticsearch.xpack.sql.expression.Expressions.ParamOrdinal; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; import org.elasticsearch.xpack.sql.tree.NodeInfo; import org.elasticsearch.xpack.sql.type.DataType; @@ -19,8 +19,8 @@ */ public class Min extends NumericAggregate implements EnclosedAgg { - public Min(Location location, Expression field) { - super(location, field); + public Min(Source source, Expression field) { + super(source, field); } @Override @@ -33,7 +33,7 @@ public Min replaceChildren(List newChildren) { if (newChildren.size() != 1) { throw new IllegalArgumentException("expected [1] child but received [" + newChildren.size() + "]"); } - return new Min(location(), newChildren.get(0)); + return new Min(source(), newChildren.get(0)); } @Override diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/aggregate/NumericAggregate.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/aggregate/NumericAggregate.java index f384e157ec4ae..7a6fe313544e7 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/aggregate/NumericAggregate.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/aggregate/NumericAggregate.java @@ -8,19 +8,19 @@ import org.elasticsearch.xpack.sql.expression.Expression; import org.elasticsearch.xpack.sql.expression.Expressions; import org.elasticsearch.xpack.sql.expression.Expressions.ParamOrdinal; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; import org.elasticsearch.xpack.sql.type.DataType; import java.util.List; abstract class NumericAggregate extends AggregateFunction { - NumericAggregate(Location location, Expression field, List parameters) { - super(location, field, parameters); + NumericAggregate(Source source, Expression field, List parameters) { + super(source, field, parameters); } - NumericAggregate(Location location, Expression field) { - super(location, field); + NumericAggregate(Source source, Expression field) { + super(source, field); } @Override diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/aggregate/Percentile.java 
b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/aggregate/Percentile.java index 6e644fb4f751c..ee5304b915c48 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/aggregate/Percentile.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/aggregate/Percentile.java @@ -5,11 +5,12 @@ */ package org.elasticsearch.xpack.sql.expression.function.aggregate; +import org.elasticsearch.xpack.sql.SqlIllegalArgumentException; import org.elasticsearch.xpack.sql.expression.Expression; import org.elasticsearch.xpack.sql.expression.Expressions; import org.elasticsearch.xpack.sql.expression.Expressions.ParamOrdinal; import org.elasticsearch.xpack.sql.expression.Foldables; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; import org.elasticsearch.xpack.sql.tree.NodeInfo; import org.elasticsearch.xpack.sql.type.DataType; @@ -21,8 +22,8 @@ public class Percentile extends NumericAggregate implements EnclosedAgg { private final Expression percent; - public Percentile(Location location, Expression field, Expression percent) { - super(location, field, singletonList(percent)); + public Percentile(Source source, Expression field, Expression percent) { + super(source, field, singletonList(percent)); this.percent = percent; } @@ -36,18 +37,22 @@ public Percentile replaceChildren(List newChildren) { if (newChildren.size() != 2) { throw new IllegalArgumentException("expected [2] children but received [" + newChildren.size() + "]"); } - return new Percentile(location(), newChildren.get(0), newChildren.get(1)); + return new Percentile(source(), newChildren.get(0), newChildren.get(1)); } @Override protected TypeResolution resolveType() { - TypeResolution resolution = super.resolveType(); + if (!percent.foldable()) { + throw new SqlIllegalArgumentException("2nd argument of PERCENTILE must be constant, received [{}]", + Expressions.name(percent)); + } - if (TypeResolution.TYPE_RESOLVED.equals(resolution)) { - resolution = Expressions.typeMustBeNumeric(percent(), functionName(), ParamOrdinal.DEFAULT); + TypeResolution resolution = super.resolveType(); + if (resolution.unresolved()) { + return resolution; } - return resolution; + return Expressions.typeMustBeNumeric(percent, functionName(), ParamOrdinal.DEFAULT); } public Expression percent() { diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/aggregate/PercentileRank.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/aggregate/PercentileRank.java index f01dad8800ccf..c8f51476af3d3 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/aggregate/PercentileRank.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/aggregate/PercentileRank.java @@ -5,11 +5,12 @@ */ package org.elasticsearch.xpack.sql.expression.function.aggregate; +import org.elasticsearch.xpack.sql.SqlIllegalArgumentException; import org.elasticsearch.xpack.sql.expression.Expression; import org.elasticsearch.xpack.sql.expression.Expressions; import org.elasticsearch.xpack.sql.expression.Expressions.ParamOrdinal; import org.elasticsearch.xpack.sql.expression.Foldables; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; import org.elasticsearch.xpack.sql.tree.NodeInfo; import org.elasticsearch.xpack.sql.type.DataType; @@ -21,8 +22,8 @@ public class 
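/*
 * The guard added to Percentile.resolveType() above (and mirrored in PercentileRank
 * just below) fails fast when the second argument is not a constant, instead of
 * letting a non-foldable expression reach numeric type resolution. A hedged sketch,
 * reusing the Literal.of(source, value) helper seen elsewhere in this diff (the
 * salary and languages field expressions are hypothetical):
 *
 *     Percentile ok = new Percentile(source, salary, Literal.of(source, 95));
 *     Percentile bad = new Percentile(source, salary, languages);
 *     // resolving ok succeeds: the literal 95 folds and is numeric
 *     // resolving bad throws SqlIllegalArgumentException:
 *     //   "2nd argument of PERCENTILE must be constant, received [languages]"
 */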
PercentileRank extends AggregateFunction implements EnclosedAgg { private final Expression value; - public PercentileRank(Location location, Expression field, Expression value) { - super(location, field, singletonList(value)); + public PercentileRank(Source source, Expression field, Expression value) { + super(source, field, singletonList(value)); this.value = value; } @@ -36,11 +37,16 @@ public Expression replaceChildren(List newChildren) { if (newChildren.size() != 2) { throw new IllegalArgumentException("expected [2] children but received [" + newChildren.size() + "]"); } - return new PercentileRank(location(), newChildren.get(0), newChildren.get(1)); + return new PercentileRank(source(), newChildren.get(0), newChildren.get(1)); } @Override protected TypeResolution resolveType() { + if (!value.foldable()) { + throw new SqlIllegalArgumentException("2nd argument of PERCENTILE_RANK must be constant, received [{}]", + Expressions.name(value)); + } + TypeResolution resolution = super.resolveType(); if (resolution.unresolved()) { return resolution; diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/aggregate/PercentileRanks.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/aggregate/PercentileRanks.java index 38c79ebd6210b..5cda86e7f1634 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/aggregate/PercentileRanks.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/aggregate/PercentileRanks.java @@ -6,7 +6,7 @@ package org.elasticsearch.xpack.sql.expression.function.aggregate; import org.elasticsearch.xpack.sql.expression.Expression; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; import org.elasticsearch.xpack.sql.tree.NodeInfo; import java.util.List; @@ -14,8 +14,8 @@ public class PercentileRanks extends CompoundNumericAggregate { private final List values; - public PercentileRanks(Location location, Expression field, List values) { - super(location, field, values); + public PercentileRanks(Source source, Expression field, List values) { + super(source, field, values); this.values = values; } @@ -29,7 +29,7 @@ public PercentileRanks replaceChildren(List newChildren) { if (newChildren.size() < 2) { throw new IllegalArgumentException("expected at least [2] children but received [" + newChildren.size() + "]"); } - return new PercentileRanks(location(), newChildren.get(0), newChildren.subList(1, newChildren.size())); + return new PercentileRanks(source(), newChildren.get(0), newChildren.subList(1, newChildren.size())); } public List values() { diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/aggregate/Percentiles.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/aggregate/Percentiles.java index 932a887806f23..e4238831536ff 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/aggregate/Percentiles.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/aggregate/Percentiles.java @@ -6,7 +6,7 @@ package org.elasticsearch.xpack.sql.expression.function.aggregate; import org.elasticsearch.xpack.sql.expression.Expression; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; import org.elasticsearch.xpack.sql.tree.NodeInfo; import java.util.List; @@ -14,8 +14,8 @@ public class Percentiles 
extends CompoundNumericAggregate { private final List percents; - public Percentiles(Location location, Expression field, List percents) { - super(location, field, percents); + public Percentiles(Source source, Expression field, List percents) { + super(source, field, percents); this.percents = percents; } @@ -29,7 +29,7 @@ public Percentiles replaceChildren(List newChildren) { if (newChildren.size() < 2) { throw new IllegalArgumentException("expected more than one child but received [" + newChildren.size() + "]"); } - return new Percentiles(location(), newChildren.get(0), newChildren.subList(1, newChildren.size())); + return new Percentiles(source(), newChildren.get(0), newChildren.subList(1, newChildren.size())); } public List percents() { diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/aggregate/Skewness.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/aggregate/Skewness.java index d8514e40ede30..07e857f1eb8f5 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/aggregate/Skewness.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/aggregate/Skewness.java @@ -7,13 +7,13 @@ import java.util.List; import org.elasticsearch.xpack.sql.expression.Expression; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; import org.elasticsearch.xpack.sql.tree.NodeInfo; public class Skewness extends NumericAggregate implements MatrixStatsEnclosed { - public Skewness(Location location, Expression field) { - super(location, field); + public Skewness(Source source, Expression field) { + super(source, field); } @Override @@ -26,7 +26,7 @@ public Skewness replaceChildren(List newChildren) { if (newChildren.size() != 1) { throw new IllegalArgumentException("expected [1] child but received [" + newChildren.size() + "]"); } - return new Skewness(location(), newChildren.get(0)); + return new Skewness(source(), newChildren.get(0)); } @Override diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/aggregate/Stats.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/aggregate/Stats.java index 3bfddd9374e0d..4da3fdd2ac573 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/aggregate/Stats.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/aggregate/Stats.java @@ -7,13 +7,13 @@ import java.util.List; import org.elasticsearch.xpack.sql.expression.Expression; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; import org.elasticsearch.xpack.sql.tree.NodeInfo; public class Stats extends CompoundNumericAggregate { - public Stats(Location location, Expression field) { - super(location, field); + public Stats(Source source, Expression field) { + super(source, field); } @Override @@ -26,7 +26,7 @@ public Stats replaceChildren(List newChildren) { if (newChildren.size() != 1) { throw new IllegalArgumentException("expected [1] child but received [" + newChildren.size() + "]"); } - return new Stats(location(), newChildren.get(0)); + return new Stats(source(), newChildren.get(0)); } public static boolean isTypeCompatible(Expression e) { diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/aggregate/StddevPop.java 
b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/aggregate/StddevPop.java index acdfaecf55615..a0521b886153c 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/aggregate/StddevPop.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/aggregate/StddevPop.java @@ -7,13 +7,13 @@ import java.util.List; import org.elasticsearch.xpack.sql.expression.Expression; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; import org.elasticsearch.xpack.sql.tree.NodeInfo; public class StddevPop extends NumericAggregate implements ExtendedStatsEnclosed { - public StddevPop(Location location, Expression field) { - super(location, field); + public StddevPop(Source source, Expression field) { + super(source, field); } @Override @@ -26,7 +26,7 @@ public StddevPop replaceChildren(List newChildren) { if (newChildren.size() != 1) { throw new IllegalArgumentException("expected [1] child but received [" + newChildren.size() + "]"); } - return new StddevPop(location(), newChildren.get(0)); + return new StddevPop(source(), newChildren.get(0)); } @Override diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/aggregate/Sum.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/aggregate/Sum.java index 25861542509bf..271cda6630be7 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/aggregate/Sum.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/aggregate/Sum.java @@ -7,7 +7,7 @@ import java.util.List; import org.elasticsearch.xpack.sql.expression.Expression; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; import org.elasticsearch.xpack.sql.tree.NodeInfo; import org.elasticsearch.xpack.sql.type.DataType; @@ -16,8 +16,8 @@ */ public class Sum extends NumericAggregate implements EnclosedAgg { - public Sum(Location location, Expression field) { - super(location, field); + public Sum(Source source, Expression field) { + super(source, field); } @Override @@ -30,7 +30,7 @@ public Sum replaceChildren(List newChildren) { if (newChildren.size() != 1) { throw new IllegalArgumentException("expected [1] child but received [" + newChildren.size() + "]"); } - return new Sum(location(), newChildren.get(0)); + return new Sum(source(), newChildren.get(0)); } @Override diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/aggregate/SumOfSquares.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/aggregate/SumOfSquares.java index a52c279e83a5a..c936d3472a7fd 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/aggregate/SumOfSquares.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/aggregate/SumOfSquares.java @@ -7,13 +7,13 @@ import java.util.List; import org.elasticsearch.xpack.sql.expression.Expression; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; import org.elasticsearch.xpack.sql.tree.NodeInfo; public class SumOfSquares extends NumericAggregate implements ExtendedStatsEnclosed { - public SumOfSquares(Location location, Expression field) { - super(location, field); + public SumOfSquares(Source source, Expression field) { + super(source, field); } @Override @@ -26,7 +26,7 
@@ public SumOfSquares replaceChildren(List newChildren) { if (newChildren.size() != 1) { throw new IllegalArgumentException("expected [1] child but received [" + newChildren.size() + "]"); } - return new SumOfSquares(location(), newChildren.get(0)); + return new SumOfSquares(source(), newChildren.get(0)); } @Override diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/aggregate/VarPop.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/aggregate/VarPop.java index 0acfeba279da9..eec44e828db9c 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/aggregate/VarPop.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/aggregate/VarPop.java @@ -7,13 +7,13 @@ import java.util.List; import org.elasticsearch.xpack.sql.expression.Expression; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; import org.elasticsearch.xpack.sql.tree.NodeInfo; public class VarPop extends NumericAggregate implements ExtendedStatsEnclosed { - public VarPop(Location location, Expression field) { - super(location, field); + public VarPop(Source source, Expression field) { + super(source, field); } @Override @@ -26,7 +26,7 @@ public VarPop replaceChildren(List newChildren) { if (newChildren.size() != 1) { throw new IllegalArgumentException("expected [1] child but received [" + newChildren.size() + "]"); } - return new VarPop(location(), newChildren.get(0)); + return new VarPop(source(), newChildren.get(0)); } @Override diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/grouping/GroupingFunction.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/grouping/GroupingFunction.java index dbfef6aeb5dc4..0595e29176a93 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/grouping/GroupingFunction.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/grouping/GroupingFunction.java @@ -11,7 +11,7 @@ import org.elasticsearch.xpack.sql.expression.gen.pipeline.AggNameInput; import org.elasticsearch.xpack.sql.expression.gen.pipeline.Pipe; import org.elasticsearch.xpack.sql.expression.gen.script.ScriptTemplate; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; import org.elasticsearch.xpack.sql.util.CollectionUtils; import java.util.List; @@ -30,12 +30,12 @@ public abstract class GroupingFunction extends Function { private GroupingFunctionAttribute lazyAttribute; - protected GroupingFunction(Location location, Expression field) { - this(location, field, emptyList()); + protected GroupingFunction(Source source, Expression field) { + this(source, field, emptyList()); } - protected GroupingFunction(Location location, Expression field, List parameters) { - super(location, CollectionUtils.combine(singletonList(field), parameters)); + protected GroupingFunction(Source source, Expression field, List parameters) { + super(source, CollectionUtils.combine(singletonList(field), parameters)); this.field = field; this.parameters = parameters; } @@ -51,8 +51,8 @@ public List parameters() { @Override public GroupingFunctionAttribute toAttribute() { if (lazyAttribute == null) { - // this is highly correlated with QueryFolder$FoldAggregate#addFunction (regarding the function name within the querydsl) - lazyAttribute = new 
GroupingFunctionAttribute(location(), name(), dataType(), id(), functionId()); + // this is highly correlated with QueryFolder$FoldAggregate#addAggFunction (regarding the function name within the querydsl) + lazyAttribute = new GroupingFunctionAttribute(source(), name(), dataType(), id(), functionId()); } return lazyAttribute; } @@ -70,7 +70,7 @@ public final GroupingFunction replaceChildren(List newChildren) { @Override protected Pipe makePipe() { // unresolved AggNameInput (should always get replaced by the folder) - return new AggNameInput(location(), this, name()); + return new AggNameInput(source(), this, name()); } @Override diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/grouping/GroupingFunctionAttribute.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/grouping/GroupingFunctionAttribute.java index 4deac8a2f9eee..c33c893141b65 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/grouping/GroupingFunctionAttribute.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/grouping/GroupingFunctionAttribute.java @@ -8,20 +8,21 @@ import org.elasticsearch.xpack.sql.expression.Attribute; import org.elasticsearch.xpack.sql.expression.Expression; import org.elasticsearch.xpack.sql.expression.ExpressionId; +import org.elasticsearch.xpack.sql.expression.Nullability; import org.elasticsearch.xpack.sql.expression.function.FunctionAttribute; -import org.elasticsearch.xpack.sql.tree.Location; import org.elasticsearch.xpack.sql.tree.NodeInfo; +import org.elasticsearch.xpack.sql.tree.Source; import org.elasticsearch.xpack.sql.type.DataType; public class GroupingFunctionAttribute extends FunctionAttribute { - GroupingFunctionAttribute(Location location, String name, DataType dataType, ExpressionId id, String functionId) { - this(location, name, dataType, null, false, id, false, functionId); + GroupingFunctionAttribute(Source source, String name, DataType dataType, ExpressionId id, String functionId) { + this(source, name, dataType, null, Nullability.FALSE, id, false, functionId); } - public GroupingFunctionAttribute(Location location, String name, DataType dataType, String qualifier, - boolean nullable, ExpressionId id, boolean synthetic, String functionId) { - super(location, name, dataType, qualifier, nullable, id, synthetic, functionId); + public GroupingFunctionAttribute(Source source, String name, DataType dataType, String qualifier, + Nullability nullability, ExpressionId id, boolean synthetic, String functionId) { + super(source, name, dataType, qualifier, nullability, id, synthetic, functionId); } @Override @@ -32,18 +33,19 @@ protected NodeInfo info() { @Override protected Expression canonicalize() { - return new GroupingFunctionAttribute(location(), "", dataType(), null, true, id(), false, ""); + return new GroupingFunctionAttribute(source(), "", dataType(), null, Nullability.TRUE, id(), false, ""); } @Override - protected Attribute clone(Location location, String name, String qualifier, boolean nullable, ExpressionId id, boolean synthetic) { + protected Attribute clone(Source source, String name, String qualifier, Nullability nullability, + ExpressionId id, boolean synthetic) { // this is highly correlated with QueryFolder$FoldAggregate#addFunction (regarding the function name within the querydsl) // that is the functionId is actually derived from the expression id to easily track it across contexts - return new 
GroupingFunctionAttribute(location, name, dataType(), qualifier, nullable, id, synthetic, functionId()); + return new GroupingFunctionAttribute(source, name, dataType(), qualifier, nullability, id, synthetic, functionId()); } public GroupingFunctionAttribute withFunctionId(String functionId, String propertyPath) { - return new GroupingFunctionAttribute(location(), name(), dataType(), qualifier(), nullable(), + return new GroupingFunctionAttribute(source(), name(), dataType(), qualifier(), nullable(), id(), synthetic(), functionId); } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/grouping/Histogram.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/grouping/Histogram.java index 4c1b761b1a00c..1cace59a2cc00 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/grouping/Histogram.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/grouping/Histogram.java @@ -10,7 +10,7 @@ import org.elasticsearch.xpack.sql.expression.Expressions; import org.elasticsearch.xpack.sql.expression.Expressions.ParamOrdinal; import org.elasticsearch.xpack.sql.expression.Literal; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; import org.elasticsearch.xpack.sql.tree.NodeInfo; import org.elasticsearch.xpack.sql.type.DataType; import org.elasticsearch.xpack.sql.type.DataTypes; @@ -23,8 +23,8 @@ public class Histogram extends GroupingFunction { private final Literal interval; private final ZoneId zoneId; - public Histogram(Location location, Expression field, Expression interval, ZoneId zoneId) { - super(location, field); + public Histogram(Source source, Expression field, Expression interval, ZoneId zoneId) { + super(source, field); this.interval = (Literal) interval; this.zoneId = zoneId; } @@ -54,7 +54,7 @@ protected TypeResolution resolveType() { @Override protected GroupingFunction replaceChild(Expression newChild) { - return new Histogram(location(), newChild, interval, zoneId); + return new Histogram(source(), newChild, interval, zoneId); } @Override diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/BinaryScalarFunction.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/BinaryScalarFunction.java index 706894388458a..0a21fa83606d1 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/BinaryScalarFunction.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/BinaryScalarFunction.java @@ -8,7 +8,7 @@ import org.elasticsearch.xpack.sql.expression.Expression; import org.elasticsearch.xpack.sql.expression.gen.script.ScriptTemplate; import org.elasticsearch.xpack.sql.expression.gen.script.Scripts; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; import java.util.Arrays; import java.util.List; @@ -18,8 +18,8 @@ public abstract class BinaryScalarFunction extends ScalarFunction { private final Expression left, right; - protected BinaryScalarFunction(Location location, Expression left, Expression right) { - super(location, Arrays.asList(left, right)); + protected BinaryScalarFunction(Source source, Expression left, Expression right) { + super(source, Arrays.asList(left, right)); this.left = left; this.right = right; } diff --git 
a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/Cast.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/Cast.java index d4265d123e80b..04f9b13ff3329 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/Cast.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/Cast.java @@ -6,25 +6,26 @@ package org.elasticsearch.xpack.sql.expression.function.scalar; import org.elasticsearch.xpack.sql.expression.Expression; +import org.elasticsearch.xpack.sql.expression.Nullability; import org.elasticsearch.xpack.sql.expression.gen.processor.Processor; import org.elasticsearch.xpack.sql.expression.gen.script.ScriptTemplate; -import org.elasticsearch.xpack.sql.tree.Location; import org.elasticsearch.xpack.sql.tree.NodeInfo; +import org.elasticsearch.xpack.sql.tree.Source; import org.elasticsearch.xpack.sql.type.DataType; import org.elasticsearch.xpack.sql.type.DataTypeConversion; import org.elasticsearch.xpack.sql.type.DataTypes; -import java.util.Locale; import java.util.Objects; +import static org.elasticsearch.common.logging.LoggerMessageFormat.format; import static org.elasticsearch.xpack.sql.expression.gen.script.ParamsBuilder.paramsBuilder; public class Cast extends UnaryScalarFunction { private final DataType dataType; - public Cast(Location location, Expression field, DataType dataType) { - super(location, field); + public Cast(Source source, Expression field, DataType dataType) { + super(source, field); this.dataType = dataType; } @@ -35,7 +36,7 @@ protected NodeInfo info() { @Override protected UnaryScalarFunction replaceChild(Expression newChild) { - return new Cast(location(), newChild, dataType); + return new Cast(source(), newChild, dataType); } public DataType from() { @@ -62,8 +63,11 @@ public Object fold() { } @Override - public boolean nullable() { - return field().nullable() || DataTypes.isNull(from()); + public Nullability nullable() { + if (DataTypes.isNull(from())) { + return Nullability.TRUE; + } + return field().nullable(); } @Override @@ -82,7 +86,7 @@ protected Processor makeProcessor() { public ScriptTemplate asScript() { ScriptTemplate fieldAsScript = asScript(field()); return new ScriptTemplate( - formatTemplate(String.format(Locale.ROOT, "{sql}.cast(%s,{})", fieldAsScript.template())), + formatTemplate(format("{sql}.", "cast({},{})", fieldAsScript.template())), paramsBuilder() .script(fieldAsScript.params()) .variable(dataType.name()) diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/ConfigurationFunction.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/ConfigurationFunction.java index 369ae24da6508..39ee00d9b7253 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/ConfigurationFunction.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/ConfigurationFunction.java @@ -7,9 +7,10 @@ package org.elasticsearch.xpack.sql.expression.function.scalar; import org.elasticsearch.xpack.sql.expression.Expression; +import org.elasticsearch.xpack.sql.expression.Nullability; import org.elasticsearch.xpack.sql.expression.gen.script.ScriptTemplate; import org.elasticsearch.xpack.sql.session.Configuration; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; import 
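/*
 * Cast.nullable() above restates the old boolean rule in the new type: a cast whose
 * source type is the NULL type always produces null, and otherwise the cast is
 * exactly as nullable as the expression being cast. Illustrative cases (the column
 * name is hypothetical):
 *
 *     CAST(NULL AS INTEGER)       -> Nullability.TRUE, since from() is the null type
 *     CAST(birth_date AS VARCHAR) -> birth_date.nullable(), passed through unchanged
 */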
org.elasticsearch.xpack.sql.type.DataType; import org.elasticsearch.xpack.sql.util.StringUtils; @@ -21,8 +22,8 @@ public abstract class ConfigurationFunction extends ScalarFunction { private final Configuration configuration; private final DataType dataType; - protected ConfigurationFunction(Location location, Configuration configuration, DataType dataType) { - super(location); + protected ConfigurationFunction(Source source, Configuration configuration, DataType dataType) { + super(source); this.configuration = configuration; this.dataType = dataType; } @@ -42,8 +43,8 @@ public DataType dataType() { } @Override - public boolean nullable() { - return false; + public Nullability nullable() { + return Nullability.FALSE; } @Override @@ -73,4 +74,4 @@ public int hashCode() { public boolean equals(Object obj) { return super.equals(obj) && Objects.equals(fold(), ((ConfigurationFunction) obj).fold()); } -} \ No newline at end of file +} diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/Database.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/Database.java index 3f9d7b30efcf6..4a6836d4856e9 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/Database.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/Database.java @@ -7,14 +7,14 @@ package org.elasticsearch.xpack.sql.expression.function.scalar; import org.elasticsearch.xpack.sql.session.Configuration; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; import org.elasticsearch.xpack.sql.tree.NodeInfo; import org.elasticsearch.xpack.sql.type.DataType; public class Database extends ConfigurationFunction { - public Database(Location location, Configuration configuration) { - super(location, configuration, DataType.KEYWORD); + public Database(Source source, Configuration configuration) { + super(source, configuration, DataType.KEYWORD); } @Override diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/ScalarFunction.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/ScalarFunction.java index 0be9ca86c5f76..d836030a3ae40 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/ScalarFunction.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/ScalarFunction.java @@ -8,7 +8,7 @@ import org.elasticsearch.xpack.sql.expression.Expression; import org.elasticsearch.xpack.sql.expression.function.Function; import org.elasticsearch.xpack.sql.expression.gen.script.ScriptWeaver; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; import java.util.List; @@ -24,18 +24,18 @@ public abstract class ScalarFunction extends Function implements ScriptWeaver { private ScalarFunctionAttribute lazyAttribute = null; - protected ScalarFunction(Location location) { - super(location, emptyList()); + protected ScalarFunction(Source source) { + super(source, emptyList()); } - protected ScalarFunction(Location location, List fields) { - super(location, fields); + protected ScalarFunction(Source source, List fields) { + super(source, fields); } @Override public final ScalarFunctionAttribute toAttribute() { if (lazyAttribute == null) { - lazyAttribute = new ScalarFunctionAttribute(location(), name(), dataType(), id(), 
functionId(), asScript(), orderBy(), + lazyAttribute = new ScalarFunctionAttribute(source(), name(), dataType(), id(), functionId(), asScript(), orderBy(), asPipe()); } return lazyAttribute; diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/ScalarFunctionAttribute.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/ScalarFunctionAttribute.java index f2b0f48a18f06..6a0980c2690d4 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/ScalarFunctionAttribute.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/ScalarFunctionAttribute.java @@ -8,11 +8,12 @@ import org.elasticsearch.xpack.sql.expression.Attribute; import org.elasticsearch.xpack.sql.expression.Expression; import org.elasticsearch.xpack.sql.expression.ExpressionId; +import org.elasticsearch.xpack.sql.expression.Nullability; import org.elasticsearch.xpack.sql.expression.function.FunctionAttribute; import org.elasticsearch.xpack.sql.expression.gen.pipeline.Pipe; import org.elasticsearch.xpack.sql.expression.gen.script.ScriptTemplate; -import org.elasticsearch.xpack.sql.tree.Location; import org.elasticsearch.xpack.sql.tree.NodeInfo; +import org.elasticsearch.xpack.sql.tree.Source; import org.elasticsearch.xpack.sql.type.DataType; import java.util.Objects; @@ -23,15 +24,15 @@ public class ScalarFunctionAttribute extends FunctionAttribute { private final Expression orderBy; private final Pipe pipe; - ScalarFunctionAttribute(Location location, String name, DataType dataType, ExpressionId id, + ScalarFunctionAttribute(Source source, String name, DataType dataType, ExpressionId id, String functionId, ScriptTemplate script, Expression orderBy, Pipe processorDef) { - this(location, name, dataType, null, true, id, false, functionId, script, orderBy, processorDef); + this(source, name, dataType, null, Nullability.TRUE, id, false, functionId, script, orderBy, processorDef); } - public ScalarFunctionAttribute(Location location, String name, DataType dataType, String qualifier, - boolean nullable, ExpressionId id, boolean synthetic, String functionId, ScriptTemplate script, - Expression orderBy, Pipe pipe) { - super(location, name, dataType, qualifier, nullable, id, synthetic, functionId); + public ScalarFunctionAttribute(Source source, String name, DataType dataType, String qualifier, + Nullability nullability, ExpressionId id, boolean synthetic, String functionId, ScriptTemplate script, + Expression orderBy, Pipe pipe) { + super(source, name, dataType, qualifier, nullability, id, synthetic, functionId); this.script = script; this.orderBy = orderBy; @@ -60,14 +61,15 @@ public Pipe asPipe() { @Override protected Expression canonicalize() { - return new ScalarFunctionAttribute(location(), "", dataType(), null, true, id(), false, + return new ScalarFunctionAttribute(source(), "", dataType(), null, Nullability.TRUE, id(), false, functionId(), script, orderBy, pipe); } @Override - protected Attribute clone(Location location, String name, String qualifier, boolean nullable, ExpressionId id, boolean synthetic) { - return new ScalarFunctionAttribute(location, name, dataType(), qualifier, nullable, id, synthetic, - functionId(), script, orderBy, pipe); + protected Attribute clone(Source source, String name, String qualifier, Nullability nullability, + ExpressionId id, boolean synthetic) { + return new ScalarFunctionAttribute(source, name, dataType(), qualifier, 
nullability, + id, synthetic, functionId(), script, orderBy, pipe); } @Override @@ -90,4 +92,4 @@ public boolean equals(Object obj) { protected String label() { return "s->" + functionId(); } -} \ No newline at end of file +} diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/UnaryScalarFunction.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/UnaryScalarFunction.java index 1b639287a1289..9a5f85e943124 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/UnaryScalarFunction.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/UnaryScalarFunction.java @@ -11,7 +11,7 @@ import org.elasticsearch.xpack.sql.expression.gen.pipeline.UnaryPipe; import org.elasticsearch.xpack.sql.expression.gen.processor.Processor; import org.elasticsearch.xpack.sql.expression.gen.script.ScriptTemplate; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; import java.util.List; @@ -21,13 +21,13 @@ public abstract class UnaryScalarFunction extends ScalarFunction { private final Expression field; - protected UnaryScalarFunction(Location location) { - super(location); + protected UnaryScalarFunction(Source source) { + super(source); this.field = null; } - protected UnaryScalarFunction(Location location, Expression field) { - super(location, singletonList(field)); + protected UnaryScalarFunction(Source source, Expression field) { + super(source, singletonList(field)); this.field = field; } @@ -47,7 +47,7 @@ public Expression field() { @Override public final Pipe makePipe() { - return new UnaryPipe(location(), this, Expressions.pipe(field()), makeProcessor()); + return new UnaryPipe(source(), this, Expressions.pipe(field()), makeProcessor()); } protected abstract Processor makeProcessor(); diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/User.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/User.java index 8ca883de58ffd..f8b3beb1d8f52 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/User.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/User.java @@ -7,14 +7,14 @@ package org.elasticsearch.xpack.sql.expression.function.scalar; import org.elasticsearch.xpack.sql.session.Configuration; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; import org.elasticsearch.xpack.sql.tree.NodeInfo; import org.elasticsearch.xpack.sql.type.DataType; public class User extends ConfigurationFunction { - public User(Location location, Configuration configuration) { - super(location, configuration, DataType.KEYWORD); + public User(Source source, Configuration configuration) { + super(source, configuration, DataType.KEYWORD); } @Override diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/BaseDateTimeFunction.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/BaseDateTimeFunction.java index 1ac143c2a021d..cf4fab276c12b 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/BaseDateTimeFunction.java +++ 
b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/BaseDateTimeFunction.java @@ -10,7 +10,7 @@ import org.elasticsearch.xpack.sql.expression.Expressions; import org.elasticsearch.xpack.sql.expression.Expressions.ParamOrdinal; import org.elasticsearch.xpack.sql.expression.function.scalar.UnaryScalarFunction; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; import org.elasticsearch.xpack.sql.tree.NodeInfo; import java.time.ZoneId; @@ -22,8 +22,8 @@ abstract class BaseDateTimeFunction extends UnaryScalarFunction { private final ZoneId zoneId; private final String name; - BaseDateTimeFunction(Location location, Expression field, ZoneId zoneId) { - super(location, field); + BaseDateTimeFunction(Source source, Expression field, ZoneId zoneId) { + super(source, field); this.zoneId = zoneId; StringBuilder sb = new StringBuilder(super.name()); diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/CurrentDateTime.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/CurrentDateTime.java index 3eecf174e0a7f..50a7f8868141a 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/CurrentDateTime.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/CurrentDateTime.java @@ -9,7 +9,7 @@ import org.elasticsearch.xpack.sql.expression.Expression; import org.elasticsearch.xpack.sql.expression.function.scalar.ConfigurationFunction; import org.elasticsearch.xpack.sql.session.Configuration; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; import org.elasticsearch.xpack.sql.tree.NodeInfo; import org.elasticsearch.xpack.sql.type.DataType; @@ -20,8 +20,8 @@ public class CurrentDateTime extends ConfigurationFunction { private final Expression precision; private final ZonedDateTime dateTime; - public CurrentDateTime(Location location, Expression precision, Configuration configuration) { - super(location, configuration, DataType.DATE); + public CurrentDateTime(Source source, Expression precision, Configuration configuration) { + super(source, configuration, DataType.DATE); this.precision = precision; int p = precision != null ? 
((Number) precision.fold()).intValue() : 0; this.dateTime = nanoPrecision(configuration().now(), p); diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateTimeFunction.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateTimeFunction.java index 1ad00c8785f4a..9a55548c921bb 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateTimeFunction.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateTimeFunction.java @@ -10,7 +10,7 @@ import org.elasticsearch.xpack.sql.expression.gen.processor.Processor; import org.elasticsearch.xpack.sql.expression.gen.script.ParamsBuilder; import org.elasticsearch.xpack.sql.expression.gen.script.ScriptTemplate; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; import org.elasticsearch.xpack.sql.type.DataType; import java.time.ZoneId; @@ -23,8 +23,8 @@ public abstract class DateTimeFunction extends BaseDateTimeFunction { private final DateTimeExtractor extractor; - DateTimeFunction(Location location, Expression field, ZoneId zoneId, DateTimeExtractor extractor) { - super(location, field, zoneId); + DateTimeFunction(Source source, Expression field, ZoneId zoneId, DateTimeExtractor extractor) { + super(source, field, zoneId); this.extractor = extractor; } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateTimeHistogramFunction.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateTimeHistogramFunction.java index 0a59c4d52eaf5..d5e867e0b715a 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateTimeHistogramFunction.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateTimeHistogramFunction.java @@ -7,7 +7,7 @@ import org.elasticsearch.xpack.sql.expression.Expression; import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DateTimeProcessor.DateTimeExtractor; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; import java.time.ZoneId; @@ -17,8 +17,8 @@ */ public abstract class DateTimeHistogramFunction extends DateTimeFunction { - DateTimeHistogramFunction(Location location, Expression field, ZoneId zoneId, DateTimeExtractor extractor) { - super(location, field, zoneId, extractor); + DateTimeHistogramFunction(Source source, Expression field, ZoneId zoneId, DateTimeExtractor extractor) { + super(source, field, zoneId, extractor); } /** diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DayName.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DayName.java index b5144020e632b..28972eb241844 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DayName.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DayName.java @@ -7,7 +7,7 @@ import org.elasticsearch.xpack.sql.expression.Expression; import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.NamedDateTimeProcessor.NameExtractor; -import org.elasticsearch.xpack.sql.tree.Location; +import 
org.elasticsearch.xpack.sql.tree.Source; import org.elasticsearch.xpack.sql.tree.NodeInfo.NodeCtor2; import java.time.ZoneId; @@ -17,8 +17,8 @@ */ public class DayName extends NamedDateTimeFunction { - public DayName(Location location, Expression field, ZoneId zoneId) { - super(location, field, zoneId, NameExtractor.DAY_NAME); + public DayName(Source source, Expression field, ZoneId zoneId) { + super(source, field, zoneId, NameExtractor.DAY_NAME); } @Override @@ -28,6 +28,6 @@ protected NodeCtor2 ctorForInfo() { @Override protected DayName replaceChild(Expression newChild) { - return new DayName(location(), newChild, zoneId()); + return new DayName(source(), newChild, zoneId()); } } \ No newline at end of file diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DayOfMonth.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DayOfMonth.java index 837779888f2df..a228f4fe79bde 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DayOfMonth.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DayOfMonth.java @@ -7,7 +7,7 @@ import org.elasticsearch.xpack.sql.expression.Expression; import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DateTimeProcessor.DateTimeExtractor; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; import org.elasticsearch.xpack.sql.tree.NodeInfo.NodeCtor2; import java.time.ZoneId; @@ -16,8 +16,8 @@ * Extract the day of the month from a datetime. */ public class DayOfMonth extends DateTimeFunction { - public DayOfMonth(Location location, Expression field, ZoneId zoneId) { - super(location, field, zoneId, DateTimeExtractor.DAY_OF_MONTH); + public DayOfMonth(Source source, Expression field, ZoneId zoneId) { + super(source, field, zoneId, DateTimeExtractor.DAY_OF_MONTH); } @Override @@ -27,7 +27,7 @@ protected NodeCtor2 ctorForInfo() { @Override protected DayOfMonth replaceChild(Expression newChild) { - return new DayOfMonth(location(), newChild, zoneId()); + return new DayOfMonth(source(), newChild, zoneId()); } @Override diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DayOfWeek.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DayOfWeek.java index 5bc54654bdf16..c8c0a1d568d2b 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DayOfWeek.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DayOfWeek.java @@ -7,7 +7,7 @@ import org.elasticsearch.xpack.sql.expression.Expression; import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.NonIsoDateTimeProcessor.NonIsoDateTimeExtractor; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; import org.elasticsearch.xpack.sql.tree.NodeInfo.NodeCtor2; import java.time.ZoneId; @@ -17,8 +17,8 @@ */ public class DayOfWeek extends NonIsoDateTimeFunction { - public DayOfWeek(Location location, Expression field, ZoneId zoneId) { - super(location, field, zoneId, NonIsoDateTimeExtractor.DAY_OF_WEEK); + public DayOfWeek(Source source, Expression field, ZoneId zoneId) { + super(source, field, zoneId, NonIsoDateTimeExtractor.DAY_OF_WEEK); } @Override @@ -28,6 +28,6 @@ protected 
NodeCtor2 ctorForInfo() { @Override protected DayOfWeek replaceChild(Expression newChild) { - return new DayOfWeek(location(), newChild, zoneId()); + return new DayOfWeek(source(), newChild, zoneId()); } } \ No newline at end of file diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DayOfYear.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DayOfYear.java index 9cacb78b34211..6a54b53802a61 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DayOfYear.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DayOfYear.java @@ -8,7 +8,7 @@ import org.elasticsearch.xpack.sql.expression.Expression; import org.elasticsearch.xpack.sql.expression.function.scalar.UnaryScalarFunction; import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DateTimeProcessor.DateTimeExtractor; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; import org.elasticsearch.xpack.sql.tree.NodeInfo.NodeCtor2; import java.time.ZoneId; @@ -17,8 +17,8 @@ * Extract the day of the year from a datetime. */ public class DayOfYear extends DateTimeFunction { - public DayOfYear(Location location, Expression field, ZoneId zoneId) { - super(location, field, zoneId, DateTimeExtractor.DAY_OF_YEAR); + public DayOfYear(Source source, Expression field, ZoneId zoneId) { + super(source, field, zoneId, DateTimeExtractor.DAY_OF_YEAR); } @Override @@ -28,7 +28,7 @@ protected NodeCtor2 ctorForInfo() { @Override protected UnaryScalarFunction replaceChild(Expression newChild) { - return new DayOfYear(location(), newChild, zoneId()); + return new DayOfYear(source(), newChild, zoneId()); } @Override diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/HourOfDay.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/HourOfDay.java index 490ec721042cd..32de1179965f6 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/HourOfDay.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/HourOfDay.java @@ -7,7 +7,7 @@ import org.elasticsearch.xpack.sql.expression.Expression; import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DateTimeProcessor.DateTimeExtractor; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; import org.elasticsearch.xpack.sql.tree.NodeInfo.NodeCtor2; import java.time.ZoneId; @@ -16,8 +16,8 @@ * Extract the hour of the day from a datetime. 
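
The rename running through every hunk in this section, Location -> Source, is mechanical at each call site, but the distinction is worth spelling out once: where Location identified only a line/column position in the SQL statement, Source also carries the query fragment that produced the node, so errors and attribute names can quote the user's text even after the tree has been rewritten. The Source class itself is not part of this diff; a minimal sketch of the shape its call sites imply (an assumption, not the actual implementation):

    // Hedged sketch of org.elasticsearch.xpack.sql.tree.Source as implied by
    // its call sites; the real class is not shown in this diff.
    public final class Source {
        private final Location location; // line/column in the original statement
        private final String text;       // the exact fragment that produced the node

        public Source(Location location, String text) {
            this.location = location;
            this.text = text;
        }

        public Location location() { return location; }
        public String text() { return text; }
    }
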
*/ public class HourOfDay extends DateTimeFunction { - public HourOfDay(Location location, Expression field, ZoneId zoneId) { - super(location, field, zoneId, DateTimeExtractor.HOUR_OF_DAY); + public HourOfDay(Source source, Expression field, ZoneId zoneId) { + super(source, field, zoneId, DateTimeExtractor.HOUR_OF_DAY); } @Override @@ -27,7 +27,7 @@ protected NodeCtor2 ctorForInfo() { @Override protected HourOfDay replaceChild(Expression newChild) { - return new HourOfDay(location(), newChild, zoneId()); + return new HourOfDay(source(), newChild, zoneId()); } @Override diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/IsoDayOfWeek.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/IsoDayOfWeek.java index ff02f6490d0a7..8bc5903834a0e 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/IsoDayOfWeek.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/IsoDayOfWeek.java @@ -7,7 +7,7 @@ import org.elasticsearch.xpack.sql.expression.Expression; import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DateTimeProcessor.DateTimeExtractor; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; import org.elasticsearch.xpack.sql.tree.NodeInfo.NodeCtor2; import java.time.ZoneId; @@ -16,8 +16,8 @@ * Extract the day of the week (following the ISO standard) from a datetime. 1 is Monday, 2 is Tuesday, etc. */ public class IsoDayOfWeek extends DateTimeFunction { - public IsoDayOfWeek(Location location, Expression field, ZoneId zoneId) { - super(location, field, zoneId, DateTimeExtractor.ISO_DAY_OF_WEEK); + public IsoDayOfWeek(Source source, Expression field, ZoneId zoneId) { + super(source, field, zoneId, DateTimeExtractor.ISO_DAY_OF_WEEK); } @Override @@ -27,7 +27,7 @@ protected NodeCtor2 ctorForInfo() { @Override protected IsoDayOfWeek replaceChild(Expression newChild) { - return new IsoDayOfWeek(location(), newChild, zoneId()); + return new IsoDayOfWeek(source(), newChild, zoneId()); } @Override diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/IsoWeekOfYear.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/IsoWeekOfYear.java index f50deec9fe05a..04a45b1732693 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/IsoWeekOfYear.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/IsoWeekOfYear.java @@ -7,7 +7,7 @@ import org.elasticsearch.xpack.sql.expression.Expression; import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DateTimeProcessor.DateTimeExtractor; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; import org.elasticsearch.xpack.sql.tree.NodeInfo.NodeCtor2; import java.time.ZoneId; @@ -16,8 +16,8 @@ * Extract the week of the year from a datetime following the ISO standard. 
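
The other recurring change, visible in the ConfigurationFunction and ScalarFunctionAttribute hunks earlier in this section, replaces boolean nullable() with a Nullability enum. Only TRUE and FALSE appear in this diff; a minimal sketch of such a three-valued type, with the third member as a labeled assumption about why an enum replaces the boolean:

    // Sketch: TRUE and FALSE are grounded in the hunks above; UNKNOWN is an
    // assumption (a boolean cannot express "not statically determinable").
    public enum Nullability {
        TRUE,    // the expression can return null
        FALSE,   // the expression never returns null
        UNKNOWN  // nullability cannot be decided at plan time (assumed)
    }

ConfigurationFunction's override accordingly becomes return Nullability.FALSE where it previously returned false.
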
*/ public class IsoWeekOfYear extends DateTimeFunction { - public IsoWeekOfYear(Location location, Expression field, ZoneId zoneId) { - super(location, field, zoneId, DateTimeExtractor.ISO_WEEK_OF_YEAR); + public IsoWeekOfYear(Source source, Expression field, ZoneId zoneId) { + super(source, field, zoneId, DateTimeExtractor.ISO_WEEK_OF_YEAR); } @Override @@ -27,7 +27,7 @@ protected NodeCtor2 ctorForInfo() { @Override protected IsoWeekOfYear replaceChild(Expression newChild) { - return new IsoWeekOfYear(location(), newChild, zoneId()); + return new IsoWeekOfYear(source(), newChild, zoneId()); } @Override diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/MinuteOfDay.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/MinuteOfDay.java index e16e0caa83632..1ef450b3f650d 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/MinuteOfDay.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/MinuteOfDay.java @@ -7,7 +7,7 @@ import org.elasticsearch.xpack.sql.expression.Expression; import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DateTimeProcessor.DateTimeExtractor; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; import org.elasticsearch.xpack.sql.tree.NodeInfo.NodeCtor2; import java.time.ZoneId; @@ -17,8 +17,8 @@ */ public class MinuteOfDay extends DateTimeFunction { - public MinuteOfDay(Location location, Expression field, ZoneId zoneId) { - super(location, field, zoneId, DateTimeExtractor.MINUTE_OF_DAY); + public MinuteOfDay(Source source, Expression field, ZoneId zoneId) { + super(source, field, zoneId, DateTimeExtractor.MINUTE_OF_DAY); } @Override @@ -28,7 +28,7 @@ protected NodeCtor2 ctorForInfo() { @Override protected MinuteOfDay replaceChild(Expression newChild) { - return new MinuteOfDay(location(), newChild, zoneId()); + return new MinuteOfDay(source(), newChild, zoneId()); } @Override diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/MinuteOfHour.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/MinuteOfHour.java index 0a49bb042f97b..9c4cf4884b3f6 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/MinuteOfHour.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/MinuteOfHour.java @@ -7,7 +7,7 @@ import org.elasticsearch.xpack.sql.expression.Expression; import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DateTimeProcessor.DateTimeExtractor; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; import org.elasticsearch.xpack.sql.tree.NodeInfo.NodeCtor2; import java.time.ZoneId; @@ -16,8 +16,8 @@ * Extract the minute of the hour from a datetime.
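
In the CurrentDateTime hunk earlier, the folded precision argument is passed to nanoPrecision(configuration().now(), p). That helper's body lies outside this diff; a hedged sketch consistent with its name and usage, truncating the timestamp to p fractional-second digits:

    import java.time.ZonedDateTime;

    class DateUtils { // stand-in name; the helper's real home is not shown here
        // Assumed behavior: keep `precision` fractional-second digits (0-9)
        // and zero out the remaining nanoseconds. The real helper may differ.
        static ZonedDateTime nanoPrecision(ZonedDateTime now, int precision) {
            int p = Math.max(0, Math.min(9, precision)); // clamp to 0-9 digits
            long scale = (long) Math.pow(10, 9 - p);     // nanos to zero out
            return now.withNano((int) (now.getNano() / scale * scale));
        }
    }

With precision 3, for example, 12:34:56.123456789 would truncate to 12:34:56.123.
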
*/ public class MinuteOfHour extends DateTimeFunction { - public MinuteOfHour(Location location, Expression field, ZoneId zoneId) { - super(location, field, zoneId, DateTimeExtractor.MINUTE_OF_HOUR); + public MinuteOfHour(Source source, Expression field, ZoneId zoneId) { + super(source, field, zoneId, DateTimeExtractor.MINUTE_OF_HOUR); } @Override @@ -27,7 +27,7 @@ protected NodeCtor2 ctorForInfo() { @Override protected MinuteOfHour replaceChild(Expression newChild) { - return new MinuteOfHour(location(), newChild, zoneId()); + return new MinuteOfHour(source(), newChild, zoneId()); } @Override diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/MonthName.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/MonthName.java index 570a4a2ea2d9c..17fbe374aacc3 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/MonthName.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/MonthName.java @@ -7,7 +7,7 @@ import org.elasticsearch.xpack.sql.expression.Expression; import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.NamedDateTimeProcessor.NameExtractor; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; import org.elasticsearch.xpack.sql.tree.NodeInfo.NodeCtor2; import java.time.ZoneId; @@ -17,8 +17,8 @@ */ public class MonthName extends NamedDateTimeFunction { - public MonthName(Location location, Expression field, ZoneId zoneId) { - super(location, field, zoneId, NameExtractor.MONTH_NAME); + public MonthName(Source source, Expression field, ZoneId zoneId) { + super(source, field, zoneId, NameExtractor.MONTH_NAME); } @Override @@ -28,6 +28,6 @@ protected NodeCtor2 ctorForInfo() { @Override protected MonthName replaceChild(Expression newChild) { - return new MonthName(location(), newChild, zoneId()); + return new MonthName(source(), newChild, zoneId()); } } \ No newline at end of file diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/MonthOfYear.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/MonthOfYear.java index 88c025a723122..8265dedb7ba2e 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/MonthOfYear.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/MonthOfYear.java @@ -7,7 +7,7 @@ import org.elasticsearch.xpack.sql.expression.Expression; import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DateTimeProcessor.DateTimeExtractor; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; import org.elasticsearch.xpack.sql.tree.NodeInfo.NodeCtor2; import java.time.ZoneId; @@ -16,8 +16,8 @@ * Extract the month of the year from a datetime. 
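
MonthName just above (and DayName earlier) differ from the numeric extraction functions: they delegate to NamedDateTimeProcessor.NameExtractor members (MONTH_NAME, DAY_NAME) and produce a string. The processor is untouched by this diff; a self-contained sketch of the idea, not the actual code:

    import java.time.ZonedDateTime;
    import java.time.format.TextStyle;
    import java.util.Locale;
    import java.util.function.Function;

    // Sketch (assumption): format the relevant field as its display name.
    enum NameExtractor {
        DAY_NAME(dt -> dt.getDayOfWeek().getDisplayName(TextStyle.FULL, Locale.ROOT)),
        MONTH_NAME(dt -> dt.getMonth().getDisplayName(TextStyle.FULL, Locale.ROOT));

        private final Function<ZonedDateTime, String> extractor;

        NameExtractor(Function<ZonedDateTime, String> extractor) {
            this.extractor = extractor;
        }

        String extract(ZonedDateTime dt) {
            return extractor.apply(dt);
        }
    }
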
*/ public class MonthOfYear extends DateTimeFunction { - public MonthOfYear(Location location, Expression field, ZoneId zoneId) { - super(location, field, zoneId, DateTimeExtractor.MONTH_OF_YEAR); + public MonthOfYear(Source source, Expression field, ZoneId zoneId) { + super(source, field, zoneId, DateTimeExtractor.MONTH_OF_YEAR); } @Override @@ -27,7 +27,7 @@ protected NodeCtor2 ctorForInfo() { @Override protected MonthOfYear replaceChild(Expression newChild) { - return new MonthOfYear(location(), newChild, zoneId()); + return new MonthOfYear(source(), newChild, zoneId()); } @Override diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/NamedDateTimeFunction.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/NamedDateTimeFunction.java index d42c18ce88c13..b5d7305d2bbd2 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/NamedDateTimeFunction.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/NamedDateTimeFunction.java @@ -10,7 +10,7 @@ import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.NamedDateTimeProcessor.NameExtractor; import org.elasticsearch.xpack.sql.expression.gen.processor.Processor; import org.elasticsearch.xpack.sql.expression.gen.script.ScriptTemplate; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; import org.elasticsearch.xpack.sql.type.DataType; import org.elasticsearch.xpack.sql.util.StringUtils; @@ -28,8 +28,8 @@ abstract class NamedDateTimeFunction extends BaseDateTimeFunction { private final NameExtractor nameExtractor; - NamedDateTimeFunction(Location location, Expression field, ZoneId zoneId, NameExtractor nameExtractor) { - super(location, field, zoneId); + NamedDateTimeFunction(Source source, Expression field, ZoneId zoneId, NameExtractor nameExtractor) { + super(source, field, zoneId); this.nameExtractor = nameExtractor; } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/NonIsoDateTimeFunction.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/NonIsoDateTimeFunction.java index 82af7380d5383..4d5fb4ad91efd 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/NonIsoDateTimeFunction.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/NonIsoDateTimeFunction.java @@ -10,7 +10,7 @@ import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.NonIsoDateTimeProcessor.NonIsoDateTimeExtractor; import org.elasticsearch.xpack.sql.expression.gen.processor.Processor; import org.elasticsearch.xpack.sql.expression.gen.script.ScriptTemplate; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; import org.elasticsearch.xpack.sql.type.DataType; import org.elasticsearch.xpack.sql.util.StringUtils; @@ -28,8 +28,8 @@ abstract class NonIsoDateTimeFunction extends BaseDateTimeFunction { private final NonIsoDateTimeExtractor extractor; - NonIsoDateTimeFunction(Location location, Expression field, ZoneId zoneId, NonIsoDateTimeExtractor extractor) { - super(location, field, zoneId); + NonIsoDateTimeFunction(Source source, Expression field, ZoneId zoneId, NonIsoDateTimeExtractor extractor) { + super(source, field, zoneId); 
this.extractor = extractor; } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/Quarter.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/Quarter.java index 63455c76ba0ea..4837b7c4a8603 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/Quarter.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/Quarter.java @@ -10,7 +10,7 @@ import org.elasticsearch.xpack.sql.expression.FieldAttribute; import org.elasticsearch.xpack.sql.expression.gen.processor.Processor; import org.elasticsearch.xpack.sql.expression.gen.script.ScriptTemplate; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; import org.elasticsearch.xpack.sql.tree.NodeInfo.NodeCtor2; import org.elasticsearch.xpack.sql.type.DataType; @@ -22,8 +22,8 @@ public class Quarter extends BaseDateTimeFunction { - public Quarter(Location location, Expression field, ZoneId zoneId) { - super(location, field, zoneId); + public Quarter(Source source, Expression field, ZoneId zoneId) { + super(source, field, zoneId); } @Override @@ -48,7 +48,7 @@ protected NodeCtor2 ctorForInfo() { @Override protected Quarter replaceChild(Expression newChild) { - return new Quarter(location(), newChild, zoneId()); + return new Quarter(source(), newChild, zoneId()); } @Override diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/SecondOfMinute.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/SecondOfMinute.java index c06d48ba287a3..4b7c354f412d9 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/SecondOfMinute.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/SecondOfMinute.java @@ -7,7 +7,7 @@ import org.elasticsearch.xpack.sql.expression.Expression; import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DateTimeProcessor.DateTimeExtractor; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; import org.elasticsearch.xpack.sql.tree.NodeInfo.NodeCtor2; import java.time.ZoneId; @@ -16,8 +16,8 @@ * Extract the second of the minute from a datetime. 
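
The numeric functions in this stretch (SecondOfMinute below, MinuteOfDay and MinuteOfHour above, and so on) all hand a DateTimeProcessor.DateTimeExtractor member to their superclass, while DayOfWeek and WeekOfYear use the sibling NonIsoDateTimeExtractor for the non-ISO numbering. Neither enum is modified here; a plausible reading of the ISO one, as a sketch over java.time rather than the actual implementation:

    import java.time.ZonedDateTime;
    import java.time.temporal.ChronoField;

    // Sketch (assumption): each member wraps the matching ChronoField.
    enum DateTimeExtractor {
        MINUTE_OF_DAY(ChronoField.MINUTE_OF_DAY),
        MINUTE_OF_HOUR(ChronoField.MINUTE_OF_HOUR),
        SECOND_OF_MINUTE(ChronoField.SECOND_OF_MINUTE),
        YEAR(ChronoField.YEAR);

        private final ChronoField field;

        DateTimeExtractor(ChronoField field) {
            this.field = field;
        }

        int extract(ZonedDateTime dt) {
            return dt.get(field);
        }
    }
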
*/ public class SecondOfMinute extends DateTimeFunction { - public SecondOfMinute(Location location, Expression field, ZoneId zoneId) { - super(location, field, zoneId, DateTimeExtractor.SECOND_OF_MINUTE); + public SecondOfMinute(Source source, Expression field, ZoneId zoneId) { + super(source, field, zoneId, DateTimeExtractor.SECOND_OF_MINUTE); } @Override @@ -27,7 +27,7 @@ protected NodeCtor2 ctorForInfo() { @Override protected SecondOfMinute replaceChild(Expression newChild) { - return new SecondOfMinute(location(), newChild, zoneId()); + return new SecondOfMinute(source(), newChild, zoneId()); } @Override diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/WeekOfYear.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/WeekOfYear.java index a3d8a128fbc62..566d3c4443c5e 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/WeekOfYear.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/WeekOfYear.java @@ -7,7 +7,7 @@ import org.elasticsearch.xpack.sql.expression.Expression; import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.NonIsoDateTimeProcessor.NonIsoDateTimeExtractor; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; import org.elasticsearch.xpack.sql.tree.NodeInfo.NodeCtor2; import java.time.ZoneId; @@ -17,8 +17,8 @@ */ public class WeekOfYear extends NonIsoDateTimeFunction { - public WeekOfYear(Location location, Expression field, ZoneId zoneId) { - super(location, field, zoneId, NonIsoDateTimeExtractor.WEEK_OF_YEAR); + public WeekOfYear(Source source, Expression field, ZoneId zoneId) { + super(source, field, zoneId, NonIsoDateTimeExtractor.WEEK_OF_YEAR); } @Override @@ -28,6 +28,6 @@ protected NodeCtor2 ctorForInfo() { @Override protected WeekOfYear replaceChild(Expression newChild) { - return new WeekOfYear(location(), newChild, zoneId()); + return new WeekOfYear(source(), newChild, zoneId()); } } \ No newline at end of file diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/Year.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/Year.java index 0f78cf4d78a1a..12355c0baa073 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/Year.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/Year.java @@ -7,7 +7,7 @@ import org.elasticsearch.xpack.sql.expression.Expression; import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DateTimeProcessor.DateTimeExtractor; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; import org.elasticsearch.xpack.sql.tree.NodeInfo.NodeCtor2; import java.time.ZoneId; @@ -20,8 +20,8 @@ public class Year extends DateTimeHistogramFunction { private static long YEAR_IN_MILLIS = TimeUnit.DAYS.toMillis(1) * 365L; - public Year(Location location, Expression field, ZoneId zoneId) { - super(location, field, zoneId, DateTimeExtractor.YEAR); + public Year(Source source, Expression field, ZoneId zoneId) { + super(source, field, zoneId, DateTimeExtractor.YEAR); } @Override @@ -31,7 +31,7 @@ protected NodeCtor2 ctorForInfo() { @Override protected Year replaceChild(Expression newChild) { - return new 
Year(location(), newChild, zoneId()); + return new Year(source(), newChild, zoneId()); } @Override diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/math/ACos.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/math/ACos.java index d4e01329cd33e..692535019e229 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/math/ACos.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/math/ACos.java @@ -7,7 +7,7 @@ import org.elasticsearch.xpack.sql.expression.Expression; import org.elasticsearch.xpack.sql.expression.function.scalar.math.MathProcessor.MathOperation; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; import org.elasticsearch.xpack.sql.tree.NodeInfo; @@ -16,8 +16,8 @@ * function. */ public class ACos extends MathFunction { - public ACos(Location location, Expression field) { - super(location, field); + public ACos(Source source, Expression field) { + super(source, field); } @Override @@ -27,7 +27,7 @@ protected NodeInfo info() { @Override protected ACos replaceChild(Expression newChild) { - return new ACos(location(), newChild); + return new ACos(source(), newChild); } @Override diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/math/ASin.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/math/ASin.java index 26362af968fac..b4698bad48cc7 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/math/ASin.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/math/ASin.java @@ -7,7 +7,7 @@ import org.elasticsearch.xpack.sql.expression.Expression; import org.elasticsearch.xpack.sql.expression.function.scalar.math.MathProcessor.MathOperation; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; import org.elasticsearch.xpack.sql.tree.NodeInfo; /** @@ -15,8 +15,8 @@ * function. */ public class ASin extends MathFunction { - public ASin(Location location, Expression field) { - super(location, field); + public ASin(Source source, Expression field) { + super(source, field); } @Override @@ -26,7 +26,7 @@ protected NodeInfo info() { @Override protected ASin replaceChild(Expression newChild) { - return new ASin(location(), newChild); + return new ASin(source(), newChild); } @Override diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/math/ATan.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/math/ATan.java index ae64ab2e36456..06f5f140ac27b 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/math/ATan.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/math/ATan.java @@ -7,7 +7,7 @@ import org.elasticsearch.xpack.sql.expression.Expression; import org.elasticsearch.xpack.sql.expression.function.scalar.math.MathProcessor.MathOperation; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; import org.elasticsearch.xpack.sql.tree.NodeInfo; /** @@ -15,8 +15,8 @@ * function. 
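
One detail worth pausing on in the Year hunk above: DateTimeHistogramFunction subclasses supply a fixed grouping interval, and the unchanged context line shows Year approximating it as 365 whole days, which ignores leap years:

    import java.util.concurrent.TimeUnit;

    // From the unchanged context in the Year hunk above:
    long yearInMillis = TimeUnit.DAYS.toMillis(1) * 365L; // 31_536_000_000 ms
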
*/ public class ATan extends MathFunction { - public ATan(Location location, Expression field) { - super(location, field); + public ATan(Source source, Expression field) { + super(source, field); } @Override @@ -26,7 +26,7 @@ protected NodeInfo info() { @Override protected ATan replaceChild(Expression newChild) { - return new ATan(location(), newChild); + return new ATan(source(), newChild); } @Override diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/math/ATan2.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/math/ATan2.java index 9ade51084061f..9b15b2a67b470 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/math/ATan2.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/math/ATan2.java @@ -7,7 +7,7 @@ import org.elasticsearch.xpack.sql.expression.Expression; import org.elasticsearch.xpack.sql.expression.function.scalar.math.BinaryMathProcessor.BinaryMathOperation; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; import org.elasticsearch.xpack.sql.tree.NodeInfo; /** @@ -16,8 +16,8 @@ */ public class ATan2 extends BinaryNumericFunction { - public ATan2(Location location, Expression left, Expression right) { - super(location, left, right, BinaryMathOperation.ATAN2); + public ATan2(Source source, Expression left, Expression right) { + super(source, left, right, BinaryMathOperation.ATAN2); } @Override @@ -27,6 +27,6 @@ protected NodeInfo info() { @Override protected ATan2 replaceChildren(Expression newLeft, Expression newRight) { - return new ATan2(location(), newLeft, newRight); + return new ATan2(source(), newLeft, newRight); } } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/math/Abs.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/math/Abs.java index def66bfe4a887..d0c889db66f4c 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/math/Abs.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/math/Abs.java @@ -7,7 +7,7 @@ import org.elasticsearch.xpack.sql.expression.Expression; import org.elasticsearch.xpack.sql.expression.function.scalar.math.MathProcessor.MathOperation; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; import org.elasticsearch.xpack.sql.tree.NodeInfo; import org.elasticsearch.xpack.sql.type.DataType; @@ -16,8 +16,8 @@ * function. 
*/ public class Abs extends MathFunction { - public Abs(Location location, Expression field) { - super(location, field); + public Abs(Source source, Expression field) { + super(source, field); } @Override @@ -27,7 +27,7 @@ protected NodeInfo info() { @Override protected Abs replaceChild(Expression newChild) { - return new Abs(location(), newChild); + return new Abs(source(), newChild); } @Override diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/math/BinaryMathPipe.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/math/BinaryMathPipe.java index 199b333c566f5..5e266639d0d6b 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/math/BinaryMathPipe.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/math/BinaryMathPipe.java @@ -9,7 +9,7 @@ import org.elasticsearch.xpack.sql.expression.function.scalar.math.BinaryMathProcessor.BinaryMathOperation; import org.elasticsearch.xpack.sql.expression.gen.pipeline.BinaryPipe; import org.elasticsearch.xpack.sql.expression.gen.pipeline.Pipe; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; import org.elasticsearch.xpack.sql.tree.NodeInfo; import java.util.Objects; @@ -21,8 +21,8 @@ public class BinaryMathPipe extends BinaryPipe { private final BinaryMathOperation operation; - public BinaryMathPipe(Location location, Expression expression, Pipe left, Pipe right, BinaryMathOperation operation) { - super(location, expression, left, right); + public BinaryMathPipe(Source source, Expression expression, Pipe left, Pipe right, BinaryMathOperation operation) { + super(source, expression, left, right); this.operation = operation; } @@ -37,7 +37,7 @@ public BinaryMathOperation operation() { @Override protected BinaryPipe replaceChildren(Pipe left, Pipe right) { - return new BinaryMathPipe(location(), expression(), left, right, operation); + return new BinaryMathPipe(source(), expression(), left, right, operation); } @Override diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/math/BinaryNumericFunction.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/math/BinaryNumericFunction.java index 6b067a9a8755e..5f04edec65051 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/math/BinaryNumericFunction.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/math/BinaryNumericFunction.java @@ -11,7 +11,7 @@ import org.elasticsearch.xpack.sql.expression.function.scalar.BinaryScalarFunction; import org.elasticsearch.xpack.sql.expression.function.scalar.math.BinaryMathProcessor.BinaryMathOperation; import org.elasticsearch.xpack.sql.expression.gen.pipeline.Pipe; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; import org.elasticsearch.xpack.sql.type.DataType; import java.util.Objects; @@ -20,8 +20,8 @@ public abstract class BinaryNumericFunction extends BinaryScalarFunction { private final BinaryMathOperation operation; - BinaryNumericFunction(Location location, Expression left, Expression right, BinaryMathOperation operation) { - super(location, left, right); + BinaryNumericFunction(Source source, Expression left, Expression right, BinaryMathOperation operation) { + super(source, left, right); this.operation = 
operation; } @@ -51,7 +51,7 @@ public Object fold() { @Override protected Pipe makePipe() { - return new BinaryMathPipe(location(), this, Expressions.pipe(left()), Expressions.pipe(right()), operation); + return new BinaryMathPipe(source(), this, Expressions.pipe(left()), Expressions.pipe(right()), operation); } @Override diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/math/Cbrt.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/math/Cbrt.java index 323e343d97cdc..d6cc7e85dcedc 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/math/Cbrt.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/math/Cbrt.java @@ -7,7 +7,7 @@ import org.elasticsearch.xpack.sql.expression.Expression; import org.elasticsearch.xpack.sql.expression.function.scalar.math.MathProcessor.MathOperation; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; import org.elasticsearch.xpack.sql.tree.NodeInfo; /** @@ -15,8 +15,8 @@ * function. */ public class Cbrt extends MathFunction { - public Cbrt(Location location, Expression field) { - super(location, field); + public Cbrt(Source source, Expression field) { + super(source, field); } @Override @@ -26,7 +26,7 @@ protected NodeInfo info() { @Override protected Cbrt replaceChild(Expression newChild) { - return new Cbrt(location(), newChild); + return new Cbrt(source(), newChild); } @Override diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/math/Ceil.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/math/Ceil.java index 0203a1374662d..556f53918d892 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/math/Ceil.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/math/Ceil.java @@ -7,7 +7,7 @@ import org.elasticsearch.xpack.sql.expression.Expression; import org.elasticsearch.xpack.sql.expression.function.scalar.math.MathProcessor.MathOperation; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; import org.elasticsearch.xpack.sql.tree.NodeInfo; import org.elasticsearch.xpack.sql.type.DataType; import org.elasticsearch.xpack.sql.type.DataTypeConversion; @@ -17,8 +17,8 @@ * function. 
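
ATan2 earlier and Power later in this section both route through BinaryNumericFunction, whose makePipe (shown above) wraps the two child pipes and a BinaryMathProcessor.BinaryMathOperation in a BinaryMathPipe. The operation enum is outside this diff; one way to picture it, offered strictly as a sketch:

    import java.util.function.BinaryOperator;

    // Sketch (assumption): each member applies the matching java.lang.Math call.
    enum BinaryMathOperation {
        ATAN2((l, r) -> Math.atan2(l, r)),
        POWER(Math::pow);

        private final BinaryOperator<Double> op;

        BinaryMathOperation(BinaryOperator<Double> op) {
            this.op = op;
        }

        double apply(double left, double right) {
            return op.apply(left, right);
        }
    }
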
*/ public class Ceil extends MathFunction { - public Ceil(Location location, Expression field) { - super(location, field); + public Ceil(Source source, Expression field) { + super(source, field); } @Override @@ -28,7 +28,7 @@ protected NodeInfo info() { @Override protected Ceil replaceChild(Expression newChild) { - return new Ceil(location(), newChild); + return new Ceil(source(), newChild); } @Override diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/math/Cos.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/math/Cos.java index 5458caf552108..48cb486121d44 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/math/Cos.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/math/Cos.java @@ -7,7 +7,7 @@ import org.elasticsearch.xpack.sql.expression.Expression; import org.elasticsearch.xpack.sql.expression.function.scalar.math.MathProcessor.MathOperation; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; import org.elasticsearch.xpack.sql.tree.NodeInfo; /** @@ -15,8 +15,8 @@ * function. */ public class Cos extends MathFunction { - public Cos(Location location, Expression field) { - super(location, field); + public Cos(Source source, Expression field) { + super(source, field); } @Override @@ -26,7 +26,7 @@ protected NodeInfo info() { @Override protected Cos replaceChild(Expression newChild) { - return new Cos(location(), newChild); + return new Cos(source(), newChild); } @Override diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/math/Cosh.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/math/Cosh.java index 77df82212185e..ce5bd2bcc45ff 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/math/Cosh.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/math/Cosh.java @@ -7,7 +7,7 @@ import org.elasticsearch.xpack.sql.expression.Expression; import org.elasticsearch.xpack.sql.expression.function.scalar.math.MathProcessor.MathOperation; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; import org.elasticsearch.xpack.sql.tree.NodeInfo; /** @@ -15,8 +15,8 @@ * function. 
*/ public class Cosh extends MathFunction { - public Cosh(Location location, Expression field) { - super(location, field); + public Cosh(Source source, Expression field) { + super(source, field); } @Override @@ -26,7 +26,7 @@ protected NodeInfo info() { @Override protected Cosh replaceChild(Expression newChild) { - return new Cosh(location(), newChild); + return new Cosh(source(), newChild); } @Override diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/math/Cot.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/math/Cot.java index 692b3e7cbd7f0..060702bd62d6a 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/math/Cot.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/math/Cot.java @@ -7,7 +7,7 @@ import org.elasticsearch.xpack.sql.expression.Expression; import org.elasticsearch.xpack.sql.expression.function.scalar.math.MathProcessor.MathOperation; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; import org.elasticsearch.xpack.sql.tree.NodeInfo; /** @@ -15,8 +15,8 @@ * function. */ public class Cot extends MathFunction { - public Cot(Location location, Expression field) { - super(location, field); + public Cot(Source source, Expression field) { + super(source, field); } @Override @@ -26,7 +26,7 @@ protected NodeInfo info() { @Override protected Cot replaceChild(Expression newChild) { - return new Cot(location(), newChild); + return new Cot(source(), newChild); } @Override diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/math/Degrees.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/math/Degrees.java index c7a2df1aae158..f4b9ec6080e09 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/math/Degrees.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/math/Degrees.java @@ -7,7 +7,7 @@ import org.elasticsearch.xpack.sql.expression.Expression; import org.elasticsearch.xpack.sql.expression.function.scalar.math.MathProcessor.MathOperation; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; import org.elasticsearch.xpack.sql.tree.NodeInfo; /** @@ -15,8 +15,8 @@ * to
degrees. */ public class Degrees extends MathFunction { - public Degrees(Location location, Expression field) { - super(location, field); + public Degrees(Source source, Expression field) { + super(source, field); } @Override @@ -26,7 +26,7 @@ protected NodeInfo info() { @Override protected Degrees replaceChild(Expression newChild) { - return new Degrees(location(), newChild); + return new Degrees(source(), newChild); } @Override diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/math/E.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/math/E.java index e9d5d4a37e89b..843ecccb265af 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/math/E.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/math/E.java @@ -11,7 +11,7 @@ import org.elasticsearch.xpack.sql.expression.function.scalar.math.MathProcessor.MathOperation; import org.elasticsearch.xpack.sql.expression.gen.script.Params; import org.elasticsearch.xpack.sql.expression.gen.script.ScriptTemplate; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; import org.elasticsearch.xpack.sql.tree.NodeInfo; import org.elasticsearch.xpack.sql.type.DataType; import org.elasticsearch.xpack.sql.util.StringUtils; @@ -20,8 +20,8 @@ public class E extends MathFunction { private static final ScriptTemplate TEMPLATE = new ScriptTemplate("Math.E", Params.EMPTY, DataType.DOUBLE); - public E(Location location) { - super(location, new Literal(location, "E", Math.E, DataType.DOUBLE)); + public E(Source source) { + super(source, new Literal(source, "E", Math.E, DataType.DOUBLE)); } @Override diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/math/Exp.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/math/Exp.java index b8ec51ca27d2e..8eab843d17028 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/math/Exp.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/math/Exp.java @@ -7,7 +7,7 @@ import org.elasticsearch.xpack.sql.expression.Expression; import org.elasticsearch.xpack.sql.expression.function.scalar.math.MathProcessor.MathOperation; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; import org.elasticsearch.xpack.sql.tree.NodeInfo; /** @@ -15,8 +15,8 @@ * function. 
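
The unary math functions in this stretch (Abs, Cbrt, Cos, Exp, Floor, Log, ...) follow the same scheme one level down: each MathFunction subclass names a MathProcessor.MathOperation member. A hedged sketch of that enum's likely shape, again not the actual code:

    import java.util.function.DoubleUnaryOperator;

    // Sketch (assumption): one member per function, wrapping java.lang.Math.
    enum MathOperation {
        ABS(Math::abs),
        CBRT(Math::cbrt),
        EXP(Math::exp),
        FLOOR(Math::floor),
        LOG(Math::log);

        private final DoubleUnaryOperator op;

        MathOperation(DoubleUnaryOperator op) {
            this.op = op;
        }

        double apply(double d) {
            return op.applyAsDouble(d);
        }
    }
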
*/ public class Exp extends MathFunction { - public Exp(Location location, Expression field) { - super(location, field); + public Exp(Source source, Expression field) { + super(source, field); } @Override @@ -26,7 +26,7 @@ protected NodeInfo info() { @Override protected Exp replaceChild(Expression newChild) { - return new Exp(location(), newChild); + return new Exp(source(), newChild); } @Override diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/math/Expm1.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/math/Expm1.java index 3a844f8c39b2b..6f362fe63b864 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/math/Expm1.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/math/Expm1.java @@ -7,7 +7,7 @@ import org.elasticsearch.xpack.sql.expression.Expression; import org.elasticsearch.xpack.sql.expression.function.scalar.math.MathProcessor.MathOperation; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; import org.elasticsearch.xpack.sql.tree.NodeInfo; /** @@ -15,8 +15,8 @@ * function. */ public class Expm1 extends MathFunction { - public Expm1(Location location, Expression field) { - super(location, field); + public Expm1(Source source, Expression field) { + super(source, field); } @Override @@ -26,7 +26,7 @@ protected NodeInfo info() { @Override protected Expm1 replaceChild(Expression newChild) { - return new Expm1(location(), newChild); + return new Expm1(source(), newChild); } @Override diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/math/Floor.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/math/Floor.java index 7548b03f78412..03d6606b0e9fd 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/math/Floor.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/math/Floor.java @@ -7,7 +7,7 @@ import org.elasticsearch.xpack.sql.expression.Expression; import org.elasticsearch.xpack.sql.expression.function.scalar.math.MathProcessor.MathOperation; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; import org.elasticsearch.xpack.sql.tree.NodeInfo; import org.elasticsearch.xpack.sql.type.DataType; import org.elasticsearch.xpack.sql.type.DataTypeConversion; @@ -17,8 +17,8 @@ * function. 
*/ public class Floor extends MathFunction { - public Floor(Location location, Expression field) { - super(location, field); + public Floor(Source source, Expression field) { + super(source, field); } @Override @@ -28,7 +28,7 @@ protected NodeInfo info() { @Override protected Floor replaceChild(Expression newChild) { - return new Floor(location(), newChild); + return new Floor(source(), newChild); } @Override diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/math/Log.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/math/Log.java index 0202f61a7abb3..7c0f3e46bedb1 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/math/Log.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/math/Log.java @@ -7,7 +7,7 @@ import org.elasticsearch.xpack.sql.expression.Expression; import org.elasticsearch.xpack.sql.expression.function.scalar.math.MathProcessor.MathOperation; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; import org.elasticsearch.xpack.sql.tree.NodeInfo; /** @@ -15,8 +15,8 @@ * function. */ public class Log extends MathFunction { - public Log(Location location, Expression field) { - super(location, field); + public Log(Source source, Expression field) { + super(source, field); } @Override @@ -26,7 +26,7 @@ protected NodeInfo info() { @Override protected Log replaceChild(Expression newChild) { - return new Log(location(), newChild); + return new Log(source(), newChild); } @Override diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/math/Log10.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/math/Log10.java index 0005488c619c5..e4583d1eb8ecf 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/math/Log10.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/math/Log10.java @@ -7,7 +7,7 @@ import org.elasticsearch.xpack.sql.expression.Expression; import org.elasticsearch.xpack.sql.expression.function.scalar.math.MathProcessor.MathOperation; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; import org.elasticsearch.xpack.sql.tree.NodeInfo; /** @@ -15,8 +15,8 @@ * base 10 function. 
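
E above and Pi just below use a different constructor shape from the other math functions: instead of a user-supplied field they pass a pre-folded Literal, so the constant folds away at optimization time, while a static ScriptTemplate ("Math.E", "Math.PI") covers the scripted path. Illustratively, following the Literal signature visible in those hunks (Source.EMPTY is an assumption about the tree API, not shown in this diff):

    // Hypothetical constant modeled the way E and Pi are; PHI is illustrative.
    Literal phi = new Literal(Source.EMPTY, "PHI", (1 + Math.sqrt(5)) / 2, DataType.DOUBLE);
    // A MathFunction built over such a literal folds to its value without
    // evaluating anything per document, exactly as E and Pi do.
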
*/ public class Log10 extends MathFunction { - public Log10(Location location, Expression field) { - super(location, field); + public Log10(Source source, Expression field) { + super(source, field); } @Override @@ -26,7 +26,7 @@ protected NodeInfo info() { @Override protected Log10 replaceChild(Expression newChild) { - return new Log10(location(), newChild); + return new Log10(source(), newChild); } @Override diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/math/MathFunction.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/math/MathFunction.java index 412f509854b84..e0555ab0ea3bf 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/math/MathFunction.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/math/MathFunction.java @@ -11,7 +11,7 @@ import org.elasticsearch.xpack.sql.expression.function.scalar.UnaryScalarFunction; import org.elasticsearch.xpack.sql.expression.function.scalar.math.MathProcessor.MathOperation; import org.elasticsearch.xpack.sql.expression.gen.processor.Processor; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; import org.elasticsearch.xpack.sql.type.DataType; import java.util.Locale; @@ -21,12 +21,12 @@ public abstract class MathFunction extends UnaryScalarFunction { - protected MathFunction(Location location) { - super(location); + protected MathFunction(Source source) { + super(source); } - protected MathFunction(Location location, Expression field) { - super(location, field); + protected MathFunction(Source source, Expression field) { + super(source, field); } @Override diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/math/Pi.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/math/Pi.java index 202d2763fc5a6..f6dabb1f211dd 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/math/Pi.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/math/Pi.java @@ -11,7 +11,7 @@ import org.elasticsearch.xpack.sql.expression.function.scalar.math.MathProcessor.MathOperation; import org.elasticsearch.xpack.sql.expression.gen.script.Params; import org.elasticsearch.xpack.sql.expression.gen.script.ScriptTemplate; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; import org.elasticsearch.xpack.sql.tree.NodeInfo; import org.elasticsearch.xpack.sql.type.DataType; import org.elasticsearch.xpack.sql.util.StringUtils; @@ -20,8 +20,8 @@ public class Pi extends MathFunction { private static final ScriptTemplate TEMPLATE = new ScriptTemplate("Math.PI", Params.EMPTY, DataType.DOUBLE); - public Pi(Location location) { - super(location, new Literal(location, "PI", Math.PI, DataType.DOUBLE)); + public Pi(Source source) { + super(source, new Literal(source, "PI", Math.PI, DataType.DOUBLE)); } @Override diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/math/Power.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/math/Power.java index 646fb80102fb2..ec4261d3f07a6 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/math/Power.java +++ 
b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/math/Power.java @@ -7,13 +7,13 @@ import org.elasticsearch.xpack.sql.expression.Expression; import org.elasticsearch.xpack.sql.expression.function.scalar.math.BinaryMathProcessor.BinaryMathOperation; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; import org.elasticsearch.xpack.sql.tree.NodeInfo; public class Power extends BinaryNumericFunction { - public Power(Location location, Expression left, Expression right) { - super(location, left, right, BinaryMathOperation.POWER); + public Power(Source source, Expression left, Expression right) { + super(source, left, right, BinaryMathOperation.POWER); } @Override @@ -23,6 +23,6 @@ protected NodeInfo info() { @Override protected Power replaceChildren(Expression newLeft, Expression newRight) { - return new Power(location(), newLeft, newRight); + return new Power(source(), newLeft, newRight); } } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/math/Radians.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/math/Radians.java index 49e3d5f41ca71..ee63bfa3b0332 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/math/Radians.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/math/Radians.java @@ -7,7 +7,7 @@ import org.elasticsearch.xpack.sql.expression.Expression; import org.elasticsearch.xpack.sql.expression.function.scalar.math.MathProcessor.MathOperation; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; import org.elasticsearch.xpack.sql.tree.NodeInfo; /** @@ -15,8 +15,8 @@ * to radians. 
*/ public class Radians extends MathFunction { - public Radians(Location location, Expression field) { - super(location, field); + public Radians(Source source, Expression field) { + super(source, field); } @Override @@ -26,7 +26,7 @@ protected NodeInfo info() { @Override protected Radians replaceChild(Expression newChild) { - return new Radians(location(), newChild); + return new Radians(source(), newChild); } @Override diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/math/Random.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/math/Random.java index 88b45ba613e2d..70dff7d1ccd44 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/math/Random.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/math/Random.java @@ -7,7 +7,7 @@ import org.elasticsearch.xpack.sql.expression.Expression; import org.elasticsearch.xpack.sql.expression.function.scalar.math.MathProcessor.MathOperation; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; import org.elasticsearch.xpack.sql.tree.NodeInfo; /** @@ -15,8 +15,8 @@ */ public class Random extends MathFunction { - public Random(Location location, Expression field) { - super(location, field); + public Random(Source source, Expression field) { + super(source, field); } @Override @@ -26,7 +26,7 @@ protected NodeInfo info() { @Override protected Random replaceChild(Expression newChild) { - return new Random(location(), newChild); + return new Random(source(), newChild); } @Override diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/math/Round.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/math/Round.java index 05244c2a74e95..f1aad79f8b4ff 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/math/Round.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/math/Round.java @@ -8,7 +8,7 @@ import org.elasticsearch.xpack.sql.expression.Expression; import org.elasticsearch.xpack.sql.expression.Literal; import org.elasticsearch.xpack.sql.expression.function.scalar.math.BinaryMathProcessor.BinaryMathOperation; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; import org.elasticsearch.xpack.sql.tree.NodeInfo; import org.elasticsearch.xpack.sql.type.DataType; @@ -20,8 +20,8 @@ */ public class Round extends BinaryNumericFunction { - public Round(Location location, Expression left, Expression right) { - super(location, left, right == null ? Literal.of(left.location(), 0) : right, BinaryMathOperation.ROUND); + public Round(Source source, Expression left, Expression right) { + super(source, left, right == null ? 
Literal.of(left.source(), 0) : right, BinaryMathOperation.ROUND); } @Override @@ -31,7 +31,7 @@ protected NodeInfo info() { @Override protected Round replaceChildren(Expression newLeft, Expression newRight) { - return new Round(location(), newLeft, newRight); + return new Round(source(), newLeft, newRight); } @Override diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/math/Sign.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/math/Sign.java index 176a98ecbf824..d9ebe34e3594c 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/math/Sign.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/math/Sign.java @@ -7,7 +7,7 @@ import org.elasticsearch.xpack.sql.expression.Expression; import org.elasticsearch.xpack.sql.expression.function.scalar.math.MathProcessor.MathOperation; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; import org.elasticsearch.xpack.sql.tree.NodeInfo; import org.elasticsearch.xpack.sql.type.DataType; @@ -20,8 +20,8 @@ * */ public class Sign extends MathFunction { - public Sign(Location location, Expression field) { - super(location, field); + public Sign(Source source, Expression field) { + super(source, field); } @Override @@ -31,7 +31,7 @@ protected NodeInfo info() { @Override protected Sign replaceChild(Expression newChild) { - return new Sign(location(), newChild); + return new Sign(source(), newChild); } @Override diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/math/Sin.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/math/Sin.java index e61ba739e5238..8fff2be48b3a8 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/math/Sin.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/math/Sin.java @@ -7,7 +7,7 @@ import org.elasticsearch.xpack.sql.expression.Expression; import org.elasticsearch.xpack.sql.expression.function.scalar.math.MathProcessor.MathOperation; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; import org.elasticsearch.xpack.sql.tree.NodeInfo; /** @@ -15,8 +15,8 @@ * function.
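Two constructors in this area synthesize a child expression that has no query text of its own, and both solve the Source problem the same way, by borrowing one that already exists: Pi (above) wraps a constant Literal built from the function's own source, and Round, like Truncate further down, defaults a missing second argument to Literal.of(left.source(), 0). A short sketch of the defaulting idiom, reusing the stand-in types above; this Literal is illustrative shorthand, not the real class.

    // Stand-in Literal: a constant that still carries a Source for error reporting.
    final class Literal implements Expression {
        private final Source source;
        private final Object value;
        Literal(Source source, Object value) {
            this.source = source;
            this.value = value;
        }
        static Literal of(Source source, Object value) {
            return new Literal(source, value);
        }
        @Override
        public Source source() {
            return source;
        }
    }

    final class RoundSketch {
        // ROUND(x) parses with a null second argument; rather than inventing a
        // position for the implied zero, the constructor anchors it at the
        // first argument's Source.
        static Expression digitsOrDefault(Expression left, Expression right) {
            return right == null ? Literal.of(left.source(), 0) : right;
        }
    }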
*/ public class Sin extends MathFunction { - public Sin(Location location, Expression field) { - super(location, field); + public Sin(Source source, Expression field) { + super(source, field); } @Override @@ -26,7 +26,7 @@ protected NodeInfo info() { @Override protected Sin replaceChild(Expression newChild) { - return new Sin(location(), newChild); + return new Sin(source(), newChild); } @Override diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/math/Sinh.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/math/Sinh.java index 52a358176d0fd..80830af0d5cd3 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/math/Sinh.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/math/Sinh.java @@ -7,7 +7,7 @@ import org.elasticsearch.xpack.sql.expression.Expression; import org.elasticsearch.xpack.sql.expression.function.scalar.math.MathProcessor.MathOperation; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; import org.elasticsearch.xpack.sql.tree.NodeInfo; /** @@ -15,8 +15,8 @@ * function. */ public class Sinh extends MathFunction { - public Sinh(Location location, Expression field) { - super(location, field); + public Sinh(Source source, Expression field) { + super(source, field); } @Override @@ -26,7 +26,7 @@ protected NodeInfo info() { @Override protected Sinh replaceChild(Expression newChild) { - return new Sinh(location(), newChild); + return new Sinh(source(), newChild); } @Override diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/math/Sqrt.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/math/Sqrt.java index f9daf25d2188b..30778a8f9791e 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/math/Sqrt.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/math/Sqrt.java @@ -7,7 +7,7 @@ import org.elasticsearch.xpack.sql.expression.Expression; import org.elasticsearch.xpack.sql.expression.function.scalar.math.MathProcessor.MathOperation; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; import org.elasticsearch.xpack.sql.tree.NodeInfo; /** @@ -15,8 +15,8 @@ * function. 
*/ public class Sqrt extends MathFunction { - public Sqrt(Location location, Expression field) { - super(location, field); + public Sqrt(Source source, Expression field) { + super(source, field); } @Override @@ -26,7 +26,7 @@ protected NodeInfo info() { @Override protected Sqrt replaceChild(Expression newChild) { - return new Sqrt(location(), newChild); + return new Sqrt(source(), newChild); } @Override diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/math/Tan.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/math/Tan.java index 25409c84ff39c..2c8a7ef142f15 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/math/Tan.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/math/Tan.java @@ -7,7 +7,7 @@ import org.elasticsearch.xpack.sql.expression.Expression; import org.elasticsearch.xpack.sql.expression.function.scalar.math.MathProcessor.MathOperation; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; import org.elasticsearch.xpack.sql.tree.NodeInfo; /** @@ -15,8 +15,8 @@ * function. */ public class Tan extends MathFunction { - public Tan(Location location, Expression field) { - super(location, field); + public Tan(Source source, Expression field) { + super(source, field); } @Override @@ -26,7 +26,7 @@ protected NodeInfo info() { @Override protected Tan replaceChild(Expression newChild) { - return new Tan(location(), newChild); + return new Tan(source(), newChild); } @Override diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/math/Truncate.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/math/Truncate.java index 2dedd75d5b323..3920b8219a647 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/math/Truncate.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/math/Truncate.java @@ -8,7 +8,7 @@ import org.elasticsearch.xpack.sql.expression.Expression; import org.elasticsearch.xpack.sql.expression.Literal; import org.elasticsearch.xpack.sql.expression.function.scalar.math.BinaryMathProcessor.BinaryMathOperation; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; import org.elasticsearch.xpack.sql.tree.NodeInfo; import org.elasticsearch.xpack.sql.type.DataType; @@ -20,8 +20,8 @@ */ public class Truncate extends BinaryNumericFunction { - public Truncate(Location location, Expression left, Expression right) { - super(location, left, right == null ? Literal.of(left.location(), 0) : right, BinaryMathOperation.TRUNCATE); + public Truncate(Source source, Expression left, Expression right) { + super(source, left, right == null ? 
Literal.of(left.source(), 0) : right, BinaryMathOperation.TRUNCATE); } @Override @@ -31,7 +31,7 @@ protected NodeInfo info() { @Override protected Truncate replaceChildren(Expression newLeft, Expression newRight) { - return new Truncate(location(), newLeft, newRight); + return new Truncate(source(), newLeft, newRight); } @Override diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/Ascii.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/Ascii.java index 7f74a22cd80af..65c173cb9035f 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/Ascii.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/Ascii.java @@ -7,7 +7,7 @@ import org.elasticsearch.xpack.sql.expression.Expression; import org.elasticsearch.xpack.sql.expression.function.scalar.string.StringProcessor.StringOperation; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; import org.elasticsearch.xpack.sql.tree.NodeInfo; import org.elasticsearch.xpack.sql.type.DataType; @@ -16,8 +16,8 @@ */ public class Ascii extends UnaryStringFunction { - public Ascii(Location location, Expression field) { - super(location, field); + public Ascii(Source source, Expression field) { + super(source, field); } @Override @@ -27,7 +27,7 @@ protected NodeInfo info() { @Override protected Ascii replaceChild(Expression newChild) { - return new Ascii(location(), newChild); + return new Ascii(source(), newChild); } @Override diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/BinaryStringFunction.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/BinaryStringFunction.java index b18ebe4f4916d..eb0d9423e9d3d 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/BinaryStringFunction.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/BinaryStringFunction.java @@ -9,7 +9,7 @@ import org.elasticsearch.xpack.sql.expression.FieldAttribute; import org.elasticsearch.xpack.sql.expression.function.scalar.BinaryScalarFunction; import org.elasticsearch.xpack.sql.expression.gen.script.ScriptTemplate; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; import java.util.Locale; import java.util.Objects; @@ -25,8 +25,8 @@ */ public abstract class BinaryStringFunction extends BinaryScalarFunction { - protected BinaryStringFunction(Location location, Expression left, Expression right) { - super(location, left, right); + protected BinaryStringFunction(Source source, Expression left, Expression right) { + super(source, left, right); } /* diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/BinaryStringNumericFunction.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/BinaryStringNumericFunction.java index 8cc90e050e09f..1c15e3ec5d73f 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/BinaryStringNumericFunction.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/BinaryStringNumericFunction.java @@ -9,7 +9,7 @@ import 
org.elasticsearch.xpack.sql.expression.Expressions; import org.elasticsearch.xpack.sql.expression.function.scalar.string.BinaryStringNumericProcessor.BinaryStringNumericOperation; import org.elasticsearch.xpack.sql.expression.gen.pipeline.Pipe; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; import org.elasticsearch.xpack.sql.type.DataType; /** @@ -17,8 +17,8 @@ */ public abstract class BinaryStringNumericFunction extends BinaryStringFunction { - public BinaryStringNumericFunction(Location location, Expression left, Expression right) { - super(location, left, right); + public BinaryStringNumericFunction(Source source, Expression left, Expression right) { + super(source, left, right); } @Override @@ -31,7 +31,7 @@ protected TypeResolution resolveSecondParameterInputType(Expression e) { @Override protected Pipe makePipe() { - return new BinaryStringNumericPipe(location(), this, Expressions.pipe(left()), Expressions.pipe(right()), operation()); + return new BinaryStringNumericPipe(source(), this, Expressions.pipe(left()), Expressions.pipe(right()), operation()); } @Override diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/BinaryStringNumericPipe.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/BinaryStringNumericPipe.java index e8898533543da..f64dea2a8a081 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/BinaryStringNumericPipe.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/BinaryStringNumericPipe.java @@ -9,7 +9,7 @@ import org.elasticsearch.xpack.sql.expression.function.scalar.string.BinaryStringNumericProcessor.BinaryStringNumericOperation; import org.elasticsearch.xpack.sql.expression.gen.pipeline.BinaryPipe; import org.elasticsearch.xpack.sql.expression.gen.pipeline.Pipe; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; import org.elasticsearch.xpack.sql.tree.NodeInfo; import java.util.Objects; @@ -21,9 +21,9 @@ public class BinaryStringNumericPipe extends BinaryPipe { private final BinaryStringNumericOperation operation; - public BinaryStringNumericPipe(Location location, Expression expression, Pipe left, Pipe right, + public BinaryStringNumericPipe(Source source, Expression expression, Pipe left, Pipe right, BinaryStringNumericOperation operation) { - super(location, expression, left, right); + super(source, expression, left, right); this.operation = operation; } @@ -38,7 +38,7 @@ public BinaryStringNumericOperation operation() { @Override protected BinaryPipe replaceChildren(Pipe newLeft, Pipe newRight) { - return new BinaryStringNumericPipe(location(), expression(), newLeft, newRight, operation()); + return new BinaryStringNumericPipe(source(), expression(), newLeft, newRight, operation()); } @Override diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/BinaryStringStringFunction.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/BinaryStringStringFunction.java index 3d4816cedb0df..9a937d4647f50 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/BinaryStringStringFunction.java +++ 
b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/BinaryStringStringFunction.java @@ -7,7 +7,7 @@ import org.elasticsearch.xpack.sql.expression.Expression; import org.elasticsearch.xpack.sql.expression.Expressions; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; import org.elasticsearch.xpack.sql.type.DataType; /** @@ -15,8 +15,8 @@ */ public abstract class BinaryStringStringFunction extends BinaryStringFunction { - public BinaryStringStringFunction(Location location, Expression left, Expression right) { - super(location, left, right); + public BinaryStringStringFunction(Source source, Expression left, Expression right) { + super(source, left, right); } @Override diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/BinaryStringStringPipe.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/BinaryStringStringPipe.java index c660a7bcf6f47..23f3940d73d08 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/BinaryStringStringPipe.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/BinaryStringStringPipe.java @@ -9,7 +9,7 @@ import org.elasticsearch.xpack.sql.expression.function.scalar.string.BinaryStringStringProcessor.BinaryStringStringOperation; import org.elasticsearch.xpack.sql.expression.gen.pipeline.BinaryPipe; import org.elasticsearch.xpack.sql.expression.gen.pipeline.Pipe; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; import org.elasticsearch.xpack.sql.tree.NodeInfo; import java.util.Objects; @@ -21,8 +21,8 @@ public class BinaryStringStringPipe extends BinaryPipe { private final BinaryStringStringOperation operation; - public BinaryStringStringPipe(Location location, Expression expression, Pipe left, Pipe right, BinaryStringStringOperation operation) { - super(location, expression, left, right); + public BinaryStringStringPipe(Source source, Expression expression, Pipe left, Pipe right, BinaryStringStringOperation operation) { + super(source, expression, left, right); this.operation = operation; } @@ -37,7 +37,7 @@ public BinaryStringStringOperation operation() { @Override protected BinaryPipe replaceChildren(Pipe left, Pipe right) { - return new BinaryStringStringPipe(location(), expression(), left, right, operation); + return new BinaryStringStringPipe(source(), expression(), left, right, operation); } @Override diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/BitLength.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/BitLength.java index 0cd6268c838d5..11806d4ab9951 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/BitLength.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/BitLength.java @@ -7,7 +7,7 @@ import org.elasticsearch.xpack.sql.expression.Expression; import org.elasticsearch.xpack.sql.expression.function.scalar.string.StringProcessor.StringOperation; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; import org.elasticsearch.xpack.sql.tree.NodeInfo; import org.elasticsearch.xpack.sql.type.DataType; @@ -16,8 +16,8 @@ */ public class BitLength extends 
UnaryStringFunction { - public BitLength(Location location, Expression field) { - super(location, field); + public BitLength(Source source, Expression field) { + super(source, field); } @Override @@ -27,7 +27,7 @@ protected NodeInfo info() { @Override protected BitLength replaceChild(Expression newChild) { - return new BitLength(location(), newChild); + return new BitLength(source(), newChild); } @Override diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/Char.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/Char.java index 06d1c3d81cc42..2216d211cd264 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/Char.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/Char.java @@ -7,7 +7,7 @@ import org.elasticsearch.xpack.sql.expression.Expression; import org.elasticsearch.xpack.sql.expression.function.scalar.string.StringProcessor.StringOperation; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; import org.elasticsearch.xpack.sql.tree.NodeInfo; import org.elasticsearch.xpack.sql.type.DataType; @@ -16,8 +16,8 @@ */ public class Char extends UnaryStringIntFunction { - public Char(Location location, Expression field) { - super(location, field); + public Char(Source source, Expression field) { + super(source, field); } @Override @@ -27,7 +27,7 @@ protected NodeInfo info() { @Override protected Char replaceChild(Expression newChild) { - return new Char(location(), newChild); + return new Char(source(), newChild); } @Override diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/CharLength.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/CharLength.java index bdf43fbeb4ee9..45f0c1e781a0a 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/CharLength.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/CharLength.java @@ -7,7 +7,7 @@ import org.elasticsearch.xpack.sql.expression.Expression; import org.elasticsearch.xpack.sql.expression.function.scalar.string.StringProcessor.StringOperation; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; import org.elasticsearch.xpack.sql.tree.NodeInfo; import org.elasticsearch.xpack.sql.type.DataType; @@ -16,8 +16,8 @@ */ public class CharLength extends UnaryStringFunction { - public CharLength(Location location, Expression field) { - super(location, field); + public CharLength(Source source, Expression field) { + super(source, field); } @Override @@ -27,7 +27,7 @@ protected NodeInfo info() { @Override protected CharLength replaceChild(Expression newChild) { - return new CharLength(location(), newChild); + return new CharLength(source(), newChild); } @Override diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/Concat.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/Concat.java index d89d8fe6efbff..9a132c012a817 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/Concat.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/Concat.java @@ 
-9,10 +9,11 @@ import org.elasticsearch.xpack.sql.expression.Expressions; import org.elasticsearch.xpack.sql.expression.Expressions.ParamOrdinal; import org.elasticsearch.xpack.sql.expression.FieldAttribute; +import org.elasticsearch.xpack.sql.expression.Nullability; import org.elasticsearch.xpack.sql.expression.function.scalar.BinaryScalarFunction; import org.elasticsearch.xpack.sql.expression.gen.pipeline.Pipe; import org.elasticsearch.xpack.sql.expression.gen.script.ScriptTemplate; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; import org.elasticsearch.xpack.sql.tree.NodeInfo; import org.elasticsearch.xpack.sql.type.DataType; @@ -26,8 +27,8 @@ */ public class Concat extends BinaryScalarFunction { - public Concat(Location location, Expression source1, Expression source2) { - super(location, source1, source2); + public Concat(Source source, Expression source1, Expression source2) { + super(source, source1, source2); } @Override @@ -46,12 +47,12 @@ protected TypeResolution resolveType() { @Override protected Pipe makePipe() { - return new ConcatFunctionPipe(location(), this, Expressions.pipe(left()), Expressions.pipe(right())); + return new ConcatFunctionPipe(source(), this, Expressions.pipe(left()), Expressions.pipe(right())); } @Override - public boolean nullable() { - return false; + public Nullability nullable() { + return Nullability.FALSE; } @Override @@ -66,7 +67,7 @@ public Object fold() { @Override protected Concat replaceChildren(Expression newLeft, Expression newRight) { - return new Concat(location(), newLeft, newRight); + return new Concat(source(), newLeft, newRight); } @Override @@ -85,4 +86,4 @@ public ScriptTemplate scriptWithField(FieldAttribute field) { public DataType dataType() { return DataType.KEYWORD; } -} \ No newline at end of file +} diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/ConcatFunctionPipe.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/ConcatFunctionPipe.java index 6624f5434e37a..03be678709459 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/ConcatFunctionPipe.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/ConcatFunctionPipe.java @@ -8,15 +8,15 @@ import org.elasticsearch.xpack.sql.expression.Expression; import org.elasticsearch.xpack.sql.expression.gen.pipeline.BinaryPipe; import org.elasticsearch.xpack.sql.expression.gen.pipeline.Pipe; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; import org.elasticsearch.xpack.sql.tree.NodeInfo; import java.util.Objects; public class ConcatFunctionPipe extends BinaryPipe { - public ConcatFunctionPipe(Location location, Expression expression, Pipe left, Pipe right) { - super(location, expression, left, right); + public ConcatFunctionPipe(Source source, Expression expression, Pipe left, Pipe right) { + super(source, expression, left, right); } @Override @@ -26,7 +26,7 @@ protected NodeInfo info() { @Override protected BinaryPipe replaceChildren(Pipe left, Pipe right) { - return new ConcatFunctionPipe(location(), expression(), left, right); + return new ConcatFunctionPipe(source(), expression(), left, right); } @Override diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/Insert.java 
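Concat is the one function in this section whose API surface changes as well as its plumbing: nullable() goes from boolean to a Nullability value. Only the FALSE constant is visible here (CONCAT never returns null, since a null argument is treated as an empty string), but the move to an enum implies nullability is now a tri-state answer. The sketch below is an assumption-laden illustration; TRUE, UNKNOWN, and the combining rule are not shown anywhere in this diff.

    // Hypothetical tri-state nullability; only FALSE appears in the hunks above.
    enum Nullability {
        TRUE,    // the expression can evaluate to null
        FALSE,   // the expression never evaluates to null
        UNKNOWN; // cannot be decided statically

        // One motivation for an enum over a boolean: answers compose without
        // losing the "don't know" case.
        static Nullability and(Nullability a, Nullability b) {
            if (a == TRUE || b == TRUE) {
                return TRUE;
            }
            if (a == UNKNOWN || b == UNKNOWN) {
                return UNKNOWN;
            }
            return FALSE;
        }
    }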
b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/Insert.java index c3c496fc67151..7f15e9ce6117f 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/Insert.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/Insert.java @@ -12,8 +12,8 @@ import org.elasticsearch.xpack.sql.expression.function.scalar.ScalarFunction; import org.elasticsearch.xpack.sql.expression.gen.pipeline.Pipe; import org.elasticsearch.xpack.sql.expression.gen.script.ScriptTemplate; -import org.elasticsearch.xpack.sql.tree.Location; import org.elasticsearch.xpack.sql.tree.NodeInfo; +import org.elasticsearch.xpack.sql.tree.Source; import org.elasticsearch.xpack.sql.type.DataType; import java.util.Arrays; @@ -32,9 +32,9 @@ public class Insert extends ScalarFunction { private final Expression source, start, length, replacement; - public Insert(Location location, Expression source, Expression start, Expression length, Expression replacement) { - super(location, Arrays.asList(source, start, length, replacement)); - this.source = source; + public Insert(Source source, Expression src, Expression start, Expression length, Expression replacement) { + super(source, Arrays.asList(src, start, length, replacement)); + this.source = src; this.start = start; this.length = length; this.replacement = replacement; @@ -79,7 +79,7 @@ public Object fold() { @Override protected Pipe makePipe() { - return new InsertFunctionPipe(location(), this, + return new InsertFunctionPipe(source(), this, Expressions.pipe(source), Expressions.pipe(start), Expressions.pipe(length), @@ -134,6 +134,6 @@ public Expression replaceChildren(List newChildren) { throw new IllegalArgumentException("expected [4] children but received [" + newChildren.size() + "]"); } - return new Insert(location(), newChildren.get(0), newChildren.get(1), newChildren.get(2), newChildren.get(3)); + return new Insert(source(), newChildren.get(0), newChildren.get(1), newChildren.get(2), newChildren.get(3)); } } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/InsertFunctionPipe.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/InsertFunctionPipe.java index 7c147bca93cbd..592fbdafb5a26 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/InsertFunctionPipe.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/InsertFunctionPipe.java @@ -8,8 +8,8 @@ import org.elasticsearch.xpack.sql.execution.search.SqlSourceBuilder; import org.elasticsearch.xpack.sql.expression.Expression; import org.elasticsearch.xpack.sql.expression.gen.pipeline.Pipe; -import org.elasticsearch.xpack.sql.tree.Location; import org.elasticsearch.xpack.sql.tree.NodeInfo; +import org.elasticsearch.xpack.sql.tree.Source; import java.util.Arrays; import java.util.List; @@ -19,11 +19,11 @@ public class InsertFunctionPipe extends Pipe { private final Pipe source, start, length, replacement; - public InsertFunctionPipe(Location location, Expression expression, - Pipe source, Pipe start, + public InsertFunctionPipe(Source source, Expression expression, + Pipe src, Pipe start, Pipe length, Pipe replacement) { - super(location, expression, Arrays.asList(source, start, length, replacement)); - this.source = source; + super(source, expression, Arrays.asList(src, start, 
length, replacement)); + this.source = src; this.start = start; this.length = length; this.replacement = replacement; @@ -43,9 +43,9 @@ public final Pipe resolveAttributes(AttributeResolver resolver) { Pipe newStart = start.resolveAttributes(resolver); Pipe newLength = length.resolveAttributes(resolver); Pipe newReplacement = replacement.resolveAttributes(resolver); - if (newSource == source - && newStart == start - && newLength == length + if (newSource == source + && newStart == start + && newLength == length && newReplacement == replacement) { return this; } @@ -54,8 +54,8 @@ public final Pipe resolveAttributes(AttributeResolver resolver) { @Override public boolean supportedByAggsOnlyQuery() { - return source.supportedByAggsOnlyQuery() - && start.supportedByAggsOnlyQuery() + return source.supportedByAggsOnlyQuery() + && start.supportedByAggsOnlyQuery() && length.supportedByAggsOnlyQuery() && replacement.supportedByAggsOnlyQuery(); } @@ -65,11 +65,11 @@ public boolean resolved() { return source.resolved() && start.resolved() && length.resolved() && replacement.resolved(); } - protected Pipe replaceChildren(Pipe newSource, - Pipe newStart, + protected Pipe replaceChildren(Pipe newSource, + Pipe newStart, Pipe newLength, Pipe newReplacement) { - return new InsertFunctionPipe(location(), expression(), newSource, newStart, newLength, newReplacement); + return new InsertFunctionPipe(source(), expression(), newSource, newStart, newLength, newReplacement); } @Override @@ -90,7 +90,7 @@ public InsertFunctionProcessor asProcessor() { return new InsertFunctionProcessor(source.asProcessor(), start.asProcessor(), length.asProcessor(), replacement.asProcessor()); } - public Pipe source() { + public Pipe src() { return source; } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/LCase.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/LCase.java index a074fcb3b98b6..6648694929e07 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/LCase.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/LCase.java @@ -7,7 +7,7 @@ import org.elasticsearch.xpack.sql.expression.Expression; import org.elasticsearch.xpack.sql.expression.function.scalar.string.StringProcessor.StringOperation; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; import org.elasticsearch.xpack.sql.tree.NodeInfo; import org.elasticsearch.xpack.sql.type.DataType; @@ -16,8 +16,8 @@ */ public class LCase extends UnaryStringFunction { - public LCase(Location location, Expression field) { - super(location, field); + public LCase(Source source, Expression field) { + super(source, field); } @Override @@ -27,7 +27,7 @@ protected NodeInfo info() { @Override protected LCase replaceChild(Expression newChild) { - return new LCase(location(), newChild); + return new LCase(source(), newChild); } @Override diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/LTrim.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/LTrim.java index 616f8ccdfedfc..92f05fa6783c3 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/LTrim.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/LTrim.java @@ -7,7 +7,7 @@ 
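In the string functions the rename runs into a name collision: Insert, Locate, Replace, and Substring already hold an Expression field called source (the string being operated on), and their pipes exposed it as source(). Since every node now exposes source() returning the tree Source, the constructor parameter becomes src and the pipe accessor is renamed to src(), while the private field keeps its old name; the remaining churn in these hunks is trailing-whitespace cleanup on continuation lines. A sketch of the clash, again with the stand-in types above:

    // Why the accessor moved: Node.source() now returns the tree Source, so an
    // accessor for the string input cannot also be called source().
    final class InsertPipeSketch extends Node {
        private final Expression source; // the string input; field name unchanged
        InsertPipeSketch(Source source, Expression src) {
            super(source);     // the `source` parameter is now the tree Source...
            this.source = src; // ...so the string input arrives as `src`
        }
        // Declaring `Expression source()` here would clash with the inherited
        // `Source source()` (incompatible return types), hence src().
        Expression src() {
            return source;
        }
    }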
import org.elasticsearch.xpack.sql.expression.Expression; import org.elasticsearch.xpack.sql.expression.function.scalar.string.StringProcessor.StringOperation; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; import org.elasticsearch.xpack.sql.tree.NodeInfo; import org.elasticsearch.xpack.sql.type.DataType; @@ -16,8 +16,8 @@ */ public class LTrim extends UnaryStringFunction { - public LTrim(Location location, Expression field) { - super(location, field); + public LTrim(Source source, Expression field) { + super(source, field); } @Override @@ -27,7 +27,7 @@ protected NodeInfo info() { @Override protected LTrim replaceChild(Expression newChild) { - return new LTrim(location(), newChild); + return new LTrim(source(), newChild); } @Override diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/Left.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/Left.java index 728c38e8f6284..8db48e415f1a2 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/Left.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/Left.java @@ -7,7 +7,7 @@ import org.elasticsearch.xpack.sql.expression.Expression; import org.elasticsearch.xpack.sql.expression.function.scalar.string.BinaryStringNumericProcessor.BinaryStringNumericOperation; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; import org.elasticsearch.xpack.sql.tree.NodeInfo; /** @@ -15,8 +15,8 @@ */ public class Left extends BinaryStringNumericFunction { - public Left(Location location, Expression left, Expression right) { - super(location, left, right); + public Left(Source source, Expression left, Expression right) { + super(source, left, right); } @Override @@ -26,7 +26,7 @@ protected BinaryStringNumericOperation operation() { @Override protected Left replaceChildren(Expression newLeft, Expression newRight) { - return new Left(location(), newLeft, newRight); + return new Left(source(), newLeft, newRight); } @Override diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/Length.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/Length.java index 8e3efbfceec22..193660e9fc4b6 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/Length.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/Length.java @@ -7,7 +7,7 @@ import org.elasticsearch.xpack.sql.expression.Expression; import org.elasticsearch.xpack.sql.expression.function.scalar.string.StringProcessor.StringOperation; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; import org.elasticsearch.xpack.sql.tree.NodeInfo; import org.elasticsearch.xpack.sql.type.DataType; @@ -16,8 +16,8 @@ */ public class Length extends UnaryStringFunction { - public Length(Location location, Expression field) { - super(location, field); + public Length(Source source, Expression field) { + super(source, field); } @Override @@ -27,7 +27,7 @@ protected NodeInfo info() { @Override protected Length replaceChild(Expression newChild) { - return new Length(location(), newChild); + return new Length(source(), newChild); } @Override diff --git 
a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/Locate.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/Locate.java index f8650db70682a..cf3d7ed6e6a12 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/Locate.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/Locate.java @@ -12,8 +12,8 @@ import org.elasticsearch.xpack.sql.expression.function.scalar.ScalarFunction; import org.elasticsearch.xpack.sql.expression.gen.pipeline.Pipe; import org.elasticsearch.xpack.sql.expression.gen.script.ScriptTemplate; -import org.elasticsearch.xpack.sql.tree.Location; import org.elasticsearch.xpack.sql.tree.NodeInfo; +import org.elasticsearch.xpack.sql.tree.Source; import org.elasticsearch.xpack.sql.type.DataType; import java.util.Arrays; @@ -35,10 +35,10 @@ public class Locate extends ScalarFunction { private final Expression pattern, source, start; - public Locate(Location location, Expression pattern, Expression source, Expression start) { - super(location, start != null ? Arrays.asList(pattern, source, start) : Arrays.asList(pattern, source)); + public Locate(Source source, Expression pattern, Expression src, Expression start) { + super(source, start != null ? Arrays.asList(pattern, src, start) : Arrays.asList(pattern, src)); this.pattern = pattern; - this.source = source; + this.source = src; this.start = start; } @@ -65,7 +65,7 @@ protected TypeResolution resolveType() { @Override protected Pipe makePipe() { - return new LocateFunctionPipe(location(), this, + return new LocateFunctionPipe(source(), this, Expressions.pipe(pattern), Expressions.pipe(source), start == null ? null : Expressions.pipe(start)); @@ -137,6 +137,6 @@ public Expression replaceChildren(List newChildren) { throw new IllegalArgumentException("expected [3] children but received [" + newChildren.size() + "]"); } - return new Locate(location(), newChildren.get(0), newChildren.get(1), newChildren.get(2)); + return new Locate(source(), newChildren.get(0), newChildren.get(1), newChildren.get(2)); } } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/LocateFunctionPipe.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/LocateFunctionPipe.java index 8477ff34277d9..b33138297a607 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/LocateFunctionPipe.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/LocateFunctionPipe.java @@ -8,8 +8,8 @@ import org.elasticsearch.xpack.sql.execution.search.SqlSourceBuilder; import org.elasticsearch.xpack.sql.expression.Expression; import org.elasticsearch.xpack.sql.expression.gen.pipeline.Pipe; -import org.elasticsearch.xpack.sql.tree.Location; import org.elasticsearch.xpack.sql.tree.NodeInfo; +import org.elasticsearch.xpack.sql.tree.Source; import java.util.Arrays; import java.util.List; @@ -19,11 +19,11 @@ public class LocateFunctionPipe extends Pipe { private final Pipe pattern, source, start; - public LocateFunctionPipe(Location location, Expression expression, Pipe pattern, - Pipe source, Pipe start) { - super(location, expression, start == null ? 
Arrays.asList(pattern, source) : Arrays.asList(pattern, source, start)); + public LocateFunctionPipe(Source source, Expression expression, Pipe pattern, + Pipe src, Pipe start) { + super(source, expression, start == null ? Arrays.asList(pattern, src) : Arrays.asList(pattern, src, start)); this.pattern = pattern; - this.source = source; + this.source = src; this.start = start; } @@ -60,7 +60,7 @@ public boolean resolved() { protected Pipe replaceChildren(Pipe newPattern, Pipe newSource, Pipe newStart) { - return new LocateFunctionPipe(location(), expression(), newPattern, newSource, newStart); + return new LocateFunctionPipe(source(), expression(), newPattern, newSource, newStart); } @Override @@ -82,7 +82,7 @@ public LocateFunctionProcessor asProcessor() { return new LocateFunctionProcessor(pattern.asProcessor(), source.asProcessor(), start == null ? null : start.asProcessor()); } - public Pipe source() { + public Pipe src() { return source; } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/OctetLength.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/OctetLength.java index 4153769f50ca5..63e2424708679 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/OctetLength.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/OctetLength.java @@ -7,7 +7,7 @@ import org.elasticsearch.xpack.sql.expression.Expression; import org.elasticsearch.xpack.sql.expression.function.scalar.string.StringProcessor.StringOperation; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; import org.elasticsearch.xpack.sql.tree.NodeInfo; import org.elasticsearch.xpack.sql.type.DataType; @@ -16,8 +16,8 @@ */ public class OctetLength extends UnaryStringFunction { - public OctetLength(Location location, Expression field) { - super(location, field); + public OctetLength(Source source, Expression field) { + super(source, field); } @Override @@ -27,7 +27,7 @@ protected NodeInfo info() { @Override protected OctetLength replaceChild(Expression newChild) { - return new OctetLength(location(), newChild); + return new OctetLength(source(), newChild); } @Override diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/Position.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/Position.java index 68549474e3a0a..916328a2d0704 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/Position.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/Position.java @@ -9,7 +9,7 @@ import org.elasticsearch.xpack.sql.expression.Expressions; import org.elasticsearch.xpack.sql.expression.function.scalar.string.BinaryStringStringProcessor.BinaryStringStringOperation; import org.elasticsearch.xpack.sql.expression.gen.pipeline.Pipe; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; import org.elasticsearch.xpack.sql.tree.NodeInfo; import java.util.function.BiFunction; @@ -19,8 +19,8 @@ */ public class Position extends BinaryStringStringFunction { - public Position(Location location, Expression left, Expression right) { - super(location, left, right); + public Position(Source source, Expression left, Expression right) { + super(source, left, 
right); } @Override @@ -30,12 +30,12 @@ protected BiFunction operation() { @Override protected Position replaceChildren(Expression newLeft, Expression newRight) { - return new Position(location(), newLeft, newRight); + return new Position(source(), newLeft, newRight); } @Override protected Pipe makePipe() { - return new BinaryStringStringPipe(location(), this, + return new BinaryStringStringPipe(source(), this, Expressions.pipe(left()), Expressions.pipe(right()), BinaryStringStringOperation.POSITION); diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/RTrim.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/RTrim.java index 433668420d35f..8f1f3f3f552f6 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/RTrim.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/RTrim.java @@ -7,7 +7,7 @@ import org.elasticsearch.xpack.sql.expression.Expression; import org.elasticsearch.xpack.sql.expression.function.scalar.string.StringProcessor.StringOperation; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; import org.elasticsearch.xpack.sql.tree.NodeInfo; import org.elasticsearch.xpack.sql.type.DataType; @@ -16,8 +16,8 @@ */ public class RTrim extends UnaryStringFunction { - public RTrim(Location location, Expression field) { - super(location, field); + public RTrim(Source source, Expression field) { + super(source, field); } @Override @@ -27,7 +27,7 @@ protected NodeInfo info() { @Override protected RTrim replaceChild(Expression newChild) { - return new RTrim(location(), newChild); + return new RTrim(source(), newChild); } @Override diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/Repeat.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/Repeat.java index 05ebcfdc7597f..956d2857bc6ac 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/Repeat.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/Repeat.java @@ -7,7 +7,7 @@ import org.elasticsearch.xpack.sql.expression.Expression; import org.elasticsearch.xpack.sql.expression.function.scalar.string.BinaryStringNumericProcessor.BinaryStringNumericOperation; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; import org.elasticsearch.xpack.sql.tree.NodeInfo; /** @@ -15,8 +15,8 @@ */ public class Repeat extends BinaryStringNumericFunction { - public Repeat(Location location, Expression left, Expression right) { - super(location, left, right); + public Repeat(Source source, Expression left, Expression right) { + super(source, left, right); } @Override @@ -26,7 +26,7 @@ protected BinaryStringNumericOperation operation() { @Override protected Repeat replaceChildren(Expression newLeft, Expression newRight) { - return new Repeat(location(), newLeft, newRight); + return new Repeat(source(), newLeft, newRight); } @Override diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/Replace.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/Replace.java index 55710047b2c19..82994fae9f000 100644 --- 
a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/Replace.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/Replace.java @@ -12,8 +12,8 @@ import org.elasticsearch.xpack.sql.expression.function.scalar.ScalarFunction; import org.elasticsearch.xpack.sql.expression.gen.pipeline.Pipe; import org.elasticsearch.xpack.sql.expression.gen.script.ScriptTemplate; -import org.elasticsearch.xpack.sql.tree.Location; import org.elasticsearch.xpack.sql.tree.NodeInfo; +import org.elasticsearch.xpack.sql.tree.Source; import org.elasticsearch.xpack.sql.type.DataType; import java.util.Arrays; @@ -31,9 +31,9 @@ public class Replace extends ScalarFunction { private final Expression source, pattern, replacement; - public Replace(Location location, Expression source, Expression pattern, Expression replacement) { - super(location, Arrays.asList(source, pattern, replacement)); - this.source = source; + public Replace(Source source, Expression src, Expression pattern, Expression replacement) { + super(source, Arrays.asList(src, pattern, replacement)); + this.source = src; this.pattern = pattern; this.replacement = replacement; } @@ -59,7 +59,7 @@ protected TypeResolution resolveType() { @Override protected Pipe makePipe() { - return new ReplaceFunctionPipe(location(), this, + return new ReplaceFunctionPipe(source(), this, Expressions.pipe(source), Expressions.pipe(pattern), Expressions.pipe(replacement)); @@ -122,6 +122,6 @@ public Expression replaceChildren(List newChildren) { throw new IllegalArgumentException("expected [3] children but received [" + newChildren.size() + "]"); } - return new Replace(location(), newChildren.get(0), newChildren.get(1), newChildren.get(2)); + return new Replace(source(), newChildren.get(0), newChildren.get(1), newChildren.get(2)); } } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/ReplaceFunctionPipe.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/ReplaceFunctionPipe.java index 173ea3e9c0c39..087dfc0bb703a 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/ReplaceFunctionPipe.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/ReplaceFunctionPipe.java @@ -8,8 +8,8 @@ import org.elasticsearch.xpack.sql.execution.search.SqlSourceBuilder; import org.elasticsearch.xpack.sql.expression.Expression; import org.elasticsearch.xpack.sql.expression.gen.pipeline.Pipe; -import org.elasticsearch.xpack.sql.tree.Location; import org.elasticsearch.xpack.sql.tree.NodeInfo; +import org.elasticsearch.xpack.sql.tree.Source; import java.util.Arrays; import java.util.List; @@ -19,10 +19,10 @@ public class ReplaceFunctionPipe extends Pipe { private final Pipe source, pattern, replacement; - public ReplaceFunctionPipe(Location location, Expression expression, Pipe source, + public ReplaceFunctionPipe(Source source, Expression expression, Pipe src, Pipe pattern, Pipe replacement) { - super(location, expression, Arrays.asList(source, pattern, replacement)); - this.source = source; + super(source, expression, Arrays.asList(src, pattern, replacement)); + this.source = src; this.pattern = pattern; this.replacement = replacement; } @@ -58,7 +58,7 @@ public boolean resolved() { protected Pipe replaceChildren(Pipe newSource, Pipe newPattern, Pipe newReplacement) { - return new 
ReplaceFunctionPipe(location(), expression(), newSource, newPattern, newReplacement); + return new ReplaceFunctionPipe(source(), expression(), newSource, newPattern, newReplacement); } @Override @@ -78,7 +78,7 @@ public ReplaceFunctionProcessor asProcessor() { return new ReplaceFunctionProcessor(source.asProcessor(), pattern.asProcessor(), replacement.asProcessor()); } - public Pipe source() { + public Pipe src() { return source; } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/Right.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/Right.java index 39d220e2d658e..3ab7c9c8dafe3 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/Right.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/Right.java @@ -7,7 +7,7 @@ import org.elasticsearch.xpack.sql.expression.Expression; import org.elasticsearch.xpack.sql.expression.function.scalar.string.BinaryStringNumericProcessor.BinaryStringNumericOperation; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; import org.elasticsearch.xpack.sql.tree.NodeInfo; /** @@ -15,8 +15,8 @@ */ public class Right extends BinaryStringNumericFunction { - public Right(Location location, Expression left, Expression right) { - super(location, left, right); + public Right(Source source, Expression left, Expression right) { + super(source, left, right); } @Override @@ -26,7 +26,7 @@ protected BinaryStringNumericOperation operation() { @Override protected Right replaceChildren(Expression newLeft, Expression newRight) { - return new Right(location(), newLeft, newRight); + return new Right(source(), newLeft, newRight); } @Override diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/Space.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/Space.java index 37809482c219b..bad23a8ea526f 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/Space.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/Space.java @@ -7,7 +7,7 @@ import org.elasticsearch.xpack.sql.expression.Expression; import org.elasticsearch.xpack.sql.expression.function.scalar.string.StringProcessor.StringOperation; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; import org.elasticsearch.xpack.sql.tree.NodeInfo; import org.elasticsearch.xpack.sql.type.DataType; @@ -16,8 +16,8 @@ */ public class Space extends UnaryStringIntFunction { - public Space(Location location, Expression field) { - super(location, field); + public Space(Source source, Expression field) { + super(source, field); } @Override @@ -27,7 +27,7 @@ protected NodeInfo info() { @Override protected Space replaceChild(Expression newChild) { - return new Space(location(), newChild); + return new Space(source(), newChild); } @Override diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/Substring.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/Substring.java index ea8378a224d91..c049551f90ef9 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/Substring.java +++ 
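
In Replace and ReplaceFunctionPipe (and, below, Substring and SubstringFunctionPipe) the rename is not purely mechanical: these classes already have a field named source, the string being searched, which would shadow the new Source source constructor parameter. The diff therefore renames the parameter to src and frees source() for the inherited tree-position accessor, renaming the old getter to src(). A condensed illustration with stand-in types:

```java
interface Expression { }
final class Source { }  // stand-in for the tree-position type

final class Replace {
    private final Source source;   // where the node sits in the query text
    private final Expression src;  // the string being searched; this field was `source`

    Replace(Source source, Expression src) {
        this.source = source;
        this.src = src;
    }

    Source source() { return source; } // inherited accessor in the real code
    Expression src() { return src; }   // was source(); renamed to avoid the clash
}
```
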
b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/Substring.java @@ -12,8 +12,8 @@ import org.elasticsearch.xpack.sql.expression.function.scalar.ScalarFunction; import org.elasticsearch.xpack.sql.expression.gen.pipeline.Pipe; import org.elasticsearch.xpack.sql.expression.gen.script.ScriptTemplate; -import org.elasticsearch.xpack.sql.tree.Location; import org.elasticsearch.xpack.sql.tree.NodeInfo; +import org.elasticsearch.xpack.sql.tree.Source; import org.elasticsearch.xpack.sql.type.DataType; import java.util.Arrays; @@ -32,9 +32,9 @@ public class Substring extends ScalarFunction { private final Expression source, start, length; - public Substring(Location location, Expression source, Expression start, Expression length) { - super(location, Arrays.asList(source, start, length)); - this.source = source; + public Substring(Source source, Expression src, Expression start, Expression length) { + super(source, Arrays.asList(src, start, length)); + this.source = src; this.start = start; this.length = length; } @@ -60,7 +60,7 @@ protected TypeResolution resolveType() { @Override protected Pipe makePipe() { - return new SubstringFunctionPipe(location(), this, + return new SubstringFunctionPipe(source(), this, Expressions.pipe(source), Expressions.pipe(start), Expressions.pipe(length)); @@ -122,6 +122,6 @@ public Expression replaceChildren(List newChildren) { throw new IllegalArgumentException("expected [3] children but received [" + newChildren.size() + "]"); } - return new Substring(location(), newChildren.get(0), newChildren.get(1), newChildren.get(2)); + return new Substring(source(), newChildren.get(0), newChildren.get(1), newChildren.get(2)); } } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/SubstringFunctionPipe.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/SubstringFunctionPipe.java index 078defec40b61..0805e9e325f4c 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/SubstringFunctionPipe.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/SubstringFunctionPipe.java @@ -8,8 +8,8 @@ import org.elasticsearch.xpack.sql.execution.search.SqlSourceBuilder; import org.elasticsearch.xpack.sql.expression.Expression; import org.elasticsearch.xpack.sql.expression.gen.pipeline.Pipe; -import org.elasticsearch.xpack.sql.tree.Location; import org.elasticsearch.xpack.sql.tree.NodeInfo; +import org.elasticsearch.xpack.sql.tree.Source; import java.util.Arrays; import java.util.List; @@ -19,10 +19,10 @@ public class SubstringFunctionPipe extends Pipe { private final Pipe source, start, length; - public SubstringFunctionPipe(Location location, Expression expression, Pipe source, + public SubstringFunctionPipe(Source source, Expression expression, Pipe src, Pipe start, Pipe length) { - super(location, expression, Arrays.asList(source, start, length)); - this.source = source; + super(source, expression, Arrays.asList(src, start, length)); + this.source = src; this.start = start; this.length = length; } @@ -58,7 +58,7 @@ public boolean resolved() { protected Pipe replaceChildren(Pipe newSource, Pipe newStart, Pipe newLength) { - return new SubstringFunctionPipe(location(), expression(), newSource, newStart, newLength); + return new SubstringFunctionPipe(source(), expression(), newSource, newStart, newLength); } @Override @@ -78,7 +78,7 @@ public 
SubstringFunctionProcessor asProcessor() { return new SubstringFunctionProcessor(source.asProcessor(), start.asProcessor(), length.asProcessor()); } - public Pipe source() { + public Pipe src() { return source; } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/UCase.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/UCase.java index a030eeee7b97c..1b0ad36ebf022 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/UCase.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/UCase.java @@ -7,7 +7,7 @@ import org.elasticsearch.xpack.sql.expression.Expression; import org.elasticsearch.xpack.sql.expression.function.scalar.string.StringProcessor.StringOperation; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; import org.elasticsearch.xpack.sql.tree.NodeInfo; import org.elasticsearch.xpack.sql.type.DataType; @@ -16,8 +16,8 @@ */ public class UCase extends UnaryStringFunction { - public UCase(Location location, Expression field) { - super(location, field); + public UCase(Source source, Expression field) { + super(source, field); } @Override @@ -27,7 +27,7 @@ protected NodeInfo info() { @Override protected UCase replaceChild(Expression newChild) { - return new UCase(location(), newChild); + return new UCase(source(), newChild); } @Override diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/UnaryStringFunction.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/UnaryStringFunction.java index 8c64fefc36b40..ef3944a9093a4 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/UnaryStringFunction.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/UnaryStringFunction.java @@ -13,7 +13,7 @@ import org.elasticsearch.xpack.sql.expression.function.scalar.string.StringProcessor.StringOperation; import org.elasticsearch.xpack.sql.expression.gen.processor.Processor; import org.elasticsearch.xpack.sql.expression.gen.script.ScriptTemplate; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; import org.elasticsearch.xpack.sql.util.StringUtils; import java.util.Locale; @@ -24,8 +24,8 @@ public abstract class UnaryStringFunction extends UnaryScalarFunction { - protected UnaryStringFunction(Location location, Expression field) { - super(location, field); + protected UnaryStringFunction(Source source, Expression field) { + super(source, field); } @Override diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/UnaryStringIntFunction.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/UnaryStringIntFunction.java index a14acef60e578..5603a29d81d7c 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/UnaryStringIntFunction.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/UnaryStringIntFunction.java @@ -13,7 +13,7 @@ import org.elasticsearch.xpack.sql.expression.function.scalar.string.StringProcessor.StringOperation; import org.elasticsearch.xpack.sql.expression.gen.processor.Processor; import 
org.elasticsearch.xpack.sql.expression.gen.script.ScriptTemplate; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; import java.util.Locale; import java.util.Objects; @@ -26,8 +26,8 @@ */ public abstract class UnaryStringIntFunction extends UnaryScalarFunction { - protected UnaryStringIntFunction(Location location, Expression field) { - super(location, field); + protected UnaryStringIntFunction(Source source, Expression field) { + super(source, field); } @Override diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/whitelist/InternalSqlScriptUtils.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/whitelist/InternalSqlScriptUtils.java index 6d39fa6fbc226..01d56188ed2ed 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/whitelist/InternalSqlScriptUtils.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/whitelist/InternalSqlScriptUtils.java @@ -346,6 +346,10 @@ public static Integer weekOfYear(Object dateTime, String tzId) { } public static ZonedDateTime asDateTime(Object dateTime) { + return (ZonedDateTime) asDateTime(dateTime, false); + } + + private static Object asDateTime(Object dateTime, boolean lenient) { if (dateTime == null) { return null; } @@ -355,11 +359,14 @@ public static ZonedDateTime asDateTime(Object dateTime) { if (dateTime instanceof ZonedDateTime) { return (ZonedDateTime) dateTime; } - if (dateTime instanceof Number) { - return DateUtils.of(((Number) dateTime).longValue()); + if (false == lenient) { + if (dateTime instanceof Number) { + return DateUtils.of(((Number) dateTime).longValue()); + } + + throw new SqlIllegalArgumentException("Invalid date encountered [{}]", dateTime); } - - throw new SqlIllegalArgumentException("Invalid date encountered [{}]", dateTime); + return dateTime; } public static IntervalDayTime intervalDayTime(String text, String typeName) { @@ -468,6 +475,8 @@ public static String ucase(String s) { // Casting // public static Object cast(Object value, String typeName) { - return DataTypeConversion.convert(value, DataType.fromTypeName(typeName)); + // we call asDateTime here to make sure we handle JodaCompatibleZonedDateTime properly, + // since casting works for ZonedDateTime objects only + return DataTypeConversion.convert(asDateTime(value, true), DataType.fromTypeName(typeName)); } } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/gen/pipeline/AggExtractorInput.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/gen/pipeline/AggExtractorInput.java index 15aff1e415547..554b7d3e3c885 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/gen/pipeline/AggExtractorInput.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/gen/pipeline/AggExtractorInput.java @@ -11,15 +11,15 @@ import org.elasticsearch.xpack.sql.expression.gen.processor.BucketExtractorProcessor; import org.elasticsearch.xpack.sql.expression.gen.processor.ChainingProcessor; import org.elasticsearch.xpack.sql.expression.gen.processor.Processor; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; import org.elasticsearch.xpack.sql.tree.NodeInfo; public class AggExtractorInput extends LeafInput { private final Processor chained; - public AggExtractorInput(Location location, Expression 
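
The InternalSqlScriptUtils change above is behavioral, not cosmetic: asDateTime gains a lenient mode so that cast() can pass script date wrappers (such as JodaCompatibleZonedDateTime, per the inline comment) through to the generic conversion layer instead of rejecting them. A simplified sketch using only JDK types; IllegalArgumentException and the fixed UTC zone stand in for SqlIllegalArgumentException and DateUtils.of:

```java
import java.time.Instant;
import java.time.ZoneOffset;
import java.time.ZonedDateTime;

final class DateCoercionSketch {
    static Object asDateTime(Object dateTime, boolean lenient) {
        if (dateTime == null) {
            return null;
        }
        if (dateTime instanceof ZonedDateTime) {
            return dateTime;
        }
        if (lenient == false) {
            // strict path, used by asDateTime(Object): epoch millis or bust
            if (dateTime instanceof Number) {
                return Instant.ofEpochMilli(((Number) dateTime).longValue()).atZone(ZoneOffset.UTC);
            }
            throw new IllegalArgumentException("Invalid date encountered [" + dateTime + "]");
        }
        // lenient path, used by cast(): let the caller's conversion logic decide
        return dateTime;
    }
}
```

The strict path keeps the old contract for existing callers of asDateTime(Object), while cast() opts into leniency only because DataTypeConversion.convert knows how to handle the values that fall through.
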
expression, Processor processor, BucketExtractor context) { - super(location, expression, context); + public AggExtractorInput(Source source, Expression expression, Processor processor, BucketExtractor context) { + super(source, expression, context); this.chained = processor; } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/gen/pipeline/AggNameInput.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/gen/pipeline/AggNameInput.java index 9e4a1bc857cdf..7f3a12b560cf2 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/gen/pipeline/AggNameInput.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/gen/pipeline/AggNameInput.java @@ -6,12 +6,12 @@ package org.elasticsearch.xpack.sql.expression.gen.pipeline; import org.elasticsearch.xpack.sql.expression.Expression; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; import org.elasticsearch.xpack.sql.tree.NodeInfo; public class AggNameInput extends CommonNonExecutableInput { - public AggNameInput(Location location, Expression expression, String context) { - super(location, expression, context); + public AggNameInput(Source source, Expression expression, String context) { + super(source, expression, context); } @Override diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/gen/pipeline/AggPathInput.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/gen/pipeline/AggPathInput.java index 8e78f0e2ad407..30c0968b9ef23 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/gen/pipeline/AggPathInput.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/gen/pipeline/AggPathInput.java @@ -8,7 +8,7 @@ import org.elasticsearch.xpack.sql.execution.search.AggRef; import org.elasticsearch.xpack.sql.expression.Expression; import org.elasticsearch.xpack.sql.expression.gen.processor.Processor; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; import org.elasticsearch.xpack.sql.tree.NodeInfo; import java.util.Objects; @@ -19,7 +19,7 @@ public class AggPathInput extends CommonNonExecutableInput { private final Processor action; public AggPathInput(Expression expression, AggRef context) { - this(Location.EMPTY, expression, context, null); + this(Source.EMPTY, expression, context, null); } /** @@ -28,8 +28,8 @@ public AggPathInput(Expression expression, AggRef context) { * The action is used for handling corner-case results such as date histogram which returns * a full date object for year which requires additional extraction. 
*/ - public AggPathInput(Location location, Expression expression, AggRef context, Processor action) { - super(location, expression, context); + public AggPathInput(Source source, Expression expression, AggRef context, Processor action) { + super(source, expression, context); this.action = action; } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/gen/pipeline/AttributeInput.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/gen/pipeline/AttributeInput.java index ed1823e75a61e..72f2396582894 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/gen/pipeline/AttributeInput.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/gen/pipeline/AttributeInput.java @@ -8,7 +8,7 @@ import org.elasticsearch.xpack.sql.execution.search.SqlSourceBuilder; import org.elasticsearch.xpack.sql.expression.Attribute; import org.elasticsearch.xpack.sql.expression.Expression; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; import org.elasticsearch.xpack.sql.tree.NodeInfo; /** @@ -16,8 +16,8 @@ * before it can be further processed. */ public class AttributeInput extends NonExecutableInput { - public AttributeInput(Location location, Expression expression, Attribute context) { - super(location, expression, context); + public AttributeInput(Source source, Expression expression, Attribute context) { + super(source, expression, context); } @Override @@ -32,7 +32,7 @@ public final boolean supportedByAggsOnlyQuery() { @Override public Pipe resolveAttributes(AttributeResolver resolver) { - return new ReferenceInput(location(), expression(), resolver.resolve(context())); + return new ReferenceInput(source(), expression(), resolver.resolve(context())); } @Override diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/gen/pipeline/BinaryPipe.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/gen/pipeline/BinaryPipe.java index ac564e5b20cda..64f81f4f5b3ad 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/gen/pipeline/BinaryPipe.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/gen/pipeline/BinaryPipe.java @@ -7,7 +7,7 @@ import org.elasticsearch.xpack.sql.execution.search.SqlSourceBuilder; import org.elasticsearch.xpack.sql.expression.Expression; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; import java.util.Arrays; import java.util.List; @@ -17,8 +17,8 @@ public abstract class BinaryPipe extends Pipe { private final Pipe left, right; - public BinaryPipe(Location location, Expression expression, Pipe left, Pipe right) { - super(location, expression, Arrays.asList(left, right)); + public BinaryPipe(Source source, Expression expression, Pipe left, Pipe right) { + super(source, expression, Arrays.asList(left, right)); this.left = left; this.right = right; } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/gen/pipeline/CommonNonExecutableInput.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/gen/pipeline/CommonNonExecutableInput.java index 666b4c093d090..cb1072bc6b884 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/gen/pipeline/CommonNonExecutableInput.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/gen/pipeline/CommonNonExecutableInput.java @@ -9,15 
+9,15 @@ import org.elasticsearch.xpack.sql.execution.search.SqlSourceBuilder; import org.elasticsearch.xpack.sql.expression.Expression; import org.elasticsearch.xpack.sql.expression.gen.processor.Processor; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; /** * Implementation common to most subclasses of * {@link NonExecutableInput} but not shared by all. */ abstract class CommonNonExecutableInput extends NonExecutableInput { - CommonNonExecutableInput(Location location, Expression expression, T context) { - super(location, expression, context); + CommonNonExecutableInput(Source source, Expression expression, T context) { + super(source, expression, context); } @Override diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/gen/pipeline/ConstantInput.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/gen/pipeline/ConstantInput.java index 4af4dfc5cb741..21a8a86566881 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/gen/pipeline/ConstantInput.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/gen/pipeline/ConstantInput.java @@ -9,13 +9,13 @@ import org.elasticsearch.xpack.sql.expression.Expression; import org.elasticsearch.xpack.sql.expression.gen.processor.ConstantProcessor; import org.elasticsearch.xpack.sql.expression.gen.processor.Processor; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; import org.elasticsearch.xpack.sql.tree.NodeInfo; public class ConstantInput extends LeafInput { - public ConstantInput(Location location, Expression expression, Object context) { - super(location, expression, context); + public ConstantInput(Source source, Expression expression, Object context) { + super(source, expression, context); } @Override diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/gen/pipeline/HitExtractorInput.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/gen/pipeline/HitExtractorInput.java index 750b948a48b7c..e58c97e883717 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/gen/pipeline/HitExtractorInput.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/gen/pipeline/HitExtractorInput.java @@ -10,13 +10,13 @@ import org.elasticsearch.xpack.sql.expression.Expression; import org.elasticsearch.xpack.sql.expression.gen.processor.HitExtractorProcessor; import org.elasticsearch.xpack.sql.expression.gen.processor.Processor; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; import org.elasticsearch.xpack.sql.tree.NodeInfo; public class HitExtractorInput extends LeafInput { - public HitExtractorInput(Location location, Expression expression, HitExtractor context) { - super(location, expression, context); + public HitExtractorInput(Source source, Expression expression, HitExtractor context) { + super(source, expression, context); } @Override diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/gen/pipeline/LeafInput.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/gen/pipeline/LeafInput.java index b2a15ab44714b..ff5ed1ced6d6b 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/gen/pipeline/LeafInput.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/gen/pipeline/LeafInput.java 
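
AttributeInput.resolveAttributes, above, shows the resolution step of the pipe tree: a pipe that merely names an attribute is exchanged for a ReferenceInput bound to a concrete field extraction. A self-contained sketch of that exchange, dropping the Source and Expression plumbing for brevity (all types here are stand-ins for the plugin's):

```java
interface Attribute { }
interface FieldExtraction { }
interface AttributeResolver {
    FieldExtraction resolve(Attribute attribute);
}

abstract class Pipe {
    abstract Pipe resolveAttributes(AttributeResolver resolver);
}

final class AttributeInput extends Pipe {
    private final Attribute context;

    AttributeInput(Attribute context) {
        this.context = context;
    }

    @Override
    Pipe resolveAttributes(AttributeResolver resolver) {
        // exchange the named attribute for a pipe bound to a concrete extraction
        return new ReferenceInput(resolver.resolve(context));
    }
}

final class ReferenceInput extends Pipe {
    private final FieldExtraction context;

    ReferenceInput(FieldExtraction context) {
        this.context = context;
    }

    @Override
    Pipe resolveAttributes(AttributeResolver resolver) {
        return this; // already resolved; nothing to rewrite
    }
}
```

Note the identity check in UnaryPipe.resolveAttributes further down: when the rewritten child is the same object, the node returns this, so untouched subtrees are shared rather than copied.
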
@@ -6,7 +6,7 @@ package org.elasticsearch.xpack.sql.expression.gen.pipeline; import org.elasticsearch.xpack.sql.expression.Expression; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; import java.util.List; import java.util.Objects; @@ -17,8 +17,8 @@ public abstract class LeafInput extends Pipe { private T context; - public LeafInput(Location location, Expression expression, T context) { - super(location, expression, emptyList()); + public LeafInput(Source source, Expression expression, T context) { + super(source, expression, emptyList()); this.context = context; } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/gen/pipeline/MultiPipe.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/gen/pipeline/MultiPipe.java index d25e7a2e660a2..f0fc3e075895b 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/gen/pipeline/MultiPipe.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/gen/pipeline/MultiPipe.java @@ -8,15 +8,15 @@ import org.elasticsearch.xpack.sql.expression.Expression; import org.elasticsearch.xpack.sql.expression.gen.processor.Processor; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; import java.util.ArrayList; import java.util.List; public abstract class MultiPipe extends Pipe { - protected MultiPipe(Location location, Expression expression, List children) { - super(location, expression, children); + protected MultiPipe(Source source, Expression expression, List children) { + super(source, expression, children); } @Override diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/gen/pipeline/NonExecutableInput.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/gen/pipeline/NonExecutableInput.java index be3eded19b2bd..eae0f56df7be4 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/gen/pipeline/NonExecutableInput.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/gen/pipeline/NonExecutableInput.java @@ -8,11 +8,11 @@ import org.elasticsearch.xpack.sql.SqlIllegalArgumentException; import org.elasticsearch.xpack.sql.expression.Expression; import org.elasticsearch.xpack.sql.expression.gen.processor.Processor; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; public abstract class NonExecutableInput extends LeafInput { - NonExecutableInput(Location location, Expression expression, T context) { - super(location, expression, context); + NonExecutableInput(Source source, Expression expression, T context) { + super(source, expression, context); } @Override diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/gen/pipeline/Pipe.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/gen/pipeline/Pipe.java index b92cb9a15eae4..b013714bf5ab1 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/gen/pipeline/Pipe.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/gen/pipeline/Pipe.java @@ -12,7 +12,7 @@ import org.elasticsearch.xpack.sql.expression.Attribute; import org.elasticsearch.xpack.sql.expression.Expression; import org.elasticsearch.xpack.sql.expression.gen.processor.Processor; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; import 
org.elasticsearch.xpack.sql.tree.Node; import java.util.ArrayList; @@ -32,8 +32,8 @@ public abstract class Pipe extends Node implements FieldExtraction, Resolv private final Expression expression; - public Pipe(Location location, Expression expression, List children) { - super(location, children); + public Pipe(Source source, Expression expression, List children) { + super(source, children); this.expression = expression; } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/gen/pipeline/ReferenceInput.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/gen/pipeline/ReferenceInput.java index ec4850a3e18d4..d9bbdb8631b43 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/gen/pipeline/ReferenceInput.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/gen/pipeline/ReferenceInput.java @@ -8,12 +8,12 @@ import org.elasticsearch.xpack.sql.execution.search.FieldExtraction; import org.elasticsearch.xpack.sql.execution.search.SqlSourceBuilder; import org.elasticsearch.xpack.sql.expression.Expression; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; import org.elasticsearch.xpack.sql.tree.NodeInfo; public class ReferenceInput extends NonExecutableInput { - public ReferenceInput(Location location, Expression expression, FieldExtraction context) { - super(location, expression, context); + public ReferenceInput(Source source, Expression expression, FieldExtraction context) { + super(source, expression, context); } @Override diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/gen/pipeline/ScorePipe.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/gen/pipeline/ScorePipe.java index c65070405a84f..c8bf1463f10dd 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/gen/pipeline/ScorePipe.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/gen/pipeline/ScorePipe.java @@ -10,7 +10,7 @@ import org.elasticsearch.xpack.sql.expression.Expression; import org.elasticsearch.xpack.sql.expression.gen.processor.HitExtractorProcessor; import org.elasticsearch.xpack.sql.expression.gen.processor.Processor; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; import org.elasticsearch.xpack.sql.tree.NodeInfo; import java.util.List; @@ -18,8 +18,8 @@ import static java.util.Collections.emptyList; public class ScorePipe extends Pipe { - public ScorePipe(Location location, Expression expression) { - super(location, expression, emptyList()); + public ScorePipe(Source source, Expression expression) { + super(source, expression, emptyList()); } @Override diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/gen/pipeline/UnaryPipe.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/gen/pipeline/UnaryPipe.java index 8e2c87dc75cda..4920c3935bbb1 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/gen/pipeline/UnaryPipe.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/gen/pipeline/UnaryPipe.java @@ -9,7 +9,7 @@ import org.elasticsearch.xpack.sql.expression.Expression; import org.elasticsearch.xpack.sql.expression.gen.processor.ChainingProcessor; import org.elasticsearch.xpack.sql.expression.gen.processor.Processor; -import org.elasticsearch.xpack.sql.tree.Location; +import 
org.elasticsearch.xpack.sql.tree.Source; import org.elasticsearch.xpack.sql.tree.NodeInfo; import java.util.List; @@ -22,8 +22,8 @@ public final class UnaryPipe extends Pipe { private final Pipe child; private final Processor action; - public UnaryPipe(Location location, Expression expression, Pipe child, Processor action) { - super(location, expression, singletonList(child)); + public UnaryPipe(Source source, Expression expression, Pipe child, Processor action) { + super(source, expression, singletonList(child)); this.child = child; this.action = action; } @@ -38,7 +38,7 @@ public Pipe replaceChildren(List newChildren) { if (newChildren.size() != 1) { throw new IllegalArgumentException("expected [1] child but received [" + newChildren.size() + "]"); } - return new UnaryPipe(location(), expression(), newChildren.get(0), action); + return new UnaryPipe(source(), expression(), newChildren.get(0), action); } public Pipe child() { @@ -70,7 +70,7 @@ public Pipe resolveAttributes(AttributeResolver resolver) { if (newChild == child) { return this; } - return new UnaryPipe(location(), expression(), newChild, action); + return new UnaryPipe(source(), expression(), newChild, action); } @Override diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/gen/script/Param.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/gen/script/Param.java index 579eacd036b0e..e8151ada18a9c 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/gen/script/Param.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/gen/script/Param.java @@ -5,7 +5,7 @@ */ package org.elasticsearch.xpack.sql.expression.gen.script; -import java.util.Locale; +import static org.elasticsearch.common.logging.LoggerMessageFormat.format; abstract class Param { private final T value; @@ -22,6 +22,6 @@ T value() { @Override public String toString() { - return String.format(Locale.ROOT, "{%s=%s}", prefix(), value); + return format(null, "{{}={}}", prefix(), value); } } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/literal/Intervals.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/literal/Intervals.java index 0d194be105f6d..d5d13429e15af 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/literal/Intervals.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/literal/Intervals.java @@ -13,7 +13,7 @@ import org.elasticsearch.xpack.sql.expression.Foldables; import org.elasticsearch.xpack.sql.expression.Literal; import org.elasticsearch.xpack.sql.parser.ParsingException; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; import org.elasticsearch.xpack.sql.type.DataType; import org.elasticsearch.xpack.sql.util.Check; import org.elasticsearch.xpack.sql.util.StringUtils; @@ -63,7 +63,7 @@ public static long inMillis(Literal literal) { return millis; } - public static TemporalAmount of(Location source, long duration, TimeUnit unit) { + public static TemporalAmount of(Source source, long duration, TimeUnit unit) { // Cannot use Period.of since it accepts int so use plus which accepts long // Further more Period and Duration have inconsistent addition methods but plus is there try { @@ -90,7 +90,7 @@ public static TemporalAmount of(Location source, long duration, TimeUnit unit) { } } - public static DataType intervalType(Location source, TimeUnit leading, TimeUnit 
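
Param.toString, above, switches from String.format(Locale.ROOT, ...) to Elasticsearch's LoggerMessageFormat style, where only the exact pair {} is a placeholder and a lone brace is literal, so "{{}={}}" still renders as {prefix=value}. A tiny stand-in substitutor to illustrate those semantics; this is not the real implementation:

```java
final class MessageFormatSketch {
    // Only the exact pair "{}" is a placeholder; lone '{' or '}' characters are
    // literal, and substitution never consults the default locale.
    static String format(String pattern, Object... args) {
        StringBuilder sb = new StringBuilder();
        int arg = 0;
        for (int i = 0; i < pattern.length(); i++) {
            if (pattern.charAt(i) == '{' && i + 1 < pattern.length() && pattern.charAt(i + 1) == '}') {
                sb.append(args[arg++]);
                i++; // consume the '}'
            } else {
                sb.append(pattern.charAt(i));
            }
        }
        return sb.toString();
    }
}
```

For example, format("{{}={}}", "v", 10) yields "{v=10}", matching the output of the old String.format(Locale.ROOT, "{%s=%s}", ...) call.
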
trailing) { + public static DataType intervalType(Source source, TimeUnit leading, TimeUnit trailing) { if (trailing == null) { switch (leading) { case YEAR: @@ -208,7 +208,7 @@ private static class Parser { this.name = name; } - TemporalAmount parse(Location source, String string) { + TemporalAmount parse(Source source, String string) { int unitIndex = 0; int startToken = 0; int endToken = 0; @@ -408,7 +408,7 @@ public static TemporalAmount negate(TemporalAmount interval) { .build()); } - public static TemporalAmount parseInterval(Location source, String value, DataType intervalType) { + public static TemporalAmount parseInterval(Source source, String value, DataType intervalType) { return PARSERS.get(intervalType).parse(source, value); } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/BinaryOperator.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/BinaryOperator.java index a63e6f3a51e7d..318cfd9b62475 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/BinaryOperator.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/BinaryOperator.java @@ -8,7 +8,7 @@ import org.elasticsearch.xpack.sql.expression.Expression; import org.elasticsearch.xpack.sql.expression.Expressions; import org.elasticsearch.xpack.sql.expression.Expressions.ParamOrdinal; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; /** * Operator is a specialized binary predicate where both sides have the compatible types @@ -16,8 +16,8 @@ */ public abstract class BinaryOperator> extends BinaryPredicate { - protected BinaryOperator(Location location, Expression left, Expression right, F function) { - super(location, left, right, function); + protected BinaryOperator(Source source, Expression left, Expression right, F function) { + super(source, left, right, function); } protected abstract TypeResolution resolveInputType(Expression e, Expressions.ParamOrdinal paramOrdinal); diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/BinaryPredicate.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/BinaryPredicate.java index 6303d2799552d..277fa7e4998e5 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/BinaryPredicate.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/BinaryPredicate.java @@ -9,7 +9,7 @@ import org.elasticsearch.xpack.sql.expression.Expressions; import org.elasticsearch.xpack.sql.expression.Literal; import org.elasticsearch.xpack.sql.expression.function.scalar.BinaryScalarFunction; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; import java.util.Objects; @@ -24,8 +24,8 @@ public abstract class BinaryPredicate splitOr(Expression exp) { } public static Expression combineOr(List exps) { - return combine(exps, (l, r) -> new Or(l.location(), l, r)); + return combine(exps, (l, r) -> new Or(l.source(), l, r)); } public static Expression combineAnd(List exps) { - return combine(exps, (l, r) -> new And(l.location(), l, r)); + return combine(exps, (l, r) -> new And(l.source(), l, r)); } /** diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/Range.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/Range.java index 
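
The comment inside Intervals.of explains the non-obvious choice above: Period.of takes int, so long durations must go through the plusXxx(long) methods, whose ArithmeticException on overflow the real code rethrows as a ParsingException anchored at the interval's Source. A JDK-only sketch of the idea; ChronoUnit and IllegalArgumentException stand in for the plugin's TimeUnit and ParsingException:

```java
import java.time.Duration;
import java.time.Period;
import java.time.temporal.ChronoUnit;
import java.time.temporal.TemporalAmount;

final class IntervalSketch {
    static TemporalAmount of(long duration, ChronoUnit unit) {
        try {
            switch (unit) {
                case YEARS:   return Period.ZERO.plusYears(duration);   // plusXxx accepts long
                case MONTHS:  return Period.ZERO.plusMonths(duration);
                case DAYS:    return Period.ZERO.plusDays(duration);
                case HOURS:   return Duration.ZERO.plusHours(duration);
                case MINUTES: return Duration.ZERO.plusMinutes(duration);
                case SECONDS: return Duration.ZERO.plusSeconds(duration);
                default: throw new IllegalArgumentException("unsupported unit [" + unit + "]");
            }
        } catch (ArithmeticException ae) {
            // Period stores int internally, so a long amount can still overflow here.
            throw new IllegalArgumentException("interval value [" + duration + "] out of range", ae);
        }
    }
}
```
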
42021cad5901e..a189b7fda0eeb 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/Range.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/Range.java @@ -8,6 +8,7 @@ import org.elasticsearch.xpack.sql.expression.Expression; import org.elasticsearch.xpack.sql.expression.Expressions; import org.elasticsearch.xpack.sql.expression.Literal; +import org.elasticsearch.xpack.sql.expression.Nullability; import org.elasticsearch.xpack.sql.expression.function.scalar.ScalarFunction; import org.elasticsearch.xpack.sql.expression.gen.pipeline.Pipe; import org.elasticsearch.xpack.sql.expression.gen.script.Params; @@ -17,7 +18,7 @@ import org.elasticsearch.xpack.sql.expression.predicate.operator.comparison.BinaryComparison; import org.elasticsearch.xpack.sql.expression.predicate.operator.comparison.BinaryComparisonPipe; import org.elasticsearch.xpack.sql.expression.predicate.operator.comparison.BinaryComparisonProcessor.BinaryComparisonOperation; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; import org.elasticsearch.xpack.sql.tree.NodeInfo; import org.elasticsearch.xpack.sql.type.DataType; @@ -36,8 +37,8 @@ public class Range extends ScalarFunction { private final Expression value, lower, upper; private final boolean includeLower, includeUpper; - public Range(Location location, Expression value, Expression lower, boolean includeLower, Expression upper, boolean includeUpper) { - super(location, asList(value, lower, upper)); + public Range(Source source, Expression value, Expression lower, boolean includeLower, Expression upper, boolean includeUpper) { + super(source, asList(value, lower, upper)); this.value = value; this.lower = lower; @@ -62,7 +63,7 @@ public Expression replaceChildren(List newChildren) { if (newChildren.size() != 3) { throw new IllegalArgumentException("expected [3] children but received [" + newChildren.size() + "]"); } - return new Range(location(), newChildren.get(0), newChildren.get(1), includeLower, newChildren.get(2), includeUpper); + return new Range(source(), newChildren.get(0), newChildren.get(1), includeLower, newChildren.get(2), includeUpper); } public Expression value() { @@ -119,8 +120,8 @@ private boolean areBoundariesInvalid() { } @Override - public boolean nullable() { - return value.nullable() && lower.nullable() && upper.nullable(); + public Nullability nullable() { + return Nullability.and(value.nullable(), lower.nullable(), upper.nullable()); } @Override @@ -156,11 +157,11 @@ public ScriptTemplate asScript() { @Override protected Pipe makePipe() { - BinaryComparisonPipe lowerPipe = new BinaryComparisonPipe(location(), this, Expressions.pipe(value()), Expressions.pipe(lower()), + BinaryComparisonPipe lowerPipe = new BinaryComparisonPipe(source(), this, Expressions.pipe(value()), Expressions.pipe(lower()), includeLower() ? BinaryComparisonOperation.GTE : BinaryComparisonOperation.GT); - BinaryComparisonPipe upperPipe = new BinaryComparisonPipe(location(), this, Expressions.pipe(value()), Expressions.pipe(upper()), + BinaryComparisonPipe upperPipe = new BinaryComparisonPipe(source(), this, Expressions.pipe(value()), Expressions.pipe(upper()), includeUpper() ? 
BinaryComparisonOperation.LTE : BinaryComparisonOperation.LT); - BinaryLogicPipe and = new BinaryLogicPipe(location(), this, lowerPipe, upperPipe, BinaryLogicOperation.AND); + BinaryLogicPipe and = new BinaryLogicPipe(source(), this, lowerPipe, upperPipe, BinaryLogicOperation.AND); return and; } @@ -216,4 +217,4 @@ private static String name(Expression value, Expression lower, Expression upper, public String toString() { return name(); } -} \ No newline at end of file +} diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/conditional/ArbitraryConditionalFunction.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/conditional/ArbitraryConditionalFunction.java index 9a3c24c3729f3..ecc5835d1aaab 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/conditional/ArbitraryConditionalFunction.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/conditional/ArbitraryConditionalFunction.java @@ -12,7 +12,7 @@ import org.elasticsearch.xpack.sql.expression.gen.script.ParamsBuilder; import org.elasticsearch.xpack.sql.expression.gen.script.ScriptTemplate; import org.elasticsearch.xpack.sql.expression.predicate.conditional.ConditionalProcessor.ConditionalOperation; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; import org.elasticsearch.xpack.sql.type.DataTypeConversion; import java.util.ArrayList; @@ -28,8 +28,8 @@ public abstract class ArbitraryConditionalFunction extends ConditionalFunction { private final ConditionalOperation operation; - ArbitraryConditionalFunction(Location location, List fields, ConditionalOperation operation) { - super(location, fields); + ArbitraryConditionalFunction(Source source, List fields, ConditionalOperation operation) { + super(source, fields); this.operation = operation; } @@ -43,7 +43,7 @@ protected TypeResolution resolveType() { @Override protected Pipe makePipe() { - return new ConditionalPipe(location(), this, Expressions.pipe(children()), operation); + return new ConditionalPipe(source(), this, Expressions.pipe(children()), operation); } @Override diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/conditional/Coalesce.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/conditional/Coalesce.java index bd958f052d7ec..faaa6ec1802da 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/conditional/Coalesce.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/conditional/Coalesce.java @@ -7,7 +7,7 @@ package org.elasticsearch.xpack.sql.expression.predicate.conditional; import org.elasticsearch.xpack.sql.expression.Expression; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; import org.elasticsearch.xpack.sql.tree.NodeInfo; import java.util.List; @@ -16,8 +16,8 @@ public class Coalesce extends ArbitraryConditionalFunction { - public Coalesce(Location location, List fields) { - super(location, fields, COALESCE); + public Coalesce(Source source, List fields) { + super(source, fields, COALESCE); } @Override @@ -27,7 +27,7 @@ protected NodeInfo info() { @Override public Expression replaceChildren(List newChildren) { - return new Coalesce(location(), newChildren); + return new Coalesce(source(), newChildren); } @Override diff --git 
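
Range.makePipe, above, lowers a range predicate into two binary comparisons, picking GTE/GT and LTE/LT from the bound inclusiveness flags, then joins them with an AND pipe. Reduced to plain Java, the evaluation it sets up looks roughly like this (illustrative, not the plugin's pipe classes):

```java
final class RangeSketch {
    // v between lower and upper, with configurable bound inclusiveness.
    static <T extends Comparable<T>> boolean range(T v, T lower, boolean includeLower,
                                                   T upper, boolean includeUpper) {
        boolean lowerOk = includeLower ? v.compareTo(lower) >= 0   // GTE
                                       : v.compareTo(lower) > 0;   // GT
        boolean upperOk = includeUpper ? v.compareTo(upper) <= 0   // LTE
                                       : v.compareTo(upper) < 0;   // LT
        return lowerOk && upperOk;  // the BinaryLogicPipe AND in the diff
    }
}
```
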
a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/conditional/ConditionalFunction.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/conditional/ConditionalFunction.java index 0ffe8621fb221..13b765e941ce0 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/conditional/ConditionalFunction.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/conditional/ConditionalFunction.java @@ -8,8 +8,9 @@ import org.elasticsearch.xpack.sql.expression.Expression; import org.elasticsearch.xpack.sql.expression.Expressions; +import org.elasticsearch.xpack.sql.expression.Nullability; import org.elasticsearch.xpack.sql.expression.function.scalar.ScalarFunction; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; import org.elasticsearch.xpack.sql.type.DataType; import java.util.List; @@ -21,8 +22,8 @@ public abstract class ConditionalFunction extends ScalarFunction { protected DataType dataType = DataType.NULL; - ConditionalFunction(Location location, List fields) { - super(location, fields); + ConditionalFunction(Source source, List fields) { + super(source, fields); } @Override @@ -36,7 +37,7 @@ public boolean foldable() { } @Override - public boolean nullable() { - return false; + public Nullability nullable() { + return Nullability.UNKNOWN; } } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/conditional/ConditionalPipe.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/conditional/ConditionalPipe.java index 0c45438d58285..d543a87aef083 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/conditional/ConditionalPipe.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/conditional/ConditionalPipe.java @@ -11,7 +11,7 @@ import org.elasticsearch.xpack.sql.expression.gen.pipeline.Pipe; import org.elasticsearch.xpack.sql.expression.gen.processor.Processor; import org.elasticsearch.xpack.sql.expression.predicate.conditional.ConditionalProcessor.ConditionalOperation; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; import org.elasticsearch.xpack.sql.tree.NodeInfo; import java.util.List; @@ -21,8 +21,8 @@ public class ConditionalPipe extends MultiPipe { private final ConditionalOperation operation; - public ConditionalPipe(Location location, Expression expression, List children, ConditionalOperation operation) { - super(location, expression, children); + public ConditionalPipe(Source source, Expression expression, List children, ConditionalOperation operation) { + super(source, expression, children); this.operation = operation; } @@ -33,7 +33,7 @@ protected NodeInfo info() { @Override public Pipe replaceChildren(List newChildren) { - return new ConditionalPipe(location(), expression(), newChildren, operation); + return new ConditionalPipe(source(), expression(), newChildren, operation); } @Override diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/conditional/Greatest.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/conditional/Greatest.java index 09104f136fe26..60df99e27e05f 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/conditional/Greatest.java +++ 
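
Several hunks in this series replace boolean nullable() with the new Nullability type; Range combines its children with Nullability.and(...), while ConditionalFunction and NullIf report UNKNOWN. The enum itself is not part of this diff, so the following is only a guess at its shape: three-valued logic where, under an assumed precedence, any UNKNOWN input makes the result unknown and any TRUE input makes it nullable:

```java
enum Nullability {
    TRUE,     // the expression can produce null
    FALSE,    // the expression is known never to produce null
    UNKNOWN;  // cannot be determined statically

    // Assumed precedence: UNKNOWN wins, then TRUE, else FALSE.
    static Nullability and(Nullability... values) {
        Nullability result = FALSE;
        for (Nullability v : values) {
            if (v == UNKNOWN) {
                return UNKNOWN;
            }
            if (v == TRUE) {
                result = TRUE;
            }
        }
        return result;
    }
}
```
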
b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/conditional/Greatest.java @@ -8,7 +8,7 @@ import org.elasticsearch.xpack.sql.expression.Expression; import org.elasticsearch.xpack.sql.expression.Foldables; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; import org.elasticsearch.xpack.sql.tree.NodeInfo; import java.util.ArrayList; @@ -19,8 +19,8 @@ public class Greatest extends ArbitraryConditionalFunction { - public Greatest(Location location, List fields) { - super(location, new ArrayList<>(new LinkedHashSet<>(fields)), GREATEST); + public Greatest(Source source, List fields) { + super(source, new ArrayList<>(new LinkedHashSet<>(fields)), GREATEST); } @Override @@ -30,7 +30,7 @@ protected NodeInfo info() { @Override public Expression replaceChildren(List newChildren) { - return new Greatest(location(), newChildren); + return new Greatest(source(), newChildren); } @Override diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/conditional/IfNull.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/conditional/IfNull.java index 9b129c75ce2e0..2133a6b421336 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/conditional/IfNull.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/conditional/IfNull.java @@ -7,7 +7,7 @@ package org.elasticsearch.xpack.sql.expression.predicate.conditional; import org.elasticsearch.xpack.sql.expression.Expression; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; import org.elasticsearch.xpack.sql.tree.NodeInfo; import java.util.Arrays; @@ -18,17 +18,17 @@ */ public class IfNull extends Coalesce { - public IfNull(Location location, Expression first, Expression second) { - this(location, Arrays.asList(first, second)); + public IfNull(Source source, Expression first, Expression second) { + this(source, Arrays.asList(first, second)); } - private IfNull(Location location, List expressions) { - super(location, expressions); + private IfNull(Source source, List expressions) { + super(source, expressions); } @Override public Expression replaceChildren(List newChildren) { - return new IfNull(location(), newChildren); + return new IfNull(source(), newChildren); } @Override diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/conditional/Least.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/conditional/Least.java index 8ddf8a39277ce..7eec55b265f91 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/conditional/Least.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/conditional/Least.java @@ -8,7 +8,7 @@ import org.elasticsearch.xpack.sql.expression.Expression; import org.elasticsearch.xpack.sql.expression.Foldables; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; import org.elasticsearch.xpack.sql.tree.NodeInfo; import java.util.ArrayList; @@ -19,8 +19,8 @@ public class Least extends ArbitraryConditionalFunction { - public Least(Location location, List fields) { - super(location, new ArrayList<>(new LinkedHashSet<>(fields)), LEAST); + public Least(Source source, List fields) { + super(source, new ArrayList<>(new LinkedHashSet<>(fields)), LEAST); } @Override @@ -30,7 +30,7 
@@ protected NodeInfo info() { @Override public Expression replaceChildren(List newChildren) { - return new Least(location(), newChildren); + return new Least(source(), newChildren); } @Override diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/conditional/NullIf.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/conditional/NullIf.java index 0f8bb3f208506..ef1a71c7ed425 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/conditional/NullIf.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/conditional/NullIf.java @@ -8,10 +8,11 @@ import org.elasticsearch.xpack.sql.expression.Expression; import org.elasticsearch.xpack.sql.expression.Expressions; +import org.elasticsearch.xpack.sql.expression.Nullability; import org.elasticsearch.xpack.sql.expression.gen.pipeline.Pipe; import org.elasticsearch.xpack.sql.expression.gen.script.ParamsBuilder; import org.elasticsearch.xpack.sql.expression.gen.script.ScriptTemplate; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; import org.elasticsearch.xpack.sql.tree.NodeInfo; import org.elasticsearch.xpack.sql.type.DataType; @@ -26,8 +27,8 @@ */ public class NullIf extends ConditionalFunction { - public NullIf(Location location, Expression left, Expression right) { - super(location, Arrays.asList(left, right)); + public NullIf(Source source, Expression left, Expression right) { + super(source, Arrays.asList(left, right)); } @Override @@ -37,7 +38,7 @@ protected NodeInfo info() { @Override public Expression replaceChildren(List newChildren) { - return new NullIf(location(), newChildren.get(0), newChildren.get(1)); + return new NullIf(source(), newChildren.get(0), newChildren.get(1)); } @Override @@ -57,8 +58,8 @@ public boolean foldable() { } @Override - public boolean nullable() { - return true; + public Nullability nullable() { + return Nullability.UNKNOWN; } @Override @@ -80,7 +81,7 @@ public ScriptTemplate asScript() { @Override protected Pipe makePipe() { - return new NullIfPipe(location(), this, + return new NullIfPipe(source(), this, Expressions.pipe(children().get(0)), Expressions.pipe(children().get(1))); } } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/conditional/NullIfPipe.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/conditional/NullIfPipe.java index 9ce70d2a2c3b9..3d52d232e1262 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/conditional/NullIfPipe.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/conditional/NullIfPipe.java @@ -10,18 +10,18 @@ import org.elasticsearch.xpack.sql.expression.gen.pipeline.BinaryPipe; import org.elasticsearch.xpack.sql.expression.gen.pipeline.Pipe; import org.elasticsearch.xpack.sql.expression.gen.processor.Processor; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; import org.elasticsearch.xpack.sql.tree.NodeInfo; public class NullIfPipe extends BinaryPipe { - public NullIfPipe(Location location, Expression expression, Pipe left, Pipe right) { - super(location, expression, left, right); + public NullIfPipe(Source source, Expression expression, Pipe left, Pipe right) { + super(source, expression, left, right); } @Override protected BinaryPipe replaceChildren(Pipe left, Pipe 
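
NullIf is one of the places where nullable() genuinely changes meaning (from true to Nullability.UNKNOWN): whether NULLIF returns null depends on runtime values, not on the inputs' declared nullability. The evaluation its pipe ultimately performs is the standard SQL one, sketched here:

```java
import java.util.Objects;

final class NullIfSketch {
    // NULLIF(a, b): null when the arguments are equal, otherwise the first argument.
    static Object nullIf(Object left, Object right) {
        return Objects.equals(left, right) ? null : left;
    }
}
```
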
right) { - return new NullIfPipe(location(), expression(), left, right); + return new NullIfPipe(source(), expression(), left, right); } @Override diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/fulltext/FullTextPredicate.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/fulltext/FullTextPredicate.java index 07f284c90ca9b..7af299b97f5d5 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/fulltext/FullTextPredicate.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/fulltext/FullTextPredicate.java @@ -6,7 +6,8 @@ package org.elasticsearch.xpack.sql.expression.predicate.fulltext; import org.elasticsearch.xpack.sql.expression.Expression; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.expression.Nullability; +import org.elasticsearch.xpack.sql.tree.Source; import org.elasticsearch.xpack.sql.type.DataType; import java.util.List; @@ -30,12 +31,12 @@ public org.elasticsearch.index.query.Operator toEs() { // common properties private final String analyzer; - FullTextPredicate(Location location, String query, String options, List children) { - super(location, children); + FullTextPredicate(Source source, String query, String options, List children) { + super(source, children); this.query = query; this.options = options; // inferred - this.optionMap = FullTextUtils.parseSettings(options, location); + this.optionMap = FullTextUtils.parseSettings(options, source); this.analyzer = optionMap.get("analyzer"); } @@ -56,8 +57,8 @@ public String analyzer() { } @Override - public boolean nullable() { - return false; + public Nullability nullable() { + return Nullability.FALSE; } @Override @@ -84,4 +85,4 @@ public boolean equals(Object obj) { return Objects.equals(query, other.query) && Objects.equals(options, other.options); } -} \ No newline at end of file +} diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/fulltext/FullTextUtils.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/fulltext/FullTextUtils.java index bb57c7a154930..f1191e07d3a06 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/fulltext/FullTextUtils.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/fulltext/FullTextUtils.java @@ -8,7 +8,7 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.xpack.sql.expression.predicate.fulltext.FullTextPredicate.Operator; import org.elasticsearch.xpack.sql.parser.ParsingException; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; import java.util.LinkedHashMap; import java.util.Locale; @@ -21,7 +21,7 @@ abstract class FullTextUtils { private static final String DELIMITER = ";"; - static Map parseSettings(String options, Location location) { + static Map parseSettings(String options, Source source) { if (!Strings.hasText(options)) { return emptyMap(); } @@ -31,23 +31,23 @@ static Map parseSettings(String options, Location location) { for (String entry : list) { String[] split = splitInTwo(entry, "="); if (split == null) { - throw new ParsingException(location, "Cannot parse entry {} in options {}", entry, options); + throw new ParsingException(source, "Cannot parse entry {} in options {}", entry, options); } String previous = op.put(split[0], split[1]); if (previous != null) { 
- throw new ParsingException(location, "Duplicate option {} detected in options {}", entry, options); + throw new ParsingException(source, "Duplicate option {} detected in options {}", entry, options); } } return op; } - static Map parseFields(Map options, Location location) { - return parseFields(options.get("fields"), location); + static Map parseFields(Map options, Source source) { + return parseFields(options.get("fields"), source); } - static Map parseFields(String fieldString, Location location) { + static Map parseFields(String fieldString, Source source) { if (!Strings.hasText(fieldString)) { return emptyMap(); } @@ -66,7 +66,7 @@ static Map parseFields(String fieldString, Location location) { try { fields.put(split[0], Float.parseFloat(split[1])); } catch (NumberFormatException nfe) { - throw new ParsingException(location, "Cannot parse boosting for {}", fieldName); + throw new ParsingException(source, "Cannot parse boosting for {}", fieldName); } } } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/fulltext/MatchQueryPredicate.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/fulltext/MatchQueryPredicate.java index 1235cb1a205e6..eaab089d98877 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/fulltext/MatchQueryPredicate.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/fulltext/MatchQueryPredicate.java @@ -8,7 +8,7 @@ import java.util.Objects; import org.elasticsearch.xpack.sql.expression.Expression; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; import org.elasticsearch.xpack.sql.tree.NodeInfo; import static java.util.Collections.singletonList; @@ -19,8 +19,8 @@ public class MatchQueryPredicate extends FullTextPredicate { private final Expression field; - public MatchQueryPredicate(Location location, Expression field, String query, String options) { - super(location, query, options, singletonList(field)); + public MatchQueryPredicate(Source source, Expression field, String query, String options) { + super(source, query, options, singletonList(field)); this.field = field; } @@ -34,7 +34,7 @@ public MatchQueryPredicate replaceChildren(List newChildren) { if (newChildren.size() != 1) { throw new IllegalArgumentException("expected [1] child but received [" + newChildren.size() + "]"); } - return new MatchQueryPredicate(location(), newChildren.get(0), query(), options()); + return new MatchQueryPredicate(source(), newChildren.get(0), query(), options()); } public Expression field() { diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/fulltext/MultiMatchQueryPredicate.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/fulltext/MultiMatchQueryPredicate.java index eb3df01d6ef58..785cb4883334c 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/fulltext/MultiMatchQueryPredicate.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/fulltext/MultiMatchQueryPredicate.java @@ -9,7 +9,7 @@ import java.util.Objects; import org.elasticsearch.xpack.sql.expression.Expression; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; import org.elasticsearch.xpack.sql.tree.NodeInfo; import static java.util.Collections.emptyList; @@ -21,11 +21,11 @@ public class 
MultiMatchQueryPredicate extends FullTextPredicate { private final String fieldString; private final Map fields; - public MultiMatchQueryPredicate(Location location, String fieldString, String query, String options) { - super(location, query, options, emptyList()); + public MultiMatchQueryPredicate(Source source, String fieldString, String query, String options) { + super(source, query, options, emptyList()); this.fieldString = fieldString; // inferred - this.fields = FullTextUtils.parseFields(fieldString, location); + this.fields = FullTextUtils.parseFields(fieldString, source); } @Override diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/fulltext/StringQueryPredicate.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/fulltext/StringQueryPredicate.java index 3275044477495..02364f6f8b25f 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/fulltext/StringQueryPredicate.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/fulltext/StringQueryPredicate.java @@ -9,7 +9,7 @@ import java.util.List; import org.elasticsearch.xpack.sql.expression.Expression; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; import org.elasticsearch.xpack.sql.tree.NodeInfo; import static java.util.Collections.emptyList; @@ -18,11 +18,11 @@ public class StringQueryPredicate extends FullTextPredicate { private final Map fields; - public StringQueryPredicate(Location location, String query, String options) { - super(location, query, options, emptyList()); + public StringQueryPredicate(Source source, String query, String options) { + super(source, query, options, emptyList()); // inferred - this.fields = FullTextUtils.parseFields(optionMap(), location); + this.fields = FullTextUtils.parseFields(optionMap(), source); } @Override diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/logical/And.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/logical/And.java index 3b7242e6279de..1508cfab91cf8 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/logical/And.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/logical/And.java @@ -8,13 +8,13 @@ import org.elasticsearch.xpack.sql.expression.Expression; import org.elasticsearch.xpack.sql.expression.predicate.Negatable; import org.elasticsearch.xpack.sql.expression.predicate.logical.BinaryLogicProcessor.BinaryLogicOperation; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; import org.elasticsearch.xpack.sql.tree.NodeInfo; public class And extends BinaryLogic implements Negatable { - public And(Location location, Expression left, Expression right) { - super(location, left, right, BinaryLogicOperation.AND); + public And(Source source, Expression left, Expression right) { + super(source, left, right, BinaryLogicOperation.AND); } @Override @@ -24,16 +24,16 @@ protected NodeInfo info() { @Override protected And replaceChildren(Expression newLeft, Expression newRight) { - return new And(location(), newLeft, newRight); + return new And(source(), newLeft, newRight); } @Override public And swapLeftAndRight() { - return new And(location(), right(), left()); + return new And(source(), right(), left()); } @Override public Or negate() { - return new Or(location(), new 
Not(location(), left()), new Not(location(), right())); + return new Or(source(), new Not(source(), left()), new Not(source(), right())); } } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/logical/BinaryLogic.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/logical/BinaryLogic.java index 5e4175797eb58..d4bce9feab3ec 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/logical/BinaryLogic.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/logical/BinaryLogic.java @@ -7,16 +7,17 @@ import org.elasticsearch.xpack.sql.expression.Expression; import org.elasticsearch.xpack.sql.expression.Expressions; +import org.elasticsearch.xpack.sql.expression.Nullability; import org.elasticsearch.xpack.sql.expression.gen.pipeline.Pipe; import org.elasticsearch.xpack.sql.expression.predicate.BinaryOperator; import org.elasticsearch.xpack.sql.expression.predicate.logical.BinaryLogicProcessor.BinaryLogicOperation; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; import org.elasticsearch.xpack.sql.type.DataType; public abstract class BinaryLogic extends BinaryOperator { - protected BinaryLogic(Location location, Expression left, Expression right, BinaryLogicOperation operation) { - super(location, left, right, operation); + protected BinaryLogic(Source source, Expression left, Expression right, BinaryLogicOperation operation) { + super(source, left, right, operation); } @Override @@ -31,12 +32,12 @@ protected TypeResolution resolveInputType(Expression e, Expressions.ParamOrdinal @Override protected Pipe makePipe() { - return new BinaryLogicPipe(location(), this, Expressions.pipe(left()), Expressions.pipe(right()), function()); + return new BinaryLogicPipe(source(), this, Expressions.pipe(left()), Expressions.pipe(right()), function()); } @Override - public boolean nullable() { + public Nullability nullable() { // Cannot fold null due to 3vl, constant folding will do any possible folding. 
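// To illustrate the three-valued logic referenced above (worked example, not
// part of the original source): a null operand does not force a null result,
//   true  OR  null -> true      false OR  null -> null
//   false AND null -> false     true  AND null -> null
// so a binary logic node can promise neither Nullability.FALSE nor
// Nullability.TRUE and reports UNKNOWN, leaving any folding to the optimizer.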
- return false; + return Nullability.UNKNOWN; } } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/logical/BinaryLogicPipe.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/logical/BinaryLogicPipe.java index 26c19ab124e60..4dd51d6adea9f 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/logical/BinaryLogicPipe.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/logical/BinaryLogicPipe.java @@ -9,7 +9,7 @@ import org.elasticsearch.xpack.sql.expression.gen.pipeline.BinaryPipe; import org.elasticsearch.xpack.sql.expression.gen.pipeline.Pipe; import org.elasticsearch.xpack.sql.expression.predicate.logical.BinaryLogicProcessor.BinaryLogicOperation; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; import org.elasticsearch.xpack.sql.tree.NodeInfo; import java.util.Objects; @@ -18,8 +18,8 @@ public class BinaryLogicPipe extends BinaryPipe { private final BinaryLogicOperation operation; - public BinaryLogicPipe(Location location, Expression expression, Pipe left, Pipe right, BinaryLogicOperation operation) { - super(location, expression, left, right); + public BinaryLogicPipe(Source source, Expression expression, Pipe left, Pipe right, BinaryLogicOperation operation) { + super(source, expression, left, right); this.operation = operation; } @@ -30,7 +30,7 @@ protected NodeInfo info() { @Override protected BinaryPipe replaceChildren(Pipe left, Pipe right) { - return new BinaryLogicPipe(location(), expression(), left, right, operation); + return new BinaryLogicPipe(source(), expression(), left, right, operation); } @Override diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/logical/Not.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/logical/Not.java index 81a268491189f..8d326c570292b 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/logical/Not.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/logical/Not.java @@ -12,14 +12,14 @@ import org.elasticsearch.xpack.sql.expression.gen.processor.Processor; import org.elasticsearch.xpack.sql.expression.gen.script.Scripts; import org.elasticsearch.xpack.sql.expression.predicate.Negatable; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; import org.elasticsearch.xpack.sql.tree.NodeInfo; import org.elasticsearch.xpack.sql.type.DataType; public class Not extends UnaryScalarFunction { - public Not(Location location, Expression child) { - super(location, child); + public Not(Source source, Expression child) { + super(source, child); } @Override @@ -29,7 +29,7 @@ protected NodeInfo info() { @Override protected Not replaceChild(Expression newChild) { - return new Not(location(), newChild); + return new Not(source(), newChild); } @Override diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/logical/Or.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/logical/Or.java index 0b42b389051c6..01b85bf4a9b47 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/logical/Or.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/logical/Or.java @@ -8,13 +8,13 @@ import 
org.elasticsearch.xpack.sql.expression.Expression; import org.elasticsearch.xpack.sql.expression.predicate.Negatable; import org.elasticsearch.xpack.sql.expression.predicate.logical.BinaryLogicProcessor.BinaryLogicOperation; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; import org.elasticsearch.xpack.sql.tree.NodeInfo; public class Or extends BinaryLogic implements Negatable { - public Or(Location location, Expression left, Expression right) { - super(location, left, right, BinaryLogicOperation.OR); + public Or(Source source, Expression left, Expression right) { + super(source, left, right, BinaryLogicOperation.OR); } @Override @@ -24,16 +24,16 @@ protected NodeInfo info() { @Override protected Or replaceChildren(Expression newLeft, Expression newRight) { - return new Or(location(), newLeft, newRight); + return new Or(source(), newLeft, newRight); } @Override public Or swapLeftAndRight() { - return new Or(location(), right(), left()); + return new Or(source(), right(), left()); } @Override public And negate() { - return new And(location(), new Not(location(), left()), new Not(location(), right())); + return new And(source(), new Not(source(), left()), new Not(source(), right())); } } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/nulls/IsNotNull.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/nulls/IsNotNull.java index 8ac38cea603d4..f43e12e0b405c 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/nulls/IsNotNull.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/nulls/IsNotNull.java @@ -6,20 +6,21 @@ package org.elasticsearch.xpack.sql.expression.predicate.nulls; import org.elasticsearch.xpack.sql.expression.Expression; +import org.elasticsearch.xpack.sql.expression.Nullability; import org.elasticsearch.xpack.sql.expression.function.scalar.UnaryScalarFunction; import org.elasticsearch.xpack.sql.expression.gen.processor.Processor; import org.elasticsearch.xpack.sql.expression.gen.script.Scripts; import org.elasticsearch.xpack.sql.expression.predicate.Negatable; import org.elasticsearch.xpack.sql.expression.predicate.nulls.CheckNullProcessor.CheckNullOperation; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; import org.elasticsearch.xpack.sql.tree.NodeInfo; import org.elasticsearch.xpack.sql.type.DataType; import org.elasticsearch.xpack.sql.type.DataTypes; public class IsNotNull extends UnaryScalarFunction implements Negatable { - public IsNotNull(Location location, Expression field) { - super(location, field); + public IsNotNull(Source source, Expression field) { + super(source, field); } @Override @@ -29,7 +30,7 @@ protected NodeInfo info() { @Override protected IsNotNull replaceChild(Expression newChild) { - return new IsNotNull(location(), newChild); + return new IsNotNull(source(), newChild); } @Override @@ -48,8 +49,8 @@ public String processScript(String script) { } @Override - public boolean nullable() { - return false; + public Nullability nullable() { + return Nullability.FALSE; } @Override @@ -59,6 +60,6 @@ public DataType dataType() { @Override public UnaryScalarFunction negate() { - return new IsNull(location(), field()); + return new IsNull(source(), field()); } } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/nulls/IsNull.java 
b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/nulls/IsNull.java index 225881ff5ec31..b873f2770c724 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/nulls/IsNull.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/nulls/IsNull.java @@ -6,20 +6,21 @@ package org.elasticsearch.xpack.sql.expression.predicate.nulls; import org.elasticsearch.xpack.sql.expression.Expression; +import org.elasticsearch.xpack.sql.expression.Nullability; import org.elasticsearch.xpack.sql.expression.function.scalar.UnaryScalarFunction; import org.elasticsearch.xpack.sql.expression.gen.processor.Processor; import org.elasticsearch.xpack.sql.expression.gen.script.Scripts; import org.elasticsearch.xpack.sql.expression.predicate.Negatable; import org.elasticsearch.xpack.sql.expression.predicate.nulls.CheckNullProcessor.CheckNullOperation; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; import org.elasticsearch.xpack.sql.tree.NodeInfo; import org.elasticsearch.xpack.sql.type.DataType; import org.elasticsearch.xpack.sql.type.DataTypes; public class IsNull extends UnaryScalarFunction implements Negatable { - public IsNull(Location location, Expression field) { - super(location, field); + public IsNull(Source source, Expression field) { + super(source, field); } @Override @@ -29,7 +30,7 @@ protected NodeInfo info() { @Override protected IsNull replaceChild(Expression newChild) { - return new IsNull(location(), newChild); + return new IsNull(source(), newChild); } @Override @@ -48,8 +49,8 @@ public String processScript(String script) { } @Override - public boolean nullable() { - return false; + public Nullability nullable() { + return Nullability.FALSE; } @Override @@ -59,6 +60,6 @@ public DataType dataType() { @Override public UnaryScalarFunction negate() { - return new IsNotNull(location(), field()); + return new IsNotNull(source(), field()); } } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/operator/arithmetic/Add.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/operator/arithmetic/Add.java index 00140a1247cd5..85b468b9a5b30 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/operator/arithmetic/Add.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/operator/arithmetic/Add.java @@ -7,15 +7,15 @@ import org.elasticsearch.xpack.sql.expression.Expression; import org.elasticsearch.xpack.sql.expression.predicate.operator.arithmetic.BinaryArithmeticProcessor.BinaryArithmeticOperation; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; import org.elasticsearch.xpack.sql.tree.NodeInfo; /** * Addition function ({@code a + b}). 
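 * Note that this operation extends DateTimeArithmeticOperation (its diff
 * appears further below), so besides plain numeric addition it also covers
 * datetime arithmetic such as adding an interval to a date.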
*/ public class Add extends DateTimeArithmeticOperation { - public Add(Location location, Expression left, Expression right) { - super(location, left, right, BinaryArithmeticOperation.ADD); + public Add(Source source, Expression left, Expression right) { + super(source, left, right, BinaryArithmeticOperation.ADD); } @Override @@ -25,6 +25,6 @@ protected NodeInfo info() { @Override protected Add replaceChildren(Expression left, Expression right) { - return new Add(location(), left, right); + return new Add(source(), left, right); } } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/operator/arithmetic/ArithmeticOperation.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/operator/arithmetic/ArithmeticOperation.java index e4bbc79650571..d1d28e3683863 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/operator/arithmetic/ArithmeticOperation.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/operator/arithmetic/ArithmeticOperation.java @@ -10,7 +10,7 @@ import org.elasticsearch.xpack.sql.expression.gen.pipeline.Pipe; import org.elasticsearch.xpack.sql.expression.predicate.BinaryOperator; import org.elasticsearch.xpack.sql.expression.predicate.operator.arithmetic.BinaryArithmeticProcessor.BinaryArithmeticOperation; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; import org.elasticsearch.xpack.sql.type.DataType; import org.elasticsearch.xpack.sql.type.DataTypeConversion; @@ -18,8 +18,8 @@ public abstract class ArithmeticOperation extends BinaryOperator info() { @Override protected BinaryPipe replaceChildren(Pipe left, Pipe right) { - return new BinaryArithmeticPipe(location(), expression(), left, right, operation); + return new BinaryArithmeticPipe(source(), expression(), left, right, operation); } @Override diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/operator/arithmetic/BinaryArithmeticProcessor.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/operator/arithmetic/BinaryArithmeticProcessor.java index 1b7afa203077d..a0fd57e30d0ca 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/operator/arithmetic/BinaryArithmeticProcessor.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/operator/arithmetic/BinaryArithmeticProcessor.java @@ -76,7 +76,7 @@ public enum BinaryArithmeticOperation implements PredicateBiFunction) { - throw new SqlIllegalArgumentException("Cannot substract a date from an interval; do you mean the reverse?"); + throw new SqlIllegalArgumentException("Cannot subtract a date from an interval; do you mean the reverse?"); } throw new SqlIllegalArgumentException("Cannot compute [-] between [{}] [{}]", l.getClass().getSimpleName(), diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/operator/arithmetic/DateTimeArithmeticOperation.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/operator/arithmetic/DateTimeArithmeticOperation.java index 15be13e1e7533..da42ffe523b64 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/operator/arithmetic/DateTimeArithmeticOperation.java +++ 
b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/operator/arithmetic/DateTimeArithmeticOperation.java @@ -8,7 +8,7 @@ import org.elasticsearch.xpack.sql.expression.Expression; import org.elasticsearch.xpack.sql.expression.predicate.operator.arithmetic.BinaryArithmeticProcessor.BinaryArithmeticOperation; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; import org.elasticsearch.xpack.sql.type.DataType; import org.elasticsearch.xpack.sql.type.DataTypeConversion; import org.elasticsearch.xpack.sql.type.DataTypes; @@ -17,8 +17,8 @@ abstract class DateTimeArithmeticOperation extends ArithmeticOperation { - DateTimeArithmeticOperation(Location location, Expression left, Expression right, BinaryArithmeticOperation operation) { - super(location, left, right, operation); + DateTimeArithmeticOperation(Source source, Expression left, Expression right, BinaryArithmeticOperation operation) { + super(source, left, right, operation); } @Override diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/operator/arithmetic/Div.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/operator/arithmetic/Div.java index 643286b1ae40b..c0431f268ba56 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/operator/arithmetic/Div.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/operator/arithmetic/Div.java @@ -7,7 +7,7 @@ import org.elasticsearch.xpack.sql.expression.Expression; import org.elasticsearch.xpack.sql.expression.predicate.operator.arithmetic.BinaryArithmeticProcessor.BinaryArithmeticOperation; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; import org.elasticsearch.xpack.sql.tree.NodeInfo; import org.elasticsearch.xpack.sql.type.DataType; import org.elasticsearch.xpack.sql.type.DataTypeConversion; @@ -17,8 +17,8 @@ */ public class Div extends ArithmeticOperation { - public Div(Location location, Expression left, Expression right) { - super(location, left, right, BinaryArithmeticOperation.DIV); + public Div(Source source, Expression left, Expression right) { + super(source, left, right, BinaryArithmeticOperation.DIV); } @Override @@ -28,7 +28,7 @@ protected NodeInfo
info() { @Override protected Div replaceChildren(Expression newLeft, Expression newRight) { - return new Div(location(), newLeft, newRight); + return new Div(source(), newLeft, newRight); } @Override diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/operator/arithmetic/Mod.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/operator/arithmetic/Mod.java index 95485281acdc2..907ef1dab4f19 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/operator/arithmetic/Mod.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/operator/arithmetic/Mod.java @@ -7,7 +7,7 @@ import org.elasticsearch.xpack.sql.expression.Expression; import org.elasticsearch.xpack.sql.expression.predicate.operator.arithmetic.BinaryArithmeticProcessor.BinaryArithmeticOperation; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; import org.elasticsearch.xpack.sql.tree.NodeInfo; /** @@ -18,8 +18,8 @@ */ public class Mod extends ArithmeticOperation { - public Mod(Location location, Expression left, Expression right) { - super(location, left, right, BinaryArithmeticOperation.MOD); + public Mod(Source source, Expression left, Expression right) { + super(source, left, right, BinaryArithmeticOperation.MOD); } @Override @@ -29,6 +29,6 @@ protected NodeInfo info() { @Override protected Mod replaceChildren(Expression newLeft, Expression newRight) { - return new Mod(location(), newLeft, newRight); + return new Mod(source(), newLeft, newRight); } } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/operator/arithmetic/Mul.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/operator/arithmetic/Mul.java index 40e4bdfaaed3b..7a09bbedebfa3 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/operator/arithmetic/Mul.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/operator/arithmetic/Mul.java @@ -7,7 +7,7 @@ import org.elasticsearch.xpack.sql.expression.Expression; import org.elasticsearch.xpack.sql.expression.predicate.operator.arithmetic.BinaryArithmeticProcessor.BinaryArithmeticOperation; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; import org.elasticsearch.xpack.sql.tree.NodeInfo; import org.elasticsearch.xpack.sql.type.DataType; import org.elasticsearch.xpack.sql.type.DataTypes; @@ -21,8 +21,8 @@ public class Mul extends ArithmeticOperation { private DataType dataType; - public Mul(Location location, Expression left, Expression right) { - super(location, left, right, BinaryArithmeticOperation.MUL); + public Mul(Source source, Expression left, Expression right) { + super(source, left, right, BinaryArithmeticOperation.MUL); } @Override @@ -65,6 +65,6 @@ protected NodeInfo info() { @Override protected Mul replaceChildren(Expression newLeft, Expression newRight) { - return new Mul(location(), newLeft, newRight); + return new Mul(source(), newLeft, newRight); } } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/operator/arithmetic/Neg.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/operator/arithmetic/Neg.java index c297604fb2390..265a1eb993c94 100644 --- 
a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/operator/arithmetic/Neg.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/operator/arithmetic/Neg.java @@ -13,7 +13,7 @@ import org.elasticsearch.xpack.sql.expression.gen.processor.Processor; import org.elasticsearch.xpack.sql.expression.gen.script.Scripts; import org.elasticsearch.xpack.sql.expression.predicate.operator.arithmetic.UnaryArithmeticProcessor.UnaryArithmeticOperation; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; import org.elasticsearch.xpack.sql.tree.NodeInfo; import org.elasticsearch.xpack.sql.type.DataType; @@ -22,8 +22,8 @@ */ public class Neg extends UnaryScalarFunction { - public Neg(Location location, Expression field) { - super(location, field); + public Neg(Source source, Expression field) { + super(source, field); } @Override @@ -33,7 +33,7 @@ protected NodeInfo info() { @Override protected Neg replaceChild(Expression newChild) { - return new Neg(location(), newChild); + return new Neg(source(), newChild); } @Override diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/operator/arithmetic/Sub.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/operator/arithmetic/Sub.java index 6eda503bb6618..32acfa8ed685d 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/operator/arithmetic/Sub.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/operator/arithmetic/Sub.java @@ -7,7 +7,7 @@ import org.elasticsearch.xpack.sql.expression.Expression; import org.elasticsearch.xpack.sql.expression.predicate.operator.arithmetic.BinaryArithmeticProcessor.BinaryArithmeticOperation; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; import org.elasticsearch.xpack.sql.tree.NodeInfo; /** @@ -15,8 +15,8 @@ */ public class Sub extends DateTimeArithmeticOperation { - public Sub(Location location, Expression left, Expression right) { - super(location, left, right, BinaryArithmeticOperation.SUB); + public Sub(Source source, Expression left, Expression right) { + super(source, left, right, BinaryArithmeticOperation.SUB); } @Override @@ -26,6 +26,6 @@ protected NodeInfo info() { @Override protected Sub replaceChildren(Expression newLeft, Expression newRight) { - return new Sub(location(), newLeft, newRight); + return new Sub(source(), newLeft, newRight); } } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/operator/comparison/BinaryComparison.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/operator/comparison/BinaryComparison.java index b8c21c1448acc..815fa10b12fd1 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/operator/comparison/BinaryComparison.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/operator/comparison/BinaryComparison.java @@ -10,14 +10,14 @@ import org.elasticsearch.xpack.sql.expression.gen.pipeline.Pipe; import org.elasticsearch.xpack.sql.expression.predicate.BinaryOperator; import org.elasticsearch.xpack.sql.expression.predicate.operator.comparison.BinaryComparisonProcessor.BinaryComparisonOperation; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; import 
org.elasticsearch.xpack.sql.type.DataType; // marker class to indicate operations that rely on values public abstract class BinaryComparison extends BinaryOperator { - protected BinaryComparison(Location location, Expression left, Expression right, BinaryComparisonOperation operation) { - super(location, left, right, operation); + protected BinaryComparison(Source source, Expression left, Expression right, BinaryComparisonOperation operation) { + super(source, left, right, operation); } @Override @@ -37,7 +37,7 @@ public DataType dataType() { @Override protected Pipe makePipe() { - return new BinaryComparisonPipe(location(), this, Expressions.pipe(left()), Expressions.pipe(right()), function()); + return new BinaryComparisonPipe(source(), this, Expressions.pipe(left()), Expressions.pipe(right()), function()); } @Override diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/operator/comparison/BinaryComparisonPipe.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/operator/comparison/BinaryComparisonPipe.java index 84c9221fb4762..0ffebadff9e53 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/operator/comparison/BinaryComparisonPipe.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/operator/comparison/BinaryComparisonPipe.java @@ -9,7 +9,7 @@ import org.elasticsearch.xpack.sql.expression.gen.pipeline.BinaryPipe; import org.elasticsearch.xpack.sql.expression.gen.pipeline.Pipe; import org.elasticsearch.xpack.sql.expression.predicate.operator.comparison.BinaryComparisonProcessor.BinaryComparisonOperation; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; import org.elasticsearch.xpack.sql.tree.NodeInfo; import java.util.Objects; @@ -18,8 +18,8 @@ public class BinaryComparisonPipe extends BinaryPipe { private final BinaryComparisonOperation operation; - public BinaryComparisonPipe(Location location, Expression expression, Pipe left, Pipe right, BinaryComparisonOperation operation) { - super(location, expression, left, right); + public BinaryComparisonPipe(Source source, Expression expression, Pipe left, Pipe right, BinaryComparisonOperation operation) { + super(source, expression, left, right); this.operation = operation; } @@ -30,7 +30,7 @@ protected NodeInfo info() { @Override protected BinaryPipe replaceChildren(Pipe left, Pipe right) { - return new BinaryComparisonPipe(location(), expression(), left, right, operation); + return new BinaryComparisonPipe(source(), expression(), left, right, operation); } @Override diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/operator/comparison/Equals.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/operator/comparison/Equals.java index 21032a253e857..a8d609ac9020d 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/operator/comparison/Equals.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/operator/comparison/Equals.java @@ -8,13 +8,13 @@ import org.elasticsearch.xpack.sql.expression.Expression; import org.elasticsearch.xpack.sql.expression.predicate.Negatable; import org.elasticsearch.xpack.sql.expression.predicate.operator.comparison.BinaryComparisonProcessor.BinaryComparisonOperation; -import org.elasticsearch.xpack.sql.tree.Location; +import 
org.elasticsearch.xpack.sql.tree.Source; import org.elasticsearch.xpack.sql.tree.NodeInfo; public class Equals extends BinaryComparison implements Negatable { - public Equals(Location location, Expression left, Expression right) { - super(location, left, right, BinaryComparisonOperation.EQ); + public Equals(Source source, Expression left, Expression right) { + super(source, left, right, BinaryComparisonOperation.EQ); } @Override @@ -24,16 +24,16 @@ protected NodeInfo info() { @Override protected Equals replaceChildren(Expression newLeft, Expression newRight) { - return new Equals(location(), newLeft, newRight); + return new Equals(source(), newLeft, newRight); } @Override public Equals swapLeftAndRight() { - return new Equals(location(), right(), left()); + return new Equals(source(), right(), left()); } @Override public BinaryComparison negate() { - return new NotEquals(location(), left(), right()); + return new NotEquals(source(), left(), right()); } } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/operator/comparison/GreaterThan.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/operator/comparison/GreaterThan.java index 743f539fd063f..9388a26fe886f 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/operator/comparison/GreaterThan.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/operator/comparison/GreaterThan.java @@ -8,13 +8,13 @@ import org.elasticsearch.xpack.sql.expression.Expression; import org.elasticsearch.xpack.sql.expression.predicate.Negatable; import org.elasticsearch.xpack.sql.expression.predicate.operator.comparison.BinaryComparisonProcessor.BinaryComparisonOperation; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; import org.elasticsearch.xpack.sql.tree.NodeInfo; public class GreaterThan extends BinaryComparison implements Negatable { - public GreaterThan(Location location, Expression left, Expression right) { - super(location, left, right, BinaryComparisonOperation.GT); + public GreaterThan(Source source, Expression left, Expression right) { + super(source, left, right, BinaryComparisonOperation.GT); } @Override @@ -24,16 +24,16 @@ protected NodeInfo info() { @Override protected GreaterThan replaceChildren(Expression newLeft, Expression newRight) { - return new GreaterThan(location(), newLeft, newRight); + return new GreaterThan(source(), newLeft, newRight); } @Override public LessThan swapLeftAndRight() { - return new LessThan(location(), right(), left()); + return new LessThan(source(), right(), left()); } @Override public LessThanOrEqual negate() { - return new LessThanOrEqual(location(), left(), right()); + return new LessThanOrEqual(source(), left(), right()); } } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/operator/comparison/GreaterThanOrEqual.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/operator/comparison/GreaterThanOrEqual.java index 508cdb8d8d4f2..51b5e926bef7d 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/operator/comparison/GreaterThanOrEqual.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/operator/comparison/GreaterThanOrEqual.java @@ -8,13 +8,13 @@ import org.elasticsearch.xpack.sql.expression.Expression; import 
org.elasticsearch.xpack.sql.expression.predicate.Negatable; import org.elasticsearch.xpack.sql.expression.predicate.operator.comparison.BinaryComparisonProcessor.BinaryComparisonOperation; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; import org.elasticsearch.xpack.sql.tree.NodeInfo; public class GreaterThanOrEqual extends BinaryComparison implements Negatable { - public GreaterThanOrEqual(Location location, Expression left, Expression right) { - super(location, left, right, BinaryComparisonOperation.GTE); + public GreaterThanOrEqual(Source source, Expression left, Expression right) { + super(source, left, right, BinaryComparisonOperation.GTE); } @Override @@ -24,16 +24,16 @@ protected NodeInfo info() { @Override protected GreaterThanOrEqual replaceChildren(Expression newLeft, Expression newRight) { - return new GreaterThanOrEqual(location(), newLeft, newRight); + return new GreaterThanOrEqual(source(), newLeft, newRight); } @Override public LessThanOrEqual swapLeftAndRight() { - return new LessThanOrEqual(location(), right(), left()); + return new LessThanOrEqual(source(), right(), left()); } @Override public LessThan negate() { - return new LessThan(location(), left(), right()); + return new LessThan(source(), left(), right()); } } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/operator/comparison/In.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/operator/comparison/In.java index 329dc307da8ff..f9e6b72fb5958 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/operator/comparison/In.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/operator/comparison/In.java @@ -8,22 +8,23 @@ import org.elasticsearch.xpack.sql.expression.Expression; import org.elasticsearch.xpack.sql.expression.Expressions; import org.elasticsearch.xpack.sql.expression.Foldables; +import org.elasticsearch.xpack.sql.expression.Nullability; import org.elasticsearch.xpack.sql.expression.function.scalar.ScalarFunction; import org.elasticsearch.xpack.sql.expression.gen.pipeline.Pipe; import org.elasticsearch.xpack.sql.expression.gen.script.ScriptTemplate; -import org.elasticsearch.xpack.sql.tree.Location; import org.elasticsearch.xpack.sql.tree.NodeInfo; +import org.elasticsearch.xpack.sql.tree.Source; import org.elasticsearch.xpack.sql.type.DataType; import org.elasticsearch.xpack.sql.util.CollectionUtils; import java.util.ArrayList; import java.util.LinkedHashSet; import java.util.List; -import java.util.Locale; import java.util.Objects; import java.util.StringJoiner; import java.util.stream.Collectors; +import static org.elasticsearch.common.logging.LoggerMessageFormat.format; import static org.elasticsearch.xpack.sql.expression.gen.script.ParamsBuilder.paramsBuilder; public class In extends ScalarFunction { @@ -31,8 +32,8 @@ public class In extends ScalarFunction { private final Expression value; private final List list; - public In(Location location, Expression value, List list) { - super(location, CollectionUtils.combine(list, value)); + public In(Source source, Expression value, List list) { + super(source, CollectionUtils.combine(list, value)); this.value = value; this.list = new ArrayList<>(new LinkedHashSet<>(list)); } @@ -47,7 +48,7 @@ public Expression replaceChildren(List newChildren) { if (newChildren.size() < 2) { throw new IllegalArgumentException("expected at least [2] children but 
received [" + newChildren.size() + "]"); } - return new In(location(), newChildren.get(newChildren.size() - 1), newChildren.subList(0, newChildren.size() - 1)); + return new In(source(), newChildren.get(newChildren.size() - 1), newChildren.subList(0, newChildren.size() - 1)); } public Expression value() { @@ -64,8 +65,8 @@ public DataType dataType() { } @Override - public boolean nullable() { - return false; + public Nullability nullable() { + return Nullability.UNKNOWN; } @Override @@ -99,7 +100,7 @@ public ScriptTemplate asScript() { List values = new ArrayList<>(new LinkedHashSet<>(Foldables.valuesOf(list, value.dataType()))); return new ScriptTemplate( - formatTemplate(String.format(Locale.ROOT, "{sql}.in(%s, {})", leftScript.template())), + formatTemplate(format("{sql}.","in({}, {})", leftScript.template())), paramsBuilder() .script(leftScript.params()) .variable(values) @@ -109,7 +110,7 @@ public ScriptTemplate asScript() { @Override protected Pipe makePipe() { - return new InPipe(location(), this, children().stream().map(Expressions::pipe).collect(Collectors.toList())); + return new InPipe(source(), this, children().stream().map(Expressions::pipe).collect(Collectors.toList())); } @Override diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/operator/comparison/InPipe.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/operator/comparison/InPipe.java index 4ae72b4b49e7a..77c6d2bdc7670 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/operator/comparison/InPipe.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/operator/comparison/InPipe.java @@ -10,7 +10,7 @@ import org.elasticsearch.xpack.sql.execution.search.SqlSourceBuilder; import org.elasticsearch.xpack.sql.expression.Expression; import org.elasticsearch.xpack.sql.expression.gen.pipeline.Pipe; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; import org.elasticsearch.xpack.sql.tree.NodeInfo; import java.util.ArrayList; @@ -22,8 +22,8 @@ public class InPipe extends Pipe { private List pipes; - public InPipe(Location location, Expression expression, List pipes) { - super(location, expression, pipes); + public InPipe(Source source, Expression expression, List pipes) { + super(source, expression, pipes); this.pipes = pipes; } @@ -32,7 +32,7 @@ public final Pipe replaceChildren(List newChildren) { if (newChildren.size() < 2) { throw new IllegalArgumentException("expected at least [2] children but received [" + newChildren.size() + "]"); } - return new InPipe(location(), expression(), newChildren); + return new InPipe(source(), expression(), newChildren); } @Override diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/operator/comparison/LessThan.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/operator/comparison/LessThan.java index d887ca72b4eed..d1d7b414ea5fe 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/operator/comparison/LessThan.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/operator/comparison/LessThan.java @@ -8,13 +8,13 @@ import org.elasticsearch.xpack.sql.expression.Expression; import org.elasticsearch.xpack.sql.expression.predicate.Negatable; import 
org.elasticsearch.xpack.sql.expression.predicate.operator.comparison.BinaryComparisonProcessor.BinaryComparisonOperation; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; import org.elasticsearch.xpack.sql.tree.NodeInfo; public class LessThan extends BinaryComparison implements Negatable { - public LessThan(Location location, Expression left, Expression right) { - super(location, left, right, BinaryComparisonOperation.LT); + public LessThan(Source source, Expression left, Expression right) { + super(source, left, right, BinaryComparisonOperation.LT); } @Override @@ -24,16 +24,16 @@ protected NodeInfo info() { @Override protected LessThan replaceChildren(Expression newLeft, Expression newRight) { - return new LessThan(location(), newLeft, newRight); + return new LessThan(source(), newLeft, newRight); } @Override public GreaterThan swapLeftAndRight() { - return new GreaterThan(location(), right(), left()); + return new GreaterThan(source(), right(), left()); } @Override public GreaterThanOrEqual negate() { - return new GreaterThanOrEqual(location(), left(), right()); + return new GreaterThanOrEqual(source(), left(), right()); } } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/operator/comparison/LessThanOrEqual.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/operator/comparison/LessThanOrEqual.java index 3656c1f4b16cb..00f4f01937eaa 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/operator/comparison/LessThanOrEqual.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/operator/comparison/LessThanOrEqual.java @@ -8,13 +8,13 @@ import org.elasticsearch.xpack.sql.expression.Expression; import org.elasticsearch.xpack.sql.expression.predicate.Negatable; import org.elasticsearch.xpack.sql.expression.predicate.operator.comparison.BinaryComparisonProcessor.BinaryComparisonOperation; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; import org.elasticsearch.xpack.sql.tree.NodeInfo; public class LessThanOrEqual extends BinaryComparison implements Negatable { - public LessThanOrEqual(Location location, Expression left, Expression right) { - super(location, left, right, BinaryComparisonOperation.LTE); + public LessThanOrEqual(Source source, Expression left, Expression right) { + super(source, left, right, BinaryComparisonOperation.LTE); } @Override @@ -24,16 +24,16 @@ protected NodeInfo info() { @Override protected LessThanOrEqual replaceChildren(Expression newLeft, Expression newRight) { - return new LessThanOrEqual(location(), newLeft, newRight); + return new LessThanOrEqual(source(), newLeft, newRight); } @Override public GreaterThanOrEqual swapLeftAndRight() { - return new GreaterThanOrEqual(location(), right(), left()); + return new GreaterThanOrEqual(source(), right(), left()); } @Override public GreaterThan negate() { - return new GreaterThan(location(), left(), right()); + return new GreaterThan(source(), left(), right()); } } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/operator/comparison/NotEquals.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/operator/comparison/NotEquals.java index 78830f8d20ab8..8d88e851dce86 100644 --- 
a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/operator/comparison/NotEquals.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/operator/comparison/NotEquals.java @@ -8,13 +8,13 @@ import org.elasticsearch.xpack.sql.expression.Expression; import org.elasticsearch.xpack.sql.expression.predicate.Negatable; import org.elasticsearch.xpack.sql.expression.predicate.operator.comparison.BinaryComparisonProcessor.BinaryComparisonOperation; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; import org.elasticsearch.xpack.sql.tree.NodeInfo; public class NotEquals extends BinaryComparison implements Negatable { - public NotEquals(Location location, Expression left, Expression right) { - super(location, left, right, BinaryComparisonOperation.NEQ); + public NotEquals(Source source, Expression left, Expression right) { + super(source, left, right, BinaryComparisonOperation.NEQ); } @Override @@ -24,16 +24,16 @@ protected NodeInfo info() { @Override protected NotEquals replaceChildren(Expression newLeft, Expression newRight) { - return new NotEquals(location(), newLeft, newRight); + return new NotEquals(source(), newLeft, newRight); } @Override public NotEquals swapLeftAndRight() { - return new NotEquals(location(), right(), left()); + return new NotEquals(source(), right(), left()); } @Override public BinaryComparison negate() { - return new Equals(location(), left(), right()); + return new Equals(source(), left(), right()); } } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/operator/comparison/NullEquals.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/operator/comparison/NullEquals.java index ce2728267508a..70479e2d704e8 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/operator/comparison/NullEquals.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/operator/comparison/NullEquals.java @@ -6,8 +6,9 @@ package org.elasticsearch.xpack.sql.expression.predicate.operator.comparison; import org.elasticsearch.xpack.sql.expression.Expression; +import org.elasticsearch.xpack.sql.expression.Nullability; import org.elasticsearch.xpack.sql.expression.predicate.operator.comparison.BinaryComparisonProcessor.BinaryComparisonOperation; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; import org.elasticsearch.xpack.sql.tree.NodeInfo; /** @@ -15,8 +16,8 @@ */ public class NullEquals extends BinaryComparison { - public NullEquals(Location location, Expression left, Expression right) { - super(location, left, right, BinaryComparisonOperation.NULLEQ); + public NullEquals(Source source, Expression left, Expression right) { + super(source, left, right, BinaryComparisonOperation.NULLEQ); } @Override @@ -26,16 +27,16 @@ protected NodeInfo info() { @Override protected NullEquals replaceChildren(Expression newLeft, Expression newRight) { - return new NullEquals(location(), newLeft, newRight); + return new NullEquals(source(), newLeft, newRight); } @Override public NullEquals swapLeftAndRight() { - return new NullEquals(location(), right(), left()); + return new NullEquals(source(), right(), left()); } @Override - public boolean nullable() { - return false; + public Nullability nullable() { + return Nullability.FALSE; } } diff --git 
a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/regex/Like.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/regex/Like.java index 9dc3c69fd2971..72c486cf65eba 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/regex/Like.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/regex/Like.java @@ -6,15 +6,15 @@ package org.elasticsearch.xpack.sql.expression.predicate.regex; import org.elasticsearch.xpack.sql.expression.Expression; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; import org.elasticsearch.xpack.sql.tree.NodeInfo; public class Like extends RegexMatch { private final LikePattern pattern; - public Like(Location location, Expression left, LikePattern pattern) { - super(location, left, pattern.asJavaRegex()); + public Like(Source source, Expression left, LikePattern pattern) { + super(source, left, pattern.asJavaRegex()); this.pattern = pattern; } @@ -29,6 +29,6 @@ protected NodeInfo info() { @Override protected Like replaceChild(Expression newLeft) { - return new Like(location(), newLeft, pattern); + return new Like(source(), newLeft, pattern); } } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/regex/RLike.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/regex/RLike.java index a09586fd35fb4..b925bd769ea4c 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/regex/RLike.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/regex/RLike.java @@ -6,15 +6,15 @@ package org.elasticsearch.xpack.sql.expression.predicate.regex; import org.elasticsearch.xpack.sql.expression.Expression; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; import org.elasticsearch.xpack.sql.tree.NodeInfo; public class RLike extends RegexMatch { private final String pattern; - public RLike(Location location, Expression left, String pattern) { - super(location, left, pattern); + public RLike(Source source, Expression left, String pattern) { + super(source, left, pattern); this.pattern = pattern; } @@ -29,6 +29,6 @@ protected NodeInfo info() { @Override protected RLike replaceChild(Expression newChild) { - return new RLike(location(), newChild, pattern); + return new RLike(source(), newChild, pattern); } } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/regex/RegexMatch.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/regex/RegexMatch.java index f9390fdfa4514..ed65b1fcaf9cb 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/regex/RegexMatch.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/regex/RegexMatch.java @@ -7,18 +7,19 @@ package org.elasticsearch.xpack.sql.expression.predicate.regex; import org.elasticsearch.xpack.sql.expression.Expression; +import org.elasticsearch.xpack.sql.expression.Nullability; import org.elasticsearch.xpack.sql.expression.function.scalar.UnaryScalarFunction; import org.elasticsearch.xpack.sql.expression.gen.processor.Processor; import org.elasticsearch.xpack.sql.expression.predicate.regex.RegexProcessor.RegexOperation; -import org.elasticsearch.xpack.sql.tree.Location; +import 
org.elasticsearch.xpack.sql.tree.Source; import org.elasticsearch.xpack.sql.type.DataType; public abstract class RegexMatch extends UnaryScalarFunction { private final String pattern; - protected RegexMatch(Location location, Expression value, String pattern) { - super(location, value); + protected RegexMatch(Source source, Expression value, String pattern) { + super(source, value); this.pattern = pattern; } @@ -28,8 +29,11 @@ public DataType dataType() { } @Override - public boolean nullable() { - return field().nullable() && pattern != null; + public Nullability nullable() { + if (pattern == null) { + return Nullability.TRUE; + } + return field().nullable(); } @Override diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/optimizer/Optimizer.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/optimizer/Optimizer.java index 3dc894bda3fff..ade69463d1345 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/optimizer/Optimizer.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/optimizer/Optimizer.java @@ -18,6 +18,7 @@ import org.elasticsearch.xpack.sql.expression.FieldAttribute; import org.elasticsearch.xpack.sql.expression.Literal; import org.elasticsearch.xpack.sql.expression.NamedExpression; +import org.elasticsearch.xpack.sql.expression.Nullability; import org.elasticsearch.xpack.sql.expression.Order; import org.elasticsearch.xpack.sql.expression.function.Function; import org.elasticsearch.xpack.sql.expression.function.FunctionAttribute; @@ -44,7 +45,6 @@ import org.elasticsearch.xpack.sql.expression.predicate.Range; import org.elasticsearch.xpack.sql.expression.predicate.conditional.ArbitraryConditionalFunction; import org.elasticsearch.xpack.sql.expression.predicate.conditional.Coalesce; -import org.elasticsearch.xpack.sql.expression.predicate.conditional.NullIf; import org.elasticsearch.xpack.sql.expression.predicate.logical.And; import org.elasticsearch.xpack.sql.expression.predicate.logical.Not; import org.elasticsearch.xpack.sql.expression.predicate.logical.Or; @@ -173,7 +173,7 @@ protected LogicalPlan rule(Aggregate agg) { } ExpressionSet unique = new ExpressionSet<>(groupings); if (unique.size() != groupings.size()) { - return new Aggregate(agg.location(), agg.child(), new ArrayList<>(unique), agg.aggregates()); + return new Aggregate(agg.source(), agg.child(), new ArrayList<>(unique), agg.aggregates()); } return agg; } @@ -206,7 +206,7 @@ protected LogicalPlan rule(Aggregate agg) { for (NamedExpression ne : aggs) { newAggs.add(unique.get(reverse.get(ne))); } - return new Aggregate(agg.location(), agg.child(), agg.groupings(), newAggs); + return new Aggregate(agg.source(), agg.child(), agg.groupings(), newAggs); } return agg; @@ -243,11 +243,11 @@ protected Expression rule(Expression e, Map seen, Map seen, ExtendedStats extendedStats = seen.get(argument); if (extendedStats == null) { - extendedStats = new ExtendedStats(f.location(), argument); + extendedStats = new ExtendedStats(f.source(), argument); seen.put(argument, extendedStats); } @@ -353,7 +353,7 @@ private Expression collect(Expression e, Map seen) { Match match = seen.get(argument); if (match == null) { - match = new Match(new Stats(f.location(), argument)); + match = new Match(new Stats(f.source(), argument)); match.functionTypes.add(f.getClass()); seen.put(argument, match); } @@ -519,7 +519,7 @@ public LogicalPlan apply(LogicalPlan p) { Map percentilesPerField = new LinkedHashMap<>(); // create a Percentile agg for each field (and its 
associated percents) percentsPerField.forEach((k, v) -> { - percentilesPerField.put(k, new Percentiles(v.iterator().next().location(), k, new ArrayList<>(v))); + percentilesPerField.put(k, new Percentiles(v.iterator().next().source(), k, new ArrayList<>(v))); }); // now replace the agg with pointer to the main ones @@ -577,7 +577,7 @@ public LogicalPlan apply(LogicalPlan p) { Map ranksPerField = new LinkedHashMap<>(); // create a PercentileRanks agg for each field (and its associated values) valuesPerField.forEach((k, v) -> { - ranksPerField.put(k, new PercentileRanks(v.iterator().next().location(), k, new ArrayList<>(v))); + ranksPerField.put(k, new PercentileRanks(v.iterator().next().source(), k, new ArrayList<>(v))); }); // now replace the agg with pointer to the main ones @@ -633,12 +633,12 @@ protected LogicalPlan rule(Filter filter) { return filter.child(); } if (FALSE.equals(condition) || Expressions.isNull(condition)) { - return new LocalRelation(filter.location(), new EmptyExecutable(filter.output())); + return new LocalRelation(filter.source(), new EmptyExecutable(filter.output())); } } if (!condition.equals(filter.condition())) { - return new Filter(filter.location(), filter.child(), condition); + return new Filter(filter.source(), filter.child(), condition); } return filter; } @@ -681,7 +681,7 @@ protected LogicalPlan rule(Filter filter) { }, AggregateFunctionAttribute.class); if (newCondition != cond) { - return new Filter(filter.location(), filter.child(), newCondition); + return new Filter(filter.source(), filter.child(), newCondition); } } return filter; @@ -762,12 +762,12 @@ protected LogicalPlan rule(Project project) { // no orders left, eliminate it all-together if (orders.isEmpty()) { - return new Project(project.location(), ob.child(), project.projections()); + return new Project(project.source(), ob.child(), project.projections()); } if (orders.size() != ob.order().size()) { - OrderBy newOrder = new OrderBy(ob.location(), ob.child(), orders); - return new Project(project.location(), newOrder, project.projections()); + OrderBy newOrder = new OrderBy(ob.source(), ob.child(), orders); + return new Project(project.source(), newOrder, project.projections()); } } return project; @@ -801,7 +801,7 @@ protected LogicalPlan rule(OrderBy ob) { return true; }).collect(toList()); - return nonAgg.isEmpty() ? ob.child() : new OrderBy(ob.location(), ob.child(), nonAgg); + return nonAgg.isEmpty() ? ob.child() : new OrderBy(ob.source(), ob.child(), nonAgg); } } return ob; @@ -840,8 +840,8 @@ protected LogicalPlan rule(OrderBy ob) { } if (orderChanged) { - Aggregate newAgg = new Aggregate(a.location(), a.child(), groupings, a.aggregates()); - return new OrderBy(ob.location(), newAgg, ob.order()); + Aggregate newAgg = new Aggregate(a.source(), a.child(), groupings, a.aggregates()); + return new OrderBy(ob.source(), newAgg, ob.order()); } } return ob; @@ -880,7 +880,7 @@ protected LogicalPlan rule(LogicalPlan plan) { Cast c = (Cast) as.child(); if (c.from() == c.to()) { - Alias newAs = new Alias(as.location(), as.name(), as.qualifier(), c.field(), as.id(), as.synthetic()); + Alias newAs = new Alias(as.source(), as.name(), as.qualifier(), c.field(), as.id(), as.synthetic()); replacedCast.put(as.toAttribute(), newAs.toAttribute()); return newAs; } @@ -925,7 +925,7 @@ protected LogicalPlan rule(LogicalPlan plan) { } } - return changed ? new Project(p.location(), p.child(), newProjections) : p; + return changed ? 
new Project(p.source(), p.child(), newProjections) : p; }, Project.class); } @@ -974,11 +974,11 @@ protected LogicalPlan rule(Project project) { if (child instanceof Project) { Project p = (Project) child; // eliminate lower project but first replace the aliases in the upper one - return new Project(p.location(), p.child(), combineProjections(project.projections(), p.projections())); + return new Project(p.source(), p.child(), combineProjections(project.projections(), p.projections())); } if (child instanceof Aggregate) { Aggregate a = (Aggregate) child; - return new Aggregate(a.location(), a.child(), a.groupings(), combineProjections(project.projections(), a.aggregates())); + return new Aggregate(a.source(), a.child(), a.groupings(), combineProjections(project.projections(), a.aggregates())); } return project; @@ -1097,13 +1097,13 @@ static class FoldNull extends OptimizerExpressionRule { @Override protected Expression rule(Expression e) { if (e instanceof IsNotNull) { - if (((IsNotNull) e).field().nullable() == false) { - return new Literal(e.location(), Expressions.name(e), Boolean.TRUE, DataType.BOOLEAN); + if (((IsNotNull) e).field().nullable() == Nullability.FALSE) { + return new Literal(e.source(), Expressions.name(e), Boolean.TRUE, DataType.BOOLEAN); } } else if (e instanceof IsNull) { - if (((IsNull) e).field().nullable() == false) { - return new Literal(e.location(), Expressions.name(e), Boolean.FALSE, DataType.BOOLEAN); + if (((IsNull) e).field().nullable() == Nullability.FALSE) { + return new Literal(e.source(), Expressions.name(e), Boolean.FALSE, DataType.BOOLEAN); } } else if (e instanceof In) { @@ -1112,10 +1112,7 @@ protected Expression rule(Expression e) { return Literal.of(in, null); } - } else if (e instanceof NullIf) { - return e; - - } else if (e.nullable() && Expressions.anyMatch(e.children(), Expressions::isNull)) { + } else if (e.nullable() == Nullability.TRUE && Expressions.anyMatch(e.children(), Expressions::isNull)) { return Literal.of(e, null); } @@ -1230,7 +1227,7 @@ private Expression simplifyAndOr(BinaryPredicate bc) { // (a || b || c || ... ) && (a || b || d || ... ) => ((c || ...) && (d || ...)) || a || b Expression combineLeft = combineOr(lDiff); Expression combineRight = combineOr(rDiff); - return combineOr(combine(common, new And(combineLeft.location(), combineLeft, combineRight))); + return combineOr(combine(common, new And(combineLeft.source(), combineLeft, combineRight))); } if (bc instanceof Or) { @@ -1268,7 +1265,7 @@ private Expression simplifyAndOr(BinaryPredicate bc) { // (a || b || c || ... ) && (a || b || d || ... ) => ((c || ...) 
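The FoldNull hunks above encode simple truths: IS NOT NULL over a never-null expression folds to TRUE, IS NULL folds to FALSE, and a definitely-nullable expression with a null argument folds to a null literal. A self-contained sketch of the first two decisions, using stand-in types rather than the real Expression tree:

    // Standalone illustration of the FoldNull decisions above; a null return
    // means "cannot fold, keep the original expression".
    final class FoldNullSketch {
        enum Nullability { TRUE, FALSE, UNKNOWN }

        static Boolean foldIsNotNull(Nullability n) {   // x IS NOT NULL
            return n == Nullability.FALSE ? Boolean.TRUE : null;
        }

        static Boolean foldIsNull(Nullability n) {      // x IS NULL
            return n == Nullability.FALSE ? Boolean.FALSE : null;
        }

        public static void main(String[] args) {
            System.out.println(foldIsNotNull(Nullability.FALSE));   // true
            System.out.println(foldIsNull(Nullability.FALSE));      // false
            System.out.println(foldIsNotNull(Nullability.UNKNOWN)); // null, not folded
        }
    }
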
&& (d || ...)) || a || b Expression combineLeft = combineAnd(lDiff); Expression combineRight = combineAnd(rDiff); - return combineAnd(combine(common, new Or(combineLeft.location(), combineLeft, combineRight))); + return combineAnd(combine(common, new Or(combineLeft.source(), combineLeft, combineRight))); } // TODO: eliminate conjunction/disjunction @@ -1314,7 +1311,7 @@ private Expression simplify(BinaryComparison bc) { // true for equality if (bc instanceof Equals || bc instanceof GreaterThanOrEqual || bc instanceof LessThanOrEqual) { - if (!l.nullable() && !r.nullable() && l.semanticEquals(r)) { + if (l.nullable() == Nullability.FALSE && r.nullable() == Nullability.FALSE && l.semanticEquals(r)) { return TRUE; } } @@ -1323,13 +1320,13 @@ private Expression simplify(BinaryComparison bc) { return TRUE; } if (Expressions.isNull(r)) { - return new IsNull(bc.location(), l); + return new IsNull(bc.source(), l); } } // false for equality if (bc instanceof NotEquals || bc instanceof GreaterThan || bc instanceof LessThan) { - if (!l.nullable() && !r.nullable() && l.semanticEquals(r)) { + if (l.nullable() == Nullability.FALSE && r.nullable() == Nullability.FALSE && l.semanticEquals(r)) { return FALSE; } } @@ -1524,7 +1521,7 @@ private Expression combine(And and) { bcs.remove(j); bcs.remove(i); - ranges.add(new Range(and.location(), main.left(), + ranges.add(new Range(and.source(), main.left(), main.right(), main instanceof GreaterThanOrEqual, other.right(), other instanceof LessThanOrEqual)); @@ -1536,7 +1533,7 @@ else if ((other instanceof GreaterThan || other instanceof GreaterThanOrEqual) bcs.remove(j); bcs.remove(i); - ranges.add(new Range(and.location(), main.left(), + ranges.add(new Range(and.source(), main.left(), other.right(), other instanceof GreaterThanOrEqual, main.right(), main instanceof LessThanOrEqual)); @@ -1659,7 +1656,7 @@ private static boolean findExistingRange(Range main, List ranges, boolean if (lower || upper) { ranges.remove(i); ranges.add(i, - new Range(main.location(), main.value(), + new Range(main.source(), main.value(), lower ? main.lower() : other.lower(), lower ? main.includeLower() : other.includeLower(), upper ? main.upper() : other.upper(), @@ -1675,7 +1672,7 @@ private static boolean findExistingRange(Range main, List ranges, boolean if (lower && upper) { ranges.remove(i); ranges.add(i, - new Range(main.location(), main.value(), + new Range(main.source(), main.value(), lower ? main.lower() : other.lower(), lower ? main.includeLower() : other.includeLower(), upper ? 
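The simplifyAndOr comments above describe common-factor extraction, e.g. (a || b || c) && (a || b || d) => a || b || (c && d). The transformation is purely set-based, so it can be demonstrated without the Expression machinery (strings stand in for the sub-predicates):

    import java.util.ArrayList;
    import java.util.Arrays;
    import java.util.List;

    // Sketch of the common-subexpression factoring named in the comments above.
    public class BooleanFactorSketch {
        public static void main(String[] args) {
            List<String> left = new ArrayList<>(Arrays.asList("a", "b", "c"));
            List<String> right = new ArrayList<>(Arrays.asList("a", "b", "d"));

            List<String> common = new ArrayList<>(left);
            common.retainAll(right);   // [a, b] -- the shared disjuncts
            left.removeAll(common);    // [c]
            right.removeAll(common);   // [d]

            System.out.println(String.join(" || ", common) + " || ("
                    + String.join(" || ", left) + " && "
                    + String.join(" || ", right) + ")");
            // prints: a || b || (c && d)
        }
    }
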
main.upper() : other.upper(), @@ -1712,7 +1709,7 @@ private boolean findConjunctiveComparisonInRange(BinaryComparison main, List { protected LogicalPlan rule(Limit limit) { if (limit.limit() instanceof Literal) { if (Integer.valueOf(0).equals((((Literal) limit.limit()).fold()))) { - return new LocalRelation(limit.location(), new EmptyExecutable(limit.output())); + return new LocalRelation(limit.source(), new EmptyExecutable(limit.output())); } } return limit; @@ -1852,14 +1849,14 @@ protected LogicalPlan rule(LogicalPlan plan) { List values = extractConstants(p.projections()); if (values.size() == p.projections().size() && !(p.child() instanceof EsRelation) && isNotQueryWithFromClauseAndFilterFoldedToFalse(p)) { - return new LocalRelation(p.location(), new SingletonExecutable(p.output(), values.toArray())); + return new LocalRelation(p.source(), new SingletonExecutable(p.output(), values.toArray())); } } if (plan instanceof Aggregate) { Aggregate a = (Aggregate) plan; List values = extractConstants(a.aggregates()); if (values.size() == a.aggregates().size() && isNotQueryWithFromClauseAndFilterFoldedToFalse(a)) { - return new LocalRelation(a.location(), new SingletonExecutable(a.output(), values.toArray())); + return new LocalRelation(a.source(), new SingletonExecutable(a.output(), values.toArray())); } } return plan; diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/AbstractBuilder.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/AbstractBuilder.java index 67721b35d7e29..1d6c264321787 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/AbstractBuilder.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/AbstractBuilder.java @@ -12,6 +12,7 @@ import org.antlr.v4.runtime.tree.TerminalNode; import org.elasticsearch.xpack.sql.plan.logical.LogicalPlan; import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; import org.elasticsearch.xpack.sql.util.Check; import java.util.ArrayList; @@ -64,32 +65,31 @@ protected List visitList(List contexts, Clas return results; } - static Location source(ParseTree ctx) { + static Source source(ParseTree ctx) { if (ctx instanceof ParserRuleContext) { return source((ParserRuleContext) ctx); } - return Location.EMPTY; + return Source.EMPTY; } - static Location source(TerminalNode terminalNode) { + static Source source(TerminalNode terminalNode) { Check.notNull(terminalNode, "terminalNode is null"); return source(terminalNode.getSymbol()); } - static Location source(ParserRuleContext parserRuleContext) { + static Source source(ParserRuleContext parserRuleContext) { Check.notNull(parserRuleContext, "parserRuleContext is null"); - return source(parserRuleContext.getStart()); + Token start = parserRuleContext.start; + Token stop = parserRuleContext.stop != null ? 
parserRuleContext.stop : start; + Interval interval = new Interval(start.getStartIndex(), stop.getStopIndex()); + String text = start.getInputStream().getText(interval); + return new Source(new Location(start.getLine(), start.getCharPositionInLine()), text); } - static Location source(Token token) { + static Source source(Token token) { Check.notNull(token, "token is null"); - return new Location(token.getLine(), token.getCharPositionInLine()); - } - - static String text(ParserRuleContext parserRuleContext) { - Check.notNull(parserRuleContext, "parserRuleContext is null"); - Interval interval = new Interval(parserRuleContext.start.getStartIndex(), parserRuleContext.stop.getStopIndex()); - return parserRuleContext.start.getInputStream().getText(interval); + String text = token.getInputStream().getText(new Interval(token.getStartIndex(), token.getStopIndex())); + return new Source(new Location(token.getLine(), token.getCharPositionInLine()), text); } /** @@ -113,6 +113,7 @@ static String unquoteString(String text) { @Override public Object visitTerminal(TerminalNode node) { - throw new ParsingException(source(node), "Does not know how to handle {}", node.getText()); + Source source = source(node); + throw new ParsingException(source, "Does not know how to handle {}", source.text()); } } \ No newline at end of file diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/CommandBuilder.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/CommandBuilder.java index e0cd8cf80d0cf..532cef01f555e 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/CommandBuilder.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/CommandBuilder.java @@ -35,7 +35,7 @@ import org.elasticsearch.xpack.sql.plan.logical.command.sys.SysTables; import org.elasticsearch.xpack.sql.plan.logical.command.sys.SysTypes; import org.elasticsearch.xpack.sql.proto.SqlTypedParamValue; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; import org.elasticsearch.xpack.sql.util.StringUtils; import java.util.ArrayList; @@ -52,12 +52,12 @@ protected CommandBuilder(Map params) { @Override public Command visitDebug(DebugContext ctx) { - Location loc = source(ctx); + Source source = source(ctx); if (ctx.FORMAT().size() > 1) { - throw new ParsingException(loc, "Debug FORMAT should be specified at most once"); + throw new ParsingException(source, "Debug FORMAT should be specified at most once"); } if (ctx.PLAN().size() > 1) { - throw new ParsingException(loc, "Debug PLAN should be specified at most once"); + throw new ParsingException(source, "Debug PLAN should be specified at most once"); } Debug.Type type = null; @@ -73,21 +73,21 @@ public Command visitDebug(DebugContext ctx) { boolean graphViz = ctx.format != null && ctx.format.getType() == SqlBaseLexer.GRAPHVIZ; Debug.Format format = graphViz ? 
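These AbstractBuilder changes are the core of the refactoring: every parsed node now carries a Source rather than a bare line/column Location, with the covered SQL text sliced out of the ANTLR CharStream via an Interval over the start and stop tokens. The Source class itself is not in this patch; a minimal reconstruction inferred from `new Source(new Location(line, col), text)`, `source.text()` and `source.source().getLineNumber()` elsewhere in the diff (field names are assumptions):

    // Hypothetical reconstruction of org.elasticsearch.xpack.sql.tree.Source.
    public class Source {
        public static final Source EMPTY = new Source(Location.EMPTY, "");

        private final Location location;
        private final String text;

        public Source(Location location, String text) {
            this.location = location;
            this.text = text;
        }

        public Location source() {  // the old line/column information
            return location;
        }

        public String text() {      // the exact SQL fragment this node covers
            return text;
        }
    }

Carrying the text along is what allows the error messages below to quote the offending fragment instead of re-reading the token stream.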
Debug.Format.GRAPHVIZ : Debug.Format.TEXT; - return new Debug(loc, plan(ctx.statement()), type, format); + return new Debug(source, plan(ctx.statement()), type, format); } @Override public Command visitExplain(ExplainContext ctx) { - Location loc = source(ctx); + Source source = source(ctx); if (ctx.PLAN().size() > 1) { - throw new ParsingException(loc, "Explain TYPE should be specified at most once"); + throw new ParsingException(source, "Explain TYPE should be specified at most once"); } if (ctx.FORMAT().size() > 1) { - throw new ParsingException(loc, "Explain FORMAT should be specified at most once"); + throw new ParsingException(source, "Explain FORMAT should be specified at most once"); } if (ctx.VERIFY().size() > 1) { - throw new ParsingException(loc, "Explain VERIFY should be specified at most once"); + throw new ParsingException(source, "Explain VERIFY should be specified at most once"); } Explain.Type type = null; @@ -117,7 +117,7 @@ public Command visitExplain(ExplainContext ctx) { Explain.Format format = graphViz ? Explain.Format.GRAPHVIZ : Explain.Format.TEXT; boolean verify = (ctx.verify != null ? Booleans.parseBoolean(ctx.verify.getText().toLowerCase(Locale.ROOT), true) : true); - return new Explain(loc, plan(ctx.statement()), type, format, verify); + return new Explain(source, plan(ctx.statement()), type, format, verify); } @Override diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/ExpressionBuilder.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/ExpressionBuilder.java index a75ad78521f7f..e103976760fb3 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/ExpressionBuilder.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/ExpressionBuilder.java @@ -106,7 +106,7 @@ import org.elasticsearch.xpack.sql.parser.SqlBaseParser.TimestampEscapedLiteralContext; import org.elasticsearch.xpack.sql.parser.SqlBaseParser.ValueExpressionDefaultContext; import org.elasticsearch.xpack.sql.proto.SqlTypedParamValue; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; import org.elasticsearch.xpack.sql.type.DataType; import org.elasticsearch.xpack.sql.type.DataTypeConversion; import org.elasticsearch.xpack.sql.type.DataTypes; @@ -183,25 +183,25 @@ public Expression visitComparison(ComparisonContext ctx) { Expression right = expression(ctx.right); TerminalNode op = (TerminalNode) ctx.comparisonOperator().getChild(0); - Location loc = source(ctx); + Source source = source(ctx); switch (op.getSymbol().getType()) { case SqlBaseParser.EQ: - return new Equals(loc, left, right); + return new Equals(source, left, right); case SqlBaseParser.NULLEQ: - return new NullEquals(loc, left, right); + return new NullEquals(source, left, right); case SqlBaseParser.NEQ: - return new NotEquals(loc, left, right); + return new NotEquals(source, left, right); case SqlBaseParser.LT: - return new LessThan(loc, left, right); + return new LessThan(source, left, right); case SqlBaseParser.LTE: - return new LessThanOrEqual(loc, left, right); + return new LessThanOrEqual(source, left, right); case SqlBaseParser.GT: - return new GreaterThan(loc, left, right); + return new GreaterThan(source, left, right); case SqlBaseParser.GTE: - return new GreaterThanOrEqual(loc, left, right); + return new GreaterThanOrEqual(source, left, right); default: - throw new ParsingException(loc, "Unknown operator {}", op.getSymbol().getText()); + throw new ParsingException(source, "Unknown 
operator {}", source.text()); } } @@ -215,37 +215,37 @@ public Expression visitPredicated(PredicatedContext ctx) { } PredicateContext pCtx = ctx.predicate(); - Location loc = source(pCtx); + Source source = source(pCtx); Expression e = null; switch (pCtx.kind.getType()) { case SqlBaseParser.BETWEEN: - e = new Range(loc, exp, expression(pCtx.lower), true, expression(pCtx.upper), true); + e = new Range(source, exp, expression(pCtx.lower), true, expression(pCtx.upper), true); break; case SqlBaseParser.IN: if (pCtx.query() != null) { - throw new ParsingException(loc, "IN query not supported yet"); + throw new ParsingException(source, "IN query not supported yet"); } - e = new In(loc, exp, expressions(pCtx.valueExpression())); + e = new In(source, exp, expressions(pCtx.valueExpression())); break; case SqlBaseParser.LIKE: - e = new Like(loc, exp, visitPattern(pCtx.pattern())); + e = new Like(source, exp, visitPattern(pCtx.pattern())); break; case SqlBaseParser.RLIKE: - e = new RLike(loc, exp, string(pCtx.regex)); + e = new RLike(source, exp, string(pCtx.regex)); break; case SqlBaseParser.NULL: // shortcut to avoid double negation later on (since there's no IsNull (missing in ES is a negated exists)) if (pCtx.NOT() != null) { - return new IsNotNull(loc, exp); + return new IsNotNull(source, exp); } else { - return new IsNull(loc, exp); + return new IsNull(source, exp); } default: - throw new ParsingException(loc, "Unknown predicate {}", pCtx.kind.getText()); + throw new ParsingException(source, "Unknown predicate {}", source.text()); } - return pCtx.NOT() != null ? new Not(loc, e) : e; + return pCtx.NOT() != null ? new Not(source, e) : e; } @Override @@ -311,7 +311,7 @@ public LikePattern visitPattern(PatternContext ctx) { @Override public Object visitArithmeticUnary(ArithmeticUnaryContext ctx) { Expression value = expression(ctx.valueExpression()); - Location loc = source(ctx); + Source source = source(ctx); switch (ctx.operator.getType()) { case SqlBaseParser.PLUS: @@ -322,7 +322,7 @@ public Object visitArithmeticUnary(ArithmeticUnaryContext ctx) { } return new Neg(source(ctx.operator), value); default: - throw new ParsingException(loc, "Unknown arithmetic {}", ctx.operator.getText()); + throw new ParsingException(source, "Unknown arithmetic {}", source.text()); } } @@ -331,21 +331,21 @@ public Object visitArithmeticBinary(ArithmeticBinaryContext ctx) { Expression left = expression(ctx.left); Expression right = expression(ctx.right); - Location loc = source(ctx.operator); + Source source = source(ctx.operator); switch (ctx.operator.getType()) { case SqlBaseParser.ASTERISK: - return new Mul(loc, left, right); + return new Mul(source, left, right); case SqlBaseParser.SLASH: - return new Div(loc, left, right); + return new Div(source, left, right); case SqlBaseParser.PERCENT: - return new Mod(loc, left, right); + return new Mod(source, left, right); case SqlBaseParser.PLUS: - return new Add(loc, left, right); + return new Add(source, left, right); case SqlBaseParser.MINUS: - return new Sub(loc, left, right); + return new Sub(source, left, right); default: - throw new ParsingException(loc, "Unknown arithmetic {}", ctx.operator.getText()); + throw new ParsingException(source, "Unknown arithmetic {}", source.text()); } } @@ -464,12 +464,12 @@ public Function visitExtractExpression(ExtractExpressionContext ctx) { public Object visitBuiltinDateTimeFunction(BuiltinDateTimeFunctionContext ctx) { // maps current_XXX to their respective functions // since the functions need access to the Configuration, the 
parser only registers the definition and not the actual function - Location source = source(ctx); + Source source = source(ctx); Literal p = null; if (ctx.precision != null) { try { - Location pSource = source(ctx.precision); + Source pSource = source(ctx.precision); short safeShort = DataTypeConversion.safeToShort(StringUtils.parseLong(ctx.precision.getText())); if (safeShort > 9 || safeShort < 0) { throw new ParsingException(pSource, "Precision needs to be between [0-9], received [{}]", safeShort); @@ -523,17 +523,17 @@ public Object visitLogicalNot(LogicalNotContext ctx) { @Override public Object visitLogicalBinary(LogicalBinaryContext ctx) { int type = ctx.operator.getType(); - Location loc = source(ctx); + Source source = source(ctx); Expression left = expression(ctx.left); Expression right = expression(ctx.right); if (type == SqlBaseParser.AND) { - return new And(loc, left, right); + return new And(source, left, right); } if (type == SqlBaseParser.OR) { - return new Or(loc, left, right); + return new Or(source, left, right); } - throw new ParsingException(loc, "Don't know how to parse {}", ctx); + throw new ParsingException(source, "Don't know how to parse {}", ctx); } @@ -629,8 +629,8 @@ private TemporalAmount of(NumberContext valueNumeric, TimeUnit unit) { private TemporalAmount of(StringContext valuePattern, boolean negative, DataType intervalType) { String valueString = string(valuePattern); - Location loc = source(valuePattern); - TemporalAmount interval = Intervals.parseInterval(loc, valueString, intervalType); + Source source = source(valuePattern); + TemporalAmount interval = Intervals.parseInterval(source, valueString, intervalType); if (negative) { interval = Intervals.negate(interval); } @@ -722,27 +722,27 @@ public Literal visitIntegerLiteral(IntegerLiteralContext ctx) { public Literal visitParamLiteral(ParamLiteralContext ctx) { SqlTypedParamValue param = param(ctx.PARAM()); DataType dataType = DataType.fromTypeName(param.type); - Location loc = source(ctx); + Source source = source(ctx); if (param.value == null) { // no conversion is required for null values - return new Literal(loc, null, dataType); + return new Literal(source, null, dataType); } final DataType sourceType; try { sourceType = DataTypes.fromJava(param.value); } catch (SqlIllegalArgumentException ex) { - throw new ParsingException(ex, loc, "Unexpected actual parameter type [{}] for type [{}]", param.value.getClass().getName(), + throw new ParsingException(ex, source, "Unexpected actual parameter type [{}] for type [{}]", param.value.getClass().getName(), param.type); } if (sourceType == dataType) { // no conversion is required if the value already has the correct type - return new Literal(loc, param.value, dataType); + return new Literal(source, param.value, dataType); } // otherwise we need to make sure that the xcontent-serialized value is converted to the correct type try { - return new Literal(loc, conversionFor(sourceType, dataType).convert(param.value), dataType); + return new Literal(source, conversionFor(sourceType, dataType).convert(param.value), dataType); } catch (SqlIllegalArgumentException ex) { - throw new ParsingException(ex, loc, "Unexpected actual parameter type [{}] for type [{}]", sourceType, param.type); + throw new ParsingException(ex, source, "Unexpected actual parameter type [{}] for type [{}]", sourceType, param.type); } } @@ -783,28 +783,28 @@ private SqlTypedParamValue param(TerminalNode node) { @Override public Literal visitDateEscapedLiteral(DateEscapedLiteralContext ctx) { String
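visitParamLiteral above converts a typed SQL parameter only when the Java value's inferred type differs from the declared one. The shape of that logic as a runnable sketch, with plain Java stand-ins for DataType/DataTypes/conversionFor (none of the plugin types are used):

    import java.util.function.Function;

    // Self-contained sketch of the conversion path in visitParamLiteral above.
    public class ParamConversionSketch {
        public static void main(String[] args) {
            Object paramValue = 42;          // value as deserialized from xcontent
            Class<?> declared = Long.class;  // type the parameter was declared with

            Object value = declared.isInstance(paramValue)
                    ? paramValue             // already the right type, no conversion
                    : conversionFor(paramValue.getClass(), declared).apply(paramValue);

            System.out.println(value + " : " + value.getClass().getSimpleName()); // 42 : Long
        }

        // Stand-in for the plugin's conversionFor(sourceType, dataType) lookup.
        static Function<Object, Object> conversionFor(Class<?> from, Class<?> to) {
            if (from == Integer.class && to == Long.class) {
                return v -> ((Integer) v).longValue();
            }
            throw new IllegalArgumentException("cannot convert " + from + " to " + to);
        }
    }
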
string = string(ctx.string()); - Location loc = source(ctx); + Source source = source(ctx); // parse yyyy-MM-dd DateTime dt = null; try { dt = ISODateTimeFormat.date().parseDateTime(string); } catch(IllegalArgumentException ex) { - throw new ParsingException(loc, "Invalid date received; {}", ex.getMessage()); + throw new ParsingException(source, "Invalid date received; {}", ex.getMessage()); } - return new Literal(loc, DateUtils.of(dt), DataType.DATE); + return new Literal(source, DateUtils.of(dt), DataType.DATE); } @Override public Literal visitTimeEscapedLiteral(TimeEscapedLiteralContext ctx) { String string = string(ctx.string()); - Location loc = source(ctx); + Source source = source(ctx); // parse HH:mm:ss DateTime dt = null; try { dt = ISODateTimeFormat.hourMinuteSecond().parseDateTime(string); } catch (IllegalArgumentException ex) { - throw new ParsingException(loc, "Invalid time received; {}", ex.getMessage()); + throw new ParsingException(source, "Invalid time received; {}", ex.getMessage()); } throw new SqlIllegalArgumentException("Time (only) literals are not supported; a date component is required as well"); @@ -814,7 +814,7 @@ public Literal visitTimeEscapedLiteral(TimeEscapedLiteralContext ctx) { public Literal visitTimestampEscapedLiteral(TimestampEscapedLiteralContext ctx) { String string = string(ctx.string()); - Location loc = source(ctx); + Source source = source(ctx); // parse yyyy-mm-dd hh:mm:ss(.f...) DateTime dt = null; try { @@ -825,16 +825,16 @@ public Literal visitTimestampEscapedLiteral(TimestampEscapedLiteralContext ctx) .toFormatter(); dt = formatter.parseDateTime(string); } catch (IllegalArgumentException ex) { - throw new ParsingException(loc, "Invalid timestamp received; {}", ex.getMessage()); + throw new ParsingException(source, "Invalid timestamp received; {}", ex.getMessage()); } - return new Literal(loc, DateUtils.of(dt), DataType.DATE); + return new Literal(source, DateUtils.of(dt), DataType.DATE); } @Override public Literal visitGuidEscapedLiteral(GuidEscapedLiteralContext ctx) { String string = string(ctx.string()); - Location loc = source(ctx.string()); + Source source = source(ctx.string()); // basic validation String lowerCase = string.toLowerCase(Locale.ROOT); // needs to be format nnnnnnnn-nnnn-nnnn-nnnn-nnnnnnnnnnnn @@ -844,13 +844,13 @@ public Literal visitGuidEscapedLiteral(GuidEscapedLiteralContext ctx) { String errorPrefix = "Invalid GUID, "; if (lowerCase.length() != 36) { - throw new ParsingException(loc, "{}too {}", errorPrefix, lowerCase.length() > 36 ? "long" : "short"); + throw new ParsingException(source, "{}too {}", errorPrefix, lowerCase.length() > 36 ? 
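The escaped-literal visitors above ({d '...'}, {t '...'}, {ts '...'}) share one shape: parse with a Joda formatter and rewrap IllegalArgumentException as a ParsingException carrying the node's Source. The date case, standalone:

    import org.joda.time.DateTime;
    import org.joda.time.format.ISODateTimeFormat;

    // The same parsing step visitDateEscapedLiteral performs above.
    public class DateEscapeSketch {
        public static void main(String[] args) {
            DateTime dt = ISODateTimeFormat.date().parseDateTime("2019-01-23"); // yyyy-MM-dd
            System.out.println(dt);
            // A malformed literal such as "2019-23-01" throws
            // IllegalArgumentException, surfaced as "Invalid date received; ...".
        }
    }
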
"long" : "short"); } int[] separatorPos = { 8, 13, 18, 23 }; for (int pos : separatorPos) { if (lowerCase.charAt(pos) != '-') { - throw new ParsingException(loc, "{}expected group separator at offset [{}], found [{}]", + throw new ParsingException(source, "{}expected group separator at offset [{}], found [{}]", errorPrefix, pos, string.charAt(pos)); } } @@ -869,7 +869,7 @@ public Literal visitGuidEscapedLiteral(GuidEscapedLiteralContext ctx) { } } if (inspect && HEXA.indexOf(lowerCase.charAt(i)) < 0) { - throw new ParsingException(loc, "{}expected hexadecimal at offset[{}], found [{}]", errorPrefix, i, string.charAt(i)); + throw new ParsingException(source, "{}expected hexadecimal at offset[{}], found [{}]", errorPrefix, i, string.charAt(i)); } } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/IdentifierBuilder.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/IdentifierBuilder.java index 2c9e8e314ef72..37adb44a95557 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/IdentifierBuilder.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/IdentifierBuilder.java @@ -11,7 +11,7 @@ import org.elasticsearch.xpack.sql.parser.SqlBaseParser.QualifiedNameContext; import org.elasticsearch.xpack.sql.parser.SqlBaseParser.TableIdentifierContext; import org.elasticsearch.xpack.sql.plan.TableIdentifier; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; abstract class IdentifierBuilder extends AbstractBuilder { @@ -21,7 +21,7 @@ public TableIdentifier visitTableIdentifier(TableIdentifierContext ctx) { return null; } - Location source = source(ctx); + Source source = source(ctx); ParseTree tree = ctx.name != null ? ctx.name : ctx.TABLE_IDENTIFIER(); String index = tree.getText(); diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/LogicalPlanBuilder.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/LogicalPlanBuilder.java index 14654f7e50d10..e4cc65cf50500 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/LogicalPlanBuilder.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/LogicalPlanBuilder.java @@ -67,7 +67,7 @@ public LogicalPlan visitQuery(QueryContext ctx) { Map cteRelations = new LinkedHashMap<>(namedQueries.size()); for (SubQueryAlias namedQuery : namedQueries) { if (cteRelations.put(namedQuery.alias(), namedQuery) != null) { - throw new ParsingException(namedQuery.location(), "Duplicate alias {}", namedQuery.alias()); + throw new ParsingException(namedQuery.source(), "Duplicate alias {}", namedQuery.alias()); } } @@ -119,7 +119,7 @@ public LogicalPlan visitQuerySpecification(QuerySpecificationContext ctx) { // SELECT a, b, c ... if (!ctx.selectItem().isEmpty()) { selectTarget = expressions(ctx.selectItem()).stream() - .map(e -> (e instanceof NamedExpression) ? (NamedExpression) e : new UnresolvedAlias(e.location(), e)) + .map(e -> (e instanceof NamedExpression) ? 
(NamedExpression) e : new UnresolvedAlias(e.source(), e)) .collect(toList()); } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/ParsingException.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/ParsingException.java index efcce00ab6647..a4a2a8df94089 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/ParsingException.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/ParsingException.java @@ -7,9 +7,9 @@ import org.elasticsearch.rest.RestStatus; import org.elasticsearch.xpack.sql.ClientSqlException; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; -import java.util.Locale; +import static org.elasticsearch.common.logging.LoggerMessageFormat.format; public class ParsingException extends ClientSqlException { private final int line; @@ -22,19 +22,19 @@ public ParsingException(String message, Exception cause, int line, int charPosit } ParsingException(String message, Object... args) { - this(Location.EMPTY, message, args); + this(Source.EMPTY, message, args); } - public ParsingException(Location nodeLocation, String message, Object... args) { + public ParsingException(Source source, String message, Object... args) { super(message, args); - this.line = nodeLocation.getLineNumber(); - this.charPositionInLine = nodeLocation.getColumnNumber(); + this.line = source.source().getLineNumber(); + this.charPositionInLine = source.source().getColumnNumber(); } - public ParsingException(Exception cause, Location nodeLocation, String message, Object... args) { + public ParsingException(Exception cause, Source source, String message, Object... args) { super(cause, message, args); - this.line = nodeLocation.getLineNumber(); - this.charPositionInLine = nodeLocation.getColumnNumber(); + this.line = source.source().getLineNumber(); + this.charPositionInLine = source.source().getColumnNumber(); } public int getLineNumber() { @@ -56,6 +56,6 @@ public RestStatus status() { @Override public String getMessage() { - return String.format(Locale.ROOT, "line %s:%s: %s", getLineNumber(), getColumnNumber(), getErrorMessage()); + return format("line {}:{}: {}", getLineNumber(), getColumnNumber(), getErrorMessage()); } } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlParser.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlParser.java index 232989695c385..e414abef8dae6 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlParser.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlParser.java @@ -6,6 +6,7 @@ package org.elasticsearch.xpack.sql.parser; import com.carrotsearch.hppc.ObjectShortHashMap; + import org.antlr.v4.runtime.BaseErrorListener; import org.antlr.v4.runtime.CharStream; import org.antlr.v4.runtime.CommonToken; diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/QueryPlan.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/QueryPlan.java index e56aa7819fc22..35b85da028341 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/QueryPlan.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/QueryPlan.java @@ -8,7 +8,7 @@ import org.elasticsearch.xpack.sql.expression.Attribute; import org.elasticsearch.xpack.sql.expression.AttributeSet; import org.elasticsearch.xpack.sql.expression.Expression; -import org.elasticsearch.xpack.sql.tree.Location; 
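ParsingException.getMessage() above switches from String.format to Elasticsearch's LoggerMessageFormat, so the final message uses the same {} placeholder style as the templates handed to the constructor. For this particular message the two are interchangeable:

    import java.util.Locale;

    import static org.elasticsearch.common.logging.LoggerMessageFormat.format;

    // Sketch showing the old and new formatting produce the same string here.
    public class MessageFormatSketch {
        public static void main(String[] args) {
            String before = String.format(Locale.ROOT, "line %s:%s: %s", 1, 8, "Unknown operator");
            String after = format("line {}:{}: {}", 1, 8, "Unknown operator");
            System.out.println(before.equals(after)); // true
        }
    }
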
+import org.elasticsearch.xpack.sql.tree.Source; import org.elasticsearch.xpack.sql.tree.Node; import org.elasticsearch.xpack.sql.type.DataType; @@ -28,8 +28,8 @@ public abstract class QueryPlan<PlanType extends QueryPlan<PlanType>> extends No private AttributeSet lazyInputSet; - public QueryPlan(Location location, List<PlanType> children) { - super(location, children); + public QueryPlan(Source source, List<PlanType> children) { + super(source, children); } public abstract List<Attribute> output(); @@ -41,7 +41,7 @@ public AttributeSet outputSet() { return lazyOutputSet; } - public AttributeSet intputSet() { + public AttributeSet inputSet() { if (lazyInputSet == null) { List<Attribute> attrs = new ArrayList<>(); for (PlanType child : children()) { diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/TableIdentifier.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/TableIdentifier.java index 99913fc1272d7..3cb9b52fd8a9a 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/TableIdentifier.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/TableIdentifier.java @@ -5,19 +5,19 @@ */ package org.elasticsearch.xpack.sql.plan; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; import java.util.Objects; public class TableIdentifier { - private final Location location; + private final Source source; private final String cluster; private final String index; - public TableIdentifier(Location location, String catalog, String index) { - this.location = location; + public TableIdentifier(Source source, String catalog, String index) { + this.source = source; this.cluster = catalog; this.index = index; } @@ -49,8 +49,8 @@ public boolean equals(Object obj) { return Objects.equals(index, other.index) && Objects.equals(cluster, other.cluster); } - public Location location() { - return location; + public Source source() { + return source; } public String qualifiedIndex() { diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/logical/Aggregate.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/logical/Aggregate.java index b588bff38657e..35d93e3a68c8c 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/logical/Aggregate.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/logical/Aggregate.java @@ -10,7 +10,7 @@ import org.elasticsearch.xpack.sql.expression.Expression; import org.elasticsearch.xpack.sql.expression.Expressions; import org.elasticsearch.xpack.sql.expression.NamedExpression; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; import org.elasticsearch.xpack.sql.tree.NodeInfo; import java.util.List; @@ -21,8 +21,8 @@ public class Aggregate extends UnaryPlan { private final List<Expression> groupings; private final List<? extends NamedExpression> aggregates; - public Aggregate(Location location, LogicalPlan child, List<Expression> groupings, List<? extends NamedExpression> aggregates) { - super(location, child); + public Aggregate(Source source, LogicalPlan child, List<Expression> groupings, List<? extends NamedExpression> aggregates) { + super(source, child); this.groupings = groupings; this.aggregates = aggregates; } @@ -34,7 +34,7 @@ protected NodeInfo<Aggregate> info() { @Override protected Aggregate replaceChild(LogicalPlan newChild) { - return new Aggregate(location(), newChild, groupings, aggregates); + return new Aggregate(source(), newChild, groupings, aggregates); } public List<Expression> groupings() { diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/logical/BinaryPlan.java
b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/logical/BinaryPlan.java index daee3a97ee8d7..8763336d3c671 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/logical/BinaryPlan.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/logical/BinaryPlan.java @@ -8,14 +8,14 @@ import java.util.Arrays; import java.util.Objects; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; abstract class BinaryPlan extends LogicalPlan { private final LogicalPlan left, right; - BinaryPlan(Location location, LogicalPlan left, LogicalPlan right) { - super(location, Arrays.asList(left, right)); + BinaryPlan(Source source, LogicalPlan left, LogicalPlan right) { + super(source, Arrays.asList(left, right)); this.left = left; this.right = right; } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/logical/Distinct.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/logical/Distinct.java index 63759f944129d..92282045f8a6c 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/logical/Distinct.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/logical/Distinct.java @@ -5,13 +5,13 @@ */ package org.elasticsearch.xpack.sql.plan.logical; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; import org.elasticsearch.xpack.sql.tree.NodeInfo; public class Distinct extends UnaryPlan { - public Distinct(Location location, LogicalPlan child) { - super(location, child); + public Distinct(Source source, LogicalPlan child) { + super(source, child); } @Override @@ -21,7 +21,7 @@ protected NodeInfo info() { @Override protected Distinct replaceChild(LogicalPlan newChild) { - return new Distinct(location(), newChild); + return new Distinct(source(), newChild); } @Override diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/logical/EsRelation.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/logical/EsRelation.java index 73a953854465b..a90fb751c5e70 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/logical/EsRelation.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/logical/EsRelation.java @@ -8,8 +8,8 @@ import org.elasticsearch.xpack.sql.analysis.index.EsIndex; import org.elasticsearch.xpack.sql.expression.Attribute; import org.elasticsearch.xpack.sql.expression.FieldAttribute; -import org.elasticsearch.xpack.sql.tree.Location; import org.elasticsearch.xpack.sql.tree.NodeInfo; +import org.elasticsearch.xpack.sql.tree.Source; import org.elasticsearch.xpack.sql.type.EsField; import java.util.ArrayList; @@ -25,10 +25,10 @@ public class EsRelation extends LeafPlan { private final EsIndex index; private final List attrs; - public EsRelation(Location location, EsIndex index) { - super(location); + public EsRelation(Source source, EsIndex index) { + super(source); this.index = index; - attrs = flatten(location, index.mapping()); + attrs = flatten(source, index.mapping()); } @Override @@ -36,11 +36,11 @@ protected NodeInfo info() { return NodeInfo.create(this, EsRelation::new, index); } - private static List flatten(Location location, Map mapping) { - return flatten(location, mapping, null); + private static List flatten(Source source, Map mapping) { + return flatten(source, mapping, null); } - private static List flatten(Location location, Map mapping, FieldAttribute parent) { + 
private static List flatten(Source source, Map mapping, FieldAttribute parent) { List list = new ArrayList<>(); for (Entry entry : mapping.entrySet()) { @@ -48,11 +48,11 @@ private static List flatten(Location location, Map m EsField t = entry.getValue(); if (t != null) { - FieldAttribute f = new FieldAttribute(location, parent, parent != null ? parent.name() + "." + name : name, t); + FieldAttribute f = new FieldAttribute(source, parent, parent != null ? parent.name() + "." + name : name, t); list.add(f); // object or nested if (t.getProperties().isEmpty() == false) { - list.addAll(flatten(location, t.getProperties(), f)); + list.addAll(flatten(source, t.getProperties(), f)); } } } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/logical/Filter.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/logical/Filter.java index 3a7dcdd991947..beeb97c563f0a 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/logical/Filter.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/logical/Filter.java @@ -8,7 +8,7 @@ import java.util.Objects; import org.elasticsearch.xpack.sql.expression.Expression; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; import org.elasticsearch.xpack.sql.tree.NodeInfo; /** @@ -20,8 +20,8 @@ public class Filter extends UnaryPlan { private final Expression condition; - public Filter(Location location, LogicalPlan child, Expression condition) { - super(location, child); + public Filter(Source source, LogicalPlan child, Expression condition) { + super(source, child); this.condition = condition; } @@ -32,7 +32,7 @@ protected NodeInfo info() { @Override protected Filter replaceChild(LogicalPlan newChild) { - return new Filter(location(), newChild, condition); + return new Filter(source(), newChild, condition); } public Expression condition() { diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/logical/Join.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/logical/Join.java index 74083a408b86c..f09b43ec41ddb 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/logical/Join.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/logical/Join.java @@ -5,17 +5,17 @@ */ package org.elasticsearch.xpack.sql.plan.logical; -import java.util.List; -import java.util.Objects; - import org.elasticsearch.xpack.sql.expression.Attribute; import org.elasticsearch.xpack.sql.expression.Expression; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.expression.Nullability; import org.elasticsearch.xpack.sql.tree.NodeInfo; +import org.elasticsearch.xpack.sql.tree.Source; import org.elasticsearch.xpack.sql.type.DataType; -import static java.util.stream.Collectors.toList; +import java.util.List; +import java.util.Objects; +import static java.util.stream.Collectors.toList; import static org.elasticsearch.xpack.sql.util.CollectionUtils.combine; public class Join extends BinaryPlan { @@ -31,8 +31,8 @@ public enum JoinType { IMPLICIT, } - public Join(Location location, LogicalPlan left, LogicalPlan right, JoinType type, Expression condition) { - super(location, left, right); + public Join(Source source, LogicalPlan left, LogicalPlan right, JoinType type, Expression condition) { + super(source, left, right); this.type = type; this.condition = condition; } @@ -47,7 +47,7 @@ public LogicalPlan replaceChildren(List newChildren) { if 
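EsRelation#flatten above turns a possibly nested index mapping into a flat list of attributes with dotted names, recursing into object and nested fields. The recursion is independent of the ES types, so it can be shown with a bare stand-in for EsField:

    import java.util.ArrayList;
    import java.util.LinkedHashMap;
    import java.util.List;
    import java.util.Map;

    // Standalone sketch of the flatten() recursion above; Field is a
    // hypothetical stand-in for EsField carrying only sub-properties.
    public class FlattenSketch {
        static final class Field {
            final Map<String, Field> properties = new LinkedHashMap<>();
        }

        static List<String> flatten(Map<String, Field> mapping, String parent) {
            List<String> out = new ArrayList<>();
            for (Map.Entry<String, Field> e : mapping.entrySet()) {
                String name = parent == null ? e.getKey() : parent + "." + e.getKey();
                out.add(name);
                if (e.getValue().properties.isEmpty() == false) {   // object or nested
                    out.addAll(flatten(e.getValue().properties, name));
                }
            }
            return out;
        }

        public static void main(String[] args) {
            Field address = new Field();
            address.properties.put("city", new Field());
            Map<String, Field> mapping = new LinkedHashMap<>();
            mapping.put("name", new Field());
            mapping.put("address", address);
            System.out.println(flatten(mapping, null)); // [name, address, address.city]
        }
    }
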
(newChildren.size() != 2) { throw new IllegalArgumentException("expected [2] children but received [" + newChildren.size() + "]"); } - return new Join(location(), newChildren.get(0), newChildren.get(1), type, condition); + return new Join(source(), newChildren.get(0), newChildren.get(1), type, condition); } public JoinType type() { @@ -78,7 +78,7 @@ public List output() { private static List makeNullable(List output) { return output.stream() - .map(a -> a.withNullability(true)) + .map(a -> a.withNullability(Nullability.TRUE)) .collect(toList()); } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/logical/LeafPlan.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/logical/LeafPlan.java index a68eeb53e1d60..9612d8786e3f7 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/logical/LeafPlan.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/logical/LeafPlan.java @@ -8,12 +8,12 @@ import java.util.Collections; import java.util.List; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; abstract class LeafPlan extends LogicalPlan { - protected LeafPlan(Location location) { - super(location, Collections.emptyList()); + protected LeafPlan(Source source) { + super(source, Collections.emptyList()); } @Override diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/logical/Limit.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/logical/Limit.java index ef194c4eae379..85735a071e7d0 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/logical/Limit.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/logical/Limit.java @@ -8,15 +8,15 @@ import java.util.Objects; import org.elasticsearch.xpack.sql.expression.Expression; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; import org.elasticsearch.xpack.sql.tree.NodeInfo; public class Limit extends UnaryPlan { private final Expression limit; - public Limit(Location location, Expression limit, LogicalPlan child) { - super(location, child); + public Limit(Source source, Expression limit, LogicalPlan child) { + super(source, child); this.limit = limit; } @@ -27,7 +27,7 @@ protected NodeInfo info() { @Override protected Limit replaceChild(LogicalPlan newChild) { - return new Limit(location(), limit, newChild); + return new Limit(source(), limit, newChild); } public Expression limit() { diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/logical/LocalRelation.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/logical/LocalRelation.java index cf6b4933787e7..53a485a3b0542 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/logical/LocalRelation.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/logical/LocalRelation.java @@ -10,7 +10,7 @@ import org.elasticsearch.xpack.sql.session.Executable; import org.elasticsearch.xpack.sql.session.SchemaRowSet; import org.elasticsearch.xpack.sql.session.SqlSession; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; import org.elasticsearch.xpack.sql.tree.NodeInfo; import java.util.List; @@ -22,8 +22,8 @@ public class LocalRelation extends LogicalPlan implements Executable { private final Executable executable; - public LocalRelation(Location location, Executable executable) { - super(location, 
emptyList()); + public LocalRelation(Source source, Executable executable) { + super(source, emptyList()); this.executable = executable; } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/logical/LogicalPlan.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/logical/LogicalPlan.java index c5960c113de24..79614242012f7 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/logical/LogicalPlan.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/logical/LogicalPlan.java @@ -8,7 +8,7 @@ import org.elasticsearch.xpack.sql.capabilities.Resolvable; import org.elasticsearch.xpack.sql.capabilities.Resolvables; import org.elasticsearch.xpack.sql.plan.QueryPlan; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; import java.util.List; @@ -32,8 +32,8 @@ public enum Stage { private Boolean lazyChildrenResolved = null; private Boolean lazyResolved = null; - public LogicalPlan(Location location, List children) { - super(location, children); + public LogicalPlan(Source source, List children) { + super(source, children); } public boolean preAnalyzed() { diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/logical/OrderBy.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/logical/OrderBy.java index 8800dcaae656b..a3e3d9576b0d5 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/logical/OrderBy.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/logical/OrderBy.java @@ -10,15 +10,15 @@ import org.elasticsearch.xpack.sql.capabilities.Resolvables; import org.elasticsearch.xpack.sql.expression.Order; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; import org.elasticsearch.xpack.sql.tree.NodeInfo; public class OrderBy extends UnaryPlan { private final List order; - public OrderBy(Location location, LogicalPlan child, List order) { - super(location, child); + public OrderBy(Source source, LogicalPlan child, List order) { + super(source, child); this.order = order; } @@ -29,7 +29,7 @@ protected NodeInfo info() { @Override protected OrderBy replaceChild(LogicalPlan newChild) { - return new OrderBy(location(), newChild, order); + return new OrderBy(source(), newChild, order); } public List order() { diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/logical/Project.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/logical/Project.java index 4e15b2843a511..e57b2cdce4657 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/logical/Project.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/logical/Project.java @@ -13,7 +13,7 @@ import org.elasticsearch.xpack.sql.expression.Expressions; import org.elasticsearch.xpack.sql.expression.NamedExpression; import org.elasticsearch.xpack.sql.expression.function.Functions; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; import org.elasticsearch.xpack.sql.tree.NodeInfo; /** @@ -23,8 +23,8 @@ public class Project extends UnaryPlan { private final List projections; - public Project(Location location, LogicalPlan child, List projections) { - super(location, child); + public Project(Source source, LogicalPlan child, List projections) { + super(source, child); this.projections = projections; } @@ -35,7 +35,7 @@ protected NodeInfo info() { @Override 
protected Project replaceChild(LogicalPlan newChild) { - return new Project(location(), newChild, projections); + return new Project(source(), newChild, projections); } public List projections() { diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/logical/SubQueryAlias.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/logical/SubQueryAlias.java index b068d09febf5b..980cd0a849a52 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/logical/SubQueryAlias.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/logical/SubQueryAlias.java @@ -6,7 +6,7 @@ package org.elasticsearch.xpack.sql.plan.logical; import org.elasticsearch.xpack.sql.expression.Attribute; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; import org.elasticsearch.xpack.sql.tree.NodeInfo; import java.util.List; @@ -18,8 +18,8 @@ public class SubQueryAlias extends UnaryPlan { private final String alias; - public SubQueryAlias(Location location, LogicalPlan child, String alias) { - super(location, child); + public SubQueryAlias(Source source, LogicalPlan child, String alias) { + super(source, child); this.alias = alias; } @@ -30,7 +30,7 @@ protected NodeInfo info() { @Override protected SubQueryAlias replaceChild(LogicalPlan newChild) { - return new SubQueryAlias(location(), newChild, alias); + return new SubQueryAlias(source(), newChild, alias); } public String alias() { diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/logical/UnaryPlan.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/logical/UnaryPlan.java index 637e2594e5345..f81a48d64fd5b 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/logical/UnaryPlan.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/logical/UnaryPlan.java @@ -10,7 +10,7 @@ import java.util.Objects; import org.elasticsearch.xpack.sql.expression.Attribute; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; /** * A {@code UnaryPlan} is a {@code LogicalPlan} with exactly one child, for example, {@code WHERE x} in a @@ -20,8 +20,8 @@ public abstract class UnaryPlan extends LogicalPlan { private final LogicalPlan child; - UnaryPlan(Location location, LogicalPlan child) { - super(location, Collections.singletonList(child)); + UnaryPlan(Source source, LogicalPlan child) { + super(source, Collections.singletonList(child)); this.child = child; } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/logical/UnresolvedRelation.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/logical/UnresolvedRelation.java index 472503af4b98f..2613b05b97129 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/logical/UnresolvedRelation.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/logical/UnresolvedRelation.java @@ -8,7 +8,7 @@ import org.elasticsearch.xpack.sql.capabilities.Unresolvable; import org.elasticsearch.xpack.sql.expression.Attribute; import org.elasticsearch.xpack.sql.plan.TableIdentifier; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; import org.elasticsearch.xpack.sql.tree.NodeInfo; import java.util.Collections; @@ -21,12 +21,12 @@ public class UnresolvedRelation extends LeafPlan implements Unresolvable { private final String alias; private final String unresolvedMsg; - 
public UnresolvedRelation(Location location, TableIdentifier table, String alias) { - this(location, table, alias, null); + public UnresolvedRelation(Source source, TableIdentifier table, String alias) { + this(source, table, alias, null); } - public UnresolvedRelation(Location location, TableIdentifier table, String alias, String unresolvedMessage) { - super(location); + public UnresolvedRelation(Source source, TableIdentifier table, String alias, String unresolvedMessage) { + super(source); this.table = table; this.alias = alias; this.unresolvedMsg = unresolvedMessage == null ? "Unknown index [" + table.index() + "]" : unresolvedMessage; @@ -67,7 +67,7 @@ public String unresolvedMessage() { @Override public int hashCode() { - return Objects.hash(location(), table, alias, unresolvedMsg); + return Objects.hash(source(), table, alias, unresolvedMsg); } @Override @@ -81,7 +81,7 @@ public boolean equals(Object obj) { } UnresolvedRelation other = (UnresolvedRelation) obj; - return location().equals(other.location()) + return source().equals(other.source()) && table.equals(other.table) && Objects.equals(alias, other.alias) && unresolvedMsg.equals(other.unresolvedMsg); diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/logical/With.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/logical/With.java index 7c76ba4e9ba0a..6c30aab11b994 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/logical/With.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/logical/With.java @@ -8,14 +8,14 @@ import java.util.Map; import java.util.Objects; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; import org.elasticsearch.xpack.sql.tree.NodeInfo; public class With extends UnaryPlan { private final Map subQueries; - public With(Location location, LogicalPlan child, Map subQueries) { - super(location, child); + public With(Source source, LogicalPlan child, Map subQueries) { + super(source, child); this.subQueries = subQueries; } @@ -26,7 +26,7 @@ protected NodeInfo info() { @Override protected With replaceChild(LogicalPlan newChild) { - return new With(location(), newChild, subQueries); + return new With(source(), newChild, subQueries); } public Map subQueries() { diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/logical/command/Command.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/logical/command/Command.java index aec44a9c6fbaf..72ae456a33b77 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/logical/command/Command.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/logical/command/Command.java @@ -8,7 +8,7 @@ import org.elasticsearch.xpack.sql.expression.FieldAttribute; import org.elasticsearch.xpack.sql.plan.logical.LogicalPlan; import org.elasticsearch.xpack.sql.session.Executable; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; import org.elasticsearch.xpack.sql.type.DataType; import org.elasticsearch.xpack.sql.type.EsField; import org.elasticsearch.xpack.sql.type.KeywordEsField; @@ -20,8 +20,8 @@ public abstract class Command extends LogicalPlan implements Executable { - protected Command(Location location) { - super(location, emptyList()); + protected Command(Source source) { + super(source, emptyList()); } @Override @@ -49,6 +49,6 @@ protected final FieldAttribute field(String name, DataType type) { } 
private FieldAttribute field(String name, EsField field) { - return new FieldAttribute(location(), name, field); + return new FieldAttribute(source(), name, field); } } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/logical/command/Debug.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/logical/command/Debug.java index 403165d50870a..f4aa378bbce75 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/logical/command/Debug.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/logical/command/Debug.java @@ -15,7 +15,7 @@ import org.elasticsearch.xpack.sql.session.Rows; import org.elasticsearch.xpack.sql.session.SchemaRowSet; import org.elasticsearch.xpack.sql.session.SqlSession; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; import org.elasticsearch.xpack.sql.tree.Node; import org.elasticsearch.xpack.sql.tree.NodeInfo; import org.elasticsearch.xpack.sql.tree.NodeUtils; @@ -45,8 +45,8 @@ public enum Format { private final Format format; private final Type type; - public Debug(Location location, LogicalPlan plan, Type type, Format format) { - super(location); + public Debug(Source source, LogicalPlan plan, Type type, Format format) { + super(source); this.plan = plan; this.format = format == null ? Format.TEXT : format; this.type = type == null ? Type.OPTIMIZED : type; @@ -71,7 +71,7 @@ public Type type() { @Override public List output() { - return singletonList(new FieldAttribute(location(), "plan", new KeywordEsField("plan"))); + return singletonList(new FieldAttribute(source(), "plan", new KeywordEsField("plan"))); } @Override diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/logical/command/Explain.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/logical/command/Explain.java index 2fda8732d57f9..c6904a87f3f4a 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/logical/command/Explain.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/logical/command/Explain.java @@ -16,7 +16,7 @@ import org.elasticsearch.xpack.sql.session.Rows; import org.elasticsearch.xpack.sql.session.SchemaRowSet; import org.elasticsearch.xpack.sql.session.SqlSession; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; import org.elasticsearch.xpack.sql.tree.NodeInfo; import org.elasticsearch.xpack.sql.type.KeywordEsField; import org.elasticsearch.xpack.sql.util.Graphviz; @@ -50,8 +50,8 @@ public enum Format { private final Format format; private final Type type; - public Explain(Location location, LogicalPlan plan, Type type, Format format, boolean verify) { - super(location); + public Explain(Source source, LogicalPlan plan, Type type, Format format, boolean verify) { + super(source); this.plan = plan; this.verify = verify; this.format = format == null ? 
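// Side note on Debug/Explain (illustration, not diff content): both are commands
// whose entire result set is a single keyword column named "plan". Format selects
// the rendering and defaults to TEXT; GRAPHVIZ emits dot output via the Graphviz
// util imported above. Roughly:
//
//   EXPLAIN (PLAN OPTIMIZED FORMAT TEXT) SELECT * FROM emp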
Format.TEXT : format; @@ -81,7 +81,7 @@ public Type type() { @Override public List output() { - return singletonList(new FieldAttribute(location(), "plan", new KeywordEsField("plan"))); + return singletonList(new FieldAttribute(source(), "plan", new KeywordEsField("plan"))); } @Override diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/logical/command/ShowColumns.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/logical/command/ShowColumns.java index 24f55b1d8eb54..8325d3a01e754 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/logical/command/ShowColumns.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/logical/command/ShowColumns.java @@ -12,8 +12,8 @@ import org.elasticsearch.xpack.sql.session.Rows; import org.elasticsearch.xpack.sql.session.SchemaRowSet; import org.elasticsearch.xpack.sql.session.SqlSession; -import org.elasticsearch.xpack.sql.tree.Location; import org.elasticsearch.xpack.sql.tree.NodeInfo; +import org.elasticsearch.xpack.sql.tree.Source; import org.elasticsearch.xpack.sql.type.DataType; import org.elasticsearch.xpack.sql.type.EsField; import org.elasticsearch.xpack.sql.type.KeywordEsField; @@ -32,8 +32,8 @@ public class ShowColumns extends Command { private final String index; private final LikePattern pattern; - public ShowColumns(Location location, String index, LikePattern pattern) { - super(location); + public ShowColumns(Source source, String index, LikePattern pattern) { + super(source); this.index = index; this.pattern = pattern; } @@ -53,9 +53,9 @@ protected NodeInfo info() { @Override public List output() { - return asList(new FieldAttribute(location(), "column", new KeywordEsField("column")), - new FieldAttribute(location(), "type", new KeywordEsField("type")), - new FieldAttribute(location(), "mapping", new KeywordEsField("mapping"))); + return asList(new FieldAttribute(source(), "column", new KeywordEsField("column")), + new FieldAttribute(source(), "type", new KeywordEsField("type")), + new FieldAttribute(source(), "mapping", new KeywordEsField("mapping"))); } @Override @@ -80,6 +80,7 @@ private void fillInRows(Map mapping, String prefix, List output() { - return asList(new FieldAttribute(location(), "name", new KeywordEsField("name")), - new FieldAttribute(location(), "type", new KeywordEsField("type"))); + return asList(new FieldAttribute(source(), "name", new KeywordEsField("name")), + new FieldAttribute(source(), "type", new KeywordEsField("type"))); } @Override diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/logical/command/ShowSchemas.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/logical/command/ShowSchemas.java index 8a1c8ad0807c7..9d684b3ca456c 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/logical/command/ShowSchemas.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/logical/command/ShowSchemas.java @@ -11,7 +11,7 @@ import org.elasticsearch.xpack.sql.session.Rows; import org.elasticsearch.xpack.sql.session.SchemaRowSet; import org.elasticsearch.xpack.sql.session.SqlSession; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; import org.elasticsearch.xpack.sql.tree.NodeInfo; import org.elasticsearch.xpack.sql.type.KeywordEsField; @@ -21,8 +21,8 @@ public class ShowSchemas extends Command { - public ShowSchemas(Location location) { - super(location); + public ShowSchemas(Source source) { 
+ super(source); } @Override @@ -32,7 +32,7 @@ protected NodeInfo info() { @Override public List output() { - return singletonList(new FieldAttribute(location(), "schema", new KeywordEsField("schema"))); + return singletonList(new FieldAttribute(source(), "schema", new KeywordEsField("schema"))); } @Override diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/logical/command/ShowTables.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/logical/command/ShowTables.java index 7f6c0c355e37d..ae4196fe4cf65 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/logical/command/ShowTables.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/logical/command/ShowTables.java @@ -11,7 +11,7 @@ import org.elasticsearch.xpack.sql.session.Rows; import org.elasticsearch.xpack.sql.session.SchemaRowSet; import org.elasticsearch.xpack.sql.session.SqlSession; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; import org.elasticsearch.xpack.sql.tree.NodeInfo; import java.util.List; @@ -25,8 +25,8 @@ public class ShowTables extends Command { private final String index; private final LikePattern pattern; - public ShowTables(Location location, String index, LikePattern pattern) { - super(location); + public ShowTables(Source source, String index, LikePattern pattern) { + super(source); this.index = index; this.pattern = pattern; } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/logical/command/sys/SysCatalogs.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/logical/command/sys/SysCatalogs.java index 0aa2af9c202f1..a98b1bdc1b7a7 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/logical/command/sys/SysCatalogs.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/logical/command/sys/SysCatalogs.java @@ -11,7 +11,7 @@ import org.elasticsearch.xpack.sql.session.Rows; import org.elasticsearch.xpack.sql.session.SchemaRowSet; import org.elasticsearch.xpack.sql.session.SqlSession; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; import org.elasticsearch.xpack.sql.tree.NodeInfo; import java.util.List; @@ -24,8 +24,8 @@ */ public class SysCatalogs extends Command { - public SysCatalogs(Location location) { - super(location); + public SysCatalogs(Source source) { + super(source); } @Override diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/logical/command/sys/SysColumns.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/logical/command/sys/SysColumns.java index 073dea1f7dae9..76b58babe2832 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/logical/command/sys/SysColumns.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/logical/command/sys/SysColumns.java @@ -15,7 +15,7 @@ import org.elasticsearch.xpack.sql.session.Rows; import org.elasticsearch.xpack.sql.session.SchemaRowSet; import org.elasticsearch.xpack.sql.session.SqlSession; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; import org.elasticsearch.xpack.sql.tree.NodeInfo; import org.elasticsearch.xpack.sql.type.DataType; import org.elasticsearch.xpack.sql.type.DataTypes; @@ -44,8 +44,8 @@ public class SysColumns extends Command { private final LikePattern pattern; private final LikePattern columnPattern; - public 
SysColumns(Location location, String catalog, String index, LikePattern pattern, LikePattern columnPattern) { - super(location); + public SysColumns(Source source, String catalog, String index, LikePattern pattern, LikePattern columnPattern) { + super(source); this.catalog = catalog; this.index = index; this.pattern = pattern; diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/logical/command/sys/SysTableTypes.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/logical/command/sys/SysTableTypes.java index 9d7c73b2b8369..fef6171b95d22 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/logical/command/sys/SysTableTypes.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/logical/command/sys/SysTableTypes.java @@ -12,7 +12,7 @@ import org.elasticsearch.xpack.sql.session.Rows; import org.elasticsearch.xpack.sql.session.SchemaRowSet; import org.elasticsearch.xpack.sql.session.SqlSession; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; import org.elasticsearch.xpack.sql.tree.NodeInfo; import java.util.Comparator; @@ -27,8 +27,8 @@ */ public class SysTableTypes extends Command { - public SysTableTypes(Location location) { - super(location); + public SysTableTypes(Source source) { + super(source); } @Override diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/logical/command/sys/SysTables.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/logical/command/sys/SysTables.java index 58b739cc00fbf..5ce1e6dcc8a70 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/logical/command/sys/SysTables.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/logical/command/sys/SysTables.java @@ -14,7 +14,7 @@ import org.elasticsearch.xpack.sql.session.Rows; import org.elasticsearch.xpack.sql.session.SchemaRowSet; import org.elasticsearch.xpack.sql.session.SqlSession; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; import org.elasticsearch.xpack.sql.tree.NodeInfo; import org.elasticsearch.xpack.sql.util.CollectionUtils; @@ -39,9 +39,9 @@ public class SysTables extends Command { // flag indicating whether tables are reported as `TABLE` or `BASE TABLE` private final boolean legacyTableTypes; - public SysTables(Location location, LikePattern clusterPattern, String index, LikePattern pattern, EnumSet types, + public SysTables(Source source, LikePattern clusterPattern, String index, LikePattern pattern, EnumSet types, boolean legacyTableTypes) { - super(location); + super(source); this.clusterPattern = clusterPattern; this.index = index; this.pattern = pattern; diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/logical/command/sys/SysTypes.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/logical/command/sys/SysTypes.java index 65eb752d2fff0..58352027815f7 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/logical/command/sys/SysTypes.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/logical/command/sys/SysTypes.java @@ -11,7 +11,7 @@ import org.elasticsearch.xpack.sql.session.Rows; import org.elasticsearch.xpack.sql.session.SchemaRowSet; import org.elasticsearch.xpack.sql.session.SqlSession; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; import 
org.elasticsearch.xpack.sql.tree.NodeInfo; import org.elasticsearch.xpack.sql.type.DataType; import org.elasticsearch.xpack.sql.type.DataTypes; @@ -37,8 +37,8 @@ public class SysTypes extends Command { private final Integer type; - public SysTypes(Location location, int type) { - super(location); + public SysTypes(Source source, int type) { + super(source); this.type = Integer.valueOf(type); } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/physical/AggregateExec.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/physical/AggregateExec.java index 6814633c7e304..763744f1abab0 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/physical/AggregateExec.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/physical/AggregateExec.java @@ -9,7 +9,7 @@ import org.elasticsearch.xpack.sql.expression.Expression; import org.elasticsearch.xpack.sql.expression.Expressions; import org.elasticsearch.xpack.sql.expression.NamedExpression; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; import org.elasticsearch.xpack.sql.tree.NodeInfo; import java.util.List; import java.util.Objects; @@ -19,9 +19,9 @@ public class AggregateExec extends UnaryExec implements Unexecutable { private final List groupings; private final List aggregates; - public AggregateExec(Location location, PhysicalPlan child, + public AggregateExec(Source source, PhysicalPlan child, List groupings, List aggregates) { - super(location, child); + super(source, child); this.groupings = groupings; this.aggregates = aggregates; } @@ -33,7 +33,7 @@ protected NodeInfo info() { @Override protected AggregateExec replaceChild(PhysicalPlan newChild) { - return new AggregateExec(location(), newChild, groupings, aggregates); + return new AggregateExec(source(), newChild, groupings, aggregates); } public List groupings() { diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/physical/BinaryExec.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/physical/BinaryExec.java index e3b78001ed674..0510e840eea1d 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/physical/BinaryExec.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/physical/BinaryExec.java @@ -9,14 +9,14 @@ import java.util.List; import java.util.Objects; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; abstract class BinaryExec extends PhysicalPlan { private final PhysicalPlan left, right; - protected BinaryExec(Location location, PhysicalPlan left, PhysicalPlan right) { - super(location, Arrays.asList(left, right)); + protected BinaryExec(Source source, PhysicalPlan left, PhysicalPlan right) { + super(source, Arrays.asList(left, right)); this.left = left; this.right = right; } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/physical/CommandExec.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/physical/CommandExec.java index 3d392eee8810e..f1fccb7e2c49d 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/physical/CommandExec.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/physical/CommandExec.java @@ -10,7 +10,7 @@ import org.elasticsearch.xpack.sql.plan.logical.command.Command; import org.elasticsearch.xpack.sql.session.SchemaRowSet; import org.elasticsearch.xpack.sql.session.SqlSession; -import 
org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; import org.elasticsearch.xpack.sql.tree.NodeInfo; import java.util.List; @@ -20,8 +20,8 @@ public class CommandExec extends LeafExec { private final Command command; - public CommandExec(Location location, Command command) { - super(location); + public CommandExec(Source source, Command command) { + super(source); this.command = command; } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/physical/EsQueryExec.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/physical/EsQueryExec.java index 8b71992d4f05e..2d16f6e0cf128 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/physical/EsQueryExec.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/physical/EsQueryExec.java @@ -12,7 +12,7 @@ import org.elasticsearch.xpack.sql.session.Rows; import org.elasticsearch.xpack.sql.session.SchemaRowSet; import org.elasticsearch.xpack.sql.session.SqlSession; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; import org.elasticsearch.xpack.sql.tree.NodeInfo; import java.util.List; @@ -25,8 +25,8 @@ public class EsQueryExec extends LeafExec { private final QueryContainer queryContainer; - public EsQueryExec(Location location, String index, List output, QueryContainer queryContainer) { - super(location); + public EsQueryExec(Source source, String index, List output, QueryContainer queryContainer) { + super(source); this.index = index; this.output = output; this.queryContainer = queryContainer; @@ -38,7 +38,7 @@ protected NodeInfo info() { } public EsQueryExec with(QueryContainer queryContainer) { - return new EsQueryExec(location(), index, output, queryContainer); + return new EsQueryExec(source(), index, output, queryContainer); } public String index() { diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/physical/FilterExec.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/physical/FilterExec.java index 6c2f8523b3fd9..f8ea6eac37c84 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/physical/FilterExec.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/physical/FilterExec.java @@ -10,7 +10,7 @@ import org.elasticsearch.xpack.sql.expression.Attribute; import org.elasticsearch.xpack.sql.expression.Expression; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; import org.elasticsearch.xpack.sql.tree.NodeInfo; public class FilterExec extends UnaryExec implements Unexecutable { @@ -20,12 +20,12 @@ public class FilterExec extends UnaryExec implements Unexecutable { // gets setup automatically and then copied over during cloning private final boolean isHaving; - public FilterExec(Location location, PhysicalPlan child, Expression condition) { - this(location, child, condition, child instanceof AggregateExec); + public FilterExec(Source source, PhysicalPlan child, Expression condition) { + this(source, child, condition, child instanceof AggregateExec); } - public FilterExec(Location location, PhysicalPlan child, Expression condition, boolean isHaving) { - super(location, child); + public FilterExec(Source source, PhysicalPlan child, Expression condition, boolean isHaving) { + super(source, child); this.condition = condition; this.isHaving = isHaving; } @@ -37,7 +37,7 @@ protected NodeInfo info() { @Override protected FilterExec 
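// Aside on FilterExec.isHaving above (illustration): a HAVING clause is just a
// filter that runs over aggregated rows, so the two-argument constructor infers
// the flag from the child's shape:
//
//   ... GROUP BY gender HAVING COUNT(*) > 10
//       -> FilterExec over an AggregateExec, isHaving == true
//   ... WHERE age > 30
//       -> FilterExec over a non-aggregate child, isHaving == false
//
// QueryFolder later uses the flag to fold the condition into either the ES query
// or an agg-level filter.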
replaceChild(PhysicalPlan newChild) { - return new FilterExec(location(), newChild, condition, isHaving); + return new FilterExec(source(), newChild, condition, isHaving); } public Expression condition() { diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/physical/LeafExec.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/physical/LeafExec.java index eec10b307c062..515915820d82f 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/physical/LeafExec.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/physical/LeafExec.java @@ -8,11 +8,11 @@ import java.util.Collections; import java.util.List; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; abstract class LeafExec extends PhysicalPlan { - LeafExec(Location location) { - super(location, Collections.emptyList()); + LeafExec(Source source) { + super(source, Collections.emptyList()); } @Override diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/physical/LimitExec.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/physical/LimitExec.java index 1d4d5a24221f4..25795296d5ae0 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/physical/LimitExec.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/physical/LimitExec.java @@ -8,15 +8,15 @@ import java.util.Objects; import org.elasticsearch.xpack.sql.expression.Expression; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; import org.elasticsearch.xpack.sql.tree.NodeInfo; public class LimitExec extends UnaryExec implements Unexecutable { private final Expression limit; - public LimitExec(Location location, PhysicalPlan child, Expression limit) { - super(location, child); + public LimitExec(Source source, PhysicalPlan child, Expression limit) { + super(source, child); this.limit = limit; } @@ -27,7 +27,7 @@ protected NodeInfo info() { @Override protected LimitExec replaceChild(PhysicalPlan newChild) { - return new LimitExec(location(), newChild, limit); + return new LimitExec(source(), newChild, limit); } public Expression limit() { diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/physical/LocalExec.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/physical/LocalExec.java index 287d006b5e6c8..cce19411465ef 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/physical/LocalExec.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/physical/LocalExec.java @@ -11,7 +11,7 @@ import org.elasticsearch.xpack.sql.session.Executable; import org.elasticsearch.xpack.sql.session.SchemaRowSet; import org.elasticsearch.xpack.sql.session.SqlSession; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; import org.elasticsearch.xpack.sql.tree.NodeInfo; import java.util.List; @@ -21,8 +21,8 @@ public class LocalExec extends LeafExec { private final Executable executable; - public LocalExec(Location location, Executable executable) { - super(location); + public LocalExec(Source source, Executable executable) { + super(source); this.executable = executable; } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/physical/OrderExec.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/physical/OrderExec.java index db790e0c44964..5311ce21b9c31 
100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/physical/OrderExec.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/physical/OrderExec.java @@ -9,15 +9,15 @@ import java.util.Objects; import org.elasticsearch.xpack.sql.expression.Order; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; import org.elasticsearch.xpack.sql.tree.NodeInfo; public class OrderExec extends UnaryExec implements Unexecutable { private final List order; - public OrderExec(Location location, PhysicalPlan child, List order) { - super(location, child); + public OrderExec(Source source, PhysicalPlan child, List order) { + super(source, child); this.order = order; } @@ -28,7 +28,7 @@ protected NodeInfo info() { @Override protected OrderExec replaceChild(PhysicalPlan newChild) { - return new OrderExec(location(), newChild, order); + return new OrderExec(source(), newChild, order); } public List order() { diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/physical/PhysicalPlan.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/physical/PhysicalPlan.java index 749a494c9d836..e1cef71d36520 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/physical/PhysicalPlan.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/physical/PhysicalPlan.java @@ -10,7 +10,7 @@ import org.elasticsearch.xpack.sql.plan.QueryPlan; import org.elasticsearch.xpack.sql.session.Executable; import org.elasticsearch.xpack.sql.session.Rows; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; import org.elasticsearch.xpack.sql.type.Schema; /** @@ -23,8 +23,8 @@ public abstract class PhysicalPlan extends QueryPlan implements Ex private Schema lazySchema; - public PhysicalPlan(Location location, List children) { - super(location, children); + public PhysicalPlan(Source source, List children) { + super(source, children); } public Schema schema() { diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/physical/ProjectExec.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/physical/ProjectExec.java index 411e6c6a20c2f..bb498e0261224 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/physical/ProjectExec.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/physical/ProjectExec.java @@ -11,15 +11,15 @@ import org.elasticsearch.xpack.sql.expression.Attribute; import org.elasticsearch.xpack.sql.expression.Expressions; import org.elasticsearch.xpack.sql.expression.NamedExpression; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; import org.elasticsearch.xpack.sql.tree.NodeInfo; public class ProjectExec extends UnaryExec implements Unexecutable { private final List projections; - public ProjectExec(Location location, PhysicalPlan child, List projections) { - super(location, child); + public ProjectExec(Source source, PhysicalPlan child, List projections) { + super(source, child); this.projections = projections; } @@ -30,7 +30,7 @@ protected NodeInfo info() { @Override protected ProjectExec replaceChild(PhysicalPlan newChild) { - return new ProjectExec(location(), newChild, projections); + return new ProjectExec(source(), newChild, projections); } public List projections() { diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/physical/UnaryExec.java 
b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/physical/UnaryExec.java index 942a60b2cd8d0..b057c38f16e41 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/physical/UnaryExec.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/physical/UnaryExec.java @@ -10,14 +10,14 @@ import java.util.Objects; import org.elasticsearch.xpack.sql.expression.Attribute; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; abstract class UnaryExec extends PhysicalPlan { private final PhysicalPlan child; - UnaryExec(Location location, PhysicalPlan child) { - super(location, Collections.singletonList(child)); + UnaryExec(Source source, PhysicalPlan child) { + super(source, Collections.singletonList(child)); this.child = child; } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/physical/UnplannedExec.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/physical/UnplannedExec.java index a2a2055da6c23..b8c909fc1c374 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/physical/UnplannedExec.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/physical/UnplannedExec.java @@ -10,15 +10,15 @@ import org.elasticsearch.xpack.sql.expression.Attribute; import org.elasticsearch.xpack.sql.plan.logical.LogicalPlan; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; import org.elasticsearch.xpack.sql.tree.NodeInfo; public class UnplannedExec extends LeafExec implements Unexecutable { private final LogicalPlan plan; - public UnplannedExec(Location location, LogicalPlan plan) { - super(location); + public UnplannedExec(Source source, LogicalPlan plan) { + super(source); this.plan = plan; } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/planner/FoldingException.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/planner/FoldingException.java index 9ad380e3153ad..7cdd26d540432 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/planner/FoldingException.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/planner/FoldingException.java @@ -10,7 +10,7 @@ import org.elasticsearch.xpack.sql.tree.Location; import org.elasticsearch.xpack.sql.tree.Node; -import java.util.Locale; +import static org.elasticsearch.common.logging.LoggerMessageFormat.format; public class FoldingException extends ClientSqlException { @@ -21,8 +21,8 @@ public FoldingException(Node source, String message, Object... 
args) { super(message, args); Location loc = Location.EMPTY; - if (source != null && source.location() != null) { - loc = source.location(); + if (source != null && source.source() != null) { + loc = source.source().source(); } this.line = loc.getLineNumber(); this.column = loc.getColumnNumber(); @@ -32,8 +32,8 @@ public FoldingException(Node source, String message, Throwable cause) { super(message, cause); Location loc = Location.EMPTY; - if (source != null && source.location() != null) { - loc = source.location(); + if (source != null && source.source() != null) { + loc = source.source().source(); } this.line = loc.getLineNumber(); this.column = loc.getColumnNumber(); @@ -54,6 +54,6 @@ public RestStatus status() { @Override public String getMessage() { - return String.format(Locale.ROOT, "line %s:%s: %s", getLineNumber(), getColumnNumber(), super.getMessage()); + return format("line {}:{}: {}", getLineNumber(), getColumnNumber(), super.getMessage()); } } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/planner/Mapper.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/planner/Mapper.java index f27cec678094e..92a60b4ee5576 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/planner/Mapper.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/planner/Mapper.java @@ -52,7 +52,7 @@ protected Iterable.Batch> batches() { } private static PhysicalPlan planLater(LogicalPlan plan) { - return new UnplannedExec(plan.location(), plan); + return new UnplannedExec(plan.source(), plan); } private static class SimpleExecMapper extends MapExecRule { @@ -60,43 +60,43 @@ private static class SimpleExecMapper extends MapExecRule { @Override protected PhysicalPlan map(LogicalPlan p) { if (p instanceof Command) { - return new CommandExec(p.location(), (Command) p); + return new CommandExec(p.source(), (Command) p); } if (p instanceof LocalRelation) { - return new LocalExec(p.location(), ((LocalRelation) p).executable()); + return new LocalExec(p.source(), ((LocalRelation) p).executable()); } if (p instanceof Project) { Project pj = (Project) p; - return new ProjectExec(p.location(), map(pj.child()), pj.projections()); + return new ProjectExec(p.source(), map(pj.child()), pj.projections()); } if (p instanceof Filter) { Filter fl = (Filter) p; - return new FilterExec(p.location(), map(fl.child()), fl.condition()); + return new FilterExec(p.source(), map(fl.child()), fl.condition()); } if (p instanceof OrderBy) { OrderBy o = (OrderBy) p; - return new OrderExec(p.location(), map(o.child()), o.order()); + return new OrderExec(p.source(), map(o.child()), o.order()); } if (p instanceof Aggregate) { Aggregate a = (Aggregate) p; // analysis and optimizations have converted the grouping into actual attributes - return new AggregateExec(p.location(), map(a.child()), a.groupings(), a.aggregates()); + return new AggregateExec(p.source(), map(a.child()), a.groupings(), a.aggregates()); } if (p instanceof EsRelation) { EsRelation c = (EsRelation) p; List output = c.output(); - return new EsQueryExec(p.location(), c.index().name(), output, new QueryContainer()); + return new EsQueryExec(p.source(), c.index().name(), output, new QueryContainer()); } if (p instanceof Limit) { Limit l = (Limit) p; - return new LimitExec(p.location(), map(l.child()), l.limit()); + return new LimitExec(p.source(), map(l.child()), l.limit()); } // TODO: Translate With in a subplan if (p instanceof With) { diff --git 
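Two details in the FoldingException hunk are easy to misread. First, source.source().source() is not a typo: the outer source is the Node argument, Node.source() now returns a Source, and Source.source() returns the old Location carrying line/column (the PlanningException change below does the same unwrapping). Second, getMessage() moves from String.format to LoggerMessageFormat.format, which takes {} placeholders and needs no Locale. A sketch (variable names assumed):

// Node -> Source -> Location, as unwrapped above
Location loc = node.source().source();
int line = loc.getLineNumber();
int column = loc.getColumnNumber();

// slf4j-style {} placeholders instead of %s, hence no more Locale.ROOT
String message = format("line {}:{}: {}", line, column, detail);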
a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/planner/PlanningException.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/planner/PlanningException.java index bbfda467d93f0..8b5f282973c18 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/planner/PlanningException.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/planner/PlanningException.java @@ -30,7 +30,7 @@ public RestStatus status() { private static String extractMessage(Collection failures) { return failures.stream() .map(f -> { - Location l = f.source().location(); + Location l = f.source().source().source(); return "line " + l.getLineNumber() + ":" + l.getColumnNumber() + ": " + f.message(); }) .collect(Collectors.joining("\n", "Found " + failures.size() + " problem(s)\n", "")); diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/planner/QueryFolder.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/planner/QueryFolder.java index 96c267b3ba6fd..46380a9de2afd 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/planner/QueryFolder.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/planner/QueryFolder.java @@ -13,6 +13,7 @@ import org.elasticsearch.xpack.sql.expression.Expression; import org.elasticsearch.xpack.sql.expression.Expressions; import org.elasticsearch.xpack.sql.expression.Foldables; +import org.elasticsearch.xpack.sql.expression.Literal; import org.elasticsearch.xpack.sql.expression.NamedExpression; import org.elasticsearch.xpack.sql.expression.Order; import org.elasticsearch.xpack.sql.expression.function.Function; @@ -146,7 +147,7 @@ protected PhysicalPlan rule(ProjectExec project) { QueryContainer clone = new QueryContainer(queryC.query(), queryC.aggs(), queryC.columns(), aliases, queryC.pseudoFunctions(), processors, queryC.sort(), queryC.limit()); - return new EsQueryExec(exec.location(), exec.index(), project.output(), clone); + return new EsQueryExec(exec.source(), exec.index(), project.output(), clone); } return project; } @@ -164,7 +165,7 @@ protected PhysicalPlan rule(FilterExec plan) { Query query = null; if (qContainer.query() != null || qt.query != null) { - query = and(plan.location(), qContainer.query(), qt.query); + query = and(plan.source(), qContainer.query(), qt.query); } Aggs aggs = addPipelineAggs(qContainer, qt, plan); @@ -293,7 +294,7 @@ protected PhysicalPlan rule(AggregateExec a) { action = ((UnaryPipe) p).action(); zi = ((DateTimeFunction) exp).zoneId(); } - return new AggPathInput(exp.location(), exp, new GroupByRef(matchingGroup.id(), null, zi), action); + return new AggPathInput(exp.source(), exp, new GroupByRef(matchingGroup.id(), null, zi), action); } } // or found an aggregate expression (which has to work on an attribute used for grouping) @@ -369,7 +370,7 @@ else if (child instanceof GroupingFunction) { newAliases.putAll(aliases); queryC = queryC.withAliases(newAliases); } - return new EsQueryExec(exec.location(), exec.index(), a.output(), queryC); + return new EsQueryExec(exec.source(), exec.index(), a.output(), queryC); } return a; } @@ -380,7 +381,8 @@ private Tuple addAggFunction(GroupByKey groupingAg // handle count as a special case agg if (f instanceof Count) { Count c = (Count) f; - if (!c.distinct()) { + // COUNT(*) or COUNT() + if (c.field() instanceof Literal) { AggRef ref = groupingAgg == null ? 
GlobalCountRef.INSTANCE : new GroupByRef(groupingAgg.id(), Property.COUNT, null); @@ -388,7 +390,14 @@ private Tuple addAggFunction(GroupByKey groupingAg Map pseudoFunctions = new LinkedHashMap<>(queryC.pseudoFunctions()); pseudoFunctions.put(functionId, groupingAgg); return new Tuple<>(queryC.withPseudoFunctions(pseudoFunctions), new AggPathInput(f, ref)); + // COUNT() + } else if (!c.distinct()) { + LeafAgg leafAgg = toAgg(functionId, f); + AggPathInput a = new AggPathInput(f, new MetricAggRef(leafAgg.id(), "doc_count", "_count")); + queryC = queryC.with(queryC.aggs().addAgg(leafAgg)); + return new Tuple<>(queryC, a); } + // the only variant left - COUNT(DISTINCT) - will be covered by the else branch below } AggPathInput aggInput = null; @@ -530,7 +539,7 @@ protected PhysicalPlan rule(PhysicalPlan plan) { PhysicalPlan p = plan.children().get(0); if (p instanceof LocalExec) { if (((LocalExec) p).isEmpty()) { - return new LocalExec(plan.location(), new EmptyExecutable(plan.output())); + return new LocalExec(plan.source(), new EmptyExecutable(plan.output())); } else { throw new SqlIllegalArgumentException("Encountered a bug; {} is a LocalExec but is not empty", p); } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/planner/QueryTranslator.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/planner/QueryTranslator.java index 4f071ee50f4f1..e0472f27131f0 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/planner/QueryTranslator.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/planner/QueryTranslator.java @@ -62,6 +62,7 @@ import org.elasticsearch.xpack.sql.querydsl.agg.AvgAgg; import org.elasticsearch.xpack.sql.querydsl.agg.CardinalityAgg; import org.elasticsearch.xpack.sql.querydsl.agg.ExtendedStatsAgg; +import org.elasticsearch.xpack.sql.querydsl.agg.FilterExistsAgg; import org.elasticsearch.xpack.sql.querydsl.agg.GroupByDateHistogram; import org.elasticsearch.xpack.sql.querydsl.agg.GroupByKey; import org.elasticsearch.xpack.sql.querydsl.agg.GroupByNumericHistogram; @@ -89,7 +90,7 @@ import org.elasticsearch.xpack.sql.querydsl.query.TermQuery; import org.elasticsearch.xpack.sql.querydsl.query.TermsQuery; import org.elasticsearch.xpack.sql.querydsl.query.WildcardQuery; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; import org.elasticsearch.xpack.sql.type.DataType; import org.elasticsearch.xpack.sql.util.Check; import org.elasticsearch.xpack.sql.util.ReflectionUtils; @@ -135,7 +136,7 @@ private QueryTranslator(){} new MatrixStatsAggs(), new PercentilesAggs(), new PercentileRanksAggs(), - new DistinctCounts(), + new CountAggs(), new DateTimes() ); @@ -318,7 +319,7 @@ else if (exp instanceof GroupingFunction) { return new GroupingContext(aggMap); } - static QueryTranslation and(Location loc, QueryTranslation left, QueryTranslation right) { + static QueryTranslation and(Source source, QueryTranslation left, QueryTranslation right) { Check.isTrue(left != null || right != null, "Both expressions are null"); if (left == null) { return right; @@ -329,7 +330,7 @@ static QueryTranslation and(Location loc, QueryTranslation left, QueryTranslatio Query newQ = null; if (left.query != null || right.query != null) { - newQ = and(loc, left.query, right.query); + newQ = and(source, left.query, right.query); } AggFilter aggFilter; @@ -347,7 +348,7 @@ else if (right.aggFilter == null) { return new QueryTranslation(newQ, aggFilter); } - static Query and(Location loc, Query left, 
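// Reading aid for the Count special-casing above and the CountAggs translator
// further down (a summary, not diff content): three COUNT shapes now translate
// differently.
//
//   COUNT(*) / COUNT(literal)  -> no agg at all: a pseudo-function resolved from
//                                 the enclosing bucket's own doc count
//                                 (GlobalCountRef, or GroupByRef with Property.COUNT)
//   COUNT(field)               -> FilterExistsAgg, a filter agg wrapping an exists
//                                 query, read back via MetricAggRef(id, "doc_count",
//                                 "_count"), so only documents that actually have
//                                 the field are counted
//   COUNT(DISTINCT field)      -> CardinalityAgg (approximate distinct count)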
Query right) { + static Query and(Source source, Query left, Query right) { Check.isTrue(left != null || right != null, "Both expressions are null"); if (left == null) { return right; @@ -355,10 +356,10 @@ static Query and(Location loc, Query left, Query right) { if (right == null) { return left; } - return new BoolQuery(loc, true, left, right); + return new BoolQuery(source, true, left, right); } - static QueryTranslation or(Location loc, QueryTranslation left, QueryTranslation right) { + static QueryTranslation or(Source source, QueryTranslation left, QueryTranslation right) { Check.isTrue(left != null || right != null, "Both expressions are null"); if (left == null) { return right; @@ -369,7 +370,7 @@ static QueryTranslation or(Location loc, QueryTranslation left, QueryTranslation Query newQ = null; if (left.query != null || right.query != null) { - newQ = or(loc, left.query, right.query); + newQ = or(source, left.query, right.query); } AggFilter aggFilter = null; @@ -387,7 +388,7 @@ else if (right.aggFilter == null) { return new QueryTranslation(newQ, aggFilter); } - static Query or(Location loc, Query left, Query right) { + static Query or(Source source, Query left, Query right) { Check.isTrue(left != null || right != null, "Both expressions are null"); if (left == null) { @@ -396,7 +397,7 @@ static Query or(Location loc, Query left, Query right) { if (right == null) { return left; } - return new BoolQuery(loc, false, left, right); + return new BoolQuery(source, false, left, right); } static String nameOf(Expression e) { @@ -429,7 +430,13 @@ static String dateFormat(Expression e) { static String field(AggregateFunction af) { Expression arg = af.field(); if (arg instanceof FieldAttribute) { - return ((FieldAttribute) arg).name(); + FieldAttribute field = (FieldAttribute) arg; + // COUNT(DISTINCT) uses cardinality aggregation which works on exact values (not changed by analyzers or normalizers) + if (af instanceof Count && ((Count) af).distinct()) { + // use the `keyword` version of the field, if there is one + return field.isInexact() ? 
field.exactAttribute().name() : field.name(); + } + return field.name(); } if (arg instanceof Literal) { return String.valueOf(((Literal) arg).value()); @@ -460,20 +467,20 @@ protected QueryTranslation asQuery(RegexMatch e, boolean onAggs) { if (e instanceof Like) { LikePattern p = ((Like) e).pattern(); if (inexact) { - q = new QueryStringQuery(e.location(), p.asLuceneWildcard(), target); + q = new QueryStringQuery(e.source(), p.asLuceneWildcard(), target); } else { - q = new WildcardQuery(e.location(), nameOf(e.field()), p.asLuceneWildcard()); + q = new WildcardQuery(e.source(), nameOf(e.field()), p.asLuceneWildcard()); } } if (e instanceof RLike) { String pattern = ((RLike) e).pattern(); if (inexact) { - q = new QueryStringQuery(e.location(), "/" + pattern + "/", target); + q = new QueryStringQuery(e.source(), "/" + pattern + "/", target); } else { - q = new RegexQuery(e.location(), nameOf(e.field()), pattern); + q = new RegexQuery(e.source(), nameOf(e.field()), pattern); } } @@ -485,7 +492,7 @@ static class StringQueries extends ExpressionTranslator { @Override protected QueryTranslation asQuery(StringQueryPredicate q, boolean onAggs) { - return new QueryTranslation(new QueryStringQuery(q.location(), q.query(), q.fields(), q)); + return new QueryTranslation(new QueryStringQuery(q.source(), q.query(), q.fields(), q)); } } @@ -493,7 +500,7 @@ static class Matches extends ExpressionTranslator { @Override protected QueryTranslation asQuery(MatchQueryPredicate q, boolean onAggs) { - return new QueryTranslation(wrapIfNested(new MatchQuery(q.location(), nameOf(q.field()), q.query(), q), q.field())); + return new QueryTranslation(wrapIfNested(new MatchQuery(q.source(), nameOf(q.field()), q.query(), q), q.field())); } } @@ -501,7 +508,7 @@ static class MultiMatches extends ExpressionTranslator @Override protected QueryTranslation asQuery(MultiMatchQueryPredicate q, boolean onAggs) { - return new QueryTranslation(new MultiMatchQuery(q.location(), q.query(), q.fields(), q)); + return new QueryTranslation(new MultiMatchQuery(q.source(), q.query(), q.fields(), q)); } } @@ -510,10 +517,10 @@ static class BinaryLogic extends ExpressionTranslator qu); @@ -582,9 +589,9 @@ protected QueryTranslation asQuery(IsNull isNull, boolean onAggs) { } else { Query q = null; if (isNull.field() instanceof FieldAttribute) { - q = new NotQuery(isNull.location(), new ExistsQuery(isNull.location(), nameOf(isNull.field()))); + q = new NotQuery(isNull.source(), new ExistsQuery(isNull.source(), nameOf(isNull.field()))); } else { - q = new ScriptQuery(isNull.location(), isNull.asScript()); + q = new ScriptQuery(isNull.source(), isNull.asScript()); } final Query qu = q; @@ -602,7 +609,7 @@ static class BinaryComparisons extends ExpressionTranslator { protected QueryTranslation asQuery(BinaryComparison bc, boolean onAggs) { Check.isTrue(bc.right().foldable(), "Line {}:{}: Comparisons against variables are not (currently) supported; offender [{}] in [{}]", - bc.right().location().getLineNumber(), bc.right().location().getColumnNumber(), + bc.right().sourceLocation().getLineNumber(), bc.right().sourceLocation().getColumnNumber(), Expressions.name(bc.right()), bc.symbol()); if (bc.left() instanceof NamedExpression) { @@ -632,22 +639,22 @@ protected QueryTranslation asQuery(BinaryComparison bc, boolean onAggs) { } private static Query translateQuery(BinaryComparison bc) { - Location loc = bc.location(); + Source source = bc.source(); String name = nameOf(bc.left()); Object value = valueOf(bc.right()); String format = 
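// Aside on the field(AggregateFunction) change above (mapping names assumed):
// cardinality works on exact values, so COUNT(DISTINCT) must not target analyzed
// text. Given a mapping of  name: text  with a sub-field  name.keyword: keyword,
//
//   SELECT COUNT(DISTINCT name) FROM emp
//
// now aggregates on "name.keyword" (field.exactAttribute()) instead of "name",
// where the agg would otherwise fail or see analyzed tokens rather than the
// original values.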
dateFormat(bc.left()); if (bc instanceof GreaterThan) { - return new RangeQuery(loc, name, value, false, null, false, format); + return new RangeQuery(source, name, value, false, null, false, format); } if (bc instanceof GreaterThanOrEqual) { - return new RangeQuery(loc, name, value, true, null, false, format); + return new RangeQuery(source, name, value, true, null, false, format); } if (bc instanceof LessThan) { - return new RangeQuery(loc, name, null, false, value, false, format); + return new RangeQuery(source, name, null, false, value, false, format); } if (bc instanceof LessThanOrEqual) { - return new RangeQuery(loc, name, null, false, value, true, format); + return new RangeQuery(source, name, null, false, value, true, format); } if (bc instanceof Equals || bc instanceof NullEquals || bc instanceof NotEquals) { if (bc.left() instanceof FieldAttribute) { @@ -658,9 +665,9 @@ private static Query translateQuery(BinaryComparison bc) { name = fa.exactAttribute().name(); } } - Query query = new TermQuery(loc, name, value); + Query query = new TermQuery(source, name, value); if (bc instanceof NotEquals) { - query = new NotQuery(loc, query); + query = new NotQuery(source, query); } return query; } @@ -680,8 +687,8 @@ protected QueryTranslation asQuery(In in, boolean onAggs) { if (firstNotFoldable.isPresent()) { throw new SqlIllegalArgumentException( "Line {}:{}: Comparisons against variables are not (currently) supported; offender [{}] in [{}]", - firstNotFoldable.get().location().getLineNumber(), - firstNotFoldable.get().location().getColumnNumber(), + firstNotFoldable.get().sourceLocation().getLineNumber(), + firstNotFoldable.get().sourceLocation().getColumnNumber(), Expressions.name(firstNotFoldable.get()), in.name()); } @@ -702,9 +709,9 @@ protected QueryTranslation asQuery(In in, boolean onAggs) { else { Query q = null; if (in.value() instanceof FieldAttribute) { - q = new TermsQuery(in.location(), ne.name(), in.list()); + q = new TermsQuery(in.source(), ne.name(), in.list()); } else { - q = new ScriptQuery(in.location(), in.asScript()); + q = new ScriptQuery(in.source(), in.asScript()); } Query qu = q; query = handleQuery(in, ne, () -> qu); @@ -739,7 +746,7 @@ protected QueryTranslation asQuery(Range r, boolean onAggs) { aggFilter = new AggFilter(at.id().toString(), r.asScript()); } else { query = handleQuery(r, r.value(), - () -> new RangeQuery(r.location(), nameOf(r.value()), valueOf(r.lower()), r.includeLower(), + () -> new RangeQuery(r.source(), nameOf(r.value()), valueOf(r.lower()), r.includeLower(), valueOf(r.upper()), r.includeUpper(), dateFormat(r.value()))); } return new QueryTranslation(query, aggFilter); @@ -761,7 +768,7 @@ protected QueryTranslation asQuery(ScalarFunction f, boolean onAggs) { if (onAggs) { aggFilter = new AggFilter(f.id().toString(), script); } else { - query = handleQuery(f, f, () -> new ScriptQuery(f.location(), script)); + query = handleQuery(f, f, () -> new ScriptQuery(f.source(), script)); } return new QueryTranslation(query, aggFilter); @@ -772,15 +779,16 @@ protected QueryTranslation asQuery(ScalarFunction f, boolean onAggs) { // // Agg translators // - - static class DistinctCounts extends SingleValueAggTranslator { + + static class CountAggs extends SingleValueAggTranslator { @Override protected LeafAgg toAgg(String id, Count c) { - if (!c.distinct()) { - return null; + if (c.distinct()) { + return new CardinalityAgg(id, field(c)); + } else { + return new FilterExistsAgg(id, field(c)); } - return new CardinalityAgg(id, field(c)); } } @@ -914,14 
+922,14 @@ protected static Query handleQuery(ScalarFunction sf, Expression field, Supplier if (field instanceof FieldAttribute) { return wrapIfNested(q, field); } - return new ScriptQuery(sf.location(), sf.asScript()); + return new ScriptQuery(sf.source(), sf.asScript()); } protected static Query wrapIfNested(Query query, Expression exp) { if (exp instanceof FieldAttribute) { FieldAttribute fa = (FieldAttribute) exp; if (fa.isNested()) { - return new NestedQuery(fa.location(), fa.nestedParent().name(), query); + return new NestedQuery(fa.source(), fa.nestedParent().name(), query); } } return query; diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/agg/CardinalityAgg.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/agg/CardinalityAgg.java index f4fb20428c59f..847509125ba49 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/agg/CardinalityAgg.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/agg/CardinalityAgg.java @@ -15,7 +15,8 @@ public CardinalityAgg(String id, String fieldName) { super(id, fieldName); } - @Override AggregationBuilder toBuilder() { + @Override + AggregationBuilder toBuilder() { return cardinality(id()).field(fieldName()); } } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/agg/FilterExistsAgg.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/agg/FilterExistsAgg.java new file mode 100644 index 0000000000000..4c14fa6dad82a --- /dev/null +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/agg/FilterExistsAgg.java @@ -0,0 +1,26 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.sql.querydsl.agg; + +import org.elasticsearch.index.query.QueryBuilders; +import org.elasticsearch.search.aggregations.AggregationBuilder; + +import static org.elasticsearch.search.aggregations.AggregationBuilders.filter; + +/** + * Aggregation builder for a "filter" aggregation encapsulating an "exists" query. + */ +public class FilterExistsAgg extends LeafAgg { + + public FilterExistsAgg(String id, String fieldName) { + super(id, fieldName); + } + + @Override + AggregationBuilder toBuilder() { + return filter(id(), QueryBuilders.existsQuery(fieldName())); + } +} diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/container/QueryContainer.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/container/QueryContainer.java index 8901ca75c8f88..43b1045ff7092 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/container/QueryContainer.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/container/QueryContainer.java @@ -29,7 +29,7 @@ import org.elasticsearch.xpack.sql.querydsl.query.MatchAll; import org.elasticsearch.xpack.sql.querydsl.query.NestedQuery; import org.elasticsearch.xpack.sql.querydsl.query.Query; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; import org.elasticsearch.xpack.sql.type.DataType; import java.io.IOException; @@ -184,7 +184,7 @@ private Tuple nestedHitFieldRef(FieldAttribute String name = aliasName(attr); String format = attr.field().getDataType() == DataType.DATE ? 
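// What FilterExistsAgg.toBuilder() above produces, in query DSL terms (id and
// field name are placeholders):
//
//   "aggs": {
//     "<id>": { "filter": { "exists": { "field": "<field>" } } }
//   }
//
// The filter bucket's doc_count is the number of documents holding a value for
// the field: exactly SQL's COUNT(field), which excludes NULLs.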
"epoch_millis" : DocValueFieldsContext.USE_DEFAULT_FORMAT; - Query q = rewriteToContainNestedField(query, attr.location(), + Query q = rewriteToContainNestedField(query, attr.source(), attr.nestedParent().name(), name, format, attr.field().isAggregatable()); SearchHitFieldRef nestedFieldRef = new SearchHitFieldRef(name, attr.field().getDataType(), @@ -194,13 +194,13 @@ private Tuple nestedHitFieldRef(FieldAttribute return new Tuple<>(new QueryContainer(q, aggs, columns, aliases, pseudoFunctions, scalarFunctions, sort, limit), nestedFieldRef); } - static Query rewriteToContainNestedField(@Nullable Query query, Location location, String path, String name, String format, + static Query rewriteToContainNestedField(@Nullable Query query, Source source, String path, String name, String format, boolean hasDocValues) { if (query == null) { /* There is no query so we must add the nested query * ourselves to fetch the field. */ - return new NestedQuery(location, path, singletonMap(name, new AbstractMap.SimpleImmutableEntry<>(hasDocValues, format)), - new MatchAll(location)); + return new NestedQuery(source, path, singletonMap(name, new AbstractMap.SimpleImmutableEntry<>(hasDocValues, format)), + new MatchAll(source)); } if (query.containsNestedField(path, name)) { // The query already has the nested field. Nothing to do. @@ -216,9 +216,9 @@ static Query rewriteToContainNestedField(@Nullable Query query, Location locatio } /* There is no nested query with a matching path so we must * add the nested query ourselves just to fetch the field. */ - NestedQuery nested = new NestedQuery(location, path, - singletonMap(name, new AbstractMap.SimpleImmutableEntry<>(hasDocValues, format)), new MatchAll(location)); - return new BoolQuery(location, true, query, nested); + NestedQuery nested = new NestedQuery(source, path, + singletonMap(name, new AbstractMap.SimpleImmutableEntry<>(hasDocValues, format)), new MatchAll(source)); + return new BoolQuery(source, true, query, nested); } // replace function/operators's input with references diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/query/BoolQuery.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/query/BoolQuery.java index 8b342b34c77f1..6acdf2919dcc2 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/query/BoolQuery.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/query/BoolQuery.java @@ -8,7 +8,7 @@ import org.elasticsearch.index.query.BoolQueryBuilder; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.search.sort.NestedSortBuilder; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; import java.util.Objects; @@ -25,8 +25,8 @@ public class BoolQuery extends Query { private final Query left; private final Query right; - public BoolQuery(Location location, boolean isAnd, Query left, Query right) { - super(location); + public BoolQuery(Source source, boolean isAnd, Query left, Query right) { + super(source); if (left == null) { throw new IllegalArgumentException("left is required"); } @@ -50,7 +50,7 @@ public Query addNestedField(String path, String field, String format, boolean ha if (rewrittenLeft == left && rewrittenRight == right) { return this; } - return new BoolQuery(location(), isAnd, rewrittenLeft, rewrittenRight); + return new BoolQuery(source(), isAnd, rewrittenLeft, rewrittenRight); } @Override diff --git 
a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/query/ExistsQuery.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/query/ExistsQuery.java index aa0f39f1bec04..07aad6d260af3 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/query/ExistsQuery.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/query/ExistsQuery.java @@ -6,7 +6,7 @@ package org.elasticsearch.xpack.sql.querydsl.query; import org.elasticsearch.index.query.QueryBuilder; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; import static org.elasticsearch.index.query.QueryBuilders.existsQuery; @@ -14,8 +14,8 @@ public class ExistsQuery extends LeafQuery { private final String name; - public ExistsQuery(Location location, String name) { - super(location); + public ExistsQuery(Source source, String name) { + super(source); this.name = name; } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/query/LeafQuery.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/query/LeafQuery.java index 4c0a5b4dcd683..f1252d90fa42e 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/query/LeafQuery.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/query/LeafQuery.java @@ -6,11 +6,11 @@ package org.elasticsearch.xpack.sql.querydsl.query; import org.elasticsearch.search.sort.NestedSortBuilder; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; abstract class LeafQuery extends Query { - LeafQuery(Location location) { - super(location); + LeafQuery(Source source) { + super(source); } @Override diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/query/MatchAll.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/query/MatchAll.java index af2801694d033..260bb34b27a89 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/query/MatchAll.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/query/MatchAll.java @@ -6,13 +6,13 @@ package org.elasticsearch.xpack.sql.querydsl.query; import org.elasticsearch.index.query.QueryBuilder; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery; public class MatchAll extends LeafQuery { - public MatchAll(Location location) { - super(location); + public MatchAll(Source source) { + super(source); } @Override diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/query/MatchQuery.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/query/MatchQuery.java index 292a8b9a8e341..d0fe697268d41 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/query/MatchQuery.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/query/MatchQuery.java @@ -11,7 +11,7 @@ import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.xpack.sql.expression.predicate.fulltext.MatchQueryPredicate; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; import java.util.Collections; import java.util.HashMap; @@ -47,12 +47,12 @@ public class MatchQuery extends LeafQuery { private final Map options; - public 
MatchQuery(Location location, String name, Object text) { - this(location, name, text, null); + public MatchQuery(Source source, String name, Object text) { + this(source, name, text, null); } - public MatchQuery(Location location, String name, Object text, MatchQueryPredicate predicate) { - super(location); + public MatchQuery(Source source, String name, Object text, MatchQueryPredicate predicate) { + super(source); this.name = name; this.text = text; this.predicate = predicate; diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/query/MultiMatchQuery.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/query/MultiMatchQuery.java index 30def4db3dac4..4f0bc0720ae83 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/query/MultiMatchQuery.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/query/MultiMatchQuery.java @@ -11,7 +11,7 @@ import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.xpack.sql.expression.predicate.fulltext.MultiMatchQueryPredicate; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; import java.util.Collections; import java.util.HashMap; @@ -49,8 +49,8 @@ public class MultiMatchQuery extends LeafQuery { private final Map<String, String> options; private final MultiMatchQueryPredicate predicate; - public MultiMatchQuery(Location location, String query, Map<String, Float> fields, MultiMatchQueryPredicate predicate) { - super(location); + public MultiMatchQuery(Source source, String query, Map<String, Float> fields, MultiMatchQueryPredicate predicate) { + super(source); this.query = query; this.fields = fields; this.predicate = predicate; diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/query/NestedQuery.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/query/NestedQuery.java index 4224c2adc8af3..bb4310d3b9118 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/query/NestedQuery.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/query/NestedQuery.java @@ -20,7 +20,7 @@ import org.elasticsearch.search.fetch.subphase.FetchSourceContext; import org.elasticsearch.search.sort.NestedSortBuilder; import org.elasticsearch.xpack.sql.SqlIllegalArgumentException; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; import static java.util.Collections.emptyMap; import static java.util.Collections.singletonList; @@ -40,12 +40,12 @@ public class NestedQuery extends Query { private final Map<String, Map.Entry<Boolean, String>> fields; // field -> (useDocValues, format) private final Query child; - public NestedQuery(Location location, String path, Query child) { - this(location, path, emptyMap(), child); + public NestedQuery(Source source, String path, Query child) { + this(source, path, emptyMap(), child); } - public NestedQuery(Location location, String path, Map<String, Map.Entry<Boolean, String>> fields, Query child) { - super(location); + public NestedQuery(Source source, String path, Map<String, Map.Entry<Boolean, String>> fields, Query child) { + super(source); if (path == null) { throw new IllegalArgumentException("path is required"); } @@ -75,7 +75,7 @@ public Query addNestedField(String path, String field, String format, boolean ha if (rewrittenChild == child) { return this; } - return new NestedQuery(location(), path, fields, rewrittenChild); + return new NestedQuery(source(), path, fields, rewrittenChild); } if
(fields.containsKey(field)) { // I already have the field, no rewriting needed @@ -84,7 +84,7 @@ public Query addNestedField(String path, String field, String format, boolean ha Map<String, Map.Entry<Boolean, String>> newFields = new HashMap<>(fields.size() + 1); newFields.putAll(fields); newFields.put(field, new AbstractMap.SimpleImmutableEntry<>(hasDocValues, format)); - return new NestedQuery(location(), path, unmodifiableMap(newFields), child); + return new NestedQuery(source(), path, unmodifiableMap(newFields), child); } @Override diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/query/NotQuery.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/query/NotQuery.java index d44e62c230f53..3115ab26a07f8 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/query/NotQuery.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/query/NotQuery.java @@ -7,7 +7,7 @@ import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.search.sort.NestedSortBuilder; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; import java.util.Objects; @@ -16,8 +16,8 @@ public class NotQuery extends Query { private final Query child; - public NotQuery(Location location, Query child) { - super(location); + public NotQuery(Source source, Query child) { + super(source); if (child == null) { throw new IllegalArgumentException("child is required"); } @@ -39,7 +39,7 @@ public Query addNestedField(String path, String field, String format, boolean ha if (child == rewrittenChild) { return this; } - return new NotQuery(location(), child); + return new NotQuery(source(), child); } @Override diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/query/Query.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/query/Query.java index 7691ef42041c5..de7ff8e0f648c 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/query/Query.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/query/Query.java @@ -7,7 +7,7 @@ import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.search.sort.NestedSortBuilder; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; /** * Intermediate representation of queries that is rewritten to fetch @@ -15,20 +15,20 @@ * Elasticsearch {@link QueryBuilder}s. */ public abstract class Query { - private final Location location; + private final Source source; - Query(Location location) { - if (location == null) { + Query(Source source) { + if (source == null) { throw new IllegalArgumentException("location must be specified"); } - this.location = location; + this.source = source; } /** * Location in the source statement.
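The addNestedField implementations above (BoolQuery, NestedQuery, NotQuery) all share one copy-on-write convention: if rewriting the children changed nothing, return this so callers can detect no-ops by identity; otherwise rebuild the node, carrying the original source() forward so error reporting keeps pointing at the user's SQL. A condensed sketch of the pattern (not verbatim from the patch):

    @Override
    public Query addNestedField(String path, String field, String format, boolean hasDocValues) {
        Query rewrittenChild = child.addNestedField(path, field, format, hasDocValues);
        if (rewrittenChild == child) {
            return this;                                  // nothing changed: keep identity
        }
        return new NotQuery(source(), rewrittenChild);    // changed: copy, same source()
    }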
*/ - public Location location() { - return location; + public Source source() { + return source; } /** @@ -69,16 +69,16 @@ public boolean equals(Object obj) { return false; } Query other = (Query) obj; - return location.equals(other.location); + return source.equals(other.source); } @Override public int hashCode() { - return location.hashCode(); + return source.hashCode(); } @Override public String toString() { - return getClass().getSimpleName() + location + "[" + innerToString() + "]"; + return getClass().getSimpleName() + source + "[" + innerToString() + "]"; } } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/query/QueryStringQuery.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/query/QueryStringQuery.java index 195a1daff9c8a..de457ba918e7c 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/query/QueryStringQuery.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/query/QueryStringQuery.java @@ -13,7 +13,7 @@ import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.index.query.QueryStringQueryBuilder; import org.elasticsearch.xpack.sql.expression.predicate.fulltext.StringQueryPredicate; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; import java.util.Collections; import java.util.HashMap; @@ -61,12 +61,12 @@ public class QueryStringQuery extends LeafQuery { private final Map options; // dedicated constructor for QueryTranslator - public QueryStringQuery(Location location, String query, String fieldName) { - this(location, query, Collections.singletonMap(fieldName, Float.valueOf(1.0f)), null); + public QueryStringQuery(Source source, String query, String fieldName) { + this(source, query, Collections.singletonMap(fieldName, Float.valueOf(1.0f)), null); } - public QueryStringQuery(Location location, String query, Map fields, StringQueryPredicate predicate) { - super(location); + public QueryStringQuery(Source source, String query, Map fields, StringQueryPredicate predicate) { + super(source); this.query = query; this.fields = fields; this.predicate = predicate; diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/query/RangeQuery.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/query/RangeQuery.java index 4402222e8986e..3b7bc21bb0079 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/query/RangeQuery.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/query/RangeQuery.java @@ -10,7 +10,7 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.RangeQueryBuilder; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; import static org.elasticsearch.index.query.QueryBuilders.rangeQuery; @@ -21,13 +21,13 @@ public class RangeQuery extends LeafQuery { private final boolean includeLower, includeUpper; private final String format; - public RangeQuery(Location location, String field, Object lower, boolean includeLower, Object upper, boolean includeUpper) { - this(location, field, lower, includeLower, upper, includeUpper, null); + public RangeQuery(Source source, String field, Object lower, boolean includeLower, Object upper, boolean includeUpper) { + this(source, field, lower, includeLower, upper, includeUpper, null); } - public RangeQuery(Location location, String field, 
Object lower, boolean includeLower, Object upper, + public RangeQuery(Source source, String field, Object lower, boolean includeLower, Object upper, boolean includeUpper, String format) { - super(location); + super(source); this.field = field; this.lower = lower; this.upper = upper; diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/query/RegexQuery.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/query/RegexQuery.java index bf3364388a977..c8459791eb404 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/query/RegexQuery.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/query/RegexQuery.java @@ -8,7 +8,7 @@ import java.util.Objects; import org.elasticsearch.index.query.QueryBuilder; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; import static org.elasticsearch.index.query.QueryBuilders.regexpQuery; @@ -16,8 +16,8 @@ public class RegexQuery extends LeafQuery { private final String field, regex; - public RegexQuery(Location location, String field, String regex) { - super(location); + public RegexQuery(Source source, String field, String regex) { + super(source); this.field = field; this.regex = regex; } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/query/ScriptQuery.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/query/ScriptQuery.java index 6d9a1639f4f5d..396ea283951e0 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/query/ScriptQuery.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/query/ScriptQuery.java @@ -8,7 +8,7 @@ import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.xpack.sql.expression.gen.script.ScriptTemplate; import org.elasticsearch.xpack.sql.expression.gen.script.Scripts; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; import java.util.Objects; @@ -18,8 +18,8 @@ public class ScriptQuery extends LeafQuery { private final ScriptTemplate script; - public ScriptQuery(Location location, ScriptTemplate script) { - super(location); + public ScriptQuery(Source source, ScriptTemplate script) { + super(source); // make script null safe this.script = Scripts.nullSafeFilter(script); } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/query/TermQuery.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/query/TermQuery.java index af14272dff5b1..5d488338b8642 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/query/TermQuery.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/query/TermQuery.java @@ -8,7 +8,7 @@ import java.util.Objects; import org.elasticsearch.index.query.QueryBuilder; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; import static org.elasticsearch.index.query.QueryBuilders.termQuery; @@ -17,8 +17,8 @@ public class TermQuery extends LeafQuery { private final String term; private final Object value; - public TermQuery(Location location, String term, Object value) { - super(location); + public TermQuery(Source source, String term, Object value) { + super(source); this.term = term; this.value = value; } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/query/TermsQuery.java 
b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/query/TermsQuery.java index 66d206f829a32..966130cb239ec 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/query/TermsQuery.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/query/TermsQuery.java @@ -8,7 +8,7 @@ import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.xpack.sql.expression.Expression; import org.elasticsearch.xpack.sql.expression.Foldables; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; import org.elasticsearch.xpack.sql.type.DataTypes; import java.util.Collections; @@ -24,8 +24,8 @@ public class TermsQuery extends LeafQuery { private final String term; private final Set values; - public TermsQuery(Location location, String term, List values) { - super(location); + public TermsQuery(Source source, String term, List values) { + super(source); this.term = term; values.removeIf(e -> DataTypes.isNull(e.dataType())); if (values.isEmpty()) { diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/query/WildcardQuery.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/query/WildcardQuery.java index 6c252450c6f07..11b776e90fbef 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/query/WildcardQuery.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/query/WildcardQuery.java @@ -8,7 +8,7 @@ import java.util.Objects; import org.elasticsearch.index.query.QueryBuilder; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; import static org.elasticsearch.index.query.QueryBuilders.wildcardQuery; @@ -16,8 +16,8 @@ public class WildcardQuery extends LeafQuery { private final String field, query; - public WildcardQuery(Location location, String field, String query) { - super(location); + public WildcardQuery(Source source, String field, String query) { + super(source); this.field = field; this.query = query; } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/tree/Node.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/tree/Node.java index 13b17791475ac..fb6d3da92758f 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/tree/Node.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/tree/Node.java @@ -34,19 +34,27 @@ public abstract class Node> { private static final int TO_STRING_MAX_PROP = 10; private static final int TO_STRING_MAX_WIDTH = 110; - private final Location location; + private final Source source; private final List children; - public Node(Location location, List children) { - this.location = (location != null ? location : Location.EMPTY); + public Node(Source source, List children) { + this.source = (source != null ? 
source : Source.EMPTY); if (children.contains(null)) { throw new SqlIllegalArgumentException("Null children are not allowed"); } this.children = children; } - public Location location() { - return location; + public Source source() { + return source; + } + + public Location sourceLocation() { + return source.source(); + } + + public String sourceText() { + return source.text(); } public List children() { diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/tree/NodeInfo.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/tree/NodeInfo.java index 23f29137cffa7..8af8ff637b3d7 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/tree/NodeInfo.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/tree/NodeInfo.java @@ -77,7 +77,7 @@ protected T innerTransform(Function rule) { } public static , P1> NodeInfo create( - T n, BiFunction ctor, + T n, BiFunction ctor, P1 p1) { return new NodeInfo(n) { @Override @@ -92,7 +92,7 @@ protected T innerTransform(Function rule) { P1 newP1 = (P1) rule.apply(p1); same &= Objects.equals(p1, newP1); - return same ? node : ctor.apply(node.location(), newP1); + return same ? node : ctor.apply(node.source(), newP1); } }; } @@ -116,12 +116,12 @@ protected T innerTransform(Function rule) { P2 newP2 = (P2) rule.apply(p2); same &= Objects.equals(p2, newP2); - return same ? node : ctor.apply(node.location(), newP1, newP2); + return same ? node : ctor.apply(node.source(), newP1, newP2); } }; } public interface NodeCtor2 { - T apply(Location l, P1 p1, P2 p2); + T apply(Source l, P1 p1, P2 p2); } public static , P1, P2, P3> NodeInfo create( @@ -146,12 +146,12 @@ protected T innerTransform(Function rule) { P3 newP3 = (P3) rule.apply(p3); same &= Objects.equals(p3, newP3); - return same ? node : ctor.apply(node.location(), newP1, newP2, newP3); + return same ? node : ctor.apply(node.source(), newP1, newP2, newP3); } }; } public interface NodeCtor3 { - T apply(Location l, P1 p1, P2 p2, P3 p3); + T apply(Source l, P1 p1, P2 p2, P3 p3); } public static , P1, P2, P3, P4> NodeInfo create( @@ -179,12 +179,12 @@ protected T innerTransform(Function rule) { P4 newP4 = (P4) rule.apply(p4); same &= Objects.equals(p4, newP4); - return same ? node : ctor.apply(node.location(), newP1, newP2, newP3, newP4); + return same ? node : ctor.apply(node.source(), newP1, newP2, newP3, newP4); } }; } public interface NodeCtor4 { - T apply(Location l, P1 p1, P2 p2, P3 p3, P4 p4); + T apply(Source l, P1 p1, P2 p2, P3 p3, P4 p4); } public static , P1, P2, P3, P4, P5> NodeInfo create( @@ -215,12 +215,12 @@ protected T innerTransform(Function rule) { P5 newP5 = (P5) rule.apply(p5); same &= Objects.equals(p5, newP5); - return same ? node : ctor.apply(node.location(), newP1, newP2, newP3, newP4, newP5); + return same ? node : ctor.apply(node.source(), newP1, newP2, newP3, newP4, newP5); } }; } public interface NodeCtor5 { - T apply(Location l, P1 p1, P2 p2, P3 p3, P4 p4, P5 p5); + T apply(Source l, P1 p1, P2 p2, P3 p3, P4 p4, P5 p5); } public static , P1, P2, P3, P4, P5, P6> NodeInfo create( @@ -254,12 +254,12 @@ protected T innerTransform(Function rule) { P6 newP6 = (P6) rule.apply(p6); same &= Objects.equals(p6, newP6); - return same ? node : ctor.apply(node.location(), newP1, newP2, newP3, newP4, newP5, newP6); + return same ? 
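Each NodeInfo.create overload above threads the node's constructor through as a method reference whose first parameter is now a Source instead of a Location; when a transformed property differs, the node is rebuilt via ctor.apply(node.source(), ...). A hedged sketch of a matching constructor reference, assuming the NodeCtor2 type-parameter order <P1, P2, T> and using the three-argument Literal constructor that appears later in this patch:

    import org.elasticsearch.xpack.sql.expression.Literal;
    import org.elasticsearch.xpack.sql.tree.NodeInfo;
    import org.elasticsearch.xpack.sql.type.DataType;

    class NodeCtorSketch {
        static Literal rebuild(Literal lit, Object newValue) {
            // Literal(Source, Object, DataType) lines up with NodeCtor2<Object, DataType, Literal>
            NodeInfo.NodeCtor2<Object, DataType, Literal> ctor = Literal::new;
            return ctor.apply(lit.source(), newValue, lit.dataType());
        }
    }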
node : ctor.apply(node.source(), newP1, newP2, newP3, newP4, newP5, newP6); } }; } public interface NodeCtor6 { - T apply(Location l, P1 p1, P2 p2, P3 p3, P4 p4, P5 p5, P6 p6); + T apply(Source l, P1 p1, P2 p2, P3 p3, P4 p4, P5 p5, P6 p6); } public static , P1, P2, P3, P4, P5, P6, P7> NodeInfo create( @@ -296,12 +296,12 @@ protected T innerTransform(Function rule) { P7 newP7 = (P7) rule.apply(p7); same &= Objects.equals(p7, newP7); - return same ? node : ctor.apply(node.location(), newP1, newP2, newP3, newP4, newP5, newP6, newP7); + return same ? node : ctor.apply(node.source(), newP1, newP2, newP3, newP4, newP5, newP6, newP7); } }; } public interface NodeCtor7 { - T apply(Location l, P1 p1, P2 p2, P3 p3, P4 p4, P5 p5, P6 p6, P7 p7); + T apply(Source l, P1 p1, P2 p2, P3 p3, P4 p4, P5 p5, P6 p6, P7 p7); } public static , P1, P2, P3, P4, P5, P6, P7, P8> NodeInfo create( @@ -341,12 +341,12 @@ protected T innerTransform(Function rule) { P8 newP8 = (P8) rule.apply(p8); same &= Objects.equals(p8, newP8); - return same ? node : ctor.apply(node.location(), newP1, newP2, newP3, newP4, newP5, newP6, newP7, newP8); + return same ? node : ctor.apply(node.source(), newP1, newP2, newP3, newP4, newP5, newP6, newP7, newP8); } }; } public interface NodeCtor8 { - T apply(Location l, P1 p1, P2 p2, P3 p3, P4 p4, P5 p5, P6 p6, P7 p7, P8 p8); + T apply(Source l, P1 p1, P2 p2, P3 p3, P4 p4, P5 p5, P6 p6, P7 p7, P8 p8); } public static , P1, P2, P3, P4, P5, P6, P7, P8, P9, P10> NodeInfo create( @@ -392,12 +392,12 @@ protected T innerTransform(Function rule) { P10 newP10 = (P10) rule.apply(p10); same &= Objects.equals(p10, newP10); - return same ? node : ctor.apply(node.location(), newP1, newP2, newP3, newP4, newP5, newP6, newP7, newP8, + return same ? node : ctor.apply(node.source(), newP1, newP2, newP3, newP4, newP5, newP6, newP7, newP8, newP9, newP10); } }; } public interface NodeCtor10 { - T apply(Location l, P1 p1, P2 p2, P3 p3, P4 p4, P5 p5, P6 p6, P7 p7, P8 p8, P9 p9, P10 p10); + T apply(Source l, P1 p1, P2 p2, P3 p3, P4 p4, P5 p5, P6 p6, P7 p7, P8 p8, P9 p9, P10 p10); } } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/tree/Source.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/tree/Source.java new file mode 100644 index 0000000000000..d689a7f31dd8e --- /dev/null +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/tree/Source.java @@ -0,0 +1,60 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ + +package org.elasticsearch.xpack.sql.tree; + +import org.elasticsearch.xpack.sql.proto.StringUtils; + +import java.util.Objects; + +public final class Source { + + public static final Source EMPTY = new Source(Location.EMPTY, StringUtils.EMPTY); + + private final Location location; + private final String text; + + public Source(int line, int charPositionInLine, String text) { + this(new Location(line, charPositionInLine), text); + } + + public Source(Location location, String text) { + this.location = location; + this.text = text; + } + + public Location source() { + return location; + } + + public String text() { + return text; + } + + @Override + public int hashCode() { + return Objects.hash(location, text); + } + + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; + } + + if (obj == null || getClass() != obj.getClass()) { + return false; + } + + Source other = (Source) obj; + return Objects.equals(location, other.location) && Objects.equals(text, other.text); + } + + @Override + public String toString() { + return text + location; + } +} diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/type/EsField.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/type/EsField.java index 5630c9409af95..47a2904adb7a7 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/type/EsField.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/type/EsField.java @@ -99,7 +99,7 @@ public boolean equals(Object o) { return false; } EsField field = (EsField) o; - return aggregatable == field.aggregatable && esDataType == field.esDataType + return aggregatable == field.aggregatable && esDataType == field.esDataType && Objects.equals(name, field.name) && Objects.equals(properties, field.properties); } diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/action/SqlActionIT.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/action/SqlActionIT.java index 09e106b17bf6c..c71d7c274947f 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/action/SqlActionIT.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/action/SqlActionIT.java @@ -18,6 +18,7 @@ public class SqlActionIT extends AbstractSqlIntegTestCase { + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/37191") public void testSqlAction() throws Exception { assertAcked(client().admin().indices().prepareCreate("test").get()); client().prepareBulk() diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/action/SqlClearCursorActionIT.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/action/SqlClearCursorActionIT.java index de55d486555ad..952104b49eebb 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/action/SqlClearCursorActionIT.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/action/SqlClearCursorActionIT.java @@ -5,6 +5,7 @@ */ package org.elasticsearch.xpack.sql.action; +import org.apache.lucene.util.LuceneTestCase.AwaitsFix; import org.elasticsearch.action.bulk.BulkRequestBuilder; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.support.WriteRequest; @@ -16,6 +17,7 @@ import static org.hamcrest.Matchers.not; import static org.hamcrest.Matchers.notNullValue; +@AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/37191") public class SqlClearCursorActionIT extends AbstractSqlIntegTestCase { public void testSqlClearCursorAction() 
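The new Source class above is deliberately small: a Location plus the exact SQL text it covers, so a node can report both where and what failed. A short usage sketch (values invented; note the slightly surprising name of the Location accessor, source()):

    import org.elasticsearch.xpack.sql.tree.Location;
    import org.elasticsearch.xpack.sql.tree.Source;

    class SourceSketch {
        static void demo() {
            Source src = new Source(1, 8, "PERCENTILE(int, ABS(int))");
            Location loc = src.source();   // the wrapped Location; the accessor is named source()
            String sql = src.text();       // the exact SQL snippet the node came from
            String label = src.toString(); // text followed by the location, handy in error messages
        }
    }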
throws Exception { diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/action/SqlDisabledIT.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/action/SqlDisabledIT.java index c50f1095164df..0a56e804a00c6 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/action/SqlDisabledIT.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/action/SqlDisabledIT.java @@ -29,6 +29,7 @@ protected Settings transportClientSettings() { .build(); } + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/37191") public void testSqlAction() throws Exception { Throwable throwable = expectThrows(Throwable.class, () -> new SqlQueryRequestBuilder(client(), SqlQueryAction.INSTANCE).query("SHOW tables").get()); diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/action/SqlLicenseIT.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/action/SqlLicenseIT.java index df0875690a2c2..62f7b42c69944 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/action/SqlLicenseIT.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/action/SqlLicenseIT.java @@ -5,6 +5,7 @@ */ package org.elasticsearch.xpack.sql.action; +import org.apache.lucene.util.LuceneTestCase; import org.elasticsearch.ElasticsearchSecurityException; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.support.WriteRequest; @@ -34,6 +35,7 @@ import static org.elasticsearch.license.XPackLicenseStateTests.randomTrialOrPlatinumMode; import static org.hamcrest.Matchers.equalTo; +@LuceneTestCase.AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/37191") public class SqlLicenseIT extends AbstractLicensesIntegrationTestCase { @Override protected boolean ignoreExternalCluster() { diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/action/SqlTranslateActionIT.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/action/SqlTranslateActionIT.java index e413a590dfaf1..d86245dcbfae7 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/action/SqlTranslateActionIT.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/action/SqlTranslateActionIT.java @@ -17,6 +17,7 @@ public class SqlTranslateActionIT extends AbstractSqlIntegTestCase { + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/37191") public void testSqlTranslateAction() throws Exception { assertAcked(client().admin().indices().prepareCreate("test").get()); client().prepareBulk() diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/analysis/analyzer/VerifierErrorMessagesTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/analysis/analyzer/VerifierErrorMessagesTests.java index 5a786441d3300..6366ba85503b0 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/analysis/analyzer/VerifierErrorMessagesTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/analysis/analyzer/VerifierErrorMessagesTests.java @@ -6,6 +6,7 @@ package org.elasticsearch.xpack.sql.analysis.analyzer; import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.sql.SqlIllegalArgumentException; import org.elasticsearch.xpack.sql.TestUtils; import org.elasticsearch.xpack.sql.analysis.AnalysisException; import org.elasticsearch.xpack.sql.analysis.index.EsIndex; @@ -20,18 +21,24 @@ import org.elasticsearch.xpack.sql.parser.SqlParser; import 
org.elasticsearch.xpack.sql.plan.logical.LogicalPlan; import org.elasticsearch.xpack.sql.stats.Metrics; +import org.elasticsearch.xpack.sql.type.DataType; import org.elasticsearch.xpack.sql.type.EsField; import org.elasticsearch.xpack.sql.type.TypesTests; +import java.util.LinkedHashMap; import java.util.Map; +import static java.util.Collections.emptyMap; +import static java.util.Collections.singletonMap; + public class VerifierErrorMessagesTests extends ESTestCase { + private SqlParser parser = new SqlParser(); + private IndexResolution indexResolution = IndexResolution.valid(new EsIndex("test", + TypesTests.loadMapping("mapping-multi-field-with-nested.json"))); private String error(String sql) { - Map<String, EsField> mapping = TypesTests.loadMapping("mapping-multi-field-with-nested.json"); - EsIndex test = new EsIndex("test", mapping); - return error(IndexResolution.valid(test), sql); + return error(indexResolution, sql); } private String error(IndexResolution getIndexResult, String sql) { @@ -95,7 +102,27 @@ public void testMisspelledColumnWithWildcard() { public void testColumnWithNoSubFields() { assertEquals("1:8: Cannot determine columns for [text.*]", error("SELECT text.* FROM test")); } - + + public void testFieldAliasTypeWithoutHierarchy() { + Map<String, EsField> mapping = new LinkedHashMap<>(); + + mapping.put("field", new EsField("field", DataType.OBJECT, + singletonMap("alias", new EsField("alias", DataType.KEYWORD, emptyMap(), true)), false)); + + IndexResolution resolution = IndexResolution.valid(new EsIndex("test", mapping)); + + // check the nested alias is seen + accept(resolution, "SELECT field.alias FROM test"); + // or its hierarchy + accept(resolution, "SELECT field.* FROM test"); + + // check typos + assertEquals("1:8: Unknown column [field.alas], did you mean [field.alias]?", error(resolution, "SELECT field.alas FROM test")); + + // non-existing parents for aliases are not seen by the user + assertEquals("1:8: Cannot use field [field] type [object] only its subfields", error(resolution, "SELECT field FROM test")); + } + public void testMultipleColumnsWithWildcard1() { assertEquals("1:14: Unknown column [a]\n" + "line 1:17: Unknown column [b]\n" + @@ -504,4 +531,20 @@ public void testAggsInHistogram() { assertEquals("1:47: Cannot use an aggregate [MAX] for grouping", error("SELECT MAX(date) FROM test GROUP BY HISTOGRAM(MAX(int), 1)")); } -} \ No newline at end of file + + public void testErrorMessageForPercentileWithSecondArgBasedOnAField() { + Analyzer analyzer = new Analyzer(TestUtils.TEST_CFG, new FunctionRegistry(), indexResolution, new Verifier(new Metrics())); + SqlIllegalArgumentException e = expectThrows(SqlIllegalArgumentException.class, () -> analyzer.analyze(parser.createStatement( + "SELECT PERCENTILE(int, ABS(int)) FROM test"), true)); + assertEquals("2nd argument of PERCENTILE must be constant, received [ABS(int)]", + e.getMessage()); + } + + public void testErrorMessageForPercentileRankWithSecondArgBasedOnAField() { + Analyzer analyzer = new Analyzer(TestUtils.TEST_CFG, new FunctionRegistry(), indexResolution, new Verifier(new Metrics())); + SqlIllegalArgumentException e = expectThrows(SqlIllegalArgumentException.class, () -> analyzer.analyze(parser.createStatement( + "SELECT PERCENTILE_RANK(int, ABS(int)) FROM test"), true)); + assertEquals("2nd argument of PERCENTILE_RANK must be constant, received [ABS(int)]", + e.getMessage()); + } +} diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/analysis/index/IndexResolverTests.java
b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/analysis/index/IndexResolverTests.java index 7ef57972b3131..b53d00cfbb71d 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/analysis/index/IndexResolverTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/analysis/index/IndexResolverTests.java @@ -15,10 +15,11 @@ import java.util.ArrayList; import java.util.HashMap; import java.util.List; -import java.util.Locale; import java.util.Map; import java.util.Map.Entry; +import static org.elasticsearch.common.logging.LoggerMessageFormat.format; + public class IndexResolverTests extends ESTestCase { public void testMergeSameMapping() throws Exception { @@ -218,14 +219,14 @@ public String[] nonAggregatableIndices() { @Override public String toString() { - return String.format(Locale.ROOT, "%s,%s->%s", getName(), getType(), indices); + return format("{},{}->{}", getName(), getType(), indices); } } private static void assertEqualsMaps(Map left, Map right) { for (Entry entry : left.entrySet()) { V rv = right.get(entry.getKey()); - assertEquals(String.format(Locale.ROOT, "Key [%s] has different values", entry.getKey()), entry.getValue(), rv); + assertEquals(format("Key [{}] has different values", entry.getKey()), entry.getValue(), rv); } } @@ -235,4 +236,4 @@ private void addFieldCaps(Map> fieldCaps, cap.put(name, new FieldCapabilities(name, type, isSearchable, isAggregatable)); fieldCaps.put(name, cap); } -} \ No newline at end of file +} diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/execution/search/SourceGeneratorTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/execution/search/SourceGeneratorTests.java index 75f37f8e71f33..eabbfaba7976e 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/execution/search/SourceGeneratorTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/execution/search/SourceGeneratorTests.java @@ -23,7 +23,7 @@ import org.elasticsearch.xpack.sql.querydsl.container.Sort.Direction; import org.elasticsearch.xpack.sql.querydsl.container.Sort.Missing; import org.elasticsearch.xpack.sql.querydsl.query.MatchQuery; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; import org.elasticsearch.xpack.sql.type.KeywordEsField; import static java.util.Collections.singletonList; @@ -41,7 +41,7 @@ public void testNoQueryNoFilter() { } public void testQueryNoFilter() { - QueryContainer container = new QueryContainer().with(new MatchQuery(Location.EMPTY, "foo", "bar")); + QueryContainer container = new QueryContainer().with(new MatchQuery(Source.EMPTY, "foo", "bar")); SearchSourceBuilder sourceBuilder = SourceGenerator.sourceBuilder(container, null, randomIntBetween(1, 10)); assertEquals(matchQuery("foo", "bar").operator(Operator.OR), sourceBuilder.query()); } @@ -54,7 +54,7 @@ public void testNoQueryFilter() { } public void testQueryFilter() { - QueryContainer container = new QueryContainer().with(new MatchQuery(Location.EMPTY, "foo", "bar")); + QueryContainer container = new QueryContainer().with(new MatchQuery(Source.EMPTY, "foo", "bar")); QueryBuilder filter = matchQuery("bar", "baz"); SearchSourceBuilder sourceBuilder = SourceGenerator.sourceBuilder(container, filter, randomIntBetween(1, 10)); assertEquals(boolQuery().must(matchQuery("foo", "bar").operator(Operator.OR)).filter(matchQuery("bar", "baz")), @@ -78,8 +78,7 @@ public void testSortNoneSpecified() { } public void 
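The IndexResolverTests hunks above swap String.format for LoggerMessageFormat.format, trading locale-sensitive %s placeholders for the logging-style {} ones (the same two-argument variadic form the test now uses). A small sketch of the difference (the key value is invented):

    import java.util.Locale;
    import static org.elasticsearch.common.logging.LoggerMessageFormat.format;

    class FormatSketch {
        static void demo() {
            // Locale-sensitive %s formatting needs an explicit Locale.ROOT to stay reproducible:
            String before = String.format(Locale.ROOT, "Key [%s] has different values", "emp_no");
            // Logging-style {} placeholders are locale-independent, no Locale argument:
            String after = format("Key [{}] has different values", "emp_no");
            // both yield: Key [emp_no] has different values
        }
    }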
testSelectScoreForcesTrackingScore() { - QueryContainer container = new QueryContainer() - .addColumn(new Score(new Location(1, 1)).toAttribute()); + QueryContainer container = new QueryContainer().addColumn(new Score(Source.EMPTY).toAttribute()); SearchSourceBuilder sourceBuilder = SourceGenerator.sourceBuilder(container, null, randomIntBetween(1, 10)); assertTrue(sourceBuilder.trackScores()); } @@ -95,13 +94,13 @@ public void testSortFieldSpecified() { FieldSortBuilder sortField = fieldSort("test").unmappedType("keyword"); QueryContainer container = new QueryContainer() - .sort(new AttributeSort(new FieldAttribute(new Location(1, 1), "test", new KeywordEsField("test")), Direction.ASC, + .sort(new AttributeSort(new FieldAttribute(Source.EMPTY, "test", new KeywordEsField("test")), Direction.ASC, Missing.LAST)); SearchSourceBuilder sourceBuilder = SourceGenerator.sourceBuilder(container, null, randomIntBetween(1, 10)); assertEquals(singletonList(sortField.order(SortOrder.ASC).missing("_last")), sourceBuilder.sorts()); container = new QueryContainer() - .sort(new AttributeSort(new FieldAttribute(new Location(1, 1), "test", new KeywordEsField("test")), Direction.DESC, + .sort(new AttributeSort(new FieldAttribute(Source.EMPTY, "test", new KeywordEsField("test")), Direction.DESC, Missing.FIRST)); sourceBuilder = SourceGenerator.sourceBuilder(container, null, randomIntBetween(1, 10)); assertEquals(singletonList(sortField.order(SortOrder.DESC).missing("_first")), sourceBuilder.sorts()); diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/AttributeMapTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/AttributeMapTests.java index fa85ca9cbff12..c14f15b7b2f1b 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/AttributeMapTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/AttributeMapTests.java @@ -6,7 +6,7 @@ package org.elasticsearch.xpack.sql.expression; import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; import java.util.Collection; import java.util.LinkedHashMap; @@ -25,7 +25,7 @@ public class AttributeMapTests extends ESTestCase { private static Attribute a(String name) { - return new UnresolvedAttribute(Location.EMPTY, name); + return new UnresolvedAttribute(Source.EMPTY, name); } private static AttributeMap<String> threeMap() { @@ -79,19 +79,19 @@ public void testSingleItemConstructor() { assertThat(m.containsValue("on"), is(false)); } - public void testSubstract() { + public void testSubtract() { AttributeMap<String> m = threeMap(); AttributeMap<String> mo = new AttributeMap<>(m.keySet().iterator().next(), "one"); AttributeMap<String> empty = new AttributeMap<>(); - assertThat(m.substract(empty), is(m)); - assertThat(m.substract(m), is(empty)); - assertThat(mo.substract(m), is(empty)); + assertThat(m.subtract(empty), is(m)); + assertThat(m.subtract(m), is(empty)); + assertThat(mo.subtract(m), is(empty)); - AttributeMap<String> substract = m.substract(mo); + AttributeMap<String> subtract = m.subtract(mo); - assertThat(substract.size(), is(2)); - assertThat(substract.attributeNames(), contains("two", "three")); + assertThat(subtract.size(), is(2)); + assertThat(subtract.attributeNames(), contains("two", "three")); } public void testIntersect() { diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/LiteralTests.java
b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/LiteralTests.java index d6bd6ab96b218..2d36cb1e1e56c 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/LiteralTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/LiteralTests.java @@ -8,7 +8,7 @@ import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.sql.SqlIllegalArgumentException; import org.elasticsearch.xpack.sql.tree.AbstractNodeTestCase; -import org.elasticsearch.xpack.sql.tree.LocationTests; +import org.elasticsearch.xpack.sql.tree.SourceTests; import org.elasticsearch.xpack.sql.type.DataType; import org.elasticsearch.xpack.sql.type.DataTypeConversion; @@ -51,7 +51,7 @@ static class ValueAndCompatibleTypes { public static Literal randomLiteral() { ValueAndCompatibleTypes gen = randomFrom(GENERATORS); - return new Literal(LocationTests.randomLocation(), gen.valueSupplier.get(), randomFrom(gen.validDataTypes)); + return new Literal(SourceTests.randomSource(), gen.valueSupplier.get(), randomFrom(gen.validDataTypes)); } @Override @@ -61,7 +61,7 @@ protected Literal randomInstance() { @Override protected Literal copy(Literal instance) { - return new Literal(instance.location(), instance.name(), instance.value(), instance.dataType()); + return new Literal(instance.source(), instance.name(), instance.value(), instance.dataType()); } @Override @@ -69,11 +69,11 @@ protected Literal mutate(Literal instance) { List> mutators = new ArrayList<>(); // Changing the location doesn't count as mutation because..... it just doesn't, ok?! // Change the value to another valid value - mutators.add(l -> new Literal(l.location(), randomValueOfTypeOtherThan(l.value(), l.dataType()), l.dataType())); + mutators.add(l -> new Literal(l.source(), randomValueOfTypeOtherThan(l.value(), l.dataType()), l.dataType())); // If we can change the data type then add that as an option as well List validDataTypes = validReplacementDataTypes(instance.value(), instance.dataType()); if (validDataTypes.size() > 1) { - mutators.add(l -> new Literal(l.location(), l.value(), randomValueOtherThan(l.dataType(), () -> randomFrom(validDataTypes)))); + mutators.add(l -> new Literal(l.source(), l.value(), randomValueOtherThan(l.dataType(), () -> randomFrom(validDataTypes)))); } return randomFrom(mutators).apply(instance); } @@ -84,14 +84,14 @@ public void testTransform() { // Replace value Object newValue = randomValueOfTypeOtherThan(literal.value(), literal.dataType()); - assertEquals(new Literal(literal.location(), newValue, literal.dataType()), + assertEquals(new Literal(literal.source(), newValue, literal.dataType()), literal.transformPropertiesOnly(p -> p == literal.value() ? 
newValue : p, Object.class)); // Replace data type if there are more compatible data types List validDataTypes = validReplacementDataTypes(literal.value(), literal.dataType()); if (validDataTypes.size() > 1) { DataType newDataType = randomValueOtherThan(literal.dataType(), () -> randomFrom(validDataTypes)); - assertEquals(new Literal(literal.location(), literal.value(), newDataType), + assertEquals(new Literal(literal.source(), literal.value(), newDataType), literal.transformPropertiesOnly(p -> newDataType, DataType.class)); } } diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/NullabilityTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/NullabilityTests.java new file mode 100644 index 0000000000000..1b493de6b4c1b --- /dev/null +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/NullabilityTests.java @@ -0,0 +1,35 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.sql.expression; + +import org.elasticsearch.test.ESTestCase; + +import static org.elasticsearch.xpack.sql.expression.Nullability.FALSE; +import static org.elasticsearch.xpack.sql.expression.Nullability.TRUE; +import static org.elasticsearch.xpack.sql.expression.Nullability.UNKNOWN; + +public class NullabilityTests extends ESTestCase { + + public void testLogicalAndOfNullabilities() { + assertEquals(FALSE, Nullability.and()); + + assertEquals(TRUE, Nullability.and(TRUE)); + assertEquals(FALSE, Nullability.and(FALSE)); + assertEquals(UNKNOWN, Nullability.and(UNKNOWN)); + + assertEquals(UNKNOWN, Nullability.and(UNKNOWN, UNKNOWN)); + assertEquals(UNKNOWN, Nullability.and(UNKNOWN, TRUE)); + assertEquals(UNKNOWN, Nullability.and(UNKNOWN, FALSE)); + + assertEquals(FALSE, Nullability.and(FALSE, FALSE)); + assertEquals(TRUE, Nullability.and(FALSE, TRUE)); + assertEquals(UNKNOWN, Nullability.and(FALSE, UNKNOWN)); + + assertEquals(TRUE, Nullability.and(TRUE, TRUE)); + assertEquals(TRUE, Nullability.and(TRUE, FALSE)); + assertEquals(UNKNOWN, Nullability.and(TRUE, UNKNOWN)); + } +} diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/QuotingTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/QuotingTests.java index 2f23ea7cb73f6..a7cae7df4312b 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/QuotingTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/QuotingTests.java @@ -10,11 +10,10 @@ import org.elasticsearch.xpack.sql.parser.SqlParser; import org.elasticsearch.xpack.sql.plan.logical.LogicalPlan; import org.elasticsearch.xpack.sql.plan.logical.OrderBy; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; import java.util.ArrayList; import java.util.List; -import java.util.Locale; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasSize; @@ -22,11 +21,12 @@ import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.nullValue; +import static org.elasticsearch.common.logging.LoggerMessageFormat.format; public class QuotingTests extends ESTestCase { private static UnresolvedAttribute from(String s) { - return new UnresolvedAttribute(Location.EMPTY, s); + return new UnresolvedAttribute(Source.EMPTY, 
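The truth table asserted in NullabilityTests above pins down the semantics of Nullability.and: UNKNOWN is contagious, otherwise the result is TRUE (nullable) if any argument is TRUE, and the empty conjunction is FALSE. A sketch of logic consistent with those assertions, not necessarily the actual implementation:

    import org.elasticsearch.xpack.sql.expression.Nullability;

    class NullabilitySketch {
        // Re-implementation matching the asserted truth table.
        static Nullability and(Nullability... values) {
            Nullability result = Nullability.FALSE;      // and() of nothing is FALSE
            for (Nullability n : values) {
                if (n == Nullability.UNKNOWN) {
                    return Nullability.UNKNOWN;          // UNKNOWN dominates
                }
                if (n == Nullability.TRUE) {
                    result = Nullability.TRUE;           // any nullable operand -> nullable
                }
            }
            return result;
        }
    }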
s); } public void testBasicString() { @@ -48,7 +48,7 @@ public void testSingleQuoteLiteral() { public void testMultiSingleQuotedLiteral() { String first = "bucket"; String second = "head"; - Expression exp = new SqlParser().createExpression(String.format(Locale.ROOT, "'%s' '%s'", first, second)); + Expression exp = new SqlParser().createExpression(format(null, "'{}' '{}'", first, second)); assertThat(exp, instanceOf(Literal.class)); Literal l = (Literal) exp; assertThat(l.value(), equalTo(first + second)); diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/TyperResolutionTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/TyperResolutionTests.java index 27bfcaf37223b..1340332505444 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/TyperResolutionTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/TyperResolutionTests.java @@ -14,7 +14,7 @@ import java.time.Period; -import static org.elasticsearch.xpack.sql.tree.Location.EMPTY; +import static org.elasticsearch.xpack.sql.tree.Source.EMPTY; public class TyperResolutionTests extends ESTestCase { diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/UnresolvedAttributeTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/UnresolvedAttributeTests.java index 51a16c14a8889..4d35b40a98c71 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/UnresolvedAttributeTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/UnresolvedAttributeTests.java @@ -6,23 +6,22 @@ package org.elasticsearch.xpack.sql.expression; import org.elasticsearch.xpack.sql.tree.AbstractNodeTestCase; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; +import org.elasticsearch.xpack.sql.tree.SourceTests; import java.util.Arrays; import java.util.Objects; import java.util.function.Supplier; -import static org.elasticsearch.xpack.sql.tree.LocationTests.randomLocation; - public class UnresolvedAttributeTests extends AbstractNodeTestCase { public static UnresolvedAttribute randomUnresolvedAttribute() { - Location location = randomLocation(); + Source source = SourceTests.randomSource(); String name = randomAlphaOfLength(5); String qualifier = randomQualifier(); ExpressionId id = randomBoolean() ? 
null : new ExpressionId(); String unresolvedMessage = randomUnresolvedMessage(); Object resolutionMetadata = new Object(); - return new UnresolvedAttribute(location, name, qualifier, id, unresolvedMessage, resolutionMetadata); + return new UnresolvedAttribute(source, name, qualifier, id, unresolvedMessage, resolutionMetadata); } /** @@ -49,18 +48,18 @@ protected UnresolvedAttribute randomInstance() { @Override protected UnresolvedAttribute mutate(UnresolvedAttribute a) { Supplier option = randomFrom(Arrays.asList( - () -> new UnresolvedAttribute(a.location(), + () -> new UnresolvedAttribute(a.source(), randomValueOtherThan(a.name(), () -> randomAlphaOfLength(5)), a.qualifier(), a.id(), a.unresolvedMessage(), a.resolutionMetadata()), - () -> new UnresolvedAttribute(a.location(), a.name(), + () -> new UnresolvedAttribute(a.source(), a.name(), randomValueOtherThan(a.qualifier(), UnresolvedAttributeTests::randomQualifier), a.id(), a.unresolvedMessage(), a.resolutionMetadata()), - () -> new UnresolvedAttribute(a.location(), a.name(), a.qualifier(), + () -> new UnresolvedAttribute(a.source(), a.name(), a.qualifier(), new ExpressionId(), a.unresolvedMessage(), a.resolutionMetadata()), - () -> new UnresolvedAttribute(a.location(), a.name(), a.qualifier(), a.id(), + () -> new UnresolvedAttribute(a.source(), a.name(), a.qualifier(), a.id(), randomValueOtherThan(a.unresolvedMessage(), () -> randomUnresolvedMessage()), a.resolutionMetadata()), - () -> new UnresolvedAttribute(a.location(), a.name(), + () -> new UnresolvedAttribute(a.source(), a.name(), a.qualifier(), a.id(), a.unresolvedMessage(), new Object()) )); return option.get(); @@ -68,7 +67,7 @@ protected UnresolvedAttribute mutate(UnresolvedAttribute a) { @Override protected UnresolvedAttribute copy(UnresolvedAttribute a) { - return new UnresolvedAttribute(a.location(), a.name(), a.qualifier(), a.id(), a.unresolvedMessage(), a.resolutionMetadata()); + return new UnresolvedAttribute(a.source(), a.name(), a.qualifier(), a.id(), a.unresolvedMessage(), a.resolutionMetadata()); } @Override @@ -76,27 +75,27 @@ public void testTransform() { UnresolvedAttribute a = randomUnresolvedAttribute(); String newName = randomValueOtherThan(a.name(), () -> randomAlphaOfLength(5)); - assertEquals(new UnresolvedAttribute(a.location(), newName, a.qualifier(), a.id(), + assertEquals(new UnresolvedAttribute(a.source(), newName, a.qualifier(), a.id(), a.unresolvedMessage(), a.resolutionMetadata()), a.transformPropertiesOnly(v -> Objects.equals(v, a.name()) ? newName : v, Object.class)); String newQualifier = randomValueOtherThan(a.qualifier(), UnresolvedAttributeTests::randomQualifier); - assertEquals(new UnresolvedAttribute(a.location(), a.name(), newQualifier, a.id(), + assertEquals(new UnresolvedAttribute(a.source(), a.name(), newQualifier, a.id(), a.unresolvedMessage(), a.resolutionMetadata()), a.transformPropertiesOnly(v -> Objects.equals(v, a.qualifier()) ? newQualifier : v, Object.class)); ExpressionId newId = new ExpressionId(); - assertEquals(new UnresolvedAttribute(a.location(), a.name(), a.qualifier(), newId, + assertEquals(new UnresolvedAttribute(a.source(), a.name(), a.qualifier(), newId, a.unresolvedMessage(), a.resolutionMetadata()), a.transformPropertiesOnly(v -> Objects.equals(v, a.id()) ? 
newId : v, Object.class)); String newMessage = randomValueOtherThan(a.unresolvedMessage(), UnresolvedAttributeTests::randomUnresolvedMessage); - assertEquals(new UnresolvedAttribute(a.location(), a.name(), a.qualifier(), a.id(), + assertEquals(new UnresolvedAttribute(a.source(), a.name(), a.qualifier(), a.id(), newMessage, a.resolutionMetadata()), a.transformPropertiesOnly(v -> Objects.equals(v, a.unresolvedMessage()) ? newMessage : v, Object.class)); Object newMeta = new Object(); - assertEquals(new UnresolvedAttribute(a.location(), a.name(), a.qualifier(), a.id(), + assertEquals(new UnresolvedAttribute(a.source(), a.name(), a.qualifier(), a.id(), a.unresolvedMessage(), newMeta), a.transformPropertiesOnly(v -> Objects.equals(v, a.resolutionMetadata()) ? newMeta : v, Object.class)); } diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/FunctionRegistryTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/FunctionRegistryTests.java index a23fbff4a99d1..8c5446f5ee0d7 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/FunctionRegistryTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/FunctionRegistryTests.java @@ -15,9 +15,9 @@ import org.elasticsearch.xpack.sql.parser.ParsingException; import org.elasticsearch.xpack.sql.proto.Mode; import org.elasticsearch.xpack.sql.session.Configuration; -import org.elasticsearch.xpack.sql.tree.Location; -import org.elasticsearch.xpack.sql.tree.LocationTests; import org.elasticsearch.xpack.sql.tree.NodeInfo; +import org.elasticsearch.xpack.sql.tree.Source; +import org.elasticsearch.xpack.sql.tree.SourceTests; import org.elasticsearch.xpack.sql.type.DataType; import java.time.ZoneId; @@ -39,7 +39,7 @@ public void testNoArgFunction() { UnresolvedFunction ur = uf(STANDARD); FunctionRegistry r = new FunctionRegistry(def(DummyFunction.class, DummyFunction::new, "DUMMY_FUNCTION")); FunctionDefinition def = r.resolveFunction(ur.name()); - assertEquals(ur.location(), ur.buildResolved(randomConfiguration(), def).location()); + assertEquals(ur.source(), ur.buildResolved(randomConfiguration(), def).source()); // Distinct isn't supported ParsingException e = expectThrows(ParsingException.class, () -> @@ -54,13 +54,13 @@ public void testNoArgFunction() { public void testUnaryFunction() { UnresolvedFunction ur = uf(STANDARD, mock(Expression.class)); - FunctionRegistry r = new FunctionRegistry(def(DummyFunction.class, (Location l, Expression e) -> { + FunctionRegistry r = new FunctionRegistry(def(DummyFunction.class, (Source l, Expression e) -> { assertSame(e, ur.children().get(0)); return new DummyFunction(l); }, "DUMMY_FUNCTION")); FunctionDefinition def = r.resolveFunction(ur.name()); assertFalse(def.extractViable()); - assertEquals(ur.location(), ur.buildResolved(randomConfiguration(), def).location()); + assertEquals(ur.source(), ur.buildResolved(randomConfiguration(), def).source()); // Distinct isn't supported ParsingException e = expectThrows(ParsingException.class, () -> @@ -81,13 +81,13 @@ public void testUnaryFunction() { public void testUnaryDistinctAwareFunction() { boolean urIsDistinct = randomBoolean(); UnresolvedFunction ur = uf(urIsDistinct ? 
DISTINCT : STANDARD, mock(Expression.class)); - FunctionRegistry r = new FunctionRegistry(def(DummyFunction.class, (Location l, Expression e, boolean distinct) -> { + FunctionRegistry r = new FunctionRegistry(def(DummyFunction.class, (Source l, Expression e, boolean distinct) -> { assertEquals(urIsDistinct, distinct); assertSame(e, ur.children().get(0)); return new DummyFunction(l); }, "DUMMY_FUNCTION")); FunctionDefinition def = r.resolveFunction(ur.name()); - assertEquals(ur.location(), ur.buildResolved(randomConfiguration(), def).location()); + assertEquals(ur.source(), ur.buildResolved(randomConfiguration(), def).source()); assertFalse(def.extractViable()); // No children aren't supported @@ -106,13 +106,13 @@ public void testDateTimeFunction() { UnresolvedFunction ur = uf(urIsExtract ? EXTRACT : STANDARD, mock(Expression.class)); ZoneId providedTimeZone = randomZone().normalized(); Configuration providedConfiguration = randomConfiguration(providedTimeZone); - FunctionRegistry r = new FunctionRegistry(def(DummyFunction.class, (Location l, Expression e, ZoneId zi) -> { + FunctionRegistry r = new FunctionRegistry(def(DummyFunction.class, (Source l, Expression e, ZoneId zi) -> { assertEquals(providedTimeZone, zi); assertSame(e, ur.children().get(0)); return new DummyFunction(l); }, "DUMMY_FUNCTION")); FunctionDefinition def = r.resolveFunction(ur.name()); - assertEquals(ur.location(), ur.buildResolved(providedConfiguration, def).location()); + assertEquals(ur.source(), ur.buildResolved(providedConfiguration, def).source()); assertTrue(def.extractViable()); // Distinct isn't supported @@ -133,13 +133,13 @@ public void testDateTimeFunction() { public void testBinaryFunction() { UnresolvedFunction ur = uf(STANDARD, mock(Expression.class), mock(Expression.class)); - FunctionRegistry r = new FunctionRegistry(def(DummyFunction.class, (Location l, Expression lhs, Expression rhs) -> { + FunctionRegistry r = new FunctionRegistry(def(DummyFunction.class, (Source l, Expression lhs, Expression rhs) -> { assertSame(lhs, ur.children().get(0)); assertSame(rhs, ur.children().get(1)); return new DummyFunction(l); }, "DUMMY_FUNCTION")); FunctionDefinition def = r.resolveFunction(ur.name()); - assertEquals(ur.location(), ur.buildResolved(randomConfiguration(), def).location()); + assertEquals(ur.source(), ur.buildResolved(randomConfiguration(), def).source()); assertFalse(def.extractViable()); // Distinct isn't supported @@ -187,33 +187,33 @@ public void testDuplicateAliasInTwoDifferentFunctionsFromTwoDifferentBatches() { public void testFunctionResolving() { UnresolvedFunction ur = uf(STANDARD, mock(Expression.class)); - FunctionRegistry r = new FunctionRegistry(def(DummyFunction.class, (Location l, Expression e) -> { + FunctionRegistry r = new FunctionRegistry(def(DummyFunction.class, (Source l, Expression e) -> { assertSame(e, ur.children().get(0)); return new DummyFunction(l); }, "DUMMY_FUNCTION", "DUMMY_FUNC")); // Resolve by primary name FunctionDefinition def = r.resolveFunction(r.resolveAlias("DuMMy_FuncTIon")); - assertEquals(ur.location(), ur.buildResolved(randomConfiguration(), def).location()); + assertEquals(ur.source(), ur.buildResolved(randomConfiguration(), def).source()); def = r.resolveFunction(r.resolveAlias("Dummy_Function")); - assertEquals(ur.location(), ur.buildResolved(randomConfiguration(), def).location()); + assertEquals(ur.source(), ur.buildResolved(randomConfiguration(), def).source()); def = r.resolveFunction(r.resolveAlias("dummy_function")); - assertEquals(ur.location(), 
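An aside on the testFunctionResolving hunk above: the assertions pin down that resolveAlias matches any casing of either the primary name or an alias, and that unknown names fail loudly. A minimal, self-contained sketch of that contract — the class below is hypothetical, and the real FunctionRegistry's internals (and its SqlIllegalArgumentException) may well differ:

import java.util.HashMap;
import java.util.Locale;
import java.util.Map;

// Hypothetical stand-in for the alias handling that testFunctionResolving
// exercises: any casing of a primary name or alias resolves to the same
// definition, and unknown names throw.
final class AliasResolverSketch {
    private final Map<String, String> byAlias = new HashMap<>();

    void register(String primaryName, String... aliases) {
        byAlias.put(primaryName.toUpperCase(Locale.ROOT), primaryName);
        for (String alias : aliases) {
            byAlias.put(alias.toUpperCase(Locale.ROOT), primaryName);
        }
    }

    String resolveAlias(String name) {
        String primary = byAlias.get(name.toUpperCase(Locale.ROOT));
        if (primary == null) {
            throw new IllegalArgumentException("Cannot find function " + name);
        }
        return primary;
    }

    public static void main(String[] args) {
        AliasResolverSketch r = new AliasResolverSketch();
        r.register("DUMMY_FUNCTION", "DUMMY_FUNC");
        System.out.println(r.resolveAlias("DuMMy_FuncTIon")); // DUMMY_FUNCTION
        System.out.println(r.resolveAlias("dummy_func"));     // DUMMY_FUNCTION
    }
}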
ur.buildResolved(randomConfiguration(), def).location()); + assertEquals(ur.source(), ur.buildResolved(randomConfiguration(), def).source()); def = r.resolveFunction(r.resolveAlias("DUMMY_FUNCTION")); - assertEquals(ur.location(), ur.buildResolved(randomConfiguration(), def).location()); + assertEquals(ur.source(), ur.buildResolved(randomConfiguration(), def).source()); // Resolve by alias def = r.resolveFunction(r.resolveAlias("DumMy_FunC")); - assertEquals(ur.location(), ur.buildResolved(randomConfiguration(), def).location()); + assertEquals(ur.source(), ur.buildResolved(randomConfiguration(), def).source()); def = r.resolveFunction(r.resolveAlias("dummy_func")); - assertEquals(ur.location(), ur.buildResolved(randomConfiguration(), def).location()); + assertEquals(ur.source(), ur.buildResolved(randomConfiguration(), def).source()); def = r.resolveFunction(r.resolveAlias("DUMMY_FUNC")); - assertEquals(ur.location(), ur.buildResolved(randomConfiguration(), def).location()); + assertEquals(ur.source(), ur.buildResolved(randomConfiguration(), def).source()); // Not resolved SqlIllegalArgumentException e = expectThrows(SqlIllegalArgumentException.class, @@ -228,7 +228,7 @@ public void testFunctionResolving() { } private UnresolvedFunction uf(UnresolvedFunction.ResolutionType resolutionType, Expression... children) { - return new UnresolvedFunction(LocationTests.randomLocation(), "DUMMY_FUNCTION", resolutionType, Arrays.asList(children)); + return new UnresolvedFunction(SourceTests.randomSource(), "DUMMY_FUNCTION", resolutionType, Arrays.asList(children)); } private Configuration randomConfiguration() { @@ -254,8 +254,8 @@ private Configuration randomConfiguration(ZoneId providedZoneId) { } public static class DummyFunction extends ScalarFunction { - public DummyFunction(Location location) { - super(location, emptyList()); + public DummyFunction(Source source) { + super(source, emptyList()); } @Override @@ -285,8 +285,8 @@ protected Pipe makePipe() { } public static class DummyFunction2 extends DummyFunction { - public DummyFunction2(Location location) { - super(location); + public DummyFunction2(Source source) { + super(source); } } } diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/NamedExpressionTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/NamedExpressionTests.java index 791eef8752c42..d723e79274ed9 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/NamedExpressionTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/NamedExpressionTests.java @@ -18,7 +18,7 @@ import org.elasticsearch.xpack.sql.type.EsField; import static java.util.Collections.emptyMap; -import static org.elasticsearch.xpack.sql.tree.Location.EMPTY; +import static org.elasticsearch.xpack.sql.tree.Source.EMPTY; public class NamedExpressionTests extends ESTestCase { diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/UnresolvedFunctionTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/UnresolvedFunctionTests.java index 1e58a1a5dc277..a165694f00d91 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/UnresolvedFunctionTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/UnresolvedFunctionTests.java @@ -6,9 +6,8 @@ package org.elasticsearch.xpack.sql.expression.function; import 
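Stepping back, the mechanical shape of the whole change is visible in the DummyFunction hunk just above: every node constructor that took a Location now takes a Source, and the location() accessor becomes source(). A compilable before/after sketch with stub types — the stubs are placeholders, not the real org.elasticsearch.xpack.sql.tree classes:

// Stub types standing in for org.elasticsearch.xpack.sql.tree.Location/Source.
final class Location {}
final class Source {}

// Before the PR: nodes carried a Location and exposed it via location().
class DummyFunctionBefore {
    private final Location location;
    DummyFunctionBefore(Location location) { this.location = location; }
    Location location() { return location; }
}

// After the PR: same shape, but the type, field, and accessor are all
// renamed to Source/source() -- exactly the substitution these hunks make
// in every constructor call and assertion.
class DummyFunctionAfter {
    private final Source source;
    DummyFunctionAfter(Source source) { this.source = source; }
    Source source() { return source; }
}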
org.elasticsearch.xpack.sql.expression.Expression; -import org.elasticsearch.xpack.sql.expression.function.UnresolvedFunction; import org.elasticsearch.xpack.sql.tree.AbstractNodeTestCase; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; import java.util.Arrays; import java.util.Collections; @@ -16,23 +15,22 @@ import java.util.Objects; import java.util.function.Supplier; -import static org.elasticsearch.xpack.sql.tree.LocationTests.randomLocation; -import static org.elasticsearch.xpack.sql.expression.UnresolvedAttributeTests.randomUnresolvedAttribute; - import static java.util.Collections.singletonList; +import static org.elasticsearch.xpack.sql.expression.UnresolvedAttributeTests.randomUnresolvedAttribute; +import static org.elasticsearch.xpack.sql.tree.SourceTests.randomSource; public class UnresolvedFunctionTests extends AbstractNodeTestCase { public static UnresolvedFunction randomUnresolvedFunction() { /* Pick an UnresolvedFunction where the name and the * message don't happen to be the same String. If they * matched then transform would get them confused. */ - Location location = randomLocation(); + Source source = randomSource(); String name = randomAlphaOfLength(5); UnresolvedFunction.ResolutionType resolutionType = randomFrom(UnresolvedFunction.ResolutionType.values()); List args = randomFunctionArgs(); boolean analyzed = randomBoolean(); String unresolvedMessage = randomUnresolvedMessage(); - return new UnresolvedFunction(location, name, resolutionType, args, analyzed, unresolvedMessage); + return new UnresolvedFunction(source, name, resolutionType, args, analyzed, unresolvedMessage); } private static List randomFunctionArgs() { @@ -63,17 +61,17 @@ protected UnresolvedFunction randomInstance() { @Override protected UnresolvedFunction mutate(UnresolvedFunction uf) { Supplier option = randomFrom(Arrays.asList( - () -> new UnresolvedFunction(uf.location(), randomValueOtherThan(uf.name(), () -> randomAlphaOfLength(5)), + () -> new UnresolvedFunction(uf.source(), randomValueOtherThan(uf.name(), () -> randomAlphaOfLength(5)), uf.resolutionType(), uf.children(), uf.analyzed(), uf.unresolvedMessage()), - () -> new UnresolvedFunction(uf.location(), uf.name(), + () -> new UnresolvedFunction(uf.source(), uf.name(), randomValueOtherThan(uf.resolutionType(), () -> randomFrom(UnresolvedFunction.ResolutionType.values())), uf.children(), uf.analyzed(), uf.unresolvedMessage()), - () -> new UnresolvedFunction(uf.location(), uf.name(), uf.resolutionType(), + () -> new UnresolvedFunction(uf.source(), uf.name(), uf.resolutionType(), randomValueOtherThan(uf.children(), UnresolvedFunctionTests::randomFunctionArgs), uf.analyzed(), uf.unresolvedMessage()), - () -> new UnresolvedFunction(uf.location(), uf.name(), uf.resolutionType(), uf.children(), + () -> new UnresolvedFunction(uf.source(), uf.name(), uf.resolutionType(), uf.children(), !uf.analyzed(), uf.unresolvedMessage()), - () -> new UnresolvedFunction(uf.location(), uf.name(), uf.resolutionType(), uf.children(), + () -> new UnresolvedFunction(uf.source(), uf.name(), uf.resolutionType(), uf.children(), uf.analyzed(), randomValueOtherThan(uf.unresolvedMessage(), () -> randomAlphaOfLength(5))) )); return option.get(); @@ -81,7 +79,7 @@ protected UnresolvedFunction mutate(UnresolvedFunction uf) { @Override protected UnresolvedFunction copy(UnresolvedFunction uf) { - return new UnresolvedFunction(uf.location(), uf.name(), uf.resolutionType(), uf.children(), + return new UnresolvedFunction(uf.source(), 
uf.name(), uf.resolutionType(), uf.children(), uf.analyzed(), uf.unresolvedMessage()); } @@ -90,23 +88,23 @@ public void testTransform() { UnresolvedFunction uf = randomUnresolvedFunction(); String newName = randomValueOtherThan(uf.name(), () -> randomAlphaOfLength(5)); - assertEquals(new UnresolvedFunction(uf.location(), newName, uf.resolutionType(), uf.children(), + assertEquals(new UnresolvedFunction(uf.source(), newName, uf.resolutionType(), uf.children(), uf.analyzed(), uf.unresolvedMessage()), uf.transformPropertiesOnly(p -> Objects.equals(p, uf.name()) ? newName : p, Object.class)); UnresolvedFunction.ResolutionType newResolutionType = randomValueOtherThan(uf.resolutionType(), () -> randomFrom(UnresolvedFunction.ResolutionType.values())); - assertEquals(new UnresolvedFunction(uf.location(), uf.name(), newResolutionType, uf.children(), + assertEquals(new UnresolvedFunction(uf.source(), uf.name(), newResolutionType, uf.children(), uf.analyzed(), uf.unresolvedMessage()), uf.transformPropertiesOnly(p -> Objects.equals(p, uf.resolutionType()) ? newResolutionType : p, Object.class)); String newUnresolvedMessage = randomValueOtherThan(uf.unresolvedMessage(), UnresolvedFunctionTests::randomUnresolvedMessage); - assertEquals(new UnresolvedFunction(uf.location(), uf.name(), uf.resolutionType(), uf.children(), + assertEquals(new UnresolvedFunction(uf.source(), uf.name(), uf.resolutionType(), uf.children(), uf.analyzed(), newUnresolvedMessage), uf.transformPropertiesOnly(p -> Objects.equals(p, uf.unresolvedMessage()) ? newUnresolvedMessage : p, Object.class)); - assertEquals(new UnresolvedFunction(uf.location(), uf.name(), uf.resolutionType(), uf.children(), + assertEquals(new UnresolvedFunction(uf.source(), uf.name(), uf.resolutionType(), uf.children(), !uf.analyzed(), uf.unresolvedMessage()), uf.transformPropertiesOnly(p -> Objects.equals(p, uf.analyzed()) ? 
!uf.analyzed() : p, Object.class)); @@ -117,7 +115,7 @@ public void testReplaceChildren() { UnresolvedFunction uf = randomUnresolvedFunction(); List newChildren = randomValueOtherThan(uf.children(), UnresolvedFunctionTests::randomFunctionArgs); - assertEquals(new UnresolvedFunction(uf.location(), uf.name(), uf.resolutionType(), newChildren, + assertEquals(new UnresolvedFunction(uf.source(), uf.name(), uf.resolutionType(), newChildren, uf.analyzed(), uf.unresolvedMessage()), uf.replaceChildren(newChildren)); } diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/FunctionTestUtils.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/FunctionTestUtils.java index c57dff15ca99a..3cc1b6d987dc6 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/FunctionTestUtils.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/FunctionTestUtils.java @@ -12,7 +12,7 @@ import java.util.BitSet; import java.util.Iterator; -import static org.elasticsearch.xpack.sql.tree.Location.EMPTY; +import static org.elasticsearch.xpack.sql.tree.Source.EMPTY; public final class FunctionTestUtils { diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/math/BinaryMathProcessorTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/math/BinaryMathProcessorTests.java index 84ca662bebac6..66ab6146ec0eb 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/math/BinaryMathProcessorTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/math/BinaryMathProcessorTests.java @@ -14,7 +14,7 @@ import org.elasticsearch.xpack.sql.expression.gen.processor.ConstantProcessor; import org.elasticsearch.xpack.sql.expression.gen.processor.Processor; -import static org.elasticsearch.xpack.sql.tree.Location.EMPTY; +import static org.elasticsearch.xpack.sql.tree.Source.EMPTY; public class BinaryMathProcessorTests extends AbstractWireSerializingTestCase { public static BinaryMathProcessor randomProcessor() { diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/BinaryStringNumericPipeTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/BinaryStringNumericPipeTests.java index bed19063f0898..e35403eabbafe 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/BinaryStringNumericPipeTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/BinaryStringNumericPipeTests.java @@ -11,7 +11,7 @@ import org.elasticsearch.xpack.sql.expression.gen.pipeline.BinaryPipe; import org.elasticsearch.xpack.sql.expression.gen.pipeline.Pipe; import org.elasticsearch.xpack.sql.tree.AbstractNodeTestCase; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; import java.util.ArrayList; import java.util.List; @@ -21,7 +21,7 @@ import static org.elasticsearch.xpack.sql.expression.Expressions.pipe; import static org.elasticsearch.xpack.sql.expression.function.scalar.FunctionTestUtils.randomIntLiteral; import static org.elasticsearch.xpack.sql.expression.function.scalar.FunctionTestUtils.randomStringLiteral; -import static 
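The testTransform methods being touched here all follow one pattern: call transformPropertiesOnly with a rule that swaps exactly one property, then assert equality against a node rebuilt by hand with that property changed. A stripped-down model of that pattern, under the assumption that the transform visits properties one by one and leaves children alone (MiniNode is invented for illustration):

import java.util.Objects;
import java.util.function.Function;

// Simplified model of the pattern these tests exercise: a node transform
// that visits *properties only* (here: name, message), never children.
final class MiniNode {
    final String name;
    final String message;

    MiniNode(String name, String message) {
        this.name = name;
        this.message = message;
    }

    MiniNode transformPropertiesOnly(Function<Object, Object> rule) {
        // apply the rule to each property and rebuild the node
        return new MiniNode((String) rule.apply(name), (String) rule.apply(message));
    }

    @Override
    public boolean equals(Object o) {
        return o instanceof MiniNode
            && name.equals(((MiniNode) o).name)
            && message.equals(((MiniNode) o).message);
    }

    @Override
    public int hashCode() { return Objects.hash(name, message); }

    public static void main(String[] args) {
        MiniNode n = new MiniNode("DUMMY", "unresolved");
        // swap exactly one property, leave the other untouched --
        // the same shape as the assertEquals(...) calls in the hunks above
        MiniNode renamed = n.transformPropertiesOnly(v -> Objects.equals(v, n.name) ? "OTHER" : v);
        assert renamed.equals(new MiniNode("OTHER", "unresolved"));
        System.out.println("ok");
    }
}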
org.elasticsearch.xpack.sql.tree.LocationTests.randomLocation; +import static org.elasticsearch.xpack.sql.tree.SourceTests.randomSource; public class BinaryStringNumericPipeTests extends AbstractNodeTestCase { @@ -41,22 +41,22 @@ private BinaryStringNumericOperation randomBinaryStringNumericOperation() { public static BinaryStringNumericPipe randomBinaryStringNumericPipe() { List functions = new ArrayList<>(); - functions.add(new Left(randomLocation(), randomStringLiteral(), randomIntLiteral()).makePipe()); - functions.add(new Right(randomLocation(), randomStringLiteral(), randomIntLiteral()).makePipe()); - functions.add(new Repeat(randomLocation(), randomStringLiteral(), randomIntLiteral()).makePipe()); + functions.add(new Left(randomSource(), randomStringLiteral(), randomIntLiteral()).makePipe()); + functions.add(new Right(randomSource(), randomStringLiteral(), randomIntLiteral()).makePipe()); + functions.add(new Repeat(randomSource(), randomStringLiteral(), randomIntLiteral()).makePipe()); return (BinaryStringNumericPipe) randomFrom(functions); } @Override public void testTransform() { - // test transforming only the properties (location, expression, operation), + // test transforming only the properties (source, expression, operation), // skipping the children (the two parameters of the binary function) which are tested separately BinaryStringNumericPipe b1 = randomInstance(); Expression newExpression = randomValueOtherThan(b1.expression(), () -> randomBinaryStringNumericExpression()); BinaryStringNumericPipe newB = new BinaryStringNumericPipe( - b1.location(), + b1.source(), newExpression, b1.left(), b1.right(), @@ -66,7 +66,7 @@ public void testTransform() { BinaryStringNumericPipe b2 = randomInstance(); BinaryStringNumericOperation newOp = randomValueOtherThan(b2.operation(), () -> randomBinaryStringNumericOperation()); newB = new BinaryStringNumericPipe( - b2.location(), + b2.source(), b2.expression(), b2.left(), b2.right(), @@ -75,7 +75,7 @@ public void testTransform() { b2.transformPropertiesOnly(v -> Objects.equals(v, b2.operation()) ? newOp : v, BinaryStringNumericOperation.class)); BinaryStringNumericPipe b3 = randomInstance(); - Location newLoc = randomValueOtherThan(b3.location(), () -> randomLocation()); + Source newLoc = randomValueOtherThan(b3.source(), () -> randomSource()); newB = new BinaryStringNumericPipe( newLoc, b3.expression(), @@ -83,7 +83,7 @@ public void testTransform() { b3.right(), b3.operation()); assertEquals(newB, - b3.transformPropertiesOnly(v -> Objects.equals(v, b3.location()) ? newLoc : v, Location.class)); + b3.transformPropertiesOnly(v -> Objects.equals(v, b3.source()) ? 
newLoc : v, Source.class)); } @Override @@ -92,23 +92,23 @@ public void testReplaceChildren() { Pipe newLeft = pipe(((Expression) randomValueOtherThan(b.left(), () -> randomStringLiteral()))); Pipe newRight = pipe(((Expression) randomValueOtherThan(b.right(), () -> randomIntLiteral()))); BinaryStringNumericPipe newB = - new BinaryStringNumericPipe(b.location(), b.expression(), b.left(), b.right(), b.operation()); + new BinaryStringNumericPipe(b.source(), b.expression(), b.left(), b.right(), b.operation()); BinaryPipe transformed = newB.replaceChildren(newLeft, b.right()); assertEquals(transformed.left(), newLeft); - assertEquals(transformed.location(), b.location()); + assertEquals(transformed.source(), b.source()); assertEquals(transformed.expression(), b.expression()); assertEquals(transformed.right(), b.right()); transformed = newB.replaceChildren(b.left(), newRight); assertEquals(transformed.left(), b.left()); - assertEquals(transformed.location(), b.location()); + assertEquals(transformed.source(), b.source()); assertEquals(transformed.expression(), b.expression()); assertEquals(transformed.right(), newRight); transformed = newB.replaceChildren(newLeft, newRight); assertEquals(transformed.left(), newLeft); - assertEquals(transformed.location(), b.location()); + assertEquals(transformed.source(), b.source()); assertEquals(transformed.expression(), b.expression()); assertEquals(transformed.right(), newRight); } @@ -116,17 +116,17 @@ public void testReplaceChildren() { @Override protected BinaryStringNumericPipe mutate(BinaryStringNumericPipe instance) { List> randoms = new ArrayList<>(); - randoms.add(f -> new BinaryStringNumericPipe(f.location(), + randoms.add(f -> new BinaryStringNumericPipe(f.source(), f.expression(), pipe(((Expression) randomValueOtherThan(f.left(), () -> randomStringLiteral()))), f.right(), f.operation())); - randoms.add(f -> new BinaryStringNumericPipe(f.location(), + randoms.add(f -> new BinaryStringNumericPipe(f.source(), f.expression(), f.left(), pipe(((Expression) randomValueOtherThan(f.right(), () -> randomIntLiteral()))), f.operation())); - randoms.add(f -> new BinaryStringNumericPipe(f.location(), + randoms.add(f -> new BinaryStringNumericPipe(f.source(), f.expression(), pipe(((Expression) randomValueOtherThan(f.left(), () -> randomStringLiteral()))), pipe(((Expression) randomValueOtherThan(f.right(), () -> randomIntLiteral()))), @@ -137,7 +137,7 @@ protected BinaryStringNumericPipe mutate(BinaryStringNumericPipe instance) { @Override protected BinaryStringNumericPipe copy(BinaryStringNumericPipe instance) { - return new BinaryStringNumericPipe(instance.location(), + return new BinaryStringNumericPipe(instance.source(), instance.expression(), instance.left(), instance.right(), diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/BinaryStringNumericProcessorTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/BinaryStringNumericProcessorTests.java index 6712df0c8f65c..42e6c4694255f 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/BinaryStringNumericProcessorTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/BinaryStringNumericProcessorTests.java @@ -15,7 +15,7 @@ import org.elasticsearch.xpack.sql.expression.gen.processor.ConstantProcessor; import static org.elasticsearch.xpack.sql.expression.function.scalar.FunctionTestUtils.l; -import 
static org.elasticsearch.xpack.sql.tree.Location.EMPTY; +import static org.elasticsearch.xpack.sql.tree.Source.EMPTY; public class BinaryStringNumericProcessorTests extends AbstractWireSerializingTestCase { diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/BinaryStringStringPipeTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/BinaryStringStringPipeTests.java index 65a52b2ee0403..ef878fce39550 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/BinaryStringStringPipeTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/BinaryStringStringPipeTests.java @@ -10,7 +10,7 @@ import org.elasticsearch.xpack.sql.expression.gen.pipeline.BinaryPipe; import org.elasticsearch.xpack.sql.expression.gen.pipeline.Pipe; import org.elasticsearch.xpack.sql.tree.AbstractNodeTestCase; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; import java.util.ArrayList; import java.util.List; @@ -19,7 +19,7 @@ import static org.elasticsearch.xpack.sql.expression.Expressions.pipe; import static org.elasticsearch.xpack.sql.expression.function.scalar.FunctionTestUtils.randomStringLiteral; -import static org.elasticsearch.xpack.sql.tree.LocationTests.randomLocation; +import static org.elasticsearch.xpack.sql.tree.SourceTests.randomSource; public class BinaryStringStringPipeTests extends AbstractNodeTestCase { @@ -36,7 +36,7 @@ private Expression randomBinaryStringStringExpression() { public static BinaryStringStringPipe randomBinaryStringStringPipe() { List functions = new ArrayList<>(); functions.add(new Position( - randomLocation(), + randomSource(), randomStringLiteral(), randomStringLiteral() ).makePipe()); @@ -46,12 +46,12 @@ public static BinaryStringStringPipe randomBinaryStringStringPipe() { @Override public void testTransform() { - // test transforming only the properties (location, expression), + // test transforming only the properties (source, expression), // skipping the children (the two parameters of the binary function) which are tested separately BinaryStringStringPipe b1 = randomInstance(); Expression newExpression = randomValueOtherThan(b1.expression(), () -> randomBinaryStringStringExpression()); BinaryStringStringPipe newB = new BinaryStringStringPipe( - b1.location(), + b1.source(), newExpression, b1.left(), b1.right(), @@ -59,7 +59,7 @@ public void testTransform() { assertEquals(newB, b1.transformPropertiesOnly(v -> Objects.equals(v, b1.expression()) ? newExpression : v, Expression.class)); BinaryStringStringPipe b2 = randomInstance(); - Location newLoc = randomValueOtherThan(b2.location(), () -> randomLocation()); + Source newLoc = randomValueOtherThan(b2.source(), () -> randomSource()); newB = new BinaryStringStringPipe( newLoc, b2.expression(), @@ -67,7 +67,7 @@ public void testTransform() { b2.right(), b2.operation()); assertEquals(newB, - b2.transformPropertiesOnly(v -> Objects.equals(v, b2.location()) ? newLoc : v, Location.class)); + b2.transformPropertiesOnly(v -> Objects.equals(v, b2.source()) ? 
newLoc : v, Source.class)); } @Override @@ -76,23 +76,23 @@ public void testReplaceChildren() { Pipe newLeft = pipe(((Expression) randomValueOtherThan(b.left(), () -> randomStringLiteral()))); Pipe newRight = pipe(((Expression) randomValueOtherThan(b.right(), () -> randomStringLiteral()))); BinaryStringStringPipe newB = - new BinaryStringStringPipe(b.location(), b.expression(), b.left(), b.right(), b.operation()); + new BinaryStringStringPipe(b.source(), b.expression(), b.left(), b.right(), b.operation()); BinaryPipe transformed = newB.replaceChildren(newLeft, b.right()); assertEquals(transformed.left(), newLeft); - assertEquals(transformed.location(), b.location()); + assertEquals(transformed.source(), b.source()); assertEquals(transformed.expression(), b.expression()); assertEquals(transformed.right(), b.right()); transformed = newB.replaceChildren(b.left(), newRight); assertEquals(transformed.left(), b.left()); - assertEquals(transformed.location(), b.location()); + assertEquals(transformed.source(), b.source()); assertEquals(transformed.expression(), b.expression()); assertEquals(transformed.right(), newRight); transformed = newB.replaceChildren(newLeft, newRight); assertEquals(transformed.left(), newLeft); - assertEquals(transformed.location(), b.location()); + assertEquals(transformed.source(), b.source()); assertEquals(transformed.expression(), b.expression()); assertEquals(transformed.right(), newRight); } @@ -100,17 +100,17 @@ public void testReplaceChildren() { @Override protected BinaryStringStringPipe mutate(BinaryStringStringPipe instance) { List> randoms = new ArrayList<>(); - randoms.add(f -> new BinaryStringStringPipe(f.location(), + randoms.add(f -> new BinaryStringStringPipe(f.source(), f.expression(), pipe(((Expression) randomValueOtherThan(f.left(), () -> randomStringLiteral()))), f.right(), f.operation())); - randoms.add(f -> new BinaryStringStringPipe(f.location(), + randoms.add(f -> new BinaryStringStringPipe(f.source(), f.expression(), f.left(), pipe(((Expression) randomValueOtherThan(f.right(), () -> randomStringLiteral()))), f.operation())); - randoms.add(f -> new BinaryStringStringPipe(f.location(), + randoms.add(f -> new BinaryStringStringPipe(f.source(), f.expression(), pipe(((Expression) randomValueOtherThan(f.left(), () -> randomStringLiteral()))), pipe(((Expression) randomValueOtherThan(f.right(), () -> randomStringLiteral()))), @@ -121,7 +121,7 @@ protected BinaryStringStringPipe mutate(BinaryStringStringPipe instance) { @Override protected BinaryStringStringPipe copy(BinaryStringStringPipe instance) { - return new BinaryStringStringPipe(instance.location(), + return new BinaryStringStringPipe(instance.source(), instance.expression(), instance.left(), instance.right(), diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/BinaryStringStringProcessorTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/BinaryStringStringProcessorTests.java index bc5c2f57a7de4..3422d5bddefc6 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/BinaryStringStringProcessorTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/BinaryStringStringProcessorTests.java @@ -14,7 +14,7 @@ import org.elasticsearch.xpack.sql.expression.function.scalar.string.BinaryStringStringProcessor.BinaryStringStringOperation; import 
org.elasticsearch.xpack.sql.expression.gen.processor.ConstantProcessor; -import static org.elasticsearch.xpack.sql.tree.Location.EMPTY; +import static org.elasticsearch.xpack.sql.tree.Source.EMPTY; import static org.elasticsearch.xpack.sql.expression.function.scalar.FunctionTestUtils.l; public class BinaryStringStringProcessorTests extends AbstractWireSerializingTestCase { diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/ConcatFunctionPipeTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/ConcatFunctionPipeTests.java index b5e18f9f9f841..931fd11b0340b 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/ConcatFunctionPipeTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/ConcatFunctionPipeTests.java @@ -10,7 +10,7 @@ import org.elasticsearch.xpack.sql.expression.gen.pipeline.BinaryPipe; import org.elasticsearch.xpack.sql.expression.gen.pipeline.Pipe; import org.elasticsearch.xpack.sql.tree.AbstractNodeTestCase; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; import java.util.ArrayList; import java.util.List; @@ -19,7 +19,7 @@ import static org.elasticsearch.xpack.sql.expression.Expressions.pipe; import static org.elasticsearch.xpack.sql.expression.function.scalar.FunctionTestUtils.randomStringLiteral; -import static org.elasticsearch.xpack.sql.tree.LocationTests.randomLocation; +import static org.elasticsearch.xpack.sql.tree.SourceTests.randomSource; public class ConcatFunctionPipeTests extends AbstractNodeTestCase { @@ -34,7 +34,7 @@ private Expression randomConcatFunctionExpression() { public static ConcatFunctionPipe randomConcatFunctionPipe() { return (ConcatFunctionPipe) new Concat( - randomLocation(), + randomSource(), randomStringLiteral(), randomStringLiteral()) .makePipe(); @@ -42,27 +42,27 @@ public static ConcatFunctionPipe randomConcatFunctionPipe() { @Override public void testTransform() { - // test transforming only the properties (location, expression), + // test transforming only the properties (source, expression), // skipping the children (the two parameters of the binary function) which are tested separately ConcatFunctionPipe b1 = randomInstance(); Expression newExpression = randomValueOtherThan(b1.expression(), () -> randomConcatFunctionExpression()); ConcatFunctionPipe newB = new ConcatFunctionPipe( - b1.location(), + b1.source(), newExpression, b1.left(), b1.right()); assertEquals(newB, b1.transformPropertiesOnly(v -> Objects.equals(v, b1.expression()) ? newExpression : v, Expression.class)); ConcatFunctionPipe b2 = randomInstance(); - Location newLoc = randomValueOtherThan(b2.location(), () -> randomLocation()); + Source newLoc = randomValueOtherThan(b2.source(), () -> randomSource()); newB = new ConcatFunctionPipe( newLoc, b2.expression(), b2.left(), b2.right()); assertEquals(newB, - b2.transformPropertiesOnly(v -> Objects.equals(v, b2.location()) ? newLoc : v, Location.class)); + b2.transformPropertiesOnly(v -> Objects.equals(v, b2.source()) ? 
newLoc : v, Source.class)); } @Override @@ -71,23 +71,23 @@ public void testReplaceChildren() { Pipe newLeft = pipe(((Expression) randomValueOtherThan(b.left(), () -> randomStringLiteral()))); Pipe newRight = pipe(((Expression) randomValueOtherThan(b.right(), () -> randomStringLiteral()))); ConcatFunctionPipe newB = - new ConcatFunctionPipe(b.location(), b.expression(), b.left(), b.right()); + new ConcatFunctionPipe(b.source(), b.expression(), b.left(), b.right()); BinaryPipe transformed = newB.replaceChildren(newLeft, b.right()); assertEquals(transformed.left(), newLeft); - assertEquals(transformed.location(), b.location()); + assertEquals(transformed.source(), b.source()); assertEquals(transformed.expression(), b.expression()); assertEquals(transformed.right(), b.right()); transformed = newB.replaceChildren(b.left(), newRight); assertEquals(transformed.left(), b.left()); - assertEquals(transformed.location(), b.location()); + assertEquals(transformed.source(), b.source()); assertEquals(transformed.expression(), b.expression()); assertEquals(transformed.right(), newRight); transformed = newB.replaceChildren(newLeft, newRight); assertEquals(transformed.left(), newLeft); - assertEquals(transformed.location(), b.location()); + assertEquals(transformed.source(), b.source()); assertEquals(transformed.expression(), b.expression()); assertEquals(transformed.right(), newRight); } @@ -95,15 +95,15 @@ public void testReplaceChildren() { @Override protected ConcatFunctionPipe mutate(ConcatFunctionPipe instance) { List> randoms = new ArrayList<>(); - randoms.add(f -> new ConcatFunctionPipe(f.location(), + randoms.add(f -> new ConcatFunctionPipe(f.source(), f.expression(), pipe(((Expression) randomValueOtherThan(f.left(), () -> randomStringLiteral()))), f.right())); - randoms.add(f -> new ConcatFunctionPipe(f.location(), + randoms.add(f -> new ConcatFunctionPipe(f.source(), f.expression(), f.left(), pipe(((Expression) randomValueOtherThan(f.right(), () -> randomStringLiteral()))))); - randoms.add(f -> new ConcatFunctionPipe(f.location(), + randoms.add(f -> new ConcatFunctionPipe(f.source(), f.expression(), pipe(((Expression) randomValueOtherThan(f.left(), () -> randomStringLiteral()))), pipe(((Expression) randomValueOtherThan(f.right(), () -> randomStringLiteral()))))); @@ -113,7 +113,7 @@ protected ConcatFunctionPipe mutate(ConcatFunctionPipe instance) { @Override protected ConcatFunctionPipe copy(ConcatFunctionPipe instance) { - return new ConcatFunctionPipe(instance.location(), + return new ConcatFunctionPipe(instance.source(), instance.expression(), instance.left(), instance.right()); diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/ConcatProcessorTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/ConcatProcessorTests.java index 6e1bf94e643bd..891428587d19c 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/ConcatProcessorTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/ConcatProcessorTests.java @@ -14,7 +14,7 @@ import org.elasticsearch.xpack.sql.expression.gen.processor.ConstantProcessor; import static org.elasticsearch.xpack.sql.expression.function.scalar.FunctionTestUtils.l; -import static org.elasticsearch.xpack.sql.tree.Location.EMPTY; +import static org.elasticsearch.xpack.sql.tree.Source.EMPTY; public class ConcatProcessorTests extends 
AbstractWireSerializingTestCase { diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/InsertFunctionPipeTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/InsertFunctionPipeTests.java index c4ee5fd9ad764..fbb8c33061d93 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/InsertFunctionPipeTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/InsertFunctionPipeTests.java @@ -10,7 +10,7 @@ import org.elasticsearch.xpack.sql.expression.function.scalar.FunctionTestUtils.Combinations; import org.elasticsearch.xpack.sql.expression.gen.pipeline.Pipe; import org.elasticsearch.xpack.sql.tree.AbstractNodeTestCase; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; import java.util.ArrayList; import java.util.BitSet; @@ -21,7 +21,7 @@ import static org.elasticsearch.xpack.sql.expression.Expressions.pipe; import static org.elasticsearch.xpack.sql.expression.function.scalar.FunctionTestUtils.randomIntLiteral; import static org.elasticsearch.xpack.sql.expression.function.scalar.FunctionTestUtils.randomStringLiteral; -import static org.elasticsearch.xpack.sql.tree.LocationTests.randomLocation; +import static org.elasticsearch.xpack.sql.tree.SourceTests.randomSource; public class InsertFunctionPipeTests extends AbstractNodeTestCase { @@ -35,7 +35,7 @@ private Expression randomInsertFunctionExpression() { } public static InsertFunctionPipe randomInsertFunctionPipe() { - return (InsertFunctionPipe) (new Insert(randomLocation(), + return (InsertFunctionPipe) (new Insert(randomSource(), randomStringLiteral(), randomIntLiteral(), randomIntLiteral(), @@ -45,30 +45,30 @@ public static InsertFunctionPipe randomInsertFunctionPipe() { @Override public void testTransform() { - // test transforming only the properties (location, expression), + // test transforming only the properties (source, expression), // skipping the children (the two parameters of the binary function) which are tested separately InsertFunctionPipe b1 = randomInstance(); Expression newExpression = randomValueOtherThan(b1.expression(), () -> randomInsertFunctionExpression()); InsertFunctionPipe newB = new InsertFunctionPipe( - b1.location(), - newExpression, b1.source(), + newExpression, + b1.src(), b1.start(), b1.length(), b1.replacement()); assertEquals(newB, b1.transformPropertiesOnly(v -> Objects.equals(v, b1.expression()) ? newExpression : v, Expression.class)); InsertFunctionPipe b2 = randomInstance(); - Location newLoc = randomValueOtherThan(b2.location(), () -> randomLocation()); + Source newLoc = randomValueOtherThan(b2.source(), () -> randomSource()); newB = new InsertFunctionPipe( newLoc, b2.expression(), - b2.source(), + b2.src(), b2.start(), b2.length(), b2.replacement()); assertEquals(newB, - b2.transformPropertiesOnly(v -> Objects.equals(v, b2.location()) ? newLoc : v, Location.class)); + b2.transformPropertiesOnly(v -> Objects.equals(v, b2.source()) ? 
newLoc : v, Source.class)); } @Override @@ -79,23 +79,23 @@ public void testReplaceChildren() { Pipe newLength = pipe(((Expression) randomValueOtherThan(b.length(), () -> randomIntLiteral()))); Pipe newR = pipe(((Expression) randomValueOtherThan(b.replacement(), () -> randomStringLiteral()))); InsertFunctionPipe newB = - new InsertFunctionPipe(b.location(), b.expression(), b.source(), b.start(), b.length(), b.replacement()); + new InsertFunctionPipe(b.source(), b.expression(), b.src(), b.start(), b.length(), b.replacement()); InsertFunctionPipe transformed = null; // generate all the combinations of possible children modifications and test all of them for(int i = 1; i < 5; i++) { for(BitSet comb : new Combinations(4, i)) { transformed = (InsertFunctionPipe) newB.replaceChildren( - comb.get(0) ? newSource : b.source(), + comb.get(0) ? newSource : b.src(), comb.get(1) ? newStart : b.start(), comb.get(2) ? newLength : b.length(), comb.get(3) ? newR : b.replacement()); - assertEquals(transformed.source(), comb.get(0) ? newSource : b.source()); + assertEquals(transformed.src(), comb.get(0) ? newSource : b.src()); assertEquals(transformed.start(), comb.get(1) ? newStart : b.start()); assertEquals(transformed.length(), comb.get(2) ? newLength : b.length()); assertEquals(transformed.replacement(), comb.get(3) ? newR : b.replacement()); assertEquals(transformed.expression(), b.expression()); - assertEquals(transformed.location(), b.location()); + assertEquals(transformed.source(), b.source()); } } } @@ -107,10 +107,10 @@ protected InsertFunctionPipe mutate(InsertFunctionPipe instance) { for(int i = 1; i < 5; i++) { for(BitSet comb : new Combinations(4, i)) { randoms.add(f -> new InsertFunctionPipe( - f.location(), + f.source(), f.expression(), comb.get(0) ? pipe(((Expression) randomValueOtherThan(f.source(), - () -> randomStringLiteral()))) : f.source(), + () -> randomStringLiteral()))) : f.src(), comb.get(1) ? pipe(((Expression) randomValueOtherThan(f.start(), () -> randomIntLiteral()))) : f.start(), comb.get(2) ? 
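The mutate and replaceChildren tests above drive a FunctionTestUtils.Combinations helper through every non-empty subset of child positions, toggling each selected child to a fresh random value. The diff only shows call sites; below is a plausible stand-in implementation — an assumption, the real utility may differ — that yields each size-k subset of {0..n-1} as a BitSet:

import java.util.ArrayList;
import java.util.BitSet;
import java.util.List;

// Hypothetical, simplified stand-in for FunctionTestUtils.Combinations:
// enumerate every size-k subset of {0..n-1} as a BitSet, so a test can
// decide per child whether to swap it for a new random value.
final class CombinationsSketch {

    private CombinationsSketch() {}

    static List<BitSet> of(int n, int k) {
        List<BitSet> result = new ArrayList<>();
        collect(new BitSet(n), 0, n, k, result);
        return result;
    }

    private static void collect(BitSet current, int next, int n, int k, List<BitSet> out) {
        if (current.cardinality() == k) {
            out.add((BitSet) current.clone());
            return;
        }
        if (next >= n) {
            return;
        }
        // branch 1: include index `next`
        current.set(next);
        collect(current, next + 1, n, k, out);
        current.clear(next);
        // branch 2: skip index `next`
        collect(current, next + 1, n, k, out);
    }

    public static void main(String[] args) {
        // 4 children, subset sizes 1..4 -> 4 + 6 + 4 + 1 = 15 mutations,
        // matching the nested loops in the InsertFunctionPipe hunk above.
        int total = 0;
        for (int i = 1; i < 5; i++) {
            total += of(4, i).size();
        }
        System.out.println(total); // 15
    }
}

A BitSet is a convenient carrier here because comb.get(j) gives a one-line, allocation-free check of whether child j should be replaced in this combination.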
pipe(((Expression) randomValueOtherThan(f.length(), @@ -125,11 +125,11 @@ protected InsertFunctionPipe mutate(InsertFunctionPipe instance) { @Override protected InsertFunctionPipe copy(InsertFunctionPipe instance) { - return new InsertFunctionPipe(instance.location(), + return new InsertFunctionPipe(instance.source(), instance.expression(), - instance.source(), + instance.src(), instance.start(), instance.length(), instance.replacement()); } -} +} \ No newline at end of file diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/InsertProcessorTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/InsertProcessorTests.java index 1cafe1cfbd989..b9ffdf59e82fd 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/InsertProcessorTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/InsertProcessorTests.java @@ -13,7 +13,7 @@ import org.elasticsearch.xpack.sql.expression.function.scalar.Processors; import org.elasticsearch.xpack.sql.expression.gen.processor.ConstantProcessor; -import static org.elasticsearch.xpack.sql.tree.Location.EMPTY; +import static org.elasticsearch.xpack.sql.tree.Source.EMPTY; import static org.elasticsearch.xpack.sql.expression.function.scalar.FunctionTestUtils.l; public class InsertProcessorTests extends AbstractWireSerializingTestCase { diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/LocateFunctionPipeTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/LocateFunctionPipeTests.java index 25b361ab3811f..95c196c732bd0 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/LocateFunctionPipeTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/LocateFunctionPipeTests.java @@ -10,7 +10,7 @@ import org.elasticsearch.xpack.sql.expression.function.scalar.FunctionTestUtils.Combinations; import org.elasticsearch.xpack.sql.expression.gen.pipeline.Pipe; import org.elasticsearch.xpack.sql.tree.AbstractNodeTestCase; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; import java.util.ArrayList; import java.util.BitSet; @@ -21,7 +21,7 @@ import static org.elasticsearch.xpack.sql.expression.Expressions.pipe; import static org.elasticsearch.xpack.sql.expression.function.scalar.FunctionTestUtils.randomIntLiteral; import static org.elasticsearch.xpack.sql.expression.function.scalar.FunctionTestUtils.randomStringLiteral; -import static org.elasticsearch.xpack.sql.tree.LocationTests.randomLocation; +import static org.elasticsearch.xpack.sql.tree.SourceTests.randomSource; public class LocateFunctionPipeTests extends AbstractNodeTestCase { @@ -35,7 +35,7 @@ private Expression randomLocateFunctionExpression() { } public static LocateFunctionPipe randomLocateFunctionPipe() { - return (LocateFunctionPipe) (new Locate(randomLocation(), + return (LocateFunctionPipe) (new Locate(randomSource(), randomStringLiteral(), randomStringLiteral(), randomFrom(true, false) ? 
randomIntLiteral() : null) @@ -44,30 +44,30 @@ public static LocateFunctionPipe randomLocateFunctionPipe() { @Override public void testTransform() { - // test transforming only the properties (location, expression), + // test transforming only the properties (source, expression), // skipping the children (the two parameters of the binary function) which are tested separately LocateFunctionPipe b1 = randomInstance(); Expression newExpression = randomValueOtherThan(b1.expression(), () -> randomLocateFunctionExpression()); LocateFunctionPipe newB = new LocateFunctionPipe( - b1.location(), + b1.source(), newExpression, b1.pattern(), - b1.source(), + b1.src(), b1.start()); assertEquals(newB, b1.transformPropertiesOnly(v -> Objects.equals(v, b1.expression()) ? newExpression : v, Expression.class)); LocateFunctionPipe b2 = randomInstance(); - Location newLoc = randomValueOtherThan(b2.location(), () -> randomLocation()); + Source newLoc = randomValueOtherThan(b2.source(), () -> randomSource()); newB = new LocateFunctionPipe( newLoc, b2.expression(), b2.pattern(), - b2.source(), + b2.src(), b2.start()); assertEquals(newB, - b2.transformPropertiesOnly(v -> Objects.equals(v, b2.location()) ? newLoc : v, Location.class)); + b2.transformPropertiesOnly(v -> Objects.equals(v, b2.source()) ? newLoc : v, Source.class)); } @Override @@ -78,7 +78,7 @@ public void testReplaceChildren() { Pipe newStart; LocateFunctionPipe newB = new LocateFunctionPipe( - b.location(), b.expression(), b.pattern(), b.source(), b.start()); + b.source(), b.expression(), b.pattern(), b.src(), b.start()); newStart = pipe(((Expression) randomValueOtherThan(b.start(), () -> randomIntLiteral()))); LocateFunctionPipe transformed = null; @@ -87,14 +87,14 @@ public void testReplaceChildren() { for(BitSet comb : new Combinations(3, i)) { transformed = (LocateFunctionPipe) newB.replaceChildren( comb.get(0) ? newPattern : b.pattern(), - comb.get(1) ? newSource : b.source(), + comb.get(1) ? newSource : b.src(), comb.get(2) ? newStart : b.start()); assertEquals(transformed.pattern(), comb.get(0) ? newPattern : b.pattern()); - assertEquals(transformed.source(), comb.get(1) ? newSource : b.source()); + assertEquals(transformed.src(), comb.get(1) ? newSource : b.src()); assertEquals(transformed.start(), comb.get(2) ? newStart : b.start()); assertEquals(transformed.expression(), b.expression()); - assertEquals(transformed.location(), b.location()); + assertEquals(transformed.source(), b.source()); } } } @@ -105,24 +105,24 @@ protected LocateFunctionPipe mutate(LocateFunctionPipe instance) { if (instance.start() == null) { for(int i = 1; i < 3; i++) { for(BitSet comb : new Combinations(2, i)) { - randoms.add(f -> new LocateFunctionPipe(f.location(), + randoms.add(f -> new LocateFunctionPipe(f.source(), f.expression(), comb.get(0) ? pipe(((Expression) randomValueOtherThan(f.pattern(), () -> randomStringLiteral()))) : f.pattern(), - comb.get(1) ? pipe(((Expression) randomValueOtherThan(f.source(), - () -> randomStringLiteral()))) : f.source(), + comb.get(1) ? pipe(((Expression) randomValueOtherThan(f.src(), + () -> randomStringLiteral()))) : f.src(), null)); } } } else { for(int i = 1; i < 4; i++) { for(BitSet comb : new Combinations(3, i)) { - randoms.add(f -> new LocateFunctionPipe(f.location(), + randoms.add(f -> new LocateFunctionPipe(f.source(), f.expression(), comb.get(0) ? pipe(((Expression) randomValueOtherThan(f.pattern(), () -> randomStringLiteral()))) : f.pattern(), - comb.get(1) ? 
pipe(((Expression) randomValueOtherThan(f.source(), - () -> randomStringLiteral()))) : f.source(), + comb.get(1) ? pipe(((Expression) randomValueOtherThan(f.src(), + () -> randomStringLiteral()))) : f.src(), comb.get(2) ? pipe(((Expression) randomValueOtherThan(f.start(), () -> randomIntLiteral()))) : f.start())); } @@ -134,10 +134,10 @@ protected LocateFunctionPipe mutate(LocateFunctionPipe instance) { @Override protected LocateFunctionPipe copy(LocateFunctionPipe instance) { - return new LocateFunctionPipe(instance.location(), + return new LocateFunctionPipe(instance.source(), instance.expression(), instance.pattern(), - instance.source(), + instance.src(), instance.start()); } -} +} \ No newline at end of file diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/LocateProcessorTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/LocateProcessorTests.java index f000ab813ec95..7a43dd8bf3824 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/LocateProcessorTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/LocateProcessorTests.java @@ -13,7 +13,7 @@ import org.elasticsearch.xpack.sql.expression.function.scalar.Processors; import org.elasticsearch.xpack.sql.expression.gen.processor.ConstantProcessor; -import static org.elasticsearch.xpack.sql.tree.Location.EMPTY; +import static org.elasticsearch.xpack.sql.tree.Source.EMPTY; import static org.elasticsearch.xpack.sql.expression.function.scalar.FunctionTestUtils.l; public class LocateProcessorTests extends AbstractWireSerializingTestCase { diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/ReplaceFunctionPipeTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/ReplaceFunctionPipeTests.java index bdc45f50ed216..3c352669a0637 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/ReplaceFunctionPipeTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/ReplaceFunctionPipeTests.java @@ -10,7 +10,7 @@ import org.elasticsearch.xpack.sql.expression.function.scalar.FunctionTestUtils.Combinations; import org.elasticsearch.xpack.sql.expression.gen.pipeline.Pipe; import org.elasticsearch.xpack.sql.tree.AbstractNodeTestCase; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; import java.util.ArrayList; import java.util.BitSet; @@ -20,7 +20,7 @@ import static org.elasticsearch.xpack.sql.expression.Expressions.pipe; import static org.elasticsearch.xpack.sql.expression.function.scalar.FunctionTestUtils.randomStringLiteral; -import static org.elasticsearch.xpack.sql.tree.LocationTests.randomLocation; +import static org.elasticsearch.xpack.sql.tree.SourceTests.randomSource; public class ReplaceFunctionPipeTests extends AbstractNodeTestCase { @@ -29,13 +29,13 @@ protected ReplaceFunctionPipe randomInstance() { return randomReplaceFunctionPipe(); } - private Expression randomReplaceFunctionExpression() { + private Expression randomReplaceFunctionExpression() { return randomReplaceFunctionPipe().expression(); } public static ReplaceFunctionPipe randomReplaceFunctionPipe() { - return (ReplaceFunctionPipe) (new Replace(randomLocation(), - randomStringLiteral(), + return 
(ReplaceFunctionPipe) (new Replace(randomSource(), + randomStringLiteral(), randomStringLiteral(), randomStringLiteral()) .makePipe()); @@ -43,29 +43,29 @@ public static ReplaceFunctionPipe randomReplaceFunctionPipe() { @Override public void testTransform() { - // test transforming only the properties (location, expression), + // test transforming only the properties (source, expression), // skipping the children (the two parameters of the binary function) which are tested separately ReplaceFunctionPipe b1 = randomInstance(); Expression newExpression = randomValueOtherThan(b1.expression(), () -> randomReplaceFunctionExpression()); ReplaceFunctionPipe newB = new ReplaceFunctionPipe( - b1.location(), + b1.source(), newExpression, - b1.source(), + b1.src(), b1.pattern(), b1.replacement()); assertEquals(newB, b1.transformPropertiesOnly(v -> Objects.equals(v, b1.expression()) ? newExpression : v, Expression.class)); ReplaceFunctionPipe b2 = randomInstance(); - Location newLoc = randomValueOtherThan(b2.location(), () -> randomLocation()); + Source newLoc = randomValueOtherThan(b2.source(), () -> randomSource()); newB = new ReplaceFunctionPipe( - newLoc, + newLoc, b2.expression(), - b2.source(), + b2.src(), b2.pattern(), b2.replacement()); - assertEquals(newB, - b2.transformPropertiesOnly(v -> Objects.equals(v, b2.location()) ? newLoc : v, Location.class)); + assertEquals(newB, + b2.transformPropertiesOnly(v -> Objects.equals(v, b2.source()) ? newLoc : v, Source.class)); } @Override @@ -74,23 +74,23 @@ public void testReplaceChildren() { Pipe newSource = pipe(((Expression) randomValueOtherThan(b.source(), () -> randomStringLiteral()))); Pipe newPattern = pipe(((Expression) randomValueOtherThan(b.pattern(), () -> randomStringLiteral()))); Pipe newR = pipe(((Expression) randomValueOtherThan(b.replacement(), () -> randomStringLiteral()))); - ReplaceFunctionPipe newB = - new ReplaceFunctionPipe(b.location(), b.expression(), b.source(), b.pattern(), b.replacement()); + ReplaceFunctionPipe newB = + new ReplaceFunctionPipe(b.source(), b.expression(), b.src(), b.pattern(), b.replacement()); ReplaceFunctionPipe transformed = null; // generate all the combinations of possible children modifications and test all of them for(int i = 1; i < 4; i++) { for(BitSet comb : new Combinations(3, i)) { transformed = (ReplaceFunctionPipe) newB.replaceChildren( - comb.get(0) ? newSource : b.source(), + comb.get(0) ? newSource : b.src(), comb.get(1) ? newPattern : b.pattern(), comb.get(2) ? newR : b.replacement()); - assertEquals(transformed.source(), comb.get(0) ? newSource : b.source()); + assertEquals(transformed.src(), comb.get(0) ? newSource : b.src()); assertEquals(transformed.pattern(), comb.get(1) ? newPattern : b.pattern()); assertEquals(transformed.replacement(), comb.get(2) ? newR : b.replacement()); assertEquals(transformed.expression(), b.expression()); - assertEquals(transformed.location(), b.location()); + assertEquals(transformed.source(), b.source()); } } } @@ -101,10 +101,10 @@ protected ReplaceFunctionPipe mutate(ReplaceFunctionPipe instance) { for(int i = 1; i < 4; i++) { for(BitSet comb : new Combinations(3, i)) { - randoms.add(f -> new ReplaceFunctionPipe(f.location(), - f.expression(), - comb.get(0) ? pipe(((Expression) randomValueOtherThan(f.source(), - () -> randomStringLiteral()))) : f.source(), + randoms.add(f -> new ReplaceFunctionPipe(f.source(), + f.expression(), + comb.get(0) ? pipe(((Expression) randomValueOtherThan(f.src(), + () -> randomStringLiteral()))) : f.src(), comb.get(1) ? 
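Note the second rename these string-pipe hunks carry: once the tree accessor is called source(), the pipes' pre-existing source() accessor — the input string of INSERT/LOCATE/REPLACE/SUBSTRING — becomes src(), presumably to avoid the name collision. A tiny compilable sketch of the resolved clash, with stub types and a field layout invented for illustration:

// Minimal stubs so the sketch compiles on its own; the real types live in
// org.elasticsearch.xpack.sql.tree and ...expression.gen.pipeline.
final class Source { /* position of the node in the query text */ }
interface Pipe { /* one step of a processing pipeline */ }

// Once source() means "tree position", the string-input argument
// (formerly also source()) is exposed as src() instead.
class ReplaceFunctionPipeSketch {
    private final Source source; // where the REPLACE call sits in the query
    private final Pipe src;      // the input string REPLACE operates on

    ReplaceFunctionPipeSketch(Source source, Pipe src) {
        this.source = source;
        this.src = src;
    }

    Source source() { return source; }
    Pipe src() { return src; }
}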
pipe(((Expression) randomValueOtherThan(f.pattern(), () -> randomStringLiteral()))) : f.pattern(), comb.get(2) ? pipe(((Expression) randomValueOtherThan(f.replacement(), @@ -117,10 +117,10 @@ protected ReplaceFunctionPipe mutate(ReplaceFunctionPipe instance) { @Override protected ReplaceFunctionPipe copy(ReplaceFunctionPipe instance) { - return new ReplaceFunctionPipe(instance.location(), + return new ReplaceFunctionPipe(instance.source(), instance.expression(), - instance.source(), + instance.src(), instance.pattern(), instance.replacement()); } -} +} \ No newline at end of file diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/ReplaceProcessorTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/ReplaceProcessorTests.java index 49a7dd78fee78..66b6af11d1f91 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/ReplaceProcessorTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/ReplaceProcessorTests.java @@ -13,7 +13,7 @@ import org.elasticsearch.xpack.sql.expression.function.scalar.Processors; import org.elasticsearch.xpack.sql.expression.gen.processor.ConstantProcessor; -import static org.elasticsearch.xpack.sql.tree.Location.EMPTY; +import static org.elasticsearch.xpack.sql.tree.Source.EMPTY; import static org.elasticsearch.xpack.sql.expression.function.scalar.FunctionTestUtils.l; public class ReplaceProcessorTests extends AbstractWireSerializingTestCase { diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/SubstringFunctionPipeTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/SubstringFunctionPipeTests.java index 3218339484e78..22199fab0b4d8 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/SubstringFunctionPipeTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/SubstringFunctionPipeTests.java @@ -10,7 +10,7 @@ import org.elasticsearch.xpack.sql.expression.function.scalar.FunctionTestUtils.Combinations; import org.elasticsearch.xpack.sql.expression.gen.pipeline.Pipe; import org.elasticsearch.xpack.sql.tree.AbstractNodeTestCase; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; import java.util.ArrayList; import java.util.BitSet; @@ -21,7 +21,7 @@ import static org.elasticsearch.xpack.sql.expression.Expressions.pipe; import static org.elasticsearch.xpack.sql.expression.function.scalar.FunctionTestUtils.randomIntLiteral; import static org.elasticsearch.xpack.sql.expression.function.scalar.FunctionTestUtils.randomStringLiteral; -import static org.elasticsearch.xpack.sql.tree.LocationTests.randomLocation; +import static org.elasticsearch.xpack.sql.tree.SourceTests.randomSource; public class SubstringFunctionPipeTests extends AbstractNodeTestCase { @@ -36,7 +36,7 @@ private Expression randomSubstringFunctionExpression() { } public static SubstringFunctionPipe randomSubstringFunctionPipe() { - return (SubstringFunctionPipe) (new Substring(randomLocation(), + return (SubstringFunctionPipe) (new Substring(randomSource(), randomStringLiteral(), randomIntLiteral(), randomIntLiteral()) @@ -45,28 +45,28 @@ public static SubstringFunctionPipe randomSubstringFunctionPipe() { @Override public void 
testTransform() { - // test transforming only the properties (location, expression), + // test transforming only the properties (source, expression), // skipping the children (the two parameters of the binary function) which are tested separately SubstringFunctionPipe b1 = randomInstance(); Expression newExpression = randomValueOtherThan(b1.expression(), () -> randomSubstringFunctionExpression()); SubstringFunctionPipe newB = new SubstringFunctionPipe( - b1.location(), - newExpression, b1.source(), + newExpression, + b1.src(), b1.start(), b1.length()); assertEquals(newB, b1.transformPropertiesOnly(v -> Objects.equals(v, b1.expression()) ? newExpression : v, Expression.class)); SubstringFunctionPipe b2 = randomInstance(); - Location newLoc = randomValueOtherThan(b2.location(), () -> randomLocation()); + Source newLoc = randomValueOtherThan(b2.source(), () -> randomSource()); newB = new SubstringFunctionPipe( newLoc, b2.expression(), - b2.source(), + b2.src(), b2.start(), b2.length()); assertEquals(newB, - b2.transformPropertiesOnly(v -> Objects.equals(v, b2.location()) ? newLoc : v, Location.class)); + b2.transformPropertiesOnly(v -> Objects.equals(v, b2.source()) ? newLoc : v, Source.class)); } @Override @@ -76,21 +76,21 @@ public void testReplaceChildren() { Pipe newStart = pipe(((Expression) randomValueOtherThan(b.start(), () -> randomIntLiteral()))); Pipe newLength = pipe(((Expression) randomValueOtherThan(b.length(), () -> randomIntLiteral()))); SubstringFunctionPipe newB = - new SubstringFunctionPipe(b.location(), b.expression(), b.source(), b.start(), b.length()); + new SubstringFunctionPipe(b.source(), b.expression(), b.src(), b.start(), b.length()); SubstringFunctionPipe transformed = null; // generate all the combinations of possible children modifications and test all of them for(int i = 1; i < 4; i++) { for(BitSet comb : new Combinations(3, i)) { transformed = (SubstringFunctionPipe) newB.replaceChildren( - comb.get(0) ? newSource : b.source(), + comb.get(0) ? newSource : b.src(), comb.get(1) ? newStart : b.start(), comb.get(2) ? newLength : b.length()); - assertEquals(transformed.source(), comb.get(0) ? newSource : b.source()); + assertEquals(transformed.src(), comb.get(0) ? newSource : b.src()); assertEquals(transformed.start(), comb.get(1) ? newStart : b.start()); assertEquals(transformed.length(), comb.get(2) ? newLength : b.length()); assertEquals(transformed.expression(), b.expression()); - assertEquals(transformed.location(), b.location()); + assertEquals(transformed.source(), b.source()); } } } @@ -102,10 +102,10 @@ protected SubstringFunctionPipe mutate(SubstringFunctionPipe instance) { for(int i = 1; i < 4; i++) { for(BitSet comb : new Combinations(3, i)) { randoms.add(f -> new SubstringFunctionPipe( - f.location(), + f.source(), f.expression(), - comb.get(0) ? pipe(((Expression) randomValueOtherThan(f.source(), - () -> randomStringLiteral()))) : f.source(), + comb.get(0) ? pipe(((Expression) randomValueOtherThan(f.src(), + () -> randomStringLiteral()))) : f.src(), comb.get(1) ? pipe(((Expression) randomValueOtherThan(f.start(), () -> randomIntLiteral()))) : f.start(), comb.get(2) ? 
pipe(((Expression) randomValueOtherThan(f.length(), @@ -118,10 +118,10 @@ protected SubstringFunctionPipe mutate(SubstringFunctionPipe instance) { @Override protected SubstringFunctionPipe copy(SubstringFunctionPipe instance) { - return new SubstringFunctionPipe(instance.location(), + return new SubstringFunctionPipe(instance.source(), instance.expression(), - instance.source(), + instance.src(), instance.start(), instance.length()); } -} +} \ No newline at end of file diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/SubstringProcessorTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/SubstringProcessorTests.java index bb22f005fe799..839817fb2103b 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/SubstringProcessorTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/SubstringProcessorTests.java @@ -13,7 +13,7 @@ import org.elasticsearch.xpack.sql.expression.function.scalar.Processors; import org.elasticsearch.xpack.sql.expression.gen.processor.ConstantProcessor; -import static org.elasticsearch.xpack.sql.tree.Location.EMPTY; +import static org.elasticsearch.xpack.sql.tree.Source.EMPTY; import static org.elasticsearch.xpack.sql.expression.function.scalar.FunctionTestUtils.l; public class SubstringProcessorTests extends AbstractWireSerializingTestCase { diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/gen/pipeline/AttributeInputTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/gen/pipeline/AttributeInputTests.java index 6d3bcbbe25ac0..19164d388de60 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/gen/pipeline/AttributeInputTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/gen/pipeline/AttributeInputTests.java @@ -20,9 +20,9 @@ public void testResolveAttributes() { Expression expression = mock(Expression.class); Attribute attribute = mock(Attribute.class); - ReferenceInput expected = new ReferenceInput(expression.location(), expression, column); + ReferenceInput expected = new ReferenceInput(expression.source(), expression, column); - assertEquals(expected, new AttributeInput(expression.location(), expression, attribute).resolveAttributes(a -> { + assertEquals(expected, new AttributeInput(expression.source(), expression, attribute).resolveAttributes(a -> { assertSame(attribute, a); return column; })); diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/gen/pipeline/BinaryPipesTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/gen/pipeline/BinaryPipesTests.java index 991036d2da3ac..a4b8a27f13e3c 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/gen/pipeline/BinaryPipesTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/gen/pipeline/BinaryPipesTests.java @@ -11,7 +11,7 @@ import org.elasticsearch.xpack.sql.expression.Literal; import org.elasticsearch.xpack.sql.expression.gen.pipeline.Pipe.AttributeResolver; import org.elasticsearch.xpack.sql.expression.gen.processor.Processor; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; import org.elasticsearch.xpack.sql.tree.NodeInfo; import java.util.List; @@ -86,16 +86,16 @@ public static BinaryPipe 
randomBinaryPipe() { } public static Pipe randomUnaryPipe() { - return new ConstantInput(Location.EMPTY, Literal.of(Location.EMPTY, randomAlphaOfLength(16)), randomAlphaOfLength(16)); + return new ConstantInput(Source.EMPTY, Literal.of(Source.EMPTY, randomAlphaOfLength(16)), randomAlphaOfLength(16)); } public static final class DummyBinaryPipe extends BinaryPipe { public DummyBinaryPipe(Pipe left, Pipe right) { - this(Location.EMPTY, left, right); + this(Source.EMPTY, left, right); } - public DummyBinaryPipe(Location location, Pipe left, Pipe right) { - super(location, null, left, right); + public DummyBinaryPipe(Source source, Pipe left, Pipe right) { + super(source, null, left, right); } @Override @@ -110,7 +110,7 @@ public Processor asProcessor() { @Override protected BinaryPipe replaceChildren(Pipe left, Pipe right) { - return new DummyBinaryPipe(location(), left, right); + return new DummyBinaryPipe(source(), left, right); } } @@ -118,11 +118,11 @@ public static class DummyPipe extends Pipe { private final boolean supportedByAggsOnlyQuery; public DummyPipe(boolean supportedByAggsOnlyQuery) { - this(Location.EMPTY, supportedByAggsOnlyQuery); + this(Source.EMPTY, supportedByAggsOnlyQuery); } - public DummyPipe(Location location, boolean supportedByAggsOnlyQuery) { - super(location, null, emptyList()); + public DummyPipe(Source source, boolean supportedByAggsOnlyQuery) { + super(source, null, emptyList()); this.supportedByAggsOnlyQuery = supportedByAggsOnlyQuery; } diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/gen/pipeline/UnaryPipeTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/gen/pipeline/UnaryPipeTests.java index f1c24ae2bd472..9c927aed7f4b9 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/gen/pipeline/UnaryPipeTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/gen/pipeline/UnaryPipeTests.java @@ -11,7 +11,7 @@ import org.elasticsearch.xpack.sql.expression.gen.pipeline.UnaryPipe; import org.elasticsearch.xpack.sql.expression.gen.pipeline.BinaryPipesTests.DummyPipe; import org.elasticsearch.xpack.sql.expression.gen.pipeline.Pipe.AttributeResolver; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; import static org.elasticsearch.xpack.sql.expression.gen.pipeline.BinaryPipesTests.tracksScores; @@ -59,6 +59,6 @@ public void collectFields(SqlSourceBuilder sourceBuilder) { } private Pipe newUnaryProcessor(Pipe child) { - return new UnaryPipe(Location.EMPTY, null, child, null); + return new UnaryPipe(Source.EMPTY, null, child, null); } } diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/literal/IntervalsTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/literal/IntervalsTests.java index bc84f3837ecca..a54d902c69578 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/literal/IntervalsTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/literal/IntervalsTests.java @@ -23,7 +23,7 @@ import static org.elasticsearch.xpack.sql.expression.literal.Intervals.intervalType; import static org.elasticsearch.xpack.sql.expression.literal.Intervals.of; import static org.elasticsearch.xpack.sql.expression.literal.Intervals.parseInterval; -import static org.elasticsearch.xpack.sql.tree.Location.EMPTY; +import static org.elasticsearch.xpack.sql.tree.Source.EMPTY; import static 
org.elasticsearch.xpack.sql.type.DataType.INTERVAL_DAY; import static org.elasticsearch.xpack.sql.type.DataType.INTERVAL_DAY_TO_HOUR; import static org.elasticsearch.xpack.sql.type.DataType.INTERVAL_DAY_TO_MINUTE;
diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/predicate/fulltext/FullTextUtilsTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/predicate/fulltext/FullTextUtilsTests.java
index 058cfeabb2c34..8341abee22fac 100644
--- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/predicate/fulltext/FullTextUtilsTests.java
+++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/predicate/fulltext/FullTextUtilsTests.java
@@ -5,10 +5,11 @@ */ package org.elasticsearch.xpack.sql.expression.predicate.fulltext; +import io.netty.util.internal.StringUtil; + import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.xpack.sql.expression.predicate.fulltext.FullTextUtils; import org.elasticsearch.xpack.sql.parser.ParsingException; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; import java.util.Map;
@@ -17,8 +18,10 @@ public class FullTextUtilsTests extends ESTestCase { + private final Source source = new Source(1, 1, StringUtil.EMPTY_STRING); + public void testColonDelimited() { - Map<String, String> options = FullTextUtils.parseSettings("k1=v1;k2=v2", new Location(1, 1)); + Map<String, String> options = FullTextUtils.parseSettings("k1=v1;k2=v2", source); assertThat(options.size(), is(2)); assertThat(options, hasEntry("k1", "v1")); assertThat(options, hasEntry("k2", "v2"));
@@ -26,7 +29,7 @@ public void testColonDelimited() { public void testColonDelimitedErrorString() { ParsingException e = expectThrows(ParsingException.class, - () -> FullTextUtils.parseSettings("k1=v1;k2v2", new Location(1, 1))); + () -> FullTextUtils.parseSettings("k1=v1;k2v2", source)); assertThat(e.getMessage(), is("line 1:3: Cannot parse entry k2v2 in options k1=v1;k2v2")); assertThat(e.getLineNumber(), is(1)); assertThat(e.getColumnNumber(), is(3));
@@ -34,7 +37,7 @@ public void testColonDelimitedErrorString() { public void testColonDelimitedErrorDuplicate() { ParsingException e = expectThrows(ParsingException.class, - () -> FullTextUtils.parseSettings("k1=v1;k1=v2", new Location(1, 1))); + () -> FullTextUtils.parseSettings("k1=v1;k1=v2", source)); assertThat(e.getMessage(), is("line 1:3: Duplicate option k1=v2 detected in options k1=v1;k1=v2")); assertThat(e.getLineNumber(), is(1)); assertThat(e.getColumnNumber(), is(3));
diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/predicate/operator/arithmetic/BinaryArithmeticProcessorTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/predicate/operator/arithmetic/BinaryArithmeticProcessorTests.java
index ee021f056f829..03e1774c1fc52 100644
--- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/predicate/operator/arithmetic/BinaryArithmeticProcessorTests.java
+++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/predicate/operator/arithmetic/BinaryArithmeticProcessorTests.java
@@ -21,7 +21,7 @@ import org.elasticsearch.xpack.sql.expression.predicate.operator.arithmetic.Neg; import org.elasticsearch.xpack.sql.expression.predicate.operator.arithmetic.Sub; -import static org.elasticsearch.xpack.sql.tree.Location.EMPTY; +import static org.elasticsearch.xpack.sql.tree.Source.EMPTY; public class BinaryArithmeticProcessorTests extends AbstractWireSerializingTestCase<BinaryArithmeticProcessor> { public static BinaryArithmeticProcessor randomProcessor() {
diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/predicate/operator/arithmetic/BinaryArithmeticTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/predicate/operator/arithmetic/BinaryArithmeticTests.java
index 2618392a067bd..e329ad248108c 100644
--- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/predicate/operator/arithmetic/BinaryArithmeticTests.java
+++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/predicate/operator/arithmetic/BinaryArithmeticTests.java
@@ -20,7 +20,7 @@ import java.time.temporal.TemporalAmount; import static org.elasticsearch.xpack.sql.expression.predicate.operator.arithmetic.Arithmetics.mod; -import static org.elasticsearch.xpack.sql.tree.Location.EMPTY; +import static org.elasticsearch.xpack.sql.tree.Source.EMPTY; import static org.elasticsearch.xpack.sql.type.DataType.INTERVAL_DAY; import static org.elasticsearch.xpack.sql.type.DataType.INTERVAL_DAY_TO_HOUR; import static org.elasticsearch.xpack.sql.type.DataType.INTERVAL_HOUR;
@@ -139,7 +139,7 @@ public void testSubYearMonthIntervalToDateIllegal() { TemporalAmount t = Period.ofYears(100).plusMonths(50); Literal r = interval(t, INTERVAL_HOUR); SqlIllegalArgumentException ex = expectThrows(SqlIllegalArgumentException.class, () -> sub(r, l)); - assertEquals("Cannot substract a date from an interval; do you mean the reverse?", ex.getMessage()); + assertEquals("Cannot subtract a date from an interval; do you mean the reverse?", ex.getMessage()); } public void testSubNumberFromIntervalIllegal() {
diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/predicate/operator/comparison/BinaryComparisonProcessorTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/predicate/operator/comparison/BinaryComparisonProcessorTests.java
index 5e79877f38c4a..d082fbd371f79 100644
--- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/predicate/operator/comparison/BinaryComparisonProcessorTests.java
+++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/predicate/operator/comparison/BinaryComparisonProcessorTests.java
@@ -13,7 +13,7 @@ import org.elasticsearch.xpack.sql.expression.gen.processor.ConstantProcessor; import static org.elasticsearch.xpack.sql.expression.Literal.NULL; -import static org.elasticsearch.xpack.sql.tree.Location.EMPTY; +import static org.elasticsearch.xpack.sql.tree.Source.EMPTY; public class BinaryComparisonProcessorTests extends AbstractWireSerializingTestCase<BinaryComparisonProcessor> { public static BinaryComparisonProcessor randomProcessor() {
diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/predicate/operator/comparison/InProcessorTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/predicate/operator/comparison/InProcessorTests.java
index 3303072e50078..e5b9bb23052bd 100644
--- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/predicate/operator/comparison/InProcessorTests.java
+++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/predicate/operator/comparison/InProcessorTests.java
@@ -15,7 +15,7 @@ import java.util.Arrays; import static org.elasticsearch.xpack.sql.expression.Literal.NULL; -import static org.elasticsearch.xpack.sql.tree.Location.EMPTY; +import static org.elasticsearch.xpack.sql.tree.Source.EMPTY;
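// ---------------------------------------------------------------------------
// Editorial aside, not part of the patch: every hunk in this series swaps the
// two-argument Location(line, column) for a Source that additionally carries
// the SQL text it covers -- that text is what expr.sourceText() returns in the
// new ExpressionTests cases further down. A minimal sketch of the shape these
// tests rely on follows; the field names and the EMPTY values are assumptions
// for illustration, and the real org.elasticsearch.xpack.sql.tree.Source has
// more behavior (equals/hashCode and so on):
final class SourceSketch {
    static final SourceSketch EMPTY = new SourceSketch(1, 0, ""); // assumed values

    private final int line;
    private final int charPositionInLine; // zero-based column
    private final String text;            // originating SQL fragment

    SourceSketch(int line, int charPositionInLine, String text) {
        this.line = line;
        this.charPositionInLine = charPositionInLine;
        this.text = text;
    }

    String text() {
        return text;
    }

    // Rendered as "@line:column" with a one-based column, which is why queries
    // built with new Source(1, 1, ...) still expect the old "@1:2" suffix in
    // the toString() assertions below.
    @Override
    public String toString() {
        return "@" + line + ":" + (charPositionInLine + 1);
    }
}
// ---------------------------------------------------------------------------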
public class InProcessorTests extends AbstractWireSerializingTestCase<InProcessor> {
diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/predicate/operator/comparison/InTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/predicate/operator/comparison/InTests.java
index c78014afbc471..49fa8bed9dd9c 100644
--- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/predicate/operator/comparison/InTests.java
+++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/predicate/operator/comparison/InTests.java
@@ -11,7 +11,7 @@ import java.util.Arrays; import static org.elasticsearch.xpack.sql.expression.Literal.NULL; -import static org.elasticsearch.xpack.sql.tree.Location.EMPTY; +import static org.elasticsearch.xpack.sql.tree.Source.EMPTY; public class InTests extends ESTestCase {
diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/optimizer/OptimizerTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/optimizer/OptimizerTests.java
index 2412342c69c6e..bc5bc0985cb29 100644
--- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/optimizer/OptimizerTests.java
+++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/optimizer/OptimizerTests.java
@@ -14,6 +14,7 @@ import org.elasticsearch.xpack.sql.expression.Foldables; import org.elasticsearch.xpack.sql.expression.Literal; import org.elasticsearch.xpack.sql.expression.NamedExpression; +import org.elasticsearch.xpack.sql.expression.Nullability; import org.elasticsearch.xpack.sql.expression.Order; import org.elasticsearch.xpack.sql.expression.Order.OrderDirection; import org.elasticsearch.xpack.sql.expression.function.Function;
@@ -83,8 +84,8 @@ import org.elasticsearch.xpack.sql.plan.logical.SubQueryAlias; import org.elasticsearch.xpack.sql.plan.logical.command.ShowTables; import org.elasticsearch.xpack.sql.session.EmptyExecutable; -import org.elasticsearch.xpack.sql.tree.Location; import org.elasticsearch.xpack.sql.tree.NodeInfo; +import org.elasticsearch.xpack.sql.tree.Source; import org.elasticsearch.xpack.sql.type.DataType; import org.elasticsearch.xpack.sql.type.EsField; import org.elasticsearch.xpack.sql.util.CollectionUtils;
@@ -99,7 +100,7 @@ import static java.util.Collections.emptyMap; import static java.util.Collections.singletonList; import static org.elasticsearch.xpack.sql.expression.Literal.NULL; -import static org.elasticsearch.xpack.sql.tree.Location.EMPTY; +import static org.elasticsearch.xpack.sql.tree.Source.EMPTY; import static org.elasticsearch.xpack.sql.util.DateUtils.UTC; import static org.hamcrest.Matchers.contains;
@@ -118,8 +119,8 @@ public static class DummyBooleanExpression extends Expression { private final int id; - public DummyBooleanExpression(Location location, int id) { - super(location, Collections.emptyList()); + public DummyBooleanExpression(Source source, int id) { + super(source, Collections.emptyList()); this.id = id; }
@@ -134,8 +135,8 @@ public Expression replaceChildren(List<Expression> newChildren) { } @Override - public boolean nullable() { - return false; + public Nullability nullable() { + return Nullability.FALSE; } @Override
@@ -393,6 +394,8 @@ private static Object foldOperator(BinaryOperator<?, ?, ?, ?> b) { return ((Literal) new ConstantFolding().rule(b)).value(); } + // Null folding + public void testNullFoldingIsNull() { FoldNull foldNull = new FoldNull(); assertEquals(true, foldNull.rule(new IsNull(EMPTY, Literal.NULL)).fold()); @@ -422,6 +425,34 @@ public void
testGenericNullableExpression() { assertNullLiteral(rule.rule(new RLike(EMPTY, Literal.NULL, "123"))); } + public void testNullFoldingDoesNotApplyOnLogicalExpressions() { + FoldNull rule = new FoldNull(); + + Or or = new Or(EMPTY, Literal.NULL, Literal.TRUE); + assertEquals(or, rule.rule(or)); + or = new Or(EMPTY, Literal.NULL, Literal.NULL); + assertEquals(or, rule.rule(or)); + + And and = new And(EMPTY, Literal.NULL, Literal.TRUE); + assertEquals(and, rule.rule(and)); + and = new And(EMPTY, Literal.NULL, Literal.NULL); + assertEquals(and, rule.rule(and)); + } + + public void testNullFoldingDoesNotApplyOnConditionals() { + FoldNull rule = new FoldNull(); + + Coalesce coalesce = new Coalesce(EMPTY, Arrays.asList(Literal.NULL, ONE, TWO)); + assertEquals(coalesce, rule.rule(coalesce)); + coalesce = new Coalesce(EMPTY, Arrays.asList(Literal.NULL, NULL, NULL)); + assertEquals(coalesce, rule.rule(coalesce)); + + Greatest greatest = new Greatest(EMPTY, Arrays.asList(Literal.NULL, ONE, TWO)); + assertEquals(greatest, rule.rule(greatest)); + greatest = new Greatest(EMPTY, Arrays.asList(Literal.NULL, NULL, NULL)); + assertEquals(greatest, rule.rule(greatest)); + } + public void testSimplifyCoalesceNulls() { Expression e = new SimplifyConditional().rule(new Coalesce(EMPTY, asList(Literal.NULL, Literal.NULL))); assertEquals(Coalesce.class, e.getClass());
@@ -554,18 +585,18 @@ public void testNullEqualsWithNullLiteralBecomesIsNull() { BooleanLiteralsOnTheRight swapLiteralsToRight = new BooleanLiteralsOnTheRight(); BinaryComparisonSimplification bcSimpl = new BinaryComparisonSimplification(); FieldAttribute fa = getFieldAttribute(); - Location loc = new Location(1, 10); + Source source = new Source(1, 10, StringUtils.EMPTY); - Expression e = bcSimpl.rule(swapLiteralsToRight.rule(new NullEquals(loc, fa, NULL))); + Expression e = bcSimpl.rule(swapLiteralsToRight.rule(new NullEquals(source, fa, NULL))); assertEquals(IsNull.class, e.getClass()); IsNull isNull = (IsNull) e; - assertEquals(loc, isNull.location()); + assertEquals(source, isNull.source()); assertEquals("IS_NULL(a)", isNull.name()); - e = bcSimpl.rule(swapLiteralsToRight.rule(new NullEquals(loc, NULL, fa))); + e = bcSimpl.rule(swapLiteralsToRight.rule(new NullEquals(source, NULL, fa))); assertEquals(IsNull.class, e.getClass()); isNull = (IsNull) e; - assertEquals(loc, isNull.location()); + assertEquals(source, isNull.source()); assertEquals("IS_NULL(a)", isNull.name()); }
diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/parser/ExpressionTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/parser/ExpressionTests.java
index a85add8b110a0..e56bd4ecd11ac 100644
--- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/parser/ExpressionTests.java
+++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/parser/ExpressionTests.java
@@ -353,4 +353,16 @@ public void testCurrentTimestampInvalidPrecision() { ParsingException ex = expectThrows(ParsingException.class, () -> parser.createExpression("CURRENT_TIMESTAMP(100)")); assertEquals("line 1:20: Precision needs to be between [0-9], received [100]", ex.getMessage()); } + + public void testSourceKeyword() throws Exception { + String s = "CUrrENT_timestamP"; + Expression expr = parser.createExpression(s); + assertEquals(s, expr.sourceText()); + } + + public void testSourceFunction() throws Exception { + String s = "PerCentile_RaNK(fOO, 12 )"; + Expression expr = parser.createExpression(s); + assertEquals(s, expr.sourceText()); + } } \ No newline at
end of file diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/parser/LikeEscapingParsingTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/parser/LikeEscapingParsingTests.java index 9f8cae9e52918..5221f9695699d 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/parser/LikeEscapingParsingTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/parser/LikeEscapingParsingTests.java @@ -12,19 +12,19 @@ import org.elasticsearch.xpack.sql.proto.SqlTypedParamValue; import org.elasticsearch.xpack.sql.type.DataType; -import java.util.Locale; - import static java.util.Collections.singletonList; import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.is; +import static org.elasticsearch.common.logging.LoggerMessageFormat.format; + public class LikeEscapingParsingTests extends ESTestCase { private final SqlParser parser = new SqlParser(); private String error(String pattern) { ParsingException ex = expectThrows(ParsingException.class, - () -> parser.createExpression(String.format(Locale.ROOT, "exp LIKE %s", pattern))); + () -> parser.createExpression(format(null, "exp LIKE {}", pattern))); return ex.getMessage(); } @@ -35,7 +35,7 @@ private LikePattern like(String pattern) { if (parameterized) { exp = parser.createExpression("exp LIKE ?", singletonList(new SqlTypedParamValue(DataType.KEYWORD.esType, pattern))); } else { - exp = parser.createExpression(String.format(Locale.ROOT, "exp LIKE '%s'", pattern)); + exp = parser.createExpression(format(null, "exp LIKE '{}'", pattern)); } assertThat(exp, instanceOf(Like.class)); Like l = (Like) exp; diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/plan/logical/UnresolvedRelationTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/plan/logical/UnresolvedRelationTests.java index 99df12327e159..f8cfd179fd0f2 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/plan/logical/UnresolvedRelationTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/plan/logical/UnresolvedRelationTests.java @@ -7,8 +7,8 @@ import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.sql.plan.TableIdentifier; -import org.elasticsearch.xpack.sql.tree.Location; -import org.elasticsearch.xpack.sql.tree.LocationTests; +import org.elasticsearch.xpack.sql.tree.SourceTests; +import org.elasticsearch.xpack.sql.tree.Source; import java.util.ArrayList; import java.util.List; @@ -19,36 +19,36 @@ public class UnresolvedRelationTests extends ESTestCase { public void testEqualsAndHashCode() { - Location location = new Location(between(1, 1000), between(1, 1000)); - TableIdentifier table = new TableIdentifier(location, randomAlphaOfLength(5), randomAlphaOfLength(5)); + Source source = new Source(between(1, 1000), between(1, 1000), randomAlphaOfLength(16)); + TableIdentifier table = new TableIdentifier(source, randomAlphaOfLength(5), randomAlphaOfLength(5)); String alias = randomBoolean() ? 
null : randomAlphaOfLength(5); String unresolvedMessage = randomAlphaOfLength(5); - UnresolvedRelation relation = new UnresolvedRelation(location, table, alias, unresolvedMessage); + UnresolvedRelation relation = new UnresolvedRelation(source, table, alias, unresolvedMessage); List<Function<UnresolvedRelation, UnresolvedRelation>> mutators = new ArrayList<>(); mutators.add(r -> new UnresolvedRelation( - LocationTests.mutate(r.location()), + SourceTests.mutate(r.source()), r.table(), r.alias(), r.unresolvedMessage())); mutators.add(r -> new UnresolvedRelation( - r.location(), - new TableIdentifier(r.location(), r.table().cluster(), r.table().index() + "m"), + r.source(), + new TableIdentifier(r.source(), r.table().cluster(), r.table().index() + "m"), r.alias(), r.unresolvedMessage())); mutators.add(r -> new UnresolvedRelation( - r.location(), + r.source(), r.table(), randomValueOtherThanMany( a -> Objects.equals(a, r.alias()), () -> randomBoolean() ? null : randomAlphaOfLength(5)), r.unresolvedMessage())); mutators.add(r -> new UnresolvedRelation( - r.location(), + r.source(), r.table(), r.alias(), randomValueOtherThan(r.unresolvedMessage(), () -> randomAlphaOfLength(5)))); checkEqualsAndHashCode(relation, - r -> new UnresolvedRelation(r.location(), r.table(), r.alias(), r.unresolvedMessage()), + r -> new UnresolvedRelation(r.source(), r.table(), r.alias(), r.unresolvedMessage()), r -> randomFrom(mutators).apply(r)); } }
diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/planner/QueryTranslatorTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/planner/QueryTranslatorTests.java
index 559d676f1b95f..8ca1d64bd12fd 100644
--- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/planner/QueryTranslatorTests.java
+++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/planner/QueryTranslatorTests.java
@@ -5,6 +5,11 @@ */ package org.elasticsearch.xpack.sql.planner; +import org.elasticsearch.index.query.ExistsQueryBuilder; +import org.elasticsearch.search.aggregations.AggregationBuilder; +import org.elasticsearch.search.aggregations.bucket.filter.FilterAggregationBuilder; +import org.elasticsearch.search.aggregations.metrics.AvgAggregationBuilder; +import org.elasticsearch.search.aggregations.metrics.CardinalityAggregationBuilder; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.sql.SqlIllegalArgumentException; import org.elasticsearch.xpack.sql.TestUtils;
@@ -19,11 +24,14 @@ import org.elasticsearch.xpack.sql.expression.function.grouping.Histogram; import org.elasticsearch.xpack.sql.expression.function.scalar.math.MathProcessor.MathOperation; import org.elasticsearch.xpack.sql.expression.gen.script.ScriptTemplate; +import org.elasticsearch.xpack.sql.optimizer.Optimizer; import org.elasticsearch.xpack.sql.parser.SqlParser; import org.elasticsearch.xpack.sql.plan.logical.Aggregate; import org.elasticsearch.xpack.sql.plan.logical.Filter; import org.elasticsearch.xpack.sql.plan.logical.LogicalPlan; import org.elasticsearch.xpack.sql.plan.logical.Project; +import org.elasticsearch.xpack.sql.plan.physical.EsQueryExec; +import org.elasticsearch.xpack.sql.plan.physical.PhysicalPlan; import org.elasticsearch.xpack.sql.planner.QueryTranslator.QueryTranslation; import org.elasticsearch.xpack.sql.querydsl.agg.AggFilter; import org.elasticsearch.xpack.sql.querydsl.query.ExistsQuery;
@@ -41,6 +49,7 @@ import org.junit.AfterClass; import org.junit.BeforeClass; +import java.util.Collection; import java.util.List; import java.util.Locale; import java.util.Map;
@@ -55,6 +64,8 @@ public class QueryTranslatorTests extends ESTestCase { private static SqlParser parser; private static Analyzer analyzer; + private static Optimizer optimizer; + private static Planner planner; @BeforeClass public static void init() {
@@ -64,6 +75,8 @@ public static void init() { EsIndex test = new EsIndex("test", mapping); IndexResolution getIndexResult = IndexResolution.valid(test); analyzer = new Analyzer(TestUtils.TEST_CFG, new FunctionRegistry(), getIndexResult, new Verifier(new Metrics())); + optimizer = new Optimizer(); + planner = new Planner(); } @AfterClass
@@ -75,6 +88,10 @@ public static void destroy() { private LogicalPlan plan(String sql) { return analyzer.analyze(parser.createStatement(sql), true); } + + private PhysicalPlan optimizeAndPlan(String sql) { + return planner.plan(optimizer.optimize(plan(sql)), true); + } public void testTermEqualityAnalyzer() { LogicalPlan p = plan("SELECT some.string FROM test WHERE some.string = 'value'");
@@ -433,6 +450,7 @@ public void testTranslateNullIf_GroupBy_Painless() { scriptTemplate.toString()); assertEquals("[{v=int}, {v=10}]", scriptTemplate.params().toString()); } + public void testGroupByDateHistogram() { LogicalPlan p = plan("SELECT MAX(int) FROM test GROUP BY HISTOGRAM(int, 1000)"); assertTrue(p instanceof Aggregate); @@ -448,7 +466,6 @@ public void testGroupByDateHistogram() { assertEquals(DataType.INTEGER, field.dataType()); } - public void testGroupByHistogram() { LogicalPlan p = plan("SELECT MAX(int) FROM test GROUP BY HISTOGRAM(date, INTERVAL 2 YEARS)"); assertTrue(p instanceof Aggregate);
@@ -463,4 +480,91 @@ public void testGroupByHistogram() { assertEquals(FieldAttribute.class, field.getClass()); assertEquals(DataType.DATE, field.dataType()); } + + public void testCountAndCountDistinctFolding() { + PhysicalPlan p = optimizeAndPlan("SELECT COUNT(DISTINCT keyword) dkey, COUNT(keyword) key FROM test"); + assertEquals(EsQueryExec.class, p.getClass()); + EsQueryExec ee = (EsQueryExec) p; + assertEquals(2, ee.output().size()); + assertThat(ee.output().get(0).toString(), startsWith("dkey{a->")); + assertThat(ee.output().get(1).toString(), startsWith("key{a->")); + + Collection<AggregationBuilder> subAggs = ee.queryContainer().aggs().asAggBuilder().getSubAggregations(); + assertEquals(2, subAggs.size()); + assertTrue(subAggs.toArray()[0] instanceof CardinalityAggregationBuilder); + assertTrue(subAggs.toArray()[1] instanceof FilterAggregationBuilder); + + CardinalityAggregationBuilder cardinalityKeyword = (CardinalityAggregationBuilder) subAggs.toArray()[0]; + assertEquals("keyword", cardinalityKeyword.field()); + + FilterAggregationBuilder existsKeyword = (FilterAggregationBuilder) subAggs.toArray()[1]; + assertTrue(existsKeyword.getFilter() instanceof ExistsQueryBuilder); + assertEquals("keyword", ((ExistsQueryBuilder) existsKeyword.getFilter()).fieldName()); + + assertThat(ee.queryContainer().aggs().asAggBuilder().toString().replaceAll("\\s+", ""), + endsWith("{\"filter\":{\"exists\":{\"field\":\"keyword\",\"boost\":1.0}}}}}}")); + } + + public void testAllCountVariantsWithHavingGenerateCorrectAggregations() { + PhysicalPlan p = optimizeAndPlan("SELECT AVG(int), COUNT(keyword) ln, COUNT(distinct keyword) dln, COUNT(some.dotted.field) fn," + "COUNT(distinct some.dotted.field) dfn, COUNT(*) ccc FROM test GROUP BY bool " + "HAVING dln > 3 AND ln > 32 AND dfn > 1 AND fn > 2 AND ccc > 5 AND AVG(int) > 50000"); + assertEquals(EsQueryExec.class, p.getClass()); + EsQueryExec ee = (EsQueryExec) p; + assertEquals(6, ee.output().size()); + assertThat(ee.output().get(0).toString(), startsWith("AVG(int){a->")); + assertThat(ee.output().get(1).toString(), startsWith("ln{a->")); + assertThat(ee.output().get(2).toString(), startsWith("dln{a->")); + assertThat(ee.output().get(3).toString(), startsWith("fn{a->")); + assertThat(ee.output().get(4).toString(), startsWith("dfn{a->")); + assertThat(ee.output().get(5).toString(), startsWith("ccc{a->")); + + Collection<AggregationBuilder> subAggs = ee.queryContainer().aggs().asAggBuilder().getSubAggregations(); + assertEquals(5, subAggs.size()); + assertTrue(subAggs.toArray()[0] instanceof AvgAggregationBuilder); + assertTrue(subAggs.toArray()[1] instanceof FilterAggregationBuilder); + assertTrue(subAggs.toArray()[2] instanceof CardinalityAggregationBuilder); + assertTrue(subAggs.toArray()[3] instanceof FilterAggregationBuilder); + assertTrue(subAggs.toArray()[4] instanceof CardinalityAggregationBuilder); + + AvgAggregationBuilder avgInt = (AvgAggregationBuilder) subAggs.toArray()[0]; + assertEquals("int", avgInt.field()); + + FilterAggregationBuilder existsKeyword = (FilterAggregationBuilder) subAggs.toArray()[1]; + assertTrue(existsKeyword.getFilter() instanceof ExistsQueryBuilder); + assertEquals("keyword", ((ExistsQueryBuilder) existsKeyword.getFilter()).fieldName()); + + CardinalityAggregationBuilder cardinalityKeyword = (CardinalityAggregationBuilder) subAggs.toArray()[2]; + assertEquals("keyword", cardinalityKeyword.field()); + + FilterAggregationBuilder existsDottedField = (FilterAggregationBuilder) subAggs.toArray()[3]; + assertTrue(existsDottedField.getFilter() instanceof ExistsQueryBuilder); + assertEquals("some.dotted.field", ((ExistsQueryBuilder) existsDottedField.getFilter()).fieldName()); + + CardinalityAggregationBuilder cardinalityDottedField = (CardinalityAggregationBuilder) subAggs.toArray()[4]; + assertEquals("some.dotted.field", cardinalityDottedField.field()); + + assertThat(ee.queryContainer().aggs().asAggBuilder().toString().replaceAll("\\s+", ""), + endsWith("{\"buckets_path\":{" + "\"a0\":\"" + cardinalityKeyword.getName() + "\"," + "\"a1\":\"" + existsKeyword.getName() + "._count\"," + "\"a2\":\"" + cardinalityDottedField.getName() + "\"," + "\"a3\":\"" + existsDottedField.getName() + "._count\"," + "\"a4\":\"_count\"," + "\"a5\":\"" + avgInt.getName() + "\"}," + "\"script\":{\"source\":\"" + "InternalSqlScriptUtils.nullSafeFilter(InternalSqlScriptUtils.and(" + "InternalSqlScriptUtils.nullSafeFilter(InternalSqlScriptUtils.and(" + "InternalSqlScriptUtils.nullSafeFilter(InternalSqlScriptUtils.and(" + "InternalSqlScriptUtils.nullSafeFilter(InternalSqlScriptUtils.and(" + "InternalSqlScriptUtils.nullSafeFilter(InternalSqlScriptUtils.and(" + "InternalSqlScriptUtils.nullSafeFilter(InternalSqlScriptUtils.gt(params.a0,params.v0))," + "InternalSqlScriptUtils.nullSafeFilter(InternalSqlScriptUtils.gt(params.a1,params.v1))))," + "InternalSqlScriptUtils.nullSafeFilter(InternalSqlScriptUtils.gt(params.a2,params.v2))))," + "InternalSqlScriptUtils.nullSafeFilter(InternalSqlScriptUtils.gt(params.a3,params.v3))))," + "InternalSqlScriptUtils.nullSafeFilter(InternalSqlScriptUtils.gt(params.a4,params.v4))))," + "InternalSqlScriptUtils.nullSafeFilter(InternalSqlScriptUtils.gt(params.a5,params.v5))))\"," + "\"lang\":\"painless\",\"params\":{\"v0\":3,\"v1\":32,\"v2\":1,\"v3\":2,\"v4\":5,\"v5\":50000}}," + "\"gap_policy\":\"skip\"}}}}}")); + } }
diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/querydsl/container/QueryContainerTests.java
b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/querydsl/container/QueryContainerTests.java index 84cb62a5ada06..5e675b0be5a72 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/querydsl/container/QueryContainerTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/querydsl/container/QueryContainerTests.java @@ -5,8 +5,6 @@ */ package org.elasticsearch.xpack.sql.querydsl.container; -import java.util.AbstractMap.SimpleImmutableEntry; - import org.elasticsearch.search.fetch.subphase.DocValueFieldsContext; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.sql.querydsl.query.BoolQuery; @@ -14,51 +12,53 @@ import org.elasticsearch.xpack.sql.querydsl.query.NestedQuery; import org.elasticsearch.xpack.sql.querydsl.query.Query; import org.elasticsearch.xpack.sql.querydsl.query.RangeQuery; -import org.elasticsearch.xpack.sql.tree.Location; -import org.elasticsearch.xpack.sql.tree.LocationTests; +import org.elasticsearch.xpack.sql.tree.Source; +import org.elasticsearch.xpack.sql.tree.SourceTests; + +import java.util.AbstractMap.SimpleImmutableEntry; import static java.util.Collections.emptyMap; import static java.util.Collections.singletonMap; public class QueryContainerTests extends ESTestCase { - private Location location = LocationTests.randomLocation(); + private Source source = SourceTests.randomSource(); private String path = randomAlphaOfLength(5); private String name = randomAlphaOfLength(5); private String format = DocValueFieldsContext.USE_DEFAULT_FORMAT; private boolean hasDocValues = randomBoolean(); public void testRewriteToContainNestedFieldNoQuery() { - Query expected = new NestedQuery(location, path, singletonMap(name, new SimpleImmutableEntry<>(hasDocValues, format)), - new MatchAll(location)); - assertEquals(expected, QueryContainer.rewriteToContainNestedField(null, location, path, name, format, hasDocValues)); + Query expected = new NestedQuery(source, path, singletonMap(name, new SimpleImmutableEntry<>(hasDocValues, format)), + new MatchAll(source)); + assertEquals(expected, QueryContainer.rewriteToContainNestedField(null, source, path, name, format, hasDocValues)); } public void testRewriteToContainsNestedFieldWhenContainsNestedField() { - Query original = new BoolQuery(location, true, - new NestedQuery(location, path, singletonMap(name, new SimpleImmutableEntry<>(hasDocValues, format)), - new MatchAll(location)), - new RangeQuery(location, randomAlphaOfLength(5), 0, randomBoolean(), 100, randomBoolean())); - assertSame(original, QueryContainer.rewriteToContainNestedField(original, location, path, name, format, randomBoolean())); + Query original = new BoolQuery(source, true, + new NestedQuery(source, path, singletonMap(name, new SimpleImmutableEntry<>(hasDocValues, format)), + new MatchAll(source)), + new RangeQuery(source, randomAlphaOfLength(5), 0, randomBoolean(), 100, randomBoolean())); + assertSame(original, QueryContainer.rewriteToContainNestedField(original, source, path, name, format, randomBoolean())); } public void testRewriteToContainsNestedFieldWhenCanAddNestedField() { - Query buddy = new RangeQuery(location, randomAlphaOfLength(5), 0, randomBoolean(), 100, randomBoolean()); - Query original = new BoolQuery(location, true, - new NestedQuery(location, path, emptyMap(), new MatchAll(location)), + Query buddy = new RangeQuery(source, randomAlphaOfLength(5), 0, randomBoolean(), 100, randomBoolean()); + Query original = new BoolQuery(source, true, + new NestedQuery(source, path, emptyMap(), 
new MatchAll(source)), buddy); - Query expected = new BoolQuery(location, true, - new NestedQuery(location, path, singletonMap(name, new SimpleImmutableEntry<>(hasDocValues, format)), - new MatchAll(location)), + Query expected = new BoolQuery(source, true, + new NestedQuery(source, path, singletonMap(name, new SimpleImmutableEntry<>(hasDocValues, format)), + new MatchAll(source)), buddy); - assertEquals(expected, QueryContainer.rewriteToContainNestedField(original, location, path, name, format, hasDocValues)); + assertEquals(expected, QueryContainer.rewriteToContainNestedField(original, source, path, name, format, hasDocValues)); } public void testRewriteToContainsNestedFieldWhenDoesNotContainNestedFieldAndCantAdd() { - Query original = new RangeQuery(location, randomAlphaOfLength(5), 0, randomBoolean(), 100, randomBoolean()); - Query expected = new BoolQuery(location, true, + Query original = new RangeQuery(source, randomAlphaOfLength(5), 0, randomBoolean(), 100, randomBoolean()); + Query expected = new BoolQuery(source, true, original, - new NestedQuery(location, path, singletonMap(name, new SimpleImmutableEntry<>(hasDocValues, format)), - new MatchAll(location))); - assertEquals(expected, QueryContainer.rewriteToContainNestedField(original, location, path, name, format, hasDocValues)); + new NestedQuery(source, path, singletonMap(name, new SimpleImmutableEntry<>(hasDocValues, format)), + new MatchAll(source))); + assertEquals(expected, QueryContainer.rewriteToContainNestedField(original, source, path, name, format, hasDocValues)); } }
diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/querydsl/query/BoolQueryTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/querydsl/query/BoolQueryTests.java
index 5837b069654ac..adc733a29c33b 100644
--- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/querydsl/query/BoolQueryTests.java
+++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/querydsl/query/BoolQueryTests.java
@@ -8,20 +8,21 @@ import org.elasticsearch.search.fetch.subphase.DocValueFieldsContext; import org.elasticsearch.search.sort.NestedSortBuilder; import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.xpack.sql.tree.Location; -import org.elasticsearch.xpack.sql.tree.LocationTests; +import org.elasticsearch.xpack.sql.tree.Source; +import org.elasticsearch.xpack.sql.tree.SourceTests; +import org.elasticsearch.xpack.sql.util.StringUtils; +import java.util.AbstractMap.SimpleImmutableEntry; import java.util.Arrays; import java.util.List; -import java.util.AbstractMap.SimpleImmutableEntry; import java.util.function.Function; -import static org.elasticsearch.test.EqualsHashCodeTestUtils.checkEqualsAndHashCode; import static java.util.Collections.singletonMap; +import static org.elasticsearch.test.EqualsHashCodeTestUtils.checkEqualsAndHashCode; public class BoolQueryTests extends ESTestCase { static BoolQuery randomBoolQuery(int depth) { - return new BoolQuery(LocationTests.randomLocation(), randomBoolean(), + return new BoolQuery(SourceTests.randomSource(), randomBoolean(), NestedQueryTests.randomQuery(depth), NestedQueryTests.randomQuery(depth)); }
@@ -30,15 +31,15 @@ public void testEqualsAndHashCode() { } private static BoolQuery copy(BoolQuery query) { - return new BoolQuery(query.location(), query.isAnd(), query.left(), query.right()); + return new BoolQuery(query.source(), query.isAnd(), query.left(), query.right()); } private static BoolQuery mutate(BoolQuery query) { List<Function<BoolQuery, BoolQuery>> options = Arrays.asList( - q
-> new BoolQuery(LocationTests.mutate(q.location()), q.isAnd(), q.left(), q.right()), - q -> new BoolQuery(q.location(), false == q.isAnd(), q.left(), q.right()), - q -> new BoolQuery(q.location(), q.isAnd(), randomValueOtherThan(q.left(), () -> NestedQueryTests.randomQuery(5)), q.right()), - q -> new BoolQuery(q.location(), q.isAnd(), q.left(), randomValueOtherThan(q.right(), () -> NestedQueryTests.randomQuery(5)))); + q -> new BoolQuery(SourceTests.mutate(q.source()), q.isAnd(), q.left(), q.right()), + q -> new BoolQuery(q.source(), false == q.isAnd(), q.left(), q.right()), + q -> new BoolQuery(q.source(), q.isAnd(), randomValueOtherThan(q.left(), () -> NestedQueryTests.randomQuery(5)), q.right()), + q -> new BoolQuery(q.source(), q.isAnd(), q.left(), randomValueOtherThan(q.right(), () -> NestedQueryTests.randomQuery(5)))); return randomFrom(options).apply(query); } @@ -80,15 +81,15 @@ public void testEnrichNestedSort() { } private Query boolQueryWithoutNestedChildren() { - return new BoolQuery(LocationTests.randomLocation(), randomBoolean(), - new MatchAll(LocationTests.randomLocation()), new MatchAll(LocationTests.randomLocation())); + return new BoolQuery(SourceTests.randomSource(), randomBoolean(), new MatchAll(SourceTests.randomSource()), + new MatchAll(SourceTests.randomSource())); } private Query boolQueryWithNestedChildren(String path, String field) { - NestedQuery match = new NestedQuery(LocationTests.randomLocation(), path, + NestedQuery match = new NestedQuery(SourceTests.randomSource(), path, singletonMap(field, new SimpleImmutableEntry<>(randomBoolean(), DocValueFieldsContext.USE_DEFAULT_FORMAT)), - new MatchAll(LocationTests.randomLocation())); - Query matchAll = new MatchAll(LocationTests.randomLocation()); + new MatchAll(SourceTests.randomSource())); + Query matchAll = new MatchAll(SourceTests.randomSource()); Query left; Query right; if (randomBoolean()) { @@ -98,13 +99,13 @@ private Query boolQueryWithNestedChildren(String path, String field) { left = matchAll; right = match; } - return new BoolQuery(LocationTests.randomLocation(), randomBoolean(), left, right); + return new BoolQuery(SourceTests.randomSource(), randomBoolean(), left, right); } public void testToString() { assertEquals("BoolQuery@1:2[ExistsQuery@1:2[f1] AND ExistsQuery@1:8[f2]]", - new BoolQuery(new Location(1, 1), true, - new ExistsQuery(new Location(1, 1), "f1"), - new ExistsQuery(new Location(1, 7), "f2")).toString()); + new BoolQuery(new Source(1, 1, StringUtils.EMPTY), true, + new ExistsQuery(new Source(1, 1, StringUtils.EMPTY), "f1"), + new ExistsQuery(new Source(1, 7, StringUtils.EMPTY), "f2")).toString()); } -} +} \ No newline at end of file diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/querydsl/query/LeafQueryTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/querydsl/query/LeafQueryTests.java index 52043003ab5f3..05ef987480910 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/querydsl/query/LeafQueryTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/querydsl/query/LeafQueryTests.java @@ -9,15 +9,16 @@ import org.elasticsearch.search.fetch.subphase.DocValueFieldsContext; import org.elasticsearch.search.sort.NestedSortBuilder; import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.xpack.sql.tree.Location; -import org.elasticsearch.xpack.sql.tree.LocationTests; +import org.elasticsearch.xpack.sql.tree.Source; +import org.elasticsearch.xpack.sql.tree.SourceTests; +import 
org.elasticsearch.xpack.sql.util.StringUtils; import static org.elasticsearch.test.EqualsHashCodeTestUtils.checkEqualsAndHashCode; public class LeafQueryTests extends ESTestCase { private static class DummyLeafQuery extends LeafQuery { - private DummyLeafQuery(Location location) { - super(location); + private DummyLeafQuery(Source source) { + super(source); } @Override @@ -32,33 +33,33 @@ protected String innerToString() { } public void testEqualsAndHashCode() { - DummyLeafQuery query = new DummyLeafQuery(LocationTests.randomLocation()); + DummyLeafQuery query = new DummyLeafQuery(SourceTests.randomSource()); checkEqualsAndHashCode(query, LeafQueryTests::copy, LeafQueryTests::mutate); } private static DummyLeafQuery copy(DummyLeafQuery query) { - return new DummyLeafQuery(query.location()); + return new DummyLeafQuery(query.source()); } private static DummyLeafQuery mutate(DummyLeafQuery query) { - return new DummyLeafQuery(LocationTests.mutate(query.location())); + return new DummyLeafQuery(SourceTests.mutate(query.source())); } public void testContainsNestedField() { - Query query = new DummyLeafQuery(LocationTests.randomLocation()); + Query query = new DummyLeafQuery(SourceTests.randomSource()); // Leaf queries don't contain nested fields. assertFalse(query.containsNestedField(randomAlphaOfLength(5), randomAlphaOfLength(5))); } public void testAddNestedField() { - Query query = new DummyLeafQuery(LocationTests.randomLocation()); + Query query = new DummyLeafQuery(SourceTests.randomSource()); // Leaf queries don't contain nested fields. assertSame(query, query.addNestedField(randomAlphaOfLength(5), randomAlphaOfLength(5), DocValueFieldsContext.USE_DEFAULT_FORMAT, randomBoolean())); } public void testEnrichNestedSort() { - Query query = new DummyLeafQuery(LocationTests.randomLocation()); + Query query = new DummyLeafQuery(SourceTests.randomSource()); // Leaf queries don't contain nested fields. 
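// (Editor's added comment: for the same reason, enrichNestedSort below is
// expected to leave the sort untouched -- a leaf query has no nested children
// that could contribute sort information.)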
NestedSortBuilder sort = new NestedSortBuilder(randomAlphaOfLength(5)); query.enrichNestedSort(sort); @@ -66,6 +67,6 @@ public void testEnrichNestedSort() { } public void testToString() { - assertEquals("DummyLeafQuery@1:2[]", new DummyLeafQuery(new Location(1, 1)).toString()); + assertEquals("DummyLeafQuery@1:2[]", new DummyLeafQuery(new Source(1, 1, StringUtils.EMPTY)).toString()); } }
diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/querydsl/query/MatchQueryTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/querydsl/query/MatchQueryTests.java
index b8b4b2fb32bc5..1660ea3b8e8cb 100644
--- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/querydsl/query/MatchQueryTests.java
+++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/querydsl/query/MatchQueryTests.java
@@ -10,10 +10,11 @@ import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.sql.expression.FieldAttribute; import org.elasticsearch.xpack.sql.expression.predicate.fulltext.MatchQueryPredicate; -import org.elasticsearch.xpack.sql.tree.Location; -import org.elasticsearch.xpack.sql.tree.LocationTests; +import org.elasticsearch.xpack.sql.tree.Source; +import org.elasticsearch.xpack.sql.tree.SourceTests; import org.elasticsearch.xpack.sql.type.DataType; import org.elasticsearch.xpack.sql.type.EsField; +import org.elasticsearch.xpack.sql.util.StringUtils; import java.util.Arrays; import java.util.List;
@@ -21,13 +22,13 @@ import static java.util.Collections.emptyMap; import static org.elasticsearch.test.EqualsHashCodeTestUtils.checkEqualsAndHashCode; -import static org.elasticsearch.xpack.sql.tree.Location.EMPTY; +import static org.elasticsearch.xpack.sql.tree.Source.EMPTY; import static org.hamcrest.Matchers.equalTo; public class MatchQueryTests extends ESTestCase { static MatchQuery randomMatchQuery() { return new MatchQuery( - LocationTests.randomLocation(), + SourceTests.randomSource(), randomAlphaOfLength(5), randomAlphaOfLength(5)); // TODO add the predicate
@@ -38,14 +39,14 @@ public void testEqualsAndHashCode() { } private static MatchQuery copy(MatchQuery query) { - return new MatchQuery(query.location(), query.name(), query.text(), query.predicate()); + return new MatchQuery(query.source(), query.name(), query.text(), query.predicate()); } private static MatchQuery mutate(MatchQuery query) { List<Function<MatchQuery, MatchQuery>> options = Arrays.asList( - q -> new MatchQuery(LocationTests.mutate(q.location()), q.name(), q.text(), q.predicate()), - q -> new MatchQuery(q.location(), randomValueOtherThan(q.name(), () -> randomAlphaOfLength(5)), q.text(), q.predicate()), - q -> new MatchQuery(q.location(), q.name(), randomValueOtherThan(q.text(), () -> randomAlphaOfLength(5)), q.predicate())); + q -> new MatchQuery(SourceTests.mutate(q.source()), q.name(), q.text(), q.predicate()), + q -> new MatchQuery(q.source(), randomValueOtherThan(q.name(), () -> randomAlphaOfLength(5)), q.text(), q.predicate()), + q -> new MatchQuery(q.source(), q.name(), randomValueOtherThan(q.text(), () -> randomAlphaOfLength(5)), q.predicate())); // TODO mutate the predicate return randomFrom(options).apply(query); }
@@ -66,18 +67,18 @@ public void testQueryBuilding() { } private static MatchQueryBuilder getBuilder(String options) { - final Location location = new Location(1, 1); + final Source source = new Source(1, 1, StringUtils.EMPTY); FieldAttribute fa = new FieldAttribute(EMPTY, "a", new EsField("af", DataType.KEYWORD, emptyMap(), true)); - final MatchQueryPredicate mmqp = new MatchQueryPredicate(location, fa, "eggplant", options); - final MatchQuery mmq = new MatchQuery(location, "eggplant", "foo", mmqp); + final MatchQueryPredicate mmqp = new MatchQueryPredicate(source, fa, "eggplant", options); + final MatchQuery mmq = new MatchQuery(source, "eggplant", "foo", mmqp); return (MatchQueryBuilder) mmq.asBuilder(); } public void testToString() { - final Location location = new Location(1, 1); + final Source source = new Source(1, 1, StringUtils.EMPTY); FieldAttribute fa = new FieldAttribute(EMPTY, "a", new EsField("af", DataType.KEYWORD, emptyMap(), true)); - final MatchQueryPredicate mmqp = new MatchQueryPredicate(location, fa, "eggplant", ""); - final MatchQuery mmq = new MatchQuery(location, "eggplant", "foo", mmqp); + final MatchQueryPredicate mmqp = new MatchQueryPredicate(source, fa, "eggplant", ""); + final MatchQuery mmq = new MatchQuery(source, "eggplant", "foo", mmqp); assertEquals("MatchQuery@1:2[eggplant:foo]", mmq.toString()); } }
diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/querydsl/query/MultiMatchQueryTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/querydsl/query/MultiMatchQueryTests.java
index 2e26c7c0595c2..fb9c8d805cd18 100644
--- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/querydsl/query/MultiMatchQueryTests.java
+++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/querydsl/query/MultiMatchQueryTests.java
@@ -9,7 +9,8 @@ import org.elasticsearch.index.query.MultiMatchQueryBuilder; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.sql.expression.predicate.fulltext.MultiMatchQueryPredicate; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; +import org.elasticsearch.xpack.sql.util.StringUtils; import java.util.HashMap; import java.util.Map;
@@ -34,23 +35,23 @@ public void testQueryBuilding() { } private static MultiMatchQueryBuilder getBuilder(String options) { - final Location location = new Location(1, 1); - final MultiMatchQueryPredicate mmqp = new MultiMatchQueryPredicate(location, "foo,bar", "eggplant", options); + final Source source = new Source(1, 1, StringUtils.EMPTY); + final MultiMatchQueryPredicate mmqp = new MultiMatchQueryPredicate(source, "foo,bar", "eggplant", options); final Map<String, Float> fields = new HashMap<>(); fields.put("foo", 1.0f); fields.put("bar", 1.0f); - final MultiMatchQuery mmq = new MultiMatchQuery(location, "eggplant", fields, mmqp); + final MultiMatchQuery mmq = new MultiMatchQuery(source, "eggplant", fields, mmqp); return (MultiMatchQueryBuilder) mmq.asBuilder(); } public void testToString() { - final Location location = new Location(1, 1); - final MultiMatchQueryPredicate mmqp = new MultiMatchQueryPredicate(location, "foo,bar", "eggplant", ""); + final Source source = new Source(1, 1, StringUtils.EMPTY); + final MultiMatchQueryPredicate mmqp = new MultiMatchQueryPredicate(source, "foo,bar", "eggplant", ""); // Use a TreeMap so we get the fields in a predictable order.
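// (Editor's added comment: with a HashMap the "{bar=1.0, foo=1.0}" fragment
// asserted below would depend on iteration order; TreeMap sorts its keys, so
// "bar" reliably precedes "foo".)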
final Map<String, Float> fields = new TreeMap<>(); fields.put("foo", 1.0f); fields.put("bar", 1.0f); - final MultiMatchQuery mmq = new MultiMatchQuery(location, "eggplant", fields, mmqp); + final MultiMatchQuery mmq = new MultiMatchQuery(source, "eggplant", fields, mmqp); assertEquals("MultiMatchQuery@1:2[{bar=1.0, foo=1.0}:eggplant]", mmq.toString()); } } diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/querydsl/query/NestedQueryTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/querydsl/query/NestedQueryTests.java index 3e22895494968..5fe69760a694d 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/querydsl/query/NestedQueryTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/querydsl/query/NestedQueryTests.java @@ -9,8 +9,9 @@ import org.elasticsearch.search.sort.NestedSortBuilder; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.sql.SqlIllegalArgumentException; -import org.elasticsearch.xpack.sql.tree.Location; -import org.elasticsearch.xpack.sql.tree.LocationTests; +import org.elasticsearch.xpack.sql.tree.Source; +import org.elasticsearch.xpack.sql.tree.SourceTests; +import org.elasticsearch.xpack.sql.util.StringUtils; import java.util.AbstractMap.SimpleImmutableEntry; import java.util.ArrayList; @@ -21,9 +22,9 @@ import java.util.function.Function; import java.util.function.Supplier; +import static java.util.Collections.singletonMap; import static org.elasticsearch.test.EqualsHashCodeTestUtils.checkEqualsAndHashCode; import static org.hamcrest.Matchers.hasEntry; -import static java.util.Collections.singletonMap; public class NestedQueryTests extends ESTestCase { static Query randomQuery(int depth) { @@ -37,7 +38,7 @@ static Query randomQuery(int depth) { } static NestedQuery randomNestedQuery(int depth) { - return new NestedQuery(LocationTests.randomLocation(), randomAlphaOfLength(5), randomFields(), randomQuery(depth)); + return new NestedQuery(SourceTests.randomSource(), randomAlphaOfLength(5), randomFields(), randomQuery(depth)); } private static Map<String, Map.Entry<Boolean, String>> randomFields() { @@ -54,15 +55,15 @@ public void testEqualsAndHashCode() { } private static NestedQuery copy(NestedQuery query) { - return new NestedQuery(query.location(), query.path(), query.fields(), query.child()); + return new NestedQuery(query.source(), query.path(), query.fields(), query.child()); } private static NestedQuery mutate(NestedQuery query) { List<Function<NestedQuery, NestedQuery>> options = Arrays.asList( - q -> new NestedQuery(LocationTests.mutate(q.location()), q.path(), q.fields(), q.child()), - q -> new NestedQuery(q.location(), randomValueOtherThan(q.path(), () -> randomAlphaOfLength(5)), q.fields(), q.child()), - q -> new NestedQuery(q.location(), q.path(), randomValueOtherThan(q.fields(), NestedQueryTests::randomFields), q.child()), - q -> new NestedQuery(q.location(), q.path(), q.fields(), randomValueOtherThan(q.child(), () -> randomQuery(5)))); + q -> new NestedQuery(SourceTests.mutate(q.source()), q.path(), q.fields(), q.child()), + q -> new NestedQuery(q.source(), randomValueOtherThan(q.path(), () -> randomAlphaOfLength(5)), q.fields(), q.child()), + q -> new NestedQuery(q.source(), q.path(), randomValueOtherThan(q.fields(), NestedQueryTests::randomFields), q.child()), + q -> new NestedQuery(q.source(), q.path(), q.fields(), randomValueOtherThan(q.child(), () -> randomQuery(5)))); return randomFrom(options).apply(query); } @@ -123,7 +124,7 @@ public void testEnrichNestedSort() { q.enrichNestedSort(sort); // But enriching using another
query is not - NestedQuery other = new NestedQuery(LocationTests.randomLocation(), q.path(), q.fields(), + NestedQuery other = new NestedQuery(SourceTests.randomSource(), q.path(), q.fields(), randomValueOtherThan(q.child(), () -> randomQuery(0))); Exception e = expectThrows(SqlIllegalArgumentException.class, () -> other.enrichNestedSort(sort)); assertEquals("nested query should have been grouped in one place", e.getMessage()); @@ -131,9 +132,9 @@ public void testEnrichNestedSort() { } public void testToString() { - NestedQuery q = new NestedQuery(new Location(1, 1), "a.b", + NestedQuery q = new NestedQuery(new Source(1, 1, StringUtils.EMPTY), "a.b", singletonMap("f", new SimpleImmutableEntry<>(true, DocValueFieldsContext.USE_DEFAULT_FORMAT)), - new MatchAll(new Location(1, 1))); + new MatchAll(new Source(1, 1, StringUtils.EMPTY))); assertEquals("NestedQuery@1:2[a.b.{f=true=use_field_mapping}[MatchAll@1:2[]]]", q.toString()); } -} +} \ No newline at end of file diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/querydsl/query/QueryStringQueryTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/querydsl/query/QueryStringQueryTests.java index 229a4392ed2e0..89e88b3e9af1f 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/querydsl/query/QueryStringQueryTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/querydsl/query/QueryStringQueryTests.java @@ -6,11 +6,12 @@ package org.elasticsearch.xpack.sql.querydsl.query; import org.elasticsearch.ElasticsearchParseException; -import org.elasticsearch.index.query.QueryStringQueryBuilder; import org.elasticsearch.index.query.Operator; +import org.elasticsearch.index.query.QueryStringQueryBuilder; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.sql.expression.predicate.fulltext.StringQueryPredicate; -import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.Source; +import org.elasticsearch.xpack.sql.util.StringUtils; import java.util.Collections; @@ -34,17 +35,17 @@ public void testQueryBuilding() { } private static QueryStringQueryBuilder getBuilder(String options) { - final Location location = new Location(1, 1); - final StringQueryPredicate mmqp = new StringQueryPredicate(location, "eggplant", options); - final QueryStringQuery mmq = new QueryStringQuery(location, "eggplant", Collections.singletonMap("foo", 1.0f), mmqp); + final Source source = new Source(1, 1, StringUtils.EMPTY); + final StringQueryPredicate mmqp = new StringQueryPredicate(source, "eggplant", options); + final QueryStringQuery mmq = new QueryStringQuery(source, "eggplant", Collections.singletonMap("foo", 1.0f), mmqp); return (QueryStringQueryBuilder) mmq.asBuilder(); } public void testToString() { - final Location location = new Location(1, 1); - final StringQueryPredicate mmqp = new StringQueryPredicate(location, "eggplant", ""); - final QueryStringQuery mmq = new QueryStringQuery(location, "eggplant", Collections.singletonMap("foo", 1.0f), mmqp); + final Source source = new Source(1, 1, StringUtils.EMPTY); + final StringQueryPredicate mmqp = new StringQueryPredicate(source, "eggplant", ""); + final QueryStringQuery mmq = new QueryStringQuery(source, "eggplant", Collections.singletonMap("foo", 1.0f), mmqp); assertEquals("QueryStringQuery@1:2[{foo=1.0}:eggplant]", mmq.toString()); } } diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/tree/LocationTests.java 
b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/tree/LocationTests.java deleted file mode 100644 index b54cb503c8d10..0000000000000 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/tree/LocationTests.java +++ /dev/null @@ -1,37 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. - */ -package org.elasticsearch.xpack.sql.tree; - -import org.elasticsearch.test.ESTestCase; - -import java.util.Arrays; -import java.util.List; -import java.util.function.Function; - -import static org.elasticsearch.test.EqualsHashCodeTestUtils.checkEqualsAndHashCode; - -public class LocationTests extends ESTestCase { - public static Location randomLocation() { - return new Location(between(1, Integer.MAX_VALUE), between(1, Integer.MAX_VALUE)); - } - - public static Location mutate(Location location) { - List<Function<Location, Location>> options = Arrays.asList( - l -> new Location( - randomValueOtherThan(l.getLineNumber(), () -> between(1, Integer.MAX_VALUE)), - l.getColumnNumber() - 1), - l -> new Location( - l.getLineNumber(), - randomValueOtherThan(l.getColumnNumber() - 1, () -> between(1, Integer.MAX_VALUE)))); - return randomFrom(options).apply(location); - } - - public void testEqualsAndHashCode() { - checkEqualsAndHashCode(randomLocation(), - l -> new Location(l.getLineNumber(), l.getColumnNumber() - 1), - LocationTests::mutate); - } -} diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/tree/NodeSubclassTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/tree/NodeSubclassTests.java index a8145d9f3bf58..e7485157d3b8b 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/tree/NodeSubclassTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/tree/NodeSubclassTests.java @@ -252,7 +252,7 @@ private void assertTransformedOrReplacedChildren(T node, B transformed, Construc * Transforming using the way we did above should only change * the one property of the node that we intended to transform. */ - assertEquals(node.location(), transformed.location()); + assertEquals(node.source(), transformed.source()); List<Object> op = node.properties(); List<Object> tp = transformed.properties(); for (int p = 0; p < op.size(); p++) { @@ -463,7 +463,7 @@ public boolean equals(Object obj) { } } else if (toBuildClass == CurrentDateTime.class) { if (argClass == Expression.class) { - return Literal.of(SourceTests.randomSource(), randomInt(9)); + return Literal.of(SourceTests.randomSource(), randomInt(9)); } } if (Expression.class == argClass) { @@ -517,9 +517,9 @@ public boolean equals(Object obj) { // Nor strings return randomAlphaOfLength(5); } - if (argClass == Location.class) { + if (argClass == Source.class) { - // Location is final and can't be mocked but we have a handy method to generate ones. + // Source is final and can't be mocked but we have a handy method to generate ones.
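+            // (illustrative note: SourceTests.randomSource(), added later in this patch, builds
+            //  new Source(between(1, Integer.MAX_VALUE), between(1, Integer.MAX_VALUE), randomAlphaOfLength(25)),
+            //  so any Source-typed constructor argument can be filled without mocking)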
- return LocationTests.randomLocation(); + return SourceTests.randomSource(); } try { return mock(argClass); diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/tree/NodeTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/tree/NodeTests.java index e9d03d31c1b1a..f73597bc0c4f8 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/tree/NodeTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/tree/NodeTests.java @@ -7,40 +7,39 @@ import org.elasticsearch.test.ESTestCase; -import static org.elasticsearch.xpack.sql.tree.LocationTests.randomLocation; - import java.util.Arrays; import java.util.List; import java.util.Objects; import static java.util.Collections.emptyList; import static java.util.Collections.singletonList; +import static org.elasticsearch.xpack.sql.tree.SourceTests.randomSource; public class NodeTests extends ESTestCase { public void testToString() { - assertEquals("NoChildren[thing]", new NoChildren(randomLocation(), "thing").toString()); + assertEquals("NoChildren[thing]", new NoChildren(randomSource(), "thing").toString()); { - ChildrenAreAProperty empty = new ChildrenAreAProperty(randomLocation(), emptyList(), "thing"); + ChildrenAreAProperty empty = new ChildrenAreAProperty(randomSource(), emptyList(), "thing"); assertEquals("ChildrenAreAProperty[thing]", empty.toString()); assertEquals("ChildrenAreAProperty[single]\n\\_ChildrenAreAProperty[thing]", - new ChildrenAreAProperty(randomLocation(), singletonList(empty), "single").toString()); + new ChildrenAreAProperty(randomSource(), singletonList(empty), "single").toString()); assertEquals("ChildrenAreAProperty[many]\n" + "|_ChildrenAreAProperty[thing]\n" + "\\_ChildrenAreAProperty[thing]", - new ChildrenAreAProperty(randomLocation(), Arrays.asList(empty, empty), "many").toString()); + new ChildrenAreAProperty(randomSource(), Arrays.asList(empty, empty), "many").toString()); } { - NoChildren empty = new NoChildren(randomLocation(), "thing"); + NoChildren empty = new NoChildren(randomSource(), "thing"); assertEquals("AChildIsAProperty[single]\n" + "\\_NoChildren[thing]", - new AChildIsAProperty(randomLocation(), empty, "single").toString()); + new AChildIsAProperty(randomSource(), empty, "single").toString()); } } public abstract static class Dummy extends Node<Dummy> { private final String thing; - public Dummy(Location location, List<Dummy> children, String thing) { - super(location, children); + public Dummy(Source source, List<Dummy> children, String thing) { + super(source, children); this.thing = thing; } @@ -68,8 +67,8 @@ public int hashCode() { } public static class ChildrenAreAProperty extends Dummy { - public ChildrenAreAProperty(Location location, List<Dummy> children, String thing) { - super(location, children, thing); + public ChildrenAreAProperty(Source source, List<Dummy> children, String thing) { + super(source, children, thing); } @Override @@ -79,13 +78,13 @@ protected NodeInfo<ChildrenAreAProperty> info() { @Override public ChildrenAreAProperty replaceChildren(List<Dummy> newChildren) { - return new ChildrenAreAProperty(location(), newChildren, thing()); + return new ChildrenAreAProperty(source(), newChildren, thing()); } } public static class AChildIsAProperty extends Dummy { - public AChildIsAProperty(Location location, Dummy child, String thing) { - super(location, singletonList(child), thing); + public AChildIsAProperty(Source source, Dummy child, String thing) { + super(source, singletonList(child), thing); } @Override @@ -98,7 +97,7 @@ public AChildIsAProperty replaceChildren(List<Dummy>
newChildren) { if (newChildren.size() != 1) { throw new IllegalArgumentException("expected [1] child but received [" + newChildren.size() + "]"); } - return new AChildIsAProperty(location(), newChildren.get(0), thing()); + return new AChildIsAProperty(source(), newChildren.get(0), thing()); } public Dummy child() { @@ -107,8 +106,8 @@ public Dummy child() { } public static class NoChildren extends Dummy { - public NoChildren(Location location, String thing) { - super(location, emptyList(), thing); + public NoChildren(Source source, String thing) { + super(source, emptyList(), thing); } @Override diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/tree/SourceTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/tree/SourceTests.java new file mode 100644 index 0000000000000..bd9855377193c --- /dev/null +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/tree/SourceTests.java @@ -0,0 +1,39 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.sql.tree; + +import org.elasticsearch.test.ESTestCase; + +import java.util.Arrays; +import java.util.List; +import java.util.function.Function; + +import static org.elasticsearch.test.EqualsHashCodeTestUtils.checkEqualsAndHashCode; + +public class SourceTests extends ESTestCase { + public static Source randomSource() { + return new Source(between(1, Integer.MAX_VALUE), between(1, Integer.MAX_VALUE), randomAlphaOfLength(25)); + } + + public static Source mutate(Source source) { + List<Function<Source, Source>> options = Arrays.asList( + l -> new Source( + randomValueOtherThan(l.source().getLineNumber(), () -> between(1, Integer.MAX_VALUE)), + l.source().getColumnNumber() - 1, + l.text()), + l -> new Source( + l.source().getLineNumber(), + randomValueOtherThan(l.source().getColumnNumber() - 1, () -> between(1, Integer.MAX_VALUE)), + l.text())); + return randomFrom(options).apply(source); + } + + public void testEqualsAndHashCode() { + checkEqualsAndHashCode(randomSource(), + l -> new Source(l.source().getLineNumber(), l.source().getColumnNumber() - 1, l.text()), + SourceTests::mutate); + } +} diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/type/DataTypeConversionTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/type/DataTypeConversionTests.java index a301c1218c492..4aa3cba3d7f18 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/type/DataTypeConversionTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/type/DataTypeConversionTests.java @@ -14,7 +14,7 @@ import java.time.ZonedDateTime; import static org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DateTimeTestUtils.dateTime; -import static org.elasticsearch.xpack.sql.tree.Location.EMPTY; +import static org.elasticsearch.xpack.sql.tree.Source.EMPTY; import static org.elasticsearch.xpack.sql.type.DataType.BOOLEAN; import static org.elasticsearch.xpack.sql.type.DataType.BYTE; import static org.elasticsearch.xpack.sql.type.DataType.DATE; diff --git a/x-pack/plugin/src/test/java/org/elasticsearch/xpack/test/rest/XPackRestIT.java b/x-pack/plugin/src/test/java/org/elasticsearch/xpack/test/rest/XPackRestIT.java index 413447dd204ec..57c944788482e 100644 +++
b/x-pack/plugin/src/test/java/org/elasticsearch/xpack/test/rest/XPackRestIT.java @@ -46,7 +46,7 @@ import static java.util.Collections.singletonList; import static java.util.Collections.singletonMap; import static org.elasticsearch.common.xcontent.support.XContentMapValues.extractValue; -import static org.elasticsearch.rest.action.search.RestSearchAction.TOTAL_HIT_AS_INT_PARAM; +import static org.elasticsearch.rest.action.search.RestSearchAction.TOTAL_HITS_AS_INT_PARAM; import static org.elasticsearch.xpack.core.security.authc.support.UsernamePasswordToken.basicAuthHeaderValue; import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.is; @@ -139,7 +139,7 @@ private void waitForWatcher() throws Exception { return; } Request searchWatchesRequest = new Request("GET", ".watches/_search"); - searchWatchesRequest.addParameter(TOTAL_HIT_AS_INT_PARAM, "true"); + searchWatchesRequest.addParameter(TOTAL_HITS_AS_INT_PARAM, "true"); searchWatchesRequest.addParameter("size", "1000"); Response response = adminClient().performRequest(searchWatchesRequest); ObjectPath objectPathResponse = ObjectPath.createFromResponse(response); @@ -184,7 +184,7 @@ private void enableMonitoring() throws Exception { () -> "Exception when enabling monitoring"); Map<String, String> searchParams = new HashMap<>(); searchParams.put("index", ".monitoring-*"); - searchParams.put(TOTAL_HIT_AS_INT_PARAM, "true"); + searchParams.put(TOTAL_HITS_AS_INT_PARAM, "true"); awaitCallApi("search", searchParams, emptyList(), response -> ((Number) response.evaluate("hits.total")).intValue() > 0, () -> "Exception when waiting for monitoring documents to be indexed"); diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/api/ccr.resume_follow.json b/x-pack/plugin/src/test/resources/rest-api-spec/api/ccr.resume_follow.json index 61bdf82372fc0..61e3b8580fc2c 100644 --- a/x-pack/plugin/src/test/resources/rest-api-spec/api/ccr.resume_follow.json +++ b/x-pack/plugin/src/test/resources/rest-api-spec/api/ccr.resume_follow.json @@ -15,7 +15,7 @@ }, "body": { "description" : "The name of the leader index and other optional ccr related parameters", - "required" : true + "required" : false } } } diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/api/ml.upgrade.json b/x-pack/plugin/src/test/resources/rest-api-spec/api/ml.upgrade.json new file mode 100644 index 0000000000000..b67b125bb692a --- /dev/null +++ b/x-pack/plugin/src/test/resources/rest-api-spec/api/ml.upgrade.json @@ -0,0 +1,21 @@ +{ + "ml.upgrade": { + "documentation": "TODO", + "methods": [ "POST" ], + "url": { + "path": "/_ml/_upgrade", + "paths": [ "/_ml/_upgrade" ], + "params": { + "wait_for_completion": { + "type": "boolean", + "description": "Should this request wait until the operation has completed before returning", + "default": false + } + } + }, + "body": { + "description" : "Upgrade options", + "required" : false + } + } +} diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/datafeeds_crud.yml b/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/datafeeds_crud.yml index a2914834c8978..742fc00beda74 100644 --- a/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/datafeeds_crud.yml +++ b/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/datafeeds_crud.yml @@ -80,8 +80,7 @@ setup: body: > { "job_id":"a-missing-job", - "indexes":["index-foo"], - "types":["type-bar"] + "indexes":["index-foo"] } --- @@ -105,13 +104,11 @@ setup: body: > { "job_id":"datafeeds-crud-1", - "indexes":["index-foo"], - "types":["type-bar"] +
"indexes":["index-foo"] } - match: { datafeed_id: "test-datafeed-1" } - match: { job_id: "datafeeds-crud-1" } - match: { indices: ["index-foo"] } - - match: { types: ["type-bar"] } - match: { scroll_size: 1000 } - is_true: query.match_all - match: { chunking_config: { mode: "auto" }} @@ -124,8 +121,7 @@ setup: body: > { "job_id":"datafeeds-crud-1", - "indexes":["index-foo"], - "types":["type-bar"] + "indexes":["index-foo"] } - match: { datafeed_id: "test-datafeed-1" } @@ -136,8 +132,7 @@ setup: body: > { "job_id":"datafeeds-crud-2", - "indexes":["index-foo"], - "types":["type-bar"] + "indexes":["index-foo"] } --- @@ -148,8 +143,7 @@ setup: body: > { "job_id":"datafeeds-crud-1", - "indexes":["index-foo"], - "types":["type-bar"] + "indexes":["index-foo"] } - match: { datafeed_id: "test-datafeed-1" } @@ -160,8 +154,7 @@ setup: body: > { "job_id":"datafeeds-crud-1", - "indexes":["index-foo"], - "types":["type-bar"] + "indexes":["index-foo"] } --- @@ -174,7 +167,6 @@ setup: { "job_id":"datafeeds-crud-1", "indexes":["index-foo"], - "types":["type-bar"], "query":{"match_all_misspelled":{}} } @@ -187,7 +179,6 @@ setup: { "job_id":"datafeeds-crud-1", "indexes":["index-foo"], - "types":["type-bar"], "scroll_size": 2000, "frequency": "1m", "query_delay": "30s" @@ -206,7 +197,6 @@ setup: - match: { datafeed_id: "test-datafeed-1" } - match: { job_id: "datafeeds-crud-1" } - match: { indices: ["index-*"] } - - match: { types: ["type-bar"] } - match: { scroll_size: 10000 } - match: { frequency: "2m" } - match: { query_delay: "0s" } @@ -220,7 +210,6 @@ setup: { "job_id":"datafeeds-crud-1", "indexes":["index-foo"], - "types":["type-bar"], "scroll_size": 2000 } @@ -234,7 +223,6 @@ setup: - match: { datafeed_id: "test-datafeed-1" } - match: { job_id: "datafeeds-crud-2" } - match: { indices: ["index-foo"] } - - match: { types: ["type-bar"] } --- "Test update datafeed with missing id": @@ -255,7 +243,6 @@ setup: { "job_id":"datafeeds-crud-1", "indexes":["index-foo"], - "types":["type-bar"], "scroll_size": 2000 } @@ -276,8 +263,7 @@ setup: body: > { "job_id":"datafeeds-crud-1", - "indexes":["index-foo"], - "types":["type-bar"] + "indexes":["index-foo"] } - do: @@ -286,8 +272,7 @@ setup: body: > { "job_id":"datafeeds-crud-2", - "indexes":["index-foo"], - "types":["type-bar"] + "indexes":["index-foo"] } - do: @@ -315,7 +300,6 @@ setup: { "job_id":"datafeeds-crud-1", "indices":["index-foo"], - "types":["type-bar"], "chunking_config": {"mode":"manual","time_span": "1h"} } - match: { datafeed_id: "test-datafeed-1" } @@ -331,7 +315,6 @@ setup: { "job_id":"datafeeds-crud-1", "indices":["index-foo"], - "types":["type-bar"], "aggs": { "histogram_buckets":{ "date_histogram": { @@ -385,8 +368,7 @@ setup: body: > { "job_id":"datafeeds-crud-1", - "indexes":["index-foo"], - "types":["type-bar"] + "indexes":["index-foo"] } - match: { datafeed_id: "test-datafeed-1" } @@ -403,8 +385,7 @@ setup: body: > { "job_id":"datafeeds-crud-1", - "indexes":["index-foo"], - "types":["type-bar"] + "indexes":["index-foo"] } - match: { datafeed_id: "test-datafeed-1" } diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/delete_job_force.yml b/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/delete_job_force.yml index b5e3e68c3c5b8..8e12056c27fed 100644 --- a/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/delete_job_force.yml +++ b/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/delete_job_force.yml @@ -65,8 +65,7 @@ setup: body: > { "job_id":"force-delete-job", - "indexes":["index-foo"], - 
"types":["type-bar"] + "indexes":["index-foo"] } - match: { datafeed_id: force-delete-job-datafeed } diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/get_datafeed_stats.yml b/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/get_datafeed_stats.yml index 0c89c444d00c6..70626a0afd752 100644 --- a/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/get_datafeed_stats.yml +++ b/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/get_datafeed_stats.yml @@ -81,8 +81,7 @@ setup: body: > { "job_id":"get-datafeed-stats-1", - "indexes":["index-1"], - "types":["type-1"] + "indexes":["index-1"] } - do: @@ -93,8 +92,7 @@ setup: body: > { "job_id":"get-datafeed-stats-2", - "indexes":["index-2"], - "types":["type-2"] + "indexes":["index-2"] } --- diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/get_datafeeds.yml b/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/get_datafeeds.yml index 5595f4d99077b..414ee9acb2254 100644 --- a/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/get_datafeeds.yml +++ b/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/get_datafeeds.yml @@ -45,8 +45,7 @@ setup: body: > { "job_id":"get-datafeed-1", - "indexes":["index-1"], - "types":["type-1"] + "indexes":["index-1"] } - do: @@ -57,8 +56,7 @@ setup: body: > { "job_id":"get-datafeed-2", - "indexes":["index-2"], - "types":["type-2"] + "indexes":["index-2"] } --- diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/jobs_crud.yml b/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/jobs_crud.yml index 7a6bec44bc0db..1ab385aee9162 100644 --- a/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/jobs_crud.yml +++ b/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/jobs_crud.yml @@ -516,8 +516,7 @@ body: > { "job_id":"jobs-crud-datafeed-job", - "indexes":["index-foo"], - "types":["type-bar"] + "indexes":["index-foo"] } - match: { datafeed_id: "jobs-crud-test-datafeed-1" } diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/jobs_get_stats.yml b/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/jobs_get_stats.yml index aea80c69f4988..e29700e423bdd 100644 --- a/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/jobs_get_stats.yml +++ b/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/jobs_get_stats.yml @@ -66,8 +66,7 @@ setup: body: > { "job_id":"jobs-get-stats-datafeed-job", - "indexes":["farequote"], - "types":["response"] + "indexes":["farequote"] } --- @@ -151,7 +150,9 @@ setup: ml.get_job_stats: job_id: _all - match: { count: 2 } + - match: { jobs.0.job_id : job-stats-test } - match: { jobs.0.state: opened } + - match: { jobs.1.job_id : jobs-get-stats-datafeed-job } - match: { jobs.1.state: opened } --- @@ -161,7 +162,9 @@ setup: ml.get_job_stats: job_id: "*" - match: { count: 2 } + - match: { jobs.0.job_id : job-stats-test } - match: { jobs.0.state: opened } + - match: { jobs.1.job_id : jobs-get-stats-datafeed-job } - match: { jobs.1.state: opened } --- @@ -311,6 +314,7 @@ setup: - do: ml.get_job_stats: {} - match: { count: 2 } + - match: { jobs.0.job_id : job-stats-test } - match: { jobs.0.data_counts.processed_record_count: 0 } - match: { jobs.0.data_counts.processed_field_count: 0 } - match: { jobs.0.data_counts.input_field_count: 0 } @@ -318,6 +322,7 @@ setup: - match: { jobs.0.state: closed } - is_false: jobs.0.node - is_false: jobs.0.open_time + - match: { jobs.1.job_id : jobs-get-stats-datafeed-job } - match: { jobs.1.data_counts.processed_record_count: 0 } - match: { 
jobs.1.data_counts.processed_field_count: 0 } - match: { jobs.1.data_counts.input_field_count: 0 } @@ -325,5 +330,3 @@ setup: - match: { jobs.1.state: closed } - is_false: jobs.1.node - is_false: jobs.1.open_time - - diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/ml_info.yml b/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/ml_info.yml index 7d2b05e852873..0013661e6d436 100644 --- a/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/ml_info.yml +++ b/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/ml_info.yml @@ -1,3 +1,11 @@ +teardown: + + - do: + cluster.put_settings: + body: + persistent: + xpack.ml.max_model_memory_limit: null + --- "Test ml info": - do: diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/ml_upgrade.yml b/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/ml_upgrade.yml new file mode 100644 index 0000000000000..ee1f9f77f9325 --- /dev/null +++ b/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/ml_upgrade.yml @@ -0,0 +1,70 @@ +setup: + - skip: + features: headers + - do: + headers: + Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser + ml.put_job: + job_id: jobs-upgrade-results + body: > + { + "analysis_config" : { + "detectors" :[{"function":"metric","field_name":"responsetime","by_field_name":"airline"}] + }, + "data_description" : { + "format":"xcontent", + "time_field":"time" + } + } + + - do: + headers: + Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser + Content-Type: application/json + index: + index: .ml-anomalies-jobs-upgrade-results + type: doc + id: "jobs-upgrade-results_1464739200000_1" + body: + { + "job_id": "jobs-upgrade-results", + "result_type": "bucket", + "timestamp": "2016-06-01T00:00:00Z", + "anomaly_score": 90.0, + "bucket_span":1 + } + + - do: + headers: + Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. 
the test setup superuser + indices.refresh: + index: .ml-anomalies-jobs-upgrade-results + +--- +"Upgrade results when there is nothing to upgrade": + - do: + ml.upgrade: + wait_for_completion: true + + - match: { acknowledged: true } + + - do: + indices.exists: + index: .ml-anomalies-shared + + - is_true: '' +--- +"Upgrade results when there is nothing to upgrade not waiting for results": + - do: + ml.upgrade: + wait_for_completion: false + + - match: {task: '/.+:\d+/'} + - set: {task: task} + + - do: + tasks.get: + wait_for_completion: true + task_id: $task + - match: {completed: true} + - match: {response.acknowledged: true} diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/preview_datafeed.yml b/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/preview_datafeed.yml index 2de179b395cf1..23cdb3b55c664 100644 --- a/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/preview_datafeed.yml +++ b/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/preview_datafeed.yml @@ -94,8 +94,7 @@ setup: body: > { "job_id":"preview-datafeed-job", - "indexes":"airline-data", - "types":"response" + "indexes":"airline-data" } - do: @@ -140,7 +139,6 @@ setup: { "job_id":"aggregation-doc-count-job", "indexes":"airline-data", - "types":"response", "aggregations": { "buckets": { "histogram": { @@ -213,7 +211,6 @@ setup: { "job_id":"aggregation-custom-single-metric-summary-job", "indexes":"airline-data", - "types":"response", "aggregations": { "buckets": { "histogram": { @@ -272,7 +269,6 @@ setup: { "job_id":"aggregation-custom-multi-metric-summary-job", "indexes":"airline-data", - "types":"response", "aggregations": { "buckets": { "histogram": { @@ -358,8 +354,7 @@ setup: body: > { "job_id":"unavailable-job", - "indexes":"unavailable-data", - "types":"response" + "indexes":"unavailable-data" } - do: @@ -391,7 +386,6 @@ setup: { "job_id":"empty-job", "indexes":"airline-data", - "types":"response", "query": { "term": {"airline":"missing"} } diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/start_stop_datafeed.yml b/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/start_stop_datafeed.yml index 081709332dc20..1dd201007f96f 100644 --- a/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/start_stop_datafeed.yml +++ b/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/start_stop_datafeed.yml @@ -48,8 +48,7 @@ setup: body: > { "job_id":"start-stop-datafeed-job", - "indexes":"airline-data", - "types":"response" + "indexes":"airline-data" } --- @@ -222,8 +221,7 @@ setup: body: > { "job_id":"start-stop-datafeed-job-field-without-mappings", - "indexes":"airline-data", - "types":"response" + "indexes":"airline-data" } - do: @@ -330,8 +328,7 @@ setup: body: > { "job_id":"start-stop-datafeed-job-foo-1", - "indexes":"airline-data", - "types":"response" + "indexes":"airline-data" } - do: @@ -340,8 +337,7 @@ setup: body: > { "job_id":"start-stop-datafeed-job-foo-2", - "indexes":"airline-data", - "types":"response" + "indexes":"airline-data" } - do: @@ -350,8 +346,7 @@ setup: body: > { "job_id":"start-stop-datafeed-job-bar-1", - "indexes":"airline-data", - "types":"response" + "indexes":"airline-data" } - do: diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/test/token/10_basic.yml b/x-pack/plugin/src/test/resources/rest-api-spec/test/token/10_basic.yml index 81389ac8524f4..3400b4f83f647 100644 --- a/x-pack/plugin/src/test/resources/rest-api-spec/test/token/10_basic.yml +++ b/x-pack/plugin/src/test/resources/rest-api-spec/test/token/10_basic.yml @@ -79,7 +79,6
@@ teardown: body: token: $token - - match: { created: true} - match: { invalidated_tokens: 1 } - match: { previously_invalidated_tokens: 0 } - match: { error_count: 0 } @@ -120,7 +119,6 @@ teardown: body: username: "token_user" - - match: { created: true} - match: { invalidated_tokens: 2 } - match: { previously_invalidated_tokens: 0 } - match: { error_count: 0 } @@ -162,7 +160,6 @@ teardown: body: realm_name: "default_native" - - match: { created: true} - match: { invalidated_tokens: 2 } - match: { previously_invalidated_tokens: 0 } - match: { error_count: 0 } diff --git a/x-pack/plugin/watcher/build.gradle b/x-pack/plugin/watcher/build.gradle index 3d764e5db3e36..fe6885b1c9d72 100644 --- a/x-pack/plugin/watcher/build.gradle +++ b/x-pack/plugin/watcher/build.gradle @@ -48,28 +48,41 @@ dependencies { } // classes are missing, e.g. com.ibm.icu.lang.UCharacter -thirdPartyAudit.excludes = [ - // uses internal java api: sun.misc.Unsafe - 'com.google.common.cache.Striped64', - 'com.google.common.cache.Striped64$1', - 'com.google.common.cache.Striped64$Cell', - 'com.google.common.primitives.UnsignedBytes$LexicographicalComparatorHolder$UnsafeComparator', - 'com.google.common.primitives.UnsignedBytes$LexicographicalComparatorHolder$UnsafeComparator$1', - - // pulled in as external dependency to work on java 9 - 'com.sun.activation.registries.LineTokenizer', - 'com.sun.activation.registries.LogSupport', - 'com.sun.activation.registries.MailcapFile', - 'com.sun.activation.registries.MailcapTokenizer', - 'com.sun.activation.registries.MimeTypeEntry', - 'com.sun.activation.registries.MimeTypeFile', - 'javax.activation.MailcapCommandMap', - 'javax.activation.MimetypesFileTypeMap', -] +thirdPartyAudit { + ignoreViolations ( + // uses internal java api: sun.misc.Unsafe + 'com.google.common.cache.Striped64', + 'com.google.common.cache.Striped64$1', + 'com.google.common.cache.Striped64$Cell', + 'com.google.common.primitives.UnsignedBytes$LexicographicalComparatorHolder$UnsafeComparator', + 'com.google.common.primitives.UnsignedBytes$LexicographicalComparatorHolder$UnsafeComparator$1' + ) + + ignoreViolations( + 'com.sun.activation.registries.LineTokenizer', + 'com.sun.activation.registries.LogSupport', + 'com.sun.activation.registries.MailcapFile', + 'com.sun.activation.registries.MailcapTokenizer', + 'com.sun.activation.registries.MimeTypeEntry', + 'com.sun.activation.registries.MimeTypeFile', + 'javax.activation.MailcapCommandMap', + 'javax.activation.MimetypesFileTypeMap' + ) +} // pulled in as external dependency to work on java 9 if (project.runtimeJavaVersion <= JavaVersion.VERSION_1_8) { - thirdPartyAudit.excludes += [ + thirdPartyAudit.ignoreJarHellWithJDK ( + // pulled in as external dependency to work on java 9 + 'com.sun.activation.registries.LineTokenizer', + 'com.sun.activation.registries.LogSupport', + 'com.sun.activation.registries.MailcapFile', + 'com.sun.activation.registries.MailcapTokenizer', + 'com.sun.activation.registries.MimeTypeEntry', + 'com.sun.activation.registries.MimeTypeFile', + 'javax.activation.MailcapCommandMap', + 'javax.activation.MimetypesFileTypeMap', + 'com.sun.activation.registries.MailcapParseException', 'javax.activation.ActivationDataFlavor', 'javax.activation.CommandInfo', @@ -96,7 +109,7 @@ if (project.runtimeJavaVersion <= JavaVersion.VERSION_1_8) { 'javax.activation.SecuritySupport', 'javax.activation.URLDataSource', 'javax.activation.UnsupportedDataTypeException' - ] + ) } run { diff --git 
a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/Watcher.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/Watcher.java index 3ea99e5787fe0..7b17d7f9973d1 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/Watcher.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/Watcher.java @@ -273,7 +273,7 @@ public Collection<Object> createComponents(Client client, ClusterService cluster new WatcherIndexTemplateRegistry(clusterService, threadPool, client); // http client - httpClient = new HttpClient(settings, getSslService(), cryptoService); + httpClient = new HttpClient(settings, getSslService(), cryptoService, clusterService); // notification EmailService emailService = new EmailService(settings, cryptoService, clusterService.getClusterSettings()); diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/WatcherIndexingListener.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/WatcherIndexingListener.java index bd5907df24310..1974dd055ddc8 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/WatcherIndexingListener.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/WatcherIndexingListener.java @@ -194,7 +194,7 @@ public void clusterChanged(ClusterChangedEvent event) { // if there is no master node configured in the current state, this node should not try to trigger anything, but consider itself // inactive. the same applies, if there is a cluster block that does not allow writes if (Strings.isNullOrEmpty(event.state().nodes().getMasterNodeId()) || - event.state().getBlocks().hasGlobalBlock(ClusterBlockLevel.WRITE)) { + event.state().getBlocks().hasGlobalBlockWithLevel(ClusterBlockLevel.WRITE)) { configuration = INACTIVE; return; } diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/WatcherLifeCycleService.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/WatcherLifeCycleService.java index 74fa81c89a397..fca29821bfaa4 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/WatcherLifeCycleService.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/WatcherLifeCycleService.java @@ -82,7 +82,7 @@ public void clusterChanged(ClusterChangedEvent event) { return; } - if (event.state().getBlocks().hasGlobalBlock(ClusterBlockLevel.WRITE)) { + if (event.state().getBlocks().hasGlobalBlockWithLevel(ClusterBlockLevel.WRITE)) { pauseExecution("write level cluster block"); return; } diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/common/http/HttpClient.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/common/http/HttpClient.java index a2714e02c6229..10fb8889fae33 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/common/http/HttpClient.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/common/http/HttpClient.java @@ -8,7 +8,9 @@ import org.apache.http.Header; import org.apache.http.HttpHeaders; import org.apache.http.HttpHost; +import org.apache.http.HttpRequestInterceptor; import org.apache.http.NameValuePair; +import org.apache.http.ProtocolException; import org.apache.http.auth.AuthScope; import org.apache.http.auth.Credentials; import org.apache.http.auth.UsernamePasswordCredentials; @@ -19,6 +21,7 @@ import org.apache.http.client.methods.HttpEntityEnclosingRequestBase; import
org.apache.http.client.methods.HttpHead; import org.apache.http.client.methods.HttpRequestBase; +import org.apache.http.client.methods.HttpRequestWrapper; import org.apache.http.client.protocol.HttpClientContext; import org.apache.http.client.utils.URIUtils; import org.apache.http.client.utils.URLEncodedUtils; @@ -31,11 +34,20 @@ import org.apache.http.impl.client.BasicAuthCache; import org.apache.http.impl.client.BasicCredentialsProvider; import org.apache.http.impl.client.CloseableHttpClient; +import org.apache.http.impl.client.DefaultRedirectStrategy; import org.apache.http.impl.client.HttpClientBuilder; import org.apache.http.message.BasicNameValuePair; +import org.apache.http.protocol.HttpContext; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; +import org.apache.lucene.util.automaton.Automaton; +import org.apache.lucene.util.automaton.CharacterRunAutomaton; +import org.apache.lucene.util.automaton.MinimizationOperations; +import org.apache.lucene.util.automaton.Operations; +import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.Strings; +import org.elasticsearch.common.regex.Regex; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.unit.TimeValue; @@ -59,6 +71,7 @@ import java.util.HashMap; import java.util.List; import java.util.Map; +import java.util.concurrent.atomic.AtomicReference; public class HttpClient implements Closeable { @@ -69,20 +82,29 @@ public class HttpClient implements Closeable { private static final int MAX_CONNECTIONS = 500; private static final Logger logger = LogManager.getLogger(HttpClient.class); + private final AtomicReference<CharacterRunAutomaton> whitelistAutomaton = new AtomicReference<>(); private final CloseableHttpClient client; private final HttpProxy settingsProxy; private final TimeValue defaultConnectionTimeout; private final TimeValue defaultReadTimeout; private final ByteSizeValue maxResponseSize; private final CryptoService cryptoService; + private final SSLService sslService; - public HttpClient(Settings settings, SSLService sslService, CryptoService cryptoService) { + public HttpClient(Settings settings, SSLService sslService, CryptoService cryptoService, ClusterService clusterService) { this.defaultConnectionTimeout = HttpSettings.CONNECTION_TIMEOUT.get(settings); this.defaultReadTimeout = HttpSettings.READ_TIMEOUT.get(settings); this.maxResponseSize = HttpSettings.MAX_HTTP_RESPONSE_SIZE.get(settings); this.settingsProxy = getProxyFromSettings(settings); this.cryptoService = cryptoService; + this.sslService = sslService; + setWhitelistAutomaton(HttpSettings.HOSTS_WHITELIST.get(settings)); + clusterService.getClusterSettings().addSettingsUpdateConsumer(HttpSettings.HOSTS_WHITELIST, this::setWhitelistAutomaton); + this.client = createHttpClient(); + } + + private CloseableHttpClient createHttpClient() { HttpClientBuilder clientBuilder = HttpClientBuilder.create(); // ssl setup @@ -95,8 +117,48 @@ public HttpClient(Settings settings, SSLService sslService, CryptoService crypto clientBuilder.evictExpiredConnections(); clientBuilder.setMaxConnPerRoute(MAX_CONNECTIONS); clientBuilder.setMaxConnTotal(MAX_CONNECTIONS); + clientBuilder.setRedirectStrategy(new DefaultRedirectStrategy() { + @Override + public boolean isRedirected(org.apache.http.HttpRequest request, org.apache.http.HttpResponse response, + HttpContext context) throws ProtocolException { + boolean
isRedirected = super.isRedirected(request, response, context); + if (isRedirected) { + String host = response.getHeaders("Location")[0].getValue(); + if (isWhitelisted(host) == false) { + throw new ElasticsearchException("host [" + host + "] is not whitelisted in setting [" + + HttpSettings.HOSTS_WHITELIST.getKey() + "], will not redirect"); + } + } + + return isRedirected; + } + }); + + clientBuilder.addInterceptorFirst((HttpRequestInterceptor) (request, context) -> { + if (request instanceof HttpRequestWrapper == false) { + throw new ElasticsearchException("unable to check request [{}/{}] for white listing", request, + request.getClass().getName()); + } + + HttpRequestWrapper wrapper = ((HttpRequestWrapper) request); + final String host; + if (wrapper.getTarget() != null) { + host = wrapper.getTarget().toURI(); + } else { + host = wrapper.getOriginal().getRequestLine().getUri(); + } - client = clientBuilder.build(); + if (isWhitelisted(host) == false) { + throw new ElasticsearchException("host [" + host + "] is not whitelisted in setting [" + + HttpSettings.HOSTS_WHITELIST.getKey() + "], will not connect"); + } + }); + + return clientBuilder.build(); + } + + private void setWhitelistAutomaton(List<String> whiteListedHosts) { + whitelistAutomaton.set(createAutomaton(whiteListedHosts)); } public HttpResponse execute(HttpRequest request) throws IOException { @@ -285,6 +347,24 @@ final class HttpMethodWithEntity extends HttpEntityEnclosingRequestBase { public String getMethod() { return methodName; } + } + private boolean isWhitelisted(String host) { + return whitelistAutomaton.get().run(host); + } + + private static final CharacterRunAutomaton MATCH_ALL_AUTOMATON = new CharacterRunAutomaton(Regex.simpleMatchToAutomaton("*")); + // visible for testing + static CharacterRunAutomaton createAutomaton(List<String> whiteListedHosts) { + if (whiteListedHosts.isEmpty()) { + // the default is to accept everything; this should change in the next major version (8.0) + // we could emit a deprecation warning here if the whitelist is empty + return MATCH_ALL_AUTOMATON; + } + + Automaton whiteListAutomaton = Regex.simpleMatchToAutomaton(whiteListedHosts.toArray(Strings.EMPTY_ARRAY)); + whiteListAutomaton = MinimizationOperations.minimize(whiteListAutomaton, Operations.DEFAULT_MAX_DETERMINIZED_STATES); + return new CharacterRunAutomaton(whiteListAutomaton); + } } diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/common/http/HttpRequest.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/common/http/HttpRequest.java index 6bcfc9e5e7d5f..3cda915b7f336 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/common/http/HttpRequest.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/common/http/HttpRequest.java @@ -35,6 +35,7 @@ import java.util.HashMap; import java.util.Map; import java.util.Objects; +import java.util.stream.Collectors; import static java.util.Collections.emptyMap; import static java.util.Collections.unmodifiableMap; @@ -154,10 +155,8 @@ public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params toX builder.field(Field.PARAMS.getPreferredName(), this.params); } if (headers.isEmpty() == false) { - if (WatcherParams.hideSecrets(toXContentParams) && headers.containsKey("Authorization")) { - Map<String, String> sanitizedHeaders = new HashMap<>(headers); - sanitizedHeaders.put("Authorization", WatcherXContentParser.REDACTED_PASSWORD); - builder.field(Field.HEADERS.getPreferredName(), sanitizedHeaders);
+ if (WatcherParams.hideSecrets(toXContentParams)) { + builder.field(Field.HEADERS.getPreferredName(), sanitizeHeaders(headers)); } else { builder.field(Field.HEADERS.getPreferredName(), headers); } @@ -184,6 +183,15 @@ public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params toX return builder.endObject(); } + private Map<String, String> sanitizeHeaders(Map<String, String> headers) { + if (headers.containsKey("Authorization") == false) { + return headers; + } + Map<String, String> sanitizedHeaders = new HashMap<>(headers); + sanitizedHeaders.put("Authorization", WatcherXContentParser.REDACTED_PASSWORD); + return sanitizedHeaders; + } + @Override public boolean equals(Object o) { if (this == o) return true; @@ -220,16 +228,9 @@ public String toString() { sb.append("port=[").append(port).append("], "); sb.append("path=[").append(path).append("], "); if (!headers.isEmpty()) { - sb.append(", headers=["); - boolean first = true; - for (Map.Entry<String, String> header : headers.entrySet()) { - if (!first) { - sb.append(", "); - } - sb.append("[").append(header.getKey()).append(": ").append(header.getValue()).append("]"); - first = false; - } - sb.append("], "); + sb.append(sanitizeHeaders(headers).entrySet().stream() + .map(header -> header.getKey() + ": " + header.getValue()) + .collect(Collectors.joining(", ", "headers=[", "], "))); } if (auth != null) { sb.append("auth=[").append(BasicAuth.TYPE).append("], "); diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/common/http/HttpSettings.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/common/http/HttpSettings.java index f4f97df1d4fd8..2894d77a28807 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/common/http/HttpSettings.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/common/http/HttpSettings.java @@ -13,7 +13,9 @@ import org.elasticsearch.xpack.core.ssl.SSLConfigurationSettings; import java.util.ArrayList; +import java.util.Collections; import java.util.List; +import java.util.function.Function; /** * Handles the configuration and parsing of settings for the xpack.http.
prefix @@ -34,8 +36,10 @@ public class HttpSettings { private static final String SSL_KEY_PREFIX = "xpack.http.ssl."; static final Setting<String> PROXY_HOST = Setting.simpleString(PROXY_HOST_KEY, Property.NodeScope); - static final Setting<String> PROXY_SCHEME = Setting.simpleString(PROXY_SCHEME_KEY, (v, s) -> Scheme.parse(v), Property.NodeScope); + static final Setting<String> PROXY_SCHEME = Setting.simpleString(PROXY_SCHEME_KEY, Scheme::parse, Property.NodeScope); static final Setting<Integer> PROXY_PORT = Setting.intSetting(PROXY_PORT_KEY, 0, 0, 0xFFFF, Property.NodeScope); + static final Setting<List<String>> HOSTS_WHITELIST = Setting.listSetting("xpack.http.whitelist", Collections.singletonList("*"), + Function.identity(), Property.NodeScope, Property.Dynamic); static final Setting<ByteSizeValue> MAX_HTTP_RESPONSE_SIZE = Setting.byteSizeSetting("xpack.http.max_response_size", new ByteSizeValue(10, ByteSizeUnit.MB), // default @@ -54,6 +58,7 @@ public static List<Setting<?>> getSettings() { settings.add(PROXY_PORT); settings.add(PROXY_SCHEME); settings.add(MAX_HTTP_RESPONSE_SIZE); + settings.add(HOSTS_WHITELIST); return settings; } diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/actions/webhook/WebhookActionTests.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/actions/webhook/WebhookActionTests.java index 511fcd7698e35..e8b59ca9e6310 100644 --- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/actions/webhook/WebhookActionTests.java +++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/actions/webhook/WebhookActionTests.java @@ -47,6 +47,7 @@ import static org.elasticsearch.common.unit.TimeValue.timeValueSeconds; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; +import static org.elasticsearch.xpack.watcher.common.http.HttpClientTests.mockClusterService; import static org.hamcrest.CoreMatchers.instanceOf; import static org.hamcrest.CoreMatchers.notNullValue; import static org.hamcrest.Matchers.containsString; @@ -214,7 +215,8 @@ private WebhookActionFactory webhookFactory(HttpClient client) { public void testThatSelectingProxyWorks() throws Exception { Environment environment = TestEnvironment.newEnvironment(Settings.builder().put("path.home", createTempDir()).build()); - try (HttpClient httpClient = new HttpClient(Settings.EMPTY, new SSLService(environment.settings(), environment), null); + try (HttpClient httpClient = new HttpClient(Settings.EMPTY, new SSLService(environment.settings(), environment), null, + mockClusterService()); MockWebServer proxyServer = new MockWebServer()) { proxyServer.start(); proxyServer.enqueue(new MockResponse().setResponseCode(200).setBody("fullProxiedContent")); diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/common/http/HttpClientTests.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/common/http/HttpClientTests.java index 519dbbeee8685..88225efba466a 100644 --- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/common/http/HttpClientTests.java +++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/common/http/HttpClientTests.java @@ -11,6 +11,10 @@ import org.apache.http.client.config.RequestConfig; import org.apache.logging.log4j.message.ParameterizedMessage; import org.apache.logging.log4j.util.Supplier; +import org.apache.lucene.util.automaton.CharacterRunAutomaton; +import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.cluster.service.ClusterService; +import
org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.MockSecureSettings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeUnit; @@ -40,6 +44,9 @@ import java.net.SocketTimeoutException; import java.nio.charset.StandardCharsets; import java.nio.file.Path; +import java.util.Arrays; +import java.util.Collections; +import java.util.HashSet; import java.util.Locale; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; @@ -55,6 +62,8 @@ import static org.hamcrest.Matchers.nullValue; import static org.hamcrest.Matchers.startsWith; import static org.hamcrest.core.Is.is; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; public class HttpClientTests extends ESTestCase { @@ -65,7 +74,10 @@ public class HttpClientTests extends ESTestCase { @Before public void init() throws Exception { webServer.start(); - httpClient = new HttpClient(Settings.EMPTY, new SSLService(environment.settings(), environment), null); + ClusterService clusterService = mock(ClusterService.class); + ClusterSettings clusterSettings = new ClusterSettings(Settings.EMPTY, new HashSet<>(HttpSettings.getSettings())); + when(clusterService.getClusterSettings()).thenReturn(clusterSettings); + httpClient = new HttpClient(Settings.EMPTY, new SSLService(environment.settings(), environment), null, clusterService); } @After @@ -179,7 +191,7 @@ public void testHttps() throws Exception { .setSecureSettings(secureSettings) .build(); } - try (HttpClient client = new HttpClient(settings, new SSLService(settings, environment), null)) { + try (HttpClient client = new HttpClient(settings, new SSLService(settings, environment), null, mockClusterService())) { secureSettings = new MockSecureSettings(); // We can't use the client created above for the server since it is only a truststore secureSettings.setString("xpack.ssl.secure_key_passphrase", "testnode"); @@ -220,7 +232,7 @@ public void testHttpsDisableHostnameVerification() throws Exception { } settings = builder.build(); } - try (HttpClient client = new HttpClient(settings, new SSLService(settings, environment), null)) { + try (HttpClient client = new HttpClient(settings, new SSLService(settings, environment), null, mockClusterService())) { MockSecureSettings secureSettings = new MockSecureSettings(); // We can't use the client created above for the server since it only defines a truststore secureSettings.setString("xpack.ssl.secure_key_passphrase", "testnode-no-subjaltname"); @@ -247,7 +259,7 @@ public void testHttpsClientAuth() throws Exception { .build(); TestsSSLService sslService = new TestsSSLService(settings, environment); - try (HttpClient client = new HttpClient(settings, sslService, null)) { + try (HttpClient client = new HttpClient(settings, sslService, null, mockClusterService())) { testSslMockWebserver(client, sslService.sslContext(), true); } } @@ -295,7 +307,7 @@ public void testHttpResponseWithAnyStatusCodeCanReturnBody() throws Exception { @Network public void testHttpsWithoutTruststore() throws Exception { - try (HttpClient client = new HttpClient(Settings.EMPTY, new SSLService(Settings.EMPTY, environment), null)) { + try (HttpClient client = new HttpClient(Settings.EMPTY, new SSLService(Settings.EMPTY, environment), null, mockClusterService())) { // Known server with a valid cert from a commercial CA HttpRequest.Builder request = HttpRequest.builder("www.elastic.co", 443).scheme(Scheme.HTTPS); HttpResponse response = 
client.execute(request.build()); @@ -319,7 +331,7 @@ public void testThatProxyCanBeConfigured() throws Exception { .method(HttpMethod.GET) .path("/"); - try (HttpClient client = new HttpClient(settings, new SSLService(settings, environment), null)) { + try (HttpClient client = new HttpClient(settings, new SSLService(settings, environment), null, mockClusterService())) { HttpResponse response = client.execute(requestBuilder.build()); assertThat(response.status(), equalTo(200)); assertThat(response.body().utf8ToString(), equalTo("fullProxiedContent")); @@ -400,7 +412,7 @@ public void testProxyCanHaveDifferentSchemeThanRequest() throws Exception { .scheme(Scheme.HTTP) .path("/"); - try (HttpClient client = new HttpClient(settings, new SSLService(settings, environment), null)) { + try (HttpClient client = new HttpClient(settings, new SSLService(settings, environment), null, mockClusterService())) { HttpResponse response = client.execute(requestBuilder.build()); assertThat(response.status(), equalTo(200)); assertThat(response.body().utf8ToString(), equalTo("fullProxiedContent")); @@ -428,7 +440,7 @@ public void testThatProxyCanBeOverriddenByRequest() throws Exception { .proxy(new HttpProxy("localhost", proxyServer.getPort(), Scheme.HTTP)) .path("/"); - try (HttpClient client = new HttpClient(settings, new SSLService(settings, environment), null)) { + try (HttpClient client = new HttpClient(settings, new SSLService(settings, environment), null, mockClusterService())) { HttpResponse response = client.execute(requestBuilder.build()); assertThat(response.status(), equalTo(200)); assertThat(response.body().utf8ToString(), equalTo("fullProxiedContent")); @@ -449,7 +461,7 @@ public void testThatProxyConfigurationRequiresHostAndPort() { } IllegalArgumentException e = expectThrows(IllegalArgumentException.class, - () -> new HttpClient(settings.build(), new SSLService(settings.build(), environment), null)); + () -> new HttpClient(settings.build(), new SSLService(settings.build(), environment), null, mockClusterService())); assertThat(e.getMessage(), containsString("HTTP proxy requires both settings: [xpack.http.proxy.host] and [xpack.http.proxy.port]")); } @@ -548,7 +560,8 @@ public void testMaxHttpResponseSize() throws Exception { HttpRequest.Builder requestBuilder = HttpRequest.builder("localhost", webServer.getPort()).method(HttpMethod.GET).path("/"); - try (HttpClient client = new HttpClient(settings, new SSLService(environment.settings(), environment), null)) { + try (HttpClient client = new HttpClient(settings, new SSLService(environment.settings(), environment), null, + mockClusterService())) { IOException e = expectThrows(IOException.class, () -> client.execute(requestBuilder.build())); assertThat(e.getMessage(), startsWith("Maximum limit of")); } @@ -617,4 +630,133 @@ public void testThatUrlDoesNotContainQuestionMarkAtTheEnd() throws Exception { assertThat(webServer.requests(), hasSize(1)); assertThat(webServer.requests().get(0).getUri().getRawPath(), is("/foo")); } + + public void testThatWhiteListingWorks() throws Exception { + webServer.enqueue(new MockResponse().setResponseCode(200).setBody("whatever")); + Settings settings = Settings.builder().put(HttpSettings.HOSTS_WHITELIST.getKey(), getWebserverUri()).build(); + + try (HttpClient client = new HttpClient(settings, new SSLService(environment.settings(), environment), null, + mockClusterService())) { + HttpRequest request = HttpRequest.builder(webServer.getHostName(), webServer.getPort()).path("foo").build(); + client.execute(request); + } 
+ } + + public void testThatWhiteListBlocksRequests() throws Exception { + Settings settings = Settings.builder() + .put(HttpSettings.HOSTS_WHITELIST.getKey(), getWebserverUri()) + .build(); + + try (HttpClient client = new HttpClient(settings, new SSLService(environment.settings(), environment), null, + mockClusterService())) { + HttpRequest request = HttpRequest.builder("blocked.domain.org", webServer.getPort()) + .path("foo") + .build(); + ElasticsearchException e = expectThrows(ElasticsearchException.class, () -> client.execute(request)); + assertThat(e.getMessage(), is("host [http://blocked.domain.org:" + webServer.getPort() + + "] is not whitelisted in setting [xpack.http.whitelist], will not connect")); + } + } + + public void testThatWhiteListBlocksRedirects() throws Exception { + String redirectUrl = "http://blocked.domain.org:" + webServer.getPort() + "/foo"; + webServer.enqueue(new MockResponse().setResponseCode(302).addHeader("Location", redirectUrl)); + HttpMethod method = randomFrom(HttpMethod.GET, HttpMethod.HEAD); + + if (method == HttpMethod.GET) { + webServer.enqueue(new MockResponse().setResponseCode(200).setBody("shouldBeRead")); + } else if (method == HttpMethod.HEAD) { + webServer.enqueue(new MockResponse().setResponseCode(200)); + } + + Settings settings = Settings.builder().put(HttpSettings.HOSTS_WHITELIST.getKey(), getWebserverUri()).build(); + + try (HttpClient client = new HttpClient(settings, new SSLService(environment.settings(), environment), null, + mockClusterService())) { + HttpRequest request = HttpRequest.builder(webServer.getHostName(), webServer.getPort()).path("/") + .method(method) + .build(); + ElasticsearchException e = expectThrows(ElasticsearchException.class, () -> client.execute(request)); + assertThat(e.getMessage(), is("host [" + redirectUrl + "] is not whitelisted in setting [xpack.http.whitelist], " + + "will not redirect")); + } + } + + public void testThatWhiteListingWorksForRedirects() throws Exception { + int numberOfRedirects = randomIntBetween(1, 10); + for (int i = 0; i < numberOfRedirects; i++) { + String redirectUrl = "http://" + webServer.getHostName() + ":" + webServer.getPort() + "/redirect" + i; + webServer.enqueue(new MockResponse().setResponseCode(302).addHeader("Location", redirectUrl)); + } + webServer.enqueue(new MockResponse().setResponseCode(200).setBody("shouldBeRead")); + + Settings settings = Settings.builder().put(HttpSettings.HOSTS_WHITELIST.getKey(), getWebserverUri() + "*").build(); + + try (HttpClient client = new HttpClient(settings, new SSLService(environment.settings(), environment), null, + mockClusterService())) { + HttpRequest request = HttpRequest.builder(webServer.getHostName(), webServer.getPort()).path("/") + .method(HttpMethod.GET) + .build(); + HttpResponse response = client.execute(request); + + assertThat(webServer.requests(), hasSize(numberOfRedirects + 1)); + assertThat(response.body().utf8ToString(), is("shouldBeRead")); + } + } + + public void testThatWhiteListReloadingWorks() throws Exception { + webServer.enqueue(new MockResponse().setResponseCode(200).setBody("whatever")); + Settings settings = Settings.builder().put(HttpSettings.HOSTS_WHITELIST.getKey(), "example.org").build(); + ClusterService clusterService = mock(ClusterService.class); + ClusterSettings clusterSettings = new ClusterSettings(settings, new HashSet<>(HttpSettings.getSettings())); + when(clusterService.getClusterSettings()).thenReturn(clusterSettings); + + try (HttpClient client = + new HttpClient(settings, new 
SSLService(environment.settings(), environment), null, clusterService)) { + + // the host is not whitelisted yet, so the request must be rejected + HttpRequest request = HttpRequest.builder(webServer.getHostName(), webServer.getPort()).path("/") + .method(HttpMethod.GET) + .build(); + ElasticsearchException e = expectThrows(ElasticsearchException.class, () -> client.execute(request)); + assertThat(e.getMessage(), containsString("is not whitelisted")); + + Settings newSettings = Settings.builder().put(HttpSettings.HOSTS_WHITELIST.getKey(), getWebserverUri()).build(); + clusterSettings.applySettings(newSettings); + + HttpResponse response = client.execute(request); + assertThat(response.status(), is(200)); + } + } + + public void testAutomatonWhitelisting() { + CharacterRunAutomaton automaton = HttpClient.createAutomaton(Arrays.asList("https://example*", "https://bar.com/foo", + "htt*://www.test.org")); + assertThat(automaton.run("https://example.org"), is(true)); + assertThat(automaton.run("https://example.com"), is(true)); + assertThat(automaton.run("https://examples.com"), is(true)); + assertThat(automaton.run("https://example-website.com"), is(true)); + assertThat(automaton.run("https://noexample.com"), is(false)); + assertThat(automaton.run("https://bar.com/foo"), is(true)); + assertThat(automaton.run("https://bar.com/foo2"), is(false)); + assertThat(automaton.run("https://bar.com"), is(false)); + assertThat(automaton.run("https://www.test.org"), is(true)); + assertThat(automaton.run("http://www.test.org"), is(true)); + } + + public void testWhitelistEverythingByDefault() { + CharacterRunAutomaton automaton = HttpClient.createAutomaton(Collections.emptyList()); + assertThat(automaton.run(randomAlphaOfLength(10)), is(true)); + } + + public static ClusterService mockClusterService() { + ClusterService clusterService = mock(ClusterService.class); + ClusterSettings clusterSettings = new ClusterSettings(Settings.EMPTY, new HashSet<>(HttpSettings.getSettings())); + when(clusterService.getClusterSettings()).thenReturn(clusterSettings); + return clusterService; + } + + private String getWebserverUri() { + return String.format(Locale.ROOT, "http://%s:%s", webServer.getHostName(), webServer.getPort()); + } } diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/common/http/HttpConnectionTimeoutTests.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/common/http/HttpConnectionTimeoutTests.java index 21efe5b2b94ea..3451c771e3e60 100644 --- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/common/http/HttpConnectionTimeoutTests.java +++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/common/http/HttpConnectionTimeoutTests.java @@ -14,6 +14,7 @@ import org.elasticsearch.test.junit.annotations.Network; import org.elasticsearch.xpack.core.ssl.SSLService; +import static org.elasticsearch.xpack.watcher.common.http.HttpClientTests.mockClusterService; import static org.hamcrest.Matchers.greaterThan; import static org.hamcrest.Matchers.lessThan; @@ -24,7 +25,8 @@ public class HttpConnectionTimeoutTests extends ESTestCase { @Network public void testDefaultTimeout() throws Exception { Environment environment = TestEnvironment.newEnvironment(Settings.builder().put("path.home", createTempDir()).build()); - HttpClient httpClient = new HttpClient(Settings.EMPTY, new SSLService(environment.settings(), environment), null); + HttpClient httpClient = new HttpClient(Settings.EMPTY, new SSLService(environment.settings(), environment), null, + mockClusterService()); HttpRequest
request = HttpRequest.builder(UNROUTABLE_IP, 12345) .method(HttpMethod.POST) @@ -49,7 +51,8 @@ public void testDefaultTimeout() throws Exception { public void testDefaultTimeoutCustom() throws Exception { Environment environment = TestEnvironment.newEnvironment(Settings.builder().put("path.home", createTempDir()).build()); HttpClient httpClient = new HttpClient(Settings.builder() - .put("xpack.http.default_connection_timeout", "5s").build(), new SSLService(environment.settings(), environment), null); + .put("xpack.http.default_connection_timeout", "5s").build(), new SSLService(environment.settings(), environment), null, + mockClusterService()); HttpRequest request = HttpRequest.builder(UNROUTABLE_IP, 12345) .method(HttpMethod.POST) @@ -74,7 +77,8 @@ public void testDefaultTimeoutCustom() throws Exception { public void testTimeoutCustomPerRequest() throws Exception { Environment environment = TestEnvironment.newEnvironment(Settings.builder().put("path.home", createTempDir()).build()); HttpClient httpClient = new HttpClient(Settings.builder() - .put("xpack.http.default_connection_timeout", "10s").build(), new SSLService(environment.settings(), environment), null); + .put("xpack.http.default_connection_timeout", "10s").build(), new SSLService(environment.settings(), environment), null, + mockClusterService()); HttpRequest request = HttpRequest.builder(UNROUTABLE_IP, 12345) .connectionTimeout(TimeValue.timeValueSeconds(5)) @@ -95,5 +99,4 @@ public void testTimeoutCustomPerRequest() throws Exception { // expected } } - } diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/common/http/HttpReadTimeoutTests.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/common/http/HttpReadTimeoutTests.java index bc328dc586e9e..e534a2a90757e 100644 --- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/common/http/HttpReadTimeoutTests.java +++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/common/http/HttpReadTimeoutTests.java @@ -18,6 +18,7 @@ import java.net.SocketTimeoutException; +import static org.elasticsearch.xpack.watcher.common.http.HttpClientTests.mockClusterService; import static org.hamcrest.Matchers.greaterThan; import static org.hamcrest.Matchers.lessThan; @@ -43,7 +44,8 @@ public void testDefaultTimeout() throws Exception { .path("/") .build(); - try (HttpClient httpClient = new HttpClient(Settings.EMPTY, new SSLService(environment.settings(), environment), null)) { + try (HttpClient httpClient = new HttpClient(Settings.EMPTY, new SSLService(environment.settings(), environment), + null, mockClusterService())) { long start = System.nanoTime(); expectThrows(SocketTimeoutException.class, () -> httpClient.execute(request)); @@ -65,7 +67,8 @@ public void testDefaultTimeoutCustom() throws Exception { .build(); try (HttpClient httpClient = new HttpClient(Settings.builder() - .put("xpack.http.default_read_timeout", "3s").build(), new SSLService(environment.settings(), environment), null)) { + .put("xpack.http.default_read_timeout", "3s").build(), new SSLService(environment.settings(), environment), + null, mockClusterService())) { long start = System.nanoTime(); expectThrows(SocketTimeoutException.class, () -> httpClient.execute(request)); @@ -88,7 +91,8 @@ public void testTimeoutCustomPerRequest() throws Exception { .build(); try (HttpClient httpClient = new HttpClient(Settings.builder() - .put("xpack.http.default_read_timeout", "10s").build(), new SSLService(environment.settings(), environment), null)) { + 
.put("xpack.http.default_read_timeout", "10s").build(), new SSLService(environment.settings(), environment), + null, mockClusterService())) { long start = System.nanoTime(); expectThrows(SocketTimeoutException.class, () -> httpClient.execute(request)); diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/common/http/HttpRequestTests.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/common/http/HttpRequestTests.java index 0d1541577a58b..dea6db9aaf4de 100644 --- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/common/http/HttpRequestTests.java +++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/common/http/HttpRequestTests.java @@ -149,6 +149,12 @@ public void testXContentRemovesAuthorization() throws Exception { } } + public void testToStringDoesNotContainAuthorizationheader() { + HttpRequest request = HttpRequest.builder("localhost", 443).setHeader("Authorization", "Bearer Foo").build(); + assertThat(request.toString(), not(containsString("Bearer Foo"))); + assertThat(request.toString(), containsString("Authorization: " + WatcherXContentParser.REDACTED_PASSWORD)); + } + private void assertThatManualBuilderEqualsParsingFromUrl(String url, HttpRequest.Builder builder) throws Exception { XContentBuilder urlContentBuilder = jsonBuilder().startObject().field("url", url).endObject(); XContentParser urlContentParser = createParser(urlContentBuilder); diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/test/integration/SingleNodeTests.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/test/integration/SingleNodeTests.java index 6d2d63d4f4158..b03d75af113af 100644 --- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/test/integration/SingleNodeTests.java +++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/test/integration/SingleNodeTests.java @@ -5,6 +5,7 @@ */ package org.elasticsearch.xpack.watcher.test.integration; +import org.apache.lucene.util.LuceneTestCase; import org.elasticsearch.action.admin.cluster.state.ClusterStateResponse; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.cluster.metadata.IndexMetaData; @@ -27,6 +28,7 @@ import static org.hamcrest.Matchers.greaterThanOrEqualTo; import static org.hamcrest.Matchers.is; +@LuceneTestCase.AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/36782") @ClusterScope(scope = SUITE, numClientNodes = 0, transportClientRatio = 0, maxNumDataNodes = 1, supportsDedicatedMasters = false) public class SingleNodeTests extends AbstractWatcherIntegrationTestCase { diff --git a/x-pack/qa/full-cluster-restart/build.gradle b/x-pack/qa/full-cluster-restart/build.gradle index bf2d556c4afb6..716289359faa2 100644 --- a/x-pack/qa/full-cluster-restart/build.gradle +++ b/x-pack/qa/full-cluster-restart/build.gradle @@ -3,7 +3,6 @@ import org.elasticsearch.gradle.test.RestIntegTestTask import org.elasticsearch.gradle.Version import java.nio.charset.StandardCharsets -import java.nio.file.Paths import java.util.regex.Matcher // Apply the java plugin to this project so the sources can be edited in an IDE @@ -89,6 +88,10 @@ licenseHeaders { forbiddenPatterns { exclude '**/system_key' } + +// tests are pushed down to subprojects +testingConventions.enabled = false + /** * Subdirectories of this project are test rolling upgrades with various * configuration options based on their name. 
diff --git a/x-pack/qa/full-cluster-restart/src/test/java/org/elasticsearch/xpack/restart/FullClusterRestartIT.java b/x-pack/qa/full-cluster-restart/src/test/java/org/elasticsearch/xpack/restart/FullClusterRestartIT.java index beea8c9a21856..e52a6dd3b4303 100644 --- a/x-pack/qa/full-cluster-restart/src/test/java/org/elasticsearch/xpack/restart/FullClusterRestartIT.java +++ b/x-pack/qa/full-cluster-restart/src/test/java/org/elasticsearch/xpack/restart/FullClusterRestartIT.java @@ -17,6 +17,7 @@ import org.elasticsearch.common.xcontent.support.XContentMapValues; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.rest.action.document.RestGetAction; +import org.elasticsearch.rest.action.document.RestIndexAction; import org.elasticsearch.rest.action.search.RestSearchAction; import org.elasticsearch.test.StreamsUtils; import org.elasticsearch.test.rest.ESRestTestCase; @@ -41,7 +42,7 @@ import java.util.stream.Collectors; import static org.elasticsearch.common.unit.TimeValue.timeValueSeconds; -import static org.elasticsearch.rest.action.search.RestSearchAction.TOTAL_HIT_AS_INT_PARAM; +import static org.elasticsearch.rest.action.search.RestSearchAction.TOTAL_HITS_AS_INT_PARAM; import static org.hamcrest.Matchers.anyOf; import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.containsString; @@ -345,7 +346,7 @@ public void testRollupIDSchemeAfterRestart() throws Exception { client().performRequest(new Request("POST", "id-test-results-rollup/_refresh")); final Request searchRequest = new Request("GET", "id-test-results-rollup/_search"); if (isRunningAgainstOldCluster() == false) { - searchRequest.addParameter(TOTAL_HIT_AS_INT_PARAM, "true"); + searchRequest.addParameter(TOTAL_HITS_AS_INT_PARAM, "true"); } try { Map searchResponse = entityAsMap(client().performRequest(searchRequest)); @@ -365,6 +366,7 @@ public void testRollupIDSchemeAfterRestart() throws Exception { final Request indexRequest = new Request("POST", "/id-test-rollup/doc/2"); indexRequest.setJsonEntity("{\"timestamp\":\"2018-01-02T00:00:01\",\"value\":345}"); + indexRequest.setOptions(expectWarnings(RestIndexAction.TYPES_DEPRECATION_MESSAGE)); client().performRequest(indexRequest); assertRollUpJob("rollup-id-test"); @@ -387,7 +389,7 @@ public void testRollupIDSchemeAfterRestart() throws Exception { client().performRequest(new Request("POST", "id-test-results-rollup/_refresh")); final Request searchRequest = new Request("GET", "id-test-results-rollup/_search"); if (isRunningAgainstOldCluster() == false) { - searchRequest.addParameter(TOTAL_HIT_AS_INT_PARAM, "true"); + searchRequest.addParameter(TOTAL_HITS_AS_INT_PARAM, "true"); } try { Map searchResponse = entityAsMap(client().performRequest(searchRequest)); @@ -498,7 +500,7 @@ private void assertWatchIndexContentsWork() throws Exception { assertThat(basic, hasEntry(is("password"), anyOf(startsWith("::es_encrypted::"), is("::es_redacted::")))); Request searchRequest = new Request("GET", ".watcher-history*/_search"); if (isRunningAgainstOldCluster() == false) { - searchRequest.addParameter(RestSearchAction.TOTAL_HIT_AS_INT_PARAM, "true"); + searchRequest.addParameter(RestSearchAction.TOTAL_HITS_AS_INT_PARAM, "true"); } Map history = entityAsMap(client().performRequest(searchRequest)); Map hits = (Map) history.get("hits"); diff --git a/x-pack/qa/full-cluster-restart/src/test/java/org/elasticsearch/xpack/restart/MlMigrationFullClusterRestartIT.java 
b/x-pack/qa/full-cluster-restart/src/test/java/org/elasticsearch/xpack/restart/MlMigrationFullClusterRestartIT.java index 36857635951e2..e71e116ca6695 100644 --- a/x-pack/qa/full-cluster-restart/src/test/java/org/elasticsearch/xpack/restart/MlMigrationFullClusterRestartIT.java +++ b/x-pack/qa/full-cluster-restart/src/test/java/org/elasticsearch/xpack/restart/MlMigrationFullClusterRestartIT.java @@ -69,7 +69,6 @@ private void createTestIndex() throws IOException { client().performRequest(createTestIndex); } - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/36816") public void testMigration() throws Exception { if (isRunningAgainstOldCluster()) { createTestIndex(); @@ -157,6 +156,12 @@ private void waitForDatafeedToBeAssigned(String datafeedId) throws Exception { @SuppressWarnings("unchecked") private void waitForMigration(List expectedMigratedJobs, List expectedMigratedDatafeeds, List unMigratedJobs, List unMigratedDatafeeds) throws Exception { + + // After v6.6.0 jobs are created in the index so no migration will take place + if (getOldClusterVersion().onOrAfter(Version.V_6_6_0)) { + return; + } + assertBusy(() -> { // wait for the eligible configs to be moved from the clusterstate Request getClusterState = new Request("GET", "/_cluster/state/metadata"); diff --git a/x-pack/qa/multi-node/src/test/java/org/elasticsearch/multi_node/RollupIT.java b/x-pack/qa/multi-node/src/test/java/org/elasticsearch/multi_node/RollupIT.java index a7dd57bea89ed..1feb05b9c6468 100644 --- a/x-pack/qa/multi-node/src/test/java/org/elasticsearch/multi_node/RollupIT.java +++ b/x-pack/qa/multi-node/src/test/java/org/elasticsearch/multi_node/RollupIT.java @@ -92,7 +92,7 @@ public void testBigRollup() throws Exception { // index documents for the rollup job final StringBuilder bulk = new StringBuilder(); for (int i = 0; i < numDocs; i++) { - bulk.append("{\"index\":{\"_index\":\"rollup-docs\",\"_type\":\"_doc\"}}\n"); + bulk.append("{\"index\":{\"_index\":\"rollup-docs\"}}\n"); ZonedDateTime zdt = ZonedDateTime.ofInstant(Instant.ofEpochSecond(1531221196 + (60*i)), ZoneId.of("UTC")); String date = zdt.format(DateTimeFormatter.ISO_OFFSET_DATE_TIME); bulk.append("{\"timestamp\":\"").append(date).append("\",\"value\":").append(i).append("}\n"); diff --git a/x-pack/qa/rolling-upgrade/build.gradle b/x-pack/qa/rolling-upgrade/build.gradle index f59dd6bc3e451..0636f943c6d32 100644 --- a/x-pack/qa/rolling-upgrade/build.gradle +++ b/x-pack/qa/rolling-upgrade/build.gradle @@ -76,6 +76,9 @@ forbiddenPatterns { exclude '**/system_key' } +// Tests are pushed down to subprojects +testingConventions.enabled = false + /** * Subdirectories of this project are test rolling upgrades with various * configuration options based on their name. diff --git a/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/CCRIT.java b/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/CCRIT.java new file mode 100644 index 0000000000000..f2914c3514d90 --- /dev/null +++ b/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/CCRIT.java @@ -0,0 +1,282 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.upgrades; + +import org.apache.http.util.EntityUtils; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.elasticsearch.Version; +import org.elasticsearch.client.Request; +import org.elasticsearch.client.Response; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.xcontent.ObjectPath; +import org.elasticsearch.common.xcontent.XContentHelper; +import org.elasticsearch.common.xcontent.json.JsonXContent; + +import java.io.IOException; +import java.util.Map; + +import static org.hamcrest.Matchers.equalTo; + +public class CCRIT extends AbstractUpgradeTestCase { + + private static final Logger LOGGER = LogManager.getLogger(CCRIT.class); + + private static final Version UPGRADE_FROM_VERSION = + Version.fromString(System.getProperty("tests.upgrade_from_version")); + + private static final boolean SECOND_ROUND = "false".equals(System.getProperty("tests.first_round")); + + @Override + protected boolean preserveClusterSettings() { + return true; + } + + public void testIndexFollowing() throws Exception { + assumeTrue("CCR became available in 6.5, but test relies on a fix that was shipped with 6.6.0", + UPGRADE_FROM_VERSION.onOrAfter(Version.V_6_6_0)); + setupRemoteCluster(); + + final String leaderIndex = "my-leader-index"; + final String followerIndex = "my-follower-index"; + + switch (CLUSTER_TYPE) { + case OLD: + Settings indexSettings = Settings.builder() + .put("index.soft_deletes.enabled", true) + .put("index.number_of_shards", 1) + .build(); + createIndex(leaderIndex, indexSettings); + followIndex(leaderIndex, followerIndex); + index(leaderIndex, "1"); + assertDocumentExists(leaderIndex, "1"); + assertBusy(() -> { + assertFollowerGlobalCheckpoint(followerIndex, 0); + assertDocumentExists(followerIndex, "1"); + }); + break; + case MIXED: + if (SECOND_ROUND == false) { + index(leaderIndex, "2"); + assertDocumentExists(leaderIndex, "1", "2"); + assertBusy(() -> { + assertFollowerGlobalCheckpoint(followerIndex, 1); + assertDocumentExists(followerIndex, "1", "2"); + }); + } else { + index(leaderIndex, "3"); + assertDocumentExists(leaderIndex, "1", "2", "3"); + assertBusy(() -> { + assertFollowerGlobalCheckpoint(followerIndex, 2); + assertDocumentExists(followerIndex, "1", "2", "3"); + }); + } + break; + case UPGRADED: + index(leaderIndex, "4"); + assertDocumentExists(leaderIndex, "1", "2", "3", "4"); + assertBusy(() -> { + assertFollowerGlobalCheckpoint(followerIndex, 3); + assertDocumentExists(followerIndex, "1", "2", "3", "4"); + }); + stopIndexFollowing(followerIndex); + break; + default: + throw new UnsupportedOperationException("Unknown cluster type [" + CLUSTER_TYPE + "]"); + } + } + + public void testAutoFollowing() throws Exception { + assumeTrue("CCR became available in 6.5, but test relies on a fix that was shipped with 6.6.0", + UPGRADE_FROM_VERSION.onOrAfter(Version.V_6_6_0)); + setupRemoteCluster(); + + final Settings indexSettings = Settings.builder() + .put("index.soft_deletes.enabled", true) + .put("index.number_of_shards", 1) + .build(); + + String leaderIndex1 = "logs-20200101"; + String leaderIndex2 = "logs-20200102"; + String leaderIndex3 = "logs-20200103"; + + switch (CLUSTER_TYPE) { + case OLD: + putAutoFollowPattern("test_pattern", "logs-*"); + createIndex(leaderIndex1, indexSettings); + index(leaderIndex1, "1"); + assertBusy(() -> { + String followerIndex = "copy-" + leaderIndex1; + assertThat(getNumberOfSuccessfulFollowedIndices(), equalTo(1)); + 
assertFollowerGlobalCheckpoint(followerIndex, 0); + assertDocumentExists(followerIndex, "1"); + }); + break; + case MIXED: + if (SECOND_ROUND == false) { + index(leaderIndex1, "2"); + assertBusy(() -> { + String followerIndex = "copy-" + leaderIndex1; + assertFollowerGlobalCheckpoint(followerIndex, 1); + assertDocumentExists(followerIndex, "2"); + }); + // Auto follow stats are kept in-memory on the elected master node + // and if this node gets updated then the auto follow stats are reset + int previousNumberOfSuccessfulFollowedIndices = getNumberOfSuccessfulFollowedIndices(); + createIndex(leaderIndex2, indexSettings); + index(leaderIndex2, "1"); + assertBusy(() -> { + String followerIndex = "copy-" + leaderIndex2; + assertThat(getNumberOfSuccessfulFollowedIndices(), equalTo(previousNumberOfSuccessfulFollowedIndices + 1)); + assertFollowerGlobalCheckpoint(followerIndex, 0); + assertDocumentExists(followerIndex, "1"); + }); + } else { + index(leaderIndex1, "3"); + assertBusy(() -> { + String followerIndex = "copy-" + leaderIndex1; + assertFollowerGlobalCheckpoint(followerIndex, 2); + assertDocumentExists(followerIndex, "3"); + }); + index(leaderIndex2, "2"); + assertBusy(() -> { + String followerIndex = "copy-" + leaderIndex2; + assertFollowerGlobalCheckpoint(followerIndex, 1); + assertDocumentExists(followerIndex, "2"); + }); + + // Auto follow stats are kept in-memory on the elected master node + // and if this node gets updated then the auto follow stats are reset + int previousNumberOfSuccessfulFollowedIndices = getNumberOfSuccessfulFollowedIndices(); + createIndex(leaderIndex3, indexSettings); + index(leaderIndex3, "1"); + assertBusy(() -> { + String followerIndex = "copy-" + leaderIndex3; + assertThat(getNumberOfSuccessfulFollowedIndices(), equalTo(previousNumberOfSuccessfulFollowedIndices + 1)); + assertFollowerGlobalCheckpoint(followerIndex, 0); + assertDocumentExists(followerIndex, "1"); + }); + } + break; + case UPGRADED: + index(leaderIndex1, "4"); + assertBusy(() -> { + String followerIndex = "copy-" + leaderIndex1; + assertFollowerGlobalCheckpoint(followerIndex, 3); + assertDocumentExists(followerIndex, "4"); + }); + index(leaderIndex2, "3"); + assertBusy(() -> { + String followerIndex = "copy-" + leaderIndex2; + assertFollowerGlobalCheckpoint(followerIndex, 2); + assertDocumentExists(followerIndex, "3"); + }); + index(leaderIndex3, "2"); + assertBusy(() -> { + String followerIndex = "copy-" + leaderIndex3; + assertFollowerGlobalCheckpoint(followerIndex, 1); + assertDocumentExists(followerIndex, "2"); + }); + + deleteAutoFollowPattern("test_pattern"); + + stopIndexFollowing("copy-" + leaderIndex1); + stopIndexFollowing("copy-" + leaderIndex2); + stopIndexFollowing("copy-" + leaderIndex3); + break; + default: + throw new UnsupportedOperationException("Unknown cluster type [" + CLUSTER_TYPE + "]"); + } + } + + private static void stopIndexFollowing(String followerIndex) throws IOException { + pauseFollow(followerIndex); + closeIndex(followerIndex); + unfollow(followerIndex); + } + + private static void followIndex(String leaderIndex, String followIndex) throws IOException { + final Request request = new Request("PUT", "/" + followIndex + "/_ccr/follow"); + request.setJsonEntity("{\"remote_cluster\": \"local\", \"leader_index\": \"" + leaderIndex + + "\", \"read_poll_timeout\": \"10ms\"}"); + assertOK(client().performRequest(request)); + } + + private static void pauseFollow(String followIndex) throws IOException { + assertOK(client().performRequest(new Request("POST", "/" + followIndex +
"/_ccr/pause_follow"))); + } + + private static void unfollow(String followIndex) throws IOException { + assertOK(client().performRequest(new Request("POST", "/" + followIndex + "/_ccr/unfollow"))); + } + + private static void putAutoFollowPattern(String name, String pattern) throws IOException { + Request request = new Request("PUT", "/_ccr/auto_follow/" + name); + request.setJsonEntity("{\"leader_index_patterns\": [\"" + pattern + "\"], \"remote_cluster\": \"local\"," + + "\"follow_index_pattern\": \"copy-{{leader_index}}\", \"read_poll_timeout\": \"10ms\"}"); + assertOK(client().performRequest(request)); + } + + private static void deleteAutoFollowPattern(String patternName) throws IOException { + Request request = new Request("DELETE", "/_ccr/auto_follow/" + patternName); + assertOK(client().performRequest(request)); + } + + private static void index(String index, String id) throws IOException { + Request request = new Request("POST", "/" + index + "/_doc/" + id); + request.setJsonEntity("{}"); + assertOK(client().performRequest(request)); + } + + private static void assertDocumentExists(String index, String... ids) throws IOException { + for (String id : ids) { + Request request = new Request("HEAD", "/" + index + "/_doc/" + id); + Response response = client().performRequest(request); + assertThat(response.getStatusLine().getStatusCode(), equalTo(200)); + } + } + + private static void setupRemoteCluster() throws IOException { + Request request = new Request("GET", "/_nodes"); + Map nodesResponse = (Map) toMap(client().performRequest(request)).get("nodes"); + // Select node info of first node (we don't know the node id): + nodesResponse = (Map) nodesResponse.get(nodesResponse.keySet().iterator().next()); + String transportAddress = (String) nodesResponse.get("transport_address"); + + LOGGER.info("Configuring local remote cluster [{}]", transportAddress); + request = new Request("PUT", "/_cluster/settings"); + request.setJsonEntity("{\"persistent\": {\"cluster.remote.local.seeds\": \"" + transportAddress + "\"}}"); + assertThat(client().performRequest(request).getStatusLine().getStatusCode(), equalTo(200)); + } + + private int getNumberOfSuccessfulFollowedIndices() throws IOException { + Request statsRequest = new Request("GET", "/_ccr/stats"); + Map response = toMap(client().performRequest(statsRequest)); + Integer actualSuccessfulFollowedIndices = ObjectPath.eval("auto_follow_stats.number_of_successful_follow_indices", response); + if (actualSuccessfulFollowedIndices != null) { + return actualSuccessfulFollowedIndices; + } else { + return -1; + } + } + + private void assertFollowerGlobalCheckpoint(String followerIndex, int expectedFollowerCheckpoint) throws IOException { + Request statsRequest = new Request("GET", "/" + followerIndex + "/_stats"); + statsRequest.addParameter("level", "shards"); + // Just docs metric is sufficient here: + statsRequest.addParameter("metric", "docs"); + Map response = toMap(client().performRequest(statsRequest)); + LOGGER.info("INDEX STATS={}", response); + assertThat(((Map) response.get("indices")).size(), equalTo(1)); + Integer actualFollowerCheckpoint = ObjectPath.eval("indices." 
+ followerIndex + ".shards.0.0.seq_no.global_checkpoint", response); + assertThat(actualFollowerCheckpoint, equalTo(expectedFollowerCheckpoint)); + } + + private static Map toMap(Response response) throws IOException { + return XContentHelper.convertToMap(JsonXContent.jsonXContent, EntityUtils.toString(response.getEntity()), false); + } + +} diff --git a/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/IndexingIT.java b/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/IndexingIT.java index d657a42d7df4c..8d8d999a55b8d 100644 --- a/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/IndexingIT.java +++ b/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/IndexingIT.java @@ -9,6 +9,7 @@ import org.elasticsearch.common.Booleans; import org.elasticsearch.common.Strings; import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.rest.action.document.RestBulkAction; import org.elasticsearch.Version; import org.elasticsearch.client.Request; import org.elasticsearch.client.Response; @@ -17,7 +18,7 @@ import java.nio.charset.StandardCharsets; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; -import static org.elasticsearch.rest.action.search.RestSearchAction.TOTAL_HIT_AS_INT_PARAM; +import static org.elasticsearch.rest.action.search.RestSearchAction.TOTAL_HITS_AS_INT_PARAM; import static org.hamcrest.Matchers.equalTo; /** @@ -137,13 +138,14 @@ private void bulk(String index, String valueSuffix, int count) throws IOExceptio } Request bulk = new Request("POST", "/_bulk"); bulk.addParameter("refresh", "true"); + bulk.setOptions(expectWarnings(RestBulkAction.TYPES_DEPRECATION_MESSAGE)); bulk.setJsonEntity(b.toString()); client().performRequest(bulk); } private void assertCount(String index, int count) throws IOException { Request searchTestIndexRequest = new Request("POST", "/" + index + "/_search"); - searchTestIndexRequest.addParameter(TOTAL_HIT_AS_INT_PARAM, "true"); + searchTestIndexRequest.addParameter(TOTAL_HITS_AS_INT_PARAM, "true"); searchTestIndexRequest.addParameter("filter_path", "hits.total"); Response searchTestIndexResponse = client().performRequest(searchTestIndexRequest); assertEquals("{\"hits\":{\"total\":" + count + "}}", diff --git a/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/RollupIDUpgradeIT.java b/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/RollupIDUpgradeIT.java index 9eef5968f2774..2f653c2bbf1ca 100644 --- a/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/RollupIDUpgradeIT.java +++ b/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/RollupIDUpgradeIT.java @@ -25,7 +25,7 @@ import java.util.Map; import java.util.concurrent.TimeUnit; -import static org.elasticsearch.rest.action.search.RestSearchAction.TOTAL_HIT_AS_INT_PARAM; +import static org.elasticsearch.rest.action.search.RestSearchAction.TOTAL_HITS_AS_INT_PARAM; import static org.hamcrest.Matchers.anyOf; import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.equalTo; @@ -197,7 +197,7 @@ private List getSearchResults(int expectedCount) throws Exception { collectedIDs.clear(); client().performRequest(new Request("POST", "rollup/_refresh")); final Request searchRequest = new Request("GET", "rollup/_search"); - searchRequest.addParameter(TOTAL_HIT_AS_INT_PARAM, "true"); + searchRequest.addParameter(TOTAL_HITS_AS_INT_PARAM, "true"); try { Map searchResponse = 
entityAsMap(client().performRequest(searchRequest)); assertNotNull(ObjectPath.eval("hits.total", searchResponse)); diff --git a/x-pack/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/mixed_cluster/40_ml_datafeed_crud.yml b/x-pack/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/mixed_cluster/40_ml_datafeed_crud.yml index 97a793991f4b7..b9ae06499d112 100644 --- a/x-pack/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/mixed_cluster/40_ml_datafeed_crud.yml +++ b/x-pack/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/mixed_cluster/40_ml_datafeed_crud.yml @@ -5,7 +5,6 @@ datafeed_id: old-cluster-datafeed - match: { datafeeds.0.datafeed_id: "old-cluster-datafeed"} - length: { datafeeds.0.indices: 1 } - - length: { datafeeds.0.types: 1 } - gte: { datafeeds.0.scroll_size: 2000 } - do: @@ -43,7 +42,6 @@ { "job_id":"mixed-cluster-datafeed-job", "indices":["airline-data"], - "types":["response"], "scroll_size": 2000 } diff --git a/x-pack/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/mixed_cluster/80_ml_results_upgrade.yml b/x-pack/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/mixed_cluster/80_ml_results_upgrade.yml new file mode 100644 index 0000000000000..73478be65597e --- /dev/null +++ b/x-pack/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/mixed_cluster/80_ml_results_upgrade.yml @@ -0,0 +1,11 @@ +--- +"Verify jobs exist": + - do: + ml.get_jobs: + job_id: old-cluster-job-to-upgrade + - match: { count: 1 } + + - do: + ml.get_jobs: + job_id: old-cluster-job-to-upgrade-custom + - match: { count: 1 } diff --git a/x-pack/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/old_cluster/80_ml_results_upgrade.yml b/x-pack/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/old_cluster/80_ml_results_upgrade.yml new file mode 100644 index 0000000000000..d21b5e6def61d --- /dev/null +++ b/x-pack/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/old_cluster/80_ml_results_upgrade.yml @@ -0,0 +1,120 @@ +--- +"Put job on the old cluster and post some data": + + - do: + ml.put_job: + job_id: old-cluster-job-to-upgrade + body: > + { + "description":"Cluster upgrade", + "analysis_config" : { + "bucket_span": "60s", + "detectors" :[{"function":"metric","field_name":"responsetime","by_field_name":"airline"}] + }, + "analysis_limits" : { + "model_memory_limit": "50mb" + }, + "data_description" : { + "format":"xcontent", + "time_field":"time", + "time_format":"epoch" + } + } + - match: { job_id: old-cluster-job-to-upgrade } + + - do: + ml.open_job: + job_id: old-cluster-job-to-upgrade + + - do: + ml.post_data: + job_id: old-cluster-job-to-upgrade + body: + - airline: AAL + responsetime: 132.2046 + sourcetype: post-data-job + time: 1403481600 + - airline: JZA + responsetime: 990.4628 + sourcetype: post-data-job + time: 1403481700 + - match: { processed_record_count: 2 } + + - do: + ml.close_job: + job_id: old-cluster-job-to-upgrade + + - do: + ml.get_buckets: + job_id: old-cluster-job-to-upgrade + - match: { count: 1 } + +# Wait for indices to be fully allocated before +# killing the node + - do: + cluster.health: + index: [".ml-state", ".ml-anomalies-shared"] + wait_for_status: green + +--- +"Put job on the old cluster with a custom index": + - do: + ml.put_job: + job_id: old-cluster-job-to-upgrade-custom + body: > + { + "description":"Cluster upgrade", + "analysis_config" : { + "bucket_span": "60s", + "detectors" :[{"function":"metric","field_name":"responsetime","by_field_name":"airline"}] + }, + "analysis_limits" : { + 
"model_memory_limit": "50mb" + }, + "data_description" : { + "format":"xcontent", + "time_field":"time", + "time_format":"epoch" + }, + "results_index_name": "old-cluster-job-to-upgrade-custom" + } + - match: { job_id: old-cluster-job-to-upgrade-custom } + + - do: + ml.open_job: + job_id: old-cluster-job-to-upgrade-custom + + - do: + ml.post_data: + job_id: old-cluster-job-to-upgrade-custom + body: + - airline: AAL + responsetime: 132.2046 + sourcetype: post-data-job + time: 1403481600 + - airline: JZA + responsetime: 990.4628 + sourcetype: post-data-job + time: 1403481700 + - airline: JZA + responsetime: 423.0000 + sourcetype: post-data-job + time: 1403481800 + - match: { processed_record_count: 3 } + + - do: + ml.close_job: + job_id: old-cluster-job-to-upgrade-custom + + - do: + ml.get_buckets: + job_id: old-cluster-job-to-upgrade-custom + - match: { count: 3 } + +# Wait for indices to be fully allocated before +# killing the node + - do: + cluster.health: + index: [".ml-state", ".ml-anomalies-old-cluster-job-to-upgrade-custom"] + wait_for_status: green + diff --git a/x-pack/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/upgraded_cluster/40_ml_datafeed_crud.yml b/x-pack/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/upgraded_cluster/40_ml_datafeed_crud.yml index a1ab7dfb91435..39864ae6d9cf0 100644 --- a/x-pack/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/upgraded_cluster/40_ml_datafeed_crud.yml +++ b/x-pack/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/upgraded_cluster/40_ml_datafeed_crud.yml @@ -23,7 +23,6 @@ setup: datafeed_id: old-cluster-datafeed - match: { datafeeds.0.datafeed_id: "old-cluster-datafeed"} - length: { datafeeds.0.indices: 1 } - - length: { datafeeds.0.types: 1 } - gte: { datafeeds.0.scroll_size: 2000 } - do: @@ -37,7 +36,6 @@ setup: datafeed_id: mixed-cluster-datafeed - match: { datafeeds.0.datafeed_id: "mixed-cluster-datafeed"} - length: { datafeeds.0.indices: 1 } - - length: { datafeeds.0.types: 1 } - gte: { datafeeds.0.scroll_size: 2000 } - do: diff --git a/x-pack/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/upgraded_cluster/50_token_auth.yml b/x-pack/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/upgraded_cluster/50_token_auth.yml index fad78b2a085f8..a207c43086028 100644 --- a/x-pack/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/upgraded_cluster/50_token_auth.yml +++ b/x-pack/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/upgraded_cluster/50_token_auth.yml @@ -36,7 +36,7 @@ - do: headers: Authorization: Bearer boom - catch: /missing authentication token/ + catch: /missing authentication credentials/ search: rest_total_hits_as_int: true index: token_index diff --git a/x-pack/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/upgraded_cluster/80_ml_results_upgrade.yml b/x-pack/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/upgraded_cluster/80_ml_results_upgrade.yml new file mode 100644 index 0000000000000..f049b9c073ad8 --- /dev/null +++ b/x-pack/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/upgraded_cluster/80_ml_results_upgrade.yml @@ -0,0 +1,158 @@ +--- +"Migrate results data to latest index binary version": + # Verify that all the results are there and the typical indices exist + - do: + ml.get_buckets: + job_id: old-cluster-job-to-upgrade + - match: { count: 1 } + + - do: + ml.get_buckets: + job_id: old-cluster-job-to-upgrade-custom + - match: { count: 3 } + + - do: + indices.exists: + index: .ml-anomalies-shared + + - is_true: '' + + - do: + 
indices.get_settings: + index: .ml-anomalies-shared + name: index.version.created + + - match: { \.ml-anomalies-shared.settings.index.version.created: '/6\d+/' } + + - do: + indices.exists: + index: .ml-anomalies-custom-old-cluster-job-to-upgrade-custom + + - is_true: '' + + # Do the upgrade + - do: + ml.upgrade: + wait_for_completion: true + + - match: { acknowledged: true } + + # Verify that old indices are gone + - do: + indices.exists: + index: .ml-anomalies-shared + + - is_false: '' + + - do: + indices.exists: + index: .ml-anomalies-custom-old-cluster-job-to-upgrade-custom + + - is_false: '' + + # Verify that results can still be retrieved + + - do: + indices.refresh: {} + + - do: + ml.get_buckets: + job_id: old-cluster-job-to-upgrade + - match: { count: 1 } + + - do: + ml.get_buckets: + job_id: old-cluster-job-to-upgrade-custom + - match: { count: 3 } + + # Verify the created version is correct + + - do: + indices.get_settings: + index: .ml-anomalies-old-cluster-job-to-upgrade + name: index.version.created + - match: { \.ml-anomalies-shared-7.settings.index.version.created: '/7\d+/' } + - match: { \.ml-anomalies-shared-7r.settings.index.version.created: '/7\d+/' } + + - do: + indices.get_settings: + index: .ml-anomalies-old-cluster-job-to-upgrade-custom + name: index.version.created + - match: { \.ml-anomalies-custom-old-cluster-job-to-upgrade-custom-7.settings.index.version.created: '/7\d+/' } + - match: { \.ml-anomalies-custom-old-cluster-job-to-upgrade-custom-7r.settings.index.version.created: '/7\d+/' } + + # Create a new job to verify that the .ml-anomalies-shared index gets created again without issues + + - do: + ml.put_job: + job_id: upgraded-cluster-job-should-not-upgrade + body: > + { + "description":"Cluster upgrade", + "analysis_config" : { + "bucket_span": "60s", + "detectors" :[{"function":"metric","field_name":"responsetime","by_field_name":"airline"}] + }, + "analysis_limits" : { + "model_memory_limit": "50mb" + }, + "data_description" : { + "format":"xcontent", + "time_field":"time", + "time_format":"epoch" + } + } + - match: { job_id: upgraded-cluster-job-should-not-upgrade } + + - do: + ml.open_job: + job_id: upgraded-cluster-job-should-not-upgrade + + - do: + ml.post_data: + job_id: upgraded-cluster-job-should-not-upgrade + body: + - airline: AAL + responsetime: 132.2046 + sourcetype: post-data-job + time: 1403481600 + - airline: JZA + responsetime: 990.4628 + sourcetype: post-data-job + time: 1403481700 + - match: { processed_record_count: 2 } + + - do: + ml.close_job: + job_id: upgraded-cluster-job-should-not-upgrade + + - do: + ml.get_buckets: + job_id: upgraded-cluster-job-should-not-upgrade + - match: { count: 1 } + + - do: + indices.exists: + index: .ml-anomalies-shared + + - is_true: '' + + - do: + indices.get_settings: + index: .ml-anomalies-shared + name: index.version.created + + - match: { \.ml-anomalies-shared.settings.index.version.created: '/7\d+/' } + + # Do the upgrade again, as nothing needs upgrading now + - do: + ml.upgrade: + wait_for_completion: true + + - match: { acknowledged: true } + + - do: + indices.exists: + index: .ml-anomalies-shared + + - is_true: '' diff --git a/x-pack/qa/saml-idp-tests/build.gradle b/x-pack/qa/saml-idp-tests/build.gradle index c2a4b7ce7a299..2b90cbaf9a679 100644 --- a/x-pack/qa/saml-idp-tests/build.gradle +++ b/x-pack/qa/saml-idp-tests/build.gradle @@ -83,15 +83,19 @@ forbiddenPatterns { exclude '**/*.key' } -thirdPartyAudit.excludes = [ - // uses internal java api: sun.misc.Unsafe -
'com.google.common.cache.Striped64', - 'com.google.common.cache.Striped64$1', - 'com.google.common.cache.Striped64$Cell', - 'com.google.common.primitives.UnsignedBytes$LexicographicalComparatorHolder$UnsafeComparator', - 'com.google.common.primitives.UnsignedBytes$LexicographicalComparatorHolder$UnsafeComparator$1', - 'com.google.common.util.concurrent.AbstractFuture$UnsafeAtomicHelper', - 'com.google.common.util.concurrent.AbstractFuture$UnsafeAtomicHelper$1', - // missing - 'com.ibm.icu.lang.UCharacter' -] +thirdPartyAudit { + ignoreViolations ( + // uses internal java api: sun.misc.Unsafe + 'com.google.common.cache.Striped64', + 'com.google.common.cache.Striped64$1', + 'com.google.common.cache.Striped64$Cell', + 'com.google.common.primitives.UnsignedBytes$LexicographicalComparatorHolder$UnsafeComparator', + 'com.google.common.primitives.UnsignedBytes$LexicographicalComparatorHolder$UnsafeComparator$1', + 'com.google.common.util.concurrent.AbstractFuture$UnsafeAtomicHelper', + 'com.google.common.util.concurrent.AbstractFuture$UnsafeAtomicHelper$1' + ) + + ignoreMissingClasses ( + 'com.ibm.icu.lang.UCharacter' + ) +} diff --git a/x-pack/qa/security-migrate-tests/build.gradle b/x-pack/qa/security-migrate-tests/build.gradle index abc3564ca13f2..88006a38bd5e7 100644 --- a/x-pack/qa/security-migrate-tests/build.gradle +++ b/x-pack/qa/security-migrate-tests/build.gradle @@ -31,3 +31,12 @@ integTestCluster { return tmpFile.exists() } } + +testingConventions { + naming.clear() + naming { + IT { + baseClass 'org.elasticsearch.xpack.security.MigrateToolTestCase' + } + } +} diff --git a/x-pack/qa/smoke-test-plugins-ssl/build.gradle b/x-pack/qa/smoke-test-plugins-ssl/build.gradle index 2304421bee724..5d1bccd10a6fe 100644 --- a/x-pack/qa/smoke-test-plugins-ssl/build.gradle +++ b/x-pack/qa/smoke-test-plugins-ssl/build.gradle @@ -1,6 +1,4 @@ -import org.elasticsearch.gradle.LoggedExec import org.elasticsearch.gradle.MavenFilteringHack -import org.elasticsearch.gradle.plugin.PluginBuildPlugin import org.elasticsearch.gradle.test.NodeInfo import javax.net.ssl.HttpsURLConnection diff --git a/x-pack/qa/smoke-test-watcher-with-security/src/test/java/org/elasticsearch/smoketest/SmokeTestWatcherWithSecurityIT.java b/x-pack/qa/smoke-test-watcher-with-security/src/test/java/org/elasticsearch/smoketest/SmokeTestWatcherWithSecurityIT.java index 1b3b3f1bbeb1b..226ebc29ec739 100644 --- a/x-pack/qa/smoke-test-watcher-with-security/src/test/java/org/elasticsearch/smoketest/SmokeTestWatcherWithSecurityIT.java +++ b/x-pack/qa/smoke-test-watcher-with-security/src/test/java/org/elasticsearch/smoketest/SmokeTestWatcherWithSecurityIT.java @@ -25,7 +25,7 @@ import java.util.concurrent.atomic.AtomicReference; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; -import static org.elasticsearch.rest.action.search.RestSearchAction.TOTAL_HIT_AS_INT_PARAM; +import static org.elasticsearch.rest.action.search.RestSearchAction.TOTAL_HITS_AS_INT_PARAM; import static org.elasticsearch.xpack.core.security.authc.support.UsernamePasswordToken.basicAuthHeaderValue; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.greaterThanOrEqualTo; @@ -149,7 +149,7 @@ public void testSearchInputHasPermissions() throws Exception { builder.startObject("condition").startObject("compare").startObject("ctx.payload.hits.total").field("gte", 1) .endObject().endObject().endObject(); builder.startObject("actions").startObject("logging").startObject("logging") - .field("text", "successfully ran " + 
watchId + "to test for search inpput").endObject().endObject().endObject(); + .field("text", "successfully ran " + watchId + "to test for search input").endObject().endObject().endObject(); builder.endObject(); indexWatch(watchId, builder); @@ -324,7 +324,7 @@ private ObjectPath getWatchHistoryEntry(String watchId, String state) throws Exc builder.endObject(); Request searchRequest = new Request("POST", "/.watcher-history-*/_search"); - searchRequest.addParameter(TOTAL_HIT_AS_INT_PARAM, "true"); + searchRequest.addParameter(TOTAL_HITS_AS_INT_PARAM, "true"); searchRequest.setJsonEntity(Strings.toString(builder)); Response response = client().performRequest(searchRequest); ObjectPath objectPath = ObjectPath.createFromResponse(response); diff --git a/x-pack/qa/smoke-test-watcher/build.gradle b/x-pack/qa/smoke-test-watcher/build.gradle index fc22fe9aa065f..fb2e4c06ced88 100644 --- a/x-pack/qa/smoke-test-watcher/build.gradle +++ b/x-pack/qa/smoke-test-watcher/build.gradle @@ -1,8 +1,3 @@ -import groovy.json.JsonSlurper - -import javax.net.ssl.HttpsURLConnection -import java.nio.charset.StandardCharsets - apply plugin: 'elasticsearch.standalone-rest-test' apply plugin: 'elasticsearch.rest-test' @@ -21,3 +16,7 @@ integTestCluster { setting 'xpack.license.self_generated.type', 'trial' setting 'logger.org.elasticsearch.xpack.watcher', 'DEBUG' } + +integTestRunner { + include "**/*Tests.class" +} \ No newline at end of file diff --git a/x-pack/qa/smoke-test-watcher/src/test/java/org/elasticsearch/smoketest/SmokeTestWatcherTestSuiteIT.java b/x-pack/qa/smoke-test-watcher/src/test/java/org/elasticsearch/smoketest/SmokeTestWatcherTestSuiteIT.java index 162c6c07643cf..a7350fcff03d1 100644 --- a/x-pack/qa/smoke-test-watcher/src/test/java/org/elasticsearch/smoketest/SmokeTestWatcherTestSuiteIT.java +++ b/x-pack/qa/smoke-test-watcher/src/test/java/org/elasticsearch/smoketest/SmokeTestWatcherTestSuiteIT.java @@ -23,7 +23,7 @@ import java.util.concurrent.atomic.AtomicReference; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; -import static org.elasticsearch.rest.action.search.RestSearchAction.TOTAL_HIT_AS_INT_PARAM; +import static org.elasticsearch.rest.action.search.RestSearchAction.TOTAL_HITS_AS_INT_PARAM; import static org.elasticsearch.xpack.core.security.authc.support.UsernamePasswordToken.basicAuthHeaderValue; import static org.hamcrest.Matchers.greaterThanOrEqualTo; import static org.hamcrest.Matchers.hasEntry; @@ -194,7 +194,7 @@ private ObjectPath getWatchHistoryEntry(String watchId) throws Exception { builder.endObject(); Request searchRequest = new Request("POST", "/.watcher-history-*/_search"); - searchRequest.addParameter(TOTAL_HIT_AS_INT_PARAM, "true"); + searchRequest.addParameter(TOTAL_HITS_AS_INT_PARAM, "true"); searchRequest.setJsonEntity(Strings.toString(builder)); Response response = client().performRequest(searchRequest); ObjectPath objectPath = ObjectPath.createFromResponse(response); diff --git a/x-pack/qa/smoke-test-watcher/src/test/java/org/elasticsearch/smoketest/WatcherTemplateIT.java b/x-pack/qa/smoke-test-watcher/src/test/java/org/elasticsearch/smoketest/WatcherTemplateTests.java similarity index 99% rename from x-pack/qa/smoke-test-watcher/src/test/java/org/elasticsearch/smoketest/WatcherTemplateIT.java rename to x-pack/qa/smoke-test-watcher/src/test/java/org/elasticsearch/smoketest/WatcherTemplateTests.java index e9c5106d44e87..df98e73118711 100644 --- a/x-pack/qa/smoke-test-watcher/src/test/java/org/elasticsearch/smoketest/WatcherTemplateIT.java +++ 
b/x-pack/qa/smoke-test-watcher/src/test/java/org/elasticsearch/smoketest/WatcherTemplateTests.java @@ -30,7 +30,7 @@ import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.notNullValue; -public class WatcherTemplateIT extends ESTestCase { +public class WatcherTemplateTests extends ESTestCase { private TextTemplateEngine textTemplateEngine; diff --git a/x-pack/qa/third-party/hipchat/build.gradle b/x-pack/qa/third-party/hipchat/build.gradle index d72bb778e52c8..f864fb62398a4 100644 --- a/x-pack/qa/third-party/hipchat/build.gradle +++ b/x-pack/qa/third-party/hipchat/build.gradle @@ -1,5 +1,3 @@ -import org.elasticsearch.gradle.LoggedExec - apply plugin: 'elasticsearch.standalone-rest-test' apply plugin: 'elasticsearch.rest-test' diff --git a/x-pack/qa/third-party/slack/build.gradle b/x-pack/qa/third-party/slack/build.gradle index 03537966b29ff..9fdfaeb826667 100644 --- a/x-pack/qa/third-party/slack/build.gradle +++ b/x-pack/qa/third-party/slack/build.gradle @@ -1,6 +1,3 @@ -import org.elasticsearch.gradle.test.NodeInfo -import org.elasticsearch.gradle.LoggedExec - apply plugin: 'elasticsearch.standalone-rest-test' apply plugin: 'elasticsearch.rest-test' diff --git a/x-pack/qa/transport-client-tests/build.gradle b/x-pack/qa/transport-client-tests/build.gradle index 3ece6dd1147c4..5ca96eb0d7a87 100644 --- a/x-pack/qa/transport-client-tests/build.gradle +++ b/x-pack/qa/transport-client-tests/build.gradle @@ -10,3 +10,13 @@ integTestCluster { setting 'xpack.security.enabled', 'false' setting 'xpack.license.self_generated.type', 'trial' } + + +testingConventions { + naming.clear() + naming { + IT { + baseClass 'org.elasticsearch.xpack.ml.client.ESXPackSmokeClientTestCase' + } + } +} \ No newline at end of file diff --git a/x-pack/qa/transport-client-tests/src/test/java/org/elasticsearch/xpack/ml/client/MLTransportClientIT.java b/x-pack/qa/transport-client-tests/src/test/java/org/elasticsearch/xpack/ml/client/MLTransportClientIT.java index dceb2d1398ab9..1a4959c0be84a 100644 --- a/x-pack/qa/transport-client-tests/src/test/java/org/elasticsearch/xpack/ml/client/MLTransportClientIT.java +++ b/x-pack/qa/transport-client-tests/src/test/java/org/elasticsearch/xpack/ml/client/MLTransportClientIT.java @@ -137,7 +137,6 @@ public void testMLTransportClient_DateFeedActions() { String datafeedIndex = "ml-transport-client-test"; String datatype = "type-bar"; datafeed.setIndices(Collections.singletonList(datafeedIndex)); - datafeed.setTypes(Collections.singletonList("type-bar")); mlClient.putDatafeed(new PutDatafeedAction.Request(datafeed.build())).actionGet(); diff --git a/x-pack/qa/vagrant/src/test/resources/packaging/tests/bootstrap_password.bash b/x-pack/qa/vagrant/src/test/resources/packaging/tests/bootstrap_password.bash index 587b782541d87..2e62635b6baad 100644 --- a/x-pack/qa/vagrant/src/test/resources/packaging/tests/bootstrap_password.bash +++ b/x-pack/qa/vagrant/src/test/resources/packaging/tests/bootstrap_password.bash @@ -104,7 +104,7 @@ SETUP_OK false } - curl -s -XGET 'http://127.0.0.1:9200' | grep "missing authentication token for REST" + curl -s -XGET 'http://127.0.0.1:9200' | grep "missing authentication credentials for REST" # Disable bash history expansion because passwords can contain "!" 
set +H diff --git a/x-pack/qa/vagrant/src/test/resources/packaging/tests/certgen.bash b/x-pack/qa/vagrant/src/test/resources/packaging/tests/certgen.bash index fbf0c9efcfcc9..1f81bfcbd490a 100644 --- a/x-pack/qa/vagrant/src/test/resources/packaging/tests/certgen.bash +++ b/x-pack/qa/vagrant/src/test/resources/packaging/tests/certgen.bash @@ -362,7 +362,7 @@ DATA_SETTINGS echo "$output" false } - echo "$output" | grep "missing authentication token" + echo "$output" | grep "missing authentication credentials" } @test "[$GROUP] test node to node communication" { diff --git a/x-pack/qa/vagrant/src/test/resources/packaging/tests/setup_passwords.bash b/x-pack/qa/vagrant/src/test/resources/packaging/tests/setup_passwords.bash index c33947c1ac930..d892b379df4f0 100644 --- a/x-pack/qa/vagrant/src/test/resources/packaging/tests/setup_passwords.bash +++ b/x-pack/qa/vagrant/src/test/resources/packaging/tests/setup_passwords.bash @@ -62,7 +62,7 @@ SETUP_AUTO false } - curl -s -XGET localhost:9200 | grep "missing authentication token for REST" + curl -s -XGET localhost:9200 | grep "missing authentication credentials for REST" # Disable bash history expansion because passwords can contain "!" set +H diff --git a/x-pack/transport-client/build.gradle b/x-pack/transport-client/build.gradle index a96f4146fbf67..87a626be65d42 100644 --- a/x-pack/transport-client/build.gradle +++ b/x-pack/transport-client/build.gradle @@ -29,6 +29,15 @@ namingConventions { skipIntegTestInDisguise = true } +testingConventions { + naming.clear() + naming { + Tests { + baseClass 'com.carrotsearch.randomizedtesting.RandomizedTest' + } + } +} + publishing { publications { nebula(MavenPublication) {