diff --git a/.ci/dockerOnLinuxExclusions b/.ci/dockerOnLinuxExclusions index 3a898926c395b..18d7baeee8b11 100644 --- a/.ci/dockerOnLinuxExclusions +++ b/.ci/dockerOnLinuxExclusions @@ -7,9 +7,11 @@ debian-8 opensuse-15-1 ol-7.7 -sles-12 +sles-12.3 # older version used in Vagrant image +sles-12.4 +sles-15.1 -# These OSes are deprecated and filtered starting with 8.0.0, but need to be excluded +# These OSes are deprecated and filtered starting with 8.0.0, but need to be excluded # for PR checks centos-6 ol-6.10 diff --git a/benchmarks/src/main/java/org/elasticsearch/benchmark/fs/AvailableIndexFoldersBenchmark.java b/benchmarks/src/main/java/org/elasticsearch/benchmark/fs/AvailableIndexFoldersBenchmark.java index 3b0416c761d16..f2c1208ac2462 100644 --- a/benchmarks/src/main/java/org/elasticsearch/benchmark/fs/AvailableIndexFoldersBenchmark.java +++ b/benchmarks/src/main/java/org/elasticsearch/benchmark/fs/AvailableIndexFoldersBenchmark.java @@ -55,13 +55,14 @@ public class AvailableIndexFoldersBenchmark { @Setup public void setup() throws IOException { Path path = Files.createTempDirectory("test"); - String[] paths = new String[] {path.toString()}; + String[] paths = new String[] { path.toString() }; nodePath = new NodeEnvironment.NodePath(path); LogConfigurator.setNodeName("test"); Settings settings = Settings.builder() .put(Environment.PATH_HOME_SETTING.getKey(), path) - .putList(Environment.PATH_DATA_SETTING.getKey(), paths).build(); + .putList(Environment.PATH_DATA_SETTING.getKey(), paths) + .build(); nodeEnv = new NodeEnvironment(settings, new Environment(settings, null)); Files.createDirectories(nodePath.indicesPath); @@ -80,7 +81,6 @@ public void setup() throws IOException { } } - @Benchmark public Set availableIndexFolderNaive() throws IOException { return nodeEnv.availableIndexFoldersForPath(nodePath); diff --git a/benchmarks/src/main/java/org/elasticsearch/benchmark/indices/breaker/MemoryStatsBenchmark.java 
b/benchmarks/src/main/java/org/elasticsearch/benchmark/indices/breaker/MemoryStatsBenchmark.java index 9537cfb0bb3cf..3104b19fc2d5d 100644 --- a/benchmarks/src/main/java/org/elasticsearch/benchmark/indices/breaker/MemoryStatsBenchmark.java +++ b/benchmarks/src/main/java/org/elasticsearch/benchmark/indices/breaker/MemoryStatsBenchmark.java @@ -41,11 +41,11 @@ @BenchmarkMode(Mode.AverageTime) @OutputTimeUnit(TimeUnit.MICROSECONDS) @State(Scope.Benchmark) -@SuppressWarnings("unused") //invoked by benchmarking framework +@SuppressWarnings("unused") // invoked by benchmarking framework public class MemoryStatsBenchmark { private static final MemoryMXBean MEMORY_MX_BEAN = ManagementFactory.getMemoryMXBean(); - @Param({"0", "16", "256", "4096"}) + @Param({ "0", "16", "256", "4096" }) private int tokens; @Benchmark @@ -102,4 +102,3 @@ public long getMemoryStats_64() { return MEMORY_MX_BEAN.getHeapMemoryUsage().getUsed(); } } - diff --git a/benchmarks/src/main/java/org/elasticsearch/benchmark/routing/allocation/AllocationBenchmark.java b/benchmarks/src/main/java/org/elasticsearch/benchmark/routing/allocation/AllocationBenchmark.java index 173a293f3e5cc..91573e5a406e1 100644 --- a/benchmarks/src/main/java/org/elasticsearch/benchmark/routing/allocation/AllocationBenchmark.java +++ b/benchmarks/src/main/java/org/elasticsearch/benchmark/routing/allocation/AllocationBenchmark.java @@ -49,7 +49,7 @@ @BenchmarkMode(Mode.AverageTime) @OutputTimeUnit(TimeUnit.MILLISECONDS) @State(Scope.Benchmark) -@SuppressWarnings("unused") //invoked by benchmarking framework +@SuppressWarnings("unused") // invoked by benchmarking framework public class AllocationBenchmark { // Do NOT make any field final (even if it is not annotated with @Param)! 
See also // http://hg.openjdk.java.net/code-tools/jmh/file/tip/jmh-samples/src/main/java/org/openjdk/jmh/samples/JMHSample_10_ConstantFold.java @@ -106,8 +106,7 @@ public class AllocationBenchmark { " 10| 10| 2| 50", " 100| 1| 2| 50", " 100| 3| 2| 50", - " 100| 10| 2| 50" - }) + " 100| 10| 2| 50" }) public String indicesShardsReplicasNodes = "10|1|0|1"; public int numTags = 2; @@ -124,13 +123,14 @@ public void setUp() throws Exception { int numReplicas = toInt(params[2]); int numNodes = toInt(params[3]); - strategy = Allocators.createAllocationService(Settings.builder() - .put("cluster.routing.allocation.awareness.attributes", "tag") - .build()); + strategy = Allocators.createAllocationService( + Settings.builder().put("cluster.routing.allocation.awareness.attributes", "tag").build() + ); MetaData.Builder mb = MetaData.builder(); for (int i = 1; i <= numIndices; i++) { - mb.put(IndexMetaData.builder("test_" + i) + mb.put( + IndexMetaData.builder("test_" + i) .settings(Settings.builder().put("index.version.created", Version.CURRENT)) .numberOfShards(numShards) .numberOfReplicas(numReplicas) @@ -147,8 +147,10 @@ public void setUp() throws Exception { nb.add(Allocators.newNode("node" + i, Collections.singletonMap("tag", "tag_" + (i % numTags)))); } initialClusterState = ClusterState.builder(ClusterName.CLUSTER_NAME_SETTING.getDefault(Settings.EMPTY)) - .metaData(metaData).routingTable(routingTable).nodes - (nb).build(); + .metaData(metaData) + .routingTable(routingTable) + .nodes(nb) + .build(); } private int toInt(String v) { @@ -159,8 +161,10 @@ private int toInt(String v) { public ClusterState measureAllocation() { ClusterState clusterState = initialClusterState; while (clusterState.getRoutingNodes().hasUnassignedShards()) { - clusterState = strategy.applyStartedShards(clusterState, clusterState.getRoutingNodes() - .shardsWithState(ShardRoutingState.INITIALIZING)); + clusterState = strategy.applyStartedShards( + clusterState, + 
clusterState.getRoutingNodes().shardsWithState(ShardRoutingState.INITIALIZING) + ); clusterState = strategy.reroute(clusterState, "reroute"); } return clusterState; diff --git a/benchmarks/src/main/java/org/elasticsearch/benchmark/routing/allocation/Allocators.java b/benchmarks/src/main/java/org/elasticsearch/benchmark/routing/allocation/Allocators.java index 90f03c26dcb27..9e86bef480bb1 100644 --- a/benchmarks/src/main/java/org/elasticsearch/benchmark/routing/allocation/Allocators.java +++ b/benchmarks/src/main/java/org/elasticsearch/benchmark/routing/allocation/Allocators.java @@ -36,7 +36,6 @@ import org.elasticsearch.common.util.set.Sets; import org.elasticsearch.gateway.GatewayAllocator; -import java.lang.reflect.InvocationTargetException; import java.util.Collection; import java.util.Collections; import java.util.List; @@ -67,33 +66,34 @@ private Allocators() { throw new AssertionError("Do not instantiate"); } - - public static AllocationService createAllocationService(Settings settings) throws NoSuchMethodException, InstantiationException, - IllegalAccessException, InvocationTargetException { - return createAllocationService(settings, new ClusterSettings(Settings.EMPTY, ClusterSettings - .BUILT_IN_CLUSTER_SETTINGS)); + public static AllocationService createAllocationService(Settings settings) { + return createAllocationService(settings, new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS)); } - public static AllocationService createAllocationService(Settings settings, ClusterSettings clusterSettings) throws - InvocationTargetException, NoSuchMethodException, InstantiationException, IllegalAccessException { + public static AllocationService createAllocationService(Settings settings, ClusterSettings clusterSettings) { return new AllocationService( defaultAllocationDeciders(settings, clusterSettings), - NoopGatewayAllocator.INSTANCE, new BalancedShardsAllocator(settings), EmptyClusterInfoService.INSTANCE); + 
NoopGatewayAllocator.INSTANCE, + new BalancedShardsAllocator(settings), + EmptyClusterInfoService.INSTANCE + ); } - public static AllocationDeciders defaultAllocationDeciders(Settings settings, ClusterSettings clusterSettings) throws - IllegalAccessException, InvocationTargetException, InstantiationException, NoSuchMethodException { - Collection deciders = - ClusterModule.createAllocationDeciders(settings, clusterSettings, Collections.emptyList()); + public static AllocationDeciders defaultAllocationDeciders(Settings settings, ClusterSettings clusterSettings) { + Collection deciders = ClusterModule.createAllocationDeciders(settings, clusterSettings, Collections.emptyList()); return new AllocationDeciders(deciders); - } private static final AtomicInteger portGenerator = new AtomicInteger(); public static DiscoveryNode newNode(String nodeId, Map attributes) { - return new DiscoveryNode("", nodeId, new TransportAddress(TransportAddress.META_ADDRESS, - portGenerator.incrementAndGet()), attributes, Sets.newHashSet(DiscoveryNodeRole.MASTER_ROLE, - DiscoveryNodeRole.DATA_ROLE), Version.CURRENT); + return new DiscoveryNode( + "", + nodeId, + new TransportAddress(TransportAddress.META_ADDRESS, portGenerator.incrementAndGet()), + attributes, + Sets.newHashSet(DiscoveryNodeRole.MASTER_ROLE, DiscoveryNodeRole.DATA_ROLE), + Version.CURRENT + ); } } diff --git a/benchmarks/src/main/java/org/elasticsearch/benchmark/time/DateFormatterBenchmark.java b/benchmarks/src/main/java/org/elasticsearch/benchmark/time/DateFormatterBenchmark.java index a364a331400a5..1ea2e9c1b2212 100644 --- a/benchmarks/src/main/java/org/elasticsearch/benchmark/time/DateFormatterBenchmark.java +++ b/benchmarks/src/main/java/org/elasticsearch/benchmark/time/DateFormatterBenchmark.java @@ -39,7 +39,7 @@ @BenchmarkMode(Mode.AverageTime) @OutputTimeUnit(TimeUnit.NANOSECONDS) @State(Scope.Benchmark) -@SuppressWarnings("unused") //invoked by benchmarking framework +@SuppressWarnings("unused") // invoked by 
benchmarking framework public class DateFormatterBenchmark { private final DateFormatter javaFormatter = DateFormatter.forPattern("8year_month_day||ordinal_date||epoch_millis"); diff --git a/benchmarks/src/main/java/org/elasticsearch/benchmark/time/DateFormatterFromBenchmark.java b/benchmarks/src/main/java/org/elasticsearch/benchmark/time/DateFormatterFromBenchmark.java index 86753dba02b0b..0826b1cc109bb 100644 --- a/benchmarks/src/main/java/org/elasticsearch/benchmark/time/DateFormatterFromBenchmark.java +++ b/benchmarks/src/main/java/org/elasticsearch/benchmark/time/DateFormatterFromBenchmark.java @@ -39,7 +39,7 @@ @BenchmarkMode(Mode.AverageTime) @OutputTimeUnit(TimeUnit.NANOSECONDS) @State(Scope.Benchmark) -@SuppressWarnings("unused") //invoked by benchmarking framework +@SuppressWarnings("unused") // invoked by benchmarking framework public class DateFormatterFromBenchmark { private final TemporalAccessor accessor = DateFormatter.forPattern("epoch_millis").parse("1234567890"); diff --git a/build.gradle b/build.gradle index 1a10c8946925a..a01fa26652b18 100644 --- a/build.gradle +++ b/build.gradle @@ -108,11 +108,13 @@ subprojects { // is greater than the number of unformatted projects, this can be // switched to an exclude list, and eventualy removed completely. 
def projectPathsToFormat = [ + ':benchmarks', ':build-tools', ':distribution:tools:java-version-checker', ':distribution:tools:keystore-cli', ':distribution:tools:launchers', ':distribution:tools:plugin-cli', + ':qa:os', ':x-pack:plugin:autoscaling', ':x-pack:plugin:enrich' ] diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/PluginBuildPlugin.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/PluginBuildPlugin.groovy index 52f225f11794a..88060ebc0837c 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/PluginBuildPlugin.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/PluginBuildPlugin.groovy @@ -24,6 +24,7 @@ import org.elasticsearch.gradle.NoticeTask import org.elasticsearch.gradle.Version import org.elasticsearch.gradle.VersionProperties import org.elasticsearch.gradle.info.BuildParams +import org.elasticsearch.gradle.test.rest.RestResourcesPlugin import org.elasticsearch.gradle.test.RestIntegTestTask import org.elasticsearch.gradle.testclusters.RunTask import org.elasticsearch.gradle.testclusters.TestClustersPlugin @@ -51,6 +52,7 @@ class PluginBuildPlugin implements Plugin { void apply(Project project) { project.pluginManager.apply(BuildPlugin) project.pluginManager.apply(TestClustersPlugin) + project.pluginManager.apply(RestResourcesPlugin) PluginPropertiesExtension extension = project.extensions.create(PLUGIN_EXTENSION_NAME, PluginPropertiesExtension, project) configureDependencies(project) diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/RestIntegTestTask.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/RestIntegTestTask.groovy index 58e72e2dd7be4..8d9517fe42629 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/RestIntegTestTask.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/RestIntegTestTask.groovy @@ -18,18 +18,12 @@ */ package org.elasticsearch.gradle.test -import org.elasticsearch.gradle.VersionProperties 
-import org.elasticsearch.gradle.info.BuildParams import org.elasticsearch.gradle.testclusters.ElasticsearchCluster import org.elasticsearch.gradle.testclusters.RestTestRunnerTask -import org.elasticsearch.gradle.tool.Boilerplate import org.gradle.api.DefaultTask import org.gradle.api.Task -import org.gradle.api.file.FileCopyDetails -import org.gradle.api.tasks.Copy -import org.gradle.api.tasks.Input import org.gradle.api.tasks.testing.Test -import org.gradle.plugins.ide.idea.IdeaPlugin + /** * A wrapper task around setting up a cluster and running rest tests. */ @@ -37,10 +31,6 @@ class RestIntegTestTask extends DefaultTask { protected Test runner - /** Flag indicating whether the rest tests in the rest spec should be run. */ - @Input - Boolean includePackaged = false - RestIntegTestTask() { runner = project.tasks.create("${name}Runner", RestTestRunnerTask.class) super.dependsOn(runner) @@ -69,10 +59,6 @@ class RestIntegTestTask extends DefaultTask { runner.systemProperty('test.clustername', System.getProperty("tests.clustername")) } - // copy the rest spec/tests onto the test classpath - Copy copyRestSpec = createCopyRestSpecTask() - project.sourceSets.test.output.builtBy(copyRestSpec) - // this must run after all projects have been configured, so we know any project // references can be accessed as a fully configured project.gradle.projectsEvaluated { @@ -83,12 +69,6 @@ class RestIntegTestTask extends DefaultTask { } } - /** Sets the includePackaged property */ - public void includePackaged(boolean include) { - includePackaged = include - } - - @Override public Task dependsOn(Object... dependencies) { runner.dependsOn(dependencies) @@ -114,37 +94,4 @@ class RestIntegTestTask extends DefaultTask { project.tasks.getByName("${name}Runner").configure(configure) } - Copy createCopyRestSpecTask() { - Boilerplate.maybeCreate(project.configurations, 'restSpec') { - project.dependencies.add( - 'restSpec', - BuildParams.internal ? 
project.project(':rest-api-spec') : - "org.elasticsearch:rest-api-spec:${VersionProperties.elasticsearch}" - ) - } - - return Boilerplate.maybeCreate(project.tasks, 'copyRestSpec', Copy) { Copy copy -> - copy.dependsOn project.configurations.restSpec - copy.into(project.sourceSets.test.output.resourcesDir) - copy.from({ project.zipTree(project.configurations.restSpec.singleFile) }) { - includeEmptyDirs = false - include 'rest-api-spec/**' - filesMatching('rest-api-spec/test/**') { FileCopyDetails details -> - if (includePackaged == false) { - details.exclude() - } - } - } - - if (project.plugins.hasPlugin(IdeaPlugin)) { - project.idea { - module { - if (scopes.TEST != null) { - scopes.TEST.plus.add(project.configurations.restSpec) - } - } - } - } - } - } } diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/StandaloneRestTestPlugin.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/StandaloneRestTestPlugin.groovy index b3d323bf3942f..d668aa4b6b7fa 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/StandaloneRestTestPlugin.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/StandaloneRestTestPlugin.groovy @@ -26,9 +26,9 @@ import org.elasticsearch.gradle.ExportElasticsearchBuildResourcesTask import org.elasticsearch.gradle.info.BuildParams import org.elasticsearch.gradle.info.GlobalBuildInfoPlugin import org.elasticsearch.gradle.precommit.PrecommitTasks +import org.elasticsearch.gradle.test.rest.RestResourcesPlugin import org.elasticsearch.gradle.testclusters.TestClustersPlugin import org.gradle.api.InvalidUserDataException -import org.gradle.api.JavaVersion import org.gradle.api.Plugin import org.gradle.api.Project import org.gradle.api.artifacts.Configuration @@ -42,6 +42,7 @@ import org.gradle.api.tasks.compile.JavaCompile import org.gradle.api.tasks.testing.Test import org.gradle.plugins.ide.eclipse.model.EclipseModel import org.gradle.plugins.ide.idea.model.IdeaModel + /** * Configures the build to 
compile tests against Elasticsearch's test framework * and run REST tests. Use BuildPlugin if you want to build main code as well @@ -74,6 +75,8 @@ class StandaloneRestTestPlugin implements Plugin { // only setup tests to build SourceSetContainer sourceSets = project.extensions.getByType(SourceSetContainer) SourceSet testSourceSet = sourceSets.create('test') + // need to apply plugin after test source sets are created + project.pluginManager.apply(RestResourcesPlugin) project.tasks.withType(Test) { Test test -> test.testClassesDirs = testSourceSet.output.classesDirs diff --git a/buildSrc/src/main/java/org/elasticsearch/gradle/docker/DockerSupportService.java b/buildSrc/src/main/java/org/elasticsearch/gradle/docker/DockerSupportService.java index 7ffa83c0ff4c9..0230e772b97b2 100644 --- a/buildSrc/src/main/java/org/elasticsearch/gradle/docker/DockerSupportService.java +++ b/buildSrc/src/main/java/org/elasticsearch/gradle/docker/DockerSupportService.java @@ -220,7 +220,7 @@ static Map parseOsRelease(final List osReleaseLines) { // remove optional leading and trailing quotes and whitespace final String value = parts[1].replaceAll("^['\"]?\\s*", "").replaceAll("\\s*['\"]?$", ""); - values.put(key, value); + values.put(key, value.toLowerCase()); }); return values; diff --git a/buildSrc/src/main/java/org/elasticsearch/gradle/precommit/TestingConventionsTasks.java b/buildSrc/src/main/java/org/elasticsearch/gradle/precommit/TestingConventionsTasks.java index 5b16ca7d40f1e..a662ff4dbcaeb 100644 --- a/buildSrc/src/main/java/org/elasticsearch/gradle/precommit/TestingConventionsTasks.java +++ b/buildSrc/src/main/java/org/elasticsearch/gradle/precommit/TestingConventionsTasks.java @@ -250,7 +250,7 @@ public void doCheck() throws IOException { Files.write(getSuccessMarker().toPath(), new byte[] {}, StandardOpenOption.CREATE); } else { getLogger().error(problems); - throw new IllegalStateException("Testing conventions are not honored"); + throw new 
IllegalStateException(String.format("Testing conventions [%s] are not honored", problems)); } } diff --git a/buildSrc/src/main/java/org/elasticsearch/gradle/test/rest/CopyRestApiTask.java b/buildSrc/src/main/java/org/elasticsearch/gradle/test/rest/CopyRestApiTask.java new file mode 100644 index 0000000000000..6e66df54dc48d --- /dev/null +++ b/buildSrc/src/main/java/org/elasticsearch/gradle/test/rest/CopyRestApiTask.java @@ -0,0 +1,195 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +package org.elasticsearch.gradle.test.rest; + +import org.elasticsearch.gradle.VersionProperties; +import org.elasticsearch.gradle.info.BuildParams; +import org.elasticsearch.gradle.tool.Boilerplate; +import org.gradle.api.DefaultTask; +import org.gradle.api.Project; +import org.gradle.api.artifacts.Configuration; +import org.gradle.api.file.ConfigurableFileCollection; +import org.gradle.api.file.FileTree; +import org.gradle.api.provider.ListProperty; +import org.gradle.api.tasks.Input; +import org.gradle.api.tasks.InputFiles; +import org.gradle.api.tasks.OutputDirectory; +import org.gradle.api.tasks.SkipWhenEmpty; +import org.gradle.api.tasks.SourceSet; +import org.gradle.api.tasks.TaskAction; +import org.gradle.api.tasks.util.PatternFilterable; +import org.gradle.api.tasks.util.PatternSet; +import org.gradle.internal.Factory; + +import javax.inject.Inject; +import java.io.File; +import java.io.IOException; +import java.nio.file.Files; +import java.util.Set; +import java.util.stream.Collectors; + +/** + * Copies the files needed for the Rest YAML specs to the current projects test resources output directory. + * This is intended to be be used from {@link RestResourcesPlugin} since the plugin wires up the needed + * configurations and custom extensions. 
+ * @see RestResourcesPlugin + */ +public class CopyRestApiTask extends DefaultTask { + private static final String COPY_TO = "rest-api-spec/api"; + final ListProperty includeCore = getProject().getObjects().listProperty(String.class); + final ListProperty includeXpack = getProject().getObjects().listProperty(String.class); + + Configuration coreConfig; + Configuration xpackConfig; + + private final PatternFilterable corePatternSet; + private final PatternFilterable xpackPatternSet; + + public CopyRestApiTask() { + corePatternSet = getPatternSetFactory().create(); + xpackPatternSet = getPatternSetFactory().create(); + } + + @Inject + protected Factory getPatternSetFactory() { + throw new UnsupportedOperationException(); + } + + @Input + public ListProperty getIncludeCore() { + return includeCore; + } + + @Input + public ListProperty getIncludeXpack() { + return includeXpack; + } + + @SkipWhenEmpty + @InputFiles + public FileTree getInputDir() { + xpackPatternSet.setIncludes(includeXpack.get().stream().map(prefix -> prefix + "*/**").collect(Collectors.toList())); + ConfigurableFileCollection fileCollection = getProject().files(xpackConfig.getAsFileTree().matching(xpackPatternSet)); + if (BuildParams.isInternal()) { + corePatternSet.setIncludes(includeCore.get().stream().map(prefix -> prefix + "*/**").collect(Collectors.toList())); + fileCollection.plus(coreConfig.getAsFileTree().matching(corePatternSet)); + } else { + fileCollection.plus(coreConfig); + } + // if project has rest tests or the includes are explicitly configured execute the task, else NO-SOURCE due to the null input + return projectHasYamlRestTests() || includeCore.get().isEmpty() == false || includeXpack.get().isEmpty() == false + ? 
fileCollection.getAsFileTree() + : null; + } + + @OutputDirectory + public File getOutputDir() { + return new File(getTestSourceSet().getOutput().getResourcesDir(), COPY_TO); + } + + @TaskAction + void copy() { + Project project = getProject(); + // always copy the core specs if the task executes + if (BuildParams.isInternal()) { + getLogger().debug("Rest specs for project [{}] will be copied to the test resources.", project.getPath()); + project.copy(c -> { + c.from(coreConfig.getSingleFile()); + c.into(getOutputDir()); + c.include(corePatternSet.getIncludes()); + }); + } else { + getLogger().debug( + "Rest specs for project [{}] will be copied to the test resources from the published jar (version: [{}]).", + project.getPath(), + VersionProperties.getElasticsearch() + ); + project.copy(c -> { + c.from(project.zipTree(coreConfig.getSingleFile())); + c.into(getTestSourceSet().getOutput().getResourcesDir()); // this ends up as the same dir as outputDir + c.include(includeCore.get().stream().map(prefix -> COPY_TO + "/" + prefix + "*/**").collect(Collectors.toList())); + }); + } + // only copy x-pack specs if explicitly instructed + if (includeXpack.get().isEmpty() == false) { + getLogger().debug("X-pack rest specs for project [{}] will be copied to the test resources.", project.getPath()); + project.copy(c -> { + c.from(xpackConfig.getSingleFile()); + c.into(getOutputDir()); + c.include(xpackPatternSet.getIncludes()); + }); + } + } + + /** + * Returns true if any files with a .yml extension exist the test resources rest-api-spec/test directory (from source or output dir) + */ + private boolean projectHasYamlRestTests() { + File testSourceResourceDir = getTestSourceResourceDir(); + File testOutputResourceDir = getTestOutputResourceDir(); // check output for cases where tests are copied programmatically + + if (testSourceResourceDir == null && testOutputResourceDir == null) { + return false; + } + try { + if (testSourceResourceDir != null) { + return new 
File(testSourceResourceDir, "rest-api-spec/test").exists() == false + || Files.walk(testSourceResourceDir.toPath().resolve("rest-api-spec/test")) + .anyMatch(p -> p.getFileName().toString().endsWith("yml")); + } + if (testOutputResourceDir != null) { + return new File(testOutputResourceDir, "rest-api-spec/test").exists() == false + || Files.walk(testOutputResourceDir.toPath().resolve("rest-api-spec/test")) + .anyMatch(p -> p.getFileName().toString().endsWith("yml")); + } + } catch (IOException e) { + throw new IllegalStateException(String.format("Error determining if this project [%s] has rest tests.", getProject()), e); + } + return false; + } + + private File getTestSourceResourceDir() { + SourceSet testSources = getTestSourceSet(); + if (testSources == null) { + return null; + } + Set resourceDir = testSources.getResources() + .getSrcDirs() + .stream() + .filter(f -> f.isDirectory() && f.getParentFile().getName().equals("test") && f.getName().equals("resources")) + .collect(Collectors.toSet()); + assert resourceDir.size() <= 1; + if (resourceDir.size() == 0) { + return null; + } + return resourceDir.iterator().next(); + } + + private File getTestOutputResourceDir() { + SourceSet testSources = getTestSourceSet(); + if (testSources == null) { + return null; + } + return testSources.getOutput().getResourcesDir(); + } + + private SourceSet getTestSourceSet() { + return Boilerplate.getJavaSourceSets(getProject()).findByName("test"); + } +} diff --git a/buildSrc/src/main/java/org/elasticsearch/gradle/test/rest/CopyRestTestsTask.java b/buildSrc/src/main/java/org/elasticsearch/gradle/test/rest/CopyRestTestsTask.java new file mode 100644 index 0000000000000..2fd5a207482a4 --- /dev/null +++ b/buildSrc/src/main/java/org/elasticsearch/gradle/test/rest/CopyRestTestsTask.java @@ -0,0 +1,141 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.gradle.test.rest; + +import org.elasticsearch.gradle.VersionProperties; +import org.elasticsearch.gradle.info.BuildParams; +import org.elasticsearch.gradle.tool.Boilerplate; +import org.gradle.api.DefaultTask; +import org.gradle.api.Project; +import org.gradle.api.artifacts.Configuration; +import org.gradle.api.file.ConfigurableFileCollection; +import org.gradle.api.file.FileTree; +import org.gradle.api.provider.ListProperty; +import org.gradle.api.tasks.Input; +import org.gradle.api.tasks.InputFiles; +import org.gradle.api.tasks.OutputDirectory; +import org.gradle.api.tasks.SkipWhenEmpty; +import org.gradle.api.tasks.SourceSet; +import org.gradle.api.tasks.TaskAction; +import org.gradle.api.tasks.util.PatternFilterable; +import org.gradle.api.tasks.util.PatternSet; +import org.gradle.internal.Factory; + +import javax.inject.Inject; +import java.io.File; +import java.util.stream.Collectors; + +/** + * Copies the Rest YAML test to the current projects test resources output directory. + * This is intended to be be used from {@link RestResourcesPlugin} since the plugin wires up the needed + * configurations and custom extensions. 
+ * @see RestResourcesPlugin + */ +public class CopyRestTestsTask extends DefaultTask { + private static final String COPY_TO = "rest-api-spec/test"; + final ListProperty includeCore = getProject().getObjects().listProperty(String.class); + final ListProperty includeXpack = getProject().getObjects().listProperty(String.class); + + Configuration coreConfig; + Configuration xpackConfig; + + private final PatternFilterable corePatternSet; + private final PatternFilterable xpackPatternSet; + + public CopyRestTestsTask() { + corePatternSet = getPatternSetFactory().create(); + xpackPatternSet = getPatternSetFactory().create(); + } + + @Inject + protected Factory getPatternSetFactory() { + throw new UnsupportedOperationException(); + } + + @Input + public ListProperty getIncludeCore() { + return includeCore; + } + + @Input + public ListProperty getIncludeXpack() { + return includeXpack; + } + + @SkipWhenEmpty + @InputFiles + public FileTree getInputDir() { + xpackPatternSet.setIncludes(includeXpack.get().stream().map(prefix -> prefix + "*/**").collect(Collectors.toList())); + ConfigurableFileCollection fileCollection = getProject().files(xpackConfig.getAsFileTree().matching(xpackPatternSet)); + if (BuildParams.isInternal()) { + corePatternSet.setIncludes(includeCore.get().stream().map(prefix -> prefix + "*/**").collect(Collectors.toList())); + fileCollection.plus(coreConfig.getAsFileTree().matching(corePatternSet)); + } else { + fileCollection.plus(coreConfig); + } + // copy tests only if explicitly requested + return includeCore.get().isEmpty() == false || includeXpack.get().isEmpty() == false ? 
fileCollection.getAsFileTree() : null; + } + + @OutputDirectory + public File getOutputDir() { + return new File(getTestSourceSet().getOutput().getResourcesDir(), COPY_TO); + } + + @TaskAction + void copy() { + Project project = getProject(); + // only copy core tests if explicitly instructed + if (includeCore.get().isEmpty() == false) { + if (BuildParams.isInternal()) { + getLogger().debug("Rest tests for project [{}] will be copied to the test resources.", project.getPath()); + project.copy(c -> { + c.from(coreConfig.getSingleFile()); + c.into(getOutputDir()); + c.include(corePatternSet.getIncludes()); + }); + + } else { + getLogger().debug( + "Rest tests for project [{}] will be copied to the test resources from the published jar (version: [{}]).", + project.getPath(), + VersionProperties.getElasticsearch() + ); + project.copy(c -> { + c.from(project.zipTree(coreConfig.getSingleFile())); + c.into(getTestSourceSet().getOutput().getResourcesDir()); // this ends up as the same dir as outputDir + c.include(includeCore.get().stream().map(prefix -> COPY_TO + "/" + prefix + "*/**").collect(Collectors.toList())); + }); + } + } + // only copy x-pack tests if explicitly instructed + if (includeXpack.get().isEmpty() == false) { + getLogger().debug("X-pack rest tests for project [{}] will be copied to the test resources.", project.getPath()); + project.copy(c -> { + c.from(xpackConfig.getSingleFile()); + c.into(getOutputDir()); + c.include(xpackPatternSet.getIncludes()); + }); + } + } + + private SourceSet getTestSourceSet() { + return Boilerplate.getJavaSourceSets(getProject()).findByName("test"); + } +} diff --git a/buildSrc/src/main/java/org/elasticsearch/gradle/test/rest/RestResourcesExtension.java b/buildSrc/src/main/java/org/elasticsearch/gradle/test/rest/RestResourcesExtension.java new file mode 100644 index 0000000000000..2865963bdb3bd --- /dev/null +++ b/buildSrc/src/main/java/org/elasticsearch/gradle/test/rest/RestResourcesExtension.java @@ -0,0 +1,79 @@ +/* + * 
Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.gradle.test.rest; + +import org.elasticsearch.gradle.info.BuildParams; +import org.gradle.api.Action; +import org.gradle.api.model.ObjectFactory; +import org.gradle.api.provider.ListProperty; + +import javax.inject.Inject; + +/** + * Custom extension to configure the {@link CopyRestApiTask} + */ +public class RestResourcesExtension { + + final RestResourcesSpec restApi; + final RestResourcesSpec restTests; + + @Inject + public RestResourcesExtension(ObjectFactory objects) { + restApi = new RestResourcesSpec(objects); + restTests = new RestResourcesSpec(objects); + } + + void restApi(Action spec) { + spec.execute(restApi); + } + + void restTests(Action spec) { + spec.execute(restTests); + } + + static class RestResourcesSpec { + + private final ListProperty includeCore; + private final ListProperty includeXpack; + + RestResourcesSpec(ObjectFactory objects) { + includeCore = objects.listProperty(String.class); + includeXpack = objects.listProperty(String.class); + } + + public void includeCore(String... include) { + this.includeCore.addAll(include); + } + + public void includeXpack(String... 
include) { + if (BuildParams.isInternal() == false) { + throw new IllegalStateException("Can not include x-pack rest resources from an external build."); + } + this.includeXpack.addAll(include); + } + + public ListProperty getIncludeCore() { + return includeCore; + } + + public ListProperty getIncludeXpack() { + return includeXpack; + } + } +} diff --git a/buildSrc/src/main/java/org/elasticsearch/gradle/test/rest/RestResourcesPlugin.java b/buildSrc/src/main/java/org/elasticsearch/gradle/test/rest/RestResourcesPlugin.java new file mode 100644 index 0000000000000..a512b9b1fc025 --- /dev/null +++ b/buildSrc/src/main/java/org/elasticsearch/gradle/test/rest/RestResourcesPlugin.java @@ -0,0 +1,138 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.gradle.test.rest; + +import org.elasticsearch.gradle.VersionProperties; +import org.elasticsearch.gradle.info.BuildParams; +import org.gradle.api.Plugin; +import org.gradle.api.Project; +import org.gradle.api.artifacts.Dependency; +import org.gradle.api.provider.Provider; + +import java.util.Map; + +/** + *

+ * Gradle plugin to help configure {@link CopyRestApiTask}'s and {@link CopyRestTestsTask} that copies the artifacts needed for the Rest API + * spec and YAML based rest tests. + *

+ * Rest API specification:
+ * When the {@link RestResourcesPlugin} has been applied the {@link CopyRestApiTask} will automatically copy the core Rest API specification + * if there are any Rest YAML tests present in source, or copied from {@link CopyRestTestsTask} output. X-pack specs must be explicitly + * declared to be copied. + *
+ * For example: + *
+ * restResources {
+ *   restApi {
+ *     includeXpack 'enrich'
+ *   }
+ * }
+ * 
+ * Will copy the entire core Rest API specification (assuming the project has tests) and any of the X-pack specs starting with enrich*. + * It is recommended (but not required) to also explicitly declare which core specs your project depends on to help optimize the caching + * behavior. + * For example: + *
+ * restResources {
+ *   restApi {
+ *     includeCore 'index', 'cat'
+ *     includeXpack 'enrich'
+ *   }
+ * }
+ * 
+ *
+ * Rest YAML tests :
+ * When the {@link RestResourcesPlugin} has been applied the {@link CopyRestTestsTask} will copy the Rest YAML tests if explicitly + * configured with `includeCore` or `includeXpack` through the `restResources.restTests` extension. + * For example: + *
+ * restResources {
+ *  restApi {
+ *      includeXpack 'graph'
+ *   }
+ *   restTests {
+ *     includeXpack 'graph'
+ *   }
+ * }
+ * 
+ * Will copy any of the the x-pack tests that start with graph, and will copy the X-pack graph specification, as well as the full core + * Rest API specification. + * + * @see CopyRestApiTask + * @see CopyRestTestsTask + */ +public class RestResourcesPlugin implements Plugin { + + private static final String EXTENSION_NAME = "restResources"; + + @Override + public void apply(Project project) { + RestResourcesExtension extension = project.getExtensions().create(EXTENSION_NAME, RestResourcesExtension.class); + + Provider copyRestYamlTestTask = project.getTasks() + .register("copyYamlTestsTask", CopyRestTestsTask.class, task -> { + task.includeCore.set(extension.restTests.getIncludeCore()); + task.includeXpack.set(extension.restTests.getIncludeXpack()); + task.coreConfig = project.getConfigurations().create("restTest"); + if (BuildParams.isInternal()) { + Dependency restTestdependency = project.getDependencies() + .project(Map.of("path", ":rest-api-spec", "configuration", "restTests")); + project.getDependencies().add(task.coreConfig.getName(), restTestdependency); + + task.xpackConfig = project.getConfigurations().create("restXpackTest"); + Dependency restXPackTestdependency = project.getDependencies() + .project(Map.of("path", ":x-pack:plugin", "configuration", "restXpackTests")); + project.getDependencies().add(task.xpackConfig.getName(), restXPackTestdependency); + task.dependsOn(task.xpackConfig); + } else { + Dependency dependency = project.getDependencies() + .create("org.elasticsearch:rest-api-spec:" + VersionProperties.getElasticsearch()); + project.getDependencies().add(task.coreConfig.getName(), dependency); + } + task.dependsOn(task.coreConfig); + }); + + Provider copyRestYamlSpecTask = project.getTasks() + .register("copyRestApiSpecsTask", CopyRestApiTask.class, task -> { + task.includeCore.set(extension.restApi.getIncludeCore()); + task.includeXpack.set(extension.restApi.getIncludeXpack()); + task.dependsOn(copyRestYamlTestTask); + task.coreConfig = 
project.getConfigurations().create("restSpec"); + if (BuildParams.isInternal()) { + Dependency restSpecDependency = project.getDependencies() + .project(Map.of("path", ":rest-api-spec", "configuration", "restSpecs")); + project.getDependencies().add(task.coreConfig.getName(), restSpecDependency); + + task.xpackConfig = project.getConfigurations().create("restXpackSpec"); + Dependency restXpackSpecDependency = project.getDependencies() + .project(Map.of("path", ":x-pack:plugin", "configuration", "restXpackSpecs")); + project.getDependencies().add(task.xpackConfig.getName(), restXpackSpecDependency); + task.dependsOn(task.xpackConfig); + } else { + Dependency dependency = project.getDependencies() + .create("org.elasticsearch:rest-api-spec:" + VersionProperties.getElasticsearch()); + project.getDependencies().add(task.coreConfig.getName(), dependency); + } + task.dependsOn(task.coreConfig); + }); + + project.getTasks().named("processTestResources").configure(t -> t.dependsOn(copyRestYamlSpecTask)); + } +} diff --git a/buildSrc/src/main/resources/META-INF/gradle-plugins/elasticsearch.rest-resources.properties b/buildSrc/src/main/resources/META-INF/gradle-plugins/elasticsearch.rest-resources.properties new file mode 100644 index 0000000000000..af2d3e866ea05 --- /dev/null +++ b/buildSrc/src/main/resources/META-INF/gradle-plugins/elasticsearch.rest-resources.properties @@ -0,0 +1,20 @@ +# +# Licensed to Elasticsearch under one or more contributor +# license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright +# ownership. Elasticsearch licenses this file to you under +# the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +# + +implementation-class=org.elasticsearch.gradle.test.rest.RestResourcesPlugin diff --git a/buildSrc/src/main/resources/minimumGradleVersion b/buildSrc/src/main/resources/minimumGradleVersion index 913671cdf7af5..0df17dd0f6a31 100644 --- a/buildSrc/src/main/resources/minimumGradleVersion +++ b/buildSrc/src/main/resources/minimumGradleVersion @@ -1 +1 @@ -6.2 \ No newline at end of file +6.2.1 \ No newline at end of file diff --git a/buildSrc/src/test/java/org/elasticsearch/gradle/docker/DockerSupportServiceTests.java b/buildSrc/src/test/java/org/elasticsearch/gradle/docker/DockerSupportServiceTests.java index cba7590701154..5258d3cb8e737 100644 --- a/buildSrc/src/test/java/org/elasticsearch/gradle/docker/DockerSupportServiceTests.java +++ b/buildSrc/src/test/java/org/elasticsearch/gradle/docker/DockerSupportServiceTests.java @@ -38,12 +38,12 @@ public void testParseOsReleaseOnOracle() { expected.put("CPE_NAME", "cpe:/o:oracle:linux:6:10:server"); expected.put("HOME_URL" + "", "https://linux.oracle.com/"); expected.put("ID", "ol"); - expected.put("NAME", "Oracle Linux Server"); - expected.put("ORACLE_BUGZILLA_PRODUCT" + "", "Oracle Linux 6"); + expected.put("NAME", "oracle linux server"); + expected.put("ORACLE_BUGZILLA_PRODUCT" + "", "oracle linux 6"); expected.put("ORACLE_BUGZILLA_PRODUCT_VERSION", "6.10"); - expected.put("ORACLE_SUPPORT_PRODUCT", "Oracle Linux"); + expected.put("ORACLE_SUPPORT_PRODUCT", "oracle linux"); expected.put("ORACLE_SUPPORT_PRODUCT_VERSION", "6.10"); - expected.put("PRETTY_NAME", "Oracle Linux Server 6.10"); + 
expected.put("PRETTY_NAME", "oracle linux server 6.10"); expected.put("VERSION", "6.10"); expected.put("VERSION_ID", "6.10"); @@ -58,7 +58,7 @@ public void testRemoveTrailingWhitespace() { final Map results = parseOsRelease(lines); - final Map expected = Map.of("NAME", "Oracle Linux Server"); + final Map expected = Map.of("NAME", "oracle linux server"); assertThat(expected, equalTo(results)); } @@ -71,7 +71,7 @@ public void testRemoveComments() { final Map results = parseOsRelease(lines); - final Map expected = Map.of("NAME", "Oracle Linux Server"); + final Map expected = Map.of("NAME", "oracle linux server"); assertThat(expected, equalTo(results)); } diff --git a/client/rest-high-level/build.gradle b/client/rest-high-level/build.gradle index e707dff3998e0..fabf6e70eacf3 100644 --- a/client/rest-high-level/build.gradle +++ b/client/rest-high-level/build.gradle @@ -25,6 +25,7 @@ apply plugin: 'elasticsearch.rest-test' apply plugin: 'nebula.maven-base-publish' apply plugin: 'nebula.maven-scm' apply plugin: 'com.github.johnrengelman.shadow' +apply plugin: 'elasticsearch.rest-resources' group = 'org.elasticsearch.client' archivesBaseName = 'elasticsearch-rest-high-level-client' @@ -37,15 +38,10 @@ publishing { } } -configurations { - restSpec -} - -idea { - module { - if (scopes.TEST != null) { - scopes.TEST.plus.add(project.configurations.restSpec) - } +restResources { + //we need to copy the yaml spec so we can check naming (see RestHighlevelClientTests#testApiNamingConventions) + restApi { + includeCore '*' } } @@ -72,16 +68,9 @@ dependencies { exclude group: 'org.elasticsearch', module: 'elasticsearch-rest-high-level-client' } testCompile(project(':x-pack:plugin:eql')) - - restSpec project(':rest-api-spec') } -//we need to copy the yaml spec so we can check naming (see RestHighlevelClientTests#testApiNamingConventions) processTestResources { - dependsOn configurations.restSpec // so that configurations resolve - from({ zipTree(configurations.restSpec.singleFile) }) 
{ - include 'rest-api-spec/api/**' - } from(project(':client:rest-high-level').file('src/test/resources')) } diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/GraphIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/GraphIT.java index b5862178666e3..bd4e0e17a8b5a 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/GraphIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/GraphIT.java @@ -37,33 +37,33 @@ import java.util.Map; public class GraphIT extends ESRestHighLevelClientTestCase { - + @Before public void indexDocuments() throws IOException { // Create chain of doc IDs across indices 1->2->3 Request doc1 = new Request(HttpPut.METHOD_NAME, "/index1/_doc/1"); doc1.setJsonEntity("{ \"num\":[1], \"const\":\"start\"}"); client().performRequest(doc1); - + Request doc2 = new Request(HttpPut.METHOD_NAME, "/index2/_doc/1"); doc2.setJsonEntity("{\"num\":[1,2], \"const\":\"foo\"}"); client().performRequest(doc2); - + Request doc3 = new Request(HttpPut.METHOD_NAME, "/index2/_doc/2"); doc3.setJsonEntity("{\"num\":[2,3], \"const\":\"foo\"}"); - client().performRequest(doc3); + client().performRequest(doc3); Request doc4 = new Request(HttpPut.METHOD_NAME, "/index_no_field_data/_doc/2"); doc4.setJsonEntity("{\"num\":\"string\", \"const\":\"foo\"}"); - client().performRequest(doc4); - + client().performRequest(doc4); + Request doc5 = new Request(HttpPut.METHOD_NAME, "/index_no_field_data/_doc/2"); doc5.setJsonEntity("{\"num\":[2,4], \"const\":\"foo\"}"); - client().performRequest(doc5); + client().performRequest(doc5); + - - client().performRequest(new Request(HttpPost.METHOD_NAME, "/_refresh")); - } + client().performRequest(new Request(HttpPost.METHOD_NAME, "/_refresh")); + } public void testCleanExplore() throws Exception { GraphExploreRequest graphExploreRequest = new GraphExploreRequest(); @@ -75,7 +75,7 @@ public void testCleanExplore() throws Exception { if (i == 0) { guidingQuery = 
new TermQueryBuilder("const.keyword", "start"); } else if (randomBoolean()){ - guidingQuery = new TermQueryBuilder("const.keyword", "foo"); + guidingQuery = new TermQueryBuilder("const.keyword", "foo"); } Hop hop = graphExploreRequest.createNextHop(guidingQuery); VertexRequest vr = hop.addVertexRequest("num"); @@ -94,13 +94,13 @@ public void testCleanExplore() throws Exception { } assertEquals(expectedTermsAndDepths, actualTermsAndDepths); assertThat(exploreResponse.isTimedOut(), Matchers.is(false)); - ShardOperationFailedException[] failures = exploreResponse.getShardFailures(); + ShardOperationFailedException[] failures = exploreResponse.getShardFailures(); assertThat(failures.length, Matchers.equalTo(0)); - + } public void testBadExplore() throws Exception { - //Explore indices where lack of fielddata=true on one index leads to partial failures + //Explore indices where lack of fielddata=true on one index leads to partial failures GraphExploreRequest graphExploreRequest = new GraphExploreRequest(); graphExploreRequest.indices("index1", "index2", "index_no_field_data"); graphExploreRequest.useSignificance(false); @@ -110,7 +110,7 @@ public void testBadExplore() throws Exception { if (i == 0) { guidingQuery = new TermQueryBuilder("const.keyword", "start"); } else if (randomBoolean()){ - guidingQuery = new TermQueryBuilder("const.keyword", "foo"); + guidingQuery = new TermQueryBuilder("const.keyword", "foo"); } Hop hop = graphExploreRequest.createNextHop(guidingQuery); VertexRequest vr = hop.addVertexRequest("num"); @@ -131,9 +131,9 @@ public void testBadExplore() throws Exception { assertThat(exploreResponse.isTimedOut(), Matchers.is(false)); ShardOperationFailedException[] failures = exploreResponse.getShardFailures(); assertThat(failures.length, Matchers.equalTo(1)); - assertTrue(failures[0].reason().contains("Fielddata is disabled")); - + assertTrue(failures[0].reason().contains("Text fields are not optimised for operations that require per-document field 
data")); + } - - + + } diff --git a/distribution/archives/build.gradle b/distribution/archives/build.gradle index c72d09adde302..029eb0733023f 100644 --- a/distribution/archives/build.gradle +++ b/distribution/archives/build.gradle @@ -317,9 +317,14 @@ configure(subprojects.findAll { it.name == 'integ-test-zip' }) { group = "org.elasticsearch.distribution.integ-test-zip" + restResources { + restTests { + includeCore '*' + } + } + integTest { dependsOn assemble - includePackaged = true } processTestResources { diff --git a/distribution/docker/build.gradle b/distribution/docker/build.gradle index e0573155b7af8..a6b2295c5318e 100644 --- a/distribution/docker/build.gradle +++ b/distribution/docker/build.gradle @@ -14,13 +14,11 @@ configurations { dockerPlugins dockerSource ossDockerSource - restSpec } dependencies { dockerSource project(path: ":distribution:archives:linux-tar") ossDockerSource project(path: ":distribution:archives:oss-linux-tar") - restSpec project(':rest-api-spec') } ext.expansions = { oss, local -> @@ -138,12 +136,8 @@ preProcessFixture { } processTestResources { - from({ zipTree(configurations.restSpec.singleFile) }) { - include 'rest-api-spec/api/**' - } from project(':x-pack:plugin:core') .file('src/test/resources/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.jks') - dependsOn configurations.restSpec } task integTest(type: Test) { diff --git a/distribution/tools/plugin-cli/src/main/java/org/elasticsearch/plugins/InstallPluginCommand.java b/distribution/tools/plugin-cli/src/main/java/org/elasticsearch/plugins/InstallPluginCommand.java index f034e8ce6310d..ced76ee3b52d8 100644 --- a/distribution/tools/plugin-cli/src/main/java/org/elasticsearch/plugins/InstallPluginCommand.java +++ b/distribution/tools/plugin-cli/src/main/java/org/elasticsearch/plugins/InstallPluginCommand.java @@ -545,21 +545,24 @@ private Path downloadAndValidate( final BufferedReader checksumReader = new BufferedReader(new InputStreamReader(in, 
StandardCharsets.UTF_8)); final String checksumLine = checksumReader.readLine(); final String[] fields = checksumLine.split(" {2}"); - if (fields.length != 2) { + if (officialPlugin && fields.length != 2 || officialPlugin == false && fields.length > 2) { throw new UserException(ExitCodes.IO_ERROR, "Invalid checksum file at " + checksumUrl); } expectedChecksum = fields[0]; - final String[] segments = URI.create(urlString).getPath().split("/"); - final String expectedFile = segments[segments.length - 1]; - if (fields[1].equals(expectedFile) == false) { - final String message = String.format( - Locale.ROOT, - "checksum file at [%s] is not for this plugin, expected [%s] but was [%s]", - checksumUrl, - expectedFile, - fields[1] - ); - throw new UserException(ExitCodes.IO_ERROR, message); + if (fields.length == 2) { + // checksum line contains filename as well + final String[] segments = URI.create(urlString).getPath().split("/"); + final String expectedFile = segments[segments.length - 1]; + if (fields[1].equals(expectedFile) == false) { + final String message = String.format( + Locale.ROOT, + "checksum file at [%s] is not for this plugin, expected [%s] but was [%s]", + checksumUrl, + expectedFile, + fields[1] + ); + throw new UserException(ExitCodes.IO_ERROR, message); + } } if (checksumReader.readLine() != null) { throw new UserException(ExitCodes.IO_ERROR, "Invalid checksum file at " + checksumUrl); diff --git a/distribution/tools/plugin-cli/src/test/java/org/elasticsearch/plugins/InstallPluginCommandTests.java b/distribution/tools/plugin-cli/src/test/java/org/elasticsearch/plugins/InstallPluginCommandTests.java index 02a2c860859e6..dbcfb9e66d9ad 100644 --- a/distribution/tools/plugin-cli/src/test/java/org/elasticsearch/plugins/InstallPluginCommandTests.java +++ b/distribution/tools/plugin-cli/src/test/java/org/elasticsearch/plugins/InstallPluginCommandTests.java @@ -114,6 +114,7 @@ import static org.hamcrest.Matchers.endsWith; import static 
org.hamcrest.Matchers.hasToString; import static org.hamcrest.Matchers.not; +import static org.hamcrest.Matchers.startsWith; @LuceneTestCase.SuppressFileSystems("*") public class InstallPluginCommandTests extends ESTestCase { @@ -1109,6 +1110,45 @@ public void testMavenSha1Backcompat() throws Exception { assertTrue(terminal.getOutput(), terminal.getOutput().contains("sha512 not found, falling back to sha1")); } + public void testMavenChecksumWithoutFilename() throws Exception { + String url = "https://repo1.maven.org/maven2/mygroup/myplugin/1.0.0/myplugin-1.0.0.zip"; + MessageDigest digest = MessageDigest.getInstance("SHA-512"); + assertInstallPluginFromUrl( + "mygroup:myplugin:1.0.0", + "myplugin", + url, + null, + false, + ".sha512", + checksum(digest), + null, + (b, p) -> null + ); + } + + public void testOfficialChecksumWithoutFilename() throws Exception { + String url = "https://artifacts.elastic.co/downloads/elasticsearch-plugins/analysis-icu/analysis-icu-" + + Build.CURRENT.getQualifiedVersion() + + ".zip"; + MessageDigest digest = MessageDigest.getInstance("SHA-512"); + UserException e = expectThrows( + UserException.class, + () -> assertInstallPluginFromUrl( + "analysis-icu", + "analysis-icu", + url, + null, + false, + ".sha512", + checksum(digest), + null, + (b, p) -> null + ) + ); + assertEquals(ExitCodes.IO_ERROR, e.exitCode); + assertThat(e.getMessage(), startsWith("Invalid checksum file")); + } + public void testOfficialShaMissing() throws Exception { String url = "https://artifacts.elastic.co/downloads/elasticsearch-plugins/analysis-icu/analysis-icu-" + Build.CURRENT.getQualifiedVersion() diff --git a/docs/build.gradle b/docs/build.gradle index 299e48d9872ed..0deeb0ea9cdd2 100644 --- a/docs/build.gradle +++ b/docs/build.gradle @@ -41,6 +41,12 @@ buildRestTests.expectedUnconvertedCandidates = [ 'reference/ml/anomaly-detection/apis/update-job.asciidoc' ] +restResources { + restApi { + includeCore '*' + } +} + testClusters.integTest { if 
(singleNode().testDistribution == DEFAULT) { setting 'xpack.license.self_generated.type', 'trial' diff --git a/docs/reference/cat.asciidoc b/docs/reference/cat.asciidoc index 1d6601ed3b7b1..6f12302b30ec0 100644 --- a/docs/reference/cat.asciidoc +++ b/docs/reference/cat.asciidoc @@ -4,10 +4,18 @@ ["float",id="intro"] === Introduction -JSON is great... for computers. Even if it's pretty-printed, trying -to find relationships in the data is tedious. Human eyes, especially -when looking at a terminal, need compact and aligned text. The cat API -aims to meet this need. +JSON is great... for computers. Even if it's pretty-printed, trying +to find relationships in the data is tedious. Human eyes, especially +when looking at a terminal, need compact and aligned text. The cat APIs +aim to meet this need. + +[IMPORTANT] +==== +cat APIs are only intended for human consumption using the +{kibana-ref}/console-kibana.html[Kibana console] or command line. They are _not_ +intended for use by applications. For application consumption, we recommend +using a corresponding JSON API. +==== All the cat commands accept a query string parameter `help` to see all the headers and info they provide, and the `/_cat` command alone lists all @@ -221,6 +229,10 @@ include::cat/allocation.asciidoc[] include::cat/count.asciidoc[] +include::cat/dataframeanalytics.asciidoc[] + +include::cat/datafeeds.asciidoc[] + include::cat/fielddata.asciidoc[] include::cat/health.asciidoc[] diff --git a/docs/reference/cat/datafeeds.asciidoc b/docs/reference/cat/datafeeds.asciidoc new file mode 100644 index 0000000000000..95a830aa82347 --- /dev/null +++ b/docs/reference/cat/datafeeds.asciidoc @@ -0,0 +1,155 @@ +[role="xpack"] +[testenv="platinum"] +[[cat-datafeeds]] +=== cat {dfeeds} API +++++ +cat {dfeeds} +++++ + +Returns configuration and usage information about {dfeeds}. 
+ +[[cat-datafeeds-request]] +==== {api-request-title} + +`GET /_cat/ml/datafeeds/` + + +`GET /_cat/ml/datafeeds` + +[[cat-datafeeds-prereqs]] +==== {api-prereq-title} + +* If the {es} {security-features} are enabled, you must have `monitor_ml`, +`monitor`, `manage_ml`, or `manage` cluster privileges to use this API. See +<> and {ml-docs}/setup.html[Set up {ml-features}]. + +//// +[[cat-datafeeds-desc]] +==== {api-description-title} + +TBD: This API returns a maximum of 10,000 {dfeeds}. +//// + +[[cat-datafeeds-path-params]] +==== {api-path-parms-title} + +``:: +(Optional, string) +include::{docdir}/ml/ml-shared.asciidoc[tag=datafeed-id] + +[[cat-datafeeds-query-params]] +==== {api-query-parms-title} + +`allow_no_datafeeds`:: +(Optional, boolean) +include::{docdir}/ml/ml-shared.asciidoc[tag=allow-no-datafeeds] + +include::{docdir}/rest-api/common-parms.asciidoc[tag=http-format] + +include::{docdir}/rest-api/common-parms.asciidoc[tag=cat-h] + +include::{docdir}/rest-api/common-parms.asciidoc[tag=help] + +include::{docdir}/rest-api/common-parms.asciidoc[tag=cat-s] + +include::{docdir}/rest-api/common-parms.asciidoc[tag=time] + +include::{docdir}/rest-api/common-parms.asciidoc[tag=cat-v] + +[[cat-datafeeds-results]] +==== {api-response-body-title} + +`assignment_explanation`:: +include::{docdir}/ml/ml-shared.asciidoc[tag=assignment-explanation] ++ +To retrieve this information, specify the `ae` column in the `h` query parameter. + +`bucket.count`:: +include::{docdir}/ml/ml-shared.asciidoc[tag=bucket-count] ++ +To retrieve this information, specify the `bc` or `bucketCount` column in the +`h` query parameter. + +`id`:: +include::{docdir}/ml/ml-shared.asciidoc[tag=datafeed-id] ++ +To retrieve this information, specify the `id` column in the `h` query parameter. + +`node.address`:: +The network address of the node. ++ +include::{docdir}/ml/ml-shared.asciidoc[tag=node] ++ +To retrieve this information, specify the `na` or `nodeAddress` column in the +`h` query parameter. 
+ +`node.ephemeral_id`:: +The ephemeral ID of the node. ++ +include::{docdir}/ml/ml-shared.asciidoc[tag=node] ++ +To retrieve this information, specify the `ne` or `nodeEphemeralId` column in +the `h` query parameter. + +`node.id`:: +The unique identifier of the node. ++ +include::{docdir}/ml/ml-shared.asciidoc[tag=node] ++ +To retrieve this information, specify the `ni` or `nodeId` column in the `h` +query parameter. + +`node.name`:: +The node name. ++ +include::{docdir}/ml/ml-shared.asciidoc[tag=node] ++ +To retrieve this information, specify the `nn` or `nodeName` column in the `h` +query parameter. + +`search.bucket_avg`:: +include::{docdir}/ml/ml-shared.asciidoc[tag=search-bucket-avg] ++ +To retrieve this information, specify the `sba` or `searchBucketAvg` column in +the `h` query parameter. + +`search.count`:: +include::{docdir}/ml/ml-shared.asciidoc[tag=search-count] ++ +To retrieve this information, specify the `sc` or `searchCount` column in the +`h` query parameter. + +`search.exp_avg_hour`:: +include::{docdir}/ml/ml-shared.asciidoc[tag=search-exp-avg-hour] ++ +To retrieve this information, specify the `seah` or `searchExpAvgHour` column in +the `h` query parameter. + +`search.time`:: +include::{docdir}/ml/ml-shared.asciidoc[tag=search-time] ++ +To retrieve this information, specify the `st` or `searchTime` column in the `h` +query parameter. + +`state`:: +include::{docdir}/ml/ml-shared.asciidoc[tag=state-datafeed] ++ +To retrieve this information, specify the `s` column in the `h` query parameter. 
+ +[[cat-datafeeds-example]] +==== {api-examples-title} + +[source,console] +-------------------------------------------------- +GET _cat/ml/datafeeds?v +-------------------------------------------------- +// TEST[skip:kibana sample data] + +[source,console-result] +---- +id state bucket.count search.count +datafeed-high_sum_total_sales stopped 743 7 +datafeed-low_request_rate stopped 1457 3 +datafeed-response_code_rates stopped 1460 18 +datafeed-url_scanning stopped 1460 18 +---- +// TESTRESPONSE[skip:kibana sample data] diff --git a/docs/reference/cat/dataframeanalytics.asciidoc b/docs/reference/cat/dataframeanalytics.asciidoc new file mode 100644 index 0000000000000..0537a0cda83c4 --- /dev/null +++ b/docs/reference/cat/dataframeanalytics.asciidoc @@ -0,0 +1,165 @@ +[role="xpack"] +[testenv="platinum"] +[[cat-dfanalytics]] +=== cat {dfanalytics} API +++++ +cat {dfanalytics} +++++ + +Returns configuration and usage information about {dfanalytics-jobs}. + + +[[cat-dfanalytics-request]] +==== {api-request-title} + +`GET /_cat/ml/data_frame/analytics/` + + +`GET /_cat/ml/data_frame/analytics` + + +[[cat-dfanalytics-prereqs]] +==== {api-prereq-title} + +If the {es} {security-features} are enabled, you must have the following +privileges: + +* cluster: `monitor_ml` + +For more information, see <> and {ml-docs}/setup.html[Set up {ml-features}]. 
+ + +//// +[[cat-dfanalytics-desc]] +==== {api-description-title} + +TBD +//// + +[[cat-dfanalytics-path-params]] +==== {api-path-parms-title} + +``:: +(Optional, string) +include::{docdir}/ml/ml-shared.asciidoc[tag=job-id-data-frame-analytics-default] + + +[[cat-dfanalytics-query-params]] +==== {api-query-parms-title} + +include::{docdir}/rest-api/common-parms.asciidoc[tag=http-format] + +include::{docdir}/rest-api/common-parms.asciidoc[tag=cat-h] + +include::{docdir}/rest-api/common-parms.asciidoc[tag=help] + +include::{docdir}/rest-api/common-parms.asciidoc[tag=cat-s] + +include::{docdir}/rest-api/common-parms.asciidoc[tag=time] + +include::{docdir}/rest-api/common-parms.asciidoc[tag=cat-v] + + +[[cat-dfanalytics-results]] +==== {api-response-body-title} + +`assignment_explanation`:: +include::{docdir}/ml/ml-shared.asciidoc[tag=assignment-explanation-dfanalytics] ++ +-- +To retrieve this information, specify the `ae` column in the `h` query +parameter. +-- + +`create_time`:: +The time when the {dfanalytics-job} was created. To retrieve this information, +specify the `ct` or `createTime` column in the `h` query parameter. + +`description`:: +include::{docdir}/ml/ml-shared.asciidoc[tag=description-dfa] ++ +-- +To retrieve this information, specify the `d` column in the `h` query parameter. +-- + +`dest_index`:: +Name of the destination index. To retrieve this information, specify the `di` or +the `destIndex` column in the `h` query parameter. + +`failure_reason`:: +Contains messages about the reason why a {dfanalytics-job} failed. To retrieve +this information, specify the `fr` or the `failureReason` column in the `h` +query parameter. + +`id`:: +include::{docdir}/ml/ml-shared.asciidoc[tag=job-id-data-frame-analytics] ++ +-- +To retrieve this information, specify the `id` column in the `h` query +parameter. +-- + +`model_memory_limit`:: +The approximate maximum amount of memory resources that are permitted for the +{dfanalytics-job}. 
To retrieve this information, specify the `mml` or the +`modelMemoryLimit` column in the `h` query parameter. + +`node.address`:: +The network address of the node that the {dfanalytics-job} is assigned to. To +retrieve this information, specify the `na` or `nodeAddress` column in the `h` +query parameter. + +`node.ephemeral_id`:: +The ephemeral ID of the node that the {dfanalytics-job} is assigned to. To +retrieve this information, specify the `ne` or `nodeEphemeralId` column in the +`h` query parameter. + +`node.id`:: +The unique identifier of the node that the {dfanalytics-job} is assigned to. To +retrieve this information, specify the `ni` or `nodeId` column in the `h` query +parameter. + +`node.name`:: +The name of the node that the {dfanalytics-job} is assigned to. To retrieve this +information, specify the `nn` or `nodeName` column in the `h` query +parameter. + +`progress`:: +The progress report of the {dfanalytics-job} by phase. To retrieve this +information, specify the `p` column in the `h` query parameter. + +`source_index`:: +Name of the source index. To retrieve this information, specify the `si` or the +`sourceIndex` column in the `h` query parameter. + +`state`:: +Current state of the {dfanalytics-job}. To retrieve this information, specify +the `s` column in the `h` query parameter. + +`type`:: +The type of analysis that the {dfanalytics-job} performs. To retrieve this +information, specify the `t` column in the `h` query parameter. + +`version`:: +The {es} version number in which the {dfanalytics-job} was created. To retrieve +this information, specify the `v` column in the `h` query parameter. 
+ + +[[cat-dfanalytics-example]] +==== {api-examples-title} + +[source,console] +-------------------------------------------------- +GET _cat/ml/data_frame/analytics?v +-------------------------------------------------- +// TEST[skip:kibana sample data] + +[source,console-result] +---- +id create_time type state +classifier_job_1 2020-02-12T11:49:09.594Z classification stopped +classifier_job_2 2020-02-12T11:49:14.479Z classification stopped +classifier_job_3 2020-02-12T11:49:16.928Z classification stopped +classifier_job_4 2020-02-12T11:49:19.127Z classification stopped +classifier_job_5 2020-02-12T11:49:21.349Z classification stopped +---- +// TESTRESPONSE[skip:kibana sample data] diff --git a/docs/reference/docs/termvectors.asciidoc b/docs/reference/docs/termvectors.asciidoc index abd8fc6de0960..6d999d5b49737 100644 --- a/docs/reference/docs/termvectors.asciidoc +++ b/docs/reference/docs/termvectors.asciidoc @@ -216,7 +216,7 @@ PUT /twitter/_doc/1 "text" : "twitter test test test " } -PUT /twitter/_doc/2 +PUT /twitter/_doc/2?refresh=wait_for { "fullname" : "Jane Doe", "text" : "Another twitter test ..." diff --git a/docs/reference/how-to/indexing-speed.asciidoc b/docs/reference/how-to/indexing-speed.asciidoc index 52d5d04699d1f..e5a84f8ddc953 100644 --- a/docs/reference/how-to/indexing-speed.asciidoc +++ b/docs/reference/how-to/indexing-speed.asciidoc @@ -58,15 +58,18 @@ gets indexed and when it becomes visible, increasing the `30s`, might help improve indexing speed. [float] -=== Disable refresh and replicas for initial loads +=== Disable replicas for initial loads -If you need to load a large amount of data at once, you should disable refresh -by setting `index.refresh_interval` to `-1` and set `index.number_of_replicas` -to `0`. This will temporarily put your index at risk since the loss of any shard -will cause data loss, but at the same time indexing will be faster since -documents will be indexed only once. 
Once the initial loading is finished, you -can set `index.refresh_interval` and `index.number_of_replicas` back to their -original values. +If you have a large amount of data that you want to load all at once into +Elasticsearch, it may be beneficial to set `index.number_of_replicas` to `0` in +order to speed up indexing. Having no replicas means that losing a single node +may incur data loss, so it is important that the data lives elsewhere so that +this initial load can be retried in case of an issue. Once the initial load is +finished, you can set `index.number_of_replicas` back to its original value. + +If `index.refresh_interval` is configured in the index settings, it may further +help to unset it during this initial load and setting it back to its original +value once the initial load is finished. [float] === Disable swapping @@ -128,6 +131,15 @@ The default is `10%` which is often plenty: for example, if you give the JVM 10GB of memory, it will give 1GB to the index buffer, which is enough to host two shards that are heavily indexing. +[float] +=== Use {ccr} to prevent searching from stealing resources from indexing + +Within a single cluster, indexing and searching can compete for resources. By +setting up two clusters, configuring <<xpack-ccr,{ccr}>> to replicate data from +one cluster to the other one, and routing all searches to the cluster that has +the follower indices, search activity will no longer steal resources from +indexing on the cluster that hosts the leader indices. + +[float] === Additional optimizations diff --git a/docs/reference/ilm/ilm-with-existing-indices.asciidoc b/docs/reference/ilm/ilm-with-existing-indices.asciidoc index 57b4fe2bd1aaf..3ebf0a1fd9ad1 100644 --- a/docs/reference/ilm/ilm-with-existing-indices.asciidoc +++ b/docs/reference/ilm/ilm-with-existing-indices.asciidoc @@ -253,7 +253,7 @@ phases required. 
For simplicity, we'll just use rollover: [source,console] ----------------------- -PUT _ilm/policy/sample_policy +PUT _ilm/policy/mylogs_condensed_policy { "policy": { "phases": { diff --git a/docs/reference/index-modules/store.asciidoc b/docs/reference/index-modules/store.asciidoc index fd86b29df679d..2f028a5b381c7 100644 --- a/docs/reference/index-modules/store.asciidoc +++ b/docs/reference/index-modules/store.asciidoc @@ -3,6 +3,10 @@ The store module allows you to control how index data is stored and accessed on disk. +NOTE: This is a low-level setting. Some store implementations have poor +concurrency or disable optimizations for heap memory usage. We recommend +sticking to the defaults. + [float] [[file-system]] === File system storage types @@ -11,12 +15,12 @@ There are different file system implementations or _storage types_. By default, Elasticsearch will pick the best implementation based on the operating environment. -This can be overridden for all indices by adding this to the -`config/elasticsearch.yml` file: +The storage type can also be explicitly set for all indices by configuring the +store type in the `config/elasticsearch.yml` file: [source,yaml] --------------------------------- -index.store.type: niofs +index.store.type: hybridfs --------------------------------- It is a _static_ setting that can be set on a per-index basis at index @@ -27,7 +31,7 @@ creation time: PUT /my_index { "settings": { - "index.store.type": "niofs" + "index.store.type": "hybridfs" } } --------------------------------- @@ -47,15 +51,15 @@ supported systems but is subject to change. The Simple FS type is a straightforward implementation of file system storage (maps to Lucene `SimpleFsDirectory`) using a random access file. This implementation has poor concurrent performance (multiple threads -will bottleneck). It is usually better to use the `niofs` when you need -index persistence. +will bottleneck) and disables some optimizations for heap memory usage. 
[[niofs]]`niofs`:: The NIO FS type stores the shard index on the file system (maps to Lucene `NIOFSDirectory`) using NIO. It allows multiple threads to read from the same file concurrently. It is not recommended on Windows -because of a bug in the SUN Java implementation. +because of a bug in the SUN Java implementation and disables some +optimizations for heap memory usage. [[mmapfs]]`mmapfs`:: diff --git a/docs/reference/ingest/ingest-node.asciidoc b/docs/reference/ingest/ingest-node.asciidoc index d32384e613bb7..0749a7aabe1de 100644 --- a/docs/reference/ingest/ingest-node.asciidoc +++ b/docs/reference/ingest/ingest-node.asciidoc @@ -372,7 +372,7 @@ is not null safe alternative, so an explicit null check is needed. [[ingest-conditional-complex]] === Complex Conditionals -The `if` condition can be more then a simple equality check. +The `if` condition can be more complex than a simple equality check. The full power of the <> is available and running in the {painless}/painless-ingest-processor-context.html[ingest processor context]. diff --git a/docs/reference/ingest/processors/csv.asciidoc b/docs/reference/ingest/processors/csv.asciidoc index 505bd14162a59..464a835b11c94 100644 --- a/docs/reference/ingest/processors/csv.asciidoc +++ b/docs/reference/ingest/processors/csv.asciidoc @@ -23,7 +23,7 @@ include::common-options.asciidoc[] { "csv": { "field": "my_field", - "target_fields": ["field1, field2"] + "target_fields": ["field1", "field2"] } } -------------------------------------------------- diff --git a/docs/reference/mapping/types/nested.asciidoc b/docs/reference/mapping/types/nested.asciidoc index 98f10bfe20dd3..aa2ae72dd0522 100644 --- a/docs/reference/mapping/types/nested.asciidoc +++ b/docs/reference/mapping/types/nested.asciidoc @@ -183,18 +183,26 @@ or <>. The following parameters are accepted by `nested` fields: -[horizontal] <>:: - - Whether or not new `properties` should be added dynamically to an existing - nested object. 
Accepts `true` (default), `false` and `strict`. +(Optional, string) +Whether or not new `properties` should be added dynamically to an existing +nested object. Accepts `true` (default), `false` and `strict`. <>:: - - The fields within the nested object, which can be of any - <>, including `nested`. New properties - may be added to an existing nested object. - +(Optional, object) +The fields within the nested object, which can be of any +<>, including `nested`. New properties +may be added to an existing nested object. + +`include_in_parent`:: +(Optional, boolean) +If `true`, all fields in the nested object are also added to the parent document +as standard (flat) fields. Defaults to `false`. + +`include_in_root`:: +(Optional, boolean) +If `true`, all fields in the nested object are also added to the root +document as standard (flat) fields. Defaults to `false`. [float] === Limits on `nested` mappings and objects diff --git a/docs/reference/migration/migrate_8_0.asciidoc b/docs/reference/migration/migrate_8_0.asciidoc index 4478b678a11f3..bf363ea30f858 100644 --- a/docs/reference/migration/migrate_8_0.asciidoc +++ b/docs/reference/migration/migrate_8_0.asciidoc @@ -11,6 +11,7 @@ See also <> and <>. coming[8.0.0] +* <> * <> * <> * <> @@ -63,6 +64,7 @@ is replaced with a new endpoint that does not contain `_xpack`. 
As an example, // end::notable-breaking-changes[] +include::migrate_8_0/aggregations.asciidoc[] include::migrate_8_0/analysis.asciidoc[] include::migrate_8_0/allocation.asciidoc[] include::migrate_8_0/breaker.asciidoc[] diff --git a/docs/reference/migration/migrate_8_0/aggregations.asciidoc b/docs/reference/migration/migrate_8_0/aggregations.asciidoc new file mode 100644 index 0000000000000..fd7c3affb15b0 --- /dev/null +++ b/docs/reference/migration/migrate_8_0/aggregations.asciidoc @@ -0,0 +1,17 @@ +[float] +[[breaking_80_aggregations_changes]] +=== Aggregations changes + +//NOTE: The notable-breaking-changes tagged regions are re-used in the +//Installation and Upgrade Guide + +//tag::notable-breaking-changes[] +[discrete] +[[percentile-duplication]] +==== Duplicate values no longer supported in percentiles aggregation + +If you specify the `percents` parameter with the +<>, +its values must be unique. Otherwise, an exception occurs. + +// end::notable-breaking-changes[] \ No newline at end of file diff --git a/docs/reference/ml/anomaly-detection/apis/find-file-structure.asciidoc b/docs/reference/ml/anomaly-detection/apis/find-file-structure.asciidoc index 0a1b2aaefef80..f5173ab504e99 100644 --- a/docs/reference/ml/anomaly-detection/apis/find-file-structure.asciidoc +++ b/docs/reference/ml/anomaly-detection/apis/find-file-structure.asciidoc @@ -315,7 +315,7 @@ If the request does not encounter errors, you receive the following result: { "date" : { "field" : "release_date", - "timezone" : "{{ beat.timezone }}", + "timezone" : "{{ event.timezone }}", "formats" : [ "ISO8601" ] @@ -702,7 +702,7 @@ If the request does not encounter errors, you receive the following result: { "date" : { "field" : "tpep_pickup_datetime", - "timezone" : "{{ beat.timezone }}", + "timezone" : "{{ event.timezone }}", "formats" : [ "yyyy-MM-dd HH:mm:ss" ] @@ -1577,7 +1577,7 @@ this: { "date" : { "field" : "timestamp", - "timezone" : "{{ beat.timezone }}", + "timezone" : "{{ event.timezone 
}}", "formats" : [ "ISO8601" ] @@ -1743,7 +1743,7 @@ this: { "date" : { "field" : "timestamp", - "timezone" : "{{ beat.timezone }}", + "timezone" : "{{ event.timezone }}", "formats" : [ "ISO8601" ] diff --git a/docs/reference/ml/anomaly-detection/apis/get-datafeed-stats.asciidoc b/docs/reference/ml/anomaly-detection/apis/get-datafeed-stats.asciidoc index 21a907a703c9a..463d17771cc2c 100644 --- a/docs/reference/ml/anomaly-detection/apis/get-datafeed-stats.asciidoc +++ b/docs/reference/ml/anomaly-detection/apis/get-datafeed-stats.asciidoc @@ -68,15 +68,16 @@ The API returns an array of {dfeed} count objects. All of these properties are informational; you cannot update their values. `assignment_explanation`:: -(string) For started {dfeeds} only, contains messages relating to the selection of a node. +(string) +include::{docdir}/ml/ml-shared.asciidoc[tag=assignment-explanation] `datafeed_id`:: (string) include::{docdir}/ml/ml-shared.asciidoc[tag=datafeed-id] `node`:: -(object) For started {dfeeds} only, the node upon which the {dfeed} is started. -The {dfeed} and job will be on the same node. +(object) +include::{docdir}/ml/ml-shared.asciidoc[tag=node] `node`.`id`::: The unique identifier of the node. For example, "0-o0tOoRTwKFZifatTWKNw". `node`.`name`::: The node name. For example, `0-o0tOo`. `node`.`ephemeral_id`::: The node ephemeral ID. @@ -85,27 +86,33 @@ accepted. For example, `127.0.0.1:9300`. `node`.`attributes`::: For example, `{"ml.machine_memory": "17179869184"}`. `state`:: -(string) The status of the {dfeed}, which can be one of the following values: -+ --- -* `started`: The {dfeed} is actively receiving data. -* `stopped`: The {dfeed} is stopped and will not receive data until it is -re-started. --- +(string) +include::{docdir}/ml/ml-shared.asciidoc[tag=state-datafeed] `timing_stats`:: (object) An object that provides statistical information about timing aspect of this {dfeed}. 
+ `timing_stats`.`average_search_time_per_bucket_ms`::: -(double) Average of the {dfeed} search times in milliseconds. +(double) +include::{docdir}/ml/ml-shared.asciidoc[tag=search-bucket-avg] + `timing_stats`.`bucket_count`::: -(long) The number of buckets processed. +(long) +include::{docdir}/ml/ml-shared.asciidoc[tag=bucket-count] + `timing_stats`.`exponential_average_search_time_per_hour_ms`::: -(double) Exponential moving average of the {dfeed} search times in milliseconds. +(double) +include::{docdir}/ml/ml-shared.asciidoc[tag=search-exp-avg-hour] + `timing_stats`.`job_id`::: include::{docdir}/ml/ml-shared.asciidoc[tag=job-id-anomaly-detection] -`timing_stats`.`search_count`::: Number of searches performed by this {dfeed}. -`timing_stats`.`total_search_time_ms`::: Total time the {dfeed} spent searching in milliseconds. + +`timing_stats`.`search_count`::: +include::{docdir}/ml/ml-shared.asciidoc[tag=search-count] + +`timing_stats`.`total_search_time_ms`::: +include::{docdir}/ml/ml-shared.asciidoc[tag=search-time] [[ml-get-datafeed-stats-response-codes]] diff --git a/docs/reference/ml/anomaly-detection/apis/put-job.asciidoc b/docs/reference/ml/anomaly-detection/apis/put-job.asciidoc index a4661a19c435d..47257cc9a0ac3 100644 --- a/docs/reference/ml/anomaly-detection/apis/put-job.asciidoc +++ b/docs/reference/ml/anomaly-detection/apis/put-job.asciidoc @@ -78,7 +78,7 @@ A detector has the following properties: (string) include::{docdir}/ml/ml-shared.asciidoc[tag=by-field-name] -`analysis_config`.`detectors`.`custom_rules`:::: +[[put-customrules]]`analysis_config`.`detectors`.`custom_rules`:::: + -- (array) diff --git a/docs/reference/ml/anomaly-detection/detector-custom-rules.asciidoc b/docs/reference/ml/anomaly-detection/detector-custom-rules.asciidoc index 8cba67da926e7..a757c9036a1bf 100644 --- a/docs/reference/ml/anomaly-detection/detector-custom-rules.asciidoc +++ b/docs/reference/ml/anomaly-detection/detector-custom-rules.asciidoc @@ -10,10 +10,11 @@ of 
following its default behavior. To specify the _when_ a rule uses a `scope` and `conditions`. You can think of `scope` as the categorical specification of a rule, while `conditions` are the numerical part. A rule can have a scope, one or more conditions, or a combination of -scope and conditions. - -Let us see how those can be configured by examples. +scope and conditions. For the full list of specification details, see the +{ref}/ml-put-job.html#put-customrules[`custom_rules` object] in the create +{anomaly-jobs} API. +[[ml-custom-rules-scope]] ==== Specifying custom rule scope Let us assume we are configuring an {anomaly-job} in order to detect DNS data @@ -29,7 +30,8 @@ to achieve this. First, we need to create a list of our safe domains. Those lists are called _filters_ in {ml}. Filters can be shared across {anomaly-jobs}. -We create our filter using the {ref}/ml-put-filter.html[put filter API]: +You can create a filter in **Anomaly Detection > Settings > Filter Lists** in +{kib} or by using the {ref}/ml-put-filter.html[put filter API]: [source,console] ---------------------------------- @@ -42,7 +44,7 @@ PUT _ml/filters/safe_domains // TEST[skip:needs-licence] Now, we can create our {anomaly-job} specifying a scope that uses the -`safe_domains` filter for the `highest_registered_domain` field: +`safe_domains` filter for the `highest_registered_domain` field: [source,console] ---------------------------------- @@ -73,7 +75,8 @@ PUT _ml/anomaly_detectors/dns_exfiltration_with_rule // TEST[skip:needs-licence] As time advances and we see more data and more results, we might encounter new -domains that we want to add in the filter. We can do that by using the +domains that we want to add in the filter. 
We can do that in the +**Anomaly Detection > Settings > Filter Lists** in {kib} or by using the {ref}/ml-update-filter.html[update filter API]: [source,console] @@ -127,6 +130,7 @@ PUT _ml/anomaly_detectors/scoping_multiple_fields Such a detector will skip results when the values of all 3 scoped fields are included in the referenced filters. +[[ml-custom-rules-conditions]] ==== Specifying custom rule conditions Imagine a detector that looks for anomalies in CPU utilization. @@ -206,7 +210,8 @@ PUT _ml/anomaly_detectors/rule_with_range ---------------------------------- // TEST[skip:needs-licence] -==== Custom rules in the life-cycle of a job +[[ml-custom-rules-lifecycle]] +==== Custom rules in the lifecycle of a job Custom rules only affect results created after the rules were applied. Let us imagine that we have configured an {anomaly-job} and it has been running @@ -214,8 +219,9 @@ for some time. After observing its results we decide that we can employ rules in order to get rid of some uninteresting results. We can use the {ref}/ml-update-job.html[update {anomaly-job} API] to do so. However, the rule we added will only be in effect for any results created from the moment we -added the rule onwards. Past results will remain unaffected. +added the rule onwards. Past results will remain unaffected. +[[ml-custom-rules-filtering]] ==== Using custom rules vs. filtering data It might appear like using rules is just another way of filtering the data diff --git a/docs/reference/ml/ml-shared.asciidoc b/docs/reference/ml/ml-shared.asciidoc index bbd945b690e26..dbd1c0ef7fa4d 100644 --- a/docs/reference/ml/ml-shared.asciidoc +++ b/docs/reference/ml/ml-shared.asciidoc @@ -137,6 +137,14 @@ tag::analyzed-fields-includes[] An array of strings that defines the fields that will be included in the analysis. end::analyzed-fields-includes[] +tag::assignment-explanation[] +For started {dfeeds} only, contains messages relating to the selection of a node. 
+end::assignment-explanation[] + +tag::assignment-explanation-dfanalytics[] +Contains messages relating to the selection of a node. +end::assignment-explanation-dfanalytics[] + tag::background-persist-interval[] Advanced configuration option. The time between each periodic persistence of the model. The default value is a randomized value between 3 to 4 hours, which @@ -150,6 +158,10 @@ so do not set the `background_persist_interval` value too low. -- end::background-persist-interval[] +tag::bucket-count[] +The number of buckets processed. +end::bucket-count[] + tag::bucket-span[] The size of the interval that the analysis is aggregated into, typically between `5m` and `1h`. The default value is `5m`. If the {anomaly-job} uses a {dfeed} @@ -912,6 +924,11 @@ improve diversity in the ensemble. Therefore, only override this if you are confident that the value you choose is appropriate for the data set. end::n-neighbors[] +tag::node[] +For started {dfeeds} only, this information pertains to the node upon which the +{dfeed} is started. +end::node[] + tag::num-top-classes[] Defines the number of categories for which the predicted probabilities are reported. It must be non-negative. If it is greater than the @@ -1004,6 +1021,22 @@ tag::scroll-size[] The `size` parameter that is used in {es} searches. The default value is `1000`. end::scroll-size[] +tag::search-bucket-avg[] +The average search time per bucket, in milliseconds. +end::search-bucket-avg[] + +tag::search-count[] +The number of searches run by the {dfeed}. +end::search-count[] + +tag::search-exp-avg-hour[] +The exponential average search time per hour, in milliseconds. +end::search-exp-avg-hour[] + +tag::search-time[] +The total time the {dfeed} spent searching, in milliseconds. +end::search-time[] + tag::size[] Specifies the maximum number of {dfanalytics-jobs} to obtain. The default value is `100`. 
@@ -1050,6 +1083,16 @@ more information, see https://en.wikipedia.org/wiki/Feature_scaling#Standardization_(Z-score_Normalization)[this wiki page about standardization]. end::standardization-enabled[] +tag::state-datafeed[] +The status of the {dfeed}, which can be one of the following values: ++ +-- +* `started`: The {dfeed} is actively receiving data. +* `stopped`: The {dfeed} is stopped and will not receive data until it is +re-started. +-- +end::state-datafeed[] + tag::summary-count-field-name[] If this property is specified, the data that is fed to the job is expected to be pre-summarized. This property value is the name of the field that contains the diff --git a/docs/reference/monitoring/production.asciidoc b/docs/reference/monitoring/production.asciidoc index de0e1aa3912cf..e901896e865e3 100644 --- a/docs/reference/monitoring/production.asciidoc +++ b/docs/reference/monitoring/production.asciidoc @@ -103,7 +103,7 @@ monitoring cluster. ** <>. -** <>. +** <>. . (Optional) {logstash-ref}/configuring-logstash.html[Configure {ls} to collect data and send it to the monitoring cluster]. diff --git a/docs/reference/redirects.asciidoc b/docs/reference/redirects.asciidoc index 7d62966e2891f..679a70a9424db 100644 --- a/docs/reference/redirects.asciidoc +++ b/docs/reference/redirects.asciidoc @@ -91,11 +91,6 @@ See <>. See <>. -[role="exclude",id="configuring-monitoring"] -=== Configuring monitoring - -See <>. - [role="exclude",id="es-monitoring"] === Monitoring {es} diff --git a/docs/reference/release-notes/8.0.0-alpha1.asciidoc b/docs/reference/release-notes/8.0.0-alpha1.asciidoc index 4693221f629cf..239ffb0e3f94f 100644 --- a/docs/reference/release-notes/8.0.0-alpha1.asciidoc +++ b/docs/reference/release-notes/8.0.0-alpha1.asciidoc @@ -4,5 +4,11 @@ The changes listed below have been released for the first time in {es} 8.0.0-alpha1. 
-coming[8.0.0] +[[breaking-8.0.0-alpha1]] +[float] +=== Breaking changes + +Aggregations:: +* Disallow specifying the same percentile multiple times in percentiles aggregation {pull}52257[#52257] +coming[8.0.0] diff --git a/docs/reference/rest-api/common-parms.asciidoc b/docs/reference/rest-api/common-parms.asciidoc index 3e4713273eb5c..34b418a157888 100644 --- a/docs/reference/rest-api/common-parms.asciidoc +++ b/docs/reference/rest-api/common-parms.asciidoc @@ -707,7 +707,7 @@ Defaults to 100. end::scroll_size[] tag::search_timeout[] -`search_timeout`:: +`timeout`:: (Optional, <>) Explicit timeout for each search request. Defaults to no timeout. diff --git a/docs/reference/settings/ilm-settings.asciidoc b/docs/reference/settings/ilm-settings.asciidoc index c309c61a1b049..44b055302652b 100644 --- a/docs/reference/settings/ilm-settings.asciidoc +++ b/docs/reference/settings/ilm-settings.asciidoc @@ -43,4 +43,4 @@ the index creation will fail. `index.lifecycle.origination_date`:: The timestamp that will be used to calculate the index age for its phase transitions. This allows the users to create an index containing old data and -use the original creation date of the old data to calculate the index age. +use the original creation date of the old data to calculate the index age. Must be a long (Unix epoch) value. diff --git a/docs/reference/sql/endpoints/odbc/configuration.asciidoc b/docs/reference/sql/endpoints/odbc/configuration.asciidoc index 33c82d500c015..da8d46c03b24a 100644 --- a/docs/reference/sql/endpoints/odbc/configuration.asciidoc +++ b/docs/reference/sql/endpoints/odbc/configuration.asciidoc @@ -123,11 +123,11 @@ NOTE: If left empty, the default *9200* port number will be used. + * Username, Password + -If security is enabled, these fields will need to contain the credentials of the user configured to access the REST SQL endpoint. +If security is enabled, these fields will need to contain the credentials of the access user. 
At a minimum, the _Name_ and _Hostname_ fields must be provisioned, before the DSN can be saved. -WARNING: Connection encryption is enabled by default. This will need to be changed if connecting to a SQL API endpoint with no cryptography enabled. +WARNING: Connection encryption is enabled by default. This will need to be changed if connecting to an {es} node with no encryption. [float] ===== 2.3 Cryptography parameters diff --git a/docs/reference/sql/functions/grouping.asciidoc b/docs/reference/sql/functions/grouping.asciidoc index 0de9e396fddbc..f9b2163c3750e 100644 --- a/docs/reference/sql/functions/grouping.asciidoc +++ b/docs/reference/sql/functions/grouping.asciidoc @@ -87,8 +87,8 @@ actually used will be `INTERVAL '2' DAY`. If the interval specified is less than [IMPORTANT] All intervals specified for a date/time HISTOGRAM will use a <> -in their `date_histogram` aggregation definition, with the notable exceptions of `INTERVAL '1' YEAR` AND `INTERVAL '1' MONTH` where a calendar interval is used. -The choice for a calendar interval was made for having a more intuitive result for YEAR and MONTH groupings. In the case of YEAR, for example, the calendar intervals consider a one year +in their `date_histogram` aggregation definition, with the notable exceptions of `INTERVAL '1' YEAR`, `INTERVAL '1' MONTH` and `INTERVAL '1' DAY` where a calendar interval is used. +The choice for a calendar interval was made for having a more intuitive result for YEAR, MONTH and DAY groupings. In the case of YEAR, for example, the calendar intervals consider a one year bucket as the one starting on January 1st that specific year, whereas a fixed interval one-year-bucket considers one year as a number of milliseconds (for example, `31536000000ms` corresponding to 365 days, 24 hours per day, 60 minutes per hour etc.). 
With fixed intervals, the day of February 5th, 2019 for example, belongs to a bucket that starts on December 20th, 2018 and {es} (and implicitly {es-sql}) would diff --git a/gradle/wrapper/gradle-wrapper.properties b/gradle/wrapper/gradle-wrapper.properties index 403e43b44f27a..64c962a68cedd 100644 --- a/gradle/wrapper/gradle-wrapper.properties +++ b/gradle/wrapper/gradle-wrapper.properties @@ -1,6 +1,6 @@ distributionBase=GRADLE_USER_HOME distributionPath=wrapper/dists -distributionUrl=https\://services.gradle.org/distributions/gradle-6.2-all.zip +distributionUrl=https\://services.gradle.org/distributions/gradle-6.2.1-all.zip zipStoreBase=GRADLE_USER_HOME zipStorePath=wrapper/dists -distributionSha256Sum=f016e66d88c2f9adb5b6e7dff43a363b8c2632f18b4ad6f365f49da34dd57db8 +distributionSha256Sum=49fad5c94e76bc587c1a1138f045daee81476a82b288c7ab8c7cd6b14bf2b1c7 diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/AnalyzerCaster.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/AnalyzerCaster.java index b89f6ddcc4279..55432b2e586d3 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/AnalyzerCaster.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/AnalyzerCaster.java @@ -520,7 +520,7 @@ public static Class promoteEquality(Class from0, Class from1) { return Object.class; } - public static Class promoteConditional(Class from0, Class from1, Object const0, Object const1) { + public static Class promoteConditional(Class from0, Class from1) { if (from0 == from1) { return from0; } @@ -529,123 +529,29 @@ public static Class promoteConditional(Class from0, Class from1, Object return def.class; } - if (from0.isPrimitive() && from1.isPrimitive()) { - if (from0 == boolean.class && from1 == boolean.class) { - return boolean.class; - } - + if (from0.isPrimitive() && from0 != boolean.class && from1.isPrimitive() && from1 != boolean.class) { if (from0 == double.class || from1 == double.class) { 
return double.class; } else if (from0 == float.class || from1 == float.class) { return float.class; } else if (from0 == long.class || from1 == long.class) { return long.class; + } else if (from0 == int.class || from1 == int.class) { + return int.class; + } else if (from0 == char.class) { + if (from1 == short.class || from1 == byte.class) { + return int.class; + } else { + return null; + } + } else if (from1 == char.class) { + if (from0 == short.class || from0 == byte.class) { + return int.class; } else { - if (from0 == byte.class) { - if (from1 == byte.class) { - return byte.class; - } else if (from1 == short.class) { - if (const1 != null) { - final short constant = (short)const1; - - if (constant <= Byte.MAX_VALUE && constant >= Byte.MIN_VALUE) { - return byte.class; - } - } - - return short.class; - } else if (from1 == char.class) { - return int.class; - } else if (from1 == int.class) { - if (const1 != null) { - final int constant = (int)const1; - - if (constant <= Byte.MAX_VALUE && constant >= Byte.MIN_VALUE) { - return byte.class; - } - } - - return int.class; - } - } else if (from0 == short.class) { - if (from1 == byte.class) { - if (const0 != null) { - final short constant = (short)const0; - - if (constant <= Byte.MAX_VALUE && constant >= Byte.MIN_VALUE) { - return byte.class; - } - } - - return short.class; - } else if (from1 == short.class) { - return short.class; - } else if (from1 == char.class) { - return int.class; - } else if (from1 == int.class) { - if (const1 != null) { - final int constant = (int)const1; - - if (constant <= Short.MAX_VALUE && constant >= Short.MIN_VALUE) { - return short.class; - } - } - - return int.class; - } - } else if (from0 == char.class) { - if (from1 == byte.class) { - return int.class; - } else if (from1 == short.class) { - return int.class; - } else if (from1 == char.class) { - return char.class; - } else if (from1 == int.class) { - if (const1 != null) { - final int constant = (int)const1; - - if (constant <= 
Character.MAX_VALUE && constant >= Character.MIN_VALUE) { - return byte.class; - } - } - - return int.class; - } - } else if (from0 == int.class) { - if (from1 == byte.class) { - if (const0 != null) { - final int constant = (int)const0; - - if (constant <= Byte.MAX_VALUE && constant >= Byte.MIN_VALUE) { - return byte.class; - } - } - - return int.class; - } else if (from1 == short.class) { - if (const0 != null) { - final int constant = (int)const0; - - if (constant <= Short.MAX_VALUE && constant >= Short.MIN_VALUE) { - return byte.class; - } - } - - return int.class; - } else if (from1 == char.class) { - if (const0 != null) { - final int constant = (int)const0; - - if (constant <= Character.MAX_VALUE && constant >= Character.MIN_VALUE) { - return byte.class; - } - } - - return int.class; - } else if (from1 == int.class) { - return int.class; - } + return null; } + } else { + return null; } } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/ClassWriter.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/ClassWriter.java index 873af9f73bf69..320eee3b928de 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/ClassWriter.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/ClassWriter.java @@ -65,7 +65,6 @@ public static int buildAccess(int modifiers, boolean synthetic) { protected final org.objectweb.asm.ClassWriter classWriter; protected final ClassVisitor classVisitor; - protected MethodWriter clinitWriter = null; public ClassWriter(CompilerSettings compilerSettings, BitSet statements, Printer debugStream, Class baseClass, int classFrames, int classAccess, String className, String[] classInterfaces) { @@ -93,30 +92,12 @@ public ClassVisitor getClassVisitor() { return classVisitor; } - /** - * Lazy loads the {@link MethodWriter} for clinit, so that if it's not - * necessary the method is never created for the class. 
- */ - public MethodWriter getClinitWriter() { - if (clinitWriter == null) { - clinitWriter = new MethodWriter(Opcodes.ACC_STATIC, WriterConstants.CLINIT, classVisitor, statements, compilerSettings); - clinitWriter.visitCode(); - } - - return clinitWriter; - } - public MethodWriter newMethodWriter(int access, Method method) { return new MethodWriter(access, method, classVisitor, statements, compilerSettings); } @Override public void close() { - if (clinitWriter != null) { - clinitWriter.returnValue(); - clinitWriter.endMethod(); - } - classVisitor.visitEnd(); } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/DefBootstrapInjectionPhase.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/DefBootstrapInjectionPhase.java index 603defb7c82d1..eb7774c7d92ce 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/DefBootstrapInjectionPhase.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/DefBootstrapInjectionPhase.java @@ -23,7 +23,7 @@ import org.elasticsearch.painless.ir.ClassNode; import org.elasticsearch.painless.ir.FieldNode; import org.elasticsearch.painless.ir.FunctionNode; -import org.elasticsearch.painless.ir.MemberFieldNode; +import org.elasticsearch.painless.ir.MemberFieldLoadNode; import org.elasticsearch.painless.ir.ReturnNode; import org.elasticsearch.painless.ir.StaticNode; import org.elasticsearch.painless.ir.VariableNode; @@ -121,50 +121,50 @@ protected static void injectDefBootstrapMethod(ClassNode classNode) { callSubNode.setLocation(internalLocation); callSubNode.setExpressionType(CallSite.class); callSubNode.setMethod(new PainlessMethod( - DefBootstrap.class.getMethod("bootstrap", - PainlessLookup.class, - FunctionTable.class, - Lookup.class, - String.class, - MethodType.class, - int.class, - int.class, - Object[].class), - DefBootstrap.class, - CallSite.class, - Arrays.asList( - PainlessLookup.class, - FunctionTable.class, - Lookup.class, - String.class, - 
MethodType.class, - int.class, - int.class, - Object[].class), - null, - null, - null + DefBootstrap.class.getMethod("bootstrap", + PainlessLookup.class, + FunctionTable.class, + Lookup.class, + String.class, + MethodType.class, + int.class, + int.class, + Object[].class), + DefBootstrap.class, + CallSite.class, + Arrays.asList( + PainlessLookup.class, + FunctionTable.class, + Lookup.class, + String.class, + MethodType.class, + int.class, + int.class, + Object[].class), + null, + null, + null ) ); callSubNode.setBox(DefBootstrap.class); callNode.setRightNode(callSubNode); - MemberFieldNode memberFieldNode = new MemberFieldNode(); - memberFieldNode.setLocation(internalLocation); - memberFieldNode.setExpressionType(PainlessLookup.class); - memberFieldNode.setName("$DEFINITION"); - memberFieldNode.setStatic(true); + MemberFieldLoadNode memberFieldLoadNode = new MemberFieldLoadNode(); + memberFieldLoadNode.setLocation(internalLocation); + memberFieldLoadNode.setExpressionType(PainlessLookup.class); + memberFieldLoadNode.setName("$DEFINITION"); + memberFieldLoadNode.setStatic(true); - callSubNode.addArgumentNode(memberFieldNode); + callSubNode.addArgumentNode(memberFieldLoadNode); - memberFieldNode = new MemberFieldNode(); - memberFieldNode.setLocation(internalLocation); - memberFieldNode.setExpressionType(FunctionTable.class); - memberFieldNode.setName("$FUNCTIONS"); - memberFieldNode.setStatic(true); + memberFieldLoadNode = new MemberFieldLoadNode(); + memberFieldLoadNode.setLocation(internalLocation); + memberFieldLoadNode.setExpressionType(FunctionTable.class); + memberFieldLoadNode.setName("$FUNCTIONS"); + memberFieldLoadNode.setStatic(true); - callSubNode.addArgumentNode(memberFieldNode); + callSubNode.addArgumentNode(memberFieldLoadNode); VariableNode variableNode = new VariableNode(); variableNode.setLocation(internalLocation); diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/ScriptInjectionPhase.java 
b/modules/lang-painless/src/main/java/org/elasticsearch/painless/ScriptInjectionPhase.java index 6fe930bb33fea..92c2a2b83cf5a 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/ScriptInjectionPhase.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/ScriptInjectionPhase.java @@ -29,7 +29,7 @@ import org.elasticsearch.painless.ir.FieldNode; import org.elasticsearch.painless.ir.FunctionNode; import org.elasticsearch.painless.ir.MemberCallNode; -import org.elasticsearch.painless.ir.MemberFieldNode; +import org.elasticsearch.painless.ir.MemberFieldLoadNode; import org.elasticsearch.painless.ir.ReturnNode; import org.elasticsearch.painless.ir.StatementNode; import org.elasticsearch.painless.ir.StaticNode; @@ -131,13 +131,13 @@ protected static void injectStaticFieldsAndGetters(ClassNode classNode) { blockNode.addStatementNode(returnNode); - MemberFieldNode memberFieldNode = new MemberFieldNode(); - memberFieldNode.setLocation(internalLocation); - memberFieldNode.setExpressionType(String.class); - memberFieldNode.setName("$NAME"); - memberFieldNode.setStatic(true); + MemberFieldLoadNode memberFieldLoadNode = new MemberFieldLoadNode(); + memberFieldLoadNode.setLocation(internalLocation); + memberFieldLoadNode.setExpressionType(String.class); + memberFieldLoadNode.setName("$NAME"); + memberFieldLoadNode.setStatic(true); - returnNode.setExpressionNode(memberFieldNode); + returnNode.setExpressionNode(memberFieldLoadNode); functionNode = new FunctionNode(); functionNode.setLocation(internalLocation); @@ -162,13 +162,13 @@ protected static void injectStaticFieldsAndGetters(ClassNode classNode) { blockNode.addStatementNode(returnNode); - memberFieldNode = new MemberFieldNode(); - memberFieldNode.setLocation(internalLocation); - memberFieldNode.setExpressionType(String.class); - memberFieldNode.setName("$SOURCE"); - memberFieldNode.setStatic(true); + memberFieldLoadNode = new MemberFieldLoadNode(); + 
memberFieldLoadNode.setLocation(internalLocation); + memberFieldLoadNode.setExpressionType(String.class); + memberFieldLoadNode.setName("$SOURCE"); + memberFieldLoadNode.setStatic(true); - returnNode.setExpressionNode(memberFieldNode); + returnNode.setExpressionNode(memberFieldLoadNode); functionNode = new FunctionNode(); functionNode.setLocation(internalLocation); @@ -193,13 +193,13 @@ protected static void injectStaticFieldsAndGetters(ClassNode classNode) { blockNode.addStatementNode(returnNode); - memberFieldNode = new MemberFieldNode(); - memberFieldNode.setLocation(internalLocation); - memberFieldNode.setExpressionType(BitSet.class); - memberFieldNode.setName("$STATEMENTS"); - memberFieldNode.setStatic(true); + memberFieldLoadNode = new MemberFieldLoadNode(); + memberFieldLoadNode.setLocation(internalLocation); + memberFieldLoadNode.setExpressionType(BitSet.class); + memberFieldLoadNode.setName("$STATEMENTS"); + memberFieldLoadNode.setStatic(true); - returnNode.setExpressionNode(memberFieldNode); + returnNode.setExpressionNode(memberFieldLoadNode); } // convert gets methods to a new set of inserted ir nodes as necessary - @@ -338,11 +338,11 @@ protected static void injectSandboxExceptions(FunctionNode functionNode) { memberCallNode.setLocation(internalLocation); memberCallNode.setExpressionType(ScriptException.class); memberCallNode.setLocalFunction(new LocalFunction( - "convertToScriptException", - ScriptException.class, - Arrays.asList(Throwable.class, Map.class), - true, - false + "convertToScriptException", + ScriptException.class, + Arrays.asList(Throwable.class, Map.class), + true, + false ) ); @@ -382,17 +382,18 @@ protected static void injectSandboxExceptions(FunctionNode functionNode) { null, null, null - )); + ) + ); callNode.setRightNode(callSubNode); - MemberFieldNode memberFieldNode = new MemberFieldNode(); - memberFieldNode.setLocation(internalLocation); - memberFieldNode.setExpressionType(PainlessLookup.class); - 
memberFieldNode.setName("$DEFINITION"); - memberFieldNode.setStatic(true); + MemberFieldLoadNode memberFieldLoadNode = new MemberFieldLoadNode(); + memberFieldLoadNode.setLocation(internalLocation); + memberFieldLoadNode.setExpressionType(PainlessLookup.class); + memberFieldLoadNode.setName("$DEFINITION"); + memberFieldLoadNode.setStatic(true); - callSubNode.addArgumentNode(memberFieldNode); + callSubNode.addArgumentNode(memberFieldLoadNode); for (Class throwable : new Class[] { PainlessError.class, BootstrapMethodError.class, OutOfMemoryError.class, StackOverflowError.class, Exception.class}) { @@ -429,11 +430,11 @@ protected static void injectSandboxExceptions(FunctionNode functionNode) { memberCallNode.setLocation(internalLocation); memberCallNode.setExpressionType(ScriptException.class); memberCallNode.setLocalFunction(new LocalFunction( - "convertToScriptException", - ScriptException.class, - Arrays.asList(Throwable.class, Map.class), - true, - false + "convertToScriptException", + ScriptException.class, + Arrays.asList(Throwable.class, Map.class), + true, + false ) ); @@ -470,7 +471,8 @@ protected static void injectSandboxExceptions(FunctionNode functionNode) { null, null, null - )); + ) + ); callNode.setRightNode(callSubNode); } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/action/PainlessExecuteAction.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/action/PainlessExecuteAction.java index 1e2e562787c82..21059f2112457 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/action/PainlessExecuteAction.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/action/PainlessExecuteAction.java @@ -29,7 +29,8 @@ import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.Scorer; import org.apache.lucene.search.Weight; -import org.apache.lucene.store.RAMDirectory; +import org.apache.lucene.store.ByteBuffersDirectory; +import org.apache.lucene.store.Directory; 
import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.ActionResponse; import org.elasticsearch.action.ActionType; @@ -541,8 +542,8 @@ private static Response prepareRamIndex(Request request, Analyzer defaultAnalyzer = indexService.getIndexAnalyzers().getDefaultIndexAnalyzer(); - try (RAMDirectory ramDirectory = new RAMDirectory()) { - try (IndexWriter indexWriter = new IndexWriter(ramDirectory, new IndexWriterConfig(defaultAnalyzer))) { + try (Directory directory = new ByteBuffersDirectory()) { + try (IndexWriter indexWriter = new IndexWriter(directory, new IndexWriterConfig(defaultAnalyzer))) { String index = indexService.index().getName(); BytesReference document = request.contextSetup.document; XContentType xContentType = request.contextSetup.xContentType; diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/AssignmentNode.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/AssignmentNode.java index 4499a9f1f490e..e5492061aacfc 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/AssignmentNode.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/AssignmentNode.java @@ -22,7 +22,6 @@ import org.elasticsearch.painless.ClassWriter; import org.elasticsearch.painless.DefBootstrap; -import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.MethodWriter; import org.elasticsearch.painless.Operation; import org.elasticsearch.painless.lookup.PainlessCast; @@ -114,7 +113,7 @@ public PainlessCast getBack() { /* ---- end node data ---- */ @Override - protected void write(ClassWriter classWriter, MethodWriter methodWriter, Globals globals, ScopeTable scopeTable) { + protected void write(ClassWriter classWriter, MethodWriter methodWriter, ScopeTable scopeTable) { methodWriter.writeDebugInfo(location); // For the case where the assignment represents a String concatenation @@ -129,7 +128,7 @@ protected void 
write(ClassWriter classWriter, MethodWriter methodWriter, Globals } // call the setup method on the lhs to prepare for a load/store operation - getLeftNode().setup(classWriter, methodWriter, globals, scopeTable); + getLeftNode().setup(classWriter, methodWriter, scopeTable); if (cat) { // Handle the case where we are doing a compound assignment @@ -137,10 +136,10 @@ protected void write(ClassWriter classWriter, MethodWriter methodWriter, Globals methodWriter.writeDup(getLeftNode().accessElementCount(), catElementStackSize); // dup the top element and insert it // before concat helper on stack - getLeftNode().load(classWriter, methodWriter, globals, scopeTable); // read the current lhs's value + getLeftNode().load(classWriter, methodWriter, scopeTable); // read the current lhs's value methodWriter.writeAppendStrings(getLeftNode().getExpressionType()); // append the lhs's value using the StringBuilder - getRightNode().write(classWriter, methodWriter, globals, scopeTable); // write the bytecode for the rhs + getRightNode().write(classWriter, methodWriter, scopeTable); // write the bytecode for the rhs // check to see if the rhs has already done a concatenation if (getRightNode() instanceof BinaryMathNode == false || ((BinaryMathNode)getRightNode()).getCat() == false) { @@ -158,14 +157,15 @@ protected void write(ClassWriter classWriter, MethodWriter methodWriter, Globals } // store the lhs's value from the stack in its respective variable/field/array - getLeftNode().store(classWriter, methodWriter, globals, scopeTable); + getLeftNode().store(classWriter, methodWriter, scopeTable); } else if (operation != null) { // Handle the case where we are doing a compound assignment that // does not represent a String concatenation. 
methodWriter.writeDup(getLeftNode().accessElementCount(), 0); // if necessary, dup the previous lhs's value // to be both loaded from and stored to - getLeftNode().load(classWriter, methodWriter, globals, scopeTable); // load the current lhs's value + + getLeftNode().load(classWriter, methodWriter, scopeTable); // load the current lhs's value if (read && post) { // dup the value if the lhs is also read from and is a post increment @@ -175,7 +175,8 @@ protected void write(ClassWriter classWriter, MethodWriter methodWriter, Globals methodWriter.writeCast(there); // if necessary cast the current lhs's value // to the promotion type between the lhs and rhs types - getRightNode().write(classWriter, methodWriter, globals, scopeTable); // write the bytecode for the rhs + + getRightNode().write(classWriter, methodWriter, scopeTable); // write the bytecode for the rhs // XXX: fix these types, but first we need def compound assignment tests. // its tricky here as there are possibly explicit casts, too. @@ -196,11 +197,11 @@ protected void write(ClassWriter classWriter, MethodWriter methodWriter, Globals } // store the lhs's value from the stack in its respective variable/field/array - getLeftNode().store(classWriter, methodWriter, globals, scopeTable); + getLeftNode().store(classWriter, methodWriter, scopeTable); } else { // Handle the case for a simple write. 
- getRightNode().write(classWriter, methodWriter, globals, scopeTable); // write the bytecode for the rhs rhs + getRightNode().write(classWriter, methodWriter, scopeTable); // write the bytecode for the rhs if (read) { // dup the value if the lhs is also read from @@ -209,7 +210,7 @@ protected void write(ClassWriter classWriter, MethodWriter methodWriter, Globals } // store the lhs's value from the stack in its respective variable/field/array - getLeftNode().store(classWriter, methodWriter, globals, scopeTable); + getLeftNode().store(classWriter, methodWriter, scopeTable); } } } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/BinaryMathNode.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/BinaryMathNode.java index cddb42dcee6e0..b24a031753f51 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/BinaryMathNode.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/BinaryMathNode.java @@ -21,7 +21,6 @@ import org.elasticsearch.painless.ClassWriter; import org.elasticsearch.painless.DefBootstrap; -import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.Location; import org.elasticsearch.painless.MethodWriter; import org.elasticsearch.painless.Operation; @@ -99,7 +98,7 @@ public void setLocation(Location location) { /* ---- end node data ---- */ @Override - protected void write(ClassWriter classWriter, MethodWriter methodWriter, Globals globals, ScopeTable scopeTable) { + protected void write(ClassWriter classWriter, MethodWriter methodWriter, ScopeTable scopeTable) { methodWriter.writeDebugInfo(location); if (getBinaryType() == String.class && operation == Operation.ADD) { @@ -107,13 +106,13 @@ protected void write(ClassWriter classWriter, MethodWriter methodWriter, Globals methodWriter.writeNewStrings(); } - getLeftNode().write(classWriter, methodWriter, globals, scopeTable); + getLeftNode().write(classWriter, methodWriter, scopeTable); if
(getLeftNode() instanceof BinaryMathNode == false || ((BinaryMathNode)getLeftNode()).getCat() == false) { methodWriter.writeAppendStrings(getLeftNode().getExpressionType()); } - getRightNode().write(classWriter, methodWriter, globals, scopeTable); + getRightNode().write(classWriter, methodWriter, scopeTable); if (getRightNode() instanceof BinaryMathNode == false || ((BinaryMathNode)getRightNode()).getCat() == false) { methodWriter.writeAppendStrings(getRightNode().getExpressionType()); @@ -123,8 +122,8 @@ protected void write(ClassWriter classWriter, MethodWriter methodWriter, Globals methodWriter.writeToStrings(); } } else if (operation == Operation.FIND || operation == Operation.MATCH) { - getRightNode().write(classWriter, methodWriter, globals, scopeTable); - getLeftNode().write(classWriter, methodWriter, globals, scopeTable); + getRightNode().write(classWriter, methodWriter, scopeTable); + getLeftNode().write(classWriter, methodWriter, scopeTable); methodWriter.invokeVirtual(org.objectweb.asm.Type.getType(Pattern.class), WriterConstants.PATTERN_MATCHER); if (operation == Operation.FIND) { @@ -136,8 +135,8 @@ protected void write(ClassWriter classWriter, MethodWriter methodWriter, Globals "for type [" + getExpressionCanonicalTypeName() + "]"); } } else { - getLeftNode().write(classWriter, methodWriter, globals, scopeTable); - getRightNode().write(classWriter, methodWriter, globals, scopeTable); + getLeftNode().write(classWriter, methodWriter, scopeTable); + getRightNode().write(classWriter, methodWriter, scopeTable); if (binaryType == def.class || (shiftType != null && shiftType == def.class)) { // def calls adopt the wanted return value. 
if there was a narrowing cast, diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/BlockNode.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/BlockNode.java index 65bc2adc118ab..11fb02f1f3f26 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/BlockNode.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/BlockNode.java @@ -20,7 +20,6 @@ package org.elasticsearch.painless.ir; import org.elasticsearch.painless.ClassWriter; -import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.MethodWriter; import org.elasticsearch.painless.symbol.ScopeTable; @@ -65,11 +64,11 @@ public int getStatementCount() { /* ---- end node data ---- */ @Override - protected void write(ClassWriter classWriter, MethodWriter methodWriter, Globals globals, ScopeTable scopeTable) { + protected void write(ClassWriter classWriter, MethodWriter methodWriter, ScopeTable scopeTable) { for (StatementNode statementNode : statementNodes) { statementNode.continueLabel = continueLabel; statementNode.breakLabel = breakLabel; - statementNode.write(classWriter, methodWriter, globals, scopeTable); + statementNode.write(classWriter, methodWriter, scopeTable); } } } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/BooleanNode.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/BooleanNode.java index 2d2ab280e38c8..7ac8a431a5672 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/BooleanNode.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/BooleanNode.java @@ -20,7 +20,6 @@ package org.elasticsearch.painless.ir; import org.elasticsearch.painless.ClassWriter; -import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.MethodWriter; import org.elasticsearch.painless.Operation; import org.elasticsearch.painless.symbol.ScopeTable; @@ -44,16 +43,16 @@ public Operation 
getOperation() { /* ---- end node data ---- */ @Override - protected void write(ClassWriter classWriter, MethodWriter methodWriter, Globals globals, ScopeTable scopeTable) { + protected void write(ClassWriter classWriter, MethodWriter methodWriter, ScopeTable scopeTable) { methodWriter.writeDebugInfo(location); if (operation == Operation.AND) { Label fals = new Label(); Label end = new Label(); - getLeftNode().write(classWriter, methodWriter, globals, scopeTable); + getLeftNode().write(classWriter, methodWriter, scopeTable); methodWriter.ifZCmp(Opcodes.IFEQ, fals); - getRightNode().write(classWriter, methodWriter, globals, scopeTable); + getRightNode().write(classWriter, methodWriter, scopeTable); methodWriter.ifZCmp(Opcodes.IFEQ, fals); methodWriter.push(true); @@ -66,9 +65,9 @@ protected void write(ClassWriter classWriter, MethodWriter methodWriter, Globals Label fals = new Label(); Label end = new Label(); - getLeftNode().write(classWriter, methodWriter, globals, scopeTable); + getLeftNode().write(classWriter, methodWriter, scopeTable); methodWriter.ifZCmp(Opcodes.IFNE, tru); - getRightNode().write(classWriter, methodWriter, globals, scopeTable); + getRightNode().write(classWriter, methodWriter, scopeTable); methodWriter.ifZCmp(Opcodes.IFEQ, fals); methodWriter.mark(tru); diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/BraceNode.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/BraceNode.java index dbc6f7cd95779..74982d4455dc8 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/BraceNode.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/BraceNode.java @@ -20,16 +20,15 @@ package org.elasticsearch.painless.ir; import org.elasticsearch.painless.ClassWriter; -import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.MethodWriter; import org.elasticsearch.painless.symbol.ScopeTable; public class BraceNode extends BinaryNode { @Override - 
protected void write(ClassWriter classWriter, MethodWriter methodWriter, Globals globals, ScopeTable scopeTable) { - getLeftNode().write(classWriter, methodWriter, globals, scopeTable); - getRightNode().write(classWriter, methodWriter, globals, scopeTable); + protected void write(ClassWriter classWriter, MethodWriter methodWriter, ScopeTable scopeTable) { + getLeftNode().write(classWriter, methodWriter, scopeTable); + getRightNode().write(classWriter, methodWriter, scopeTable); } @Override @@ -38,18 +37,18 @@ protected int accessElementCount() { } @Override - protected void setup(ClassWriter classWriter, MethodWriter methodWriter, Globals globals, ScopeTable scopeTable) { - getLeftNode().write(classWriter, methodWriter, globals, scopeTable); - getRightNode().setup(classWriter, methodWriter, globals, scopeTable); + protected void setup(ClassWriter classWriter, MethodWriter methodWriter, ScopeTable scopeTable) { + getLeftNode().write(classWriter, methodWriter, scopeTable); + getRightNode().setup(classWriter, methodWriter, scopeTable); } @Override - protected void load(ClassWriter classWriter, MethodWriter methodWriter, Globals globals, ScopeTable scopeTable) { - getRightNode().load(classWriter, methodWriter, globals, scopeTable); + protected void load(ClassWriter classWriter, MethodWriter methodWriter, ScopeTable scopeTable) { + getRightNode().load(classWriter, methodWriter, scopeTable); } @Override - protected void store(ClassWriter classWriter, MethodWriter methodWriter, Globals globals, ScopeTable scopeTable) { - getRightNode().store(classWriter, methodWriter, globals, scopeTable); + protected void store(ClassWriter classWriter, MethodWriter methodWriter, ScopeTable scopeTable) { + getRightNode().store(classWriter, methodWriter, scopeTable); } } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/BraceSubDefNode.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/BraceSubDefNode.java index 
ebb99785106cb..26ab7533a7ac9 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/BraceSubDefNode.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/BraceSubDefNode.java @@ -21,7 +21,6 @@ import org.elasticsearch.painless.ClassWriter; import org.elasticsearch.painless.DefBootstrap; -import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.MethodWriter; import org.elasticsearch.painless.symbol.ScopeTable; import org.objectweb.asm.Type; @@ -29,9 +28,9 @@ public class BraceSubDefNode extends UnaryNode { @Override - protected void write(ClassWriter classWriter, MethodWriter methodWriter, Globals globals, ScopeTable scopeTable) { - setup(classWriter, methodWriter, globals, scopeTable); - load(classWriter, methodWriter, globals, scopeTable); + protected void write(ClassWriter classWriter, MethodWriter methodWriter, ScopeTable scopeTable) { + setup(classWriter, methodWriter, scopeTable); + load(classWriter, methodWriter, scopeTable); } @Override @@ -40,16 +39,16 @@ protected int accessElementCount() { } @Override - protected void setup(ClassWriter classWriter, MethodWriter methodWriter, Globals globals, ScopeTable scopeTable) { + protected void setup(ClassWriter classWriter, MethodWriter methodWriter, ScopeTable scopeTable) { methodWriter.dup(); - getChildNode().write(classWriter, methodWriter, globals, scopeTable); + getChildNode().write(classWriter, methodWriter, scopeTable); Type methodType = Type.getMethodType(MethodWriter.getType( getChildNode().getExpressionType()), Type.getType(Object.class), MethodWriter.getType(getChildNode().getExpressionType())); methodWriter.invokeDefCall("normalizeIndex", methodType, DefBootstrap.INDEX_NORMALIZE); } @Override - protected void load(ClassWriter classWriter, MethodWriter methodWriter, Globals globals, ScopeTable scopeTable) { + protected void load(ClassWriter classWriter, MethodWriter methodWriter, ScopeTable scopeTable) { 
methodWriter.writeDebugInfo(location); Type methodType = Type.getMethodType(MethodWriter.getType( @@ -58,7 +57,7 @@ protected void load(ClassWriter classWriter, MethodWriter methodWriter, Globals } @Override - protected void store(ClassWriter classWriter, MethodWriter methodWriter, Globals globals, ScopeTable scopeTable) { + protected void store(ClassWriter classWriter, MethodWriter methodWriter, ScopeTable scopeTable) { methodWriter.writeDebugInfo(location); Type methodType = Type.getMethodType(Type.getType(void.class), Type.getType(Object.class), diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/BraceSubNode.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/BraceSubNode.java index 181b35c0598a0..dce0fe34722ff 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/BraceSubNode.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/BraceSubNode.java @@ -20,7 +20,6 @@ package org.elasticsearch.painless.ir; import org.elasticsearch.painless.ClassWriter; -import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.MethodWriter; import org.elasticsearch.painless.symbol.ScopeTable; import org.objectweb.asm.Label; @@ -29,9 +28,9 @@ public class BraceSubNode extends UnaryNode { @Override - protected void write(ClassWriter classWriter, MethodWriter methodWriter, Globals globals, ScopeTable scopeTable) { - setup(classWriter, methodWriter, globals, scopeTable); - load(classWriter, methodWriter, globals, scopeTable); + protected void write(ClassWriter classWriter, MethodWriter methodWriter, ScopeTable scopeTable) { + setup(classWriter, methodWriter, scopeTable); + load(classWriter, methodWriter, scopeTable); } @Override @@ -40,8 +39,8 @@ protected int accessElementCount() { } @Override - protected void setup(ClassWriter classWriter, MethodWriter methodWriter, Globals globals, ScopeTable scopeTable) { - getChildNode().write(classWriter, methodWriter, globals, 
scopeTable); + protected void setup(ClassWriter classWriter, MethodWriter methodWriter, ScopeTable scopeTable) { + getChildNode().write(classWriter, methodWriter, scopeTable); Label noFlip = new Label(); methodWriter.dup(); @@ -54,13 +53,13 @@ protected void setup(ClassWriter classWriter, MethodWriter methodWriter, Globals } @Override - protected void load(ClassWriter classWriter, MethodWriter methodWriter, Globals globals, ScopeTable scopeTable) { + protected void load(ClassWriter classWriter, MethodWriter methodWriter, ScopeTable scopeTable) { methodWriter.writeDebugInfo(location); methodWriter.arrayLoad(MethodWriter.getType(getExpressionType())); } @Override - protected void store(ClassWriter classWriter, MethodWriter methodWriter, Globals globals, ScopeTable scopeTable) { + protected void store(ClassWriter classWriter, MethodWriter methodWriter, ScopeTable scopeTable) { methodWriter.writeDebugInfo(location); methodWriter.arrayStore(MethodWriter.getType(getExpressionType())); } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/BreakNode.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/BreakNode.java index 61f5d2e507cf0..310ad68929b86 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/BreakNode.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/BreakNode.java @@ -20,14 +20,13 @@ package org.elasticsearch.painless.ir; import org.elasticsearch.painless.ClassWriter; -import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.MethodWriter; import org.elasticsearch.painless.symbol.ScopeTable; public class BreakNode extends StatementNode { @Override - protected void write(ClassWriter classWriter, MethodWriter methodWriter, Globals globals, ScopeTable scopeTable) { + protected void write(ClassWriter classWriter, MethodWriter methodWriter, ScopeTable scopeTable) { methodWriter.goTo(breakLabel); } } diff --git 
a/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/CallNode.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/CallNode.java index 91138a7c8506b..eef4b94082df4 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/CallNode.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/CallNode.java @@ -20,15 +20,14 @@ package org.elasticsearch.painless.ir; import org.elasticsearch.painless.ClassWriter; -import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.MethodWriter; import org.elasticsearch.painless.symbol.ScopeTable; public class CallNode extends BinaryNode { @Override - protected void write(ClassWriter classWriter, MethodWriter methodWriter, Globals globals, ScopeTable scopeTable) { - getLeftNode().write(classWriter, methodWriter, globals, scopeTable); - getRightNode().write(classWriter, methodWriter, globals, scopeTable); + protected void write(ClassWriter classWriter, MethodWriter methodWriter, ScopeTable scopeTable) { + getLeftNode().write(classWriter, methodWriter, scopeTable); + getRightNode().write(classWriter, methodWriter, scopeTable); } } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/CallSubDefNode.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/CallSubDefNode.java index 5e9124d1eb3e3..a12988c13a79b 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/CallSubDefNode.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/CallSubDefNode.java @@ -21,7 +21,6 @@ import org.elasticsearch.painless.ClassWriter; import org.elasticsearch.painless.DefBootstrap; -import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.MethodWriter; import org.elasticsearch.painless.symbol.ScopeTable; import org.objectweb.asm.Type; @@ -73,11 +72,11 @@ public List> getTypeParameters() { /* ---- end node data ---- */ @Override - protected void 
write(ClassWriter classWriter, MethodWriter methodWriter, Globals globals, ScopeTable scopeTable) { + protected void write(ClassWriter classWriter, MethodWriter methodWriter, ScopeTable scopeTable) { methodWriter.writeDebugInfo(location); for (ExpressionNode argumentNode : getArgumentNodes()) { - argumentNode.write(classWriter, methodWriter, globals, scopeTable); + argumentNode.write(classWriter, methodWriter, scopeTable); } // create method type from return value and arguments diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/CallSubNode.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/CallSubNode.java index 961973c2ade12..e8f4a7e640cb3 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/CallSubNode.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/CallSubNode.java @@ -20,7 +20,6 @@ package org.elasticsearch.painless.ir; import org.elasticsearch.painless.ClassWriter; -import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.MethodWriter; import org.elasticsearch.painless.lookup.PainlessMethod; import org.elasticsearch.painless.symbol.ScopeTable; @@ -51,7 +50,7 @@ public Class getBox() { /* ---- end node data ---- */ @Override - protected void write(ClassWriter classWriter, MethodWriter methodWriter, Globals globals, ScopeTable scopeTable) { + protected void write(ClassWriter classWriter, MethodWriter methodWriter, ScopeTable scopeTable) { methodWriter.writeDebugInfo(location); if (box.isPrimitive()) { @@ -59,7 +58,7 @@ protected void write(ClassWriter classWriter, MethodWriter methodWriter, Globals } for (ExpressionNode argumentNode : getArgumentNodes()) { - argumentNode.write(classWriter, methodWriter, globals, scopeTable); + argumentNode.write(classWriter, methodWriter, scopeTable); } methodWriter.invokeMethodCall(method); diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/CapturingFuncRefNode.java 
b/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/CapturingFuncRefNode.java index 549fafa665943..1c4701fea59f9 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/CapturingFuncRefNode.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/CapturingFuncRefNode.java @@ -22,7 +22,6 @@ import org.elasticsearch.painless.ClassWriter; import org.elasticsearch.painless.DefBootstrap; import org.elasticsearch.painless.FunctionRef; -import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.MethodWriter; import org.elasticsearch.painless.symbol.ScopeTable; import org.elasticsearch.painless.symbol.ScopeTable.Variable; @@ -73,7 +72,7 @@ public String getPointer() { /* ---- end node data ---- */ @Override - protected void write(ClassWriter classWriter, MethodWriter methodWriter, Globals globals, ScopeTable scopeTable) { + protected void write(ClassWriter classWriter, MethodWriter methodWriter, ScopeTable scopeTable) { methodWriter.writeDebugInfo(location); Variable captured = scopeTable.getVariable(capturedName); if (pointer != null) { diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/CastNode.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/CastNode.java index b6475c5049dde..33e9f43ed5624 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/CastNode.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/CastNode.java @@ -20,7 +20,6 @@ package org.elasticsearch.painless.ir; import org.elasticsearch.painless.ClassWriter; -import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.MethodWriter; import org.elasticsearch.painless.lookup.PainlessCast; import org.elasticsearch.painless.symbol.ScopeTable; @@ -42,8 +41,8 @@ public PainlessCast getCast() { /* ---- end node data ---- */ @Override - protected void write(ClassWriter classWriter, MethodWriter methodWriter, Globals globals, 
ScopeTable scopeTable) { - getChildNode().write(classWriter, methodWriter, globals, scopeTable); + protected void write(ClassWriter classWriter, MethodWriter methodWriter, ScopeTable scopeTable) { + getChildNode().write(classWriter, methodWriter, scopeTable); methodWriter.writeDebugInfo(location); methodWriter.writeCast(cast); } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/CatchNode.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/CatchNode.java index 224a285f4d328..31d8bc568e063 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/CatchNode.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/CatchNode.java @@ -20,7 +20,6 @@ package org.elasticsearch.painless.ir; import org.elasticsearch.painless.ClassWriter; -import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.MethodWriter; import org.elasticsearch.painless.symbol.ScopeTable; import org.elasticsearch.painless.symbol.ScopeTable.Variable; @@ -57,10 +56,10 @@ public BlockNode getBlockNode() { Label exception = null; @Override - protected void write(ClassWriter classWriter, MethodWriter methodWriter, Globals globals, ScopeTable scopeTable) { + protected void write(ClassWriter classWriter, MethodWriter methodWriter, ScopeTable scopeTable) { methodWriter.writeStatementOffset(location); - declarationNode.write(classWriter, methodWriter, globals, scopeTable); + declarationNode.write(classWriter, methodWriter, scopeTable); Variable variable = scopeTable.getVariable(declarationNode.getName()); Label jump = new Label(); @@ -71,7 +70,7 @@ protected void write(ClassWriter classWriter, MethodWriter methodWriter, Globals if (blockNode != null) { blockNode.continueLabel = continueLabel; blockNode.breakLabel = breakLabel; - blockNode.write(classWriter, methodWriter, globals, scopeTable); + blockNode.write(classWriter, methodWriter, scopeTable); } methodWriter.visitTryCatchBlock(begin, end, jump, 
variable.getAsmType().getInternalName()); diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/ClassNode.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/ClassNode.java index fe2fb0145d888..39530e87471c8 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/ClassNode.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/ClassNode.java @@ -20,23 +20,20 @@ package org.elasticsearch.painless.ir; import org.elasticsearch.painless.ClassWriter; -import org.elasticsearch.painless.Constant; -import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.Location; import org.elasticsearch.painless.MethodWriter; import org.elasticsearch.painless.ScriptClassInfo; -import org.elasticsearch.painless.WriterConstants; import org.elasticsearch.painless.symbol.ScopeTable; import org.elasticsearch.painless.symbol.ScriptRoot; import org.objectweb.asm.ClassVisitor; import org.objectweb.asm.Opcodes; import org.objectweb.asm.Type; +import org.objectweb.asm.commons.Method; import org.objectweb.asm.util.Printer; import java.lang.invoke.MethodType; import java.util.ArrayList; import java.util.BitSet; -import java.util.Collection; import java.util.List; import static org.elasticsearch.painless.WriterConstants.BASE_INTERFACE_TYPE; @@ -48,6 +45,7 @@ public class ClassNode extends IRNode { private final List fieldNodes = new ArrayList<>(); private final List functionNodes = new ArrayList<>(); + private final BlockNode clinitBlockNode; public void addFieldNode(FieldNode fieldNode) { fieldNodes.add(fieldNode); @@ -64,7 +62,11 @@ public void addFunctionNode(FunctionNode functionNode) { public List getFunctionsNodes() { return functionNodes; } - + + public BlockNode getClinitBlockNode() { + return clinitBlockNode; + } + /* ---- end tree structure, begin node data ---- */ private ScriptClassInfo scriptClassInfo; @@ -115,11 +117,16 @@ public ScriptRoot getScriptRoot() { /* ---- end node 
data ---- */ - protected Globals globals; + public ClassNode() { + clinitBlockNode = new BlockNode(); + clinitBlockNode.setLocation(new Location("internal$clinit$blocknode", 0)); + clinitBlockNode.setAllEscape(true); + clinitBlockNode.setStatementCount(1); + } public byte[] write() { - globals = new Globals(new BitSet(sourceText.length())); - scriptRoot.addStaticConstant("$STATEMENTS", globals.getStatements()); + BitSet statements = new BitSet(sourceText.length()); + scriptRoot.addStaticConstant("$STATEMENTS", statements); // Create the ClassWriter. @@ -129,7 +136,7 @@ public byte[] write() { String className = CLASS_TYPE.getInternalName(); String[] classInterfaces = new String[] { interfaceBase }; - ClassWriter classWriter = new ClassWriter(scriptRoot.getCompilerSettings(), globals.getStatements(), debugStream, + ClassWriter classWriter = new ClassWriter(scriptRoot.getCompilerSettings(), statements, debugStream, scriptClassInfo.getBaseClass(), classFrames, classAccess, className, classInterfaces); ClassVisitor classVisitor = classWriter.getClassVisitor(); classVisitor.visitSource(Location.computeSourceName(name), null); @@ -152,34 +159,26 @@ public byte[] write() { constructor.returnValue(); constructor.endMethod(); + if (clinitBlockNode.getStatementsNodes().isEmpty() == false) { + MethodWriter methodWriter = classWriter.newMethodWriter( + Opcodes.ACC_PUBLIC | Opcodes.ACC_STATIC, + new Method("", Type.getType(void.class), new Type[0])); + clinitBlockNode.write(classWriter, methodWriter, new ScopeTable()); + methodWriter.returnValue(); + methodWriter.endMethod(); + } + // Write all fields: for (FieldNode fieldNode : fieldNodes) { - fieldNode.write(classWriter, null, null, null); + fieldNode.write(classWriter, null, null); } // Write all functions: for (FunctionNode functionNode : functionNodes) { - functionNode.write(classWriter, null, globals, new ScopeTable()); - } - - // Write the constants - if (false == globals.getConstantInitializers().isEmpty()) { - 
Collection inits = globals.getConstantInitializers().values(); - - // Initialize the constants in a static initializer - final MethodWriter clinit = new MethodWriter(Opcodes.ACC_STATIC, - WriterConstants.CLINIT, classVisitor, globals.getStatements(), scriptRoot.getCompilerSettings()); - clinit.visitCode(); - for (Constant constant : inits) { - constant.initializer.accept(clinit); - clinit.putStatic(CLASS_TYPE, constant.name, constant.type); - } - clinit.returnValue(); - clinit.endMethod(); + functionNode.write(classWriter, null, new ScopeTable()); } // End writing the class and store the generated bytes. - classVisitor.visitEnd(); return classWriter.getClassBytes(); } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/ComparisonNode.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/ComparisonNode.java index f21a210953f7b..7d410258b3dfe 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/ComparisonNode.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/ComparisonNode.java @@ -21,7 +21,6 @@ import org.elasticsearch.painless.ClassWriter; import org.elasticsearch.painless.DefBootstrap; -import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.MethodWriter; import org.elasticsearch.painless.Operation; import org.elasticsearch.painless.lookup.PainlessLookupUtility; @@ -63,13 +62,13 @@ public String getComparisonCanonicalTypeName() { /* ---- end node data ---- */ @Override - protected void write(ClassWriter classWriter, MethodWriter methodWriter, Globals globals, ScopeTable scopeTable) { + protected void write(ClassWriter classWriter, MethodWriter methodWriter, ScopeTable scopeTable) { methodWriter.writeDebugInfo(location); - getLeftNode().write(classWriter, methodWriter, globals, scopeTable); + getLeftNode().write(classWriter, methodWriter, scopeTable); if (getRightNode() instanceof NullNode == false) { - getRightNode().write(classWriter, 
methodWriter, globals, scopeTable); + getRightNode().write(classWriter, methodWriter, scopeTable); } Label jump = new Label(); diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/ConditionalNode.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/ConditionalNode.java index f62ceb6848960..7a2be602f9a2f 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/ConditionalNode.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/ConditionalNode.java @@ -20,7 +20,6 @@ package org.elasticsearch.painless.ir; import org.elasticsearch.painless.ClassWriter; -import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.MethodWriter; import org.elasticsearch.painless.symbol.ScopeTable; import org.objectweb.asm.Label; @@ -43,19 +42,19 @@ public ExpressionNode getConditionNode() { /* ---- end tree structure ---- */ @Override - protected void write(ClassWriter classWriter, MethodWriter methodWriter, Globals globals, ScopeTable scopeTable) { + protected void write(ClassWriter classWriter, MethodWriter methodWriter, ScopeTable scopeTable) { methodWriter.writeDebugInfo(location); Label fals = new Label(); Label end = new Label(); - conditionNode.write(classWriter, methodWriter, globals, scopeTable); + conditionNode.write(classWriter, methodWriter, scopeTable); methodWriter.ifZCmp(Opcodes.IFEQ, fals); - getLeftNode().write(classWriter, methodWriter, globals, scopeTable); + getLeftNode().write(classWriter, methodWriter, scopeTable); methodWriter.goTo(end); methodWriter.mark(fals); - getRightNode().write(classWriter, methodWriter, globals, scopeTable); + getRightNode().write(classWriter, methodWriter, scopeTable); methodWriter.mark(end); } } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/ConstantNode.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/ConstantNode.java index 46052bdf1eee3..4540480cc6e7b 100644 --- 
a/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/ConstantNode.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/ConstantNode.java @@ -20,7 +20,6 @@ package org.elasticsearch.painless.ir; import org.elasticsearch.painless.ClassWriter; -import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.MethodWriter; import org.elasticsearch.painless.symbol.ScopeTable; @@ -41,7 +40,7 @@ public Object getConstant() { /* ---- end node data ---- */ @Override - protected void write(ClassWriter classWriter, MethodWriter methodWriter, Globals globals, ScopeTable scopeTable) { + protected void write(ClassWriter classWriter, MethodWriter methodWriter, ScopeTable scopeTable) { if (constant instanceof String) methodWriter.push((String)constant); else if (constant instanceof Double) methodWriter.push((double)constant); else if (constant instanceof Float) methodWriter.push((float)constant); diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/ContinueNode.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/ContinueNode.java index f62a4df7261d5..7691b9858439a 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/ContinueNode.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/ContinueNode.java @@ -20,14 +20,13 @@ package org.elasticsearch.painless.ir; import org.elasticsearch.painless.ClassWriter; -import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.MethodWriter; import org.elasticsearch.painless.symbol.ScopeTable; public class ContinueNode extends StatementNode { @Override - protected void write(ClassWriter classWriter, MethodWriter methodWriter, Globals globals, ScopeTable scopeTable) { + protected void write(ClassWriter classWriter, MethodWriter methodWriter, ScopeTable scopeTable) { methodWriter.goTo(continueLabel); } } diff --git 
a/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/DeclarationBlockNode.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/DeclarationBlockNode.java index c490364448fbc..42f0bb3e38572 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/DeclarationBlockNode.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/DeclarationBlockNode.java @@ -20,7 +20,6 @@ package org.elasticsearch.painless.ir; import org.elasticsearch.painless.ClassWriter; -import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.MethodWriter; import org.elasticsearch.painless.symbol.ScopeTable; @@ -44,9 +43,9 @@ public List getDeclarationsNodes() { /* ---- end tree structure ---- */ @Override - protected void write(ClassWriter classWriter, MethodWriter methodWriter, Globals globals, ScopeTable scopeTable) { + protected void write(ClassWriter classWriter, MethodWriter methodWriter, ScopeTable scopeTable) { for (DeclarationNode declarationNode : declarationNodes) { - declarationNode.write(classWriter, methodWriter, globals, scopeTable); + declarationNode.write(classWriter, methodWriter, scopeTable); } } } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/DeclarationNode.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/DeclarationNode.java index 7c62847697d95..d653101df1b07 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/DeclarationNode.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/DeclarationNode.java @@ -20,7 +20,6 @@ package org.elasticsearch.painless.ir; import org.elasticsearch.painless.ClassWriter; -import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.MethodWriter; import org.elasticsearch.painless.lookup.PainlessLookupUtility; import org.elasticsearch.painless.symbol.ScopeTable; @@ -78,7 +77,7 @@ public boolean requiresDefault() { /* ---- end node data 
---- */ @Override - protected void write(ClassWriter classWriter, MethodWriter methodWriter, Globals globals, ScopeTable scopeTable) { + protected void write(ClassWriter classWriter, MethodWriter methodWriter, ScopeTable scopeTable) { methodWriter.writeStatementOffset(location); Variable variable = scopeTable.defineVariable(declarationType, name); @@ -101,7 +100,7 @@ protected void write(ClassWriter classWriter, MethodWriter methodWriter, Globals } } } else { - expressionNode.write(classWriter, methodWriter, globals, scopeTable); + expressionNode.write(classWriter, methodWriter, scopeTable); } methodWriter.visitVarInsn(variable.getAsmType().getOpcode(Opcodes.ISTORE), variable.getSlot()); diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/DoWhileLoopNode.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/DoWhileLoopNode.java index 89964578e9285..59a31e6c1db0f 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/DoWhileLoopNode.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/DoWhileLoopNode.java @@ -20,7 +20,6 @@ package org.elasticsearch.painless.ir; import org.elasticsearch.painless.ClassWriter; -import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.MethodWriter; import org.elasticsearch.painless.symbol.ScopeTable; import org.elasticsearch.painless.symbol.ScopeTable.Variable; @@ -30,7 +29,7 @@ public class DoWhileLoopNode extends LoopNode { @Override - protected void write(ClassWriter classWriter, MethodWriter methodWriter, Globals globals, ScopeTable scopeTable) { + protected void write(ClassWriter classWriter, MethodWriter methodWriter, ScopeTable scopeTable) { methodWriter.writeStatementOffset(location); scopeTable = scopeTable.newScope(); @@ -43,12 +42,12 @@ protected void write(ClassWriter classWriter, MethodWriter methodWriter, Globals getBlockNode().continueLabel = begin; getBlockNode().breakLabel = end; - 
getBlockNode().write(classWriter, methodWriter, globals, scopeTable); + getBlockNode().write(classWriter, methodWriter, scopeTable); methodWriter.mark(begin); if (isContinuous() == false) { - getConditionNode().write(classWriter, methodWriter, globals, scopeTable); + getConditionNode().write(classWriter, methodWriter, scopeTable); methodWriter.ifZCmp(Opcodes.IFEQ, end); } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/DotNode.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/DotNode.java index c50fa3680d5a6..73e2b4e8c01ad 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/DotNode.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/DotNode.java @@ -20,16 +20,15 @@ package org.elasticsearch.painless.ir; import org.elasticsearch.painless.ClassWriter; -import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.MethodWriter; import org.elasticsearch.painless.symbol.ScopeTable; public class DotNode extends BinaryNode { @Override - protected void write(ClassWriter classWriter, MethodWriter methodWriter, Globals globals, ScopeTable scopeTable) { - getLeftNode().write(classWriter, methodWriter, globals, scopeTable); - getRightNode().write(classWriter, methodWriter, globals, scopeTable); + protected void write(ClassWriter classWriter, MethodWriter methodWriter, ScopeTable scopeTable) { + getLeftNode().write(classWriter, methodWriter, scopeTable); + getRightNode().write(classWriter, methodWriter, scopeTable); } @Override @@ -37,18 +36,18 @@ protected int accessElementCount() { return getRightNode().accessElementCount(); } - protected void setup(ClassWriter classWriter, MethodWriter methodWriter, Globals globals, ScopeTable scopeTable) { - getLeftNode().write(classWriter, methodWriter, globals, scopeTable); - getRightNode().setup(classWriter, methodWriter, globals, scopeTable); + protected void setup(ClassWriter classWriter, MethodWriter methodWriter, 
ScopeTable scopeTable) { + getLeftNode().write(classWriter, methodWriter, scopeTable); + getRightNode().setup(classWriter, methodWriter, scopeTable); } @Override - protected void load(ClassWriter classWriter, MethodWriter methodWriter, Globals globals, ScopeTable scopeTable) { - getRightNode().load(classWriter, methodWriter, globals, scopeTable); + protected void load(ClassWriter classWriter, MethodWriter methodWriter, ScopeTable scopeTable) { + getRightNode().load(classWriter, methodWriter, scopeTable); } @Override - protected void store(ClassWriter classWriter, MethodWriter methodWriter, Globals globals, ScopeTable scopeTable) { - getRightNode().store(classWriter, methodWriter, globals, scopeTable); + protected void store(ClassWriter classWriter, MethodWriter methodWriter, ScopeTable scopeTable) { + getRightNode().store(classWriter, methodWriter, scopeTable); } } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/DotSubArrayLengthNode.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/DotSubArrayLengthNode.java index 4687ee14c57af..f8e717f54576e 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/DotSubArrayLengthNode.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/DotSubArrayLengthNode.java @@ -20,14 +20,13 @@ package org.elasticsearch.painless.ir; import org.elasticsearch.painless.ClassWriter; -import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.MethodWriter; import org.elasticsearch.painless.symbol.ScopeTable; public class DotSubArrayLengthNode extends ExpressionNode { @Override - protected void write(ClassWriter classWriter, MethodWriter methodWriter, Globals globals, ScopeTable scopeTable) { + protected void write(ClassWriter classWriter, MethodWriter methodWriter, ScopeTable scopeTable) { methodWriter.writeDebugInfo(location); methodWriter.arrayLength(); } diff --git 
a/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/DotSubDefNode.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/DotSubDefNode.java index 1e4e7820bfd2f..28b7a78f74f69 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/DotSubDefNode.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/DotSubDefNode.java @@ -21,7 +21,6 @@ import org.elasticsearch.painless.ClassWriter; import org.elasticsearch.painless.DefBootstrap; -import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.MethodWriter; import org.elasticsearch.painless.symbol.ScopeTable; import org.objectweb.asm.Type; @@ -43,7 +42,7 @@ public String getValue() { /* ---- end node data ---- */ @Override - protected void write(ClassWriter classWriter, MethodWriter methodWriter, Globals globals, ScopeTable scopeTable) { + protected void write(ClassWriter classWriter, MethodWriter methodWriter, ScopeTable scopeTable) { methodWriter.writeDebugInfo(location); Type methodType = Type.getMethodType(MethodWriter.getType(getExpressionType()), Type.getType(Object.class)); @@ -56,12 +55,12 @@ protected int accessElementCount() { } @Override - protected void setup(ClassWriter classWriter, MethodWriter methodWriter, Globals globals, ScopeTable scopeTable) { + protected void setup(ClassWriter classWriter, MethodWriter methodWriter, ScopeTable scopeTable) { // do nothing } @Override - protected void load(ClassWriter classWriter, MethodWriter methodWriter, Globals globals, ScopeTable scopeTable) { + protected void load(ClassWriter classWriter, MethodWriter methodWriter, ScopeTable scopeTable) { methodWriter.writeDebugInfo(location); Type methodType = Type.getMethodType(MethodWriter.getType(getExpressionType()), Type.getType(Object.class)); @@ -69,7 +68,7 @@ protected void load(ClassWriter classWriter, MethodWriter methodWriter, Globals } @Override - protected void store(ClassWriter classWriter, MethodWriter methodWriter, 
Globals globals, ScopeTable scopeTable) { + protected void store(ClassWriter classWriter, MethodWriter methodWriter, ScopeTable scopeTable) { methodWriter.writeDebugInfo(location); Type methodType = Type.getMethodType( diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/DotSubNode.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/DotSubNode.java index faab60a07ee1c..7a01526a12e03 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/DotSubNode.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/DotSubNode.java @@ -20,7 +20,6 @@ package org.elasticsearch.painless.ir; import org.elasticsearch.painless.ClassWriter; -import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.MethodWriter; import org.elasticsearch.painless.lookup.PainlessField; import org.elasticsearch.painless.symbol.ScopeTable; @@ -43,7 +42,7 @@ public PainlessField getField() { /* ---- end node data ---- */ @Override - protected void write(ClassWriter classWriter, MethodWriter methodWriter, Globals globals, ScopeTable scopeTable) { + protected void write(ClassWriter classWriter, MethodWriter methodWriter, ScopeTable scopeTable) { methodWriter.writeDebugInfo(location); if (java.lang.reflect.Modifier.isStatic(field.javaField.getModifiers())) { @@ -61,12 +60,12 @@ protected int accessElementCount() { } @Override - protected void setup(ClassWriter classWriter, MethodWriter methodWriter, Globals globals, ScopeTable scopeTable) { + protected void setup(ClassWriter classWriter, MethodWriter methodWriter, ScopeTable scopeTable) { // Do nothing. 
} @Override - protected void load(ClassWriter classWriter, MethodWriter methodWriter, Globals globals, ScopeTable scopeTable) { + protected void load(ClassWriter classWriter, MethodWriter methodWriter, ScopeTable scopeTable) { methodWriter.writeDebugInfo(location); if (java.lang.reflect.Modifier.isStatic(field.javaField.getModifiers())) { @@ -79,7 +78,7 @@ protected void load(ClassWriter classWriter, MethodWriter methodWriter, Globals } @Override - protected void store(ClassWriter classWriter, MethodWriter methodWriter, Globals globals, ScopeTable scopeTable) { + protected void store(ClassWriter classWriter, MethodWriter methodWriter, ScopeTable scopeTable) { methodWriter.writeDebugInfo(location); if (java.lang.reflect.Modifier.isStatic(field.javaField.getModifiers())) { diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/DotSubShortcutNode.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/DotSubShortcutNode.java index 8552667e720e4..45c2448dda5b0 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/DotSubShortcutNode.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/DotSubShortcutNode.java @@ -20,7 +20,6 @@ package org.elasticsearch.painless.ir; import org.elasticsearch.painless.ClassWriter; -import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.MethodWriter; import org.elasticsearch.painless.lookup.PainlessMethod; import org.elasticsearch.painless.symbol.ScopeTable; @@ -51,7 +50,7 @@ public PainlessMethod getGetter() { /* ---- end node data ---- */ @Override - protected void write(ClassWriter classWriter, MethodWriter methodWriter, Globals globals, ScopeTable scopeTable) { + protected void write(ClassWriter classWriter, MethodWriter methodWriter, ScopeTable scopeTable) { methodWriter.writeDebugInfo(location); methodWriter.invokeMethodCall(getter); @@ -67,12 +66,12 @@ protected int accessElementCount() { } @Override - protected void 
setup(ClassWriter classWriter, MethodWriter methodWriter, Globals globals, ScopeTable scopeTable) { + protected void setup(ClassWriter classWriter, MethodWriter methodWriter, ScopeTable scopeTable) { // do nothing } @Override - protected void load(ClassWriter classWriter, MethodWriter methodWriter, Globals globals, ScopeTable scopeTable) { + protected void load(ClassWriter classWriter, MethodWriter methodWriter, ScopeTable scopeTable) { methodWriter.writeDebugInfo(location); methodWriter.invokeMethodCall(getter); @@ -83,7 +82,7 @@ protected void load(ClassWriter classWriter, MethodWriter methodWriter, Globals } @Override - protected void store(ClassWriter classWriter, MethodWriter methodWriter, Globals globals, ScopeTable scopeTable) { + protected void store(ClassWriter classWriter, MethodWriter methodWriter, ScopeTable scopeTable) { methodWriter.writeDebugInfo(location); methodWriter.invokeMethodCall(setter); diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/ElvisNode.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/ElvisNode.java index c5ec6959410fa..46341711cae6f 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/ElvisNode.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/ElvisNode.java @@ -20,7 +20,6 @@ package org.elasticsearch.painless.ir; import org.elasticsearch.painless.ClassWriter; -import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.MethodWriter; import org.elasticsearch.painless.symbol.ScopeTable; import org.objectweb.asm.Label; @@ -28,16 +27,16 @@ public class ElvisNode extends BinaryNode { @Override - protected void write(ClassWriter classWriter, MethodWriter methodWriter, Globals globals, ScopeTable scopeTable) { + protected void write(ClassWriter classWriter, MethodWriter methodWriter, ScopeTable scopeTable) { methodWriter.writeDebugInfo(location); Label end = new Label(); - getLeftNode().write(classWriter, 
methodWriter, globals, scopeTable); + getLeftNode().write(classWriter, methodWriter, scopeTable); methodWriter.dup(); methodWriter.ifNonNull(end); methodWriter.pop(); - getRightNode().write(classWriter, methodWriter, globals, scopeTable); + getRightNode().write(classWriter, methodWriter, scopeTable); methodWriter.mark(end); } } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/FieldNode.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/FieldNode.java index 95e113cbf57ba..2347cabbc9b6d 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/FieldNode.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/FieldNode.java @@ -20,7 +20,6 @@ package org.elasticsearch.painless.ir; import org.elasticsearch.painless.ClassWriter; -import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.MethodWriter; import org.elasticsearch.painless.lookup.PainlessLookupUtility; import org.elasticsearch.painless.symbol.ScopeTable; @@ -65,7 +64,7 @@ public String getName() { /* ---- end node data ---- */ @Override - protected void write(ClassWriter classWriter, MethodWriter methodWriter, Globals globals, ScopeTable scopeTable) { + protected void write(ClassWriter classWriter, MethodWriter methodWriter, ScopeTable scopeTable) { classWriter.getClassVisitor().visitField( ClassWriter.buildAccess(modifiers, true), name, Type.getType(fieldType).getDescriptor(), null, null).visitEnd(); } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/ForEachLoopNode.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/ForEachLoopNode.java index b1ff46d14c358..75609f8eafbc4 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/ForEachLoopNode.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/ForEachLoopNode.java @@ -20,7 +20,6 @@ package org.elasticsearch.painless.ir; import 
org.elasticsearch.painless.ClassWriter; -import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.MethodWriter; import org.elasticsearch.painless.symbol.ScopeTable; @@ -41,8 +40,8 @@ public ConditionNode getConditionNode() { /* ---- end tree structure ---- */ @Override - protected void write(ClassWriter classWriter, MethodWriter methodWriter, Globals globals, ScopeTable scopeTable) { + protected void write(ClassWriter classWriter, MethodWriter methodWriter, ScopeTable scopeTable) { scopeTable = scopeTable.newScope(); - conditionNode.write(classWriter, methodWriter, globals, scopeTable); + conditionNode.write(classWriter, methodWriter, scopeTable); } } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/ForEachSubArrayNode.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/ForEachSubArrayNode.java index 3197aae1a9ac8..356f4056039a6 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/ForEachSubArrayNode.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/ForEachSubArrayNode.java @@ -20,7 +20,6 @@ package org.elasticsearch.painless.ir; import org.elasticsearch.painless.ClassWriter; -import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.MethodWriter; import org.elasticsearch.painless.lookup.PainlessCast; import org.elasticsearch.painless.lookup.PainlessLookupUtility; @@ -125,14 +124,14 @@ public String getIndexedCanonicalTypeName() { /* ---- end node data ---- */ @Override - protected void write(ClassWriter classWriter, MethodWriter methodWriter, Globals globals, ScopeTable scopeTable) { + protected void write(ClassWriter classWriter, MethodWriter methodWriter, ScopeTable scopeTable) { methodWriter.writeStatementOffset(location); Variable variable = scopeTable.defineVariable(variableType, variableName); Variable array = scopeTable.defineInternalVariable(arrayType, arrayName); Variable index = 
scopeTable.defineInternalVariable(indexType, indexName); - getConditionNode().write(classWriter, methodWriter, globals, scopeTable); + getConditionNode().write(classWriter, methodWriter, scopeTable); methodWriter.visitVarInsn(array.getAsmType().getOpcode(Opcodes.ISTORE), array.getSlot()); methodWriter.push(-1); methodWriter.visitVarInsn(index.getAsmType().getOpcode(Opcodes.ISTORE), index.getSlot()); @@ -162,7 +161,7 @@ protected void write(ClassWriter classWriter, MethodWriter methodWriter, Globals getBlockNode().continueLabel = begin; getBlockNode().breakLabel = end; - getBlockNode().write(classWriter, methodWriter, globals, scopeTable); + getBlockNode().write(classWriter, methodWriter, scopeTable); methodWriter.goTo(begin); methodWriter.mark(end); diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/ForEachSubIterableNode.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/ForEachSubIterableNode.java index bdb1af41c3a41..68bcf63e1c09e 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/ForEachSubIterableNode.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/ForEachSubIterableNode.java @@ -21,7 +21,6 @@ import org.elasticsearch.painless.ClassWriter; import org.elasticsearch.painless.DefBootstrap; -import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.MethodWriter; import org.elasticsearch.painless.lookup.PainlessCast; import org.elasticsearch.painless.lookup.PainlessMethod; @@ -101,13 +100,13 @@ public PainlessMethod getMethod() { /* ---- end node data ---- */ @Override - protected void write(ClassWriter classWriter, MethodWriter methodWriter, Globals globals, ScopeTable scopeTable) { + protected void write(ClassWriter classWriter, MethodWriter methodWriter, ScopeTable scopeTable) { methodWriter.writeStatementOffset(location); Variable variable = scopeTable.defineVariable(variableType, variableName); Variable iterator = 
scopeTable.defineInternalVariable(iteratorType, iteratorName); - getConditionNode().write(classWriter, methodWriter, globals, scopeTable); + getConditionNode().write(classWriter, methodWriter, scopeTable); if (method == null) { org.objectweb.asm.Type methodType = org.objectweb.asm.Type @@ -141,7 +140,7 @@ protected void write(ClassWriter classWriter, MethodWriter methodWriter, Globals getBlockNode().continueLabel = begin; getBlockNode().breakLabel = end; - getBlockNode().write(classWriter, methodWriter, globals, scopeTable); + getBlockNode().write(classWriter, methodWriter, scopeTable); methodWriter.goTo(begin); methodWriter.mark(end); diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/ForLoopNode.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/ForLoopNode.java index a1cdabf5b6cbe..edd450207bea9 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/ForLoopNode.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/ForLoopNode.java @@ -20,7 +20,6 @@ package org.elasticsearch.painless.ir; import org.elasticsearch.painless.ClassWriter; -import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.MethodWriter; import org.elasticsearch.painless.symbol.ScopeTable; import org.elasticsearch.painless.symbol.ScopeTable.Variable; @@ -51,7 +50,7 @@ public ExpressionNode getAfterthoughtNode() { } @Override - protected void write(ClassWriter classWriter, MethodWriter methodWriter, Globals globals, ScopeTable scopeTable) { + protected void write(ClassWriter classWriter, MethodWriter methodWriter, ScopeTable scopeTable) { methodWriter.writeStatementOffset(location); scopeTable = scopeTable.newScope(); @@ -61,18 +60,18 @@ protected void write(ClassWriter classWriter, MethodWriter methodWriter, Globals Label end = new Label(); if (initializerNode instanceof DeclarationBlockNode) { - initializerNode.write(classWriter, methodWriter, globals, scopeTable); + 
initializerNode.write(classWriter, methodWriter, scopeTable); } else if (initializerNode instanceof ExpressionNode) { ExpressionNode initializer = (ExpressionNode)this.initializerNode; - initializer.write(classWriter, methodWriter, globals, scopeTable); + initializer.write(classWriter, methodWriter, scopeTable); methodWriter.writePop(MethodWriter.getType(initializer.getExpressionType()).getSize()); } methodWriter.mark(start); if (getConditionNode() != null && isContinuous() == false) { - getConditionNode().write(classWriter, methodWriter, globals, scopeTable); + getConditionNode().write(classWriter, methodWriter, scopeTable); methodWriter.ifZCmp(Opcodes.IFEQ, end); } @@ -95,7 +94,7 @@ protected void write(ClassWriter classWriter, MethodWriter methodWriter, Globals getBlockNode().continueLabel = begin; getBlockNode().breakLabel = end; - getBlockNode().write(classWriter, methodWriter, globals, scopeTable); + getBlockNode().write(classWriter, methodWriter, scopeTable); } else { Variable loop = scopeTable.getInternalVariable("loop"); @@ -106,7 +105,7 @@ protected void write(ClassWriter classWriter, MethodWriter methodWriter, Globals if (afterthoughtNode != null) { methodWriter.mark(begin); - afterthoughtNode.write(classWriter, methodWriter, globals, scopeTable); + afterthoughtNode.write(classWriter, methodWriter, scopeTable); methodWriter.writePop(MethodWriter.getType(afterthoughtNode.getExpressionType()).getSize()); } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/FuncRefNode.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/FuncRefNode.java index 45e56c3921b67..32161967b765c 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/FuncRefNode.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/FuncRefNode.java @@ -21,7 +21,6 @@ import org.elasticsearch.painless.ClassWriter; import org.elasticsearch.painless.FunctionRef; -import org.elasticsearch.painless.Globals; 
import org.elasticsearch.painless.MethodWriter; import org.elasticsearch.painless.symbol.ScopeTable; @@ -42,7 +41,7 @@ public FunctionRef getFuncRef() { /* ---- end node data ---- */ @Override - protected void write(ClassWriter classWriter, MethodWriter methodWriter, Globals globals, ScopeTable scopeTable) { + protected void write(ClassWriter classWriter, MethodWriter methodWriter, ScopeTable scopeTable) { if (funcRef != null) { methodWriter.writeDebugInfo(location); methodWriter.invokeLambdaCall(funcRef); diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/FunctionNode.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/FunctionNode.java index 47c944f3b8e27..ba2ec55f720bf 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/FunctionNode.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/FunctionNode.java @@ -20,7 +20,6 @@ package org.elasticsearch.painless.ir; import org.elasticsearch.painless.ClassWriter; -import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.MethodWriter; import org.elasticsearch.painless.symbol.ScopeTable; import org.elasticsearch.painless.symbol.ScopeTable.Variable; @@ -123,7 +122,7 @@ public int getMaxLoopCounter() { /* ---- end node data ---- */ @Override - protected void write(ClassWriter classWriter, MethodWriter methodWriter, Globals globals, ScopeTable scopeTable) { + protected void write(ClassWriter classWriter, MethodWriter methodWriter, ScopeTable scopeTable) { int access = Opcodes.ACC_PUBLIC; if (isStatic) { @@ -165,7 +164,7 @@ protected void write(ClassWriter classWriter, MethodWriter methodWriter, Globals methodWriter.visitVarInsn(Opcodes.ISTORE, loop.getSlot()); } - blockNode.write(classWriter, methodWriter, globals, scopeTable.newScope()); + blockNode.write(classWriter, methodWriter, scopeTable.newScope()); methodWriter.endMethod(); } diff --git 
a/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/IRNode.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/IRNode.java index e8624c5a1699a..a4b8a132fd940 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/IRNode.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/IRNode.java @@ -20,7 +20,6 @@ package org.elasticsearch.painless.ir; import org.elasticsearch.painless.ClassWriter; -import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.Location; import org.elasticsearch.painless.MethodWriter; import org.elasticsearch.painless.symbol.ScopeTable; @@ -41,7 +40,7 @@ public Location getLocation() { /* end node data */ - protected void write(ClassWriter classWriter, MethodWriter methodWriter, Globals globals, ScopeTable scopeTable) { + protected void write(ClassWriter classWriter, MethodWriter methodWriter, ScopeTable scopeTable) { throw new UnsupportedOperationException(); } @@ -49,15 +48,15 @@ protected int accessElementCount() { throw new UnsupportedOperationException(); } - protected void setup(ClassWriter classWriter, MethodWriter methodWriter, Globals globals, ScopeTable scopeTable) { + protected void setup(ClassWriter classWriter, MethodWriter methodWriter, ScopeTable scopeTable) { throw new UnsupportedOperationException(); } - protected void load(ClassWriter classWriter, MethodWriter methodWriter, Globals globals, ScopeTable scopeTable) { + protected void load(ClassWriter classWriter, MethodWriter methodWriter, ScopeTable scopeTable) { throw new UnsupportedOperationException(); } - protected void store(ClassWriter classWriter, MethodWriter methodWriter, Globals globals, ScopeTable scopeTable) { + protected void store(ClassWriter classWriter, MethodWriter methodWriter, ScopeTable scopeTable) { throw new UnsupportedOperationException(); } } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/IfElseNode.java 
b/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/IfElseNode.java index ede3c583884d9..76e477e2a78d1 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/IfElseNode.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/IfElseNode.java @@ -20,7 +20,6 @@ package org.elasticsearch.painless.ir; import org.elasticsearch.painless.ClassWriter; -import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.MethodWriter; import org.elasticsearch.painless.symbol.ScopeTable; import org.objectweb.asm.Label; @@ -43,18 +42,18 @@ public BlockNode getElseBlockNode() { /* ---- end tree structure ---- */ @Override - protected void write(ClassWriter classWriter, MethodWriter methodWriter, Globals globals, ScopeTable scopeTable) { + protected void write(ClassWriter classWriter, MethodWriter methodWriter, ScopeTable scopeTable) { methodWriter.writeStatementOffset(location); Label fals = new Label(); Label end = new Label(); - getConditionNode().write(classWriter, methodWriter, globals, scopeTable); + getConditionNode().write(classWriter, methodWriter, scopeTable); methodWriter.ifZCmp(Opcodes.IFEQ, fals); getBlockNode().continueLabel = continueLabel; getBlockNode().breakLabel = breakLabel; - getBlockNode().write(classWriter, methodWriter, globals, scopeTable.newScope()); + getBlockNode().write(classWriter, methodWriter, scopeTable.newScope()); if (getBlockNode().doAllEscape() == false) { methodWriter.goTo(end); @@ -64,7 +63,7 @@ protected void write(ClassWriter classWriter, MethodWriter methodWriter, Globals elseBlockNode.continueLabel = continueLabel; elseBlockNode.breakLabel = breakLabel; - elseBlockNode.write(classWriter, methodWriter, globals, scopeTable.newScope()); + elseBlockNode.write(classWriter, methodWriter, scopeTable.newScope()); methodWriter.mark(end); } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/IfNode.java 
b/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/IfNode.java index 05639fe177627..eba682b15e48e 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/IfNode.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/IfNode.java @@ -20,7 +20,6 @@ package org.elasticsearch.painless.ir; import org.elasticsearch.painless.ClassWriter; -import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.MethodWriter; import org.elasticsearch.painless.symbol.ScopeTable; import org.objectweb.asm.Label; @@ -29,17 +28,17 @@ public class IfNode extends ConditionNode { @Override - protected void write(ClassWriter classWriter, MethodWriter methodWriter, Globals globals, ScopeTable scopeTable) { + protected void write(ClassWriter classWriter, MethodWriter methodWriter, ScopeTable scopeTable) { methodWriter.writeStatementOffset(location); Label fals = new Label(); - getConditionNode().write(classWriter, methodWriter, globals, scopeTable); + getConditionNode().write(classWriter, methodWriter, scopeTable); methodWriter.ifZCmp(Opcodes.IFEQ, fals); getBlockNode().continueLabel = continueLabel; getBlockNode().breakLabel = breakLabel; - getBlockNode().write(classWriter, methodWriter, globals, scopeTable.newScope()); + getBlockNode().write(classWriter, methodWriter, scopeTable.newScope()); methodWriter.mark(fals); } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/InstanceofNode.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/InstanceofNode.java index c825d3c941ecf..049dc649b6aa2 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/InstanceofNode.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/InstanceofNode.java @@ -20,7 +20,6 @@ package org.elasticsearch.painless.ir; import org.elasticsearch.painless.ClassWriter; -import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.MethodWriter; 
import org.elasticsearch.painless.lookup.PainlessLookupUtility; import org.elasticsearch.painless.symbol.ScopeTable; @@ -69,8 +68,8 @@ public boolean isPrimitiveResult() { /* ---- end node data ---- */ @Override - protected void write(ClassWriter classWriter, MethodWriter methodWriter, Globals globals, ScopeTable scopeTable) { - getChildNode().write(classWriter, methodWriter, globals, scopeTable); + protected void write(ClassWriter classWriter, MethodWriter methodWriter, ScopeTable scopeTable) { + getChildNode().write(classWriter, methodWriter, scopeTable); // primitive types if (isPrimitiveResult) { diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/LambdaNode.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/LambdaNode.java index 1a5a3e016d855..5cf60d6ed157a 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/LambdaNode.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/LambdaNode.java @@ -21,7 +21,6 @@ import org.elasticsearch.painless.ClassWriter; import org.elasticsearch.painless.FunctionRef; -import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.MethodWriter; import org.elasticsearch.painless.symbol.ScopeTable; import org.elasticsearch.painless.symbol.ScopeTable.Variable; @@ -56,7 +55,7 @@ public FunctionRef getFuncRef() { /* ---- end node data ---- */ @Override - protected void write(ClassWriter classWriter, MethodWriter methodWriter, Globals globals, ScopeTable scopeTable) { + protected void write(ClassWriter classWriter, MethodWriter methodWriter, ScopeTable scopeTable) { methodWriter.writeDebugInfo(location); if (funcRef != null) { diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/ListInitializationNode.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/ListInitializationNode.java index 304beade56265..71cf61a0ca536 100644 --- 
a/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/ListInitializationNode.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/ListInitializationNode.java @@ -20,7 +20,6 @@ package org.elasticsearch.painless.ir; import org.elasticsearch.painless.ClassWriter; -import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.MethodWriter; import org.elasticsearch.painless.lookup.PainlessConstructor; import org.elasticsearch.painless.lookup.PainlessMethod; @@ -54,7 +53,7 @@ public PainlessMethod getMethod() { /* ---- end node data ---- */ @Override - protected void write(ClassWriter classWriter, MethodWriter methodWriter, Globals globals, ScopeTable scopeTable) { + protected void write(ClassWriter classWriter, MethodWriter methodWriter, ScopeTable scopeTable) { methodWriter.writeDebugInfo(location); methodWriter.newInstance(MethodWriter.getType(getExpressionType())); @@ -64,7 +63,7 @@ protected void write(ClassWriter classWriter, MethodWriter methodWriter, Globals for (ExpressionNode argument : getArgumentNodes()) { methodWriter.dup(); - argument.write(classWriter, methodWriter, globals, scopeTable); + argument.write(classWriter, methodWriter, scopeTable); methodWriter.invokeMethodCall(method); methodWriter.pop(); } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/ListSubShortcutNode.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/ListSubShortcutNode.java index 7b3602c9cc5e8..29841216b3925 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/ListSubShortcutNode.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/ListSubShortcutNode.java @@ -20,7 +20,6 @@ package org.elasticsearch.painless.ir; import org.elasticsearch.painless.ClassWriter; -import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.MethodWriter; import org.elasticsearch.painless.WriterConstants; import 
org.elasticsearch.painless.lookup.PainlessMethod; @@ -54,9 +53,9 @@ public PainlessMethod getGetter() { /* ---- end node data ---- */ @Override - protected void write(ClassWriter classWriter, MethodWriter methodWriter, Globals globals, ScopeTable scopeTable) { - setup(classWriter, methodWriter, globals, scopeTable); - load(classWriter, methodWriter, globals, scopeTable); + protected void write(ClassWriter classWriter, MethodWriter methodWriter, ScopeTable scopeTable) { + setup(classWriter, methodWriter, scopeTable); + load(classWriter, methodWriter, scopeTable); } @Override @@ -65,8 +64,8 @@ protected int accessElementCount() { } @Override - protected void setup(ClassWriter classWriter, MethodWriter methodWriter, Globals globals, ScopeTable scopeTable) { - getChildNode().write(classWriter, methodWriter, globals, scopeTable); + protected void setup(ClassWriter classWriter, MethodWriter methodWriter, ScopeTable scopeTable) { + getChildNode().write(classWriter, methodWriter, scopeTable); Label noFlip = new Label(); methodWriter.dup(); @@ -79,7 +78,7 @@ protected void setup(ClassWriter classWriter, MethodWriter methodWriter, Globals } @Override - protected void load(ClassWriter classWriter, MethodWriter methodWriter, Globals globals, ScopeTable scopeTable) { + protected void load(ClassWriter classWriter, MethodWriter methodWriter, ScopeTable scopeTable) { methodWriter.writeDebugInfo(location); methodWriter.invokeMethodCall(getter); @@ -89,7 +88,7 @@ protected void load(ClassWriter classWriter, MethodWriter methodWriter, Globals } @Override - protected void store(ClassWriter classWriter, MethodWriter methodWriter, Globals globals, ScopeTable scopeTable) { + protected void store(ClassWriter classWriter, MethodWriter methodWriter, ScopeTable scopeTable) { methodWriter.writeDebugInfo(location); methodWriter.invokeMethodCall(setter); methodWriter.writePop(MethodWriter.getType(setter.returnType).getSize()); diff --git 
a/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/MapInitializationNode.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/MapInitializationNode.java index 788f157a95419..6e3b8540872ac 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/MapInitializationNode.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/MapInitializationNode.java @@ -20,7 +20,6 @@ package org.elasticsearch.painless.ir; import org.elasticsearch.painless.ClassWriter; -import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.MethodWriter; import org.elasticsearch.painless.lookup.PainlessConstructor; import org.elasticsearch.painless.lookup.PainlessMethod; @@ -87,7 +86,7 @@ public PainlessMethod getMethod() { /* ---- end node data ---- */ @Override - protected void write(ClassWriter classWriter, MethodWriter methodWriter, Globals globals, ScopeTable scopeTable) { + protected void write(ClassWriter classWriter, MethodWriter methodWriter, ScopeTable scopeTable) { methodWriter.writeDebugInfo(location); methodWriter.newInstance(MethodWriter.getType(getExpressionType())); @@ -97,8 +96,8 @@ protected void write(ClassWriter classWriter, MethodWriter methodWriter, Globals for (int index = 0; index < getArgumentsSize(); ++index) { methodWriter.dup(); - getKeyNode(index).write(classWriter, methodWriter, globals, scopeTable); - getValueNode(index).write(classWriter, methodWriter, globals, scopeTable); + getKeyNode(index).write(classWriter, methodWriter, scopeTable); + getValueNode(index).write(classWriter, methodWriter, scopeTable); methodWriter.invokeMethodCall(method); methodWriter.pop(); } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/MapSubShortcutNode.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/MapSubShortcutNode.java index f1e012099a57a..c74dde235951a 100644 --- 
a/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/MapSubShortcutNode.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/MapSubShortcutNode.java @@ -20,7 +20,6 @@ package org.elasticsearch.painless.ir; import org.elasticsearch.painless.ClassWriter; -import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.MethodWriter; import org.elasticsearch.painless.lookup.PainlessMethod; import org.elasticsearch.painless.symbol.ScopeTable; @@ -51,8 +50,8 @@ public PainlessMethod getGetter() { /* ---- end node data ---- */ @Override - protected void write(ClassWriter classWriter, MethodWriter methodWriter, Globals globals, ScopeTable scopeTable) { - getChildNode().write(classWriter, methodWriter, globals, scopeTable); + protected void write(ClassWriter classWriter, MethodWriter methodWriter, ScopeTable scopeTable) { + getChildNode().write(classWriter, methodWriter, scopeTable); methodWriter.writeDebugInfo(location); methodWriter.invokeMethodCall(getter); @@ -68,12 +67,12 @@ protected int accessElementCount() { } @Override - protected void setup(ClassWriter classWriter, MethodWriter methodWriter, Globals globals, ScopeTable scopeTable) { - getChildNode().write(classWriter, methodWriter, globals, scopeTable); + protected void setup(ClassWriter classWriter, MethodWriter methodWriter, ScopeTable scopeTable) { + getChildNode().write(classWriter, methodWriter, scopeTable); } @Override - protected void load(ClassWriter classWriter, MethodWriter methodWriter, Globals globals, ScopeTable scopeTable) { + protected void load(ClassWriter classWriter, MethodWriter methodWriter, ScopeTable scopeTable) { methodWriter.writeDebugInfo(location); methodWriter.invokeMethodCall(getter); @@ -83,7 +82,7 @@ protected void load(ClassWriter classWriter, MethodWriter methodWriter, Globals } @Override - protected void store(ClassWriter classWriter, MethodWriter methodWriter, Globals globals, ScopeTable scopeTable) { + protected void 
store(ClassWriter classWriter, MethodWriter methodWriter, ScopeTable scopeTable) { methodWriter.writeDebugInfo(location); methodWriter.invokeMethodCall(setter); methodWriter.writePop(MethodWriter.getType(setter.returnType).getSize()); diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/MemberCallNode.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/MemberCallNode.java index 8dcc87363bac0..78c88038fd6bb 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/MemberCallNode.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/MemberCallNode.java @@ -20,7 +20,6 @@ package org.elasticsearch.painless.ir; import org.elasticsearch.painless.ClassWriter; -import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.MethodWriter; import org.elasticsearch.painless.lookup.PainlessClassBinding; import org.elasticsearch.painless.lookup.PainlessInstanceBinding; @@ -95,7 +94,7 @@ public String getBindingName() { /* ---- end node data ---- */ @Override - public void write(ClassWriter classWriter, MethodWriter methodWriter, Globals globals, ScopeTable scopeTable) { + public void write(ClassWriter classWriter, MethodWriter methodWriter, ScopeTable scopeTable) { methodWriter.writeDebugInfo(location); if (localFunction != null) { @@ -104,8 +103,8 @@ public void write(ClassWriter classWriter, MethodWriter methodWriter, Globals gl } for (ExpressionNode argumentNode : getArgumentNodes()) { - argumentNode.write(classWriter, methodWriter, globals, scopeTable); - } + argumentNode.write(classWriter, methodWriter, scopeTable); + } if (localFunction.isStatic()) { methodWriter.invokeStatic(CLASS_TYPE, localFunction.getAsmMethod()); @@ -114,8 +113,8 @@ public void write(ClassWriter classWriter, MethodWriter methodWriter, Globals gl } } else if (importedMethod != null) { for (ExpressionNode argumentNode : getArgumentNodes()) { - argumentNode.write(classWriter, methodWriter, globals, 
scopeTable); - } + argumentNode.write(classWriter, methodWriter, scopeTable); + } methodWriter.invokeStatic(Type.getType(importedMethod.targetClass), new Method(importedMethod.javaMethod.getName(), importedMethod.methodType.toMethodDescriptorString())); @@ -137,8 +136,8 @@ public void write(ClassWriter classWriter, MethodWriter methodWriter, Globals gl } for (int argument = 0; argument < javaConstructorParameterCount; ++argument) { - getArgumentNodes().get(argument).write(classWriter, methodWriter, globals, scopeTable); - } + getArgumentNodes().get(argument).write(classWriter, methodWriter, scopeTable); + } methodWriter.invokeConstructor(type, Method.getMethod(classBinding.javaConstructor)); methodWriter.putField(CLASS_TYPE, bindingName, type); @@ -148,7 +147,7 @@ public void write(ClassWriter classWriter, MethodWriter methodWriter, Globals gl methodWriter.getField(CLASS_TYPE, bindingName, type); for (int argument = 0; argument < classBinding.javaMethod.getParameterCount(); ++argument) { - getArgumentNodes().get(argument + javaConstructorParameterCount).write(classWriter, methodWriter, globals, scopeTable); + getArgumentNodes().get(argument + javaConstructorParameterCount).write(classWriter, methodWriter, scopeTable); } methodWriter.invokeVirtual(type, Method.getMethod(classBinding.javaMethod)); @@ -159,7 +158,7 @@ public void write(ClassWriter classWriter, MethodWriter methodWriter, Globals gl methodWriter.getStatic(CLASS_TYPE, bindingName, type); for (int argument = 0; argument < instanceBinding.javaMethod.getParameterCount(); ++argument) { - getArgumentNodes().get(argument).write(classWriter, methodWriter, globals, scopeTable); + getArgumentNodes().get(argument).write(classWriter, methodWriter, scopeTable); } methodWriter.invokeVirtual(type, Method.getMethod(instanceBinding.javaMethod)); diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/MemberFieldNode.java 
b/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/MemberFieldLoadNode.java similarity index 89% rename from modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/MemberFieldNode.java rename to modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/MemberFieldLoadNode.java index 7008fa9894c89..6da484046fbc0 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/MemberFieldNode.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/MemberFieldLoadNode.java @@ -20,13 +20,16 @@ package org.elasticsearch.painless.ir; import org.elasticsearch.painless.ClassWriter; -import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.MethodWriter; import org.elasticsearch.painless.symbol.ScopeTable; import static org.elasticsearch.painless.WriterConstants.CLASS_TYPE; -public class MemberFieldNode extends ExpressionNode { +/** + * Represents reading a value from a member field from + * the main class. + */ +public class MemberFieldLoadNode extends ExpressionNode { /* ---- begin node data ---- */ @@ -51,12 +54,8 @@ public boolean isStatic() { /* ---- end node data ---- */ - public MemberFieldNode() { - // do nothing - } - @Override - public void write(ClassWriter classWriter, MethodWriter methodWriter, Globals globals, ScopeTable scopeTable) { + public void write(ClassWriter classWriter, MethodWriter methodWriter, ScopeTable scopeTable) { methodWriter.writeDebugInfo(location); if (isStatic) { diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/MemberFieldStoreNode.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/MemberFieldStoreNode.java new file mode 100644 index 0000000000000..184979a2494ba --- /dev/null +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/MemberFieldStoreNode.java @@ -0,0 +1,88 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.painless.ir; + +import org.elasticsearch.painless.ClassWriter; +import org.elasticsearch.painless.MethodWriter; +import org.elasticsearch.painless.lookup.PainlessLookupUtility; +import org.elasticsearch.painless.symbol.ScopeTable; + +import static org.elasticsearch.painless.WriterConstants.CLASS_TYPE; + +/** + * Represents a member field assignment on the main class. + * The value to store is generated by the child node accessed + * via {@link #getChildNode()}. 
+ */ +public class MemberFieldStoreNode extends UnaryNode { + + /* ---- begin node data ---- */ + + protected String name; + protected Class fieldType; + protected boolean isStatic; + + public void setName(String name) { + this.name = name; + } + + public String getName() { + return name; + } + + public void setFieldType(Class fieldType) { + this.fieldType = fieldType; + } + + public Class getFieldType() { + return fieldType; + } + + public String getFieldCanonicalTypeName() { + return PainlessLookupUtility.typeToCanonicalTypeName(fieldType); + } + + public void setStatic(boolean isStatic) { + this.isStatic = isStatic; + } + + public boolean isStatic() { + return isStatic; + } + + /* ---- end node data ---- */ + + @Override + public void write(ClassWriter classWriter, MethodWriter methodWriter, ScopeTable scopeTable) { + if (isStatic == false) { + methodWriter.loadThis(); + } + + getChildNode().write(classWriter, methodWriter, scopeTable); + + methodWriter.writeDebugInfo(location); + + if (isStatic) { + methodWriter.putStatic(CLASS_TYPE, name, MethodWriter.getType(fieldType)); + } else { + methodWriter.putField(CLASS_TYPE, name, MethodWriter.getType(fieldType)); + } + } +} diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/NewArrayFuncRefNode.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/NewArrayFuncRefNode.java index 7055b82fd03ff..6afb9e68cf52f 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/NewArrayFuncRefNode.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/NewArrayFuncRefNode.java @@ -21,7 +21,6 @@ import org.elasticsearch.painless.ClassWriter; import org.elasticsearch.painless.FunctionRef; -import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.MethodWriter; import org.elasticsearch.painless.symbol.ScopeTable; @@ -42,7 +41,7 @@ public FunctionRef getFuncRef() { /* ---- end node data ---- */ @Override - protected void 
write(ClassWriter classWriter, MethodWriter methodWriter, Globals globals, ScopeTable scopeTable) { + protected void write(ClassWriter classWriter, MethodWriter methodWriter, ScopeTable scopeTable) { if (funcRef != null) { methodWriter.writeDebugInfo(location); methodWriter.invokeLambdaCall(funcRef); diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/NewArrayNode.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/NewArrayNode.java index c63b811b5990c..31aaf42b0ceca 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/NewArrayNode.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/NewArrayNode.java @@ -20,7 +20,6 @@ package org.elasticsearch.painless.ir; import org.elasticsearch.painless.ClassWriter; -import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.MethodWriter; import org.elasticsearch.painless.symbol.ScopeTable; @@ -41,7 +40,7 @@ public boolean getInitialize() { /* ---- end node data ---- */ @Override - protected void write(ClassWriter classWriter, MethodWriter methodWriter, Globals globals, ScopeTable scopeTable) { + protected void write(ClassWriter classWriter, MethodWriter methodWriter, ScopeTable scopeTable) { methodWriter.writeDebugInfo(location); if (initialize) { @@ -53,12 +52,12 @@ protected void write(ClassWriter classWriter, MethodWriter methodWriter, Globals methodWriter.dup(); methodWriter.push(index); - argumentNode.write(classWriter, methodWriter, globals, scopeTable); + argumentNode.write(classWriter, methodWriter, scopeTable); methodWriter.arrayStore(MethodWriter.getType(getExpressionType().getComponentType())); } } else { for (ExpressionNode argumentNode : getArgumentNodes()) { - argumentNode.write(classWriter, methodWriter, globals, scopeTable); + argumentNode.write(classWriter, methodWriter, scopeTable); } if (getArgumentNodes().size() > 1) { diff --git 
a/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/NewObjectNode.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/NewObjectNode.java index cb40eb7cb33e5..4585459a21fad 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/NewObjectNode.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/NewObjectNode.java @@ -20,7 +20,6 @@ package org.elasticsearch.painless.ir; import org.elasticsearch.painless.ClassWriter; -import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.MethodWriter; import org.elasticsearch.painless.lookup.PainlessConstructor; import org.elasticsearch.painless.symbol.ScopeTable; @@ -53,7 +52,7 @@ public boolean getRead() { /* ---- end node data ---- */ @Override - protected void write(ClassWriter classWriter, MethodWriter methodWriter, Globals globals, ScopeTable scopeTable) { + protected void write(ClassWriter classWriter, MethodWriter methodWriter, ScopeTable scopeTable) { methodWriter.writeDebugInfo(location); methodWriter.newInstance(MethodWriter.getType(getExpressionType())); @@ -63,7 +62,7 @@ protected void write(ClassWriter classWriter, MethodWriter methodWriter, Globals } for (ExpressionNode argumentNode : getArgumentNodes()) { - argumentNode.write(classWriter, methodWriter, globals, scopeTable); + argumentNode.write(classWriter, methodWriter, scopeTable); } methodWriter.invokeConstructor( diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/NullNode.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/NullNode.java index 60e9d43fa1cab..7a2b05bcb0307 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/NullNode.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/NullNode.java @@ -20,7 +20,6 @@ package org.elasticsearch.painless.ir; import org.elasticsearch.painless.ClassWriter; -import org.elasticsearch.painless.Globals; import 
org.elasticsearch.painless.MethodWriter; import org.elasticsearch.painless.symbol.ScopeTable; import org.objectweb.asm.Opcodes; @@ -28,7 +27,7 @@ public class NullNode extends ExpressionNode { @Override - protected void write(ClassWriter classWriter, MethodWriter methodWriter, Globals globals, ScopeTable scopeTable) { + protected void write(ClassWriter classWriter, MethodWriter methodWriter, ScopeTable scopeTable) { methodWriter.visitInsn(Opcodes.ACONST_NULL); } } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/NullSafeSubNode.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/NullSafeSubNode.java index c89c0700d8060..78c29cad5497a 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/NullSafeSubNode.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/NullSafeSubNode.java @@ -20,7 +20,6 @@ package org.elasticsearch.painless.ir; import org.elasticsearch.painless.ClassWriter; -import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.MethodWriter; import org.elasticsearch.painless.symbol.ScopeTable; import org.objectweb.asm.Label; @@ -28,13 +27,13 @@ public class NullSafeSubNode extends UnaryNode { @Override - protected void write(ClassWriter classWriter, MethodWriter methodWriter, Globals globals, ScopeTable scopeTable) { + protected void write(ClassWriter classWriter, MethodWriter methodWriter, ScopeTable scopeTable) { methodWriter.writeDebugInfo(location); Label end = new Label(); methodWriter.dup(); methodWriter.ifNull(end); - getChildNode().write(classWriter, methodWriter, globals, scopeTable); + getChildNode().write(classWriter, methodWriter, scopeTable); methodWriter.mark(end); } } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/RegexNode.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/RegexNode.java deleted file mode 100644 index 988f2c4bcc9af..0000000000000 --- 
a/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/RegexNode.java +++ /dev/null @@ -1,93 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.painless.ir; - -import org.elasticsearch.painless.ClassWriter; -import org.elasticsearch.painless.Constant; -import org.elasticsearch.painless.Globals; -import org.elasticsearch.painless.MethodWriter; -import org.elasticsearch.painless.WriterConstants; -import org.elasticsearch.painless.symbol.ScopeTable; - -import java.util.regex.Pattern; - -public class RegexNode extends ExpressionNode { - - /* ---- begin node data ---- */ - - private String pattern; - private int flags; - private Constant constant; - - public void setPattern(String pattern) { - this.pattern = pattern; - } - - public String getPattern() { - return pattern; - } - - public void setFlags(int flags) { - this.flags = flags; - } - - public int getFlags() { - return flags; - } - - public void setConstant(Constant constant) { - this.constant = constant; - } - - public Object getConstant() { - return constant; - } - - /* ---- end node data ---- */ - - @Override - protected void write(ClassWriter classWriter, MethodWriter methodWriter, Globals globals, ScopeTable scopeTable) { - 
methodWriter.writeDebugInfo(location); - - methodWriter.getStatic(WriterConstants.CLASS_TYPE, constant.name, org.objectweb.asm.Type.getType(Pattern.class)); - globals.addConstantInitializer(constant); - } - - public void initializeConstant(MethodWriter writer) { - writer.push(pattern); - writer.push(flags); - writer.invokeStatic(org.objectweb.asm.Type.getType(Pattern.class), WriterConstants.PATTERN_COMPILE); - } - - protected int flagForChar(char c) { - switch (c) { - case 'c': return Pattern.CANON_EQ; - case 'i': return Pattern.CASE_INSENSITIVE; - case 'l': return Pattern.LITERAL; - case 'm': return Pattern.MULTILINE; - case 's': return Pattern.DOTALL; - case 'U': return Pattern.UNICODE_CHARACTER_CLASS; - case 'u': return Pattern.UNICODE_CASE; - case 'x': return Pattern.COMMENTS; - default: - throw new IllegalArgumentException("Unknown flag [" + c + "]"); - } - } -} diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/ReturnNode.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/ReturnNode.java index 5a0d2687bcd13..bc9f534692b2e 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/ReturnNode.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/ReturnNode.java @@ -20,7 +20,6 @@ package org.elasticsearch.painless.ir; import org.elasticsearch.painless.ClassWriter; -import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.MethodWriter; import org.elasticsearch.painless.symbol.ScopeTable; @@ -41,11 +40,11 @@ public ExpressionNode getExpressionNode() { /* ---- end tree structure ---- */ @Override - protected void write(ClassWriter classWriter, MethodWriter methodWriter, Globals globals, ScopeTable scopeTable) { + protected void write(ClassWriter classWriter, MethodWriter methodWriter, ScopeTable scopeTable) { methodWriter.writeStatementOffset(location); if (expressionNode != null) { - expressionNode.write(classWriter, methodWriter, globals, scopeTable); 
+ expressionNode.write(classWriter, methodWriter, scopeTable); } methodWriter.returnValue(); diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/StatementExpressionNode.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/StatementExpressionNode.java index 6d045bdb7f8c0..cb3247915ffbd 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/StatementExpressionNode.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/StatementExpressionNode.java @@ -20,7 +20,6 @@ package org.elasticsearch.painless.ir; import org.elasticsearch.painless.ClassWriter; -import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.MethodWriter; import org.elasticsearch.painless.symbol.ScopeTable; @@ -38,29 +37,12 @@ public ExpressionNode getExpressionNode() { return expressionNode; } - /* ---- end tree structure, begin node data ---- */ - - private boolean methodEscape; - - public void setMethodEscape(boolean methodEscape) { - this.methodEscape = methodEscape; - } - - public boolean getMethodEscape() { - return methodEscape; - } - - /* ---- end node data ---- */ + /* ---- end tree structure ---- */ @Override - protected void write(ClassWriter classWriter, MethodWriter methodWriter, Globals globals, ScopeTable scopeTable) { + protected void write(ClassWriter classWriter, MethodWriter methodWriter, ScopeTable scopeTable) { methodWriter.writeStatementOffset(location); - expressionNode.write(classWriter, methodWriter, globals, scopeTable); - - if (methodEscape) { - methodWriter.returnValue(); - } else { - methodWriter.writePop(MethodWriter.getType(expressionNode.getExpressionType()).getSize()); - } + expressionNode.write(classWriter, methodWriter, scopeTable); + methodWriter.writePop(MethodWriter.getType(expressionNode.getExpressionType()).getSize()); } } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/StaticNode.java 
b/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/StaticNode.java index 2b0409982f88f..448a0ad0f2bd6 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/StaticNode.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/StaticNode.java @@ -20,14 +20,13 @@ package org.elasticsearch.painless.ir; import org.elasticsearch.painless.ClassWriter; -import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.MethodWriter; import org.elasticsearch.painless.symbol.ScopeTable; public class StaticNode extends ExpressionNode { @Override - protected void write(ClassWriter classWriter, MethodWriter methodWriter, Globals globals, ScopeTable scopeTable) { + protected void write(ClassWriter classWriter, MethodWriter methodWriter, ScopeTable scopeTable) { // do nothing } } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/ThrowNode.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/ThrowNode.java index 77f5dffea8bcf..b7d4f87dd0724 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/ThrowNode.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/ThrowNode.java @@ -20,7 +20,6 @@ package org.elasticsearch.painless.ir; import org.elasticsearch.painless.ClassWriter; -import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.MethodWriter; import org.elasticsearch.painless.symbol.ScopeTable; @@ -41,9 +40,9 @@ public ExpressionNode getExpressionNode() { /* ---- end tree structure ---- */ @Override - protected void write(ClassWriter classWriter, MethodWriter methodWriter, Globals globals, ScopeTable scopeTable) { + protected void write(ClassWriter classWriter, MethodWriter methodWriter, ScopeTable scopeTable) { methodWriter.writeStatementOffset(location); - expressionNode.write(classWriter, methodWriter, globals, scopeTable); + expressionNode.write(classWriter, methodWriter, scopeTable); 
methodWriter.throwException(); } } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/TryNode.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/TryNode.java index c621e1348ec4a..51703ada694e9 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/TryNode.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/TryNode.java @@ -20,7 +20,6 @@ package org.elasticsearch.painless.ir; import org.elasticsearch.painless.ClassWriter; -import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.MethodWriter; import org.elasticsearch.painless.symbol.ScopeTable; import org.objectweb.asm.Label; @@ -54,7 +53,7 @@ public List getCatchsNodes() { /* ---- end tree structure ---- */ @Override - protected void write(ClassWriter classWriter, MethodWriter methodWriter, Globals globals, ScopeTable scopeTable) { + protected void write(ClassWriter classWriter, MethodWriter methodWriter, ScopeTable scopeTable) { methodWriter.writeStatementOffset(location); Label begin = new Label(); @@ -65,7 +64,7 @@ protected void write(ClassWriter classWriter, MethodWriter methodWriter, Globals blockNode.continueLabel = continueLabel; blockNode.breakLabel = breakLabel; - blockNode.write(classWriter, methodWriter, globals, scopeTable.newScope()); + blockNode.write(classWriter, methodWriter, scopeTable.newScope()); if (blockNode.doAllEscape() == false) { methodWriter.goTo(exception); @@ -77,7 +76,7 @@ protected void write(ClassWriter classWriter, MethodWriter methodWriter, Globals catchNode.begin = begin; catchNode.end = end; catchNode.exception = catchNodes.size() > 1 ? 
exception : null; - catchNode.write(classWriter, methodWriter, globals, scopeTable.newScope()); + catchNode.write(classWriter, methodWriter, scopeTable.newScope()); } if (blockNode.doAllEscape() == false || catchNodes.size() > 1) { diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/UnaryMathNode.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/UnaryMathNode.java index 66369d8e202f0..a0f18a92d310b 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/UnaryMathNode.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/UnaryMathNode.java @@ -21,7 +21,6 @@ import org.elasticsearch.painless.ClassWriter; import org.elasticsearch.painless.DefBootstrap; -import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.MethodWriter; import org.elasticsearch.painless.Operation; import org.elasticsearch.painless.lookup.PainlessLookupUtility; @@ -79,14 +78,15 @@ public boolean getOriginallyExplicit() { /* ---- end node data ---- */ @Override - public void write(ClassWriter classWriter, MethodWriter methodWriter, Globals globals, ScopeTable scopeTable) { + public void write(ClassWriter classWriter, MethodWriter methodWriter, ScopeTable scopeTable) { methodWriter.writeDebugInfo(location); if (operation == Operation.NOT) { Label fals = new Label(); Label end = new Label(); - getChildNode().write(classWriter, methodWriter, globals, scopeTable); + getChildNode().write(classWriter, methodWriter, scopeTable); + methodWriter.ifZCmp(Opcodes.IFEQ, fals); methodWriter.push(false); @@ -95,7 +95,7 @@ public void write(ClassWriter classWriter, MethodWriter methodWriter, Globals gl methodWriter.push(true); methodWriter.mark(end); } else { - getChildNode().write(classWriter, methodWriter, globals, scopeTable); + getChildNode().write(classWriter, methodWriter, scopeTable); // Def calls adopt the wanted return value. 
If there was a narrowing cast, // we need to flag that so that it's done at runtime. diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/VariableNode.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/VariableNode.java index 68f2f08e2e52f..f7dbe238cf498 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/VariableNode.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/VariableNode.java @@ -20,7 +20,6 @@ package org.elasticsearch.painless.ir; import org.elasticsearch.painless.ClassWriter; -import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.MethodWriter; import org.elasticsearch.painless.symbol.ScopeTable; import org.elasticsearch.painless.symbol.ScopeTable.Variable; @@ -43,7 +42,7 @@ public String getName() { /* ---- end node data ---- */ @Override - protected void write(ClassWriter classWriter, MethodWriter methodWriter, Globals globals, ScopeTable scopeTable) { + protected void write(ClassWriter classWriter, MethodWriter methodWriter, ScopeTable scopeTable) { Variable variable = scopeTable.getVariable(name); methodWriter.visitVarInsn(variable.getAsmType().getOpcode(Opcodes.ILOAD), variable.getSlot()); } @@ -54,18 +53,18 @@ protected int accessElementCount() { } @Override - protected void setup(ClassWriter classWriter, MethodWriter methodWriter, Globals globals, ScopeTable scopeTable) { + protected void setup(ClassWriter classWriter, MethodWriter methodWriter, ScopeTable scopeTable) { // do nothing } @Override - protected void load(ClassWriter classWriter, MethodWriter methodWriter, Globals globals, ScopeTable scopeTable) { + protected void load(ClassWriter classWriter, MethodWriter methodWriter, ScopeTable scopeTable) { Variable variable = scopeTable.getVariable(name); methodWriter.visitVarInsn(variable.getAsmType().getOpcode(Opcodes.ILOAD), variable.getSlot()); } @Override - protected void store(ClassWriter classWriter, MethodWriter 
methodWriter, Globals globals, ScopeTable scopeTable) { + protected void store(ClassWriter classWriter, MethodWriter methodWriter, ScopeTable scopeTable) { Variable variable = scopeTable.getVariable(name); methodWriter.visitVarInsn(variable.getAsmType().getOpcode(Opcodes.ISTORE), variable.getSlot()); } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/WhileNode.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/WhileNode.java index 3e5df7d8a2e04..4b5abb54a61ce 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/WhileNode.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/WhileNode.java @@ -20,7 +20,6 @@ package org.elasticsearch.painless.ir; import org.elasticsearch.painless.ClassWriter; -import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.MethodWriter; import org.elasticsearch.painless.symbol.ScopeTable; import org.elasticsearch.painless.symbol.ScopeTable.Variable; @@ -30,7 +29,7 @@ public class WhileNode extends LoopNode { @Override - protected void write(ClassWriter classWriter, MethodWriter methodWriter, Globals globals, ScopeTable scopeTable) { + protected void write(ClassWriter classWriter, MethodWriter methodWriter, ScopeTable scopeTable) { methodWriter.writeStatementOffset(location); scopeTable = scopeTable.newScope(); @@ -41,7 +40,7 @@ protected void write(ClassWriter classWriter, MethodWriter methodWriter, Globals methodWriter.mark(begin); if (isContinuous() == false) { - getConditionNode().write(classWriter, methodWriter, globals, scopeTable); + getConditionNode().write(classWriter, methodWriter, scopeTable); methodWriter.ifZCmp(Opcodes.IFEQ, end); } @@ -54,7 +53,7 @@ protected void write(ClassWriter classWriter, MethodWriter methodWriter, Globals getBlockNode().continueLabel = begin; getBlockNode().breakLabel = end; - getBlockNode().write(classWriter, methodWriter, globals, scopeTable); + getBlockNode().write(classWriter, 
methodWriter, scopeTable); } else { Variable loop = scopeTable.getInternalVariable("loop"); diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/AExpression.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/AExpression.java index 723854dc0ce1e..ff53b661a9ba7 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/AExpression.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/AExpression.java @@ -25,7 +25,6 @@ import org.elasticsearch.painless.ir.ClassNode; import org.elasticsearch.painless.ir.ExpressionNode; import org.elasticsearch.painless.lookup.PainlessCast; -import org.elasticsearch.painless.lookup.PainlessLookupUtility; import org.elasticsearch.painless.symbol.ScriptRoot; import java.util.Objects; @@ -84,14 +83,6 @@ public abstract class AExpression extends ANode { */ boolean internal = false; - /** - * Set to the value of the constant this expression node represents if - * and only if the node represents a constant. If this is not null - * this node will be replaced by an {@link EConstant} during casting - * if it's not already one. - */ - Object constant = null; - /** * Set to true by {@link ENull} to represent a null value. */ @@ -134,97 +125,14 @@ AExpression cast(ScriptRoot scriptRoot, Scope scope) { PainlessCast cast = AnalyzerCaster.getLegalCast(location, actual, expected, explicit, internal); if (cast == null) { - if (constant == null || this instanceof EConstant) { - // For the case where a cast is not required and a constant is not set - // or the node is already an EConstant no changes are required to the tree. - - return this; - } else { - // For the case where a cast is not required but a - // constant is set, an EConstant replaces this node - // with the constant copied from this node. 
Note that - // for constants output data does not need to be copied - // from this node because the output data for the EConstant - // will already be the same. - - EConstant econstant = new EConstant(location, constant); - econstant.analyze(scriptRoot, scope); - - if (!expected.equals(econstant.actual)) { - throw createError(new IllegalStateException("Illegal tree structure.")); - } - - return econstant; - } + return this; } else { - if (constant == null) { - // For the case where a cast is required and a constant is not set. - // Modify the tree to add an ECast between this node and its parent. - // The output data from this node is copied to the ECast for - // further reads done by the parent. - - ECast ecast = new ECast(location, this, cast); - ecast.statement = statement; - ecast.actual = expected; - ecast.isNull = isNull; - - return ecast; - } else { - if (PainlessLookupUtility.isConstantType(expected)) { - // For the case where a cast is required, a constant is set, - // and the constant can be immediately cast to the expected type. - // An EConstant replaces this node with the constant cast appropriately - // from the constant value defined by this node. Note that - // for constants output data does not need to be copied - // from this node because the output data for the EConstant - // will already be the same. - - constant = AnalyzerCaster.constCast(location, constant, cast); - - EConstant econstant = new EConstant(location, constant); - econstant.analyze(scriptRoot, scope); - - if (!expected.equals(econstant.actual)) { - throw createError(new IllegalStateException("Illegal tree structure.")); - } - - return econstant; - } else if (this instanceof EConstant) { - // For the case where a cast is required, a constant is set, - // the constant cannot be immediately cast to the expected type, - // and this node is already an EConstant. Modify the tree to add - // an ECast between this node and its parent. 
Note that - // for constants output data does not need to be copied - // from this node because the output data for the EConstant - // will already be the same. - - ECast ecast = new ECast(location, this, cast); - ecast.actual = expected; - - return ecast; - } else { - // For the case where a cast is required, a constant is set, - // the constant cannot be immediately cast to the expected type, - // and this node is not an EConstant. Replace this node with - // an Econstant node copying the constant from this node. - // Modify the tree to add an ECast between the EConstant node - // and its parent. Note that for constants output data does not - // need to be copied from this node because the output data for - // the EConstant will already be the same. - - EConstant econstant = new EConstant(location, constant); - econstant.analyze(scriptRoot, scope); - - if (!actual.equals(econstant.actual)) { - throw createError(new IllegalStateException("Illegal tree structure.")); - } - - ECast ecast = new ECast(location, econstant, cast); - ecast.actual = expected; - - return ecast; - } - } + ECast ecast = new ECast(location, this, cast); + ecast.statement = statement; + ecast.actual = expected; + ecast.isNull = isNull; + + return ecast; } } } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EBinary.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EBinary.java index 2fc9dedfe144e..6deca2498b5c7 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EBinary.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EBinary.java @@ -116,20 +116,6 @@ private void analyzeMul(ScriptRoot scriptRoot, Scope variables) { left = left.cast(scriptRoot, variables); right = right.cast(scriptRoot, variables); - - if (left.constant != null && right.constant != null) { - if (promote == int.class) { - constant = (int)left.constant * (int)right.constant; - } else if (promote == long.class) { - 
constant = (long)left.constant * (long)right.constant; - } else if (promote == float.class) { - constant = (float)left.constant * (float)right.constant; - } else if (promote == double.class) { - constant = (double)left.constant * (double)right.constant; - } else { - throw createError(new IllegalStateException("Illegal tree structure.")); - } - } } private void analyzeDiv(ScriptRoot scriptRoot, Scope variables) { @@ -160,24 +146,6 @@ private void analyzeDiv(ScriptRoot scriptRoot, Scope variables) { left = left.cast(scriptRoot, variables); right = right.cast(scriptRoot, variables); - - if (left.constant != null && right.constant != null) { - try { - if (promote == int.class) { - constant = (int)left.constant / (int)right.constant; - } else if (promote == long.class) { - constant = (long)left.constant / (long)right.constant; - } else if (promote == float.class) { - constant = (float)left.constant / (float)right.constant; - } else if (promote == double.class) { - constant = (double)left.constant / (double)right.constant; - } else { - throw createError(new IllegalStateException("Illegal tree structure.")); - } - } catch (ArithmeticException exception) { - throw createError(exception); - } - } } private void analyzeRem(ScriptRoot scriptRoot, Scope variables) { @@ -208,24 +176,6 @@ private void analyzeRem(ScriptRoot scriptRoot, Scope variables) { left = left.cast(scriptRoot, variables); right = right.cast(scriptRoot, variables); - - if (left.constant != null && right.constant != null) { - try { - if (promote == int.class) { - constant = (int)left.constant % (int)right.constant; - } else if (promote == long.class) { - constant = (long)left.constant % (long)right.constant; - } else if (promote == float.class) { - constant = (float)left.constant % (float)right.constant; - } else if (promote == double.class) { - constant = (double)left.constant % (double)right.constant; - } else { - throw createError(new IllegalStateException("Illegal tree structure.")); - } - } catch 
(ArithmeticException exception) { - throw createError(exception); - } - } } private void analyzeAdd(ScriptRoot scriptRoot, Scope variables) { @@ -268,23 +218,6 @@ private void analyzeAdd(ScriptRoot scriptRoot, Scope variables) { left = left.cast(scriptRoot, variables); right = right.cast(scriptRoot, variables); - - if (left.constant != null && right.constant != null) { - if (promote == int.class) { - constant = (int)left.constant + (int)right.constant; - } else if (promote == long.class) { - constant = (long)left.constant + (long)right.constant; - } else if (promote == float.class) { - constant = (float)left.constant + (float)right.constant; - } else if (promote == double.class) { - constant = (double)left.constant + (double)right.constant; - } else if (promote == String.class) { - constant = left.constant.toString() + right.constant.toString(); - } else { - throw createError(new IllegalStateException("Illegal tree structure.")); - } - } - } private void analyzeSub(ScriptRoot scriptRoot, Scope variables) { @@ -315,20 +248,6 @@ private void analyzeSub(ScriptRoot scriptRoot, Scope variables) { left = left.cast(scriptRoot, variables); right = right.cast(scriptRoot, variables); - - if (left.constant != null && right.constant != null) { - if (promote == int.class) { - constant = (int)left.constant - (int)right.constant; - } else if (promote == long.class) { - constant = (long)left.constant - (long)right.constant; - } else if (promote == float.class) { - constant = (float)left.constant - (float)right.constant; - } else if (promote == double.class) { - constant = (double)left.constant - (double)right.constant; - } else { - throw createError(new IllegalStateException("Illegal tree structure.")); - } - } } private void analyzeRegexOp(ScriptRoot scriptRoot, Scope variables) { @@ -381,16 +300,6 @@ private void analyzeLSH(ScriptRoot scriptRoot, Scope variables) { left = left.cast(scriptRoot, variables); right = right.cast(scriptRoot, variables); - - if (left.constant != null 
&& right.constant != null) { - if (promote == int.class) { - constant = (int)left.constant << (int)right.constant; - } else if (promote == long.class) { - constant = (long)left.constant << (int)right.constant; - } else { - throw createError(new IllegalStateException("Illegal tree structure.")); - } - } } private void analyzeRSH(ScriptRoot scriptRoot, Scope variables) { @@ -429,16 +338,6 @@ private void analyzeRSH(ScriptRoot scriptRoot, Scope variables) { left = left.cast(scriptRoot, variables); right = right.cast(scriptRoot, variables); - - if (left.constant != null && right.constant != null) { - if (promote == int.class) { - constant = (int)left.constant >> (int)right.constant; - } else if (promote == long.class) { - constant = (long)left.constant >> (int)right.constant; - } else { - throw createError(new IllegalStateException("Illegal tree structure.")); - } - } } private void analyzeUSH(ScriptRoot scriptRoot, Scope variables) { @@ -477,16 +376,6 @@ private void analyzeUSH(ScriptRoot scriptRoot, Scope variables) { left = left.cast(scriptRoot, variables); right = right.cast(scriptRoot, variables); - - if (left.constant != null && right.constant != null) { - if (promote == int.class) { - constant = (int)left.constant >>> (int)right.constant; - } else if (promote == long.class) { - constant = (long)left.constant >>> (int)right.constant; - } else { - throw createError(new IllegalStateException("Illegal tree structure.")); - } - } } private void analyzeBWAnd(ScriptRoot scriptRoot, Scope variables) { @@ -517,16 +406,6 @@ private void analyzeBWAnd(ScriptRoot scriptRoot, Scope variables) { left = left.cast(scriptRoot, variables); right = right.cast(scriptRoot, variables); - - if (left.constant != null && right.constant != null) { - if (promote == int.class) { - constant = (int)left.constant & (int)right.constant; - } else if (promote == long.class) { - constant = (long)left.constant & (long)right.constant; - } else { - throw createError(new IllegalStateException("Illegal 
tree structure.")); - } - } } private void analyzeXor(ScriptRoot scriptRoot, Scope variables) { @@ -556,18 +435,6 @@ private void analyzeXor(ScriptRoot scriptRoot, Scope variables) { left = left.cast(scriptRoot, variables); right = right.cast(scriptRoot, variables); - - if (left.constant != null && right.constant != null) { - if (promote == boolean.class) { - constant = (boolean)left.constant ^ (boolean)right.constant; - } else if (promote == int.class) { - constant = (int)left.constant ^ (int)right.constant; - } else if (promote == long.class) { - constant = (long)left.constant ^ (long)right.constant; - } else { - throw createError(new IllegalStateException("Illegal tree structure.")); - } - } } private void analyzeBWOr(ScriptRoot scriptRoot, Scope variables) { @@ -597,16 +464,6 @@ private void analyzeBWOr(ScriptRoot scriptRoot, Scope variables) { left = left.cast(scriptRoot, variables); right = right.cast(scriptRoot, variables); - - if (left.constant != null && right.constant != null) { - if (promote == int.class) { - constant = (int)left.constant | (int)right.constant; - } else if (promote == long.class) { - constant = (long)left.constant | (long)right.constant; - } else { - throw createError(new IllegalStateException("Illegal tree structure.")); - } - } } @Override diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EBool.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EBool.java index d2bc1fcaf5233..ec2407dc92016 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EBool.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EBool.java @@ -55,16 +55,6 @@ void analyze(ScriptRoot scriptRoot, Scope scope) { right.analyze(scriptRoot, scope); right = right.cast(scriptRoot, scope); - if (left.constant != null && right.constant != null) { - if (operation == Operation.AND) { - constant = (boolean)left.constant && (boolean)right.constant; - } else if (operation 
== Operation.OR) { - constant = (boolean)left.constant || (boolean)right.constant; - } else { - throw createError(new IllegalStateException("Illegal tree structure.")); - } - } - actual = boolean.class; } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EBoolean.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EBoolean.java index 3964b3f989118..10d941fdce90f 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EBoolean.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EBoolean.java @@ -22,6 +22,7 @@ import org.elasticsearch.painless.Location; import org.elasticsearch.painless.Scope; import org.elasticsearch.painless.ir.ClassNode; +import org.elasticsearch.painless.ir.ConstantNode; import org.elasticsearch.painless.ir.ExpressionNode; import org.elasticsearch.painless.symbol.ScriptRoot; @@ -30,6 +31,8 @@ */ public final class EBoolean extends AExpression { + protected boolean constant; + public EBoolean(Location location, boolean constant) { super(location); @@ -47,7 +50,12 @@ void analyze(ScriptRoot scriptRoot, Scope scope) { @Override ExpressionNode write(ClassNode classNode) { - throw createError(new IllegalStateException("Illegal tree structure.")); + ConstantNode constantNode = new ConstantNode(); + constantNode.setLocation(location); + constantNode.setExpressionType(actual); + constantNode.setConstant(constant); + + return constantNode; } @Override diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EComp.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EComp.java index f65aaf8a8e063..ff1ac49431da7 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EComp.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EComp.java @@ -100,26 +100,6 @@ private void analyzeEq(ScriptRoot scriptRoot, Scope variables) { throw createError(new 
IllegalArgumentException("Extraneous comparison of null constants.")); } - if ((left.constant != null || left.isNull) && (right.constant != null || right.isNull)) { - if (promotedType == boolean.class) { - constant = (boolean)left.constant == (boolean)right.constant; - } else if (promotedType == int.class) { - constant = (int)left.constant == (int)right.constant; - } else if (promotedType == long.class) { - constant = (long)left.constant == (long)right.constant; - } else if (promotedType == float.class) { - constant = (float)left.constant == (float)right.constant; - } else if (promotedType == double.class) { - constant = (double)left.constant == (double)right.constant; - } else if (!left.isNull) { - constant = left.constant.equals(right.constant); - } else if (!right.isNull) { - constant = right.constant.equals(null); - } else { - throw createError(new IllegalStateException("Illegal tree structure.")); - } - } - actual = boolean.class; } @@ -145,22 +125,6 @@ private void analyzeEqR(ScriptRoot scriptRoot, Scope variables) { throw createError(new IllegalArgumentException("Extraneous comparison of null constants.")); } - if ((left.constant != null || left.isNull) && (right.constant != null || right.isNull)) { - if (promotedType == boolean.class) { - constant = (boolean)left.constant == (boolean)right.constant; - } else if (promotedType == int.class) { - constant = (int)left.constant == (int)right.constant; - } else if (promotedType == long.class) { - constant = (long)left.constant == (long)right.constant; - } else if (promotedType == float.class) { - constant = (float)left.constant == (float)right.constant; - } else if (promotedType == double.class) { - constant = (double)left.constant == (double)right.constant; - } else { - constant = left.constant == right.constant; - } - } - actual = boolean.class; } @@ -191,26 +155,6 @@ private void analyzeNE(ScriptRoot scriptRoot, Scope variables) { throw createError(new IllegalArgumentException("Extraneous comparison of null 
constants.")); } - if ((left.constant != null || left.isNull) && (right.constant != null || right.isNull)) { - if (promotedType == boolean.class) { - constant = (boolean)left.constant != (boolean)right.constant; - } else if (promotedType == int.class) { - constant = (int)left.constant != (int)right.constant; - } else if (promotedType == long.class) { - constant = (long)left.constant != (long)right.constant; - } else if (promotedType == float.class) { - constant = (float)left.constant != (float)right.constant; - } else if (promotedType == double.class) { - constant = (double)left.constant != (double)right.constant; - } else if (!left.isNull) { - constant = !left.constant.equals(right.constant); - } else if (!right.isNull) { - constant = !right.constant.equals(null); - } else { - throw createError(new IllegalStateException("Illegal tree structure.")); - } - } - actual = boolean.class; } @@ -236,22 +180,6 @@ private void analyzeNER(ScriptRoot scriptRoot, Scope variables) { throw createError(new IllegalArgumentException("Extraneous comparison of null constants.")); } - if ((left.constant != null || left.isNull) && (right.constant != null || right.isNull)) { - if (promotedType == boolean.class) { - constant = (boolean)left.constant != (boolean)right.constant; - } else if (promotedType == int.class) { - constant = (int)left.constant != (int)right.constant; - } else if (promotedType == long.class) { - constant = (long)left.constant != (long)right.constant; - } else if (promotedType == float.class) { - constant = (float)left.constant != (float)right.constant; - } else if (promotedType == double.class) { - constant = (double)left.constant != (double)right.constant; - } else { - constant = left.constant != right.constant; - } - } - actual = boolean.class; } @@ -278,20 +206,6 @@ private void analyzeGTE(ScriptRoot scriptRoot, Scope variables) { left = left.cast(scriptRoot, variables); right = right.cast(scriptRoot, variables); - if (left.constant != null && right.constant != 
null) { - if (promotedType == int.class) { - constant = (int)left.constant >= (int)right.constant; - } else if (promotedType == long.class) { - constant = (long)left.constant >= (long)right.constant; - } else if (promotedType == float.class) { - constant = (float)left.constant >= (float)right.constant; - } else if (promotedType == double.class) { - constant = (double)left.constant >= (double)right.constant; - } else { - throw createError(new IllegalStateException("Illegal tree structure.")); - } - } - actual = boolean.class; } @@ -318,20 +232,6 @@ private void analyzeGT(ScriptRoot scriptRoot, Scope variables) { left = left.cast(scriptRoot, variables); right = right.cast(scriptRoot, variables); - if (left.constant != null && right.constant != null) { - if (promotedType == int.class) { - constant = (int)left.constant > (int)right.constant; - } else if (promotedType == long.class) { - constant = (long)left.constant > (long)right.constant; - } else if (promotedType == float.class) { - constant = (float)left.constant > (float)right.constant; - } else if (promotedType == double.class) { - constant = (double)left.constant > (double)right.constant; - } else { - throw createError(new IllegalStateException("Illegal tree structure.")); - } - } - actual = boolean.class; } @@ -358,20 +258,6 @@ private void analyzeLTE(ScriptRoot scriptRoot, Scope variables) { left = left.cast(scriptRoot, variables); right = right.cast(scriptRoot, variables); - if (left.constant != null && right.constant != null) { - if (promotedType == int.class) { - constant = (int)left.constant <= (int)right.constant; - } else if (promotedType == long.class) { - constant = (long)left.constant <= (long)right.constant; - } else if (promotedType == float.class) { - constant = (float)left.constant <= (float)right.constant; - } else if (promotedType == double.class) { - constant = (double)left.constant <= (double)right.constant; - } else { - throw createError(new IllegalStateException("Illegal tree structure.")); - 
} - } - actual = boolean.class; } @@ -398,20 +284,6 @@ private void analyzeLT(ScriptRoot scriptRoot, Scope variables) { left = left.cast(scriptRoot, variables); right = right.cast(scriptRoot, variables); - if (left.constant != null && right.constant != null) { - if (promotedType == int.class) { - constant = (int)left.constant < (int)right.constant; - } else if (promotedType == long.class) { - constant = (long)left.constant < (long)right.constant; - } else if (promotedType == float.class) { - constant = (float)left.constant < (float)right.constant; - } else if (promotedType == double.class) { - constant = (double)left.constant < (double)right.constant; - } else { - throw createError(new IllegalStateException("Illegal tree structure.")); - } - } - actual = boolean.class; } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EConditional.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EConditional.java index c4516aa26dcc5..2f3c04e8d899c 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EConditional.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EConditional.java @@ -24,6 +24,7 @@ import org.elasticsearch.painless.Scope; import org.elasticsearch.painless.ir.ClassNode; import org.elasticsearch.painless.ir.ConditionalNode; +import org.elasticsearch.painless.lookup.PainlessLookupUtility; import org.elasticsearch.painless.symbol.ScriptRoot; import java.util.Objects; @@ -51,10 +52,6 @@ void analyze(ScriptRoot scriptRoot, Scope scope) { condition.analyze(scriptRoot, scope); condition = condition.cast(scriptRoot, scope); - if (condition.constant != null) { - throw createError(new IllegalArgumentException("Extraneous conditional statement.")); - } - left.expected = expected; left.explicit = explicit; left.internal = internal; @@ -67,7 +64,13 @@ void analyze(ScriptRoot scriptRoot, Scope scope) { right.analyze(scriptRoot, scope); if (expected == null) { - Class 
promote = AnalyzerCaster.promoteConditional(left.actual, right.actual, left.constant, right.constant); + Class promote = AnalyzerCaster.promoteConditional(left.actual, right.actual); + + if (promote == null) { + throw createError(new ClassCastException("cannot apply a conditional operator [?:] to the types " + + "[" + PainlessLookupUtility.typeToCanonicalTypeName(left.actual) + "] and " + + "[" + PainlessLookupUtility.typeToCanonicalTypeName(right.actual) + "]")); + } left.expected = promote; right.expected = promote; diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EConstant.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EConstant.java index dbca094ba51b8..c02beca4ec044 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EConstant.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EConstant.java @@ -31,6 +31,8 @@ */ final class EConstant extends AExpression { + protected Object constant; + EConstant(Location location, Object constant) { super(location); @@ -65,7 +67,6 @@ void analyze(ScriptRoot scriptRoot, Scope scope) { @Override ConstantNode write(ClassNode classNode) { ConstantNode constantNode = new ConstantNode(); - constantNode.setLocation(location); constantNode.setExpressionType(actual); constantNode.setConstant(constant); diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EDecimal.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EDecimal.java index 1d62c0587f752..049e52799f1f7 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EDecimal.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EDecimal.java @@ -22,6 +22,7 @@ import org.elasticsearch.painless.Location; import org.elasticsearch.painless.Scope; import org.elasticsearch.painless.ir.ClassNode; +import org.elasticsearch.painless.ir.ConstantNode; import 
org.elasticsearch.painless.ir.ExpressionNode; import org.elasticsearch.painless.symbol.ScriptRoot; @@ -34,6 +35,8 @@ public final class EDecimal extends AExpression { private final String value; + protected Object constant; + public EDecimal(Location location, String value) { super(location); @@ -69,7 +72,12 @@ void analyze(ScriptRoot scriptRoot, Scope scope) { @Override ExpressionNode write(ClassNode classNode) { - throw createError(new IllegalStateException("Illegal tree structure.")); + ConstantNode constantNode = new ConstantNode(); + constantNode.setLocation(location); + constantNode.setExpressionType(actual); + constantNode.setConstant(constant); + + return constantNode; } @Override diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EElvis.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EElvis.java index 45cfdab5e8c34..89eabb439ba72 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EElvis.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EElvis.java @@ -61,7 +61,11 @@ void analyze(ScriptRoot scriptRoot, Scope scope) { if (lhs.isNull) { throw createError(new IllegalArgumentException("Extraneous elvis operator. LHS is null.")); } - if (lhs.constant != null) { + if (lhs instanceof EBoolean + || lhs instanceof ENumeric + || lhs instanceof EDecimal + || lhs instanceof EString + || lhs instanceof EConstant) { throw createError(new IllegalArgumentException("Extraneous elvis operator. 
LHS is a constant.")); } if (lhs.actual.isPrimitive()) { @@ -72,7 +76,7 @@ void analyze(ScriptRoot scriptRoot, Scope scope) { } if (expected == null) { - Class promote = AnalyzerCaster.promoteConditional(lhs.actual, rhs.actual, lhs.constant, rhs.constant); + Class promote = AnalyzerCaster.promoteConditional(lhs.actual, rhs.actual); lhs.expected = promote; rhs.expected = promote; diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ENumeric.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ENumeric.java index 0699475adbf1c..5c689f090acaf 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ENumeric.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ENumeric.java @@ -22,6 +22,7 @@ import org.elasticsearch.painless.Location; import org.elasticsearch.painless.Scope; import org.elasticsearch.painless.ir.ClassNode; +import org.elasticsearch.painless.ir.ConstantNode; import org.elasticsearch.painless.ir.ExpressionNode; import org.elasticsearch.painless.symbol.ScriptRoot; @@ -35,6 +36,8 @@ public final class ENumeric extends AExpression { private final String value; private int radix; + protected Object constant; + public ENumeric(Location location, String value, int radix) { super(location); @@ -111,7 +114,12 @@ void analyze(ScriptRoot scriptRoot, Scope scope) { @Override ExpressionNode write(ClassNode classNode) { - throw createError(new IllegalStateException("Illegal tree structure.")); + ConstantNode constantNode = new ConstantNode(); + constantNode.setLocation(location); + constantNode.setExpressionType(actual); + constantNode.setConstant(constant); + + return constantNode; } @Override diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ERegex.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ERegex.java index a4a1be53006ce..6c3527291afa9 100644 --- 
a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ERegex.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ERegex.java @@ -19,16 +19,23 @@ package org.elasticsearch.painless.node; -import org.elasticsearch.painless.Constant; import org.elasticsearch.painless.Location; -import org.elasticsearch.painless.MethodWriter; import org.elasticsearch.painless.Scope; -import org.elasticsearch.painless.WriterConstants; +import org.elasticsearch.painless.ir.BlockNode; +import org.elasticsearch.painless.ir.CallNode; +import org.elasticsearch.painless.ir.CallSubNode; import org.elasticsearch.painless.ir.ClassNode; -import org.elasticsearch.painless.ir.RegexNode; +import org.elasticsearch.painless.ir.ConstantNode; +import org.elasticsearch.painless.ir.FieldNode; +import org.elasticsearch.painless.ir.MemberFieldLoadNode; +import org.elasticsearch.painless.ir.MemberFieldStoreNode; +import org.elasticsearch.painless.ir.StatementExpressionNode; +import org.elasticsearch.painless.ir.StaticNode; +import org.elasticsearch.painless.lookup.PainlessMethod; import org.elasticsearch.painless.symbol.ScriptRoot; import java.lang.reflect.Modifier; +import java.util.Arrays; import java.util.regex.Pattern; import java.util.regex.PatternSyntaxException; @@ -39,7 +46,7 @@ public final class ERegex extends AExpression { private final String pattern; private final int flags; - private Constant constant; + private String name; public ERegex(Location location, String pattern, String flagsString) { super(location); @@ -74,30 +81,89 @@ void analyze(ScriptRoot scriptRoot, Scope scope) { new IllegalArgumentException("Error compiling regex: " + e.getDescription())); } - String name = scriptRoot.getNextSyntheticName("regex"); - scriptRoot.getClassNode().addField( - new SField(location, Modifier.FINAL | Modifier.STATIC | Modifier.PRIVATE, name, Pattern.class)); - constant = new Constant(location, MethodWriter.getType(Pattern.class), name, 
this::initializeConstant); + name = scriptRoot.getNextSyntheticName("regex"); actual = Pattern.class; } @Override - RegexNode write(ClassNode classNode) { - RegexNode regexNode = new RegexNode(); - regexNode.setLocation(location); + MemberFieldLoadNode write(ClassNode classNode) { + FieldNode fieldNode = new FieldNode(); + fieldNode.setLocation(location); + fieldNode.setModifiers(Modifier.FINAL | Modifier.STATIC | Modifier.PRIVATE); + fieldNode.setFieldType(Pattern.class); + fieldNode.setName(name); - regexNode.setExpressionType(actual); - regexNode.setFlags(flags); - regexNode.setPattern(pattern); - regexNode.setConstant(constant); + classNode.addFieldNode(fieldNode); - return regexNode; - } + try { + StatementExpressionNode statementExpressionNode = new StatementExpressionNode(); + statementExpressionNode.setLocation(location); + + BlockNode blockNode = classNode.getClinitBlockNode(); + blockNode.addStatementNode(statementExpressionNode); + + MemberFieldStoreNode memberFieldStoreNode = new MemberFieldStoreNode(); + memberFieldStoreNode.setLocation(location); + memberFieldStoreNode.setExpressionType(void.class); + memberFieldStoreNode.setFieldType(Pattern.class); + memberFieldStoreNode.setName(name); + memberFieldStoreNode.setStatic(true); + + statementExpressionNode.setExpressionNode(memberFieldStoreNode); + + CallNode callNode = new CallNode(); + callNode.setLocation(location); + callNode.setExpressionType(Pattern.class); + + memberFieldStoreNode.setChildNode(callNode); + + StaticNode staticNode = new StaticNode(); + staticNode.setLocation(location); + staticNode.setExpressionType(Pattern.class); + + callNode.setLeftNode(staticNode); + + CallSubNode callSubNode = new CallSubNode(); + callSubNode.setLocation(location); + callSubNode.setExpressionType(Pattern.class); + callSubNode.setBox(Pattern.class); + callSubNode.setMethod(new PainlessMethod( + Pattern.class.getMethod("compile", String.class, int.class), + Pattern.class, + Pattern.class, + 
Arrays.asList(String.class, int.class), + null, + null, + null + ) + ); + + callNode.setRightNode(callSubNode); + + ConstantNode constantNode = new ConstantNode(); + constantNode.setLocation(location); + constantNode.setExpressionType(String.class); + constantNode.setConstant(pattern); + + callSubNode.addArgumentNode(constantNode); + + constantNode = new ConstantNode(); + constantNode.setLocation(location); + constantNode.setExpressionType(int.class); + constantNode.setConstant(flags); + + callSubNode.addArgumentNode(constantNode); + } catch (Exception exception) { + throw createError(new IllegalStateException("could not generate regex constant [" + pattern + "/" + flags +"] in clinit")); + } + + MemberFieldLoadNode memberFieldLoadNode = new MemberFieldLoadNode(); + memberFieldLoadNode.setLocation(location); + memberFieldLoadNode.setExpressionType(Pattern.class); + memberFieldLoadNode.setName(name); + memberFieldLoadNode.setStatic(true); - private void initializeConstant(MethodWriter writer) { - writer.push(pattern); - writer.push(flags); - writer.invokeStatic(org.objectweb.asm.Type.getType(Pattern.class), WriterConstants.PATTERN_COMPILE); + return memberFieldLoadNode; } private int flagForChar(char c) { diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EString.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EString.java index 62367437dc648..423ff49e521b9 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EString.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EString.java @@ -22,6 +22,7 @@ import org.elasticsearch.painless.Location; import org.elasticsearch.painless.Scope; import org.elasticsearch.painless.ir.ClassNode; +import org.elasticsearch.painless.ir.ConstantNode; import org.elasticsearch.painless.ir.ExpressionNode; import org.elasticsearch.painless.symbol.ScriptRoot; @@ -32,6 +33,8 @@ */ public final class EString extends AExpression { + 
protected String constant; + public EString(Location location, String string) { super(location); @@ -49,7 +52,12 @@ void analyze(ScriptRoot scriptRoot, Scope scope) { @Override ExpressionNode write(ClassNode classNode) { - throw new IllegalStateException("Illegal tree structure."); + ConstantNode constantNode = new ConstantNode(); + constantNode.setLocation(location); + constantNode.setExpressionType(actual); + constantNode.setConstant(constant); + + return constantNode; } @Override diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EUnary.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EUnary.java index 5d10612d1f6ab..93d0969da3c91 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EUnary.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EUnary.java @@ -72,10 +72,6 @@ void analyzeNot(ScriptRoot scriptRoot, Scope variables) { child.analyze(scriptRoot, variables); child = child.cast(scriptRoot, variables); - if (child.constant != null) { - constant = !(boolean)child.constant; - } - actual = boolean.class; } @@ -92,16 +88,6 @@ void analyzeBWNot(ScriptRoot scriptRoot, Scope variables) { child.expected = promote; child = child.cast(scriptRoot, variables); - if (child.constant != null) { - if (promote == int.class) { - constant = ~(int)child.constant; - } else if (promote == long.class) { - constant = ~(long)child.constant; - } else { - throw createError(new IllegalStateException("Illegal tree structure.")); - } - } - if (promote == def.class && expected != null) { actual = expected; } else { @@ -122,20 +108,6 @@ void analyzerAdd(ScriptRoot scriptRoot, Scope variables) { child.expected = promote; child = child.cast(scriptRoot, variables); - if (child.constant != null) { - if (promote == int.class) { - constant = +(int)child.constant; - } else if (promote == long.class) { - constant = +(long)child.constant; - } else if (promote == float.class) { - constant 
= +(float)child.constant; - } else if (promote == double.class) { - constant = +(double)child.constant; - } else { - throw createError(new IllegalStateException("Illegal tree structure.")); - } - } - if (promote == def.class && expected != null) { actual = expected; } else { @@ -156,20 +128,6 @@ void analyzerSub(ScriptRoot scriptRoot, Scope variables) { child.expected = promote; child = child.cast(scriptRoot, variables); - if (child.constant != null) { - if (promote == int.class) { - constant = -(int)child.constant; - } else if (promote == long.class) { - constant = -(long)child.constant; - } else if (promote == float.class) { - constant = -(float)child.constant; - } else if (promote == double.class) { - constant = -(double)child.constant; - } else { - throw createError(new IllegalStateException("Illegal tree structure.")); - } - } - if (promote == def.class && expected != null) { actual = expected; } else { diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SDo.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SDo.java index 1e8873f259056..41afc52f878b3 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SDo.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SDo.java @@ -64,8 +64,8 @@ void analyze(ScriptRoot scriptRoot, Scope scope) { condition.analyze(scriptRoot, scope); condition = condition.cast(scriptRoot, scope); - if (condition.constant != null) { - continuous = (boolean)condition.constant; + if (condition instanceof EBoolean) { + continuous = ((EBoolean)condition).constant; if (!continuous) { throw createError(new IllegalArgumentException("Extraneous do while loop.")); diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SExpression.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SExpression.java index 228380d4e8048..361ba65d13ba9 100644 --- 
a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SExpression.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SExpression.java @@ -22,7 +22,9 @@ import org.elasticsearch.painless.Location; import org.elasticsearch.painless.Scope; import org.elasticsearch.painless.ir.ClassNode; +import org.elasticsearch.painless.ir.ReturnNode; import org.elasticsearch.painless.ir.StatementExpressionNode; +import org.elasticsearch.painless.ir.StatementNode; import org.elasticsearch.painless.symbol.ScriptRoot; import java.util.Objects; @@ -48,7 +50,7 @@ void analyze(ScriptRoot scriptRoot, Scope scope) { expression.read = lastSource && !isVoid; expression.analyze(scriptRoot, scope); - if (!lastSource && !expression.statement) { + if ((lastSource == false || isVoid) && expression.statement == false) { throw createError(new IllegalArgumentException("Not a statement.")); } @@ -65,15 +67,24 @@ void analyze(ScriptRoot scriptRoot, Scope scope) { } @Override - StatementExpressionNode write(ClassNode classNode) { - StatementExpressionNode statementExpressionNode = new StatementExpressionNode(); + StatementNode write(ClassNode classNode) { + if (methodEscape) { + ReturnNode returnNode = new ReturnNode(); - statementExpressionNode.setExpressionNode(expression.write(classNode)); + returnNode.setExpressionNode(expression.write(classNode)); - statementExpressionNode.setLocation(location); - statementExpressionNode.setMethodEscape(methodEscape); + returnNode.setLocation(location); - return statementExpressionNode; + return returnNode; + } else { + StatementExpressionNode statementExpressionNode = new StatementExpressionNode(); + + statementExpressionNode.setExpressionNode(expression.write(classNode)); + + statementExpressionNode.setLocation(location); + + return statementExpressionNode; + } } @Override diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SFor.java 
b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SFor.java index 9d8d3e3006936..970ba8d877f48 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SFor.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SFor.java @@ -79,8 +79,8 @@ void analyze(ScriptRoot scriptRoot, Scope scope) { condition.analyze(scriptRoot, scope); condition = condition.cast(scriptRoot, scope); - if (condition.constant != null) { - continuous = (boolean)condition.constant; + if (condition instanceof EBoolean) { + continuous = ((EBoolean)condition).constant; if (!continuous) { throw createError(new IllegalArgumentException("Extraneous for loop.")); diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SIf.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SIf.java index 5b13be46b254b..f00866a6ef422 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SIf.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SIf.java @@ -48,7 +48,7 @@ void analyze(ScriptRoot scriptRoot, Scope scope) { condition.analyze(scriptRoot, scope); condition = condition.cast(scriptRoot, scope); - if (condition.constant != null) { + if (condition instanceof EBoolean) { throw createError(new IllegalArgumentException("Extraneous if statement.")); } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SIfElse.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SIfElse.java index 38b258557a289..518094a10c1a4 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SIfElse.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SIfElse.java @@ -53,7 +53,7 @@ void analyze(ScriptRoot scriptRoot, Scope scope) { condition.analyze(scriptRoot, scope); condition = condition.cast(scriptRoot, scope); - if (condition.constant != null) { + if (condition instanceof 
EBoolean) { throw createError(new IllegalArgumentException("Extraneous if statement.")); } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SWhile.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SWhile.java index 5192e06793f73..47909b6ded674 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SWhile.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SWhile.java @@ -52,8 +52,8 @@ void analyze(ScriptRoot scriptRoot, Scope scope) { condition.analyze(scriptRoot, scope); condition = condition.cast(scriptRoot, scope); - if (condition.constant != null) { - continuous = (boolean)condition.constant; + if (condition instanceof EBoolean) { + continuous = ((EBoolean)condition).constant; if (!continuous) { throw createError(new IllegalArgumentException("Extraneous while loop.")); diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/BaseClassTests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/BaseClassTests.java index d4679787806ce..578efb25dbff9 100644 --- a/modules/lang-painless/src/test/java/org/elasticsearch/painless/BaseClassTests.java +++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/BaseClassTests.java @@ -557,7 +557,6 @@ public void testReturnsPrimitiveDouble() throws Exception { .newInstance().execute(), 0); String debug = Debugger.toString(ReturnsPrimitiveDouble.class, "1", new CompilerSettings()); - assertThat(debug, containsString("DCONST_1")); // The important thing here is that we have the bytecode for returning a double instead of an object assertThat(debug, containsString("DRETURN")); diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/FactoryTests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/FactoryTests.java index ffd4df43c9070..02f8774263bdf 100644 --- a/modules/lang-painless/src/test/java/org/elasticsearch/painless/FactoryTests.java 
+++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/FactoryTests.java @@ -38,6 +38,7 @@ protected Map, List> scriptContexts() { contexts.put(DeterministicFactoryTestScript.CONTEXT, Whitelist.BASE_WHITELISTS); contexts.put(EmptyTestScript.CONTEXT, Whitelist.BASE_WHITELISTS); contexts.put(TemplateScript.CONTEXT, Whitelist.BASE_WHITELISTS); + contexts.put(VoidReturnTestScript.CONTEXT, Whitelist.BASE_WHITELISTS); return contexts; } @@ -255,4 +256,22 @@ public void testGetterInLambda() { FactoryTestScript script = factory.newInstance(Collections.singletonMap("x", 1)); assertEquals(2, script.execute(1)); } + + public abstract static class VoidReturnTestScript { + public static final String[] PARAMETERS = {"map"}; + public abstract void execute(Map map); + + public interface Factory { + VoidReturnTestScript newInstance(); + } + + public static final ScriptContext CONTEXT = + new ScriptContext<>("test", VoidReturnTestScript.Factory.class); + } + + public void testVoidReturn() { + IllegalArgumentException iae = expectScriptThrows(IllegalArgumentException.class, () -> + scriptEngine.compile("void_return_test", "1 + 1", VoidReturnTestScript.CONTEXT, Collections.emptyMap())); + assertEquals(iae.getMessage(), "Not a statement."); + } } diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/SimilarityScriptTests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/SimilarityScriptTests.java index 1b4c4eb0ff636..8b9560bbd32c9 100644 --- a/modules/lang-painless/src/test/java/org/elasticsearch/painless/SimilarityScriptTests.java +++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/SimilarityScriptTests.java @@ -34,8 +34,8 @@ import org.apache.lucene.search.Query; import org.apache.lucene.search.TermQuery; import org.apache.lucene.search.TopDocs; +import org.apache.lucene.store.ByteBuffersDirectory; import org.apache.lucene.store.Directory; -import org.apache.lucene.store.RAMDirectory; import 
org.elasticsearch.index.similarity.ScriptedSimilarity; import org.elasticsearch.painless.spi.Whitelist; import org.elasticsearch.script.ScriptContext; @@ -62,7 +62,7 @@ public void testBasics() throws IOException { SimilarityScript.Factory factory = scriptEngine.compile( "foobar", "return query.boost * doc.freq / doc.length", SimilarityScript.CONTEXT, Collections.emptyMap()); ScriptedSimilarity sim = new ScriptedSimilarity("foobar", null, "foobaz", factory::newInstance, true); - Directory dir = new RAMDirectory(); + Directory dir = new ByteBuffersDirectory(); IndexWriter w = new IndexWriter(dir, newIndexWriterConfig().setSimilarity(sim)); Document doc = new Document(); @@ -101,7 +101,7 @@ public void testWeightScript() throws IOException { SimilarityScript.Factory factory = scriptEngine.compile( "foobar", "return weight * doc.freq / doc.length", SimilarityScript.CONTEXT, Collections.emptyMap()); ScriptedSimilarity sim = new ScriptedSimilarity("foobar", weightFactory::newInstance, "foobaz", factory::newInstance, true); - Directory dir = new RAMDirectory(); + Directory dir = new ByteBuffersDirectory(); IndexWriter w = new IndexWriter(dir, newIndexWriterConfig().setSimilarity(sim)); Document doc = new Document(); diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/StringTests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/StringTests.java index 31870b9125cb3..c04c1d9304425 100644 --- a/modules/lang-painless/src/test/java/org/elasticsearch/painless/StringTests.java +++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/StringTests.java @@ -167,12 +167,12 @@ public void testStringAndCharacter() { assertEquals('c', exec("String s = \"c\"; (char)s")); assertEquals('c', exec("String s = 'c'; (char)s")); - ClassCastException expected = expectScriptThrows(ClassCastException.class, false, () -> { + ClassCastException expected = expectScriptThrows(ClassCastException.class, () -> { assertEquals("cc", exec("return 
(String)(char)\"cc\"")); }); assertTrue(expected.getMessage().contains("cannot cast java.lang.String with length not equal to one to char")); - expected = expectScriptThrows(ClassCastException.class, false, () -> { + expected = expectScriptThrows(ClassCastException.class, () -> { assertEquals("cc", exec("return (String)(char)'cc'")); }); assertTrue(expected.getMessage().contains("cannot cast java.lang.String with length not equal to one to char")); diff --git a/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolateQueryBuilder.java b/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolateQueryBuilder.java index 57a49dbed4f65..bc0e04ed42bcb 100644 --- a/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolateQueryBuilder.java +++ b/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolateQueryBuilder.java @@ -23,6 +23,7 @@ import org.apache.lucene.analysis.DelegatingAnalyzerWrapper; import org.apache.lucene.index.BinaryDocValues; import org.apache.lucene.index.DirectoryReader; +import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.IndexReaderContext; import org.apache.lucene.index.IndexWriter; import org.apache.lucene.index.IndexWriterConfig; @@ -35,7 +36,8 @@ import org.apache.lucene.search.Scorer; import org.apache.lucene.search.Weight; import org.apache.lucene.search.join.BitSetProducer; -import org.apache.lucene.store.RAMDirectory; +import org.apache.lucene.store.ByteBuffersDirectory; +import org.apache.lucene.store.Directory; import org.apache.lucene.util.BitDocIdSet; import org.apache.lucene.util.BitSet; import org.apache.lucene.util.BytesRef; @@ -74,6 +76,7 @@ import org.elasticsearch.index.query.QueryRewriteContext; import org.elasticsearch.index.query.QueryShardContext; import org.elasticsearch.index.query.QueryShardException; +import org.elasticsearch.index.query.Rewriteable; import org.elasticsearch.indices.breaker.CircuitBreakerService; import 
org.elasticsearch.indices.breaker.NoneCircuitBreakerService; @@ -561,9 +564,9 @@ protected Analyzer getWrappedAnalyzer(String fieldName) { PercolatorFieldMapper.FieldType pft = (PercolatorFieldMapper.FieldType) fieldType; String name = this.name != null ? this.name : pft.name(); QueryShardContext percolateShardContext = wrap(context); + PercolatorFieldMapper.configureContext(percolateShardContext, pft.mapUnmappedFieldsAsText);; PercolateQuery.QueryStore queryStore = createStore(pft.queryBuilderField, - percolateShardContext, - pft.mapUnmappedFieldsAsText); + percolateShardContext); return pft.percolateQuery(name, queryStore, documents, docSearcher, excludeNestedDocuments, context.indexVersionCreated()); } @@ -586,8 +589,8 @@ public String getQueryName() { } static IndexSearcher createMultiDocumentSearcher(Analyzer analyzer, Collection docs) { - RAMDirectory ramDirectory = new RAMDirectory(); - try (IndexWriter indexWriter = new IndexWriter(ramDirectory, new IndexWriterConfig(analyzer))) { + Directory directory = new ByteBuffersDirectory(); + try (IndexWriter indexWriter = new IndexWriter(directory, new IndexWriterConfig(analyzer))) { // Indexing in order here, so that the user provided order matches with the docid sequencing: Iterable iterable = () -> docs.stream() .map(ParsedDocument::docs) @@ -606,8 +609,7 @@ static IndexSearcher createMultiDocumentSearcher(Analyzer analyzer, Collection

{ @@ -633,7 +635,8 @@ static PercolateQuery.QueryStore createStore(MappedFieldType queryBuilderFieldTy assert valueLength > 0; QueryBuilder queryBuilder = input.readNamedWriteable(QueryBuilder.class); assert in.read() == -1; - return PercolatorFieldMapper.toQuery(context, mapUnmappedFieldsAsString, queryBuilder); + queryBuilder = Rewriteable.rewrite(queryBuilder, context); + return queryBuilder.toQuery(context); } } } else { @@ -646,6 +649,13 @@ static PercolateQuery.QueryStore createStore(MappedFieldType queryBuilderFieldTy static QueryShardContext wrap(QueryShardContext shardContext) { return new QueryShardContext(shardContext) { + @Override + public IndexReader getIndexReader() { + // The reader that matters in this context is not the reader of the shard but + // the reader of the MemoryIndex. We just use `null` for simplicity. + return null; + } + @Override public BitSetProducer bitsetFilter(Query query) { return context -> { diff --git a/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolatorFieldMapper.java b/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolatorFieldMapper.java index 501d71465d679..bf481e23e1081 100644 --- a/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolatorFieldMapper.java +++ b/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolatorFieldMapper.java @@ -394,6 +394,8 @@ public void parse(ParseContext context) throws IOException { throw new IllegalArgumentException("a document can only contain one percolator query"); } + configureContext(queryShardContext, isMapUnmappedFieldAsText()); + XContentParser parser = context.parser(); QueryBuilder queryBuilder = parseQueryBuilder( parser, parser.getTokenLocation() @@ -407,14 +409,8 @@ public void parse(ParseContext context) throws IOException { Version indexVersion = context.mapperService().getIndexSettings().getIndexVersionCreated(); createQueryBuilderField(indexVersion, queryBuilderField, queryBuilder, context); - 
QueryBuilder queryBuilderForProcessing = queryBuilder.rewrite(new QueryShardContext(queryShardContext) { - - @Override - public boolean convertNowRangeToMatchAll() { - return true; - } - }); - Query query = toQuery(queryShardContext, isMapUnmappedFieldAsText(), queryBuilderForProcessing); + QueryBuilder queryBuilderForProcessing = queryBuilder.rewrite(new QueryShardContext(queryShardContext)); + Query query = queryBuilderForProcessing.toQuery(queryShardContext); processQuery(query, context); } @@ -472,11 +468,7 @@ void processQuery(Query query, ParseContext context) { doc.add(new NumericDocValuesField(minimumShouldMatchFieldMapper.name(), result.minimumShouldMatch)); } - static Query parseQuery(QueryShardContext context, boolean mapUnmappedFieldsAsString, XContentParser parser) throws IOException { - return toQuery(context, mapUnmappedFieldsAsString, parseQueryBuilder(parser, parser.getTokenLocation())); - } - - static Query toQuery(QueryShardContext context, boolean mapUnmappedFieldsAsString, QueryBuilder queryBuilder) throws IOException { + static void configureContext(QueryShardContext context, boolean mapUnmappedFieldsAsString) { // This means that fields in the query need to exist in the mapping prior to registering this query // The reason that this is required, is that if a field doesn't exist then the query assumes defaults, which may be undesired. // @@ -491,7 +483,6 @@ static Query toQuery(QueryShardContext context, boolean mapUnmappedFieldsAsStrin // as an analyzed string. 
context.setAllowUnmappedFields(false); context.setMapUnmappedFieldAsString(mapUnmappedFieldsAsString); - return queryBuilder.toQuery(context); } private static QueryBuilder parseQueryBuilder(XContentParser parser, XContentLocation location) { diff --git a/modules/percolator/src/main/java/org/elasticsearch/percolator/QueryAnalyzer.java b/modules/percolator/src/main/java/org/elasticsearch/percolator/QueryAnalyzer.java index 800706b74b9d3..db7020cf8176c 100644 --- a/modules/percolator/src/main/java/org/elasticsearch/percolator/QueryAnalyzer.java +++ b/modules/percolator/src/main/java/org/elasticsearch/percolator/QueryAnalyzer.java @@ -40,6 +40,7 @@ import org.apache.lucene.util.NumericUtils; import org.elasticsearch.Version; import org.elasticsearch.common.lucene.search.function.FunctionScoreQuery; +import org.elasticsearch.index.query.DateRangeIncludingNowQuery; import java.util.ArrayList; import java.util.Arrays; @@ -149,6 +150,10 @@ Result getResult() { @Override public QueryVisitor getSubVisitor(Occur occur, Query parent) { + if (parent instanceof DateRangeIncludingNowQuery) { + terms.add(Result.UNKNOWN); + return QueryVisitor.EMPTY_VISITOR; + } this.verified = isVerified(parent); if (occur == Occur.MUST || occur == Occur.FILTER) { ResultBuilder builder = new ResultBuilder(true); diff --git a/modules/percolator/src/test/java/org/elasticsearch/percolator/CandidateQueryTests.java b/modules/percolator/src/test/java/org/elasticsearch/percolator/CandidateQueryTests.java index 2f18512aadf94..d46ac0bb0a2b3 100644 --- a/modules/percolator/src/test/java/org/elasticsearch/percolator/CandidateQueryTests.java +++ b/modules/percolator/src/test/java/org/elasticsearch/percolator/CandidateQueryTests.java @@ -73,8 +73,8 @@ import org.apache.lucene.search.spans.SpanNotQuery; import org.apache.lucene.search.spans.SpanOrQuery; import org.apache.lucene.search.spans.SpanTermQuery; +import org.apache.lucene.store.ByteBuffersDirectory; import org.apache.lucene.store.Directory; -import 
org.apache.lucene.store.RAMDirectory; import org.apache.lucene.util.BytesRef; import org.elasticsearch.Version; import org.elasticsearch.cluster.metadata.IndexMetaData; @@ -836,7 +836,7 @@ public void testPercolateSmallAndLargeDocument() throws Exception { Version v = Version.CURRENT; - try (RAMDirectory directory = new RAMDirectory()) { + try (Directory directory = new ByteBuffersDirectory()) { try (IndexWriter iw = new IndexWriter(directory, newIndexWriterConfig())) { List documents = new ArrayList<>(); Document document = new Document(); @@ -875,7 +875,7 @@ public void testPercolateSmallAndLargeDocument() throws Exception { } // This will trigger using the TermsQuery instead of individual term query clauses in the CoveringQuery: - try (RAMDirectory directory = new RAMDirectory()) { + try (Directory directory = new ByteBuffersDirectory()) { try (IndexWriter iw = new IndexWriter(directory, newIndexWriterConfig())) { Document document = new Document(); for (int i = 0; i < 1024; i++) { diff --git a/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorFieldMapperTests.java b/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorFieldMapperTests.java index 658981e6251e1..f41c4c400e2dd 100644 --- a/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorFieldMapperTests.java +++ b/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorFieldMapperTests.java @@ -490,6 +490,17 @@ public void testPercolatorFieldMapper() throws Exception { assertThat(doc.rootDoc().getFields(fieldType.queryBuilderField.name()).length, equalTo(1)); qbSource = doc.rootDoc().getFields(fieldType.queryBuilderField.name())[0].binaryValue(); assertQueryBuilder(qbSource, queryBuilder); + + queryBuilder = rangeQuery("date_field").from("now"); + doc = mapperService.documentMapper().parse(new SourceToParse("test", "1", BytesReference.bytes(XContentFactory + .jsonBuilder() + .startObject() + .field(fieldName, queryBuilder) + 
.endObject()), + XContentType.JSON)); + assertThat(doc.rootDoc().getFields(fieldType.extractionResultField.name()).length, equalTo(1)); + assertThat(doc.rootDoc().getFields(fieldType.extractionResultField.name())[0].stringValue(), + equalTo(EXTRACTION_FAILED)); } public void testStoringQueries() throws Exception { diff --git a/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorQuerySearchIT.java b/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorQuerySearchIT.java index cbd319d342c94..f7701bfa96be4 100644 --- a/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorQuerySearchIT.java +++ b/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorQuerySearchIT.java @@ -48,6 +48,7 @@ import static org.elasticsearch.common.xcontent.XContentFactory.smileBuilder; import static org.elasticsearch.common.xcontent.XContentFactory.yamlBuilder; import static org.elasticsearch.index.query.QueryBuilders.boolQuery; +import static org.elasticsearch.index.query.QueryBuilders.constantScoreQuery; import static org.elasticsearch.index.query.QueryBuilders.geoBoundingBoxQuery; import static org.elasticsearch.index.query.QueryBuilders.geoDistanceQuery; import static org.elasticsearch.index.query.QueryBuilders.geoPolygonQuery; @@ -930,4 +931,41 @@ public void testDisallowExpensiveQueries() throws IOException { assertAcked(client().admin().cluster().updateSettings(updateSettingsRequest).actionGet()); } } + + public void testWrappedWithConstantScore() throws Exception { + + assertAcked(client().admin().indices().prepareCreate("test") + .setMapping("d", "type=date", "q", "type=percolator") + ); + + client().prepareIndex("test").setId("1") + .setSource(jsonBuilder().startObject().field("q", + boolQuery().must(rangeQuery("d").gt("now")) + ).endObject()) + .execute().actionGet(); + + client().prepareIndex("test").setId("2") + .setSource(jsonBuilder().startObject().field("q", + 
boolQuery().must(rangeQuery("d").lt("now")) + ).endObject()) + .execute().actionGet(); + + client().admin().indices().prepareRefresh().get(); + + SearchResponse response = client().prepareSearch("test").setQuery(new PercolateQueryBuilder("q", + BytesReference.bytes(jsonBuilder().startObject().field("d", "2020-02-01T15:00:00.000+11:00").endObject()), + XContentType.JSON)).get(); + assertEquals(1, response.getHits().getTotalHits().value); + + response = client().prepareSearch("test").setQuery(new PercolateQueryBuilder("q", + BytesReference.bytes(jsonBuilder().startObject().field("d", "2020-02-01T15:00:00.000+11:00").endObject()), + XContentType.JSON)).addSort("_doc", SortOrder.ASC).get(); + assertEquals(1, response.getHits().getTotalHits().value); + + response = client().prepareSearch("test").setQuery(constantScoreQuery(new PercolateQueryBuilder("q", + BytesReference.bytes(jsonBuilder().startObject().field("d", "2020-02-01T15:00:00.000+11:00").endObject()), + XContentType.JSON))).get(); + assertEquals(1, response.getHits().getTotalHits().value); + + } } diff --git a/modules/percolator/src/test/java/org/elasticsearch/percolator/QueryBuilderStoreTests.java b/modules/percolator/src/test/java/org/elasticsearch/percolator/QueryBuilderStoreTests.java index 88c2a098deb21..d9d920ae14d4d 100644 --- a/modules/percolator/src/test/java/org/elasticsearch/percolator/QueryBuilderStoreTests.java +++ b/modules/percolator/src/test/java/org/elasticsearch/percolator/QueryBuilderStoreTests.java @@ -38,10 +38,12 @@ import org.elasticsearch.index.fielddata.plain.BytesBinaryDVIndexFieldData; import org.elasticsearch.index.mapper.BinaryFieldMapper; import org.elasticsearch.index.mapper.ContentPath; +import org.elasticsearch.index.mapper.KeywordFieldMapper; import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.index.mapper.ParseContext; import org.elasticsearch.index.query.QueryShardContext; import org.elasticsearch.index.query.TermQueryBuilder; +import 
org.elasticsearch.mock.orig.Mockito; import org.elasticsearch.search.SearchModule; import org.elasticsearch.test.ESTestCase; @@ -93,7 +95,14 @@ public void testStoringQueryBuilders() throws IOException { when(queryShardContext.getXContentRegistry()).thenReturn(xContentRegistry()); when(queryShardContext.getForField(fieldMapper.fieldType())) .thenReturn(new BytesBinaryDVIndexFieldData(new Index("index", "uuid"), fieldMapper.name())); - PercolateQuery.QueryStore queryStore = PercolateQueryBuilder.createStore(fieldMapper.fieldType(), queryShardContext, false); + when(queryShardContext.fieldMapper(Mockito.anyString())).thenAnswer(invocation -> { + final String fieldName = (String) invocation.getArguments()[0]; + KeywordFieldMapper.KeywordFieldType ft = new KeywordFieldMapper.KeywordFieldType(); + ft.setName(fieldName); + ft.freeze(); + return ft; + }); + PercolateQuery.QueryStore queryStore = PercolateQueryBuilder.createStore(fieldMapper.fieldType(), queryShardContext); try (IndexReader indexReader = DirectoryReader.open(directory)) { LeafReaderContext leafContext = indexReader.leaves().get(0); diff --git a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/DeleteByQueryBasicTests.java b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/DeleteByQueryBasicTests.java index 2fca3f53402e1..761c70e164a2d 100644 --- a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/DeleteByQueryBasicTests.java +++ b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/DeleteByQueryBasicTests.java @@ -245,6 +245,10 @@ public void testDeleteByQueryOnReadOnlyAllowDeleteIndex() throws Exception { // so we should test both case of disk allocation decider is enabled and disabled boolean diskAllocationDeciderEnabled = randomBoolean(); try { + if (diskAllocationDeciderEnabled == false) { + // Disable the disk allocation decider to ensure the read_only_allow_delete block cannot be released + setDiskAllocationDeciderEnabled(false); + } // When a 
read_only_allow_delete block is set on the index, // it will trigger a retry policy in the delete by query request because the rest status of the block is 429 enableIndexBlock("test", SETTING_READ_ONLY_ALLOW_DELETE); @@ -259,8 +263,6 @@ public void testDeleteByQueryOnReadOnlyAllowDeleteIndex() throws Exception { assertThat(deleteByQuery().source("test").filter(QueryBuilders.matchAllQuery()).refresh(true).get(), matcher().deleted(docs)); } else { - // Disable the disk allocation decider to ensure the read_only_allow_delete block cannot be released - setDiskAllocationDeciderEnabled(false); // The delete by query request will not be executed successfully because the block cannot be released assertThat(deleteByQuery().source("test").filter(QueryBuilders.matchAllQuery()).refresh(true) .setMaxRetries(2).setRetryBackoffInitialTime(TimeValue.timeValueMillis(50)).get(), diff --git a/qa/ccs-unavailable-clusters/src/test/java/org/elasticsearch/search/CrossClusterSearchUnavailableClusterIT.java b/qa/ccs-unavailable-clusters/src/test/java/org/elasticsearch/search/CrossClusterSearchUnavailableClusterIT.java index 2bf0eae138135..d19cf3ebd974b 100644 --- a/qa/ccs-unavailable-clusters/src/test/java/org/elasticsearch/search/CrossClusterSearchUnavailableClusterIT.java +++ b/qa/ccs-unavailable-clusters/src/test/java/org/elasticsearch/search/CrossClusterSearchUnavailableClusterIT.java @@ -248,8 +248,8 @@ public void testSkipUnavailableDependsOnSeeds() throws IOException { () -> client().performRequest(request)); assertEquals(400, responseException.getResponse().getStatusLine().getStatusCode()); assertThat(responseException.getMessage(), - containsString("missing required setting [cluster.remote.remote1.seeds] " + - "for setting [cluster.remote.remote1.skip_unavailable]")); + containsString("Cannot configure setting [cluster.remote.remote1.skip_unavailable] if remote cluster is " + + "not enabled.")); } Map settingsMap = new HashMap<>(); @@ -264,8 +264,8 @@ public void 
testSkipUnavailableDependsOnSeeds() throws IOException { ResponseException responseException = expectThrows(ResponseException.class, () -> client().performRequest(request)); assertEquals(400, responseException.getResponse().getStatusLine().getStatusCode()); - assertThat(responseException.getMessage(), containsString("missing required setting [cluster.remote.remote1.seeds] " + - "for setting [cluster.remote.remote1.skip_unavailable]")); + assertThat(responseException.getMessage(), containsString("Cannot configure setting " + + "[cluster.remote.remote1.skip_unavailable] if remote cluster is not enabled.")); } if (randomBoolean()) { diff --git a/qa/mixed-cluster/build.gradle b/qa/mixed-cluster/build.gradle index 0f02cbd52d4e9..ef9d05bf65744 100644 --- a/qa/mixed-cluster/build.gradle +++ b/qa/mixed-cluster/build.gradle @@ -30,17 +30,10 @@ tasks.register("bwcTest") { group = 'verification' } -configurations { - restSpec -} - -dependencies { - restSpec project(':rest-api-spec') -} - -processTestResources { - from({ zipTree(configurations.restSpec.singleFile) }) - dependsOn configurations.restSpec +restResources { + restTests { + includeCore '*' + } } for (Version bwcVersion : bwcVersions.wireCompatible) { diff --git a/qa/os/src/test/java/org/elasticsearch/packaging/test/ArchiveTests.java b/qa/os/src/test/java/org/elasticsearch/packaging/test/ArchiveTests.java index 76015e2899f1b..1c1a637ca3fce 100644 --- a/qa/os/src/test/java/org/elasticsearch/packaging/test/ArchiveTests.java +++ b/qa/os/src/test/java/org/elasticsearch/packaging/test/ArchiveTests.java @@ -30,8 +30,11 @@ import java.nio.file.Files; import java.nio.file.Path; import java.nio.file.Paths; +import java.util.List; import java.util.stream.Stream; +import static java.nio.file.StandardOpenOption.APPEND; +import static java.nio.file.StandardOpenOption.CREATE; import static org.elasticsearch.packaging.util.Archives.installArchive; import static org.elasticsearch.packaging.util.Archives.verifyArchiveInstallation; 
import static org.elasticsearch.packaging.util.FileExistenceMatchers.fileDoesNotExist; @@ -108,7 +111,7 @@ public void test50StartAndStop() throws Exception { try { startElasticsearch(); - } catch (Exception e ){ + } catch (Exception e) { if (Files.exists(installation.home.resolve("elasticsearch.pid"))) { String pid = FileUtils.slurp(installation.home.resolve("elasticsearch.pid")).trim(); logger.info("Dumping jstack of elasticsearch processb ({}) that failed to start", pid); @@ -138,8 +141,7 @@ public void test51JavaHomeOverride() throws Exception { stopElasticsearch(); String systemJavaHome1 = sh.getEnv().get("JAVA_HOME"); - assertThat(FileUtils.slurpAllLogs(installation.logs, "elasticsearch.log", "*.log.gz"), - containsString(systemJavaHome1)); + assertThat(FileUtils.slurpAllLogs(installation.logs, "elasticsearch.log", "*.log.gz"), containsString(systemJavaHome1)); } public void test52BundledJdkRemoved() throws Exception { @@ -162,8 +164,7 @@ public void test52BundledJdkRemoved() throws Exception { stopElasticsearch(); String systemJavaHome1 = sh.getEnv().get("JAVA_HOME"); - assertThat(FileUtils.slurpAllLogs(installation.logs, "elasticsearch.log", "*.log.gz"), - containsString(systemJavaHome1)); + assertThat(FileUtils.slurpAllLogs(installation.logs, "elasticsearch.log", "*.log.gz"), containsString(systemJavaHome1)); } finally { mv(relocatedJdk, installation.bundledJdk); } @@ -178,7 +179,7 @@ public void test53JavaHomeWithSpecialCharacters() throws Exception { sh.getEnv().put("JAVA_HOME", "C:\\Program Files (x86)\\java"); - //verify ES can start, stop and run plugin list + // verify ES can start, stop and run plugin list startElasticsearch(); stopElasticsearch(); @@ -188,7 +189,7 @@ public void test53JavaHomeWithSpecialCharacters() throws Exception { assertThat(result.exitCode, equalTo(0)); } finally { - //clean up sym link + // clean up sym link if (Files.exists(Paths.get(javaPath))) { sh.run("cmd /c rmdir '" + javaPath + "' "); } @@ -203,7 +204,7 @@ public void 
test53JavaHomeWithSpecialCharacters() throws Exception { sh.run("ln -s \"" + systemJavaHome + "\" \"" + testJavaHome + "\""); sh.getEnv().put("JAVA_HOME", testJavaHome); - //verify ES can start, stop and run plugin list + // verify ES can start, stop and run plugin list startElasticsearch(); stopElasticsearch(); @@ -229,11 +230,8 @@ public void test70CustomPathConfAndJvmOptions() throws Exception { // we have to disable Log4j from using JMX lest it will hit a security // manager exception before we have configured logging; this will fail // startup since we detect usages of logging before it is configured - final String jvmOptions = - "-Xms512m\n" + - "-Xmx512m\n" + - "-Dlog4j2.disable.jmx=true\n"; - append(tempConf.resolve("jvm.options"), jvmOptions); + final List jvmOptions = List.of("-Xms512m", "-Xmx512m", "-Dlog4j2.disable.jmx=true"); + Files.write(tempConf.resolve("jvm.options"), jvmOptions, CREATE, APPEND); sh.chown(tempConf); @@ -316,11 +314,8 @@ public void test80RelativePathConf() throws Exception { try { mkdir(tempConf); - Stream.of( - "elasticsearch.yml", - "log4j2.properties", - "jvm.options" - ).forEach(file -> cp(installation.config(file), tempConf.resolve(file))); + Stream.of("elasticsearch.yml", "log4j2.properties", "jvm.options") + .forEach(file -> cp(installation.config(file), tempConf.resolve(file))); append(tempConf.resolve("elasticsearch.yml"), "node.name: relative"); @@ -381,8 +376,7 @@ public void test92ElasticsearchNodeCliPackaging() throws Exception { Platforms.PlatformAction action = () -> { final Result result = sh.run(bin.nodeTool + " -h"); - assertThat(result.stdout, - containsString("A CLI tool to do unsafe cluster and index manipulations on current node")); + assertThat(result.stdout, containsString("A CLI tool to do unsafe cluster and index manipulations on current node")); }; // TODO: this should be checked on all distributions @@ -412,17 +406,13 @@ public void test94ElasticsearchNodeExecuteCliNotEsHomeWorkDir() throws Exception 
Platforms.PlatformAction action = () -> { Result result = sh.run(bin.certutilTool + " -h"); - assertThat(result.stdout, - containsString("Simplifies certificate creation for use with the Elastic Stack")); + assertThat(result.stdout, containsString("Simplifies certificate creation for use with the Elastic Stack")); result = sh.run(bin.syskeygenTool + " -h"); - assertThat(result.stdout, - containsString("system key tool")); + assertThat(result.stdout, containsString("system key tool")); result = sh.run(bin.setupPasswordsTool + " -h"); - assertThat(result.stdout, - containsString("Sets the passwords for reserved users")); + assertThat(result.stdout, containsString("Sets the passwords for reserved users")); result = sh.run(bin.usersTool + " -h"); - assertThat(result.stdout, - containsString("Manages elasticsearch file users")); + assertThat(result.stdout, containsString("Manages elasticsearch file users")); }; // TODO: this should be checked on all distributions diff --git a/qa/os/src/test/java/org/elasticsearch/packaging/test/CertGenCliTests.java b/qa/os/src/test/java/org/elasticsearch/packaging/test/CertGenCliTests.java index 1893e5f682f7d..46152f564a8e6 100644 --- a/qa/os/src/test/java/org/elasticsearch/packaging/test/CertGenCliTests.java +++ b/qa/os/src/test/java/org/elasticsearch/packaging/test/CertGenCliTests.java @@ -30,13 +30,15 @@ import java.nio.file.Files; import java.nio.file.Path; -import java.util.Arrays; +import java.util.ArrayList; +import java.util.List; import static com.carrotsearch.randomizedtesting.RandomizedTest.assumeFalse; +import static java.nio.file.StandardOpenOption.APPEND; +import static java.nio.file.StandardOpenOption.CREATE; import static org.elasticsearch.packaging.util.FileMatcher.Fileness.File; import static org.elasticsearch.packaging.util.FileMatcher.file; import static org.elasticsearch.packaging.util.FileMatcher.p600; -import static org.elasticsearch.packaging.util.FileUtils.append; import static 
org.elasticsearch.packaging.util.FileUtils.escapePath; import static org.elasticsearch.packaging.util.FileUtils.getTempDir; import static org.hamcrest.CoreMatchers.containsString; @@ -61,17 +63,18 @@ public void test10Install() throws Exception { install(); } - public void test20Help() throws Exception { + public void test20Help() { Shell.Result result = installation.executables().certgenTool.run("--help"); assertThat(result.stdout, containsString("Simplifies certificate creation")); } public void test30Generate() throws Exception { - Files.write(instancesFile, Arrays.asList( - "instances:", - " - name: \"mynode\"", - " ip:", - " - \"127.0.0.1\"")); + final List lines = new ArrayList<>(); + lines.add("instances:"); + lines.add(" - name: \"mynode\""); + lines.add(" ip:"); + lines.add(" - \"127.0.0.1\""); + Files.write(instancesFile, lines, CREATE, APPEND); installation.executables().certgenTool.run("--in " + instancesFile + " --out " + certificatesFile); @@ -100,20 +103,27 @@ public void test31ExtractCerts() throws Exception { public void test40RunWithCert() throws Exception { // windows 2012 r2 has powershell 4.0, which lacks Expand-Archive assumeFalse(Platforms.OS_NAME.equals("Windows Server 2012 R2")); - - append(installation.config("elasticsearch.yml"), String.join("\n", + + final String keyPath = escapePath(installation.config("certs/mynode/mynode.key")); + final String certPath = escapePath(installation.config("certs/mynode/mynode.crt")); + final String caCertPath = escapePath(installation.config("certs/ca/ca.crt")); + + List yaml = List.of( "node.name: mynode", - "xpack.security.transport.ssl.key: " + escapePath(installation.config("certs/mynode/mynode.key")), - "xpack.security.transport.ssl.certificate: " + escapePath(installation.config("certs/mynode/mynode.crt")), - "xpack.security.transport.ssl.certificate_authorities: [\"" + escapePath(installation.config("certs/ca/ca.crt")) + "\"]", - "xpack.security.http.ssl.key: " + 
escapePath(installation.config("certs/mynode/mynode.key")), - "xpack.security.http.ssl.certificate: "+ escapePath(installation.config("certs/mynode/mynode.crt")), - "xpack.security.http.ssl.certificate_authorities: [\"" + escapePath(installation.config("certs/ca/ca.crt")) + "\"]", + "xpack.security.transport.ssl.key: " + keyPath, + "xpack.security.transport.ssl.certificate: " + certPath, + "xpack.security.transport.ssl.certificate_authorities: [\"" + caCertPath + "\"]", + "xpack.security.http.ssl.key: " + keyPath, + "xpack.security.http.ssl.certificate: " + certPath, + "xpack.security.http.ssl.certificate_authorities: [\"" + caCertPath + "\"]", "xpack.security.transport.ssl.enabled: true", - "xpack.security.http.ssl.enabled: true")); + "xpack.security.http.ssl.enabled: true" + ); + + Files.write(installation.config("elasticsearch.yml"), yaml, CREATE, APPEND); - assertWhileRunning(() -> { - ServerUtils.makeRequest(Request.Get("https://127.0.0.1:9200"), null, null, installation.config("certs/ca/ca.crt")); - }); + assertWhileRunning( + () -> ServerUtils.makeRequest(Request.Get("https://127.0.0.1:9200"), null, null, installation.config("certs/ca/ca.crt")) + ); } } diff --git a/qa/os/src/test/java/org/elasticsearch/packaging/test/DebPreservationTests.java b/qa/os/src/test/java/org/elasticsearch/packaging/test/DebPreservationTests.java index 18777e1a4ee3b..2c0faa1dc33c9 100644 --- a/qa/os/src/test/java/org/elasticsearch/packaging/test/DebPreservationTests.java +++ b/qa/os/src/test/java/org/elasticsearch/packaging/test/DebPreservationTests.java @@ -79,10 +79,7 @@ public void test20Remove() throws Exception { // keystore was removed - assertPathsDoNotExist( - installation.config("elasticsearch.keystore"), - installation.config(".elasticsearch.keystore.initial_md5sum") - ); + assertPathsDoNotExist(installation.config("elasticsearch.keystore"), installation.config(".elasticsearch.keystore.initial_md5sum")); // doc files were removed @@ -105,11 +102,7 @@ public void 
test30Purge() throws Exception { assertRemoved(distribution()); - assertPathsDoNotExist( - installation.config, - installation.envFile, - SYSVINIT_SCRIPT - ); + assertPathsDoNotExist(installation.config, installation.envFile, SYSVINIT_SCRIPT); assertThat(packageStatus(distribution()).exitCode, is(1)); } diff --git a/qa/os/src/test/java/org/elasticsearch/packaging/test/KeystoreManagementTests.java b/qa/os/src/test/java/org/elasticsearch/packaging/test/KeystoreManagementTests.java index fd2e06f9ffcf9..46271e4d63b24 100644 --- a/qa/os/src/test/java/org/elasticsearch/packaging/test/KeystoreManagementTests.java +++ b/qa/os/src/test/java/org/elasticsearch/packaging/test/KeystoreManagementTests.java @@ -30,11 +30,10 @@ import org.junit.Ignore; import java.io.IOException; -import java.nio.charset.StandardCharsets; import java.nio.file.Files; import java.nio.file.Path; -import java.nio.file.StandardOpenOption; import java.util.Arrays; +import java.util.List; import java.util.Map; import static org.elasticsearch.packaging.util.Archives.ARCHIVE_OWNER; @@ -80,8 +79,7 @@ public void test10InstallArchiveDistribution() throws Exception { final Installation.Executables bin = installation.executables(); Shell.Result r = sh.runIgnoreExitCode(bin.keystoreTool.toString() + " has-passwd"); assertFalse("has-passwd should fail", r.isSuccess()); - assertThat("has-passwd should indicate missing keystore", - r.stderr, containsString(ERROR_KEYSTORE_NOT_FOUND)); + assertThat("has-passwd should indicate missing keystore", r.stderr, containsString(ERROR_KEYSTORE_NOT_FOUND)); } /** Test initial package state */ @@ -96,8 +94,7 @@ public void test11InstallPackageDistribution() throws Exception { final Installation.Executables bin = installation.executables(); Shell.Result r = sh.runIgnoreExitCode(bin.keystoreTool.toString() + " has-passwd"); assertFalse("has-passwd should fail", r.isSuccess()); - assertThat("has-passwd should indicate unprotected keystore", - r.stderr, 
containsString(ERROR_KEYSTORE_NOT_PASSWORD_PROTECTED)); + assertThat("has-passwd should indicate unprotected keystore", r.stderr, containsString(ERROR_KEYSTORE_NOT_PASSWORD_PROTECTED)); Shell.Result r2 = bin.keystoreTool.run("list"); assertThat(r2.stdout, containsString("keystore.seed")); } @@ -117,8 +114,7 @@ public void test12InstallDockerDistribution() throws Exception { final Installation.Executables bin = installation.executables(); Shell.Result r = sh.runIgnoreExitCode(bin.keystoreTool.toString() + " has-passwd"); assertFalse("has-passwd should fail", r.isSuccess()); - assertThat("has-passwd should indicate unprotected keystore", - r.stdout, containsString(ERROR_KEYSTORE_NOT_PASSWORD_PROTECTED)); + assertThat("has-passwd should indicate unprotected keystore", r.stdout, containsString(ERROR_KEYSTORE_NOT_PASSWORD_PROTECTED)); Shell.Result r2 = bin.keystoreTool.run("list"); assertThat(r2.stdout, containsString("keystore.seed")); } @@ -151,8 +147,7 @@ public void test30AutoCreateKeystore() throws Exception { } public void test40KeystorePasswordOnStandardInput() throws Exception { - assumeTrue("packages will use systemd, which doesn't handle stdin", - distribution.isArchive()); + assumeTrue("packages will use systemd, which doesn't handle stdin", distribution.isArchive()); assumeThat(installation, is(notNullValue())); String password = "^|<>\\&exit"; // code insertion on Windows if special characters are not escaped @@ -169,8 +164,7 @@ public void test40KeystorePasswordOnStandardInput() throws Exception { } public void test41WrongKeystorePasswordOnStandardInput() { - assumeTrue("packages will use systemd, which doesn't handle stdin", - distribution.isArchive()); + assumeTrue("packages will use systemd, which doesn't handle stdin", distribution.isArchive()); assumeThat(installation, is(notNullValue())); assertPasswordProtectedKeystore(); @@ -181,10 +175,8 @@ public void test41WrongKeystorePasswordOnStandardInput() { @Ignore /* Ignored for feature branch, awaits 
fix: https://github.com/elastic/elasticsearch/issues/49340 */ public void test42KeystorePasswordOnTty() throws Exception { - assumeTrue("expect command isn't on Windows", - distribution.platform != Distribution.Platform.WINDOWS); - assumeTrue("packages will use systemd, which doesn't handle stdin", - distribution.isArchive()); + assumeTrue("expect command isn't on Windows", distribution.platform != Distribution.Platform.WINDOWS); + assumeTrue("packages will use systemd, which doesn't handle stdin", distribution.isArchive()); assumeThat(installation, is(notNullValue())); String password = "keystorepass"; @@ -202,10 +194,8 @@ public void test42KeystorePasswordOnTty() throws Exception { @Ignore /* Ignored for feature branch, awaits fix: https://github.com/elastic/elasticsearch/issues/49340 */ public void test43WrongKeystorePasswordOnTty() throws Exception { - assumeTrue("expect command isn't on Windows", - distribution.platform != Distribution.Platform.WINDOWS); - assumeTrue("packages will use systemd, which doesn't handle stdin", - distribution.isArchive()); + assumeTrue("expect command isn't on Windows", distribution.platform != Distribution.Platform.WINDOWS); + assumeTrue("packages will use systemd, which doesn't handle stdin", distribution.isArchive()); assumeThat(installation, is(notNullValue())); assertPasswordProtectedKeystore(); @@ -220,8 +210,7 @@ public void test43WrongKeystorePasswordOnTty() throws Exception { * view help information. 
*/ public void test44EncryptedKeystoreAllowsHelpMessage() throws Exception { - assumeTrue("users call elasticsearch directly in archive case", - distribution.isArchive()); + assumeTrue("users call elasticsearch directly in archive case", distribution.isArchive()); String password = "keystorepass"; @@ -249,9 +238,7 @@ public void test50KeystorePasswordFromFile() throws Exception { sh.run("sudo systemctl set-environment ES_KEYSTORE_PASSPHRASE_FILE=" + esKeystorePassphraseFile); Files.createFile(esKeystorePassphraseFile); - Files.write(esKeystorePassphraseFile, - (password + System.lineSeparator()).getBytes(StandardCharsets.UTF_8), - StandardOpenOption.WRITE); + Files.write(esKeystorePassphraseFile, List.of(password)); startElasticsearch(); ServerUtils.runElasticsearchTests(); @@ -275,9 +262,7 @@ public void test51WrongKeystorePasswordFromFile() throws Exception { } Files.createFile(esKeystorePassphraseFile); - Files.write(esKeystorePassphraseFile, - ("wrongpassword" + System.lineSeparator()).getBytes(StandardCharsets.UTF_8), - StandardOpenOption.WRITE); + Files.write(esKeystorePassphraseFile, List.of("wrongpassword")); Packages.JournaldWrapper journaldWrapper = new Packages.JournaldWrapper(sh); Shell.Result result = runElasticsearchStartCommand(); @@ -334,8 +319,7 @@ public void test61DockerEnvironmentVariablePasswordFromFile() throws Exception { waitForElasticsearch(installation); ServerUtils.runElasticsearchTests(); - } - finally { + } finally { if (tempDir != null) { rm(tempDir); } @@ -376,9 +360,13 @@ private Path getKeystoreFileFromDockerContainer(String password, Path dockerKeys // It's very tricky to properly quote a pipeline that you're passing to // a docker exec command, so we're just going to put a small script in the // temp folder. 
- String setPasswordScript = "echo \"" + password + "\n" + password - + "\n\" | " + installation.executables().keystoreTool.toString() + " passwd"; - Files.writeString(tempDirectory.resolve("set-pass.sh"), setPasswordScript); + List setPasswordScript = List.of( + "echo \"" + password, + password, + "\" | " + installation.executables().keystoreTool.toString() + " passwd" + ); + + Files.write(tempDirectory.resolve("set-pass.sh"), setPasswordScript); runContainer(distribution(), volumes, null); try { @@ -409,9 +397,7 @@ private void createKeystore() throws Exception { // the keystore ends up being owned by the Administrators group, so we manually set it to be owned by the vagrant user here. // from the server's perspective the permissions aren't really different, this is just to reflect what we'd expect in the tests. // when we run these commands as a role user we won't have to do this - Platforms.onWindows(() -> { - sh.chown(keystore); - }); + Platforms.onWindows(() -> sh.chown(keystore)); if (distribution().isDocker()) { try { @@ -444,14 +430,11 @@ private void setKeystorePassword(String password) throws Exception { final Installation.Executables bin = installation.executables(); // set the password by passing it to stdin twice - Platforms.onLinux(() -> { - bin.keystoreTool.run("passwd", password + "\n" + password + "\n"); - }); - - Platforms.onWindows(() -> { - sh.run("Invoke-Command -ScriptBlock {echo \'" + password + "\'; echo \'" + password + "\'} | " - + bin.keystoreTool + " passwd"); - }); + Platforms.onLinux(() -> bin.keystoreTool.run("passwd", password + "\n" + password + "\n")); + + Platforms.onWindows( + () -> sh.run("Invoke-Command -ScriptBlock {echo '" + password + "'; echo '" + password + "'} | " + bin.keystoreTool + " passwd") + ); } private void assertPasswordProtectedKeystore() { diff --git a/qa/os/src/test/java/org/elasticsearch/packaging/test/PackageTests.java b/qa/os/src/test/java/org/elasticsearch/packaging/test/PackageTests.java index 
f4026c659275d..a32cf33e9f4d7 100644 --- a/qa/os/src/test/java/org/elasticsearch/packaging/test/PackageTests.java +++ b/qa/os/src/test/java/org/elasticsearch/packaging/test/PackageTests.java @@ -26,15 +26,15 @@ import org.elasticsearch.packaging.util.Shell.Result; import org.junit.BeforeClass; -import java.nio.charset.StandardCharsets; import java.nio.file.Files; import java.nio.file.Path; import java.nio.file.Paths; -import java.nio.file.StandardOpenOption; +import java.util.List; import java.util.regex.Matcher; import java.util.regex.Pattern; import static com.carrotsearch.randomizedtesting.RandomizedTest.getRandom; +import static java.nio.file.StandardOpenOption.APPEND; import static org.elasticsearch.packaging.util.FileExistenceMatchers.fileDoesNotExist; import static org.elasticsearch.packaging.util.FileExistenceMatchers.fileExists; import static org.elasticsearch.packaging.util.FileUtils.append; @@ -98,8 +98,7 @@ public void test31InstallDoesNotStartServer() { private void assertRunsWithJavaHome() throws Exception { byte[] originalEnvFile = Files.readAllBytes(installation.envFile); try { - Files.write(installation.envFile, ("JAVA_HOME=" + systemJavaHome + "\n").getBytes(StandardCharsets.UTF_8), - StandardOpenOption.APPEND); + Files.write(installation.envFile, List.of("JAVA_HOME=" + systemJavaHome), APPEND); startElasticsearch(); runElasticsearchTests(); stopElasticsearch(); @@ -107,8 +106,7 @@ private void assertRunsWithJavaHome() throws Exception { Files.write(installation.envFile, originalEnvFile); } - assertThat(FileUtils.slurpAllLogs(installation.logs, "elasticsearch.log", "elasticsearch*.log.gz"), - containsString(systemJavaHome)); + assertThat(FileUtils.slurpAllLogs(installation.logs, "elasticsearch.log", "elasticsearch*.log.gz"), containsString(systemJavaHome)); } public void test32JavaHomeOverride() throws Exception { @@ -170,8 +168,9 @@ public void test40StartServer() throws Exception { String start = sh.runIgnoreExitCode("date ").stdout.trim(); 
startElasticsearch(); - String journalEntries = sh.runIgnoreExitCode("journalctl _SYSTEMD_UNIT=elasticsearch.service " + - "--since \"" + start + "\" --output cat | wc -l").stdout.trim(); + String journalEntries = sh.runIgnoreExitCode( + "journalctl _SYSTEMD_UNIT=elasticsearch.service " + "--since \"" + start + "\" --output cat | wc -l" + ).stdout.trim(); assertThat(journalEntries, equalTo("0")); assertPathsExist(installation.pidDir.resolve("elasticsearch.pid")); @@ -211,9 +210,7 @@ public void test50Remove() throws Exception { matcher.find(); final int version = Integer.parseInt(matcher.group(1)); - statusExitCode = version < 231 - ? 3 - : 4; + statusExitCode = version < 231 ? 3 : 4; } assertThat(sh.runIgnoreExitCode("systemctl status elasticsearch.service").exitCode, is(statusExitCode)); @@ -256,7 +253,6 @@ public void test70RestartServer() throws Exception { } } - public void test72TestRuntimeDirectory() throws Exception { try { install(); @@ -279,7 +275,6 @@ public void test73gcLogsExist() throws Exception { // TEST CASES FOR SYSTEMD ONLY - /** * # Simulates the behavior of a system restart: * # the PID directory is deleted by the operating system diff --git a/qa/os/src/test/java/org/elasticsearch/packaging/test/PackagingTestCase.java b/qa/os/src/test/java/org/elasticsearch/packaging/test/PackagingTestCase.java index 7b674b4554a1d..a10de15fa58b0 100644 --- a/qa/os/src/test/java/org/elasticsearch/packaging/test/PackagingTestCase.java +++ b/qa/os/src/test/java/org/elasticsearch/packaging/test/PackagingTestCase.java @@ -67,14 +67,12 @@ * Class that all packaging test cases should inherit from */ @RunWith(RandomizedRunner.class) -@TestMethodProviders({ - JUnit3MethodProvider.class -}) +@TestMethodProviders({ JUnit3MethodProvider.class }) @Timeout(millis = 20 * 60 * 1000) // 20 min @TestCaseOrdering(TestCaseOrdering.AlphabeticOrder.class) public abstract class PackagingTestCase extends Assert { - protected final Logger logger = LogManager.getLogger(getClass()); + 
protected final Logger logger = LogManager.getLogger(getClass()); // the distribution being tested protected static final Distribution distribution; @@ -142,19 +140,14 @@ public static void cleanupDocker() { } } - @Before public void setup() throws Exception { assumeFalse(failed); // skip rest of tests once one fails sh.reset(); if (distribution().hasJdk == false) { - Platforms.onLinux(() -> { - sh.getEnv().put("JAVA_HOME", systemJavaHome); - }); - Platforms.onWindows(() -> { - sh.getEnv().put("JAVA_HOME", systemJavaHome); - }); + Platforms.onLinux(() -> sh.getEnv().put("JAVA_HOME", systemJavaHome)); + Platforms.onWindows(() -> sh.getEnv().put("JAVA_HOME", systemJavaHome)); } } @@ -208,15 +201,14 @@ protected static void install() throws Exception { protected void assertWhileRunning(Platforms.PlatformAction assertions) throws Exception { try { awaitElasticsearchStartup(runElasticsearchStartCommand()); - } catch (Exception e ){ + } catch (Exception e) { if (Files.exists(installation.home.resolve("elasticsearch.pid"))) { String pid = FileUtils.slurp(installation.home.resolve("elasticsearch.pid")).trim(); logger.info("Dumping jstack of elasticsearch processb ({}) that failed to start", pid); sh.runIgnoreExitCode("jstack " + pid); } if (Files.exists(installation.logs.resolve("elasticsearch.log"))) { - logger.warn("Elasticsearch log:\n" + - FileUtils.slurpAllLogs(installation.logs, "elasticsearch.log", "*.log.gz")); + logger.warn("Elasticsearch log:\n" + FileUtils.slurpAllLogs(installation.logs, "elasticsearch.log", "*.log.gz")); } if (Files.exists(installation.logs.resolve("output.out"))) { logger.warn("Stdout:\n" + FileUtils.slurpTxtorGz(installation.logs.resolve("output.out"))); @@ -230,8 +222,7 @@ protected void assertWhileRunning(Platforms.PlatformAction assertions) throws Ex try { assertions.run(); } catch (Exception e) { - logger.warn("Elasticsearch log:\n" + - FileUtils.slurpAllLogs(installation.logs, "elasticsearch.log", "*.log.gz")); + 
logger.warn("Elasticsearch log:\n" + FileUtils.slurpAllLogs(installation.logs, "elasticsearch.log", "*.log.gz")); throw e; } stopElasticsearch(); @@ -345,9 +336,11 @@ public void assertElasticsearchFailure(Shell.Result result, List expecte // in the background String wrapperPid = result.stdout.trim(); sh.runIgnoreExitCode("Wait-Process -Timeout " + Archives.ES_STARTUP_SLEEP_TIME_SECONDS + " -Id " + wrapperPid); - sh.runIgnoreExitCode("Get-EventSubscriber | " + - "where {($_.EventName -eq 'OutputDataReceived' -Or $_.EventName -eq 'ErrorDataReceived' |" + - "Unregister-EventSubscriber -Force"); + sh.runIgnoreExitCode( + "Get-EventSubscriber | " + + "where {($_.EventName -eq 'OutputDataReceived' -Or $_.EventName -eq 'ErrorDataReceived' |" + + "Unregister-EventSubscriber -Force" + ); assertThat(FileUtils.slurp(Archives.getPowershellErrorPath(installation)), anyOf(stringMatchers)); } else { diff --git a/qa/os/src/test/java/org/elasticsearch/packaging/test/PasswordToolsTests.java b/qa/os/src/test/java/org/elasticsearch/packaging/test/PasswordToolsTests.java index edec71a12a063..6d8903b2f76e4 100644 --- a/qa/os/src/test/java/org/elasticsearch/packaging/test/PasswordToolsTests.java +++ b/qa/os/src/test/java/org/elasticsearch/packaging/test/PasswordToolsTests.java @@ -28,13 +28,14 @@ import java.nio.file.Files; import java.nio.file.Path; +import java.nio.file.StandardOpenOption; import java.util.HashMap; +import java.util.List; import java.util.Map; import java.util.regex.Matcher; import java.util.regex.Pattern; import java.util.stream.Stream; -import static org.elasticsearch.packaging.util.FileUtils.append; import static org.hamcrest.CoreMatchers.containsString; import static org.hamcrest.collection.IsMapContaining.hasKey; import static org.junit.Assume.assumeTrue; @@ -52,9 +53,11 @@ public void filterDistros() { public void test010Install() throws Exception { install(); - append(installation.config("elasticsearch.yml"), - "xpack.license.self_generated.type: trial\n" + - 
"xpack.security.enabled: true"); + Files.write( + installation.config("elasticsearch.yml"), + List.of("xpack.license.self_generated.type: trial", "xpack.security.enabled: true"), + StandardOpenOption.APPEND + ); } public void test20GeneratePasswords() throws Exception { @@ -63,7 +66,11 @@ public void test20GeneratePasswords() throws Exception { Map userpasses = parseUsersAndPasswords(result.stdout); for (Map.Entry userpass : userpasses.entrySet()) { String response = ServerUtils.makeRequest( - Request.Get("http://localhost:9200"), userpass.getKey(), userpass.getValue(), null); + Request.Get("http://localhost:9200"), + userpass.getKey(), + userpass.getValue(), + null + ); assertThat(response, containsString("You Know, for Search")); } }); @@ -112,7 +119,10 @@ public void test30AddBootstrapPassword() throws Exception { assertWhileRunning(() -> { String response = ServerUtils.makeRequest( Request.Get("http://localhost:9200/_cluster/health?wait_for_status=green&timeout=180s"), - "elastic", BOOTSTRAP_PASSWORD, null); + "elastic", + BOOTSTRAP_PASSWORD, + null + ); assertThat(response, containsString("\"status\":\"green\"")); }); } @@ -125,7 +135,11 @@ public void test40GeneratePasswordsBootstrapAlreadySet() throws Exception { assertThat(userpasses, hasKey("elastic")); for (Map.Entry userpass : userpasses.entrySet()) { String response = ServerUtils.makeRequest( - Request.Get("http://localhost:9200"), userpass.getKey(), userpass.getValue(), null); + Request.Get("http://localhost:9200"), + userpass.getKey(), + userpass.getValue(), + null + ); assertThat(response, containsString("You Know, for Search")); } }); diff --git a/qa/os/src/test/java/org/elasticsearch/packaging/test/RpmPreservationTests.java b/qa/os/src/test/java/org/elasticsearch/packaging/test/RpmPreservationTests.java index 5da7ec4366e7b..5a99f33277a24 100644 --- a/qa/os/src/test/java/org/elasticsearch/packaging/test/RpmPreservationTests.java +++ 
b/qa/os/src/test/java/org/elasticsearch/packaging/test/RpmPreservationTests.java @@ -78,21 +78,12 @@ public void test30PreserveConfig() throws Exception { verifyPackageInstallation(installation, distribution(), sh); sh.run("echo foobar | " + installation.executables().keystoreTool + " add --stdin foo.bar"); - Stream.of( - "elasticsearch.yml", - "jvm.options", - "log4j2.properties" - ) + Stream.of("elasticsearch.yml", "jvm.options", "log4j2.properties") .map(each -> installation.config(each)) .forEach(path -> append(path, "# foo")); append(installation.config(Paths.get("jvm.options.d", "heap.options")), "# foo"); if (distribution().isDefault()) { - Stream.of( - "role_mapping.yml", - "roles.yml", - "users", - "users_roles" - ) + Stream.of("role_mapping.yml", "roles.yml", "users", "users_roles") .map(each -> installation.config(each)) .forEach(path -> append(path, "# foo")); } @@ -119,27 +110,18 @@ public void test30PreserveConfig() throws Exception { assertThat(installation.config, fileExists()); assertThat(installation.config("elasticsearch.keystore"), fileExists()); - Stream.of( - "elasticsearch.yml", - "jvm.options", - "log4j2.properties" - ).forEach(this::assertConfFilePreserved); + Stream.of("elasticsearch.yml", "jvm.options", "log4j2.properties").forEach(this::assertConfFilePreserved); assertThat(installation.config(Paths.get("jvm.options.d", "heap.options")), fileExists()); if (distribution().isDefault()) { - Stream.of( - "role_mapping.yml", - "roles.yml", - "users", - "users_roles" - ).forEach(this::assertConfFilePreserved); + Stream.of("role_mapping.yml", "roles.yml", "users", "users_roles").forEach(this::assertConfFilePreserved); } } private void assertConfFilePreserved(String configFile) { final Path original = installation.config(configFile); final Path saved = installation.config(configFile + ".rpmsave"); - assertConfFilePreserved(original ,saved); + assertConfFilePreserved(original, saved); } private void assertConfFilePreserved(final Path original, 
final Path saved) { diff --git a/qa/os/src/test/java/org/elasticsearch/packaging/test/WindowsServiceTests.java b/qa/os/src/test/java/org/elasticsearch/packaging/test/WindowsServiceTests.java index d261a2627a622..8264a4825f954 100644 --- a/qa/os/src/test/java/org/elasticsearch/packaging/test/WindowsServiceTests.java +++ b/qa/os/src/test/java/org/elasticsearch/packaging/test/WindowsServiceTests.java @@ -82,12 +82,16 @@ private void assertExit(Result result, String script, int exitCode) { logger.error("---- Unexpected exit code (expected " + exitCode + ", got " + result.exitCode + ") for script: " + script); logger.error(result); logger.error("Dumping log files\n"); - Result logs = sh.run("$files = Get-ChildItem \"" + installation.logs + "\\elasticsearch.log\"; " + - "Write-Output $files; " + - "foreach ($file in $files) {" + - "Write-Output \"$file\"; " + - "Get-Content \"$file\" " + - "}"); + Result logs = sh.run( + "$files = Get-ChildItem \"" + + installation.logs + + "\\elasticsearch.log\"; " + + "Write-Output $files; " + + "foreach ($file in $files) {" + + " Write-Output \"$file\"; " + + " Get-Content \"$file\" " + + "}" + ); logger.error(logs.stdout); fail(); } else { @@ -105,7 +109,7 @@ public void test11InstallServiceExeMissing() throws IOException { Path serviceExe = installation.bin("elasticsearch-service-x64.exe"); Path tmpServiceExe = serviceExe.getParent().resolve(serviceExe.getFileName() + ".tmp"); Files.move(serviceExe, tmpServiceExe); - Result result = sh.runIgnoreExitCode(serviceScript + " install"); + Result result = sh.runIgnoreExitCode(serviceScript + " install"); assertThat(result.exitCode, equalTo(1)); assertThat(result.stdout, containsString("elasticsearch-service-x64.exe was not found...")); Files.move(tmpServiceExe, serviceExe); @@ -167,28 +171,32 @@ public void assertStartedAndStop() throws Exception { assertCommand(serviceScript + " stop"); assertService(DEFAULT_ID, "Stopped", DEFAULT_DISPLAY_NAME); // the process is stopped async, and can 
become a zombie process, so we poll for the process actually being gone - assertCommand("$p = Get-Service -Name \"elasticsearch-service-x64\" -ErrorAction SilentlyContinue;" + - "$i = 0;" + - "do {" + - "$p = Get-Process -Name \"elasticsearch-service-x64\" -ErrorAction SilentlyContinue;" + - "echo \"$p\";" + - "if ($p -eq $Null) {" + - " Write-Host \"exited after $i seconds\";" + - " exit 0;" + - "}" + - "Start-Sleep -Seconds 1;" + - "$i += 1;" + - "} while ($i -lt 300);" + - "exit 9;"); + assertCommand( + "$p = Get-Service -Name \"elasticsearch-service-x64\" -ErrorAction SilentlyContinue;" + + "$i = 0;" + + "do {" + + " $p = Get-Process -Name \"elasticsearch-service-x64\" -ErrorAction SilentlyContinue;" + + " echo \"$p\";" + + " if ($p -eq $Null) {" + + " Write-Host \"exited after $i seconds\";" + + " exit 0;" + + " }" + + " Start-Sleep -Seconds 1;" + + " $i += 1;" + + "} while ($i -lt 300);" + + "exit 9;" + ); assertCommand(serviceScript + " remove"); - assertCommand("$p = Get-Service -Name \"elasticsearch-service-x64\" -ErrorAction SilentlyContinue;" + - "echo \"$p\";" + - "if ($p -eq $Null) {" + - " exit 0;" + - "} else {" + - " exit 1;" + - "}"); + assertCommand( + "$p = Get-Service -Name \"elasticsearch-service-x64\" -ErrorAction SilentlyContinue;" + + "echo \"$p\";" + + "if ($p -eq $Null) {" + + " exit 0;" + + "} else {" + + " exit 1;" + + "}" + ); } public void test30StartStop() throws Exception { @@ -231,12 +239,12 @@ public void test60Manager() throws IOException { Path fakeServiceMgr = serviceMgr.getParent().resolve("elasticsearch-service-mgr.bat"); Files.write(fakeServiceMgr, Arrays.asList("echo \"Fake Service Manager GUI\"")); Shell sh = new Shell(); - Result result = sh.run(serviceScript + " manager"); + Result result = sh.run(serviceScript + " manager"); assertThat(result.stdout, containsString("Fake Service Manager GUI")); // check failure too Files.write(fakeServiceMgr, Arrays.asList("echo \"Fake Service Manager GUI Failure\"", "exit 1")); - result 
= sh.runIgnoreExitCode(serviceScript + " manager"); + result = sh.runIgnoreExitCode(serviceScript + " manager"); TestCase.assertEquals(1, result.exitCode); TestCase.assertTrue(result.stdout, result.stdout.contains("Fake Service Manager GUI Failure")); Files.move(tmpServiceMgr, serviceMgr); diff --git a/qa/os/src/test/java/org/elasticsearch/packaging/util/Archives.java b/qa/os/src/test/java/org/elasticsearch/packaging/util/Archives.java index 96e7ddbd9d03e..b34f9c786eddb 100644 --- a/qa/os/src/test/java/org/elasticsearch/packaging/util/Archives.java +++ b/qa/os/src/test/java/org/elasticsearch/packaging/util/Archives.java @@ -26,6 +26,7 @@ import java.nio.file.Path; import java.nio.file.Paths; import java.util.List; +import java.util.Locale; import java.util.stream.Stream; import static java.util.stream.Collectors.joining; @@ -56,12 +57,10 @@ */ public class Archives { - protected static final Logger logger = LogManager.getLogger(Archives.class); + protected static final Logger logger = LogManager.getLogger(Archives.class); // in the future we'll run as a role user on Windows - public static final String ARCHIVE_OWNER = Platforms.WINDOWS - ? System.getenv("username") - : "elasticsearch"; + public static final String ARCHIVE_OWNER = Platforms.WINDOWS ? System.getenv("username") : "elasticsearch"; /** This is an arbitrarily chosen value that gives Elasticsearch time to log Bootstrap * errors to the console if they occur before the logging framework is initialized. 
*/ @@ -91,9 +90,12 @@ public static Installation installArchive(Shell sh, Distribution distribution, P if (Platforms.WINDOWS == false) { throw new IllegalStateException("Distribution " + distribution + " is not supported on linux"); } - installCommand = - "Add-Type -AssemblyName 'System.IO.Compression.Filesystem'; " + - "[IO.Compression.ZipFile]::ExtractToDirectory('" + distributionFile + "', '" + baseInstallPath + "')"; + installCommand = String.format( + Locale.ROOT, + "Add-Type -AssemblyName 'System.IO.Compression.Filesystem'; [IO.Compression.ZipFile]::ExtractToDirectory('%s', '%s')", + distributionFile, + baseInstallPath + ); } else { throw new RuntimeException("Distribution " + distribution + " is not a known archive type"); @@ -129,22 +131,26 @@ private static void setupArchiveUsersLinux(Path installPath) { if (sh.runIgnoreExitCode("id elasticsearch").isSuccess() == false) { if (isDPKG()) { - sh.run("adduser " + - "--quiet " + - "--system " + - "--no-create-home " + - "--ingroup elasticsearch " + - "--disabled-password " + - "--shell /bin/false " + - "elasticsearch"); + sh.run( + "adduser " + + "--quiet " + + "--system " + + "--no-create-home " + + "--ingroup elasticsearch " + + "--disabled-password " + + "--shell /bin/false " + + "elasticsearch" + ); } else { - sh.run("useradd " + - "--system " + - "-M " + - "--gid elasticsearch " + - "--shell /sbin/nologin " + - "--comment 'elasticsearch user' " + - "elasticsearch"); + sh.run( + "useradd " + + "--system " + + "-M " + + "--gid elasticsearch " + + "--shell /sbin/nologin " + + "--comment 'elasticsearch user' " + + "elasticsearch" + ); } } } @@ -157,13 +163,7 @@ public static void verifyArchiveInstallation(Installation installation, Distribu } private static void verifyOssInstallation(Installation es, Distribution distribution, String owner) { - Stream.of( - es.home, - es.config, - es.plugins, - es.modules, - es.logs - ).forEach(dir -> assertThat(dir, file(Directory, owner, owner, p755))); + Stream.of(es.home, 
es.config, es.plugins, es.modules, es.logs).forEach(dir -> assertThat(dir, file(Directory, owner, owner, p755))); assertThat(Files.exists(es.data), is(false)); @@ -188,24 +188,15 @@ private static void verifyOssInstallation(Installation es, Distribution distribu }); if (distribution.packaging == Distribution.Packaging.ZIP) { - Stream.of( - "elasticsearch-service.bat", - "elasticsearch-service-mgr.exe", - "elasticsearch-service-x64.exe" - ).forEach(executable -> assertThat(es.bin(executable), file(File, owner))); + Stream.of("elasticsearch-service.bat", "elasticsearch-service-mgr.exe", "elasticsearch-service-x64.exe") + .forEach(executable -> assertThat(es.bin(executable), file(File, owner))); } - Stream.of( - "elasticsearch.yml", - "jvm.options", - "log4j2.properties" - ).forEach(configFile -> assertThat(es.config(configFile), file(File, owner, owner, p660))); + Stream.of("elasticsearch.yml", "jvm.options", "log4j2.properties") + .forEach(configFile -> assertThat(es.config(configFile), file(File, owner, owner, p660))); - Stream.of( - "NOTICE.txt", - "LICENSE.txt", - "README.asciidoc" - ).forEach(doc -> assertThat(es.home.resolve(doc), file(File, owner, owner, p644))); + Stream.of("NOTICE.txt", "LICENSE.txt", "README.asciidoc") + .forEach(doc -> assertThat(es.home.resolve(doc), file(File, owner, owner, p644))); } private static void verifyDefaultInstallation(Installation es, Distribution distribution, String owner) { @@ -235,13 +226,8 @@ private static void verifyDefaultInstallation(Installation es, Distribution dist // the version through here assertThat(es.bin("elasticsearch-sql-cli-" + getCurrentVersion() + ".jar"), file(File, owner, owner, p755)); - Stream.of( - "users", - "users_roles", - "roles.yml", - "role_mapping.yml", - "log4j2.properties" - ).forEach(configFile -> assertThat(es.config(configFile), file(File, owner, owner, p660))); + Stream.of("users", "users_roles", "roles.yml", "role_mapping.yml", "log4j2.properties") + .forEach(configFile -> 
assertThat(es.config(configFile), file(File, owner, owner, p660))); } public static Shell.Result startElasticsearch(Installation installation, Shell sh) { @@ -253,13 +239,20 @@ public static Shell.Result startElasticsearchWithTty(Installation installation, final Installation.Executables bin = installation.executables(); // requires the "expect" utility to be installed - String script = "expect -c \"$(cat< "$processInfo.Environment.Add('" + entry.getKey() + "', '" + entry.getValue() + "'); ") - .collect(joining()) + - "$processInfo.UseShellExecute = $false; " + - "$process = New-Object System.Diagnostics.Process; " + - "$process.StartInfo = $processInfo; " + + .collect(joining()) + + "$processInfo.UseShellExecute = $false; " + + "$process = New-Object System.Diagnostics.Process; " + + "$process.StartInfo = $processInfo; " + + // set up some asynchronous output handlers - "$outScript = { $EventArgs.Data | Out-File -Encoding UTF8 -Append '" + stdout + "' }; " + - "$errScript = { $EventArgs.Data | Out-File -Encoding UTF8 -Append '" + stderr + "' }; " + - "$stdOutEvent = Register-ObjectEvent -InputObject $process " + - "-Action $outScript -EventName 'OutputDataReceived'; " + - "$stdErrEvent = Register-ObjectEvent -InputObject $process " + - "-Action $errScript -EventName 'ErrorDataReceived'; " + - - "$process.Start() | Out-Null; " + - "$process.BeginOutputReadLine(); " + - "$process.BeginErrorReadLine(); " + - "$process.StandardInput.WriteLine('" + keystorePassword + "'); " + - "Wait-Process -Timeout " + ES_STARTUP_SLEEP_TIME_SECONDS + " -Id $process.Id; " + - "$process.Id;" - ); + "$outScript = { $EventArgs.Data | Out-File -Encoding UTF8 -Append '" + + stdout + + "' }; " + + "$errScript = { $EventArgs.Data | Out-File -Encoding UTF8 -Append '" + + stderr + + "' }; " + + "$stdOutEvent = Register-ObjectEvent -InputObject $process " + + "-Action $outScript -EventName 'OutputDataReceived'; " + + "$stdErrEvent = Register-ObjectEvent -InputObject $process " + + "-Action 
$errScript -EventName 'ErrorDataReceived'; " + + + + "$process.Start() | Out-Null; " + + "$process.BeginOutputReadLine(); " + + "$process.BeginErrorReadLine(); " + + "$process.StandardInput.WriteLine('" + + keystorePassword + + "'); " + + "Wait-Process -Timeout " + + ES_STARTUP_SLEEP_TIME_SECONDS + + " -Id $process.Id; " + + "$process.Id;" + ); } public static void assertElasticsearchStarted(Installation installation) throws Exception { @@ -355,9 +363,11 @@ public static void stopElasticsearch(Installation installation) throws Exception sh.run("Get-Process -Id " + pid + " | Stop-Process -Force; Wait-Process -Id " + pid); // Clear the asynchronous event handlers - sh.runIgnoreExitCode("Get-EventSubscriber | " + - "where {($_.EventName -eq 'OutputDataReceived' -Or $_.EventName -eq 'ErrorDataReceived' |" + - "Unregister-EventSubscriber -Force"); + sh.runIgnoreExitCode( + "Get-EventSubscriber | " + + "where {($_.EventName -eq 'OutputDataReceived' -Or $_.EventName -eq 'ErrorDataReceived' |" + + "Unregister-EventSubscriber -Force" + ); }); if (Files.exists(pidFile)) { Files.delete(pidFile); diff --git a/qa/os/src/test/java/org/elasticsearch/packaging/util/Cleanup.java b/qa/os/src/test/java/org/elasticsearch/packaging/util/Cleanup.java index b2483380f832a..bdc04b94d51d7 100644 --- a/qa/os/src/test/java/org/elasticsearch/packaging/util/Cleanup.java +++ b/qa/os/src/test/java/org/elasticsearch/packaging/util/Cleanup.java @@ -62,14 +62,16 @@ public static void cleanEverything() throws Exception { sh.runIgnoreExitCode("ps aux | grep -i 'org.elasticsearch.bootstrap.Elasticsearch' | awk {'print $2'} | xargs kill -9"); }); - Platforms.onWindows(() -> { - // the view of processes returned by Get-Process doesn't expose command line arguments, so we use WMI here - sh.runIgnoreExitCode( - "Get-WmiObject Win32_Process | " + - "Where-Object { $_.CommandLine -Match 'org.elasticsearch.bootstrap.Elasticsearch' } | " + - "ForEach-Object { $_.Terminate() }" - ); - }); + Platforms.onWindows( 
+ () -> { + // the view of processes returned by Get-Process doesn't expose command line arguments, so we use WMI here + sh.runIgnoreExitCode( + "Get-WmiObject Win32_Process | " + + "Where-Object { $_.CommandLine -Match 'org.elasticsearch.bootstrap.Elasticsearch' } | " + + "ForEach-Object { $_.Terminate() }" + ); + } + ); Platforms.onLinux(Cleanup::purgePackagesLinux); @@ -85,10 +87,7 @@ public static void cleanEverything() throws Exception { final List filesToDelete = Platforms.WINDOWS ? ELASTICSEARCH_FILES_WINDOWS : ELASTICSEARCH_FILES_LINUX; // windows needs leniency due to asinine releasing of file locking async from a process exiting Consumer rm = Platforms.WINDOWS ? FileUtils::rmWithRetries : FileUtils::rm; - filesToDelete.stream() - .map(Paths::get) - .filter(Files::exists) - .forEach(rm); + filesToDelete.stream().map(Paths::get).filter(Files::exists).forEach(rm); // disable elasticsearch service // todo add this for windows when adding tests for service intallation diff --git a/qa/os/src/test/java/org/elasticsearch/packaging/util/FileMatcher.java b/qa/os/src/test/java/org/elasticsearch/packaging/util/FileMatcher.java index 90edb42829db1..4252739382a29 100644 --- a/qa/os/src/test/java/org/elasticsearch/packaging/util/FileMatcher.java +++ b/qa/os/src/test/java/org/elasticsearch/packaging/util/FileMatcher.java @@ -43,7 +43,10 @@ */ public class FileMatcher extends TypeSafeMatcher { - public enum Fileness { File, Directory } + public enum Fileness { + File, + Directory + } public static final Set p775 = fromString("rwxrwxr-x"); public static final Set p770 = fromString("rwxrwx---"); @@ -126,10 +129,14 @@ public void describeMismatchSafely(Path path, Description description) { @Override public void describeTo(Description description) { - description.appendValue("file/directory: ").appendValue(fileness) - .appendText(" with owner ").appendValue(owner) - .appendText(" with group ").appendValue(group) - .appendText(" with posix permissions ").appendValueList("[", 
",", "]", posixPermissions); + description.appendValue("file/directory: ") + .appendValue(fileness) + .appendText(" with owner ") + .appendValue(owner) + .appendText(" with group ") + .appendValue(group) + .appendText(" with posix permissions ") + .appendValueList("[", ",", "]", posixPermissions); } public static FileMatcher file(Fileness fileness, String owner) { diff --git a/qa/os/src/test/java/org/elasticsearch/packaging/util/FileUtils.java b/qa/os/src/test/java/org/elasticsearch/packaging/util/FileUtils.java index 5aa6b9ff60c56..3462fa51d10a6 100644 --- a/qa/os/src/test/java/org/elasticsearch/packaging/util/FileUtils.java +++ b/qa/os/src/test/java/org/elasticsearch/packaging/util/FileUtils.java @@ -34,6 +34,7 @@ import java.nio.file.DirectoryStream; import java.nio.file.Files; import java.nio.file.LinkOption; +import java.nio.file.OpenOption; import java.nio.file.Path; import java.nio.file.Paths; import java.nio.file.StandardOpenOption; @@ -111,22 +112,21 @@ public static void rmWithRetries(Path... paths) { public static Path mktempDir(Path path) { try { - return Files.createTempDirectory(path,"tmp"); + return Files.createTempDirectory(path, "tmp"); } catch (IOException e) { throw new RuntimeException(e); } } - public static Path mkdir(Path path) { try { return Files.createDirectories(path); - } catch (IOException e) { + } catch (IOException e) { throw new RuntimeException(e); - } - } + } + } - public static Path cp(Path source, Path target) { + public static Path cp(Path source, Path target) { try { return Files.copy(source, target); } catch (IOException e) { @@ -142,9 +142,22 @@ public static Path mv(Path source, Path target) { } } + /** + * Creates or appends to the specified file, and writes the supplied string to it. + * No newline is written - if a trailing newline is required, it should be present + * in text, or use {@link Files#write(Path, Iterable, OpenOption...)}. 
+ * @param file the file to create or append + * @param text the string to write + */ public static void append(Path file, String text) { - try (BufferedWriter writer = Files.newBufferedWriter(file, StandardCharsets.UTF_8, - StandardOpenOption.CREATE, StandardOpenOption.APPEND)) { + try ( + BufferedWriter writer = Files.newBufferedWriter( + file, + StandardCharsets.UTF_8, + StandardOpenOption.CREATE, + StandardOpenOption.APPEND + ) + ) { writer.write(text); } catch (IOException e) { @@ -204,7 +217,7 @@ public static String slurpAllLogs(Path logPath, String activeLogFile, String rot for (Path rotatedLogFile : FileUtils.lsGlob(logPath, rotatedLogFilesGlob)) { logFileJoiner.add(FileUtils.slurpTxtorGz(rotatedLogFile)); } - return(logFileJoiner.toString()); + return (logFileJoiner.toString()); } catch (IOException e) { throw new RuntimeException(e); } @@ -221,14 +234,14 @@ public static void logAllLogs(Path logsDir, Logger logger) { // gc logs are verbose and not useful in this context .filter(file -> file.getFileName().toString().startsWith("gc.log") == false) .forEach(file -> { - logger.info("=== Contents of `{}` ({}) ===", file, file.toAbsolutePath()); - try (Stream stream = Files.lines(file)) { - stream.forEach(logger::info); - } catch (IOException e) { - logger.error("Can't show contents", e); - } - logger.info("=== End of contents of `{}`===", file); - }); + logger.info("=== Contents of `{}` ({}) ===", file, file.toAbsolutePath()); + try (Stream stream = Files.lines(file)) { + stream.forEach(logger::info); + } catch (IOException e) { + logger.error("Can't show contents", e); + } + logger.info("=== End of contents of `{}`===", file); + }); } catch (IOException e) { logger.error("Can't list log files", e); } @@ -284,7 +297,6 @@ public static Map getNumericUnixPathOwnership(Path path) { return numericPathOwnership; } - // vagrant creates /tmp for us in windows so we use that to avoid long paths public static Path getTempDir() { return 
Paths.get("/tmp").toAbsolutePath(); @@ -295,6 +307,7 @@ public static Path getDefaultArchiveInstallPath() { } private static final Pattern VERSION_REGEX = Pattern.compile("(\\d+\\.\\d+\\.\\d+(-SNAPSHOT)?)"); + public static String getCurrentVersion() { // TODO: just load this once String distroFile = System.getProperty("tests.distribution"); @@ -314,12 +327,12 @@ public static void assertPathsExist(final Path... paths) { } public static Matcher fileWithGlobExist(String glob) throws IOException { - return new FeatureMatcher>(not(emptyIterable()),"File with pattern exist", "file with pattern"){ + return new FeatureMatcher>(not(emptyIterable()), "File with pattern exist", "file with pattern") { @Override protected Iterable featureValueOf(Path actual) { try { - return Files.newDirectoryStream(actual,glob); + return Files.newDirectoryStream(actual, glob); } catch (IOException e) { return Collections.emptyList(); } diff --git a/qa/os/src/test/java/org/elasticsearch/packaging/util/Installation.java b/qa/os/src/test/java/org/elasticsearch/packaging/util/Installation.java index 9144496f2d1ec..d4a8d37823b06 100644 --- a/qa/os/src/test/java/org/elasticsearch/packaging/util/Installation.java +++ b/qa/os/src/test/java/org/elasticsearch/packaging/util/Installation.java @@ -28,9 +28,7 @@ public class Installation { // in the future we'll run as a role user on Windows - public static final String ARCHIVE_OWNER = Platforms.WINDOWS - ? System.getenv("username") - : "elasticsearch"; + public static final String ARCHIVE_OWNER = Platforms.WINDOWS ? 
System.getenv("username") : "elasticsearch"; private final Shell sh; public final Distribution distribution; @@ -46,8 +44,18 @@ public class Installation { public final Path pidDir; public final Path envFile; - private Installation(Shell sh, Distribution distribution, Path home, Path config, Path data, Path logs, - Path plugins, Path modules, Path pidDir, Path envFile) { + private Installation( + Shell sh, + Distribution distribution, + Path home, + Path config, + Path data, + Path logs, + Path plugins, + Path modules, + Path pidDir, + Path envFile + ) { this.sh = sh; this.distribution = distribution; this.home = home; @@ -147,9 +155,7 @@ public class Executable { public final Path path; private Executable(String name) { - final String platformExecutableName = Platforms.WINDOWS - ? name + ".bat" - : name; + final String platformExecutableName = Platforms.WINDOWS ? name + ".bat" : name; this.path = bin(platformExecutableName); } diff --git a/qa/os/src/test/java/org/elasticsearch/packaging/util/Packages.java b/qa/os/src/test/java/org/elasticsearch/packaging/util/Packages.java index 4ff748b988a8d..f20f980832307 100644 --- a/qa/os/src/test/java/org/elasticsearch/packaging/util/Packages.java +++ b/qa/os/src/test/java/org/elasticsearch/packaging/util/Packages.java @@ -24,11 +24,11 @@ import org.elasticsearch.packaging.util.Shell.Result; import java.io.IOException; -import java.nio.charset.StandardCharsets; import java.nio.file.Files; import java.nio.file.Path; import java.nio.file.Paths; import java.nio.file.StandardOpenOption; +import java.util.List; import java.util.regex.Pattern; import java.util.stream.Stream; @@ -53,7 +53,7 @@ public class Packages { - private static final Logger logger = LogManager.getLogger(Packages.class); + private static final Logger logger = LogManager.getLogger(Packages.class); public static final Path SYSVINIT_SCRIPT = Paths.get("/etc/init.d/elasticsearch"); public static final Path SYSTEMD_SERVICE = 
Paths.get("/usr/lib/systemd/system/elasticsearch.service"); @@ -73,9 +73,10 @@ public static void assertRemoved(Distribution distribution) throws Exception { Platforms.onDPKG(() -> { assertThat(status.exitCode, anyOf(is(0), is(1))); if (status.exitCode == 0) { - assertTrue("an uninstalled status should be indicated: " + status.stdout, - Pattern.compile("(?m)^Status:.+deinstall ok").matcher(status.stdout).find() || - Pattern.compile("(?m)^Status:.+ok not-installed").matcher(status.stdout).find() + assertTrue( + "an uninstalled status should be indicated: " + status.stdout, + Pattern.compile("(?m)^Status:.+deinstall ok").matcher(status.stdout).find() + || Pattern.compile("(?m)^Status:.+ok not-installed").matcher(status.stdout).find() ); } }); @@ -108,8 +109,7 @@ public static Installation installPackage(Shell sh, Distribution distribution) t Installation installation = Installation.ofPackage(sh, distribution); if (distribution.hasJdk == false) { - Files.write(installation.envFile, ("JAVA_HOME=" + systemJavaHome + "\n").getBytes(StandardCharsets.UTF_8), - StandardOpenOption.APPEND); + Files.write(installation.envFile, List.of("JAVA_HOME=" + systemJavaHome), StandardOpenOption.APPEND); } return installation; } @@ -124,9 +124,7 @@ private static Result runInstallCommand(Distribution distribution, Shell sh) { if (r.exitCode != 0) { Result lockOF = sh.runIgnoreExitCode("lsof /var/lib/dpkg/lock"); if (lockOF.exitCode == 0) { - throw new RuntimeException( - "dpkg failed and the lockfile still exists. " - + "Failure:\n" + r + "\nLockfile:\n" + lockOF); + throw new RuntimeException("dpkg failed and the lockfile still exists. 
" + "Failure:\n" + r + "\nLockfile:\n" + lockOF); } } return r; @@ -157,7 +155,6 @@ public static void verifyPackageInstallation(Installation installation, Distribu } } - private static void verifyOssInstallation(Installation es, Distribution distribution, Shell sh) { sh.run("id elasticsearch"); @@ -167,16 +164,9 @@ private static void verifyOssInstallation(Installation es, Distribution distribu final Path homeDir = Paths.get(passwdResult.stdout.trim().split(":")[5]); assertThat("elasticsearch user home directory must not exist", homeDir, fileDoesNotExist()); - Stream.of( - es.home, - es.plugins, - es.modules - ).forEach(dir -> assertThat(dir, file(Directory, "root", "root", p755))); + Stream.of(es.home, es.plugins, es.modules).forEach(dir -> assertThat(dir, file(Directory, "root", "root", p755))); - Stream.of( - es.data, - es.logs - ).forEach(dir -> assertThat(dir, file(Directory, "elasticsearch", "elasticsearch", p750))); + Stream.of(es.data, es.logs).forEach(dir -> assertThat(dir, file(Directory, "elasticsearch", "elasticsearch", p750))); // we shell out here because java's posix file permission view doesn't support special modes assertThat(es.config, file(Directory, "root", "elasticsearch", p750)); @@ -186,33 +176,18 @@ private static void verifyOssInstallation(Installation es, Distribution distribu assertThat(jvmOptionsDirectory, file(Directory, "root", "elasticsearch", p750)); assertThat(sh.run("find \"" + jvmOptionsDirectory + "\" -maxdepth 0 -printf \"%m\"").stdout, containsString("2750")); - Stream.of( - "elasticsearch.keystore", - "elasticsearch.yml", - "jvm.options", - "log4j2.properties" - ).forEach(configFile -> assertThat(es.config(configFile), file(File, "root", "elasticsearch", p660))); + Stream.of("elasticsearch.keystore", "elasticsearch.yml", "jvm.options", "log4j2.properties") + .forEach(configFile -> assertThat(es.config(configFile), file(File, "root", "elasticsearch", p660))); assertThat(es.config(".elasticsearch.keystore.initial_md5sum"), 
file(File, "root", "elasticsearch", p644)); assertThat(sh.run("sudo -u elasticsearch " + es.bin("elasticsearch-keystore") + " list").stdout, containsString("keystore.seed")); - Stream.of( - es.bin, - es.lib - ).forEach(dir -> assertThat(dir, file(Directory, "root", "root", p755))); + Stream.of(es.bin, es.lib).forEach(dir -> assertThat(dir, file(Directory, "root", "root", p755))); - Stream.of( - "elasticsearch", - "elasticsearch-plugin", - "elasticsearch-keystore", - "elasticsearch-shard", - "elasticsearch-node" - ).forEach(executable -> assertThat(es.bin(executable), file(File, "root", "root", p755))); + Stream.of("elasticsearch", "elasticsearch-plugin", "elasticsearch-keystore", "elasticsearch-shard", "elasticsearch-node") + .forEach(executable -> assertThat(es.bin(executable), file(File, "root", "root", p755))); - Stream.of( - "NOTICE.txt", - "README.asciidoc" - ).forEach(doc -> assertThat(es.home.resolve(doc), file(File, "root", "root", p644))); + Stream.of("NOTICE.txt", "README.asciidoc").forEach(doc -> assertThat(es.home.resolve(doc), file(File, "root", "root", p644))); assertThat(es.envFile, file(File, "root", "elasticsearch", p660)); @@ -231,9 +206,7 @@ private static void verifyOssInstallation(Installation es, Distribution distribu Paths.get("/usr/lib/sysctl.d/elasticsearch.conf") ).forEach(confFile -> assertThat(confFile, file(File, "root", "root", p644))); - final String sysctlExecutable = (distribution.packaging == Distribution.Packaging.RPM) - ? "/usr/sbin/sysctl" - : "/sbin/sysctl"; + final String sysctlExecutable = (distribution.packaging == Distribution.Packaging.RPM) ? 
"/usr/sbin/sysctl" : "/sbin/sysctl"; assertThat(sh.run(sysctlExecutable + " vm.max_map_count").stdout, containsString("vm.max_map_count = 262144")); } @@ -262,13 +235,8 @@ private static void verifyDefaultInstallation(Installation es) { // the version through here assertThat(es.bin("elasticsearch-sql-cli-" + getCurrentVersion() + ".jar"), file(File, "root", "root", p755)); - Stream.of( - "users", - "users_roles", - "roles.yml", - "role_mapping.yml", - "log4j2.properties" - ).forEach(configFile -> assertThat(es.config(configFile), file(File, "root", "elasticsearch", p660))); + Stream.of("users", "users_roles", "roles.yml", "role_mapping.yml", "log4j2.properties") + .forEach(configFile -> assertThat(es.config(configFile), file(File, "root", "elasticsearch", p660))); } /** @@ -336,8 +304,8 @@ public JournaldWrapper(Shell sh) { * for Elasticsearch logs and storing it in class state. */ public void clear() { - cursor = sh.run("sudo journalctl --unit=elasticsearch.service --lines=0 --show-cursor -o cat" + - " | sed -e 's/-- cursor: //'").stdout.trim(); + final String script = "sudo journalctl --unit=elasticsearch.service --lines=0 --show-cursor -o cat | sed -e 's/-- cursor: //'"; + cursor = sh.run(script).stdout.trim(); } /** diff --git a/qa/os/src/test/java/org/elasticsearch/packaging/util/ServerUtils.java b/qa/os/src/test/java/org/elasticsearch/packaging/util/ServerUtils.java index 6ba33b56b30d4..6b6cbf27b5124 100644 --- a/qa/os/src/test/java/org/elasticsearch/packaging/util/ServerUtils.java +++ b/qa/os/src/test/java/org/elasticsearch/packaging/util/ServerUtils.java @@ -57,12 +57,12 @@ public class ServerUtils { - private static final Logger logger = LogManager.getLogger(ServerUtils.class); + private static final Logger logger = LogManager.getLogger(ServerUtils.class); private static String SECURITY_ENABLED = "xpack.security.enabled: true"; private static String SSL_ENABLED = "xpack.security.http.ssl.enabled: true"; - // generous timeout as nested virtualization can be 
quite slow ... + // generous timeout as nested virtualization can be quite slow ... private static final long waitTime = TimeUnit.MINUTES.toMillis(3); private static final long timeoutLength = TimeUnit.SECONDS.toMillis(30); private static final long requestInterval = TimeUnit.SECONDS.toMillis(5); @@ -122,9 +122,7 @@ private static HttpResponse execute(Request request, String username, String pas connectionManager.setDefaultMaxPerRoute(100); connectionManager.setMaxTotal(200); connectionManager.setValidateAfterInactivity(1000); - executor = Executor.newInstance(HttpClientBuilder.create() - .setConnectionManager(connectionManager) - .build()); + executor = Executor.newInstance(HttpClientBuilder.create().setConnectionManager(connectionManager).build()); } } else { executor = Executor.newInstance(); @@ -157,13 +155,8 @@ private static void waitForXpack() { throw new RuntimeException("Elasticsearch (with x-pack) did not start"); } - public static void waitForElasticsearch( - String status, - String index, - Installation installation, - String username, - String password - ) throws Exception { + public static void waitForElasticsearch(String status, String index, Installation installation, String username, String password) + throws Exception { Objects.requireNonNull(status); @@ -184,8 +177,7 @@ public static void waitForElasticsearch( try { final HttpResponse response = execute( - Request - .Get("http://localhost:9200/_cluster/health") + Request.Get("http://localhost:9200/_cluster/health") .connectTimeout((int) timeoutLength) .socketTimeout((int) timeoutLength), username, @@ -237,11 +229,13 @@ public static void waitForElasticsearch( public static void runElasticsearchTests() throws Exception { makeRequest( Request.Post("http://localhost:9200/library/_doc/1?refresh=true&pretty") - .bodyString("{ \"title\": \"Book #1\", \"pages\": 123 }", ContentType.APPLICATION_JSON)); + .bodyString("{ \"title\": \"Book #1\", \"pages\": 123 }", ContentType.APPLICATION_JSON) + ); 
makeRequest( Request.Post("http://localhost:9200/library/_doc/2?refresh=true&pretty") - .bodyString("{ \"title\": \"Book #2\", \"pages\": 456 }", ContentType.APPLICATION_JSON)); + .bodyString("{ \"title\": \"Book #2\", \"pages\": 456 }", ContentType.APPLICATION_JSON) + ); String count = makeRequest(Request.Get("http://localhost:9200/_count?pretty")); assertThat(count, containsString("\"count\" : 2")); diff --git a/qa/os/src/test/java/org/elasticsearch/packaging/util/Shell.java b/qa/os/src/test/java/org/elasticsearch/packaging/util/Shell.java index 9de4218929bd3..3acf3d39f555c 100644 --- a/qa/os/src/test/java/org/elasticsearch/packaging/util/Shell.java +++ b/qa/os/src/test/java/org/elasticsearch/packaging/util/Shell.java @@ -42,9 +42,8 @@ */ public class Shell { - public static final int TAIL_WHEN_TOO_MUCH_OUTPUT = 1000; - public static final Result NO_OP = new Shell.Result(0, "",""); - protected final Logger logger = LogManager.getLogger(getClass()); + public static final Result NO_OP = new Shell.Result(0, "", ""); + protected final Logger logger = LogManager.getLogger(getClass()); final Map env = new HashMap<>(); Path workingDirectory; @@ -86,20 +85,28 @@ public Result runIgnoreExitCode(String script) { public void chown(Path path) throws Exception { Platforms.onLinux(() -> run("chown -R elasticsearch:elasticsearch " + path)); - Platforms.onWindows(() -> run( - "$account = New-Object System.Security.Principal.NTAccount '" + System.getenv("username") + "'; " + - "$pathInfo = Get-Item '" + path + "'; " + - "$toChown = @(); " + - "if ($pathInfo.PSIsContainer) { " + - " $toChown += Get-ChildItem '" + path + "' -Recurse; " + - "}" + - "$toChown += $pathInfo; " + - "$toChown | ForEach-Object { " + - "$acl = Get-Acl $_.FullName; " + - "$acl.SetOwner($account); " + - "Set-Acl $_.FullName $acl " + - "}" - )); + Platforms.onWindows( + () -> run( + String.format( + Locale.ROOT, + "$account = New-Object System.Security.Principal.NTAccount '%s'; " + + "$pathInfo = Get-Item 
'%s'; " + + "$toChown = @(); " + + "if ($pathInfo.PSIsContainer) { " + + " $toChown += Get-ChildItem '%s' -Recurse; " + + "}" + + "$toChown += $pathInfo; " + + "$toChown | ForEach-Object { " + + " $acl = Get-Acl $_.FullName; " + + " $acl.SetOwner($account); " + + " Set-Acl $_.FullName $acl " + + "}", + System.getenv("username"), + path, + path + ) + ) + ); } public void extractZip(Path zipPath, Path destinationDir) throws Exception { @@ -165,22 +172,13 @@ private Result runScriptIgnoreExitCode(String[] command) { if (process.isAlive()) { process.destroyForcibly(); } - Result result = new Result( - -1, - readFileIfExists(stdOut), - readFileIfExists(stdErr) - ); + Result result = new Result(-1, readFileIfExists(stdOut), readFileIfExists(stdErr)); throw new IllegalStateException( - "Timed out running shell command: " + Arrays.toString(command) + "\n" + - "Result:\n" + result + "Timed out running shell command: " + Arrays.toString(command) + "\n" + "Result:\n" + result ); } - Result result = new Result( - process.exitValue(), - readFileIfExists(stdOut), - readFileIfExists(stdErr) - ); + Result result = new Result(process.exitValue(), readFileIfExists(stdOut), readFileIfExists(stdErr)); logger.info("Ran: {} {}", Arrays.toString(command), result); return result; @@ -203,7 +201,7 @@ private String readFileIfExists(Path path) throws IOException { if (Files.exists(path)) { long size = Files.size(path); if (size > 100 * 1024) { - return "<>"; + return "<>"; } try (Stream lines = Files.lines(path, StandardCharsets.UTF_8)) { return lines.collect(Collectors.joining("\n")); @@ -225,15 +223,7 @@ private static void setWorkingDirectory(ProcessBuilder builder, Path path) { } public String toString() { - return new StringBuilder() - .append(" ") - .append("env = [") - .append(env) - .append("]") - .append("workingDirectory = [") - .append(workingDirectory) - .append("]") - .toString(); + return String.format(Locale.ROOT, " env = [%s] workingDirectory = [%s]", env, workingDirectory); 
} public static class Result { @@ -252,17 +242,7 @@ public boolean isSuccess() { } public String toString() { - return new StringBuilder() - .append("exitCode = [") - .append(exitCode) - .append("] ") - .append("stdout = [") - .append(stdout.trim()) - .append("] ") - .append("stderr = [") - .append(stderr.trim()) - .append("]") - .toString(); + return String.format(Locale.ROOT, "exitCode = [%d] stdout = [%s] stderr = [%s]", exitCode, stdout.trim(), stderr.trim()); } } diff --git a/qa/remote-clusters/build.gradle b/qa/remote-clusters/build.gradle index 2d0acca20a50d..cc3d3851e4df4 100644 --- a/qa/remote-clusters/build.gradle +++ b/qa/remote-clusters/build.gradle @@ -25,12 +25,7 @@ apply plugin: 'elasticsearch.distribution-download' testFixtures.useFixture() -configurations { - restSpec -} - dependencies { - restSpec project(':rest-api-spec') testCompile project(':client:rest-high-level') } @@ -90,12 +85,8 @@ def createAndSetWritable(Object... locations) { } processTestResources { - from({ zipTree(configurations.restSpec.singleFile) }) { - include 'rest-api-spec/api/**' - } from project(':x-pack:plugin:core') .file('src/test/resources/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.jks') - dependsOn configurations.restSpec } task integTest(type: Test) { diff --git a/qa/rolling-upgrade/build.gradle b/qa/rolling-upgrade/build.gradle index bf1bc2d5b1073..003fc75910523 100644 --- a/qa/rolling-upgrade/build.gradle +++ b/qa/rolling-upgrade/build.gradle @@ -31,21 +31,6 @@ task bwcTest { group = 'verification' } -configurations { - restSpec -} - -dependencies { - restSpec project(':rest-api-spec') -} - -processTestResources { - from({ zipTree(configurations.restSpec.singleFile) }) { - include 'rest-api-spec/api/**' - } - dependsOn configurations.restSpec -} - for (Version bwcVersion : bwcVersions.wireCompatible) { /* * The goal here is to: diff --git a/qa/smoke-test-multinode/build.gradle b/qa/smoke-test-multinode/build.gradle index 
b76a865c6f898..244b5d1e8af6e 100644 --- a/qa/smoke-test-multinode/build.gradle +++ b/qa/smoke-test-multinode/build.gradle @@ -21,8 +21,10 @@ apply plugin: 'elasticsearch.testclusters' apply plugin: 'elasticsearch.standalone-rest-test' apply plugin: 'elasticsearch.rest-test' -integTest { - includePackaged = true +restResources { + restTests { + includeCore '*' + } } File repo = file("$buildDir/testclusters/repo") diff --git a/rest-api-spec/build.gradle b/rest-api-spec/build.gradle index d95ad476682b1..fa29345e0ff6b 100644 --- a/rest-api-spec/build.gradle +++ b/rest-api-spec/build.gradle @@ -4,3 +4,13 @@ apply plugin: 'nebula.maven-scm' test.enabled = false jarHell.enabled = false + +configurations { + restSpecs + restTests +} + +artifacts { + restSpecs(new File(projectDir, "src/main/resources/rest-api-spec/api")) + restTests(new File(projectDir, "src/main/resources/rest-api-spec/test")) +} diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.stats/20_translog.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.stats/20_translog.yml index c83ea31ab3875..8f4daaa1e59e4 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.stats/20_translog.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.stats/20_translog.yml @@ -107,6 +107,10 @@ metric: [ translog ] - match: { indices.test.primaries.translog.operations: 1 } - match: { indices.test.primaries.translog.uncommitted_operations: 1 } + - do: + cluster.health: + wait_for_no_initializing_shards: true + wait_for_events: languid - do: indices.close: index: test diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/MaxAgeCondition.java b/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/MaxAgeCondition.java index bf6c9e2f69592..6c8be03779a77 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/MaxAgeCondition.java +++ 
b/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/MaxAgeCondition.java @@ -57,7 +57,12 @@ public String getWriteableName() { @Override public void writeTo(StreamOutput out) throws IOException { - //TODO here we should just use TimeValue#writeTo and same for de-serialization in the constructor, we lose information this way + // While we technically could serialize this with out.writeTimeValue(...), that would + // require doing the song and dance around backwards compatibility for this value. Since + // in this case the deserialized version is not displayed to a user, it's okay to simply use + // milliseconds. It's possible to lose precision if someone were to, say, specify 50 + // nanoseconds, however, in that case, their max age is indistinguishable from 0 + // milliseconds regardless. out.writeLong(value.getMillis()); } diff --git a/server/src/main/java/org/elasticsearch/action/search/AbstractSearchAsyncAction.java b/server/src/main/java/org/elasticsearch/action/search/AbstractSearchAsyncAction.java index 22da82dfeb681..8caf615dc1d07 100644 --- a/server/src/main/java/org/elasticsearch/action/search/AbstractSearchAsyncAction.java +++ b/server/src/main/java/org/elasticsearch/action/search/AbstractSearchAsyncAction.java @@ -116,8 +116,8 @@ abstract class AbstractSearchAsyncAction exten iterators.add(iterator); } } - this.toSkipShardsIts = new GroupShardsIterator<>(toSkipIterators, false); - this.shardsIts = new GroupShardsIterator<>(iterators, false); + this.toSkipShardsIts = new GroupShardsIterator<>(toSkipIterators); + this.shardsIts = new GroupShardsIterator<>(iterators); // we need to add 1 for non active partition, since we count it in the total. This means for each shard in the iterator we sum up // it's number of active shards but use 1 as the default if no replica of a shard is active at this point.
// on a per shards level we use shardIt.remaining() to increment the totalOps pointer but add 1 for the current shard result diff --git a/server/src/main/java/org/elasticsearch/action/search/CanMatchPreFilterSearchPhase.java b/server/src/main/java/org/elasticsearch/action/search/CanMatchPreFilterSearchPhase.java index aba32d2c850a0..59debedbcf8d0 100644 --- a/server/src/main/java/org/elasticsearch/action/search/CanMatchPreFilterSearchPhase.java +++ b/server/src/main/java/org/elasticsearch/action/search/CanMatchPreFilterSearchPhase.java @@ -113,7 +113,7 @@ private GroupShardsIterator getIterator(CanMatchSearchPhase return shardsIts; } FieldSortBuilder fieldSort = FieldSortBuilder.getPrimaryFieldSortOrNull(source); - return new GroupShardsIterator<>(sortShards(shardsIts, results.minAndMaxes, fieldSort.order()), false); + return new GroupShardsIterator<>(sortShards(shardsIts, results.minAndMaxes, fieldSort.order())); } private static List sortShards(GroupShardsIterator shardsIts, @@ -122,7 +122,7 @@ private static List sortShards(GroupShardsIterator shardsIts.get(ord)) + .map(shardsIts::get) .collect(Collectors.toList()); } diff --git a/server/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java b/server/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java index 24f0e04371588..2f7e8e338ea24 100644 --- a/server/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java +++ b/server/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java @@ -553,10 +553,10 @@ static GroupShardsIterator mergeShardsIterators(GroupShards for (ShardIterator shardIterator : localShardsIterator) { shards.add(new SearchShardIterator(localClusterAlias, shardIterator.shardId(), shardIterator.getShardRoutings(), localIndices)); } - return new GroupShardsIterator<>(shards); + return GroupShardsIterator.sortAndCreate(shards); } - private AbstractSearchAsyncAction searchAsyncAction(SearchTask task, SearchRequest searchRequest, + 
private AbstractSearchAsyncAction searchAsyncAction(SearchTask task, SearchRequest searchRequest, GroupShardsIterator shardIterators, SearchTimeProvider timeProvider, BiFunction connectionLookup, @@ -572,8 +572,19 @@ private AbstractSearchAsyncAction searchAsyncAction(SearchTask task, SearchReque return new CanMatchPreFilterSearchPhase(logger, searchTransportService, connectionLookup, aliasFilter, concreteIndexBoosts, indexRoutings, executor, searchRequest, listener, shardIterators, timeProvider, clusterStateVersion, task, (iter) -> { - AbstractSearchAsyncAction action = searchAsyncAction(task, searchRequest, iter, timeProvider, connectionLookup, - clusterStateVersion, aliasFilter, concreteIndexBoosts, indexRoutings, listener, false, clusters); + AbstractSearchAsyncAction action = searchAsyncAction( + task, + searchRequest, + iter, + timeProvider, + connectionLookup, + clusterStateVersion, + aliasFilter, + concreteIndexBoosts, + indexRoutings, + listener, + false, + clusters); return new SearchPhase(action.getName()) { @Override public void run() { diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/IndexMetaData.java b/server/src/main/java/org/elasticsearch/cluster/metadata/IndexMetaData.java index d5eec608b549e..82bcf2001b352 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/IndexMetaData.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/IndexMetaData.java @@ -262,7 +262,7 @@ public Iterator> settings() { * normal wildcard searches unless explicitly allowed */ public static final Setting INDEX_HIDDEN_SETTING = - Setting.boolSetting(SETTING_INDEX_HIDDEN, false, Property.IndexScope, Property.Final); + Setting.boolSetting(SETTING_INDEX_HIDDEN, false, Property.Dynamic, Property.IndexScope); /** * an internal index format description, allowing us to find out if this index is upgraded or needs upgrading diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/GroupShardsIterator.java 
b/server/src/main/java/org/elasticsearch/cluster/routing/GroupShardsIterator.java index a9904c96d020f..1cb105ac775e3 100644 --- a/server/src/main/java/org/elasticsearch/cluster/routing/GroupShardsIterator.java +++ b/server/src/main/java/org/elasticsearch/cluster/routing/GroupShardsIterator.java @@ -35,19 +35,19 @@ public final class GroupShardsIterator implements private final List iterators; /** - * Constructs a enw GroupShardsIterator from the given list. + * Constructs a new sorted GroupShardsIterator from the given list. Items are sorted based on their natural ordering. + * @see PlainShardIterator#compareTo(ShardIterator) + * @see org.elasticsearch.action.search.SearchShardIterator#compareTo(ShardIterator) */ - public GroupShardsIterator(List iterators) { - this(iterators, true); + public static GroupShardsIterator sortAndCreate(List iterators) { + CollectionUtil.timSort(iterators); + return new GroupShardsIterator<>(iterators); } /** * Constructs a new GroupShardsIterator from the given list. 
*/ - public GroupShardsIterator(List iterators, boolean useSort) { - if (useSort) { - CollectionUtil.timSort(iterators); - } + public GroupShardsIterator(List iterators) { this.iterators = iterators; } diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/OperationRouting.java b/server/src/main/java/org/elasticsearch/cluster/routing/OperationRouting.java index 6d9397db3b377..bc5d51b918796 100644 --- a/server/src/main/java/org/elasticsearch/cluster/routing/OperationRouting.java +++ b/server/src/main/java/org/elasticsearch/cluster/routing/OperationRouting.java @@ -95,7 +95,7 @@ public GroupShardsIterator searchShards(ClusterState clusterState set.add(iterator); } } - return new GroupShardsIterator<>(new ArrayList<>(set)); + return GroupShardsIterator.sortAndCreate(new ArrayList<>(set)); } private static final Map> EMPTY_ROUTING = Collections.emptyMap(); diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/RoutingTable.java b/server/src/main/java/org/elasticsearch/cluster/routing/RoutingTable.java index b595efb3a5b4b..f54e31292d6aa 100644 --- a/server/src/main/java/org/elasticsearch/cluster/routing/RoutingTable.java +++ b/server/src/main/java/org/elasticsearch/cluster/routing/RoutingTable.java @@ -260,7 +260,7 @@ private GroupShardsIterator allSatisfyingPredicateShardsGrouped(S } } } - return new GroupShardsIterator<>(set); + return GroupShardsIterator.sortAndCreate(set); } public ShardsIterator allShards(String[] indices) { @@ -321,7 +321,7 @@ public GroupShardsIterator activePrimaryShardsGrouped(String[] in } } } - return new GroupShardsIterator<>(set); + return GroupShardsIterator.sortAndCreate(set); } @Override diff --git a/server/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java b/server/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java index 4861cea792144..42e9d121aa850 100644 --- a/server/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java +++ 
b/server/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java @@ -298,7 +298,7 @@ public void apply(Settings value, Settings current, Settings previous) { RemoteClusterService.REMOTE_CLUSTER_PING_SCHEDULE, RemoteClusterService.REMOTE_CLUSTER_COMPRESS, RemoteConnectionStrategy.REMOTE_CONNECTION_MODE, - ProxyConnectionStrategy.REMOTE_CLUSTER_ADDRESSES, + ProxyConnectionStrategy.PROXY_ADDRESS, ProxyConnectionStrategy.REMOTE_SOCKET_CONNECTIONS, ProxyConnectionStrategy.SERVER_NAME, SniffConnectionStrategy.REMOTE_CLUSTERS_PROXY, diff --git a/server/src/main/java/org/elasticsearch/common/settings/Setting.java b/server/src/main/java/org/elasticsearch/common/settings/Setting.java index 05329882b7428..549f5ee755934 100644 --- a/server/src/main/java/org/elasticsearch/common/settings/Setting.java +++ b/server/src/main/java/org/elasticsearch/common/settings/Setting.java @@ -1257,6 +1257,12 @@ public static Setting boolSetting(String key, Setting fallback return new Setting<>(key, fallbackSetting, b -> parseBoolean(b, key, isFiltered(properties)), properties); } + public static Setting boolSetting(String key, Setting fallbackSetting, Validator validator, + Property... properties) { + return new Setting<>(new SimpleKey(key), fallbackSetting, fallbackSetting::getRaw, b -> parseBoolean(b, key, + isFiltered(properties)), validator, properties); + } + public static Setting boolSetting(String key, boolean defaultValue, Validator validator, Property... properties) { return new Setting<>(key, Boolean.toString(defaultValue), b -> parseBoolean(b, key, isFiltered(properties)), validator, properties); } @@ -1626,6 +1632,12 @@ public static Setting timeSetting(String key, Setting fall return new Setting<>(key, fallbackSetting, (s) -> TimeValue.parseTimeValue(s, key), properties); } + public static Setting timeSetting(String key, Setting fallBackSetting, Validator validator, + Property... 
properties) { + return new Setting<>(new SimpleKey(key), fallBackSetting, fallBackSetting::getRaw, (s) -> TimeValue.parseTimeValue(s, key), + validator, properties); + } + public static Setting positiveTimeSetting(String key, TimeValue defaultValue, Property... properties) { return timeSetting(key, defaultValue, TimeValue.timeValueMillis(0), properties); } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/ConstantFieldType.java b/server/src/main/java/org/elasticsearch/index/mapper/ConstantFieldType.java new file mode 100644 index 0000000000000..779e8f91350dc --- /dev/null +++ b/server/src/main/java/org/elasticsearch/index/mapper/ConstantFieldType.java @@ -0,0 +1,123 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.index.mapper; + +import org.apache.lucene.search.MatchAllDocsQuery; +import org.apache.lucene.search.MatchNoDocsQuery; +import org.apache.lucene.search.MultiTermQuery; +import org.apache.lucene.search.Query; +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.common.Nullable; +import org.elasticsearch.common.lucene.search.Queries; +import org.elasticsearch.common.regex.Regex; +import org.elasticsearch.index.query.QueryShardContext; + +import java.util.List; + +/** + * A {@link MappedFieldType} that has the same value for all documents. + * Factory methods for queries are called at rewrite time so they should be + * cheap. In particular they should not read data from disk or perform a + * network call. Furthermore they may only return a {@link MatchAllDocsQuery} + * or a {@link MatchNoDocsQuery}. + */ +public abstract class ConstantFieldType extends MappedFieldType { + + public ConstantFieldType() { + super(); + } + + public ConstantFieldType(ConstantFieldType other) { + super(other); + } + + @Override + public final boolean isSearchable() { + return true; + } + + @Override + public final boolean isAggregatable() { + return true; + } + + @Override + public final Query existsQuery(QueryShardContext context) { + return new MatchAllDocsQuery(); + } + + /** + * Return whether the constant value of this field matches the provided {@code pattern} + * as documented in {@link Regex#simpleMatch}. + */ + protected abstract boolean matches(String pattern, QueryShardContext context); + + private static String valueToString(Object value) { + return value instanceof BytesRef + ? 
((BytesRef) value).utf8ToString() + : value.toString(); + } + + @Override + public final Query termQuery(Object value, QueryShardContext context) { + String pattern = valueToString(value); + if (matches(pattern, context)) { + return Queries.newMatchAllQuery(); + } else { + return new MatchNoDocsQuery(); + } + } + + @Override + public final Query termsQuery(List values, QueryShardContext context) { + for (Object value : values) { + String pattern = valueToString(value); + if (matches(pattern, context)) { + // `terms` queries are a disjunction, so one matching term is enough + return Queries.newMatchAllQuery(); + } + } + return new MatchNoDocsQuery(); + } + + @Override + public final Query prefixQuery(String prefix, + @Nullable MultiTermQuery.RewriteMethod method, + QueryShardContext context) { + String pattern = prefix + "*"; + if (matches(pattern, context)) { + return Queries.newMatchAllQuery(); + } else { + return new MatchNoDocsQuery(); + } + } + + @Override + public final Query wildcardQuery(String value, + @Nullable MultiTermQuery.RewriteMethod method, + QueryShardContext context) { + if (matches(value, context)) { + return Queries.newMatchAllQuery(); + } else { + return new MatchNoDocsQuery(); + } + } + +} diff --git a/server/src/main/java/org/elasticsearch/index/mapper/DateFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/DateFieldMapper.java index a42a07a3d9e0b..87d489d04b556 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/DateFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/DateFieldMapper.java @@ -49,6 +49,7 @@ import org.elasticsearch.index.fielddata.IndexFieldData; import org.elasticsearch.index.fielddata.IndexNumericFieldData.NumericType; import org.elasticsearch.index.fielddata.plain.DocValuesIndexFieldData; +import org.elasticsearch.index.query.DateRangeIncludingNowQuery; import org.elasticsearch.index.query.QueryRewriteContext; import org.elasticsearch.index.query.QueryShardContext; 
import org.elasticsearch.search.DocValueFormat; @@ -389,11 +390,16 @@ public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower DateMathParser parser = forcedDateParser == null ? dateMathParser : forcedDateParser; + boolean[] nowUsed = new boolean[1]; + LongSupplier nowSupplier = () -> { + nowUsed[0] = true; + return context.nowInMillis(); + }; long l, u; if (lowerTerm == null) { l = Long.MIN_VALUE; } else { - l = parseToLong(lowerTerm, !includeLower, timeZone, parser, context::nowInMillis); + l = parseToLong(lowerTerm, !includeLower, timeZone, parser, nowSupplier); if (includeLower == false) { ++l; } @@ -401,7 +407,7 @@ public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower if (upperTerm == null) { u = Long.MAX_VALUE; } else { - u = parseToLong(upperTerm, includeUpper, timeZone, parser, context::nowInMillis); + u = parseToLong(upperTerm, includeUpper, timeZone, parser, nowSupplier); if (includeUpper == false) { --u; } @@ -411,6 +417,9 @@ public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower Query dvQuery = SortedNumericDocValuesField.newSlowRangeQuery(name(), l, u); query = new IndexOrDocValuesQuery(query, dvQuery); } + if (nowUsed[0]) { + query = new DateRangeIncludingNowQuery(query); + } return query; } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/IndexFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/IndexFieldMapper.java index 88282fa685728..06dfe1bf273e2 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/IndexFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/IndexFieldMapper.java @@ -21,13 +21,7 @@ import org.apache.lucene.index.IndexOptions; import org.apache.lucene.index.IndexableField; -import org.apache.lucene.search.MatchAllDocsQuery; -import org.apache.lucene.search.MultiTermQuery; -import org.apache.lucene.search.Query; -import org.apache.lucene.util.BytesRef; -import 
org.elasticsearch.common.Nullable; import org.elasticsearch.common.lucene.Lucene; -import org.elasticsearch.common.lucene.search.Queries; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.index.fielddata.IndexFieldData; @@ -91,7 +85,7 @@ public MetadataFieldMapper getDefault(ParserContext context) { } } - static final class IndexFieldType extends MappedFieldType { + static final class IndexFieldType extends ConstantFieldType { IndexFieldType() {} @@ -110,81 +104,8 @@ public String typeName() { } @Override - public boolean isSearchable() { - // The _index field is always searchable. - return true; - } - - @Override - public Query existsQuery(QueryShardContext context) { - return new MatchAllDocsQuery(); - } - - /** - * This termQuery impl looks at the context to determine the index that - * is being queried and then returns a MATCH_ALL_QUERY or MATCH_NO_QUERY - * if the value matches this index. This can be useful if aliases or - * wildcards are used but the aim is to restrict the query to specific - * indices - */ - @Override - public Query termQuery(Object value, @Nullable QueryShardContext context) { - String pattern = value instanceof BytesRef - ? ((BytesRef) value).utf8ToString() - : value.toString(); - if (context.indexMatches(pattern)) { - // No need to OR these clauses - we can only logically be - // running in the context of just one of these index names. - return Queries.newMatchAllQuery(); - } else { - return Queries.newMatchNoDocsQuery("The index [" + context.getFullyQualifiedIndex().getName() + - "] doesn't match the provided value [" + value + "]."); - } - } - - @Override - public Query termsQuery(List values, QueryShardContext context) { - if (context == null) { - return super.termsQuery(values, context); - } - for (Object value : values) { - String pattern = value instanceof BytesRef - ? 
((BytesRef) value).utf8ToString() - : value.toString(); - if (context.indexMatches(pattern)) { - // No need to OR these clauses - we can only logically be - // running in the context of just one of these index names. - return Queries.newMatchAllQuery(); - } - } - // None of the listed index names are this one - return Queries.newMatchNoDocsQuery("The index [" + context.getFullyQualifiedIndex().getName() + - "] doesn't match the provided values [" + values + "]."); - } - - @Override - public Query prefixQuery(String value, - @Nullable MultiTermQuery.RewriteMethod method, - QueryShardContext context) { - String pattern = value + "*"; - if (context.indexMatches(pattern)) { - return Queries.newMatchAllQuery(); - } else { - return Queries.newMatchNoDocsQuery("The index [" + context.getFullyQualifiedIndex().getName() + - "] doesn't match the provided prefix [" + value + "]."); - } - } - - @Override - public Query wildcardQuery(String value, - @Nullable MultiTermQuery.RewriteMethod method, - QueryShardContext context) { - if (context.indexMatches(value)) { - return Queries.newMatchAllQuery(); - } else { - return Queries.newMatchNoDocsQuery("The index [" + context.getFullyQualifiedIndex().getName() - + "] doesn't match the provided pattern [" + value + "]."); - } + protected boolean matches(String pattern, QueryShardContext context) { + return context.indexMatches(pattern); } @Override diff --git a/server/src/main/java/org/elasticsearch/index/mapper/TextFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/TextFieldMapper.java index b237d7cbd82ee..895243d43faf3 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/TextFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/TextFieldMapper.java @@ -751,9 +751,10 @@ public static boolean hasGaps(TokenStream stream) throws IOException { @Override public IndexFieldData.Builder fielddataBuilder(String fullyQualifiedIndexName) { if (fielddata == false) { - throw new 
IllegalArgumentException("Fielddata is disabled on text fields by default. Set fielddata=true on [" + name() - + "] in order to load fielddata in memory by uninverting the inverted index. Note that this can however " - + "use significant memory. Alternatively use a keyword field instead."); + throw new IllegalArgumentException("Text fields are not optimised for operations that require per-document " + + "field data like aggregations and sorting, so these operations are disabled by default. Please use a " + + "keyword field instead. Alternatively, set fielddata=true on [" + name() + "] in order to load " + + "field data by uninverting the inverted index. Note that this can use significant memory."); } return new PagedBytesIndexFieldData.Builder(fielddataMinFrequency, fielddataMaxFrequency, fielddataMinSegmentSize); } diff --git a/server/src/main/java/org/elasticsearch/index/query/AbstractQueryBuilder.java b/server/src/main/java/org/elasticsearch/index/query/AbstractQueryBuilder.java index d5cc101ede8ad..2e027e21b8f5f 100644 --- a/server/src/main/java/org/elasticsearch/index/query/AbstractQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/AbstractQueryBuilder.java @@ -20,6 +20,7 @@ package org.elasticsearch.index.query; import org.apache.lucene.search.BoostQuery; +import org.apache.lucene.search.MatchNoDocsQuery; import org.apache.lucene.search.Query; import org.apache.lucene.search.spans.SpanBoostQuery; import org.apache.lucene.search.spans.SpanQuery; @@ -103,7 +104,7 @@ public final Query toQuery(QueryShardContext context) throws IOException { if (boost != DEFAULT_BOOST) { if (query instanceof SpanQuery) { query = new SpanBoostQuery((SpanQuery) query, boost); - } else { + } else if (query instanceof MatchNoDocsQuery == false) { query = new BoostQuery(query, boost); } } @@ -232,7 +233,7 @@ static Collection toQueries(Collection queryBuilders, Query IOException { List queries = new ArrayList<>(queryBuilders.size()); for (QueryBuilder 
queryBuilder : queryBuilders) { - Query query = queryBuilder.toQuery(context); + Query query = queryBuilder.rewrite(context).toQuery(context); if (query != null) { queries.add(query); } diff --git a/server/src/main/java/org/elasticsearch/index/query/DateRangeIncludingNowQuery.java b/server/src/main/java/org/elasticsearch/index/query/DateRangeIncludingNowQuery.java new file mode 100644 index 0000000000000..e3db9cf66d84f --- /dev/null +++ b/server/src/main/java/org/elasticsearch/index/query/DateRangeIncludingNowQuery.java @@ -0,0 +1,74 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.index.query; + +import org.apache.lucene.index.IndexReader; +import org.apache.lucene.search.BooleanClause; +import org.apache.lucene.search.Query; +import org.apache.lucene.search.QueryVisitor; + +import java.io.IOException; +import java.util.Objects; + +/** + * A simple wrapper class that indicates that the wrapped query has made use of NOW + * when parsing its datemath. Useful for preprocessors such as the percolator that + * need to know when not to extract dates from the query. 
+ */ +public class DateRangeIncludingNowQuery extends Query { + + private final Query in; + + public DateRangeIncludingNowQuery(Query in) { + this.in = in; + } + + public Query getQuery() { + return in; + } + + @Override + public Query rewrite(IndexReader reader) throws IOException { + return in; + } + + @Override + public String toString(String field) { + return "DateRangeIncludingNowQuery(" + in + ")"; + } + + @Override + public void visit(QueryVisitor visitor) { + in.visit(visitor.getSubVisitor(BooleanClause.Occur.MUST, this)); + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DateRangeIncludingNowQuery that = (DateRangeIncludingNowQuery) o; + return Objects.equals(in, that.in); + } + + @Override + public int hashCode() { + return Objects.hash(in); + } +} diff --git a/server/src/main/java/org/elasticsearch/index/query/FuzzyQueryBuilder.java b/server/src/main/java/org/elasticsearch/index/query/FuzzyQueryBuilder.java index 8df0fec044124..bd3048da29bb4 100644 --- a/server/src/main/java/org/elasticsearch/index/query/FuzzyQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/FuzzyQueryBuilder.java @@ -19,7 +19,6 @@ package org.elasticsearch.index.query; -import org.apache.lucene.index.Term; import org.apache.lucene.search.FuzzyQuery; import org.apache.lucene.search.MultiTermQuery; import org.apache.lucene.search.Query; @@ -28,7 +27,6 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.lucene.BytesRefs; import org.elasticsearch.common.unit.Fuzziness; import org.elasticsearch.common.xcontent.LoggingDeprecationHandler; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -322,18 +320,26 @@ public String getWriteableName() { return NAME; } + @Override + protected QueryBuilder doRewrite(QueryRewriteContext 
queryRewriteContext) throws IOException { + QueryShardContext context = queryRewriteContext.convertToShardContext(); + if (context != null) { + MappedFieldType fieldType = context.fieldMapper(fieldName); + if (fieldType == null) { + return new MatchNoneQueryBuilder(); + } + } + return super.doRewrite(context); + } + @Override protected Query doToQuery(QueryShardContext context) throws IOException { - Query query = null; - String rewrite = this.rewrite; MappedFieldType fieldType = context.fieldMapper(fieldName); - if (fieldType != null) { - query = fieldType.fuzzyQuery(value, fuzziness, prefixLength, maxExpansions, transpositions, context); - } - if (query == null) { - int maxEdits = fuzziness.asDistance(BytesRefs.toString(value)); - query = new FuzzyQuery(new Term(fieldName, BytesRefs.toBytesRef(value)), maxEdits, prefixLength, maxExpansions, transpositions); + if (fieldType == null) { + throw new IllegalStateException("Rewrite first"); } + String rewrite = this.rewrite; + Query query = fieldType.fuzzyQuery(value, fuzziness, prefixLength, maxExpansions, transpositions, context); if (query instanceof MultiTermQuery) { MultiTermQuery.RewriteMethod rewriteMethod = QueryParsers.parseRewriteMethod(rewrite, null, LoggingDeprecationHandler.INSTANCE); diff --git a/server/src/main/java/org/elasticsearch/index/query/IdsQueryBuilder.java b/server/src/main/java/org/elasticsearch/index/query/IdsQueryBuilder.java index 03a3fdfd646e6..721201cda094e 100644 --- a/server/src/main/java/org/elasticsearch/index/query/IdsQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/IdsQueryBuilder.java @@ -19,7 +19,6 @@ package org.elasticsearch.index.query; -import org.apache.lucene.search.MatchNoDocsQuery; import org.apache.lucene.search.Query; import org.elasticsearch.Version; import org.elasticsearch.common.ParseField; @@ -27,7 +26,6 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.StreamInput; import 
org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.lucene.search.Queries; import org.elasticsearch.common.xcontent.ObjectParser; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; @@ -136,17 +134,26 @@ public String getWriteableName() { return NAME; } + @Override + protected QueryBuilder doRewrite(QueryRewriteContext queryRewriteContext) throws IOException { + if (ids.isEmpty()) { + return new MatchNoneQueryBuilder(); + } + QueryShardContext context = queryRewriteContext.convertToShardContext(); + if (context != null && context.fieldMapper(IdFieldMapper.NAME) == null) { + // no mappings yet + return new MatchNoneQueryBuilder(); + } + return super.doRewrite(queryRewriteContext); + } + @Override protected Query doToQuery(QueryShardContext context) throws IOException { MappedFieldType idField = context.fieldMapper(IdFieldMapper.NAME); - if (idField == null) { - return new MatchNoDocsQuery("No mappings"); - } - if (this.ids.isEmpty()) { - return Queries.newMatchNoDocsQuery("Missing ids in \"" + this.getName() + "\" query."); - } else { - return idField.termsQuery(new ArrayList<>(ids), context); + if (idField == null || ids.isEmpty()) { + throw new IllegalStateException("Rewrite first"); } + return idField.termsQuery(new ArrayList<>(ids), context); } @Override diff --git a/server/src/main/java/org/elasticsearch/index/query/PrefixQueryBuilder.java b/server/src/main/java/org/elasticsearch/index/query/PrefixQueryBuilder.java index db596e2ecfc7b..44c8dd44b49c8 100644 --- a/server/src/main/java/org/elasticsearch/index/query/PrefixQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/PrefixQueryBuilder.java @@ -19,20 +19,20 @@ package org.elasticsearch.index.query; -import org.apache.lucene.index.Term; +import org.apache.lucene.search.MatchAllDocsQuery; +import org.apache.lucene.search.MatchNoDocsQuery; import org.apache.lucene.search.MultiTermQuery; -import 
org.apache.lucene.search.PrefixQuery; import org.apache.lucene.search.Query; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.lucene.BytesRefs; import org.elasticsearch.common.xcontent.LoggingDeprecationHandler; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.ConstantFieldType; import org.elasticsearch.index.query.support.QueryParsers; import java.io.IOException; @@ -171,14 +171,26 @@ public String getWriteableName() { @Override protected QueryBuilder doRewrite(QueryRewriteContext queryRewriteContext) throws IOException { - if ("_index".equals(fieldName)) { - // Special-case optimisation for canMatch phase: - // We can skip querying this shard if the index name doesn't match the value of this query on the "_index" field. - QueryShardContext shardContext = queryRewriteContext.convertToShardContext(); - if (shardContext != null && shardContext.indexMatches(value + "*") == false) { + QueryShardContext context = queryRewriteContext.convertToShardContext(); + if (context != null) { + MappedFieldType fieldType = context.fieldMapper(this.fieldName); + if (fieldType == null) { return new MatchNoneQueryBuilder(); - } + } else if (fieldType instanceof ConstantFieldType) { + // This logic is correct for all field types, but by only applying it to constant + // fields we also have the guarantee that it doesn't perform I/O, which is important + // since rewrites might happen on a network thread. 
+ Query query = fieldType.prefixQuery(value, null, context); // the rewrite method doesn't matter + if (query instanceof MatchAllDocsQuery) { + return new MatchAllQueryBuilder(); + } else if (query instanceof MatchNoDocsQuery) { + return new MatchNoneQueryBuilder(); + } else { + assert false : "Constant fields must produce match-all or match-none queries, got " + query ; + } + } } + return super.doRewrite(queryRewriteContext); } @@ -186,20 +198,11 @@ protected QueryBuilder doRewrite(QueryRewriteContext queryRewriteContext) throws protected Query doToQuery(QueryShardContext context) throws IOException { MultiTermQuery.RewriteMethod method = QueryParsers.parseRewriteMethod(rewrite, null, LoggingDeprecationHandler.INSTANCE); - Query query = null; MappedFieldType fieldType = context.fieldMapper(fieldName); - if (fieldType != null) { - query = fieldType.prefixQuery(value, method, context); + if (fieldType == null) { + throw new IllegalStateException("Rewrite first"); } - if (query == null) { - PrefixQuery prefixQuery = new PrefixQuery(new Term(fieldName, BytesRefs.toBytesRef(value))); - if (method != null) { - prefixQuery.setRewriteMethod(method); - } - query = prefixQuery; - } - - return query; + return fieldType.prefixQuery(value, method, context); } @Override diff --git a/server/src/main/java/org/elasticsearch/index/query/QueryRewriteContext.java b/server/src/main/java/org/elasticsearch/index/query/QueryRewriteContext.java index b44f0edfebbc4..ead32047e113d 100644 --- a/server/src/main/java/org/elasticsearch/index/query/QueryRewriteContext.java +++ b/server/src/main/java/org/elasticsearch/index/query/QueryRewriteContext.java @@ -125,15 +125,4 @@ public void onFailure(Exception e) { } } - /** - * In pre-processing contexts that happen at index time 'now' date ranges should be replaced by a {@link MatchAllQueryBuilder}. 
- * Otherwise documents that should match at query time would never match and the document that have fallen outside the - * date range would continue to match. - * - * @return indicates whether range queries with date ranges using 'now' are rewritten to a {@link MatchAllQueryBuilder}. - */ - public boolean convertNowRangeToMatchAll() { - return false; - } - } diff --git a/server/src/main/java/org/elasticsearch/index/query/RangeQueryBuilder.java b/server/src/main/java/org/elasticsearch/index/query/RangeQueryBuilder.java index 0219c782d06af..6cb1704611ba0 100644 --- a/server/src/main/java/org/elasticsearch/index/query/RangeQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/RangeQueryBuilder.java @@ -21,7 +21,6 @@ import org.apache.lucene.search.MatchNoDocsQuery; import org.apache.lucene.search.Query; -import org.apache.lucene.search.TermRangeQuery; import org.apache.lucene.util.BytesRef; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.ParsingException; @@ -29,14 +28,12 @@ import org.elasticsearch.common.geo.ShapeRelation; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.lucene.BytesRefs; import org.elasticsearch.common.time.DateFormatter; import org.elasticsearch.common.time.DateMathParser; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.index.mapper.FieldNamesFieldMapper; import org.elasticsearch.index.mapper.MappedFieldType; -import org.elasticsearch.index.mapper.MapperService; import java.io.IOException; import java.time.DateTimeException; @@ -432,35 +429,27 @@ public String getWriteableName() { // Overridable for testing only protected MappedFieldType.Relation getRelation(QueryRewriteContext queryRewriteContext) throws IOException { QueryShardContext shardContext = queryRewriteContext.convertToShardContext(); - // If the context 
is null we are not on the shard and cannot - // rewrite so just pretend there is an intersection so that the rewrite is a noop - if (shardContext == null || shardContext.getIndexReader() == null) { - return MappedFieldType.Relation.INTERSECTS; - } - final MapperService mapperService = shardContext.getMapperService(); - final MappedFieldType fieldType = mapperService.fieldType(fieldName); - if (fieldType == null) { - // no field means we have no values - return MappedFieldType.Relation.DISJOINT; - } else { + if (shardContext != null) { + final MappedFieldType fieldType = shardContext.fieldMapper(fieldName); + if (fieldType == null) { + return MappedFieldType.Relation.DISJOINT; + } + if (shardContext.getIndexReader() == null) { + // No reader, this may happen e.g. for percolator queries. + return MappedFieldType.Relation.INTERSECTS; + } + DateMathParser dateMathParser = getForceDateParser(); return fieldType.isFieldWithinQuery(shardContext.getIndexReader(), from, to, includeLower, includeUpper, timeZone, dateMathParser, queryRewriteContext); } + + // Not on the shard, we have no way to know what the relation is. + return MappedFieldType.Relation.INTERSECTS; } @Override protected QueryBuilder doRewrite(QueryRewriteContext queryRewriteContext) throws IOException { - // Percolator queries get rewritten and pre-processed at index time. - // If a range query has a date range using 'now' and 'now' gets resolved at index time then - // the pre-processing uses that to pre-process. This can then lead to mismatches at query time. 
- if (queryRewriteContext.convertNowRangeToMatchAll()) { - if ((from() != null && from().toString().contains("now")) || - (to() != null && to().toString().contains("now"))) { - return new MatchAllQueryBuilder(); - } - } - final MappedFieldType.Relation relation = getRelation(queryRewriteContext); switch (relation) { case DISJOINT: @@ -500,26 +489,14 @@ protected Query doToQuery(QueryShardContext context) throws IOException { return ExistsQueryBuilder.newFilter(context, fieldName); } } - Query query = null; MappedFieldType mapper = context.fieldMapper(this.fieldName); - if (mapper != null) { - DateMathParser forcedDateParser = getForceDateParser(); - query = mapper.rangeQuery( - from, to, includeLower, includeUpper, - relation, timeZone, forcedDateParser, context); - } else { - if (timeZone != null) { - throw new QueryShardException(context, "[range] time_zone can not be applied to non unmapped field [" - + fieldName + "]"); - } - } - - if (query == null) { - query = new TermRangeQuery(this.fieldName, - BytesRefs.toBytesRef(from), BytesRefs.toBytesRef(to), - includeLower, includeUpper); + if (mapper == null) { + throw new IllegalStateException("Rewrite first"); } - return query; + DateMathParser forcedDateParser = getForceDateParser(); + return mapper.rangeQuery( + from, to, includeLower, includeUpper, + relation, timeZone, forcedDateParser, context); } @Override diff --git a/server/src/main/java/org/elasticsearch/index/query/SpanMultiTermQueryBuilder.java b/server/src/main/java/org/elasticsearch/index/query/SpanMultiTermQueryBuilder.java index 49e5e53e1ed91..d55f9bceaa9bd 100644 --- a/server/src/main/java/org/elasticsearch/index/query/SpanMultiTermQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/SpanMultiTermQueryBuilder.java @@ -126,11 +126,16 @@ public static SpanMultiTermQueryBuilder fromXContent(XContentParser parser) thro @Override protected Query doToQuery(QueryShardContext context) throws IOException { - if (multiTermQueryBuilder 
instanceof PrefixQueryBuilder) { + // We do the rewrite in toQuery to not have to deal with the case when a multi-term builder rewrites to a non-multi-term + // builder. + QueryBuilder multiTermQueryBuilder = Rewriteable.rewrite(this.multiTermQueryBuilder, context); + if (multiTermQueryBuilder instanceof MatchNoneQueryBuilder) { + return new SpanMatchNoDocsQuery(this.multiTermQueryBuilder.fieldName(), "Inner query rewrote to match_none"); + } else if (multiTermQueryBuilder instanceof PrefixQueryBuilder) { PrefixQueryBuilder prefixBuilder = (PrefixQueryBuilder) multiTermQueryBuilder; - MappedFieldType fieldType = context.fieldMapper(multiTermQueryBuilder.fieldName()); + MappedFieldType fieldType = context.fieldMapper(prefixBuilder.fieldName()); if (fieldType == null) { - return new SpanMatchNoDocsQuery(multiTermQueryBuilder.fieldName(), "unknown field"); + throw new IllegalStateException("Rewrite first"); } final SpanMultiTermQueryWrapper.SpanRewriteMethod spanRewriteMethod; if (prefixBuilder.rewrite() != null) { @@ -159,7 +164,7 @@ protected Query doToQuery(QueryShardContext context) throws IOException { } } if (subQuery instanceof MatchNoDocsQuery) { - return new SpanMatchNoDocsQuery(multiTermQueryBuilder.fieldName(), subQuery.toString()); + return new SpanMatchNoDocsQuery(this.multiTermQueryBuilder.fieldName(), subQuery.toString()); } else if (subQuery instanceof MultiTermQuery == false) { throw new UnsupportedOperationException("unsupported inner query, should be " + MultiTermQuery.class.getName() + " but was " + subQuery.getClass().getName()); diff --git a/server/src/main/java/org/elasticsearch/index/query/TermQueryBuilder.java b/server/src/main/java/org/elasticsearch/index/query/TermQueryBuilder.java index 262bfb2c6b5b3..8a0118d26e8db 100644 --- a/server/src/main/java/org/elasticsearch/index/query/TermQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/TermQueryBuilder.java @@ -19,15 +19,15 @@ package org.elasticsearch.index.query; 
-import org.apache.lucene.index.Term; +import org.apache.lucene.search.MatchAllDocsQuery; +import org.apache.lucene.search.MatchNoDocsQuery; import org.apache.lucene.search.Query; -import org.apache.lucene.search.TermQuery; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.common.lucene.BytesRefs; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.ConstantFieldType; import java.io.IOException; @@ -132,28 +132,35 @@ public static TermQueryBuilder fromXContent(XContentParser parser) throws IOExce @Override protected QueryBuilder doRewrite(QueryRewriteContext queryRewriteContext) throws IOException { - if ("_index".equals(fieldName)) { - // Special-case optimisation for canMatch phase: - // We can skip querying this shard if the index name doesn't match the value of this query on the "_index" field. - QueryShardContext shardContext = queryRewriteContext.convertToShardContext(); - if (shardContext != null && shardContext.indexMatches(BytesRefs.toString(value)) == false) { + QueryShardContext context = queryRewriteContext.convertToShardContext(); + if (context != null) { + MappedFieldType fieldType = context.fieldMapper(this.fieldName); + if (fieldType == null) { return new MatchNoneQueryBuilder(); - } + } else if (fieldType instanceof ConstantFieldType) { + // This logic is correct for all field types, but by only applying it to constant + // fields we also have the guarantee that it doesn't perform I/O, which is important + // since rewrites might happen on a network thread. 
+ Query query = fieldType.termQuery(value, context); + if (query instanceof MatchAllDocsQuery) { + return new MatchAllQueryBuilder(); + } else if (query instanceof MatchNoDocsQuery) { + return new MatchNoneQueryBuilder(); + } else { + assert false : "Constant fields must produce match-all or match-none queries, got " + query ; + } + } } return super.doRewrite(queryRewriteContext); } @Override protected Query doToQuery(QueryShardContext context) throws IOException { - Query query = null; MappedFieldType mapper = context.fieldMapper(this.fieldName); - if (mapper != null) { - query = mapper.termQuery(this.value, context); - } - if (query == null) { - query = new TermQuery(new Term(this.fieldName, BytesRefs.toBytesRef(this.value))); + if (mapper == null) { + throw new IllegalStateException("Rewrite first"); } - return query; + return mapper.termQuery(this.value, context); } @Override diff --git a/server/src/main/java/org/elasticsearch/index/query/TermsQueryBuilder.java b/server/src/main/java/org/elasticsearch/index/query/TermsQueryBuilder.java index 2cececd041b18..a7a5dfc42c8cb 100644 --- a/server/src/main/java/org/elasticsearch/index/query/TermsQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/TermsQueryBuilder.java @@ -19,8 +19,9 @@ package org.elasticsearch.index.query; +import org.apache.lucene.search.MatchAllDocsQuery; +import org.apache.lucene.search.MatchNoDocsQuery; import org.apache.lucene.search.Query; -import org.apache.lucene.search.TermInSetQuery; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.BytesRefBuilder; import org.apache.lucene.util.SetOnce; @@ -33,13 +34,12 @@ import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.lucene.BytesRefs; -import org.elasticsearch.common.lucene.search.Queries; import org.elasticsearch.common.xcontent.XContentBuilder; import 
org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.support.XContentMapValues; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.ConstantFieldType; import org.elasticsearch.indices.TermsLookup; import java.io.IOException; @@ -417,12 +417,9 @@ public String getWriteableName() { @Override protected Query doToQuery(QueryShardContext context) throws IOException { - if (termsLookup != null || supplier != null) { + if (termsLookup != null || supplier != null || values == null || values.isEmpty()) { throw new UnsupportedOperationException("query must be rewritten first"); } - if (values == null || values.isEmpty()) { - return Queries.newMatchNoDocsQuery("No terms supplied for \"" + getName() + "\" query."); - } int maxTermsCount = context.getIndexSettings().getMaxTermsCount(); if (values.size() > maxTermsCount){ throw new IllegalArgumentException( @@ -431,16 +428,10 @@ protected Query doToQuery(QueryShardContext context) throws IOException { IndexSettings.MAX_TERMS_COUNT_SETTING.getKey() + "] index level setting."); } MappedFieldType fieldType = context.fieldMapper(fieldName); - - if (fieldType != null) { - return fieldType.termsQuery(values, context); - } else { - BytesRef[] filterValues = new BytesRef[values.size()]; - for (int i = 0; i < filterValues.length; i++) { - filterValues[i] = BytesRefs.toBytesRef(values.get(i)); - } - return new TermInSetQuery(fieldName, filterValues); + if (fieldType == null) { + throw new IllegalStateException("Rewrite first"); } + return fieldType.termsQuery(values, context); } private void fetch(TermsLookup termsLookup, Client client, ActionListener> actionListener) { @@ -482,21 +473,31 @@ protected QueryBuilder doRewrite(QueryRewriteContext queryRewriteContext) { }))); return new TermsQueryBuilder(this.fieldName, supplier::get); } - if ("_index".equals(this.fieldName) && values != null) { - // Special-case 
optimisation for canMatch phase: - // We can skip querying this shard if the index name doesn't match any of the search terms. - QueryShardContext shardContext = queryRewriteContext.convertToShardContext(); - if (shardContext != null) { - for (Object localValue : values) { - if (shardContext.indexMatches(BytesRefs.toString(localValue))) { - // We can match - at least one index name matches - return this; - } - } - // all index names are invalid - no possibility of a match on this shard. + + if (values == null || values.isEmpty()) { + return new MatchNoneQueryBuilder(); + } + + QueryShardContext context = queryRewriteContext.convertToShardContext(); + if (context != null) { + MappedFieldType fieldType = context.fieldMapper(this.fieldName); + if (fieldType == null) { return new MatchNoneQueryBuilder(); + } else if (fieldType instanceof ConstantFieldType) { + // This logic is correct for all field types, but by only applying it to constant + // fields we also have the guarantee that it doesn't perform I/O, which is important + // since rewrites might happen on a network thread. 
+ Query query = fieldType.termsQuery(values, context); + if (query instanceof MatchAllDocsQuery) { + return new MatchAllQueryBuilder(); + } else if (query instanceof MatchNoDocsQuery) { + return new MatchNoneQueryBuilder(); + } else { + assert false : "Constant fields must produce match-all or match-none queries, got " + query ; + } } } + return this; } } diff --git a/server/src/main/java/org/elasticsearch/index/query/WildcardQueryBuilder.java b/server/src/main/java/org/elasticsearch/index/query/WildcardQueryBuilder.java index 115fa8d476dfd..39ca3b0a45b6f 100644 --- a/server/src/main/java/org/elasticsearch/index/query/WildcardQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/WildcardQueryBuilder.java @@ -19,6 +19,7 @@ package org.elasticsearch.index.query; +import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.search.MatchNoDocsQuery; import org.apache.lucene.search.MultiTermQuery; import org.apache.lucene.search.Query; @@ -27,11 +28,11 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.lucene.BytesRefs; import org.elasticsearch.common.xcontent.LoggingDeprecationHandler; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.ConstantFieldType; import org.elasticsearch.index.query.support.QueryParsers; import java.io.IOException; @@ -182,14 +183,26 @@ public static WildcardQueryBuilder fromXContent(XContentParser parser) throws IO @Override protected QueryBuilder doRewrite(QueryRewriteContext queryRewriteContext) throws IOException { - if ("_index".equals(fieldName)) { - // Special-case optimisation for canMatch phase: - // We can skip querying this shard if the index name doesn't match the value of this query on the "_index" field. 
- QueryShardContext shardContext = queryRewriteContext.convertToShardContext(); - if (shardContext != null && shardContext.indexMatches(BytesRefs.toString(value)) == false) { + QueryShardContext context = queryRewriteContext.convertToShardContext(); + if (context != null) { + MappedFieldType fieldType = context.fieldMapper(this.fieldName); + if (fieldType == null) { return new MatchNoneQueryBuilder(); - } + } else if (fieldType instanceof ConstantFieldType) { + // This logic is correct for all field types, but by only applying it to constant + // fields we also have the guarantee that it doesn't perform I/O, which is important + // since rewrites might happen on a network thread. + Query query = fieldType.wildcardQuery(value, null, context); // the rewrite method doesn't matter + if (query instanceof MatchAllDocsQuery) { + return new MatchAllQueryBuilder(); + } else if (query instanceof MatchNoDocsQuery) { + return new MatchNoneQueryBuilder(); + } else { + assert false : "Constant fields must produce match-all or match-none queries, got " + query ; + } + } } + return super.doRewrite(queryRewriteContext); } @@ -198,7 +211,7 @@ protected Query doToQuery(QueryShardContext context) throws IOException { MappedFieldType fieldType = context.fieldMapper(fieldName); if (fieldType == null) { - return new MatchNoDocsQuery("unknown field [" + fieldName + "]"); + throw new IllegalStateException("Rewrite first"); } MultiTermQuery.RewriteMethod method = QueryParsers.parseRewriteMethod( diff --git a/server/src/main/java/org/elasticsearch/node/Node.java b/server/src/main/java/org/elasticsearch/node/Node.java index 20cd90b7b28bd..c5eb0cdda3642 100644 --- a/server/src/main/java/org/elasticsearch/node/Node.java +++ b/server/src/main/java/org/elasticsearch/node/Node.java @@ -265,21 +265,21 @@ public Node(Environment environment) { /** * Constructs a node * - * @param environment the environment for this node + * @param initialEnvironment the initial environment for this node, which 
will be added to by plugins * @param classpathPlugins the plugins to be loaded from the classpath * @param forbidPrivateIndexSettings whether or not private index settings are forbidden when creating an index; this is used in the * test framework for tests that rely on being able to set private settings */ - protected Node( - final Environment environment, Collection> classpathPlugins, boolean forbidPrivateIndexSettings) { + protected Node(final Environment initialEnvironment, + Collection> classpathPlugins, boolean forbidPrivateIndexSettings) { logger = LogManager.getLogger(Node.class); final List resourcesToClose = new ArrayList<>(); // register everything we need to release in the case of an error boolean success = false; try { - Settings tmpSettings = Settings.builder().put(environment.settings()) + Settings tmpSettings = Settings.builder().put(initialEnvironment.settings()) .put(Client.CLIENT_TYPE_SETTING_S.getKey(), CLIENT_TYPE).build(); - nodeEnvironment = new NodeEnvironment(tmpSettings, environment); + nodeEnvironment = new NodeEnvironment(tmpSettings, initialEnvironment); resourcesToClose.add(nodeEnvironment); logger.info("node name [{}], node ID [{}], cluster name [{}]", NODE_NAME_SETTING.get(tmpSettings), nodeEnvironment.nodeId(), @@ -311,11 +311,12 @@ protected Node( if (logger.isDebugEnabled()) { logger.debug("using config [{}], data [{}], logs [{}], plugins [{}]", - environment.configFile(), Arrays.toString(environment.dataFiles()), environment.logsFile(), environment.pluginsFile()); + initialEnvironment.configFile(), Arrays.toString(initialEnvironment.dataFiles()), + initialEnvironment.logsFile(), initialEnvironment.pluginsFile()); } - this.pluginsService = new PluginsService(tmpSettings, environment.configFile(), environment.modulesFile(), - environment.pluginsFile(), classpathPlugins); + this.pluginsService = new PluginsService(tmpSettings, initialEnvironment.configFile(), initialEnvironment.modulesFile(), + initialEnvironment.pluginsFile(), 
classpathPlugins); final Settings settings = pluginsService.updatedSettings(); final Set possibleRoles = Stream.concat( DiscoveryNodeRole.BUILT_IN_ROLES.stream(), @@ -329,8 +330,8 @@ protected Node( // create the environment based on the finalized (processed) view of the settings // this is just to makes sure that people get the same settings, no matter where they ask them from - this.environment = new Environment(settings, environment.configFile()); - Environment.assertEquivalent(environment, this.environment); + this.environment = new Environment(settings, initialEnvironment.configFile()); + Environment.assertEquivalent(initialEnvironment, this.environment); final List> executorBuilders = pluginsService.getExecutorBuilders(settings); diff --git a/server/src/main/java/org/elasticsearch/plugins/SearchPlugin.java b/server/src/main/java/org/elasticsearch/plugins/SearchPlugin.java index 4e654262912c5..e67fd5f143a1d 100644 --- a/server/src/main/java/org/elasticsearch/plugins/SearchPlugin.java +++ b/server/src/main/java/org/elasticsearch/plugins/SearchPlugin.java @@ -338,7 +338,8 @@ public Map> getResultRea /** * Specification for a {@link PipelineAggregator}. 
*/ - class PipelineAggregationSpec extends SearchExtensionSpec { + class PipelineAggregationSpec extends SearchExtensionSpec> { private final Map> resultReaders = new TreeMap<>(); private final Writeable.Reader aggregatorReader; @@ -356,7 +357,7 @@ class PipelineAggregationSpec extends SearchExtensionSpec builderReader, Writeable.Reader aggregatorReader, - PipelineAggregator.Parser parser) { + ContextParser parser) { super(name, builderReader, parser); this.aggregatorReader = aggregatorReader; } @@ -375,11 +376,52 @@ public PipelineAggregationSpec(ParseField name, public PipelineAggregationSpec(String name, Writeable.Reader builderReader, Writeable.Reader aggregatorReader, - PipelineAggregator.Parser parser) { + ContextParser parser) { super(name, builderReader, parser); this.aggregatorReader = aggregatorReader; } + /** + * Specification of a {@link PipelineAggregator}. + * + * @param name holds the names by which this aggregation might be parsed. The {@link ParseField#getPreferredName()} is special as it + * is the name by under which the readers are registered. So it is the name that the {@link PipelineAggregationBuilder} and + * {@link PipelineAggregator} should return from {@link NamedWriteable#getWriteableName()}. + * @param builderReader the reader registered for this aggregation's builder. Typically a reference to a constructor that takes a + * {@link StreamInput} + * @param aggregatorReader reads the {@link PipelineAggregator} from a stream + * @param parser reads the aggregation builder from XContent + * @deprecated prefer the ctor that takes a {@link ContextParser} + */ + @Deprecated + public PipelineAggregationSpec(ParseField name, + Writeable.Reader builderReader, + Writeable.Reader aggregatorReader, + PipelineAggregator.Parser parser) { + super(name, builderReader, (p, n) -> parser.parse(n, p)); + this.aggregatorReader = aggregatorReader; + } + + /** + * Specification of a {@link PipelineAggregator}. 
+ * + * @param name name by which this aggregation might be parsed or deserialized. Make sure it is the name that the + * {@link PipelineAggregationBuilder} and {@link PipelineAggregator} should return from + * {@link NamedWriteable#getWriteableName()}. + * @param builderReader the reader registered for this aggregation's builder. Typically a reference to a constructor that takes a + * {@link StreamInput} + * @param aggregatorReader reads the {@link PipelineAggregator} from a stream + * @deprecated prefer the ctor that takes a {@link ContextParser} + */ + @Deprecated + public PipelineAggregationSpec(String name, + Writeable.Reader builderReader, + Writeable.Reader aggregatorReader, + PipelineAggregator.Parser parser) { + super(name, builderReader, (p, n) -> parser.parse(n, p)); + this.aggregatorReader = aggregatorReader; + } + /** * Add a reader for the shard level results of the aggregation with {@linkplain #getName()}'s {@link ParseField#getPreferredName()} * as the {@link NamedWriteable#getWriteableName()}. 
diff --git a/server/src/main/java/org/elasticsearch/search/SearchModule.java b/server/src/main/java/org/elasticsearch/search/SearchModule.java index 5af7ccd6709fe..a9173a3f79fa3 100644 --- a/server/src/main/java/org/elasticsearch/search/SearchModule.java +++ b/server/src/main/java/org/elasticsearch/search/SearchModule.java @@ -504,7 +504,7 @@ private void registerPipelineAggregations(List plugins) { BucketScriptPipelineAggregationBuilder.NAME, BucketScriptPipelineAggregationBuilder::new, BucketScriptPipelineAggregator::new, - (name, p) -> BucketScriptPipelineAggregationBuilder.PARSER.parse(p, name))); + BucketScriptPipelineAggregationBuilder.PARSER)); registerPipelineAggregation(new PipelineAggregationSpec( BucketSelectorPipelineAggregationBuilder.NAME, BucketSelectorPipelineAggregationBuilder::new, @@ -524,16 +524,14 @@ private void registerPipelineAggregations(List plugins) { MovFnPipelineAggregationBuilder.NAME, MovFnPipelineAggregationBuilder::new, MovFnPipelineAggregator::new, - (name, p) -> MovFnPipelineAggregationBuilder.PARSER.parse(p, name))); + MovFnPipelineAggregationBuilder.PARSER)); registerFromPlugin(plugins, SearchPlugin::getPipelineAggregations, this::registerPipelineAggregation); } private void registerPipelineAggregation(PipelineAggregationSpec spec) { - namedXContents.add(new NamedXContentRegistry.Entry(BaseAggregationBuilder.class, spec.getName(), (p, c) -> { - String name = (String) c; - return spec.getParser().parse(name, p); - })); + namedXContents.add(new NamedXContentRegistry.Entry(BaseAggregationBuilder.class, spec.getName(), + (p, c) -> spec.getParser().parse(p, (String) c))); namedWriteables.add( new NamedWriteableRegistry.Entry(PipelineAggregationBuilder.class, spec.getName().getPreferredName(), spec.getReader())); namedWriteables.add( diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/SignificantTermsAggregationBuilder.java 
b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/SignificantTermsAggregationBuilder.java index 81d1b0445b3e2..45a755d07b286 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/SignificantTermsAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/SignificantTermsAggregationBuilder.java @@ -26,6 +26,7 @@ import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.index.query.QueryBuilder; +import org.elasticsearch.index.query.QueryRewriteContext; import org.elasticsearch.index.query.QueryShardContext; import org.elasticsearch.search.aggregations.AggregationBuilder; import org.elasticsearch.search.aggregations.AggregatorFactories.Builder; @@ -127,10 +128,23 @@ protected SignificantTermsAggregationBuilder(SignificantTermsAggregationBuilder } @Override - protected AggregationBuilder shallowCopy(Builder factoriesBuilder, Map metaData) { + protected SignificantTermsAggregationBuilder shallowCopy(Builder factoriesBuilder, Map metaData) { return new SignificantTermsAggregationBuilder(this, factoriesBuilder, metaData); } + @Override + protected AggregationBuilder doRewrite(QueryRewriteContext queryShardContext) throws IOException { + if (filterBuilder != null) { + QueryBuilder rewrittenFilter = filterBuilder.rewrite(queryShardContext); + if (rewrittenFilter != filterBuilder) { + SignificantTermsAggregationBuilder rewritten = shallowCopy(factoriesBuilder, metaData); + rewritten.backgroundFilter(rewrittenFilter); + return rewritten; + } + } + return super.doRewrite(queryShardContext); + } + @Override protected void innerWriteTo(StreamOutput out) throws IOException { bucketCountThresholds.writeTo(out); diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/PercentilesAggregationBuilder.java 
b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/PercentilesAggregationBuilder.java index 851304a4c63ce..ca6b13ac5a421 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/PercentilesAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/PercentilesAggregationBuilder.java @@ -99,11 +99,17 @@ private static double[] validatePercentiles(double[] percents, String aggName) { throw new IllegalArgumentException("[percents] must not be empty: [" + aggName + "]"); } double[] sortedPercents = Arrays.copyOf(percents, percents.length); + double previousPercent = -1.0; Arrays.sort(sortedPercents); for (double percent : sortedPercents) { if (percent < 0.0 || percent > 100.0) { throw new IllegalArgumentException("percent must be in [0,100], got [" + percent + "]: [" + aggName + "]"); } + + if (percent == previousPercent) { + throw new IllegalArgumentException("percent [" + percent + "] has been specified twice: [" + aggName + "]"); + } + previousPercent = percent; } return sortedPercents; } diff --git a/server/src/main/java/org/elasticsearch/transport/ProxyConnectionStrategy.java b/server/src/main/java/org/elasticsearch/transport/ProxyConnectionStrategy.java index cd3983781d0f8..5ce6de7998801 100644 --- a/server/src/main/java/org/elasticsearch/transport/ProxyConnectionStrategy.java +++ b/server/src/main/java/org/elasticsearch/transport/ProxyConnectionStrategy.java @@ -53,7 +53,7 @@ public class ProxyConnectionStrategy extends RemoteConnectionStrategy { /** * The remote address for the proxy. The connections will be opened to the configured address. 
*/ - public static final Setting.AffixSetting REMOTE_CLUSTER_ADDRESSES = Setting.affixKeySetting( + public static final Setting.AffixSetting PROXY_ADDRESS = Setting.affixKeySetting( "cluster.remote.", "proxy_address", (ns, key) -> Setting.simpleString(key, new StrategyValidator<>(ns, key, ConnectionStrategy.PROXY, s -> { @@ -99,7 +99,7 @@ public class ProxyConnectionStrategy extends RemoteConnectionStrategy { transportService, connectionManager, REMOTE_SOCKET_CONNECTIONS.getConcreteSettingForNamespace(clusterAlias).get(settings), - REMOTE_CLUSTER_ADDRESSES.getConcreteSettingForNamespace(clusterAlias).get(settings), + PROXY_ADDRESS.getConcreteSettingForNamespace(clusterAlias).get(settings), SERVER_NAME.getConcreteSettingForNamespace(clusterAlias).get(settings)); } @@ -141,7 +141,7 @@ public class ProxyConnectionStrategy extends RemoteConnectionStrategy { } static Stream> enablementSettings() { - return Stream.of(ProxyConnectionStrategy.REMOTE_CLUSTER_ADDRESSES); + return Stream.of(ProxyConnectionStrategy.PROXY_ADDRESS); } static Writeable.Reader infoReader() { @@ -155,7 +155,7 @@ protected boolean shouldOpenMoreConnections() { @Override protected boolean strategyMustBeRebuilt(Settings newSettings) { - String address = REMOTE_CLUSTER_ADDRESSES.getConcreteSettingForNamespace(clusterAlias).get(newSettings); + String address = PROXY_ADDRESS.getConcreteSettingForNamespace(clusterAlias).get(newSettings); int numOfSockets = REMOTE_SOCKET_CONNECTIONS.getConcreteSettingForNamespace(clusterAlias).get(newSettings); return numOfSockets != maxNumConnections || configuredAddress.equals(address) == false; } diff --git a/server/src/main/java/org/elasticsearch/transport/RemoteClusterAware.java b/server/src/main/java/org/elasticsearch/transport/RemoteClusterAware.java index 08c819c70a47c..258910bb55716 100644 --- a/server/src/main/java/org/elasticsearch/transport/RemoteClusterAware.java +++ b/server/src/main/java/org/elasticsearch/transport/RemoteClusterAware.java @@ -109,7 +109,7 @@ 
public void listenForUpdates(ClusterSettings clusterSettings) { SniffConnectionStrategy.REMOTE_CLUSTERS_PROXY, SniffConnectionStrategy.REMOTE_CLUSTER_SEEDS, SniffConnectionStrategy.REMOTE_NODE_CONNECTIONS, - ProxyConnectionStrategy.REMOTE_CLUSTER_ADDRESSES, + ProxyConnectionStrategy.PROXY_ADDRESS, ProxyConnectionStrategy.REMOTE_SOCKET_CONNECTIONS, ProxyConnectionStrategy.SERVER_NAME); clusterSettings.addAffixGroupUpdateConsumer(remoteClusterSettings, this::validateAndUpdateRemoteCluster); diff --git a/server/src/main/java/org/elasticsearch/transport/RemoteClusterService.java b/server/src/main/java/org/elasticsearch/transport/RemoteClusterService.java index 779d4deacd45a..1d6b6a12af0c0 100644 --- a/server/src/main/java/org/elasticsearch/transport/RemoteClusterService.java +++ b/server/src/main/java/org/elasticsearch/transport/RemoteClusterService.java @@ -40,8 +40,10 @@ import java.io.Closeable; import java.io.IOException; +import java.util.Arrays; import java.util.Collection; import java.util.HashMap; +import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Set; @@ -95,24 +97,24 @@ public final class RemoteClusterService extends RemoteClusterAware implements Cl Setting.affixKeySetting( "cluster.remote.", "skip_unavailable", - key -> boolSetting( + (ns, key) -> boolSetting( key, false, + new RemoteConnectionEnabled<>(ns, key), Setting.Property.Dynamic, - Setting.Property.NodeScope), - () -> SniffConnectionStrategy.REMOTE_CLUSTER_SEEDS); + Setting.Property.NodeScope)); public static final Setting.AffixSetting REMOTE_CLUSTER_PING_SCHEDULE = Setting.affixKeySetting( "cluster.remote.", "transport.ping_schedule", - key -> timeSetting(key, TransportSettings.PING_SCHEDULE, Setting.Property.Dynamic, Setting.Property.NodeScope), - () -> SniffConnectionStrategy.REMOTE_CLUSTER_SEEDS); + (ns, key) -> timeSetting(key, TransportSettings.PING_SCHEDULE, new RemoteConnectionEnabled<>(ns, key), + Setting.Property.Dynamic, Setting.Property.NodeScope)); 
public static final Setting.AffixSetting REMOTE_CLUSTER_COMPRESS = Setting.affixKeySetting( "cluster.remote.", "transport.compress", - key -> boolSetting(key, TransportSettings.TRANSPORT_COMPRESS, Setting.Property.Dynamic, Setting.Property.NodeScope), - () -> SniffConnectionStrategy.REMOTE_CLUSTER_SEEDS); + (ns, key) -> boolSetting(key, TransportSettings.TRANSPORT_COMPRESS, + new RemoteConnectionEnabled<>(ns, key), Setting.Property.Dynamic, Setting.Property.NodeScope)); private final TransportService transportService; private final Map remoteClusters = ConcurrentCollections.newConcurrentMap(); @@ -386,4 +388,38 @@ public Client getRemoteClusterClient(ThreadPool threadPool, String clusterAlias) Collection getConnections() { return remoteClusters.values(); } + + private static class RemoteConnectionEnabled implements Setting.Validator { + + private final String clusterAlias; + private final String key; + + private RemoteConnectionEnabled(String clusterAlias, String key) { + this.clusterAlias = clusterAlias; + this.key = key; + } + + @Override + public void validate(T value) { + } + + @Override + public void validate(T value, Map, Object> settings, boolean isPresent) { + if (isPresent && RemoteConnectionStrategy.isConnectionEnabled(clusterAlias, settings) == false) { + throw new IllegalArgumentException("Cannot configure setting [" + key + "] if remote cluster is not enabled."); + } + } + + @Override + public Iterator> settings() { + return Stream.concat(Stream.of(RemoteConnectionStrategy.REMOTE_CONNECTION_MODE.getConcreteSettingForNamespace(clusterAlias)), + settingsStream()).iterator(); + } + + private Stream> settingsStream() { + return Arrays.stream(RemoteConnectionStrategy.ConnectionStrategy.values()) + .flatMap(strategy -> strategy.getEnablementSettings().get()) + .map(as -> as.getConcreteSettingForNamespace(clusterAlias)); + } + }; } diff --git a/server/src/main/java/org/elasticsearch/transport/RemoteConnectionInfo.java 
b/server/src/main/java/org/elasticsearch/transport/RemoteConnectionInfo.java index 152cafccb61e0..3c421c79d9db3 100644 --- a/server/src/main/java/org/elasticsearch/transport/RemoteConnectionInfo.java +++ b/server/src/main/java/org/elasticsearch/transport/RemoteConnectionInfo.java @@ -51,8 +51,7 @@ public RemoteConnectionInfo(String clusterAlias, ModeInfo modeInfo, TimeValue in } public RemoteConnectionInfo(StreamInput input) throws IOException { - // TODO: Change to 7.6 after backport - if (input.getVersion().onOrAfter(Version.V_8_0_0)) { + if (input.getVersion().onOrAfter(Version.V_7_6_0)) { RemoteConnectionStrategy.ConnectionStrategy mode = input.readEnum(RemoteConnectionStrategy.ConnectionStrategy.class); modeInfo = mode.getReader().read(input); initialConnectionTimeout = input.readTimeValue(); @@ -91,8 +90,7 @@ public boolean isSkipUnavailable() { @Override public void writeTo(StreamOutput out) throws IOException { - // TODO: Change to 7.6 after backport - if (out.getVersion().onOrAfter(Version.V_8_0_0)) { + if (out.getVersion().onOrAfter(Version.V_7_6_0)) { out.writeEnum(modeInfo.modeType()); modeInfo.writeTo(out); out.writeTimeValue(initialConnectionTimeout); diff --git a/server/src/main/java/org/elasticsearch/transport/RemoteConnectionStrategy.java b/server/src/main/java/org/elasticsearch/transport/RemoteConnectionStrategy.java index d198224d0de33..ad6b80ba49b9e 100644 --- a/server/src/main/java/org/elasticsearch/transport/RemoteConnectionStrategy.java +++ b/server/src/main/java/org/elasticsearch/transport/RemoteConnectionStrategy.java @@ -88,6 +88,10 @@ public int getNumberOfChannels() { return numberOfChannels; } + public Supplier>> getEnablementSettings() { + return enablementSettings; + } + public Writeable.Reader getReader() { return reader.get(); } @@ -149,7 +153,7 @@ static RemoteConnectionStrategy buildStrategy(String clusterAlias, TransportServ static Set getRemoteClusters(Settings settings) { final Stream> enablementSettings = 
Arrays.stream(ConnectionStrategy.values()) - .flatMap(strategy -> strategy.enablementSettings.get()); + .flatMap(strategy -> strategy.getEnablementSettings().get()); return enablementSettings.flatMap(s -> getClusterAlias(settings, s)).collect(Collectors.toSet()); } @@ -159,7 +163,21 @@ public static boolean isConnectionEnabled(String clusterAlias, Settings settings List seeds = SniffConnectionStrategy.REMOTE_CLUSTER_SEEDS.getConcreteSettingForNamespace(clusterAlias).get(settings); return seeds.isEmpty() == false; } else { - String address = ProxyConnectionStrategy.REMOTE_CLUSTER_ADDRESSES.getConcreteSettingForNamespace(clusterAlias).get(settings); + String address = ProxyConnectionStrategy.PROXY_ADDRESS.getConcreteSettingForNamespace(clusterAlias).get(settings); + return Strings.isEmpty(address) == false; + } + } + + @SuppressWarnings("unchecked") + public static boolean isConnectionEnabled(String clusterAlias, Map, Object> settings) { + ConnectionStrategy mode = (ConnectionStrategy) settings.get(REMOTE_CONNECTION_MODE.getConcreteSettingForNamespace(clusterAlias)); + if (mode.equals(ConnectionStrategy.SNIFF)) { + List seeds = (List) settings.get(SniffConnectionStrategy.REMOTE_CLUSTER_SEEDS + .getConcreteSettingForNamespace(clusterAlias)); + return seeds.isEmpty() == false; + } else { + String address = (String) settings.get(ProxyConnectionStrategy.PROXY_ADDRESS + .getConcreteSettingForNamespace(clusterAlias)); return Strings.isEmpty(address) == false; } } diff --git a/server/src/test/java/org/elasticsearch/action/termvectors/AbstractTermVectorsTestCase.java b/server/src/test/java/org/elasticsearch/action/termvectors/AbstractTermVectorsTestCase.java index 212ae30da6faf..0f3ffc2448ef4 100644 --- a/server/src/test/java/org/elasticsearch/action/termvectors/AbstractTermVectorsTestCase.java +++ b/server/src/test/java/org/elasticsearch/action/termvectors/AbstractTermVectorsTestCase.java @@ -45,8 +45,8 @@ import org.apache.lucene.search.ScoreDoc; import 
org.apache.lucene.search.TermQuery; import org.apache.lucene.search.TopDocs; +import org.apache.lucene.store.ByteBuffersDirectory; import org.apache.lucene.store.Directory; -import org.apache.lucene.store.RAMDirectory; import org.elasticsearch.action.admin.indices.alias.Alias; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -308,7 +308,7 @@ protected TokenStreamComponents createComponents(String fieldName) { } PerFieldAnalyzerWrapper wrapper = new PerFieldAnalyzerWrapper(new StandardAnalyzer(CharArraySet.EMPTY_SET), mapping); - Directory dir = new RAMDirectory(); + Directory dir = new ByteBuffersDirectory(); IndexWriterConfig conf = new IndexWriterConfig(wrapper); conf.setOpenMode(IndexWriterConfig.OpenMode.CREATE); diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/GroupShardsIteratorTests.java b/server/src/test/java/org/elasticsearch/cluster/routing/GroupShardsIteratorTests.java index f7fe59e501b33..45c57a0cdce84 100644 --- a/server/src/test/java/org/elasticsearch/cluster/routing/GroupShardsIteratorTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/routing/GroupShardsIteratorTests.java @@ -69,7 +69,7 @@ public void testSize() { ShardId shardId = new ShardId(index, 1); list.add(new PlainShardIterator(shardId, randomShardRoutings(shardId, 0))); } - GroupShardsIterator iter = new GroupShardsIterator<>(list); + GroupShardsIterator iter = new GroupShardsIterator<>(list); assertEquals(7, iter.totalSizeWith1ForEmpty()); assertEquals(5, iter.size()); assertEquals(6, iter.totalSize()); @@ -106,13 +106,24 @@ public void testIterate() { } Collections.shuffle(list, random()); - List actualIterators = new ArrayList<>(); - GroupShardsIterator iter = new GroupShardsIterator<>(list); - for (ShardIterator shardsIterator : iter) { - actualIterators.add(shardsIterator); + { + GroupShardsIterator unsorted = new GroupShardsIterator<>(list); + GroupShardsIterator iter = new 
GroupShardsIterator<>(list); + List actualIterators = new ArrayList<>(); + for (ShardIterator shardsIterator : iter) { + actualIterators.add(shardsIterator); + } + assertEquals(actualIterators, list); + } + { + GroupShardsIterator iter = GroupShardsIterator.sortAndCreate(list); + List actualIterators = new ArrayList<>(); + for (ShardIterator shardsIterator : iter) { + actualIterators.add(shardsIterator); + } + CollectionUtil.timSort(actualIterators); + assertEquals(actualIterators, list); } - CollectionUtil.timSort(actualIterators); - assertEquals(actualIterators, list); } public void testOrderingWithSearchShardIterators() { @@ -123,7 +134,7 @@ public void testOrderingWithSearchShardIterators() { String[] clusters = generateRandomStringArray(5, 10, false, false); Arrays.sort(clusters); - List expected = new ArrayList<>(); + List sorted = new ArrayList<>(); int numShards = randomIntBetween(1, 10); for (int i = 0; i < numShards; i++) { for (String index : indices) { @@ -131,23 +142,33 @@ public void testOrderingWithSearchShardIterators() { ShardId shardId = new ShardId(index, uuid, i); SearchShardIterator shardIterator = new SearchShardIterator(null, shardId, GroupShardsIteratorTests.randomShardRoutings(shardId), OriginalIndicesTests.randomOriginalIndices()); - expected.add(shardIterator); + sorted.add(shardIterator); for (String cluster : clusters) { SearchShardIterator remoteIterator = new SearchShardIterator(cluster, shardId, GroupShardsIteratorTests.randomShardRoutings(shardId), OriginalIndicesTests.randomOriginalIndices()); - expected.add(remoteIterator); + sorted.add(remoteIterator); } } } } - List shuffled = new ArrayList<>(expected); + List shuffled = new ArrayList<>(sorted); Collections.shuffle(shuffled, random()); - List actualIterators = new ArrayList<>(); - GroupShardsIterator iter = new GroupShardsIterator<>(shuffled); - for (SearchShardIterator searchShardIterator : iter) { - actualIterators.add(searchShardIterator); + { + List actualIterators = new 
ArrayList<>(); + GroupShardsIterator iter = new GroupShardsIterator<>(shuffled); + for (SearchShardIterator searchShardIterator : iter) { + actualIterators.add(searchShardIterator); + } + assertEquals(shuffled, actualIterators); + } + { + List actualIterators = new ArrayList<>(); + GroupShardsIterator iter = GroupShardsIterator.sortAndCreate(shuffled); + for (SearchShardIterator searchShardIterator : iter) { + actualIterators.add(searchShardIterator); + } + assertEquals(sorted, actualIterators); } - assertEquals(expected, actualIterators); } } diff --git a/server/src/test/java/org/elasticsearch/common/lucene/search/MultiPhrasePrefixQueryTests.java b/server/src/test/java/org/elasticsearch/common/lucene/search/MultiPhrasePrefixQueryTests.java index f0d4c88e01c19..5803ab8f2b99a 100644 --- a/server/src/test/java/org/elasticsearch/common/lucene/search/MultiPhrasePrefixQueryTests.java +++ b/server/src/test/java/org/elasticsearch/common/lucene/search/MultiPhrasePrefixQueryTests.java @@ -28,7 +28,7 @@ import org.apache.lucene.index.IndexWriterConfig; import org.apache.lucene.index.Term; import org.apache.lucene.search.IndexSearcher; -import org.apache.lucene.store.RAMDirectory; +import org.apache.lucene.store.ByteBuffersDirectory; import org.elasticsearch.common.lucene.Lucene; import org.elasticsearch.test.ESTestCase; @@ -36,7 +36,7 @@ public class MultiPhrasePrefixQueryTests extends ESTestCase { public void testSimple() throws Exception { - IndexWriter writer = new IndexWriter(new RAMDirectory(), new IndexWriterConfig(Lucene.STANDARD_ANALYZER)); + IndexWriter writer = new IndexWriter(new ByteBuffersDirectory(), new IndexWriterConfig(Lucene.STANDARD_ANALYZER)); Document doc = new Document(); doc.add(new Field("field", "aaa bbb ccc ddd", TextField.TYPE_NOT_STORED)); writer.addDocument(doc); diff --git a/server/src/test/java/org/elasticsearch/common/lucene/search/morelikethis/MoreLikeThisQueryTests.java 
b/server/src/test/java/org/elasticsearch/common/lucene/search/morelikethis/MoreLikeThisQueryTests.java index 337c61243baa3..09e8525c6e9bd 100644 --- a/server/src/test/java/org/elasticsearch/common/lucene/search/morelikethis/MoreLikeThisQueryTests.java +++ b/server/src/test/java/org/elasticsearch/common/lucene/search/morelikethis/MoreLikeThisQueryTests.java @@ -27,8 +27,8 @@ import org.apache.lucene.index.IndexWriter; import org.apache.lucene.index.IndexWriterConfig; import org.apache.lucene.search.IndexSearcher; +import org.apache.lucene.store.ByteBuffersDirectory; import org.apache.lucene.store.Directory; -import org.apache.lucene.store.RAMDirectory; import org.elasticsearch.common.lucene.Lucene; import org.elasticsearch.common.lucene.search.MoreLikeThisQuery; import org.elasticsearch.test.ESTestCase; @@ -38,7 +38,7 @@ public class MoreLikeThisQueryTests extends ESTestCase { public void testSimple() throws Exception { - Directory dir = new RAMDirectory(); + Directory dir = new ByteBuffersDirectory(); IndexWriter indexWriter = new IndexWriter(dir, new IndexWriterConfig(Lucene.STANDARD_ANALYZER)); indexWriter.commit(); diff --git a/server/src/test/java/org/elasticsearch/common/lucene/store/InputStreamIndexInputTests.java b/server/src/test/java/org/elasticsearch/common/lucene/store/InputStreamIndexInputTests.java index 74de6b77f77cd..b6a80dbf722ea 100644 --- a/server/src/test/java/org/elasticsearch/common/lucene/store/InputStreamIndexInputTests.java +++ b/server/src/test/java/org/elasticsearch/common/lucene/store/InputStreamIndexInputTests.java @@ -19,10 +19,11 @@ package org.elasticsearch.common.lucene.store; +import org.apache.lucene.store.ByteBuffersDirectory; +import org.apache.lucene.store.Directory; import org.apache.lucene.store.IOContext; import org.apache.lucene.store.IndexInput; import org.apache.lucene.store.IndexOutput; -import org.apache.lucene.store.RAMDirectory; import org.elasticsearch.test.ESTestCase; import java.io.IOException; @@ -32,7 +33,7 @@ 
public class InputStreamIndexInputTests extends ESTestCase { public void testSingleReadSingleByteLimit() throws IOException { - RAMDirectory dir = new RAMDirectory(); + Directory dir = new ByteBuffersDirectory(); IndexOutput output = dir.createOutput("test", IOContext.DEFAULT); for (int i = 0; i < 3; i++) { output.writeByte((byte) 1); @@ -68,7 +69,7 @@ public void testSingleReadSingleByteLimit() throws IOException { } public void testReadMultiSingleByteLimit1() throws IOException { - RAMDirectory dir = new RAMDirectory(); + Directory dir = new ByteBuffersDirectory(); IndexOutput output = dir.createOutput("test", IOContext.DEFAULT); for (int i = 0; i < 3; i++) { output.writeByte((byte) 1); @@ -106,7 +107,7 @@ public void testReadMultiSingleByteLimit1() throws IOException { } public void testSingleReadTwoBytesLimit() throws IOException { - RAMDirectory dir = new RAMDirectory(); + Directory dir = new ByteBuffersDirectory(); IndexOutput output = dir.createOutput("test", IOContext.DEFAULT); for (int i = 0; i < 3; i++) { output.writeByte((byte) 1); @@ -147,7 +148,7 @@ public void testSingleReadTwoBytesLimit() throws IOException { } public void testReadMultiTwoBytesLimit1() throws IOException { - RAMDirectory dir = new RAMDirectory(); + Directory dir = new ByteBuffersDirectory(); IndexOutput output = dir.createOutput("test", IOContext.DEFAULT); for (int i = 0; i < 3; i++) { output.writeByte((byte) 1); @@ -190,7 +191,7 @@ public void testReadMultiTwoBytesLimit1() throws IOException { } public void testReadMultiFourBytesLimit() throws IOException { - RAMDirectory dir = new RAMDirectory(); + Directory dir = new ByteBuffersDirectory(); IndexOutput output = dir.createOutput("test", IOContext.DEFAULT); for (int i = 0; i < 3; i++) { output.writeByte((byte) 1); @@ -228,7 +229,7 @@ public void testReadMultiFourBytesLimit() throws IOException { } public void testMarkRest() throws Exception { - RAMDirectory dir = new RAMDirectory(); + Directory dir = new ByteBuffersDirectory(); 
IndexOutput output = dir.createOutput("test", IOContext.DEFAULT); for (int i = 0; i < 3; i++) { output.writeByte((byte) 1); diff --git a/server/src/test/java/org/elasticsearch/deps/lucene/SimpleLuceneTests.java b/server/src/test/java/org/elasticsearch/deps/lucene/SimpleLuceneTests.java index 6b117fc7e96dc..b6f11ff77eaed 100644 --- a/server/src/test/java/org/elasticsearch/deps/lucene/SimpleLuceneTests.java +++ b/server/src/test/java/org/elasticsearch/deps/lucene/SimpleLuceneTests.java @@ -42,8 +42,8 @@ import org.apache.lucene.search.TermQuery; import org.apache.lucene.search.TopDocs; import org.apache.lucene.search.TopFieldDocs; +import org.apache.lucene.store.ByteBuffersDirectory; import org.apache.lucene.store.Directory; -import org.apache.lucene.store.RAMDirectory; import org.apache.lucene.util.BytesRef; import org.elasticsearch.common.lucene.Lucene; import org.elasticsearch.test.ESTestCase; @@ -55,7 +55,7 @@ public class SimpleLuceneTests extends ESTestCase { public void testSortValues() throws Exception { - Directory dir = new RAMDirectory(); + Directory dir = new ByteBuffersDirectory(); IndexWriter indexWriter = new IndexWriter(dir, new IndexWriterConfig(Lucene.STANDARD_ANALYZER)); for (int i = 0; i < 10; i++) { Document document = new Document(); @@ -74,7 +74,7 @@ public void testSortValues() throws Exception { } public void testSimpleNumericOps() throws Exception { - Directory dir = new RAMDirectory(); + Directory dir = new ByteBuffersDirectory(); IndexWriter indexWriter = new IndexWriter(dir, new IndexWriterConfig(Lucene.STANDARD_ANALYZER)); Document document = new Document(); @@ -104,7 +104,7 @@ public void testSimpleNumericOps() throws Exception { * first (with load and break). 
*/ public void testOrdering() throws Exception { - Directory dir = new RAMDirectory(); + Directory dir = new ByteBuffersDirectory(); IndexWriter indexWriter = new IndexWriter(dir, new IndexWriterConfig(Lucene.STANDARD_ANALYZER)); Document document = new Document(); @@ -132,7 +132,7 @@ public Status needsField(FieldInfo fieldInfo) throws IOException { } public void testNRTSearchOnClosedWriter() throws Exception { - Directory dir = new RAMDirectory(); + Directory dir = new ByteBuffersDirectory(); IndexWriter indexWriter = new IndexWriter(dir, new IndexWriterConfig(Lucene.STANDARD_ANALYZER)); DirectoryReader reader = DirectoryReader.open(indexWriter); diff --git a/server/src/test/java/org/elasticsearch/deps/lucene/VectorHighlighterTests.java b/server/src/test/java/org/elasticsearch/deps/lucene/VectorHighlighterTests.java index 7d01b3992fcbd..eb8cedeba8290 100644 --- a/server/src/test/java/org/elasticsearch/deps/lucene/VectorHighlighterTests.java +++ b/server/src/test/java/org/elasticsearch/deps/lucene/VectorHighlighterTests.java @@ -35,8 +35,8 @@ import org.apache.lucene.search.TopDocs; import org.apache.lucene.search.vectorhighlight.CustomFieldQuery; import org.apache.lucene.search.vectorhighlight.FastVectorHighlighter; +import org.apache.lucene.store.ByteBuffersDirectory; import org.apache.lucene.store.Directory; -import org.apache.lucene.store.RAMDirectory; import org.elasticsearch.common.lucene.Lucene; import org.elasticsearch.test.ESTestCase; @@ -46,7 +46,7 @@ public class VectorHighlighterTests extends ESTestCase { public void testVectorHighlighter() throws Exception { - Directory dir = new RAMDirectory(); + Directory dir = new ByteBuffersDirectory(); IndexWriter indexWriter = new IndexWriter(dir, new IndexWriterConfig(Lucene.STANDARD_ANALYZER)); Document document = new Document(); @@ -72,7 +72,7 @@ public void testVectorHighlighter() throws Exception { } public void testVectorHighlighterPrefixQuery() throws Exception { - Directory dir = new RAMDirectory(); + 
Directory dir = new ByteBuffersDirectory(); IndexWriter indexWriter = new IndexWriter(dir, new IndexWriterConfig(Lucene.STANDARD_ANALYZER)); Document document = new Document(); @@ -113,7 +113,7 @@ public void testVectorHighlighterPrefixQuery() throws Exception { } public void testVectorHighlighterNoStore() throws Exception { - Directory dir = new RAMDirectory(); + Directory dir = new ByteBuffersDirectory(); IndexWriter indexWriter = new IndexWriter(dir, new IndexWriterConfig(Lucene.STANDARD_ANALYZER)); Document document = new Document(); @@ -138,7 +138,7 @@ public void testVectorHighlighterNoStore() throws Exception { } public void testVectorHighlighterNoTermVector() throws Exception { - Directory dir = new RAMDirectory(); + Directory dir = new ByteBuffersDirectory(); IndexWriter indexWriter = new IndexWriter(dir, new IndexWriterConfig(Lucene.STANDARD_ANALYZER)); Document document = new Document(); diff --git a/server/src/test/java/org/elasticsearch/index/HiddenIndexIT.java b/server/src/test/java/org/elasticsearch/index/HiddenIndexIT.java index ca4fa588b144a..56637cd3735cc 100644 --- a/server/src/test/java/org/elasticsearch/index/HiddenIndexIT.java +++ b/server/src/test/java/org/elasticsearch/index/HiddenIndexIT.java @@ -77,6 +77,15 @@ public void testHiddenIndexSearch() { .get(); matchedHidden = Arrays.stream(searchResponse.getHits().getHits()).anyMatch(hit -> ".hidden-index".equals(hit.getIndex())); assertTrue(matchedHidden); + + // make index not hidden + assertAcked(client().admin().indices().prepareUpdateSettings("hidden-index") + .setSettings(Settings.builder().put("index.hidden", false).build()) + .get()); + searchResponse = + client().prepareSearch(randomFrom("*", "_all", "h*", "*index")).setSize(1000).setQuery(QueryBuilders.matchAllQuery()).get(); + matchedHidden = Arrays.stream(searchResponse.getHits().getHits()).anyMatch(hit -> "hidden-index".equals(hit.getIndex())); + assertTrue(matchedHidden); } public void testGlobalTemplatesDoNotApply() { diff --git 
a/server/src/test/java/org/elasticsearch/index/cache/bitset/BitSetFilterCacheTests.java b/server/src/test/java/org/elasticsearch/index/cache/bitset/BitSetFilterCacheTests.java index 2c75437ee352e..8789cc8935d67 100644 --- a/server/src/test/java/org/elasticsearch/index/cache/bitset/BitSetFilterCacheTests.java +++ b/server/src/test/java/org/elasticsearch/index/cache/bitset/BitSetFilterCacheTests.java @@ -33,8 +33,8 @@ import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.search.TermQuery; import org.apache.lucene.search.join.BitSetProducer; +import org.apache.lucene.store.ByteBuffersDirectory; import org.apache.lucene.store.Directory; -import org.apache.lucene.store.RAMDirectory; import org.apache.lucene.util.Accountable; import org.apache.lucene.util.BitSet; import org.elasticsearch.common.lucene.index.ElasticsearchDirectoryReader; @@ -68,7 +68,7 @@ private static int matchCount(BitSetProducer producer, IndexReader reader) throw public void testInvalidateEntries() throws Exception { IndexWriter writer = new IndexWriter( - new RAMDirectory(), + new ByteBuffersDirectory(), new IndexWriterConfig(new StandardAnalyzer()).setMergePolicy(new LogByteSizeMergePolicy()) ); Document document = new Document(); @@ -128,7 +128,7 @@ public void onRemoval(ShardId shardId, Accountable accountable) { public void testListener() throws IOException { IndexWriter writer = new IndexWriter( - new RAMDirectory(), + new ByteBuffersDirectory(), new IndexWriterConfig(new StandardAnalyzer()).setMergePolicy(new LogByteSizeMergePolicy()) ); Document document = new Document(); diff --git a/server/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java b/server/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java index 5d998865886a2..4fa17f633d470 100644 --- a/server/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java +++ b/server/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java @@ -4506,8 +4506,8 @@ 
protected void commitIndexWriter(IndexWriter writer, Translog translog) throws I engine.syncTranslog(); } if (frequently()) { - final long lastSyncedGlobalCheckpoint = Translog.readGlobalCheckpoint(translogPath, translogUUID); engine.flush(randomBoolean(), true); + final long lastSyncedGlobalCheckpoint = Translog.readGlobalCheckpoint(translogPath, translogUUID); final List commits = DirectoryReader.listCommits(store.directory()); // Keep only one safe commit as the oldest commit. final IndexCommit safeCommit = commits.get(0); diff --git a/server/src/test/java/org/elasticsearch/index/fielddata/AbstractFieldDataTestCase.java b/server/src/test/java/org/elasticsearch/index/fielddata/AbstractFieldDataTestCase.java index 3b4bf428adf08..d20bf83d7d0f2 100644 --- a/server/src/test/java/org/elasticsearch/index/fielddata/AbstractFieldDataTestCase.java +++ b/server/src/test/java/org/elasticsearch/index/fielddata/AbstractFieldDataTestCase.java @@ -30,7 +30,7 @@ import org.apache.lucene.index.LogByteSizeMergePolicy; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.Query; -import org.apache.lucene.store.RAMDirectory; +import org.apache.lucene.store.ByteBuffersDirectory; import org.elasticsearch.common.lucene.index.ElasticsearchDirectoryReader; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.IndexService; @@ -136,7 +136,7 @@ public void setup() throws Exception { indicesFieldDataCache = getInstanceFromNode(IndicesService.class).getIndicesFieldDataCache(); // LogByteSizeMP to preserve doc ID order writer = new IndexWriter( - new RAMDirectory(), new IndexWriterConfig(new StandardAnalyzer()).setMergePolicy(new LogByteSizeMergePolicy()) + new ByteBuffersDirectory(), new IndexWriterConfig(new StandardAnalyzer()).setMergePolicy(new LogByteSizeMergePolicy()) ); shardContext = indexService.newQueryShardContext(0, null, () -> 0, null); } diff --git 
a/server/src/test/java/org/elasticsearch/index/fielddata/IndexFieldDataServiceTests.java b/server/src/test/java/org/elasticsearch/index/fielddata/IndexFieldDataServiceTests.java index bfa7cf9a9e1f1..8cece535a6841 100644 --- a/server/src/test/java/org/elasticsearch/index/fielddata/IndexFieldDataServiceTests.java +++ b/server/src/test/java/org/elasticsearch/index/fielddata/IndexFieldDataServiceTests.java @@ -28,7 +28,7 @@ import org.apache.lucene.index.IndexWriter; import org.apache.lucene.index.IndexWriterConfig; import org.apache.lucene.index.LeafReaderContext; -import org.apache.lucene.store.RAMDirectory; +import org.apache.lucene.store.ByteBuffersDirectory; import org.apache.lucene.util.Accountable; import org.elasticsearch.common.lucene.index.ElasticsearchDirectoryReader; import org.elasticsearch.common.settings.Settings; @@ -110,7 +110,7 @@ public void testClearField() throws Exception { final BuilderContext ctx = new BuilderContext(indexService.getIndexSettings().getSettings(), new ContentPath(1)); final MappedFieldType mapper1 = new TextFieldMapper.Builder("field_1").fielddata(true).build(ctx).fieldType(); final MappedFieldType mapper2 = new TextFieldMapper.Builder("field_2").fielddata(true).build(ctx).fieldType(); - final IndexWriter writer = new IndexWriter(new RAMDirectory(), new IndexWriterConfig(new KeywordAnalyzer())); + final IndexWriter writer = new IndexWriter(new ByteBuffersDirectory(), new IndexWriterConfig(new KeywordAnalyzer())); Document doc = new Document(); doc.add(new StringField("field_1", "thisisastring", Store.NO)); doc.add(new StringField("field_2", "thisisanotherstring", Store.NO)); @@ -169,7 +169,7 @@ public void testFieldDataCacheListener() throws Exception { final BuilderContext ctx = new BuilderContext(indexService.getIndexSettings().getSettings(), new ContentPath(1)); final MappedFieldType mapper1 = new TextFieldMapper.Builder("s").fielddata(true).build(ctx).fieldType(); - final IndexWriter writer = new IndexWriter(new 
RAMDirectory(), new IndexWriterConfig(new KeywordAnalyzer())); + final IndexWriter writer = new IndexWriter(new ByteBuffersDirectory(), new IndexWriterConfig(new KeywordAnalyzer())); Document doc = new Document(); doc.add(new StringField("s", "thisisastring", Store.NO)); writer.addDocument(doc); diff --git a/server/src/test/java/org/elasticsearch/index/mapper/BooleanFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/BooleanFieldMapperTests.java index 1fd3f51ece916..37d586e0ea41a 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/BooleanFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/BooleanFieldMapperTests.java @@ -27,8 +27,8 @@ import org.apache.lucene.index.IndexableField; import org.apache.lucene.index.LeafReader; import org.apache.lucene.index.SortedNumericDocValues; +import org.apache.lucene.store.ByteBuffersDirectory; import org.apache.lucene.store.Directory; -import org.apache.lucene.store.RAMDirectory; import org.apache.lucene.util.BytesRef; import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesReference; @@ -80,7 +80,7 @@ public void testDefaults() throws IOException { .endObject()), XContentType.JSON)); - try (Directory dir = new RAMDirectory(); + try (Directory dir = new ByteBuffersDirectory(); IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(new MockAnalyzer(random())))) { w.addDocuments(doc.docs()); try (DirectoryReader reader = DirectoryReader.open(w)) { diff --git a/server/src/test/java/org/elasticsearch/index/mapper/DateFieldTypeTests.java b/server/src/test/java/org/elasticsearch/index/mapper/DateFieldTypeTests.java index 011d5ae3ef6c1..82302c0493669 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/DateFieldTypeTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/DateFieldTypeTests.java @@ -48,6 +48,7 @@ import org.elasticsearch.index.mapper.DateFieldMapper.Resolution; import 
org.elasticsearch.index.mapper.MappedFieldType.Relation; import org.elasticsearch.index.mapper.ParseContext.Document; +import org.elasticsearch.index.query.DateRangeIncludingNowQuery; import org.elasticsearch.index.query.QueryRewriteContext; import org.elasticsearch.index.query.QueryShardContext; import org.joda.time.DateTimeZone; @@ -269,6 +270,15 @@ BigArrays.NON_RECYCLING_INSTANCE, null, null, null, null, null, xContentRegistry assertEquals(expected, ft.rangeQuery(date1, date2, true, true, null, null, null, context).rewrite(new MultiReader())); + instant1 = nowInMillis; + instant2 = instant1 + 100; + expected = new DateRangeIncludingNowQuery(new IndexOrDocValuesQuery( + LongPoint.newRangeQuery("field", instant1, instant2), + SortedNumericDocValuesField.newSlowRangeQuery("field", instant1, instant2) + )); + assertEquals(expected, + ft.rangeQuery("now", instant2, true, true, null, null, null, context)); + ft.setIndexOptions(IndexOptions.NONE); IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> ft.rangeQuery(date1, date2, true, true, null, null, null, context)); diff --git a/server/src/test/java/org/elasticsearch/index/mapper/StoredNumericValuesTests.java b/server/src/test/java/org/elasticsearch/index/mapper/StoredNumericValuesTests.java index 3e506e5f470b6..6ac75999c0b4e 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/StoredNumericValuesTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/StoredNumericValuesTests.java @@ -23,7 +23,7 @@ import org.apache.lucene.index.IndexWriter; import org.apache.lucene.index.IndexWriterConfig; import org.apache.lucene.search.IndexSearcher; -import org.apache.lucene.store.RAMDirectory; +import org.apache.lucene.store.ByteBuffersDirectory; import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.compress.CompressedXContent; @@ -41,7 +41,7 @@ public class StoredNumericValuesTests extends 
ESSingleNodeTestCase { public void testBytesAndNumericRepresentation() throws Exception { - IndexWriter writer = new IndexWriter(new RAMDirectory(), new IndexWriterConfig(Lucene.STANDARD_ANALYZER)); + IndexWriter writer = new IndexWriter(new ByteBuffersDirectory(), new IndexWriterConfig(Lucene.STANDARD_ANALYZER)); String mapping = Strings .toString(XContentFactory.jsonBuilder() diff --git a/server/src/test/java/org/elasticsearch/index/mapper/TextFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/TextFieldMapperTests.java index 822035d9767fb..cd07023e6b31c 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/TextFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/TextFieldMapperTests.java @@ -515,7 +515,7 @@ public void testFielddata() throws IOException { FieldMapper fieldMapper = (FieldMapper) disabledMapper.mappers().getMapper("field"); fieldMapper.fieldType().fielddataBuilder("test"); }); - assertThat(e.getMessage(), containsString("Fielddata is disabled")); + assertThat(e.getMessage(), containsString("Text fields are not optimised for operations that require per-document field data")); mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("field") diff --git a/server/src/test/java/org/elasticsearch/index/query/BoolQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/BoolQueryBuilderTests.java index 70826f7aab1e8..3e859035b51b2 100644 --- a/server/src/test/java/org/elasticsearch/index/query/BoolQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/BoolQueryBuilderTests.java @@ -22,6 +22,7 @@ import org.apache.lucene.search.BooleanClause; import org.apache.lucene.search.BooleanQuery; import org.apache.lucene.search.MatchAllDocsQuery; +import org.apache.lucene.search.MatchNoDocsQuery; import org.apache.lucene.search.Query; import org.elasticsearch.common.ParsingException; 
import org.elasticsearch.common.xcontent.XContentBuilder; @@ -85,7 +86,7 @@ protected void doAssertLuceneQuery(BoolQueryBuilder queryBuilder, Query query, Q if (clauses.isEmpty()) { assertThat(query, instanceOf(MatchAllDocsQuery.class)); - } else { + } else if (query instanceof MatchNoDocsQuery == false) { assertThat(query, instanceOf(BooleanQuery.class)); BooleanQuery booleanQuery = (BooleanQuery) query; if (queryBuilder.adjustPureNegative()) { @@ -113,7 +114,7 @@ private static List getBooleanClauses(List queryBui BooleanClause.Occur occur, QueryShardContext context) throws IOException { List clauses = new ArrayList<>(); for (QueryBuilder query : queryBuilders) { - Query innerQuery = query.toQuery(context); + Query innerQuery = query.rewrite(context).toQuery(context); if (innerQuery != null) { clauses.add(new BooleanClause(innerQuery, occur)); } @@ -195,15 +196,15 @@ public void testMinShouldMatchFilterWithoutShouldClauses() throws Exception { public void testMinShouldMatchBiggerThanNumberOfShouldClauses() throws Exception { BooleanQuery bq = (BooleanQuery) parseQuery( boolQuery() - .should(termQuery("foo", "bar")) - .should(termQuery("foo2", "bar2")) + .should(termQuery(STRING_FIELD_NAME, "bar")) + .should(termQuery(STRING_FIELD_NAME_2, "bar2")) .minimumShouldMatch("3")).toQuery(createShardContext()); assertEquals(3, bq.getMinimumNumberShouldMatch()); bq = (BooleanQuery) parseQuery( boolQuery() - .should(termQuery("foo", "bar")) - .should(termQuery("foo2", "bar2")) + .should(termQuery(STRING_FIELD_NAME, "bar")) + .should(termQuery(STRING_FIELD_NAME_2, "bar2")) .minimumShouldMatch(3)).toQuery(createShardContext()); assertEquals(3, bq.getMinimumNumberShouldMatch()); } @@ -211,8 +212,8 @@ public void testMinShouldMatchBiggerThanNumberOfShouldClauses() throws Exception public void testMinShouldMatchDisableCoord() throws Exception { BooleanQuery bq = (BooleanQuery) parseQuery( boolQuery() - .should(termQuery("foo", "bar")) - .should(termQuery("foo2", "bar2")) + 
.should(termQuery(STRING_FIELD_NAME, "bar")) + .should(termQuery(STRING_FIELD_NAME, "bar2")) .minimumShouldMatch("3")).toQuery(createShardContext()); assertEquals(3, bq.getMinimumNumberShouldMatch()); } @@ -291,22 +292,22 @@ public void testRewrite() throws IOException { boolean mustRewrite = false; if (randomBoolean()) { mustRewrite = true; - boolQueryBuilder.must(new WrapperQueryBuilder(new TermsQueryBuilder("foo", "must").toString())); + boolQueryBuilder.must(new WrapperQueryBuilder(new TermsQueryBuilder(STRING_FIELD_NAME, "must").toString())); } if (randomBoolean()) { mustRewrite = true; - boolQueryBuilder.should(new WrapperQueryBuilder(new TermsQueryBuilder("foo", "should").toString())); + boolQueryBuilder.should(new WrapperQueryBuilder(new TermsQueryBuilder(STRING_FIELD_NAME, "should").toString())); } if (randomBoolean()) { mustRewrite = true; - boolQueryBuilder.filter(new WrapperQueryBuilder(new TermsQueryBuilder("foo", "filter").toString())); + boolQueryBuilder.filter(new WrapperQueryBuilder(new TermsQueryBuilder(STRING_FIELD_NAME, "filter").toString())); } if (randomBoolean()) { mustRewrite = true; - boolQueryBuilder.mustNot(new WrapperQueryBuilder(new TermsQueryBuilder("foo", "must_not").toString())); + boolQueryBuilder.mustNot(new WrapperQueryBuilder(new TermsQueryBuilder(STRING_FIELD_NAME, "must_not").toString())); } if (mustRewrite == false && randomBoolean()) { - boolQueryBuilder.must(new TermsQueryBuilder("foo", "no_rewrite")); + boolQueryBuilder.must(new TermsQueryBuilder(STRING_FIELD_NAME, "no_rewrite")); } QueryBuilder rewritten = boolQueryBuilder.rewrite(createShardContext()); if (mustRewrite == false && boolQueryBuilder.must().isEmpty()) { @@ -317,16 +318,16 @@ public void testRewrite() throws IOException { if (mustRewrite) { assertNotSame(rewrite, boolQueryBuilder); if (boolQueryBuilder.must().isEmpty() == false) { - assertEquals(new TermsQueryBuilder("foo", "must"), rewrite.must().get(0)); + assertEquals(new 
TermsQueryBuilder(STRING_FIELD_NAME, "must"), rewrite.must().get(0)); } if (boolQueryBuilder.should().isEmpty() == false) { - assertEquals(new TermsQueryBuilder("foo", "should"), rewrite.should().get(0)); + assertEquals(new TermsQueryBuilder(STRING_FIELD_NAME, "should"), rewrite.should().get(0)); } if (boolQueryBuilder.mustNot().isEmpty() == false) { - assertEquals(new TermsQueryBuilder("foo", "must_not"), rewrite.mustNot().get(0)); + assertEquals(new TermsQueryBuilder(STRING_FIELD_NAME, "must_not"), rewrite.mustNot().get(0)); } if (boolQueryBuilder.filter().isEmpty() == false) { - assertEquals(new TermsQueryBuilder("foo", "filter"), rewrite.filter().get(0)); + assertEquals(new TermsQueryBuilder(STRING_FIELD_NAME, "filter"), rewrite.filter().get(0)); } } else { assertSame(rewrite, boolQueryBuilder); @@ -359,14 +360,14 @@ public void testRewriteWithMatchNone() throws IOException { assertEquals(new MatchNoneQueryBuilder(), rewritten); boolQueryBuilder = new BoolQueryBuilder(); - boolQueryBuilder.must(new TermQueryBuilder("foo","bar")); + boolQueryBuilder.must(new TermQueryBuilder(STRING_FIELD_NAME,"bar")); boolQueryBuilder.filter(new WrapperQueryBuilder(new WrapperQueryBuilder(new MatchNoneQueryBuilder().toString()).toString())); rewritten = boolQueryBuilder.rewrite(createShardContext()); assertEquals(new MatchNoneQueryBuilder(), rewritten); boolQueryBuilder = new BoolQueryBuilder(); - boolQueryBuilder.must(new TermQueryBuilder("foo","bar")); - boolQueryBuilder.filter(new BoolQueryBuilder().should(new TermQueryBuilder("foo","bar")) + boolQueryBuilder.must(new TermQueryBuilder(STRING_FIELD_NAME,"bar")); + boolQueryBuilder.filter(new BoolQueryBuilder().should(new TermQueryBuilder(STRING_FIELD_NAME,"bar")) .filter(new MatchNoneQueryBuilder())); rewritten = Rewriteable.rewrite(boolQueryBuilder, createShardContext()); assertEquals(new MatchNoneQueryBuilder(), rewritten); @@ -377,7 +378,7 @@ public void testRewriteWithMatchNone() throws IOException { assertEquals(new 
MatchNoneQueryBuilder(), rewritten); boolQueryBuilder = new BoolQueryBuilder(); - boolQueryBuilder.should(new TermQueryBuilder("foo", "bar")); + boolQueryBuilder.should(new TermQueryBuilder(STRING_FIELD_NAME, "bar")); boolQueryBuilder.should(new WrapperQueryBuilder(new MatchNoneQueryBuilder().toString())); rewritten = Rewriteable.rewrite(boolQueryBuilder, createShardContext()); assertNotEquals(new MatchNoneQueryBuilder(), rewritten); @@ -386,4 +387,16 @@ public void testRewriteWithMatchNone() throws IOException { rewritten = Rewriteable.rewrite(boolQueryBuilder, createShardContext()); assertNotEquals(new MatchNoneQueryBuilder(), rewritten); } + + @Override + public void testMustRewrite() throws IOException { + QueryShardContext context = createShardContext(); + context.setAllowUnmappedFields(true); + TermQueryBuilder termQuery = new TermQueryBuilder("unmapped_field", 42); + BoolQueryBuilder boolQuery = new BoolQueryBuilder(); + boolQuery.must(termQuery); + IllegalStateException e = expectThrows(IllegalStateException.class, + () -> boolQuery.toQuery(context)); + assertEquals("Rewrite first", e.getMessage()); + } } diff --git a/server/src/test/java/org/elasticsearch/index/query/BoostingQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/BoostingQueryBuilderTests.java index 534126ee5f35a..22d4db861be82 100644 --- a/server/src/test/java/org/elasticsearch/index/query/BoostingQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/BoostingQueryBuilderTests.java @@ -40,8 +40,8 @@ protected BoostingQueryBuilder doCreateTestQueryBuilder() { @Override protected void doAssertLuceneQuery(BoostingQueryBuilder queryBuilder, Query query, QueryShardContext context) throws IOException { - Query positive = queryBuilder.positiveQuery().toQuery(context); - Query negative = queryBuilder.negativeQuery().toQuery(context); + Query positive = queryBuilder.positiveQuery().rewrite(context).toQuery(context); + Query negative = 
queryBuilder.negativeQuery().rewrite(context).toQuery(context); if (positive == null || negative == null) { assertThat(query, nullValue()); } else { @@ -103,4 +103,22 @@ public void testRewrite() throws IOException { assertEquals(new BoostingQueryBuilder(positive.rewrite(createShardContext()), negative.rewrite(createShardContext())), rewrite); } } + + @Override + public void testMustRewrite() throws IOException { + QueryShardContext context = createShardContext(); + context.setAllowUnmappedFields(true); + + BoostingQueryBuilder queryBuilder1 = new BoostingQueryBuilder( + new TermQueryBuilder("unmapped_field", "foo"), new MatchNoneQueryBuilder()); + IllegalStateException e = expectThrows(IllegalStateException.class, + () -> queryBuilder1.toQuery(context)); + assertEquals("Rewrite first", e.getMessage()); + + BoostingQueryBuilder queryBuilder2 = new BoostingQueryBuilder( + new MatchAllQueryBuilder(), new TermQueryBuilder("unmapped_field", "foo")); + e = expectThrows(IllegalStateException.class, + () -> queryBuilder2.toQuery(context)); + assertEquals("Rewrite first", e.getMessage()); + } } diff --git a/server/src/test/java/org/elasticsearch/index/query/ConstantScoreQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/ConstantScoreQueryBuilderTests.java index fd2ad04af5f95..5281788a8307d 100644 --- a/server/src/test/java/org/elasticsearch/index/query/ConstantScoreQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/ConstantScoreQueryBuilderTests.java @@ -20,6 +20,7 @@ package org.elasticsearch.index.query; import org.apache.lucene.search.ConstantScoreQuery; +import org.apache.lucene.search.MatchNoDocsQuery; import org.apache.lucene.search.Query; import org.elasticsearch.common.ParsingException; import org.elasticsearch.test.AbstractQueryTestCase; @@ -41,9 +42,11 @@ protected ConstantScoreQueryBuilder doCreateTestQueryBuilder() { @Override protected void doAssertLuceneQuery(ConstantScoreQueryBuilder queryBuilder, Query 
query, QueryShardContext context) throws IOException { - Query innerQuery = queryBuilder.innerQuery().toQuery(context); + Query innerQuery = queryBuilder.innerQuery().rewrite(context).toQuery(context); if (innerQuery == null) { assertThat(query, nullValue()); + } else if (innerQuery instanceof MatchNoDocsQuery) { + assertThat(query, instanceOf(MatchNoDocsQuery.class)); } else { assertThat(query, instanceOf(ConstantScoreQuery.class)); ConstantScoreQuery constantScoreQuery = (ConstantScoreQuery) query; @@ -107,4 +110,14 @@ public void testRewriteToMatchNone() throws IOException { QueryBuilder rewrite = constantScoreQueryBuilder.rewrite(createShardContext()); assertEquals(rewrite, new MatchNoneQueryBuilder()); } + + @Override + public void testMustRewrite() throws IOException { + QueryShardContext context = createShardContext(); + context.setAllowUnmappedFields(true); + ConstantScoreQueryBuilder queryBuilder = new ConstantScoreQueryBuilder(new TermQueryBuilder("unmapped_field", "foo")); + IllegalStateException e = expectThrows(IllegalStateException.class, + () -> queryBuilder.toQuery(context)); + assertEquals("Rewrite first", e.getMessage()); + } } diff --git a/server/src/test/java/org/elasticsearch/index/query/IdsQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/IdsQueryBuilderTests.java index 06f197f4c137e..7185210caadb8 100644 --- a/server/src/test/java/org/elasticsearch/index/query/IdsQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/IdsQueryBuilderTests.java @@ -99,4 +99,14 @@ protected QueryBuilder parseQuery(XContentParser parser) throws IOException { assertThat(query, instanceOf(IdsQueryBuilder.class)); return (IdsQueryBuilder) query; } + + @Override + public void testMustRewrite() throws IOException { + QueryShardContext context = createShardContextWithNoType(); + context.setAllowUnmappedFields(true); + IdsQueryBuilder queryBuilder = createTestQueryBuilder(); + IllegalStateException e = 
expectThrows(IllegalStateException.class, + () -> queryBuilder.toQuery(context)); + assertEquals("Rewrite first", e.getMessage()); + } } diff --git a/server/src/test/java/org/elasticsearch/index/query/NestedQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/NestedQueryBuilderTests.java index c7f703113f264..ab6601b507d1b 100644 --- a/server/src/test/java/org/elasticsearch/index/query/NestedQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/NestedQueryBuilderTests.java @@ -57,8 +57,6 @@ public class NestedQueryBuilderTests extends AbstractQueryTestCase { - boolean requiresRewrite = false; - @Override protected void initializeAdditionalMappings(MapperService mapperService) throws IOException { mapperService.merge("_doc", new CompressedXContent(Strings.toString(PutMappingRequest.simpleMapping( @@ -79,10 +77,6 @@ protected void initializeAdditionalMappings(MapperService mapperService) throws @Override protected NestedQueryBuilder doCreateTestQueryBuilder() { QueryBuilder innerQueryBuilder = RandomQueryBuilder.createQuery(random()); - if (randomBoolean()) { - requiresRewrite = true; - innerQueryBuilder = new WrapperQueryBuilder(innerQueryBuilder.toString()); - } NestedQueryBuilder nqb = new NestedQueryBuilder("nested1", innerQueryBuilder, RandomPicks.randomFrom(random(), ScoreMode.values())); nqb.ignoreUnmapped(randomBoolean()); @@ -185,13 +179,14 @@ public void testFromJson() throws IOException { @Override public void testMustRewrite() throws IOException { - try { - super.testMustRewrite(); - } catch (UnsupportedOperationException e) { - if (requiresRewrite == false) { - throw e; - } - } + QueryShardContext context = createShardContext(); + context.setAllowUnmappedFields(true); + TermQueryBuilder innerQueryBuilder = new TermQueryBuilder("nested1.unmapped_field", "foo"); + NestedQueryBuilder nestedQueryBuilder = new NestedQueryBuilder("nested1", innerQueryBuilder, + RandomPicks.randomFrom(random(), 
ScoreMode.values())); + IllegalStateException e = expectThrows(IllegalStateException.class, + () -> nestedQueryBuilder.toQuery(context)); + assertEquals("Rewrite first", e.getMessage()); } public void testIgnoreUnmapped() throws IOException { diff --git a/server/src/test/java/org/elasticsearch/index/query/PrefixQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/PrefixQueryBuilderTests.java index dba92d712c107..94596ffd6c58d 100644 --- a/server/src/test/java/org/elasticsearch/index/query/PrefixQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/PrefixQueryBuilderTests.java @@ -20,11 +20,13 @@ package org.elasticsearch.index.query; import org.apache.lucene.index.Term; +import org.apache.lucene.search.MatchNoDocsQuery; import org.apache.lucene.search.MultiTermQuery; import org.apache.lucene.search.PrefixQuery; import org.apache.lucene.search.Query; import org.elasticsearch.common.ParsingException; import org.elasticsearch.test.AbstractQueryTestCase; +import org.hamcrest.Matchers; import java.io.IOException; import java.util.HashMap; @@ -68,12 +70,14 @@ private static PrefixQueryBuilder randomPrefixQuery() { @Override protected void doAssertLuceneQuery(PrefixQueryBuilder queryBuilder, Query query, QueryShardContext context) throws IOException { - assertThat(query, instanceOf(PrefixQuery.class)); - PrefixQuery prefixQuery = (PrefixQuery) query; + assertThat(query, Matchers.anyOf(instanceOf(PrefixQuery.class), instanceOf(MatchNoDocsQuery.class))); + if (context.fieldMapper(queryBuilder.fieldName()) != null) { // The field is mapped + PrefixQuery prefixQuery = (PrefixQuery) query; - String expectedFieldName = expectedFieldName(queryBuilder.fieldName()); - assertThat(prefixQuery.getPrefix().field(), equalTo(expectedFieldName)); - assertThat(prefixQuery.getPrefix().text(), equalTo(queryBuilder.value())); + String expectedFieldName = expectedFieldName(queryBuilder.fieldName()); + assertThat(prefixQuery.getPrefix().field(), 
equalTo(expectedFieldName)); + assertThat(prefixQuery.getPrefix().text(), equalTo(queryBuilder.value())); + } } public void testIllegalArguments() { @@ -88,10 +92,10 @@ public void testIllegalArguments() { public void testBlendedRewriteMethod() throws IOException { String rewrite = "top_terms_blended_freqs_10"; - Query parsedQuery = parseQuery(prefixQuery("field", "val").rewrite(rewrite)).toQuery(createShardContext()); + Query parsedQuery = parseQuery(prefixQuery(STRING_FIELD_NAME, "val").rewrite(rewrite)).toQuery(createShardContext()); assertThat(parsedQuery, instanceOf(PrefixQuery.class)); PrefixQuery prefixQuery = (PrefixQuery) parsedQuery; - assertThat(prefixQuery.getPrefix(), equalTo(new Term("field", "val"))); + assertThat(prefixQuery.getPrefix(), equalTo(new Term(STRING_FIELD_NAME, "val"))); assertThat(prefixQuery.getRewriteMethod(), instanceOf(MultiTermQuery.TopTermsBlendedFreqScoringRewrite.class)); } @@ -153,7 +157,16 @@ public void testRewriteIndexQueryToNotMatchNone() throws Exception { PrefixQueryBuilder query = prefixQuery("_index", getIndex().getName()); QueryShardContext queryShardContext = createShardContext(); QueryBuilder rewritten = query.rewrite(queryShardContext); - assertThat(rewritten, instanceOf(PrefixQueryBuilder.class)); + assertThat(rewritten, instanceOf(MatchAllQueryBuilder.class)); } + @Override + public void testMustRewrite() throws IOException { + QueryShardContext context = createShardContext(); + context.setAllowUnmappedFields(true); + PrefixQueryBuilder queryBuilder = new PrefixQueryBuilder("unmapped_field", "foo"); + IllegalStateException e = expectThrows(IllegalStateException.class, + () -> queryBuilder.toQuery(context)); + assertEquals("Rewrite first", e.getMessage()); + } } diff --git a/server/src/test/java/org/elasticsearch/index/query/RangeQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/RangeQueryBuilderTests.java index df1bf4910e76e..c9c43a1e90186 100644 --- 
a/server/src/test/java/org/elasticsearch/index/query/RangeQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/RangeQueryBuilderTests.java @@ -52,7 +52,6 @@ import java.util.Map; import static org.elasticsearch.index.query.QueryBuilders.rangeQuery; -import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.sameInstance; @@ -242,16 +241,6 @@ public void testIllegalArguments() { expectThrows(IllegalArgumentException.class, () -> rangeQueryBuilder.format("badFormat")); } - /** - * Specifying a timezone together with an unmapped field should throw an exception. - */ - public void testToQueryUnmappedWithTimezone() throws QueryShardException { - RangeQueryBuilder query = new RangeQueryBuilder("bogus_field"); - query.from(1).to(10).timeZone("UTC"); - QueryShardException e = expectThrows(QueryShardException.class, () -> query.toQuery(createShardContext())); - assertThat(e.getMessage(), containsString("[range] time_zone can not be applied")); - } - public void testToQueryNumericField() throws IOException { Query parsedQuery = rangeQuery(INT_FIELD_NAME).from(23).to(54).includeLower(true).includeUpper(false).toQuery(createShardContext()); // since age is automatically registered in data, we encode it as numeric @@ -343,6 +332,8 @@ public void testDateRangeQueryTimezone() throws IOException { "}"; QueryShardContext context = createShardContext(); Query parsedQuery = parseQuery(query).toQuery(context); + assertThat(parsedQuery, instanceOf(DateRangeIncludingNowQuery.class)); + parsedQuery = ((DateRangeIncludingNowQuery)parsedQuery).getQuery(); assertThat(parsedQuery, instanceOf(IndexOrDocValuesQuery.class)); parsedQuery = ((IndexOrDocValuesQuery) parsedQuery).getIndexQuery(); assertThat(parsedQuery, instanceOf(PointRangeQuery.class)); @@ -565,35 +556,6 @@ public void testParseRelation() { assertEquals(ShapeRelation.INTERSECTS, 
builder.relation()); } - public void testConvertNowRangeToMatchAll() throws IOException { - RangeQueryBuilder query = new RangeQueryBuilder(DATE_FIELD_NAME); - DateTime queryFromValue = new DateTime(2019, 1, 1, 0, 0, 0, ISOChronology.getInstanceUTC()); - DateTime queryToValue = new DateTime(2020, 1, 1, 0, 0, 0, ISOChronology.getInstanceUTC()); - if (randomBoolean()) { - query.from("now"); - query.to(queryToValue); - } else if (randomBoolean()) { - query.from(queryFromValue); - query.to("now"); - } else { - query.from("now"); - query.to("now+1h"); - } - QueryShardContext queryShardContext = createShardContext(); - QueryBuilder rewritten = query.rewrite(queryShardContext); - assertThat(rewritten, instanceOf(RangeQueryBuilder.class)); - - queryShardContext = new QueryShardContext(queryShardContext) { - - @Override - public boolean convertNowRangeToMatchAll() { - return true; - } - }; - rewritten = query.rewrite(queryShardContext); - assertThat(rewritten, instanceOf(MatchAllQueryBuilder.class)); - } - public void testTypeField() throws IOException { RangeQueryBuilder builder = QueryBuilders.rangeQuery("_type") .from("value1"); diff --git a/server/src/test/java/org/elasticsearch/index/query/ScriptScoreQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/ScriptScoreQueryBuilderTests.java index 04322a01d0f68..d3ff1434c4d2c 100644 --- a/server/src/test/java/org/elasticsearch/index/query/ScriptScoreQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/ScriptScoreQueryBuilderTests.java @@ -99,6 +99,19 @@ public void testCacheability() throws IOException { assertFalse("query should not be cacheable: " + queryBuilder.toString(), context.isCacheable()); } + @Override + public void testMustRewrite() throws IOException { + QueryShardContext context = createShardContext(); + context.setAllowUnmappedFields(true); + TermQueryBuilder termQueryBuilder = new TermQueryBuilder("unmapped_field", "foo"); + String scriptStr = "1"; + Script 
script = new Script(ScriptType.INLINE, MockScriptEngine.NAME, scriptStr, Collections.emptyMap()); + ScriptScoreQueryBuilder scriptScoreQueryBuilder = new ScriptScoreQueryBuilder(termQueryBuilder, script); + IllegalStateException e = expectThrows(IllegalStateException.class, + () -> scriptScoreQueryBuilder.toQuery(context)); + assertEquals("Rewrite first", e.getMessage()); + } + public void testDisallowExpensiveQueries() { QueryShardContext queryShardContext = mock(QueryShardContext.class); when(queryShardContext.allowExpensiveQueries()).thenReturn(false); diff --git a/server/src/test/java/org/elasticsearch/index/query/SpanMultiTermQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/SpanMultiTermQueryBuilderTests.java index 52d2dfaad8634..232e9b20e25b9 100644 --- a/server/src/test/java/org/elasticsearch/index/query/SpanMultiTermQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/SpanMultiTermQueryBuilderTests.java @@ -89,7 +89,8 @@ protected void doAssertLuceneQuery(SpanMultiTermQueryBuilder queryBuilder, Query if (query instanceof SpanMatchNoDocsQuery) { return; } - assertThat(query, either(instanceOf(SpanMultiTermQueryWrapper.class)).or(instanceOf(FieldMaskingSpanQuery.class))); + assertThat(query, either(instanceOf(SpanMultiTermQueryWrapper.class)) + .or(instanceOf(FieldMaskingSpanQuery.class))); if (query instanceof SpanMultiTermQueryWrapper) { SpanMultiTermQueryWrapper wrapper = (SpanMultiTermQueryWrapper) query; Query innerQuery = queryBuilder.innerQuery().toQuery(context); diff --git a/server/src/test/java/org/elasticsearch/index/query/TermQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/TermQueryBuilderTests.java index 0bf6ddbc57438..376aa54ef15b3 100644 --- a/server/src/test/java/org/elasticsearch/index/query/TermQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/TermQueryBuilderTests.java @@ -21,11 +21,11 @@ import 
com.fasterxml.jackson.core.io.JsonStringEncoder; import org.apache.lucene.index.Term; +import org.apache.lucene.search.MatchNoDocsQuery; import org.apache.lucene.search.PointRangeQuery; import org.apache.lucene.search.Query; import org.apache.lucene.search.TermQuery; import org.elasticsearch.common.ParsingException; -import org.elasticsearch.common.lucene.BytesRefs; import org.elasticsearch.index.mapper.MappedFieldType; import java.io.IOException; @@ -91,7 +91,7 @@ protected TermQueryBuilder createQueryBuilder(String fieldName, Object value) { @Override protected void doAssertLuceneQuery(TermQueryBuilder queryBuilder, Query query, QueryShardContext context) throws IOException { - assertThat(query, either(instanceOf(TermQuery.class)).or(instanceOf(PointRangeQuery.class))); + assertThat(query, either(instanceOf(TermQuery.class)).or(instanceOf(PointRangeQuery.class)).or(instanceOf(MatchNoDocsQuery.class))); MappedFieldType mapper = context.fieldMapper(queryBuilder.fieldName()); if (query instanceof TermQuery) { TermQuery termQuery = (TermQuery) query; @@ -99,14 +99,12 @@ protected void doAssertLuceneQuery(TermQueryBuilder queryBuilder, Query query, Q String expectedFieldName = expectedFieldName(queryBuilder.fieldName()); assertThat(termQuery.getTerm().field(), equalTo(expectedFieldName)); - if (mapper != null) { - Term term = ((TermQuery) mapper.termQuery(queryBuilder.value(), null)).getTerm(); - assertThat(termQuery.getTerm(), equalTo(term)); - } else { - assertThat(termQuery.getTerm().bytes(), equalTo(BytesRefs.toBytesRef(queryBuilder.value()))); - } - } else { + Term term = ((TermQuery) mapper.termQuery(queryBuilder.value(), null)).getTerm(); + assertThat(termQuery.getTerm(), equalTo(term)); + } else if (mapper != null) { assertEquals(query, mapper.termQuery(queryBuilder.value(), null)); + } else { + assertThat(query, instanceOf(MatchNoDocsQuery.class)); } } @@ -185,6 +183,16 @@ public void testRewriteIndexQueryToNotMatchNone() throws IOException { TermQueryBuilder 
query = QueryBuilders.termQuery("_index", getIndex().getName()); QueryShardContext queryShardContext = createShardContext(); QueryBuilder rewritten = query.rewrite(queryShardContext); - assertThat(rewritten, instanceOf(TermQueryBuilder.class)); - } + assertThat(rewritten, instanceOf(MatchAllQueryBuilder.class)); + } + + @Override + public void testMustRewrite() throws IOException { + QueryShardContext context = createShardContext(); + context.setAllowUnmappedFields(true); + TermQueryBuilder queryBuilder = new TermQueryBuilder("unmapped_field", "foo"); + IllegalStateException e = expectThrows(IllegalStateException.class, + () -> queryBuilder.toQuery(context)); + assertEquals("Rewrite first", e.getMessage()); + } } diff --git a/server/src/test/java/org/elasticsearch/index/query/TermsQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/TermsQueryBuilderTests.java index b4a42e65e5f8a..c772c4d431670 100644 --- a/server/src/test/java/org/elasticsearch/index/query/TermsQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/TermsQueryBuilderTests.java @@ -108,16 +108,13 @@ private TermsLookup randomTermsLookup() { protected void doAssertLuceneQuery(TermsQueryBuilder queryBuilder, Query query, QueryShardContext context) throws IOException { if (queryBuilder.termsLookup() == null && (queryBuilder.values() == null || queryBuilder.values().isEmpty())) { assertThat(query, instanceOf(MatchNoDocsQuery.class)); - MatchNoDocsQuery matchNoDocsQuery = (MatchNoDocsQuery) query; - assertThat(matchNoDocsQuery.toString(), containsString("No terms supplied for \"terms\" query.")); } else if (queryBuilder.termsLookup() != null && randomTerms.size() == 0){ assertThat(query, instanceOf(MatchNoDocsQuery.class)); - MatchNoDocsQuery matchNoDocsQuery = (MatchNoDocsQuery) query; - assertThat(matchNoDocsQuery.toString(), containsString("No terms supplied for \"terms\" query.")); } else { assertThat(query, either(instanceOf(TermInSetQuery.class)) 
.or(instanceOf(PointInSetQuery.class)) - .or(instanceOf(ConstantScoreQuery.class))); + .or(instanceOf(ConstantScoreQuery.class)) + .or(instanceOf(MatchNoDocsQuery.class))); if (query instanceof ConstantScoreQuery) { assertThat(((ConstantScoreQuery) query).getQuery(), instanceOf(BooleanQuery.class)); } @@ -137,8 +134,13 @@ protected void doAssertLuceneQuery(TermsQueryBuilder queryBuilder, Query query, } String fieldName = expectedFieldName(queryBuilder.fieldName()); - TermInSetQuery expected = new TermInSetQuery(fieldName, - terms.stream().filter(Objects::nonNull).map(Object::toString).map(BytesRef::new).collect(Collectors.toList())); + Query expected; + if (context.fieldMapper(fieldName) != null) { + expected = new TermInSetQuery(fieldName, + terms.stream().filter(Objects::nonNull).map(Object::toString).map(BytesRef::new).collect(Collectors.toList())); + } else { + expected = new MatchNoDocsQuery(); + } assertEquals(expected, query); } } @@ -263,8 +265,16 @@ public void testMustRewrite() throws IOException { UnsupportedOperationException e = expectThrows(UnsupportedOperationException.class, () -> termsQueryBuilder.toQuery(createShardContext())); assertEquals("query must be rewritten first", e.getMessage()); - assertEquals(rewriteAndFetch(termsQueryBuilder, createShardContext()), new TermsQueryBuilder(STRING_FIELD_NAME, - randomTerms.stream().filter(x -> x != null).collect(Collectors.toList()))); // terms lookup removes null values + + // terms lookup removes null values + List nonNullTerms = randomTerms.stream().filter(x -> x != null).collect(Collectors.toList()); + QueryBuilder expected; + if (nonNullTerms.isEmpty()) { + expected = new MatchNoneQueryBuilder(); + } else { + expected = new TermsQueryBuilder(STRING_FIELD_NAME, nonNullTerms); + } + assertEquals(expected, rewriteAndFetch(termsQueryBuilder, createShardContext())); } public void testGeo() throws Exception { @@ -325,7 +335,7 @@ public void testRewriteIndexQueryToNotMatchNone() throws IOException { 
TermsQueryBuilder query = new TermsQueryBuilder("_index", "does_not_exist", getIndex().getName()); QueryShardContext queryShardContext = createShardContext(); QueryBuilder rewritten = query.rewrite(queryShardContext); - assertThat(rewritten, instanceOf(TermsQueryBuilder.class)); + assertThat(rewritten, instanceOf(MatchAllQueryBuilder.class)); } @Override @@ -334,4 +344,5 @@ protected QueryBuilder parseQuery(XContentParser parser) throws IOException { assertThat(query, CoreMatchers.instanceOf(TermsQueryBuilder.class)); return query; } + } diff --git a/server/src/test/java/org/elasticsearch/index/query/WildcardQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/WildcardQueryBuilderTests.java index bf88ab9ee2da6..9482e0bb87d49 100644 --- a/server/src/test/java/org/elasticsearch/index/query/WildcardQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/WildcardQueryBuilderTests.java @@ -152,6 +152,16 @@ public void testRewriteIndexQueryNotMatchNone() throws IOException { WildcardQueryBuilder query = new WildcardQueryBuilder("_index", firstHalfOfIndexName +"*"); QueryShardContext queryShardContext = createShardContext(); QueryBuilder rewritten = query.rewrite(queryShardContext); - assertThat(rewritten, instanceOf(WildcardQueryBuilder.class)); - } + assertThat(rewritten, instanceOf(MatchAllQueryBuilder.class)); + } + + @Override + public void testMustRewrite() throws IOException { + QueryShardContext context = createShardContext(); + context.setAllowUnmappedFields(true); + WildcardQueryBuilder queryBuilder = new WildcardQueryBuilder("unmapped_field", "foo"); + IllegalStateException e = expectThrows(IllegalStateException.class, + () -> queryBuilder.toQuery(context)); + assertEquals("Rewrite first", e.getMessage()); + } } diff --git a/server/src/test/java/org/elasticsearch/index/query/WrapperQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/WrapperQueryBuilderTests.java index 
d5ccff8402420..ad607a561fbce 100644 --- a/server/src/test/java/org/elasticsearch/index/query/WrapperQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/WrapperQueryBuilderTests.java @@ -119,7 +119,7 @@ public void testFromJson() throws IOException { @Override public void testMustRewrite() throws IOException { - TermQueryBuilder tqb = new TermQueryBuilder("foo", "bar"); + TermQueryBuilder tqb = new TermQueryBuilder(STRING_FIELD_NAME, "bar"); WrapperQueryBuilder qb = new WrapperQueryBuilder(tqb.toString()); UnsupportedOperationException e = expectThrows(UnsupportedOperationException.class, () -> qb.toQuery(createShardContext())); assertEquals("this query must be rewritten first", e.getMessage()); @@ -137,7 +137,7 @@ public void testRewriteWithInnerName() throws IOException { } public void testRewriteWithInnerBoost() throws IOException { - final TermQueryBuilder query = new TermQueryBuilder("foo", "bar").boost(2); + final TermQueryBuilder query = new TermQueryBuilder(STRING_FIELD_NAME, "bar").boost(2); QueryBuilder builder = new WrapperQueryBuilder(query.toString()); QueryShardContext shardContext = createShardContext(); assertEquals(query, builder.rewrite(shardContext)); @@ -149,15 +149,15 @@ public void testRewriteInnerQueryToo() throws IOException { QueryShardContext shardContext = createShardContext(); QueryBuilder qb = new WrapperQueryBuilder( - new WrapperQueryBuilder(new TermQueryBuilder("foo", "bar").toString()).toString() + new WrapperQueryBuilder(new TermQueryBuilder(STRING_FIELD_NAME, "bar").toString()).toString() ); - assertEquals(new TermQuery(new Term("foo", "bar")), qb.rewrite(shardContext).toQuery(shardContext)); + assertEquals(new TermQuery(new Term(STRING_FIELD_NAME, "bar")), qb.rewrite(shardContext).toQuery(shardContext)); qb = new WrapperQueryBuilder( new WrapperQueryBuilder( - new WrapperQueryBuilder(new TermQueryBuilder("foo", "bar").toString()).toString() + new WrapperQueryBuilder(new 
TermQueryBuilder(STRING_FIELD_NAME, "bar").toString()).toString() ).toString() ); - assertEquals(new TermQuery(new Term("foo", "bar")), qb.rewrite(shardContext).toQuery(shardContext)); + assertEquals(new TermQuery(new Term(STRING_FIELD_NAME, "bar")), qb.rewrite(shardContext).toQuery(shardContext)); qb = new WrapperQueryBuilder(new BoolQueryBuilder().toString()); assertEquals(new MatchAllDocsQuery(), qb.rewrite(shardContext).toQuery(shardContext)); diff --git a/server/src/test/java/org/elasticsearch/index/query/functionscore/FunctionScoreQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/functionscore/FunctionScoreQueryBuilderTests.java index 16e5e87ba83e7..b565778b35962 100644 --- a/server/src/test/java/org/elasticsearch/index/query/functionscore/FunctionScoreQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/functionscore/FunctionScoreQueryBuilderTests.java @@ -552,11 +552,12 @@ public void testMalformedThrowsException() throws IOException { } public void testCustomWeightFactorQueryBuilderWithFunctionScore() throws IOException { - Query parsedQuery = parseQuery(functionScoreQuery(termQuery("name.last", "banon"), weightFactorFunction(1.3f))) - .toQuery(createShardContext()); + QueryShardContext context = createShardContext(); + Query parsedQuery = parseQuery(functionScoreQuery(termQuery(STRING_FIELD_NAME_2, "banon"), weightFactorFunction(1.3f))) + .rewrite(context).toQuery(context); assertThat(parsedQuery, instanceOf(FunctionScoreQuery.class)); FunctionScoreQuery functionScoreQuery = (FunctionScoreQuery) parsedQuery; - assertThat(((TermQuery) functionScoreQuery.getSubQuery()).getTerm(), equalTo(new Term("name.last", "banon"))); + assertThat(((TermQuery) functionScoreQuery.getSubQuery()).getTerm(), equalTo(new Term(STRING_FIELD_NAME_2, "banon"))); assertThat((double) (functionScoreQuery.getFunctions()[0]).getWeight(), closeTo(1.3, 0.001)); } @@ -642,14 +643,14 @@ public void testFromJson() throws IOException { 
public void testRewrite() throws IOException { FunctionScoreQueryBuilder functionScoreQueryBuilder = - new FunctionScoreQueryBuilder(new WrapperQueryBuilder(new TermQueryBuilder("foo", "bar").toString())) + new FunctionScoreQueryBuilder(new WrapperQueryBuilder(new TermQueryBuilder(STRING_FIELD_NAME, "bar").toString())) .boostMode(CombineFunction.REPLACE) .scoreMode(FunctionScoreQuery.ScoreMode.SUM) .setMinScore(1) .maxBoost(100); FunctionScoreQueryBuilder rewrite = (FunctionScoreQueryBuilder) functionScoreQueryBuilder.rewrite(createShardContext()); assertNotSame(functionScoreQueryBuilder, rewrite); - assertEquals(rewrite.query(), new TermQueryBuilder("foo", "bar")); + assertEquals(rewrite.query(), new TermQueryBuilder(STRING_FIELD_NAME, "bar")); assertEquals(rewrite.boostMode(), CombineFunction.REPLACE); assertEquals(rewrite.scoreMode(), FunctionScoreQuery.ScoreMode.SUM); assertEquals(rewrite.getMinScore(), 1f, 0.0001); @@ -657,18 +658,18 @@ public void testRewrite() throws IOException { } public void testRewriteWithFunction() throws IOException { - QueryBuilder firstFunction = new WrapperQueryBuilder(new TermQueryBuilder("tq", "1").toString()); - TermQueryBuilder secondFunction = new TermQueryBuilder("tq", "2"); - QueryBuilder queryBuilder = randomBoolean() ? new WrapperQueryBuilder(new TermQueryBuilder("foo", "bar").toString()) - : new TermQueryBuilder("foo", "bar"); + QueryBuilder firstFunction = new WrapperQueryBuilder(new TermQueryBuilder(STRING_FIELD_NAME_2, "1").toString()); + TermQueryBuilder secondFunction = new TermQueryBuilder(STRING_FIELD_NAME_2, "2"); + QueryBuilder queryBuilder = randomBoolean() ? 
new WrapperQueryBuilder(new TermQueryBuilder(STRING_FIELD_NAME, "bar").toString()) + : new TermQueryBuilder(STRING_FIELD_NAME, "bar"); FunctionScoreQueryBuilder functionScoreQueryBuilder = new FunctionScoreQueryBuilder(queryBuilder, new FunctionScoreQueryBuilder.FilterFunctionBuilder[] { new FunctionScoreQueryBuilder.FilterFunctionBuilder(firstFunction, new RandomScoreFunctionBuilder()), new FunctionScoreQueryBuilder.FilterFunctionBuilder(secondFunction, new RandomScoreFunctionBuilder()) }); FunctionScoreQueryBuilder rewrite = (FunctionScoreQueryBuilder) functionScoreQueryBuilder.rewrite(createShardContext()); assertNotSame(functionScoreQueryBuilder, rewrite); - assertEquals(rewrite.query(), new TermQueryBuilder("foo", "bar")); - assertEquals(rewrite.filterFunctionBuilders()[0].getFilter(), new TermQueryBuilder("tq", "1")); + assertEquals(rewrite.query(), new TermQueryBuilder(STRING_FIELD_NAME, "bar")); + assertEquals(rewrite.filterFunctionBuilders()[0].getFilter(), new TermQueryBuilder(STRING_FIELD_NAME_2, "1")); assertSame(rewrite.filterFunctionBuilders()[1].getFilter(), secondFunction); } @@ -685,7 +686,8 @@ public void testSingleScriptFunction() throws IOException { builder.boostMode(randomFrom(CombineFunction.values())); } - Query query = builder.toQuery(createShardContext()); + QueryShardContext shardContext = createShardContext(); + Query query = builder.rewrite(shardContext).toQuery(shardContext); assertThat(query, instanceOf(FunctionScoreQuery.class)); CombineFunction expectedBoostMode = builder.boostMode() != null @@ -840,4 +842,27 @@ private boolean isCacheable(FunctionScoreQueryBuilder queryBuilder) { } return true; } + + @Override + public void testMustRewrite() throws IOException { + QueryShardContext context = createShardContext(); + context.setAllowUnmappedFields(true); + TermQueryBuilder termQueryBuilder = new TermQueryBuilder("unmapped_field", "foo"); + + // main query needs rewriting + FunctionScoreQueryBuilder functionQueryBuilder1 = new 
FunctionScoreQueryBuilder(termQueryBuilder); + functionQueryBuilder1.setMinScore(1); + IllegalStateException e = expectThrows(IllegalStateException.class, + () -> functionQueryBuilder1.toQuery(context)); + assertEquals("Rewrite first", e.getMessage()); + + // filter needs rewriting + FunctionScoreQueryBuilder functionQueryBuilder2 = new FunctionScoreQueryBuilder(new MatchAllQueryBuilder(), + new FilterFunctionBuilder[] { + new FilterFunctionBuilder(termQueryBuilder, new RandomScoreFunctionBuilder()) + }); + e = expectThrows(IllegalStateException.class, + () -> functionQueryBuilder2.toQuery(context)); + assertEquals("Rewrite first", e.getMessage()); + } } diff --git a/server/src/test/java/org/elasticsearch/index/shard/IndexShardIT.java b/server/src/test/java/org/elasticsearch/index/shard/IndexShardIT.java index d6079bd7677bb..663310138758e 100644 --- a/server/src/test/java/org/elasticsearch/index/shard/IndexShardIT.java +++ b/server/src/test/java/org/elasticsearch/index/shard/IndexShardIT.java @@ -343,14 +343,18 @@ public void testMaybeFlush() throws Exception { assertTrue(shard.shouldPeriodicallyFlush()); final Translog translog = getTranslog(shard); assertEquals(2, translog.stats().getUncommittedOperations()); + assertThat(shard.flushStats().getTotal(), equalTo(0L)); client().prepareIndex("test").setId("2").setSource("{}", XContentType.JSON) .setRefreshPolicy(randomBoolean() ? 
IMMEDIATE : NONE).get(); + assertThat(shard.getLastKnownGlobalCheckpoint(), equalTo(2L)); assertBusy(() -> { // this is async assertFalse(shard.shouldPeriodicallyFlush()); - assertThat(shard.flushStats().getPeriodic(), greaterThan(0L)); + assertThat(shard.flushStats().getPeriodic(), equalTo(1L)); + assertThat(shard.flushStats().getTotal(), equalTo(1L)); }); shard.sync(); - assertEquals(0, translog.stats().getUncommittedOperations()); + assertThat(shard.getLastSyncedGlobalCheckpoint(), equalTo(2L)); + assertThat("last commit [" + shard.commitStats().getUserData() + "]", translog.stats().getUncommittedOperations(), equalTo(0)); long size = Math.max(translog.stats().getUncommittedSizeInBytes(), Translog.DEFAULT_HEADER_SIZE_IN_BYTES + 1); logger.info("--> current translog size: [{}] num_ops [{}] generation [{}]", translog.stats().getUncommittedSizeInBytes(), translog.stats().getUncommittedOperations(), translog.getGeneration()); diff --git a/server/src/test/java/org/elasticsearch/index/similarity/ScriptedSimilarityTests.java b/server/src/test/java/org/elasticsearch/index/similarity/ScriptedSimilarityTests.java index 22089bc40e498..755086539fe95 100644 --- a/server/src/test/java/org/elasticsearch/index/similarity/ScriptedSimilarityTests.java +++ b/server/src/test/java/org/elasticsearch/index/similarity/ScriptedSimilarityTests.java @@ -37,8 +37,8 @@ import org.apache.lucene.search.TermQuery; import org.apache.lucene.search.TopDocs; import org.apache.lucene.search.similarities.BM25Similarity; +import org.apache.lucene.store.ByteBuffersDirectory; import org.apache.lucene.store.Directory; -import org.apache.lucene.store.RAMDirectory; import org.apache.lucene.util.TestUtil; import org.apache.lucene.util.Version; import org.elasticsearch.script.SimilarityScript; @@ -117,7 +117,7 @@ public double execute(double weight, ScriptedSimilarity.Query query, }; }; ScriptedSimilarity sim = new ScriptedSimilarity("foobar", null, "foobaz", scriptFactory, true); - Directory dir = new 
RAMDirectory(); + Directory dir = new ByteBuffersDirectory(); IndexWriter w = new IndexWriter(dir, newIndexWriterConfig().setSimilarity(sim)); Document doc = new Document(); @@ -211,7 +211,7 @@ public double execute(double weight, ScriptedSimilarity.Query query, }; }; ScriptedSimilarity sim = new ScriptedSimilarity("foobar", weightScriptFactory, "foobaz", scriptFactory, true); - Directory dir = new RAMDirectory(); + Directory dir = new ByteBuffersDirectory(); IndexWriter w = new IndexWriter(dir, newIndexWriterConfig().setSimilarity(sim)); Document doc = new Document(); diff --git a/server/src/test/java/org/elasticsearch/index/store/StoreTests.java b/server/src/test/java/org/elasticsearch/index/store/StoreTests.java index 30903df58b43e..56fb91cc7f16c 100644 --- a/server/src/test/java/org/elasticsearch/index/store/StoreTests.java +++ b/server/src/test/java/org/elasticsearch/index/store/StoreTests.java @@ -40,6 +40,7 @@ import org.apache.lucene.index.SnapshotDeletionPolicy; import org.apache.lucene.index.Term; import org.apache.lucene.store.BaseDirectoryWrapper; +import org.apache.lucene.store.ByteBuffersDirectory; import org.apache.lucene.store.ChecksumIndexInput; import org.apache.lucene.store.Directory; import org.apache.lucene.store.FilterDirectory; @@ -47,7 +48,6 @@ import org.apache.lucene.store.IndexInput; import org.apache.lucene.store.IndexOutput; import org.apache.lucene.store.NIOFSDirectory; -import org.apache.lucene.store.RAMDirectory; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.TestUtil; import org.apache.lucene.util.Version; @@ -948,7 +948,7 @@ public void testCanOpenIndex() throws IOException { public void testDeserializeCorruptionException() throws IOException { final ShardId shardId = new ShardId("index", "_na_", 1); - final Directory dir = new RAMDirectory(); // I use ram dir to prevent that virusscanner being a PITA + final Directory dir = new ByteBuffersDirectory(); // I use ram dir to prevent that virusscanner being a 
PITA Store store = new Store(shardId, INDEX_SETTINGS, dir, new DummyShardLock(shardId)); CorruptIndexException ex = new CorruptIndexException("foo", "bar"); store.markStoreCorrupted(ex); @@ -977,7 +977,7 @@ public void testDeserializeCorruptionException() throws IOException { public void testCorruptionMarkerVersionCheck() throws IOException { final ShardId shardId = new ShardId("index", "_na_", 1); - final Directory dir = new RAMDirectory(); // I use ram dir to prevent that virusscanner being a PITA + final Directory dir = new ByteBuffersDirectory(); // I use ram dir to prevent that virusscanner being a PITA try (Store store = new Store(shardId, INDEX_SETTINGS, dir, new DummyShardLock(shardId))) { final String corruptionMarkerName = Store.CORRUPTED_MARKER_NAME_PREFIX + UUIDs.randomBase64UUID(); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/TermsAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/TermsAggregatorTests.java index a267d3b52c45e..b50f76f1de59b 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/TermsAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/TermsAggregatorTests.java @@ -696,13 +696,15 @@ private void termsAggregator(ValueType valueType, MappedFieldType fieldType, } if (multiValued == false) { + MappedFieldType filterFieldType = new KeywordFieldMapper.KeywordFieldType(); + filterFieldType.setName("include"); aggregationBuilder = new FilterAggregationBuilder("_name1", QueryBuilders.termQuery("include", "yes")); aggregationBuilder.subAggregation(new TermsAggregationBuilder("_name2", valueType) .executionHint(executionHint) .size(numTerms) .collectMode(randomFrom(Aggregator.SubAggCollectionMode.values())) .field("field")); - aggregator = createAggregator(aggregationBuilder, indexSearcher, fieldType); + aggregator = createAggregator(aggregationBuilder, indexSearcher, fieldType, 
filterFieldType); aggregator.preCollection(); indexSearcher.search(new MatchAllDocsQuery(), aggregator); aggregator.postCollection(); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/HDRPercentileRanksAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/HDRPercentileRanksAggregatorTests.java index 9d9c74f283b45..7ed434f3a202d 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/HDRPercentileRanksAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/HDRPercentileRanksAggregatorTests.java @@ -30,20 +30,32 @@ import org.apache.lucene.util.NumericUtils; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.NumberFieldMapper; +import org.elasticsearch.search.aggregations.AggregationBuilder; import org.elasticsearch.search.aggregations.AggregatorTestCase; -import org.elasticsearch.search.aggregations.metrics.Percentile; -import org.elasticsearch.search.aggregations.metrics.PercentileRanks; -import org.elasticsearch.search.aggregations.metrics.PercentileRanksAggregationBuilder; -import org.elasticsearch.search.aggregations.metrics.PercentilesMethod; import org.elasticsearch.search.aggregations.support.AggregationInspectionHelper; +import org.elasticsearch.search.aggregations.support.CoreValuesSourceType; +import org.elasticsearch.search.aggregations.support.ValuesSourceType; import org.hamcrest.Matchers; import java.io.IOException; import java.util.Iterator; +import java.util.List; public class HDRPercentileRanksAggregatorTests extends AggregatorTestCase { + @Override + protected AggregationBuilder createAggBuilderForTypeTest(MappedFieldType fieldType, String fieldName) { + return new PercentileRanksAggregationBuilder("hdr_ranks", new double[]{0.1, 0.5, 12}) + .field(fieldName) + .percentilesConfig(new PercentilesConfig.Hdr()); + } + + @Override + protected List getSupportedValuesSourceTypes() { + 
return List.of(CoreValuesSourceType.NUMERIC); + } + public void testEmpty() throws IOException { PercentileRanksAggregationBuilder aggBuilder = new PercentileRanksAggregationBuilder("my_agg", new double[]{0.5}) .field("field") diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/HDRPercentilesAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/HDRPercentilesAggregatorTests.java index 40ba3ef617b8c..c70cfc459fa96 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/HDRPercentilesAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/HDRPercentilesAggregatorTests.java @@ -33,10 +33,14 @@ import org.elasticsearch.common.CheckedConsumer; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.NumberFieldMapper; +import org.elasticsearch.search.aggregations.AggregationBuilder; import org.elasticsearch.search.aggregations.AggregatorTestCase; import org.elasticsearch.search.aggregations.support.AggregationInspectionHelper; +import org.elasticsearch.search.aggregations.support.CoreValuesSourceType; +import org.elasticsearch.search.aggregations.support.ValuesSourceType; import java.io.IOException; +import java.util.List; import java.util.function.Consumer; import static java.util.Arrays.asList; @@ -46,6 +50,18 @@ public class HDRPercentilesAggregatorTests extends AggregatorTestCase { + @Override + protected AggregationBuilder createAggBuilderForTypeTest(MappedFieldType fieldType, String fieldName) { + return new PercentilesAggregationBuilder("hdr_percentiles") + .field(fieldName) + .percentilesConfig(new PercentilesConfig.Hdr()); + } + + @Override + protected List getSupportedValuesSourceTypes() { + return List.of(CoreValuesSourceType.NUMERIC); + } + public void testNoDocs() throws IOException { testCase(new MatchAllDocsQuery(), iw -> { // Intentionally not writing any docs diff --git 
a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/PercentilesTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/PercentilesTests.java index c18fa664ffd99..5d1d9f1353d32 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/PercentilesTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/PercentilesTests.java @@ -78,6 +78,12 @@ public void testOutOfRangePercentilesThrows() throws IOException { assertEquals("percent must be in [0,100], got [104.0]: [testAgg]", ex.getMessage()); } + public void testDuplicatePercentilesThrows() throws IOException { + PercentilesAggregationBuilder builder = new PercentilesAggregationBuilder("testAgg"); + IllegalArgumentException ex = expectThrows(IllegalArgumentException.class, () -> builder.percentiles(5, 42, 10, 99, 42, 87)); + assertEquals("percent [42.0] has been specified twice: [testAgg]", ex.getMessage()); + } + public void testExceptionMultipleMethods() throws IOException { final String illegalAgg = "{\n" + " \"percentiles\": {\n" + diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/TDigestPercentileRanksAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/TDigestPercentileRanksAggregatorTests.java index 2541583e94580..420c9b8cecbcf 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/TDigestPercentileRanksAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/TDigestPercentileRanksAggregatorTests.java @@ -30,20 +30,32 @@ import org.apache.lucene.util.NumericUtils; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.NumberFieldMapper; +import org.elasticsearch.search.aggregations.AggregationBuilder; import org.elasticsearch.search.aggregations.AggregatorTestCase; -import org.elasticsearch.search.aggregations.metrics.Percentile; -import 
org.elasticsearch.search.aggregations.metrics.PercentileRanks; -import org.elasticsearch.search.aggregations.metrics.PercentileRanksAggregationBuilder; -import org.elasticsearch.search.aggregations.metrics.PercentilesMethod; import org.elasticsearch.search.aggregations.support.AggregationInspectionHelper; +import org.elasticsearch.search.aggregations.support.CoreValuesSourceType; +import org.elasticsearch.search.aggregations.support.ValuesSourceType; import org.hamcrest.Matchers; import java.io.IOException; import java.util.Iterator; +import java.util.List; public class TDigestPercentileRanksAggregatorTests extends AggregatorTestCase { + @Override + protected AggregationBuilder createAggBuilderForTypeTest(MappedFieldType fieldType, String fieldName) { + return new PercentileRanksAggregationBuilder("tdigest_ranks", new double[]{0.1, 0.5, 12}) + .field(fieldName) + .percentilesConfig(new PercentilesConfig.TDigest()); + } + + @Override + protected List getSupportedValuesSourceTypes() { + return List.of(CoreValuesSourceType.NUMERIC); + } + public void testEmpty() throws IOException { PercentileRanksAggregationBuilder aggBuilder = new PercentileRanksAggregationBuilder("my_agg", new double[]{0.5}) .field("field") diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/TDigestPercentilesAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/TDigestPercentilesAggregatorTests.java index aa65ae6f28bdd..ea2aca16aa8a2 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/TDigestPercentilesAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/TDigestPercentilesAggregatorTests.java @@ -33,10 +33,14 @@ import org.elasticsearch.common.CheckedConsumer; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.NumberFieldMapper; +import org.elasticsearch.search.aggregations.AggregationBuilder; import 
org.elasticsearch.search.aggregations.AggregatorTestCase; import org.elasticsearch.search.aggregations.support.AggregationInspectionHelper; +import org.elasticsearch.search.aggregations.support.CoreValuesSourceType; +import org.elasticsearch.search.aggregations.support.ValuesSourceType; import java.io.IOException; +import java.util.List; import java.util.function.Consumer; import static java.util.Arrays.asList; @@ -46,6 +50,18 @@ public class TDigestPercentilesAggregatorTests extends AggregatorTestCase { + @Override + protected AggregationBuilder createAggBuilderForTypeTest(MappedFieldType fieldType, String fieldName) { + return new PercentilesAggregationBuilder("tdist_percentiles") + .field(fieldName) + .percentilesConfig(new PercentilesConfig.TDigest()); + } + + @Override + protected List getSupportedValuesSourceTypes() { + return List.of(CoreValuesSourceType.NUMERIC); + } + public void testNoDocs() throws IOException { testCase(new MatchAllDocsQuery(), iw -> { // Intentionally not writing any docs diff --git a/server/src/test/java/org/elasticsearch/search/collapse/CollapseBuilderTests.java b/server/src/test/java/org/elasticsearch/search/collapse/CollapseBuilderTests.java index 11257e424634f..5956306fcad75 100644 --- a/server/src/test/java/org/elasticsearch/search/collapse/CollapseBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/search/collapse/CollapseBuilderTests.java @@ -24,8 +24,8 @@ import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.IndexWriter; import org.apache.lucene.search.Query; +import org.apache.lucene.store.ByteBuffersDirectory; import org.apache.lucene.store.Directory; -import org.apache.lucene.store.RAMDirectory; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.settings.Settings; @@ -137,7 +137,7 @@ protected NamedXContentRegistry xContentRegistry() { } public void testBuild() throws IOException { - Directory 
dir = new RAMDirectory(); + Directory dir = new ByteBuffersDirectory(); try (IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())))) { writer.commit(); } diff --git a/server/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightBuilderTests.java b/server/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightBuilderTests.java index 31a2de6b3d821..c1bef65f18cd1 100644 --- a/server/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightBuilderTests.java @@ -291,6 +291,7 @@ public MappedFieldType fieldMapper(String name) { for (int runs = 0; runs < NUMBER_OF_TESTBUILDERS; runs++) { HighlightBuilder highlightBuilder = randomHighlighterBuilder(); + highlightBuilder = Rewriteable.rewrite(highlightBuilder, mockShardContext); SearchContextHighlight highlight = highlightBuilder.build(mockShardContext); for (SearchContextHighlight.Field field : highlight.fields()) { String encoder = highlightBuilder.encoder() != null ? 
highlightBuilder.encoder() : HighlightBuilder.DEFAULT_ENCODER; diff --git a/server/src/test/java/org/elasticsearch/search/slice/SliceBuilderTests.java b/server/src/test/java/org/elasticsearch/search/slice/SliceBuilderTests.java index 59b2fc6059a0c..c0054b04f095c 100644 --- a/server/src/test/java/org/elasticsearch/search/slice/SliceBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/search/slice/SliceBuilderTests.java @@ -27,8 +27,8 @@ import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.search.MatchNoDocsQuery; import org.apache.lucene.search.Query; +import org.apache.lucene.store.ByteBuffersDirectory; import org.apache.lucene.store.Directory; -import org.apache.lucene.store.RAMDirectory; import org.elasticsearch.Version; import org.elasticsearch.action.OriginalIndices; import org.elasticsearch.action.search.SearchRequest; @@ -204,7 +204,7 @@ public void testInvalidArguments() throws Exception { } public void testToFilterSimple() throws IOException { - Directory dir = new RAMDirectory(); + Directory dir = new ByteBuffersDirectory(); try (IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())))) { writer.commit(); } @@ -224,7 +224,7 @@ public void testToFilterSimple() throws IOException { } public void testToFilterRandom() throws IOException { - Directory dir = new RAMDirectory(); + Directory dir = new ByteBuffersDirectory(); try (IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())))) { writer.commit(); } @@ -307,7 +307,7 @@ public void testToFilterRandom() throws IOException { } public void testInvalidField() throws IOException { - Directory dir = new RAMDirectory(); + Directory dir = new ByteBuffersDirectory(); try (IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())))) { writer.commit(); } @@ -321,7 +321,7 @@ public void testInvalidField() throws IOException { } public void testToFilterWithRouting() throws IOException { 
- Directory dir = new RAMDirectory(); + Directory dir = new ByteBuffersDirectory(); try (IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())))) { writer.commit(); } diff --git a/server/src/test/java/org/elasticsearch/search/sort/AbstractSortTestCase.java b/server/src/test/java/org/elasticsearch/search/sort/AbstractSortTestCase.java index 2ec4038529972..eda4871e32362 100644 --- a/server/src/test/java/org/elasticsearch/search/sort/AbstractSortTestCase.java +++ b/server/src/test/java/org/elasticsearch/search/sort/AbstractSortTestCase.java @@ -48,6 +48,7 @@ import org.elasticsearch.index.query.MatchAllQueryBuilder; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.QueryShardContext; +import org.elasticsearch.index.query.Rewriteable; import org.elasticsearch.index.query.TermQueryBuilder; import org.elasticsearch.script.MockScriptEngine; import org.elasticsearch.script.ScriptEngine; @@ -154,7 +155,8 @@ public void testBuildSortField() throws IOException { QueryShardContext mockShardContext = createMockShardContext(); for (int runs = 0; runs < NUMBER_OF_TESTBUILDERS; runs++) { T sortBuilder = createTestItem(); - SortFieldAndFormat sortField = sortBuilder.build(mockShardContext); + SortFieldAndFormat sortField = Rewriteable.rewrite(sortBuilder, mockShardContext) + .build(mockShardContext); sortFieldAssertions(sortBuilder, sortField.field, sortField.format); } } diff --git a/server/src/test/java/org/elasticsearch/search/suggest/phrase/NoisyChannelSpellCheckerTests.java b/server/src/test/java/org/elasticsearch/search/suggest/phrase/NoisyChannelSpellCheckerTests.java index 94c5bf329eb74..f7a0827affa7e 100644 --- a/server/src/test/java/org/elasticsearch/search/suggest/phrase/NoisyChannelSpellCheckerTests.java +++ b/server/src/test/java/org/elasticsearch/search/suggest/phrase/NoisyChannelSpellCheckerTests.java @@ -41,8 +41,8 @@ import org.apache.lucene.index.MultiTerms; import 
org.apache.lucene.search.spell.DirectSpellChecker; import org.apache.lucene.search.spell.SuggestMode; +import org.apache.lucene.store.ByteBuffersDirectory; import org.apache.lucene.store.Directory; -import org.apache.lucene.store.RAMDirectory; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.CharsRefBuilder; import org.elasticsearch.search.suggest.phrase.NoisyChannelSpellChecker.Result; @@ -65,7 +65,7 @@ public class NoisyChannelSpellCheckerTests extends ESTestCase { private final BytesRef postTag = new BytesRef(""); public void testNgram() throws IOException { - RAMDirectory dir = new RAMDirectory(); + Directory dir = new ByteBuffersDirectory(); Map mapping = new HashMap<>(); mapping.put("body_ngram", new Analyzer() { @@ -226,7 +226,7 @@ protected TokenStreamComponents createComponents(String fieldName) { } public void testMultiGenerator() throws IOException { - RAMDirectory dir = new RAMDirectory(); + Directory dir = new ByteBuffersDirectory(); Map mapping = new HashMap<>(); mapping.put("body_ngram", new Analyzer() { @@ -343,7 +343,7 @@ protected TokenStreamComponents createComponents(String fieldName) { } public void testTrigram() throws IOException { - RAMDirectory dir = new RAMDirectory(); + Directory dir = new ByteBuffersDirectory(); Map mapping = new HashMap<>(); mapping.put("body_ngram", new Analyzer() { diff --git a/server/src/test/java/org/elasticsearch/search/suggest/phrase/SmoothingModelTestCase.java b/server/src/test/java/org/elasticsearch/search/suggest/phrase/SmoothingModelTestCase.java index a65c75817a816..39220c5eaa879 100644 --- a/server/src/test/java/org/elasticsearch/search/suggest/phrase/SmoothingModelTestCase.java +++ b/server/src/test/java/org/elasticsearch/search/suggest/phrase/SmoothingModelTestCase.java @@ -29,7 +29,7 @@ import org.apache.lucene.index.IndexWriter; import org.apache.lucene.index.IndexWriterConfig; import org.apache.lucene.index.MultiTerms; -import org.apache.lucene.store.RAMDirectory; +import 
org.apache.lucene.store.ByteBuffersDirectory; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.lucene.BytesRefs; import org.elasticsearch.common.xcontent.ToXContent; @@ -112,7 +112,7 @@ public void testBuildWordScorer() throws IOException { Map mapping = new HashMap<>(); mapping.put("field", new WhitespaceAnalyzer()); PerFieldAnalyzerWrapper wrapper = new PerFieldAnalyzerWrapper(new WhitespaceAnalyzer(), mapping); - IndexWriter writer = new IndexWriter(new RAMDirectory(), new IndexWriterConfig(wrapper)); + IndexWriter writer = new IndexWriter(new ByteBuffersDirectory(), new IndexWriterConfig(wrapper)); Document doc = new Document(); doc.add(new Field("field", "someText", TextField.TYPE_NOT_STORED)); writer.addDocument(doc); diff --git a/server/src/test/java/org/elasticsearch/transport/ProxyConnectionStrategyTests.java b/server/src/test/java/org/elasticsearch/transport/ProxyConnectionStrategyTests.java index 47ba6b17205d1..ee9d98d07b3ec 100644 --- a/server/src/test/java/org/elasticsearch/transport/ProxyConnectionStrategyTests.java +++ b/server/src/test/java/org/elasticsearch/transport/ProxyConnectionStrategyTests.java @@ -291,7 +291,7 @@ public void testProxyStrategyWillNeedToBeRebuiltIfNumOfSocketsOrAddressesChange( Setting modeSetting = RemoteConnectionStrategy.REMOTE_CONNECTION_MODE .getConcreteSettingForNamespace("cluster-alias"); - Setting addressesSetting = ProxyConnectionStrategy.REMOTE_CLUSTER_ADDRESSES + Setting addressesSetting = ProxyConnectionStrategy.PROXY_ADDRESS .getConcreteSettingForNamespace("cluster-alias"); Setting socketConnections = ProxyConnectionStrategy.REMOTE_SOCKET_CONNECTIONS .getConcreteSettingForNamespace("cluster-alias"); @@ -320,7 +320,7 @@ public void testProxyStrategyWillNeedToBeRebuiltIfNumOfSocketsOrAddressesChange( public void testModeSettingsCannotBeUsedWhenInDifferentMode() { List, String>> restrictedSettings = Arrays.asList( - new 
Tuple<>(ProxyConnectionStrategy.REMOTE_CLUSTER_ADDRESSES, "192.168.0.1:8080"), + new Tuple<>(ProxyConnectionStrategy.PROXY_ADDRESS, "192.168.0.1:8080"), new Tuple<>(ProxyConnectionStrategy.REMOTE_SOCKET_CONNECTIONS, "3")); RemoteConnectionStrategy.ConnectionStrategy sniff = RemoteConnectionStrategy.ConnectionStrategy.SNIFF; diff --git a/server/src/test/java/org/elasticsearch/transport/RemoteClusterConnectionTests.java b/server/src/test/java/org/elasticsearch/transport/RemoteClusterConnectionTests.java index bda5fe58d6571..f8a4fe4b9429b 100644 --- a/server/src/test/java/org/elasticsearch/transport/RemoteClusterConnectionTests.java +++ b/server/src/test/java/org/elasticsearch/transport/RemoteClusterConnectionTests.java @@ -590,7 +590,7 @@ private Settings buildRandomSettings(String clusterAlias, List addresses private static Settings buildProxySettings(String clusterAlias, List addresses) { Settings.Builder builder = Settings.builder(); - builder.put(ProxyConnectionStrategy.REMOTE_CLUSTER_ADDRESSES.getConcreteSettingForNamespace(clusterAlias).getKey(), + builder.put(ProxyConnectionStrategy.PROXY_ADDRESS.getConcreteSettingForNamespace(clusterAlias).getKey(), addresses.get(0)); builder.put(RemoteConnectionStrategy.REMOTE_CONNECTION_MODE.getConcreteSettingForNamespace(clusterAlias).getKey(), "proxy"); return builder.build(); diff --git a/server/src/test/java/org/elasticsearch/transport/RemoteClusterServiceTests.java b/server/src/test/java/org/elasticsearch/transport/RemoteClusterServiceTests.java index e0e9b69ae29f8..9b35f8367914c 100644 --- a/server/src/test/java/org/elasticsearch/transport/RemoteClusterServiceTests.java +++ b/server/src/test/java/org/elasticsearch/transport/RemoteClusterServiceTests.java @@ -85,7 +85,7 @@ public void testSettingsAreRegistered() { assertTrue(ClusterSettings.BUILT_IN_CLUSTER_SETTINGS.contains(SniffConnectionStrategy.REMOTE_CONNECTIONS_PER_CLUSTER)); 
assertTrue(ClusterSettings.BUILT_IN_CLUSTER_SETTINGS.contains(SniffConnectionStrategy.REMOTE_CLUSTER_SEEDS)); assertTrue(ClusterSettings.BUILT_IN_CLUSTER_SETTINGS.contains(SniffConnectionStrategy.REMOTE_NODE_CONNECTIONS)); - assertTrue(ClusterSettings.BUILT_IN_CLUSTER_SETTINGS.contains(ProxyConnectionStrategy.REMOTE_CLUSTER_ADDRESSES)); + assertTrue(ClusterSettings.BUILT_IN_CLUSTER_SETTINGS.contains(ProxyConnectionStrategy.PROXY_ADDRESS)); assertTrue(ClusterSettings.BUILT_IN_CLUSTER_SETTINGS.contains(ProxyConnectionStrategy.REMOTE_SOCKET_CONNECTIONS)); } @@ -664,40 +664,72 @@ public void onFailure(Exception e) { public void testRemoteClusterSkipIfDisconnectedSetting() { { Settings settings = Settings.builder() - .put("cluster.remote.foo.skip_unavailable", true) - .put("cluster.remote.bar.skip_unavailable", false).build(); + .put("cluster.remote.foo.seeds", "127.0.0.1:9300") + .put("cluster.remote.foo.skip_unavailable", true).build(); RemoteClusterService.REMOTE_CLUSTER_SKIP_UNAVAILABLE.getAllConcreteSettings(settings).forEach(setting -> setting.get(settings)); } { - Settings brokenSettings = Settings.builder() + Settings brokenSettingsDependency = Settings.builder() + .put("cluster.remote.foo.skip_unavailable", true).build(); + IllegalArgumentException iae = expectThrows(IllegalArgumentException.class, () -> + RemoteClusterService.REMOTE_CLUSTER_SKIP_UNAVAILABLE.getAllConcreteSettings(brokenSettingsDependency) + .forEach(setting -> setting.get(brokenSettingsDependency))); + assertEquals("Cannot configure setting [cluster.remote.foo.skip_unavailable] if remote cluster is not enabled.", + iae.getMessage()); + } + { + Settings brokenSettingsType = Settings.builder() .put("cluster.remote.foo.skip_unavailable", "broken").build(); - expectThrows(IllegalArgumentException.class, () -> - RemoteClusterService.REMOTE_CLUSTER_SKIP_UNAVAILABLE.getAllConcreteSettings(brokenSettings) - .forEach(setting -> setting.get(brokenSettings))); + IllegalArgumentException iae = 
expectThrows(IllegalArgumentException.class, () -> + RemoteClusterService.REMOTE_CLUSTER_SKIP_UNAVAILABLE.getAllConcreteSettings(brokenSettingsType) + .forEach(setting -> setting.get(brokenSettingsType))); + } + + { + Settings settings = Settings.builder() + .put("cluster.remote.foo.mode", "proxy") + .put("cluster.remote.foo.proxy_address", "127.0.0.1:9300") + .put("cluster.remote.foo.transport.ping_schedule", "5s").build(); + RemoteClusterService.REMOTE_CLUSTER_PING_SCHEDULE.getAllConcreteSettings(settings).forEach(setting -> setting.get(settings)); + } + { + Settings brokenSettingsDependency = Settings.builder() + .put("cluster.remote.foo.proxy_address", "127.0.0.1:9300") + .put("cluster.remote.foo.transport.ping_schedule", "5s").build(); + IllegalArgumentException iae = expectThrows(IllegalArgumentException.class, () -> + RemoteClusterService.REMOTE_CLUSTER_PING_SCHEDULE.getAllConcreteSettings(brokenSettingsDependency) + .forEach(setting -> setting.get(brokenSettingsDependency))); + assertEquals("Cannot configure setting [cluster.remote.foo.transport.ping_schedule] if remote cluster is not enabled.", + iae.getMessage()); + } + + { + Settings settings = Settings.builder() + .put("cluster.remote.foo.seeds", "127.0.0.1:9300") + .put("cluster.remote.foo.transport.compress", false).build(); + RemoteClusterService.REMOTE_CLUSTER_COMPRESS.getAllConcreteSettings(settings).forEach(setting -> setting.get(settings)); + } + { + Settings brokenSettingsDependency = Settings.builder() + .put("cluster.remote.foo.proxy_address", "127.0.0.1:9300") + .put("cluster.remote.foo.transport.compress", true).build(); + IllegalArgumentException iae = expectThrows(IllegalArgumentException.class, () -> + RemoteClusterService.REMOTE_CLUSTER_COMPRESS.getAllConcreteSettings(brokenSettingsDependency) + .forEach(setting -> setting.get(brokenSettingsDependency))); + assertEquals("Cannot configure setting [cluster.remote.foo.transport.compress] if remote cluster is not enabled.", + 
iae.getMessage()); } AbstractScopedSettings service = new ClusterSettings(Settings.EMPTY, new HashSet<>(Arrays.asList(SniffConnectionStrategy.REMOTE_CLUSTER_SEEDS, RemoteClusterService.REMOTE_CLUSTER_SKIP_UNAVAILABLE))); { - Settings settings = Settings.builder().put("cluster.remote.foo.skip_unavailable", randomBoolean()).build(); - IllegalArgumentException iae = expectThrows(IllegalArgumentException.class, () -> service.validate(settings, true)); - assertEquals("missing required setting [cluster.remote.foo.seeds] for setting [cluster.remote.foo.skip_unavailable]", + Settings brokenSettingsDependency = Settings.builder().put("cluster.remote.foo.skip_unavailable", randomBoolean()).build(); + IllegalArgumentException iae = expectThrows(IllegalArgumentException.class, + () -> service.validate(brokenSettingsDependency, true)); + assertEquals("Cannot configure setting [cluster.remote.foo.skip_unavailable] if remote cluster is not enabled.", iae.getMessage()); } - { - try (MockTransportService remoteSeedTransport = startTransport("seed", new CopyOnWriteArrayList<>(), Version.CURRENT)) { - String seed = remoteSeedTransport.getLocalDiscoNode().getAddress().toString(); - service.validate(Settings.builder().put("cluster.remote.foo.skip_unavailable", randomBoolean()) - .put("cluster.remote.foo.seeds", seed).build(), true); - service.validate(Settings.builder().put("cluster.remote.foo.seeds", seed).build(), true); - - AbstractScopedSettings service2 = new ClusterSettings(Settings.builder().put("cluster.remote.foo.seeds", seed).build(), - new HashSet<>(Arrays.asList(SniffConnectionStrategy.REMOTE_CLUSTER_SEEDS, - RemoteClusterService.REMOTE_CLUSTER_SKIP_UNAVAILABLE))); - service2.validate(Settings.builder().put("cluster.remote.foo.skip_unavailable", randomBoolean()).build(), false); - } - } } public void testReconnectWhenStrategySettingsUpdated() throws Exception { diff --git a/server/src/test/java/org/elasticsearch/transport/RemoteConnectionStrategyTests.java 
b/server/src/test/java/org/elasticsearch/transport/RemoteConnectionStrategyTests.java index c9ce9a4c9a6df..cb4a4fb7189bf 100644 --- a/server/src/test/java/org/elasticsearch/transport/RemoteConnectionStrategyTests.java +++ b/server/src/test/java/org/elasticsearch/transport/RemoteConnectionStrategyTests.java @@ -36,6 +36,7 @@ public void testStrategyChangeMeansThatStrategyMustBeRebuilt() { RemoteConnectionStrategy.ConnectionStrategy.PROXY); Settings newSettings = Settings.builder() .put(RemoteConnectionStrategy.REMOTE_CONNECTION_MODE.getConcreteSettingForNamespace("cluster-alias").getKey(), "sniff") + .put(SniffConnectionStrategy.REMOTE_CLUSTER_SEEDS.getConcreteSettingForNamespace("cluster-alias").getKey(), "127.0.0.1:9300") .build(); assertTrue(first.shouldRebuildConnection(newSettings)); } @@ -47,6 +48,7 @@ public void testSameStrategyChangeMeansThatStrategyDoesNotNeedToBeRebuilt() { RemoteConnectionStrategy.ConnectionStrategy.PROXY); Settings newSettings = Settings.builder() .put(RemoteConnectionStrategy.REMOTE_CONNECTION_MODE.getConcreteSettingForNamespace("cluster-alias").getKey(), "proxy") + .put(ProxyConnectionStrategy.PROXY_ADDRESS.getConcreteSettingForNamespace("cluster-alias").getKey(), "127.0.0.1:9300") .build(); assertFalse(first.shouldRebuildConnection(newSettings)); } @@ -61,6 +63,7 @@ public void testChangeInConnectionProfileMeansTheStrategyMustBeRebuilt() { Settings.Builder newBuilder = Settings.builder(); newBuilder.put(RemoteConnectionStrategy.REMOTE_CONNECTION_MODE.getConcreteSettingForNamespace("cluster-alias").getKey(), "proxy"); + newBuilder.put(ProxyConnectionStrategy.PROXY_ADDRESS.getConcreteSettingForNamespace("cluster-alias").getKey(), "127.0.0.1:9300"); if (randomBoolean()) { newBuilder.put(RemoteClusterService.REMOTE_CLUSTER_PING_SCHEDULE.getConcreteSettingForNamespace("cluster-alias").getKey(), TimeValue.timeValueSeconds(5)); diff --git a/test/fixtures/gcs-fixture/src/main/java/fixture/gcs/FakeOAuth2HttpHandler.java 
b/test/fixtures/gcs-fixture/src/main/java/fixture/gcs/FakeOAuth2HttpHandler.java index 7dcaaf16f4a37..c49ae19dba0b5 100644 --- a/test/fixtures/gcs-fixture/src/main/java/fixture/gcs/FakeOAuth2HttpHandler.java +++ b/test/fixtures/gcs-fixture/src/main/java/fixture/gcs/FakeOAuth2HttpHandler.java @@ -35,11 +35,11 @@ public class FakeOAuth2HttpHandler implements HttpHandler { @Override public void handle(final HttpExchange exchange) throws IOException { try { + while (exchange.getRequestBody().read(BUFFER) >= 0) ; byte[] response = ("{\"access_token\":\"foo\",\"token_type\":\"Bearer\",\"expires_in\":3600}").getBytes(UTF_8); exchange.getResponseHeaders().add("Content-Type", "application/json"); exchange.sendResponseHeaders(RestStatus.OK.getStatus(), response.length); exchange.getResponseBody().write(response); - while (exchange.getRequestBody().read(BUFFER) >= 0) ; } finally { int read = exchange.getRequestBody().read(); assert read == -1 : "Request body should have been fully read here but saw [" + read + "]"; diff --git a/test/fixtures/gcs-fixture/src/main/java/fixture/gcs/GoogleCloudStorageHttpHandler.java b/test/fixtures/gcs-fixture/src/main/java/fixture/gcs/GoogleCloudStorageHttpHandler.java index 944f025d6e3f5..e96a3db83133d 100644 --- a/test/fixtures/gcs-fixture/src/main/java/fixture/gcs/GoogleCloudStorageHttpHandler.java +++ b/test/fixtures/gcs-fixture/src/main/java/fixture/gcs/GoogleCloudStorageHttpHandler.java @@ -142,9 +142,14 @@ public void handle(final HttpExchange exchange) throws IOException { if (matcher.find() == false) { throw new AssertionError("Range bytes header does not match expected format: " + range); } - - BytesReference response = Integer.parseInt(matcher.group(1)) == 0 ? 
blob : BytesArray.EMPTY; + final int offset = Integer.parseInt(matcher.group(1)); + final int end = Integer.parseInt(matcher.group(2)); + BytesReference response = blob; exchange.getResponseHeaders().add("Content-Type", "application/octet-stream"); + final int bufferedLength = response.length(); + if (offset > 0 || bufferedLength > end) { + response = response.slice(offset, Math.min(end + 1 - offset, bufferedLength - offset)); + } exchange.sendResponseHeaders(RestStatus.OK.getStatus(), response.length()); response.writeTo(exchange.getResponseBody()); } else { diff --git a/test/framework/src/main/java/org/elasticsearch/search/aggregations/AggregatorTestCase.java b/test/framework/src/main/java/org/elasticsearch/search/aggregations/AggregatorTestCase.java index 7d0913f3fee0a..2a82d831ee73e 100644 --- a/test/framework/src/main/java/org/elasticsearch/search/aggregations/AggregatorTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/search/aggregations/AggregatorTestCase.java @@ -19,6 +19,7 @@ package org.elasticsearch.search.aggregations; import org.apache.lucene.document.BinaryDocValuesField; +import org.apache.lucene.document.HalfFloatPoint; import org.apache.lucene.document.InetAddressPoint; import org.apache.lucene.document.LatLonDocValuesField; import org.apache.lucene.document.SortedNumericDocValuesField; @@ -41,6 +42,7 @@ import org.apache.lucene.search.Weight; import org.apache.lucene.store.Directory; import org.apache.lucene.util.BytesRef; +import org.apache.lucene.util.NumericUtils; import org.elasticsearch.Version; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.breaker.CircuitBreaker; @@ -74,6 +76,7 @@ import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.index.mapper.Mapper.BuilderContext; import org.elasticsearch.index.mapper.MapperService; +import org.elasticsearch.index.mapper.NumberFieldMapper; import org.elasticsearch.index.mapper.ObjectMapper; import 
org.elasticsearch.index.mapper.ObjectMapper.Nested; import org.elasticsearch.index.mapper.RangeFieldMapper; @@ -205,7 +208,9 @@ protected A createAggregator(Query query, MappedFieldType... fieldTypes) throws IOException { SearchContext searchContext = createSearchContext(indexSearcher, indexSettings, query, bucketConsumer, fieldTypes); @SuppressWarnings("unchecked") - A aggregator = (A) aggregationBuilder.build(searchContext.getQueryShardContext(), null) + A aggregator = (A) aggregationBuilder + .rewrite(searchContext.getQueryShardContext()) + .build(searchContext.getQueryShardContext(), null) .create(searchContext, null, true); return aggregator; } @@ -605,8 +610,7 @@ protected AggregationBuilder createAggBuilderForTypeTest(MappedFieldType fieldTy * * Exception types/messages are not currently checked, just presence/absence of an exception. */ - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/52681") - public void testSupportedFieldTypes() throws IOException { + public final void testSupportedFieldTypes() throws IOException { MapperRegistry mapperRegistry = new IndicesModule(Collections.emptyList()).getMapperRegistry(); Settings settings = Settings.builder().put("index.version.created", Version.CURRENT.id).build(); String fieldName = "typeTestFieldName"; @@ -647,7 +651,7 @@ public void testSupportedFieldTypes() throws IOException { indexWriter.close(); try (IndexReader indexReader = DirectoryReader.open(directory)) { - IndexSearcher indexSearcher = newSearcher(indexReader, true, true); + IndexSearcher indexSearcher = newIndexSearcher(indexReader); AggregationBuilder aggregationBuilder = createAggBuilderForTypeTest(fieldType, fieldName); // TODO in the future we can make this more explicit with expectThrows(), when the exceptions are standardized @@ -676,67 +680,78 @@ public void testSupportedFieldTypes() throws IOException { */ private void writeTestDoc(MappedFieldType fieldType, String fieldName, RandomIndexWriter iw) throws IOException { - 
if (fieldType.getValuesSourceType().equals(CoreValuesSourceType.NUMERIC)) { + String typeName = fieldType.typeName(); + ValuesSourceType vst = fieldType.getValuesSourceType(); + + if (vst.equals(CoreValuesSourceType.NUMERIC)) { // TODO note: once VS refactor adds DATE/BOOLEAN, this conditional will go away - if (fieldType.typeName().equals(DateFieldMapper.CONTENT_TYPE) - || fieldType.typeName().equals(DateFieldMapper.DATE_NANOS_CONTENT_TYPE)) { + if (typeName.equals(DateFieldMapper.CONTENT_TYPE) || typeName.equals(DateFieldMapper.DATE_NANOS_CONTENT_TYPE)) { iw.addDocument(singleton(new SortedNumericDocValuesField(fieldName, randomNonNegativeLong()))); - } else if (fieldType.typeName().equals(BooleanFieldMapper.CONTENT_TYPE)) { + } else if (typeName.equals(BooleanFieldMapper.CONTENT_TYPE)) { iw.addDocument(singleton(new SortedNumericDocValuesField(fieldName, randomBoolean() ? 0 : 1))); + } else if (typeName.equals(NumberFieldMapper.NumberType.DOUBLE.typeName())) { + long encoded = NumericUtils.doubleToSortableLong(Math.abs(randomDouble())); + iw.addDocument(singleton(new SortedNumericDocValuesField(fieldName, encoded))); + } else if (typeName.equals(NumberFieldMapper.NumberType.FLOAT.typeName())) { + long encoded = NumericUtils.floatToSortableInt(Math.abs(randomFloat())); + iw.addDocument(singleton(new SortedNumericDocValuesField(fieldName, encoded))); + } else if (typeName.equals(NumberFieldMapper.NumberType.HALF_FLOAT.typeName())) { + long encoded = HalfFloatPoint.halfFloatToSortableShort(Math.abs(randomFloat())); + iw.addDocument(singleton(new SortedNumericDocValuesField(fieldName, encoded))); } else { - iw.addDocument(singleton(new SortedNumericDocValuesField(fieldName, randomLong()))); + iw.addDocument(singleton(new SortedNumericDocValuesField(fieldName, randomNonNegativeLong()))); } - } else if (fieldType.getValuesSourceType().equals(CoreValuesSourceType.BYTES)) { - if (fieldType.typeName().equals(BinaryFieldMapper.CONTENT_TYPE)) { + } else if 
(vst.equals(CoreValuesSourceType.BYTES)) { + if (typeName.equals(BinaryFieldMapper.CONTENT_TYPE)) { iw.addDocument(singleton(new BinaryFieldMapper.CustomBinaryDocValuesField(fieldName, new BytesRef("a").bytes))); - } else if (fieldType.typeName().equals(IpFieldMapper.CONTENT_TYPE)) { + } else if (typeName.equals(IpFieldMapper.CONTENT_TYPE)) { // TODO note: once VS refactor adds IP, this conditional will go away boolean v4 = randomBoolean(); iw.addDocument(singleton(new SortedSetDocValuesField(fieldName, new BytesRef(InetAddressPoint.encode(randomIp(v4)))))); } else { iw.addDocument(singleton(new SortedSetDocValuesField(fieldName, new BytesRef("a")))); } - } else if (fieldType.getValuesSourceType().equals(CoreValuesSourceType.RANGE)) { + } else if (vst.equals(CoreValuesSourceType.RANGE)) { Object start; Object end; RangeType rangeType; - if (fieldType.typeName().equals(RangeType.DOUBLE.typeName())) { + if (typeName.equals(RangeType.DOUBLE.typeName())) { start = randomDouble(); end = RangeType.DOUBLE.nextUp(start); rangeType = RangeType.DOUBLE; - } else if (fieldType.typeName().equals(RangeType.FLOAT.typeName())) { + } else if (typeName.equals(RangeType.FLOAT.typeName())) { start = randomFloat(); end = RangeType.FLOAT.nextUp(start); rangeType = RangeType.DOUBLE; - } else if (fieldType.typeName().equals(RangeType.IP.typeName())) { + } else if (typeName.equals(RangeType.IP.typeName())) { boolean v4 = randomBoolean(); start = randomIp(v4); end = RangeType.IP.nextUp(start); rangeType = RangeType.IP; - } else if (fieldType.typeName().equals(RangeType.LONG.typeName())) { + } else if (typeName.equals(RangeType.LONG.typeName())) { start = randomLong(); end = RangeType.LONG.nextUp(start); rangeType = RangeType.LONG; - } else if (fieldType.typeName().equals(RangeType.INTEGER.typeName())) { + } else if (typeName.equals(RangeType.INTEGER.typeName())) { start = randomInt(); end = RangeType.INTEGER.nextUp(start); rangeType = RangeType.INTEGER; - } else if 
(fieldType.typeName().equals(RangeType.DATE.typeName())) { + } else if (typeName.equals(RangeType.DATE.typeName())) { start = randomNonNegativeLong(); end = RangeType.DATE.nextUp(start); rangeType = RangeType.DATE; } else { - throw new IllegalStateException("Unknown type of range [" + fieldType.typeName() + "]"); + throw new IllegalStateException("Unknown type of range [" + typeName + "]"); } final RangeFieldMapper.Range range = new RangeFieldMapper.Range(rangeType, start, end, true, true); iw.addDocument(singleton(new BinaryDocValuesField(fieldName, rangeType.encodeRanges(Collections.singleton(range))))); - } else if (fieldType.getValuesSourceType().equals(CoreValuesSourceType.GEOPOINT)) { + } else if (vst.equals(CoreValuesSourceType.GEOPOINT)) { iw.addDocument(singleton(new LatLonDocValuesField(fieldName, randomDouble(), randomDouble()))); } else { - throw new IllegalStateException("Unknown field type [" + fieldType.typeName() + "]"); + throw new IllegalStateException("Unknown field type [" + typeName + "]"); } } diff --git a/test/framework/src/main/java/org/elasticsearch/test/AbstractQueryTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/AbstractQueryTestCase.java index ba06640b41342..cfe7f7e274a8e 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/AbstractQueryTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/AbstractQueryTestCase.java @@ -22,6 +22,7 @@ import com.fasterxml.jackson.core.io.JsonStringEncoder; import org.apache.lucene.search.BoostQuery; +import org.apache.lucene.search.MatchNoDocsQuery; import org.apache.lucene.search.Query; import org.apache.lucene.search.TermQuery; import org.apache.lucene.search.spans.SpanBoostQuery; @@ -453,7 +454,7 @@ public void testToQuery() throws IOException { rewrite(secondLuceneQuery), rewrite(firstLuceneQuery)); } - if (supportsBoost()) { + if (supportsBoost() && firstLuceneQuery instanceof MatchNoDocsQuery == false) { secondQuery.boost(firstQuery.boost() + 1f + 
randomFloat()); Query thirdLuceneQuery = rewriteQuery(secondQuery, context).toQuery(context); assertNotEquals("modifying the boost doesn't affect the corresponding lucene query", rewrite(firstLuceneQuery), @@ -495,20 +496,23 @@ protected boolean supportsQueryName() { * {@link #doAssertLuceneQuery(AbstractQueryBuilder, Query, QueryShardContext)} for query specific checks. */ private void assertLuceneQuery(QB queryBuilder, Query query, QueryShardContext context) throws IOException { - if (queryBuilder.queryName() != null) { + if (queryBuilder.queryName() != null && query instanceof MatchNoDocsQuery == false) { Query namedQuery = context.copyNamedQueries().get(queryBuilder.queryName()); assertThat(namedQuery, equalTo(query)); } if (query != null) { if (queryBuilder.boost() != AbstractQueryBuilder.DEFAULT_BOOST) { - assertThat(query, either(instanceOf(BoostQuery.class)).or(instanceOf(SpanBoostQuery.class))); + assertThat(query, either(instanceOf(BoostQuery.class)).or(instanceOf(SpanBoostQuery.class)) + .or(instanceOf(MatchNoDocsQuery.class))); if (query instanceof SpanBoostQuery) { SpanBoostQuery spanBoostQuery = (SpanBoostQuery) query; assertThat(spanBoostQuery.getBoost(), equalTo(queryBuilder.boost())); query = spanBoostQuery.getQuery(); - } else { + } else if (query instanceof BoostQuery) { BoostQuery boostQuery = (BoostQuery) query; - assertThat(boostQuery.getBoost(), equalTo(queryBuilder.boost())); + if (boostQuery.getQuery() instanceof MatchNoDocsQuery == false) { + assertThat(boostQuery.getBoost(), equalTo(queryBuilder.boost())); + } query = boostQuery.getQuery(); } } diff --git a/x-pack/docs/build.gradle b/x-pack/docs/build.gradle index 57759b9427bee..c7a82ba62ec3e 100644 --- a/x-pack/docs/build.gradle +++ b/x-pack/docs/build.gradle @@ -1,5 +1,3 @@ -import java.nio.charset.StandardCharsets - apply plugin: 'elasticsearch.docs-test' /* List of files that have snippets that probably should be converted to @@ -21,10 +19,11 @@ dependencies { testCompile 
project(path: xpackProject('plugin').path, configuration: 'testArtifacts') } -// copy xpack rest api -File xpackResources = new File(xpackProject('plugin').projectDir, 'src/test/resources') -project.copyRestSpec.from(xpackResources) { - include 'rest-api-spec/api/**' +restResources { + restApi { + includeCore '*' + includeXpack '*' + } } testClusters.integTest { diff --git a/x-pack/docs/en/security/authentication/file-realm.asciidoc b/x-pack/docs/en/security/authentication/file-realm.asciidoc index f0549a477cd8a..d3528e62b32d2 100644 --- a/x-pack/docs/en/security/authentication/file-realm.asciidoc +++ b/x-pack/docs/en/security/authentication/file-realm.asciidoc @@ -7,17 +7,21 @@ With the `file` realm, users are defined in local files on each node in the clus IMPORTANT: As the administrator of the cluster, it is your responsibility to ensure the same users are defined on every node in the cluster. The {stack} -{security-features} do not deliver any mechanism to guarantee this. +{security-features} do not deliver any mechanism to guarantee this. You should +also be aware that you cannot add or manage users in the `file` realm via the +<> and you cannot add or manage them in {kib} on the +*Management / Security / Users* page -The `file` realm is primarily supported to serve as a fallback/recovery realm. It -is mostly useful in situations where all users locked themselves out of the system -(no one remembers their username/password). In this type of scenarios, the `file` -realm is your only way out - you can define a new `admin` user in the `file` realm -and use it to log in and reset the credentials of all other users. +The `file` realm is very useful as a fallback or recovery realm. For example in cases where +the cluster is unresponsive or the security index is unavailable, or when you forget the +password for your administrative users. 
+In this type of scenario, the `file` realm is a convenient way out - you can +define a new `admin` user in the `file` realm and use it to log in and reset the +credentials of all other users. IMPORTANT: When you configure realms in `elasticsearch.yml`, only the realms you -specify are used for authentication. To use the `file` realm as a fallback, you -must include it in the realm chain. +specify are used for authentication. To use the `file` realm you must explicitly +include it in the realm chain. To define users, the {security-features} provide the <> command-line tool. This tool enables you to add @@ -26,4 +30,4 @@ and remove users, assign user roles, and manage user passwords. [[file-realm-configuration]] ==== Configuring a file realm -include::configuring-file-realm.asciidoc[] \ No newline at end of file +include::configuring-file-realm.asciidoc[] diff --git a/x-pack/docs/en/security/ccs-clients-integrations/monitoring.asciidoc b/x-pack/docs/en/security/ccs-clients-integrations/monitoring.asciidoc index a46767629a003..3e924518799e7 100644 --- a/x-pack/docs/en/security/ccs-clients-integrations/monitoring.asciidoc +++ b/x-pack/docs/en/security/ccs-clients-integrations/monitoring.asciidoc @@ -17,7 +17,7 @@ with the monitoring cluster. For more information, see: -* <> +* <> * {kibana-ref}/monitoring-xpack-kibana.html[Configuring monitoring in {kib}] * {logstash-ref}/configuring-logstash.html[Configuring monitoring for Logstash nodes] diff --git a/x-pack/docs/en/watcher/trigger/schedule/daily.asciidoc b/x-pack/docs/en/watcher/trigger/schedule/daily.asciidoc index 6fb91d2a71253..58359ca037ac8 100644 --- a/x-pack/docs/en/watcher/trigger/schedule/daily.asciidoc +++ b/x-pack/docs/en/watcher/trigger/schedule/daily.asciidoc @@ -63,7 +63,7 @@ For example, the following `daily` schedule triggers once every day at 5:00 PM: "trigger" : { "schedule" : { "daily" : { - "at" { + "at" : { "hour" : 17, "minute" : 0 } @@ -93,4 +93,4 @@ or minutes as an array. 
For example, following `daily` schedule triggers at } } -------------------------------------------------- -// NOTCONSOLE \ No newline at end of file +// NOTCONSOLE diff --git a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/AnalyticsPlugin.java b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/AnalyticsPlugin.java index dc4ad6fa51f9f..694e472fd66bf 100644 --- a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/AnalyticsPlugin.java +++ b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/AnalyticsPlugin.java @@ -56,7 +56,7 @@ public List getPipelineAggregations() { CumulativeCardinalityPipelineAggregationBuilder.NAME, CumulativeCardinalityPipelineAggregationBuilder::new, CumulativeCardinalityPipelineAggregator::new, - (name, p) -> CumulativeCardinalityPipelineAggregationBuilder.PARSER.parse(p, name)) + CumulativeCardinalityPipelineAggregationBuilder.PARSER) ); } diff --git a/x-pack/plugin/autoscaling/qa/build.gradle b/x-pack/plugin/autoscaling/qa/build.gradle index 46609933699ca..e1c6fb4f95bea 100644 --- a/x-pack/plugin/autoscaling/qa/build.gradle +++ b/x-pack/plugin/autoscaling/qa/build.gradle @@ -1,18 +1,6 @@ -import org.elasticsearch.gradle.test.RestIntegTestTask - apply plugin: 'elasticsearch.build' test.enabled = false dependencies { compile project(':test:framework') } - -subprojects { - project.tasks.withType(RestIntegTestTask) { - final File xPackResources = new File(xpackProject('plugin').projectDir, 'src/test/resources') - project.copyRestSpec.from(xPackResources) { - include 'rest-api-spec/api/**' - } - } - -} diff --git a/x-pack/plugin/autoscaling/qa/rest/build.gradle b/x-pack/plugin/autoscaling/qa/rest/build.gradle index 9e297a87ea18d..631c6e950ee1a 100644 --- a/x-pack/plugin/autoscaling/qa/rest/build.gradle +++ b/x-pack/plugin/autoscaling/qa/rest/build.gradle @@ -9,6 +9,12 @@ dependencies { testCompile project(path: xpackModule('autoscaling'), configuration: 
'runtime') } +restResources { + restApi { + includeXpack 'autoscaling' + } +} + task restTest(type: RestIntegTestTask) { mustRunAfter(precommit) } diff --git a/x-pack/plugin/build.gradle b/x-pack/plugin/build.gradle index 85d28fcd65d40..f2ca68bd81416 100644 --- a/x-pack/plugin/build.gradle +++ b/x-pack/plugin/build.gradle @@ -13,6 +13,13 @@ dependencies { // https://github.com/elastic/x-plugins/issues/724 configurations { testArtifacts.extendsFrom testRuntime + restXpackSpecs + restXpackTests +} + +artifacts { + restXpackSpecs(new File(projectDir, "src/test/resources/rest-api-spec/api")) + restXpackTests(new File(projectDir, "src/test/resources/rest-api-spec/test")) } task testJar(type: Jar) { diff --git a/x-pack/plugin/ccr/qa/build.gradle b/x-pack/plugin/ccr/qa/build.gradle index 46609933699ca..e1c6fb4f95bea 100644 --- a/x-pack/plugin/ccr/qa/build.gradle +++ b/x-pack/plugin/ccr/qa/build.gradle @@ -1,18 +1,6 @@ -import org.elasticsearch.gradle.test.RestIntegTestTask - apply plugin: 'elasticsearch.build' test.enabled = false dependencies { compile project(':test:framework') } - -subprojects { - project.tasks.withType(RestIntegTestTask) { - final File xPackResources = new File(xpackProject('plugin').projectDir, 'src/test/resources') - project.copyRestSpec.from(xPackResources) { - include 'rest-api-spec/api/**' - } - } - -} diff --git a/x-pack/plugin/ccr/qa/rest/build.gradle b/x-pack/plugin/ccr/qa/rest/build.gradle index 4383db34f3e11..d95ac271df519 100644 --- a/x-pack/plugin/ccr/qa/rest/build.gradle +++ b/x-pack/plugin/ccr/qa/rest/build.gradle @@ -3,6 +3,12 @@ import org.elasticsearch.gradle.test.RestIntegTestTask apply plugin: 'elasticsearch.testclusters' apply plugin: 'elasticsearch.standalone-test' +restResources { + restApi { + includeXpack 'ccr' + } +} + dependencies { testCompile project(path: xpackModule('core'), configuration: 'testArtifacts') testCompile project(path: xpackModule('ccr'), configuration: 'runtime') diff --git 
a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportResumeFollowAction.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportResumeFollowAction.java index 75f8ff59d1fa5..422bc55087577 100644 --- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportResumeFollowAction.java +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportResumeFollowAction.java @@ -364,6 +364,7 @@ static String[] extractLeaderShardHistoryUUIDs(Map ccrIndexMetaD IndexMetaData.INDEX_BLOCKS_READ_ONLY_ALLOW_DELETE_SETTING, IndexMetaData.INDEX_PRIORITY_SETTING, IndexMetaData.SETTING_WAIT_FOR_ACTIVE_SHARDS, + IndexMetaData.INDEX_HIDDEN_SETTING, EnableAllocationDecider.INDEX_ROUTING_REBALANCE_ENABLE_SETTING, EnableAllocationDecider.INDEX_ROUTING_ALLOCATION_ENABLE_SETTING, ShardsLimitAllocationDecider.INDEX_TOTAL_SHARDS_PER_NODE_SETTING, diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/TrainedModelConfig.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/TrainedModelConfig.java index e2511c39db119..33e19cb2882d2 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/TrainedModelConfig.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/TrainedModelConfig.java @@ -21,7 +21,6 @@ import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.license.License; -import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.xpack.core.common.time.TimeUtils; import org.elasticsearch.xpack.core.ml.inference.persistence.InferenceIndexConstants; import org.elasticsearch.xpack.core.ml.job.messages.Messages; @@ -236,21 +235,6 @@ public License.OperationMode getLicenseLevel() { return licenseLevel; } - public boolean isAvailableWithLicense(XPackLicenseState licenseState) { - // Basic is 
always true - if (licenseLevel.equals(License.OperationMode.BASIC)) { - return true; - } - - // The model license does not matter, Platinum license gets the same functions as the highest license - if (licenseState.isAllowedByLicense(License.OperationMode.PLATINUM)) { - return true; - } - - // catch the rest, if the license is active and is at least the required model license - return licenseState.isAllowedByLicense(licenseLevel, true, false); - } - @Override public void writeTo(StreamOutput out) throws IOException { out.writeString(modelId); diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/TrainedModelConfigTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/TrainedModelConfigTests.java index 77b9ab2691f0e..11e66e69c0f75 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/TrainedModelConfigTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/TrainedModelConfigTests.java @@ -20,7 +20,6 @@ import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.license.License; -import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.search.SearchModule; import org.elasticsearch.test.AbstractSerializingTestCase; import org.elasticsearch.xpack.core.ml.job.messages.Messages; @@ -44,10 +43,6 @@ import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.not; -import static org.mockito.Matchers.any; -import static org.mockito.Matchers.anyBoolean; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.when; public class TrainedModelConfigTests extends AbstractSerializingTestCase { @@ -306,66 +301,4 @@ public void testSerializationWithCompressedLazyDefinition() throws IOException { .assertToXContentEquivalence(true) .test(); } - - public void 
testIsAvailableWithLicense() { - TrainedModelConfig.Builder builder = createTestInstance(randomAlphaOfLength(10)); - XPackLicenseState licenseState = mock(XPackLicenseState.class); - - // Reject everything - when(licenseState.isAllowedByLicense(any(License.OperationMode.class), anyBoolean(), anyBoolean())).thenAnswer( - invocationOnMock -> { - final Object[] arguments = invocationOnMock.getArguments(); - assertTrue((boolean) arguments[1]); // ensure the call is made to require active license - return false; - } - ); - assertFalse(builder.setLicenseLevel(License.OperationMode.ENTERPRISE.description()).build().isAvailableWithLicense(licenseState)); - assertFalse(builder.setLicenseLevel(License.OperationMode.PLATINUM.description()).build().isAvailableWithLicense(licenseState)); - assertFalse(builder.setLicenseLevel(License.OperationMode.GOLD.description()).build().isAvailableWithLicense(licenseState)); - // Basic license always works not matter what - assertTrue(builder.setLicenseLevel(License.OperationMode.BASIC.description()).build().isAvailableWithLicense(licenseState)); - } - - public void testActivePlatinumLicenseAlwaysWorks() { - TrainedModelConfig.Builder builder = createTestInstance(randomAlphaOfLength(10)); - XPackLicenseState licenseState = mock(XPackLicenseState.class); - - when(licenseState.isAllowedByLicense(License.OperationMode.PLATINUM)).thenReturn(true); - - // Active Platinum license functions the same as Enterprise license (highest) and should always work - when(licenseState.isAllowedByLicense(any(License.OperationMode.class), anyBoolean(), anyBoolean())).thenAnswer( - invocationOnMock -> { - final Object[] arguments = invocationOnMock.getArguments(); - assertEquals(License.OperationMode.PLATINUM, arguments[0]); - assertTrue((boolean) arguments[1]); // ensure the call is made to require active license - assertTrue((boolean) arguments[2]); - return true; - } - ); - 
assertTrue(builder.setLicenseLevel(License.OperationMode.ENTERPRISE.description()).build().isAvailableWithLicense(licenseState)); - assertTrue(builder.setLicenseLevel(License.OperationMode.PLATINUM.description()).build().isAvailableWithLicense(licenseState)); - assertTrue(builder.setLicenseLevel(License.OperationMode.BASIC.description()).build().isAvailableWithLicense(licenseState)); - assertTrue(builder.setLicenseLevel(License.OperationMode.GOLD.description()).build().isAvailableWithLicense(licenseState)); - } - - public void testActiveGoldLicenseWillWorkWhenRequiredLevelIsGold() { - TrainedModelConfig.Builder builder = createTestInstance(randomAlphaOfLength(10)); - XPackLicenseState licenseState = mock(XPackLicenseState.class); - - // Active Gold license should work when required level is gold - when(licenseState.isAllowedByLicense(any(License.OperationMode.class), anyBoolean(), anyBoolean())).thenAnswer( - invocationOnMock -> { - final Object[] arguments = invocationOnMock.getArguments(); - assertTrue((boolean) arguments[1]); // ensure the call is made to require active license - if (License.OperationMode.PLATINUM == arguments[0] && Boolean.TRUE.equals(arguments[2])) { - return false; - } else - return License.OperationMode.GOLD == arguments[0] && Boolean.FALSE.equals(arguments[2]); - } - ); - assertFalse(builder.setLicenseLevel(License.OperationMode.ENTERPRISE.description()).build().isAvailableWithLicense(licenseState)); - assertFalse(builder.setLicenseLevel(License.OperationMode.PLATINUM.description()).build().isAvailableWithLicense(licenseState)); - assertTrue(builder.setLicenseLevel(License.OperationMode.BASIC.description()).build().isAvailableWithLicense(licenseState)); - assertTrue(builder.setLicenseLevel(License.OperationMode.GOLD.description()).build().isAvailableWithLicense(licenseState)); - } } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/DocumentSubsetBitsetCacheTests.java 
b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/DocumentSubsetBitsetCacheTests.java index 3cfab7ddcfe9e..d6097f06d5fa7 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/DocumentSubsetBitsetCacheTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/DocumentSubsetBitsetCacheTests.java @@ -29,6 +29,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.index.IndexSettings; +import org.elasticsearch.index.mapper.KeywordFieldMapper; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.index.query.QueryShardContext; @@ -68,6 +69,7 @@ public class DocumentSubsetBitsetCacheTests extends ESTestCase { + private static final String MISSING_FIELD_NAME = "does-not-exist"; private static final int FIELD_COUNT = 10; private ExecutorService singleThreadExecutor; @@ -99,7 +101,7 @@ public void testSameBitSetIsReturnedForIdenticalQuery() throws Exception { public void testNullBitSetIsReturnedForNonMatchingQuery() throws Exception { final DocumentSubsetBitsetCache cache = newCache(Settings.EMPTY); runTestOnIndex((shardContext, leafContext) -> { - final Query query = QueryBuilders.termQuery("does-not-exist", "any-value").toQuery(shardContext); + final Query query = QueryBuilders.termQuery(MISSING_FIELD_NAME, "any-value").rewrite(shardContext).toQuery(shardContext); final BitSet bitSet = cache.getBitSet(query, leafContext); assertThat(bitSet, nullValue()); }); @@ -542,6 +544,17 @@ null, null, mapperService, null, null, xContentRegistry(), writableRegistry(), private void runTestOnIndices(int numberIndices, CheckedConsumer, Exception> body) throws Exception { final MapperService mapperService = mock(MapperService.class); + 
when(mapperService.fieldType(Mockito.anyString())).thenAnswer(invocation -> { + final String fieldName = (String) invocation.getArguments()[0]; + if (fieldName.equals(MISSING_FIELD_NAME)) { + return null; + } else { + KeywordFieldMapper.KeywordFieldType ft = new KeywordFieldMapper.KeywordFieldType(); + ft.setName(fieldName); + ft.freeze(); + return ft; + } + }); final Client client = mock(Client.class); when(client.settings()).thenReturn(Settings.EMPTY); diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/SecurityIndexReaderWrapperIntegrationTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/SecurityIndexReaderWrapperIntegrationTests.java index 117fa4b2f49c9..a66be9616600c 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/SecurityIndexReaderWrapperIntegrationTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/SecurityIndexReaderWrapperIntegrationTests.java @@ -29,12 +29,14 @@ import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.index.IndexSettings; +import org.elasticsearch.index.mapper.KeywordFieldMapper; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.query.ParsedQuery; import org.elasticsearch.index.query.QueryShardContext; import org.elasticsearch.index.query.TermsQueryBuilder; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.license.XPackLicenseState; +import org.elasticsearch.mock.orig.Mockito; import org.elasticsearch.script.ScriptService; import org.elasticsearch.search.internal.ContextIndexSearcher; import org.elasticsearch.test.AbstractBuilderTestCase; @@ -69,6 +71,13 @@ public void testDLS() throws Exception { when(mapperService.documentMapper()).thenReturn(null); 
when(mapperService.simpleMatchToFullName(anyString())) .then(invocationOnMock -> Collections.singletonList((String) invocationOnMock.getArguments()[0])); + when(mapperService.fieldType(Mockito.anyString())).then(invocation -> { + final String fieldName = (String) invocation.getArguments()[0]; + KeywordFieldMapper.KeywordFieldType ft = new KeywordFieldMapper.KeywordFieldType(); + ft.setName(fieldName); + ft.freeze(); + return ft; + }); final ThreadContext threadContext = new ThreadContext(Settings.EMPTY); final SecurityContext securityContext = new SecurityContext(Settings.EMPTY, threadContext); @@ -177,6 +186,13 @@ public void testDLSWithLimitedPermissions() throws Exception { when(mapperService.documentMapper()).thenReturn(null); when(mapperService.simpleMatchToFullName(anyString())) .then(invocationOnMock -> Collections.singletonList((String) invocationOnMock.getArguments()[0])); + when(mapperService.fieldType(Mockito.anyString())).then(invocation -> { + final String fieldName = (String) invocation.getArguments()[0]; + KeywordFieldMapper.KeywordFieldType ft = new KeywordFieldMapper.KeywordFieldType(); + ft.setName(fieldName); + ft.freeze(); + return ft; + }); final ThreadContext threadContext = new ThreadContext(Settings.EMPTY); final SecurityContext securityContext = new SecurityContext(Settings.EMPTY, threadContext); @@ -203,7 +219,8 @@ public void testDLSWithLimitedPermissions() throws Exception { IndicesAccessControl.IndexAccessControl limitedIndexAccessControl = new IndicesAccessControl.IndexAccessControl(true, new FieldPermissions(), DocumentPermissions.filteredBy(queries)); - IndexSettings indexSettings = IndexSettingsModule.newIndexSettings(shardId.getIndex(), Settings.EMPTY); + IndexSettings indexSettings = IndexSettingsModule.newIndexSettings(shardId.getIndex(), + Settings.builder().put(IndexSettings.ALLOW_UNMAPPED.getKey(), false).build()); Client client = mock(Client.class); when(client.settings()).thenReturn(Settings.EMPTY); final long nowInMillis = 
randomNonNegativeLong(); diff --git a/x-pack/plugin/enrich/qa/build.gradle b/x-pack/plugin/enrich/qa/build.gradle index d3e95d997c3fb..79ff4091f6d2d 100644 --- a/x-pack/plugin/enrich/qa/build.gradle +++ b/x-pack/plugin/enrich/qa/build.gradle @@ -6,12 +6,3 @@ test.enabled = false dependencies { compile project(':test:framework') } - -subprojects { - project.tasks.withType(RestIntegTestTask) { - final File xPackResources = new File(xpackProject('plugin').projectDir, 'src/test/resources') - project.copyRestSpec.from(xPackResources) { - include 'rest-api-spec/api/**' - } - } -} diff --git a/x-pack/plugin/enrich/qa/rest/build.gradle b/x-pack/plugin/enrich/qa/rest/build.gradle index 30ad9ff335e98..209154dd448b0 100644 --- a/x-pack/plugin/enrich/qa/rest/build.gradle +++ b/x-pack/plugin/enrich/qa/rest/build.gradle @@ -2,6 +2,12 @@ apply plugin: 'elasticsearch.testclusters' apply plugin: 'elasticsearch.standalone-rest-test' apply plugin: 'elasticsearch.rest-test' +restResources { + restApi { + includeXpack 'enrich' + } +} + dependencies { testCompile project(path: xpackModule('enrich'), configuration: 'runtime') testCompile project(path: xpackModule('enrich:qa:common'), configuration: 'runtime') diff --git a/x-pack/plugin/eql/qa/build.gradle b/x-pack/plugin/eql/qa/build.gradle index d3e95d997c3fb..79ff4091f6d2d 100644 --- a/x-pack/plugin/eql/qa/build.gradle +++ b/x-pack/plugin/eql/qa/build.gradle @@ -6,12 +6,3 @@ test.enabled = false dependencies { compile project(':test:framework') } - -subprojects { - project.tasks.withType(RestIntegTestTask) { - final File xPackResources = new File(xpackProject('plugin').projectDir, 'src/test/resources') - project.copyRestSpec.from(xPackResources) { - include 'rest-api-spec/api/**' - } - } -} diff --git a/x-pack/plugin/eql/qa/rest/build.gradle b/x-pack/plugin/eql/qa/rest/build.gradle index 8e4f9dc97e1fa..a7e94fcba6d6d 100644 --- a/x-pack/plugin/eql/qa/rest/build.gradle +++ b/x-pack/plugin/eql/qa/rest/build.gradle @@ -4,6 +4,12 @@ apply 
plugin: 'elasticsearch.testclusters' apply plugin: 'elasticsearch.standalone-rest-test' apply plugin: 'elasticsearch.rest-test' +restResources { + restApi { + includeXpack 'eql' + } +} + dependencies { testCompile project(path: xpackModule('eql'), configuration: 'runtime') testCompile project(path: xpackModule('eql:qa:common'), configuration: 'runtime') diff --git a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/execution/search/SourceGenerator.java b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/execution/search/SourceGenerator.java new file mode 100644 index 0000000000000..dbc75c7bd8a9b --- /dev/null +++ b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/execution/search/SourceGenerator.java @@ -0,0 +1,74 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.eql.execution.search; + +import org.elasticsearch.index.query.QueryBuilder; +import org.elasticsearch.search.builder.SearchSourceBuilder; +import org.elasticsearch.search.fetch.StoredFieldsContext; +import org.elasticsearch.search.fetch.subphase.FetchSourceContext; +import org.elasticsearch.xpack.eql.querydsl.container.QueryContainer; +import org.elasticsearch.xpack.ql.execution.search.QlSourceBuilder; + +import java.util.List; + +import static java.util.Collections.singletonList; +import static org.elasticsearch.index.query.QueryBuilders.boolQuery; + +public abstract class SourceGenerator { + + private SourceGenerator() {} + + private static final List NO_STORED_FIELD = singletonList(StoredFieldsContext._NONE_); + + public static SearchSourceBuilder sourceBuilder(QueryContainer container, QueryBuilder filter, Integer size) { + QueryBuilder finalQuery = null; + // add the source + if (container.query() != null) { + if (filter != null) { + 
finalQuery = boolQuery().must(container.query().asBuilder()).filter(filter); + } else { + finalQuery = container.query().asBuilder(); + } + } else { + if (filter != null) { + finalQuery = boolQuery().filter(filter); + } + } + + final SearchSourceBuilder source = new SearchSourceBuilder(); + source.query(finalQuery); + + QlSourceBuilder sortBuilder = new QlSourceBuilder(); + // Iterate through all the columns requested, collecting the fields that + // need to be retrieved from the result documents + + // NB: the sortBuilder takes care of eliminating duplicates + container.fields().forEach(f -> f.v1().collectFields(sortBuilder)); + sortBuilder.build(source); + optimize(sortBuilder, source); + + return source; + } + + private static void optimize(QlSourceBuilder qlSource, SearchSourceBuilder builder) { + if (qlSource.noSource()) { + disableSource(builder); + } + } + + private static void optimize(QueryContainer query, SearchSourceBuilder builder) { + if (query.shouldTrackHits()) { + builder.trackTotalHits(true); + } + } + + private static void disableSource(SearchSourceBuilder builder) { + builder.fetchSource(FetchSourceContext.DO_NOT_FETCH_SOURCE); + if (builder.storedFields() == null) { + builder.storedFields(NO_STORED_FIELD); + } + } +} diff --git a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/plan/physical/EsQueryExec.java b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/plan/physical/EsQueryExec.java index c7451b44595d5..30d917eeb7d0a 100644 --- a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/plan/physical/EsQueryExec.java +++ b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/plan/physical/EsQueryExec.java @@ -77,4 +77,8 @@ public boolean equals(Object obj) { public String nodeString() { return nodeName() + "[" + index + "," + queryContainer + "]"; } + + public QueryContainer queryContainer() { + return queryContainer; + } } \ No newline at end of file diff --git 
a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/planner/QueryFolder.java b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/planner/QueryFolder.java index c43b71ac9e8af..594fc62cf9061 100644 --- a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/planner/QueryFolder.java +++ b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/planner/QueryFolder.java @@ -6,10 +6,17 @@ package org.elasticsearch.xpack.eql.planner; +import org.elasticsearch.xpack.eql.plan.physical.EsQueryExec; +import org.elasticsearch.xpack.eql.plan.physical.FilterExec; import org.elasticsearch.xpack.eql.plan.physical.PhysicalPlan; +import org.elasticsearch.xpack.eql.querydsl.container.QueryContainer; +import org.elasticsearch.xpack.ql.expression.Attribute; +import org.elasticsearch.xpack.ql.planner.ExpressionTranslators; +import org.elasticsearch.xpack.ql.querydsl.query.Query; +import org.elasticsearch.xpack.ql.rule.Rule; import org.elasticsearch.xpack.ql.rule.RuleExecutor; -import static java.util.Collections.emptyList; +import java.util.Arrays; class QueryFolder extends RuleExecutor { @@ -19,6 +26,59 @@ PhysicalPlan fold(PhysicalPlan plan) { @Override protected Iterable.Batch> batches() { - return emptyList(); + Batch fold = new Batch("Fold queries", + new FoldFilter() + ); + Batch finish = new Batch("Finish query", Limiter.ONCE, + new PlanOutputToQueryRef() + ); + + return Arrays.asList(fold, finish); } -} + + private static class FoldFilter extends FoldingRule { + + @Override + protected PhysicalPlan rule(FilterExec plan) { + if (plan.child() instanceof EsQueryExec) { + EsQueryExec exec = (EsQueryExec) plan.child(); + QueryContainer qContainer = exec.queryContainer(); + + Query query = ExpressionTranslators.toQuery(plan.condition()); + + if (qContainer.query() != null || query != null) { + query = ExpressionTranslators.and(plan.source(), qContainer.query(), query); + } + + qContainer = qContainer.with(query); + return exec.with(qContainer); + } + 
return plan; + } + } + + private static class PlanOutputToQueryRef extends FoldingRule { + @Override + protected PhysicalPlan rule(EsQueryExec exec) { + QueryContainer qContainer = exec.queryContainer(); + + for (Attribute attr : exec.output()) { + qContainer = qContainer.addColumn(attr); + } + + // after all attributes have been resolved + return exec.with(qContainer); + } + } + + abstract static class FoldingRule extends Rule { + + @Override + public final PhysicalPlan apply(PhysicalPlan plan) { + return plan.transformUp(this::rule, typeToken()); + } + + @Override + protected abstract PhysicalPlan rule(SubPlan plan); + } +} \ No newline at end of file diff --git a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/querydsl/container/ComputedRef.java b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/querydsl/container/ComputedRef.java new file mode 100644 index 0000000000000..7fc7ac36f6665 --- /dev/null +++ b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/querydsl/container/ComputedRef.java @@ -0,0 +1,40 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ + +package org.elasticsearch.xpack.eql.querydsl.container; + +import org.elasticsearch.xpack.ql.execution.search.FieldExtraction; +import org.elasticsearch.xpack.ql.execution.search.QlSourceBuilder; +import org.elasticsearch.xpack.ql.expression.gen.pipeline.Pipe; + +public class ComputedRef implements FieldExtraction { + + private final Pipe processor; + + public ComputedRef(Pipe processor) { + this.processor = processor; + } + + public Pipe processor() { + return processor; + } + + @Override + public boolean supportedByAggsOnlyQuery() { + return processor.supportedByAggsOnlyQuery(); + } + + @Override + public void collectFields(QlSourceBuilder sourceBuilder) { + processor.collectFields(sourceBuilder); + } + + @Override + public String toString() { + return processor + "(" + processor + ")"; + } +} + diff --git a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/querydsl/container/QueryContainer.java b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/querydsl/container/QueryContainer.java index 05a54cfd86e08..afab13caada85 100644 --- a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/querydsl/container/QueryContainer.java +++ b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/querydsl/container/QueryContainer.java @@ -5,9 +5,174 @@ */ package org.elasticsearch.xpack.eql.querydsl.container; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.collect.Tuple; +import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.json.JsonXContent; +import org.elasticsearch.xpack.eql.EqlIllegalArgumentException; +import org.elasticsearch.xpack.eql.execution.search.SourceGenerator; +import org.elasticsearch.xpack.ql.execution.search.FieldExtraction; +import org.elasticsearch.xpack.ql.expression.Attribute; +import org.elasticsearch.xpack.ql.expression.AttributeMap; +import 
org.elasticsearch.xpack.ql.expression.Expression; +import org.elasticsearch.xpack.ql.expression.Expressions; +import org.elasticsearch.xpack.ql.expression.FieldAttribute; +import org.elasticsearch.xpack.ql.expression.gen.pipeline.ConstantInput; +import org.elasticsearch.xpack.ql.querydsl.query.Query; +import org.elasticsearch.xpack.ql.type.DataTypes; + +import java.io.IOException; +import java.util.List; +import java.util.Objects; + +import static java.util.Collections.emptyList; +import static org.elasticsearch.xpack.ql.util.CollectionUtils.combine; + public class QueryContainer { + private final Query query; + // attributes found in the tree + private final AttributeMap attributes; + // list of fields available in the output + private final List> fields; + + private final boolean trackHits; + private final boolean includeFrozen; + + public QueryContainer() { + this(null, emptyList(), AttributeMap.emptyAttributeMap(), false, false); + } + + private QueryContainer(Query query, List> fields, AttributeMap attributes, boolean trackHits, + boolean includeFrozen) { + this.query = query; + this.fields = fields; + this.attributes = attributes; + this.trackHits = trackHits; + this.includeFrozen = includeFrozen; + } + public QueryContainer withFrozen() { throw new UnsupportedOperationException(); } -} + + public Query query() { + return query; + } + + public List> fields() { + return fields; + } + + public boolean shouldTrackHits() { + return trackHits; + } + + public QueryContainer with(Query q) { + return new QueryContainer(q, fields, attributes, trackHits, includeFrozen); + } + + public QueryContainer addColumn(Attribute attr) { + Expression expression = attributes.getOrDefault(attr, attr); + Tuple tuple = asFieldExtraction(attr); + return tuple.v1().addColumn(tuple.v2(), Expressions.id(expression)); + } + + private Tuple asFieldExtraction(Attribute attr) { + // resolve it Expression + Expression expression = attributes.getOrDefault(attr, attr); + + if (expression 
instanceof FieldAttribute) { + FieldAttribute fa = (FieldAttribute) expression; + if (fa.isNested()) { + throw new UnsupportedOperationException("Nested not yet supported"); + } + return new Tuple<>(this, topHitFieldRef(fa)); + } + + if (expression.foldable()) { + return new Tuple<>(this, new ComputedRef(new ConstantInput(expression.source(), expression, expression.fold()))); + } + + throw new EqlIllegalArgumentException("Unknown output attribute {}", attr); + } + + // + // reference methods + // + private FieldExtraction topHitFieldRef(FieldAttribute fieldAttr) { + FieldAttribute actualField = fieldAttr; + FieldAttribute rootField = fieldAttr; + StringBuilder fullFieldName = new StringBuilder(fieldAttr.field().getName()); + + // Only if the field is not an alias (in which case it will be taken out from docvalue_fields if it's isAggregatable()), + // go up the tree of parents until a non-object (and non-nested) type of field is found and use that specific parent + // as the field to extract data from, from _source. We do it like this because sub-fields are not in the _source, only + // the root field to which those sub-fields belong to, are. Instead of "text_field.keyword_subfield" for _source extraction, + // we use "text_field", because there is no source for "keyword_subfield". 
+ /* + * "text_field": { + * "type": "text", + * "fields": { + * "keyword_subfield": { + * "type": "keyword" + * } + * } + * } + */ + if (fieldAttr.field().isAlias() == false) { + while (actualField.parent() != null + && actualField.parent().field().getDataType() != DataTypes.OBJECT + && actualField.parent().field().getDataType() != DataTypes.NESTED + && actualField.field().getDataType().hasDocValues() == false) { + actualField = actualField.parent(); + } + } + while (rootField.parent() != null) { + fullFieldName.insert(0, ".").insert(0, rootField.parent().field().getName()); + rootField = rootField.parent(); + } + + return new SearchHitFieldRef(actualField.name(), fullFieldName.toString(), fieldAttr.field().getDataType(), + fieldAttr.field().isAggregatable(), fieldAttr.field().isAlias()); + } + + public QueryContainer addColumn(FieldExtraction ref, String id) { + return new QueryContainer(query, combine(fields, new Tuple<>(ref, id)), attributes, trackHits, includeFrozen); + } + + @Override + public int hashCode() { + return Objects.hash(query, attributes, fields, trackHits, includeFrozen); + } + + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; + } + + if (obj == null || getClass() != obj.getClass()) { + return false; + } + + QueryContainer other = (QueryContainer) obj; + return Objects.equals(query, other.query) + && Objects.equals(attributes, other.attributes) + && Objects.equals(fields, other.fields) + && Objects.equals(trackHits, other.trackHits) + && Objects.equals(includeFrozen, other.includeFrozen); + } + + @Override + public String toString() { + try (XContentBuilder builder = JsonXContent.contentBuilder()) { + builder.humanReadable(true).prettyPrint(); + SourceGenerator.sourceBuilder(this, null, null).toXContent(builder, ToXContent.EMPTY_PARAMS); + return Strings.toString(builder); + } catch (IOException e) { + throw new EqlIllegalArgumentException("error rendering", e); + } + } +} \ No newline at end of file diff --git 
a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/querydsl/container/SearchHitFieldRef.java b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/querydsl/container/SearchHitFieldRef.java new file mode 100644 index 0000000000000..727df6a6047c6 --- /dev/null +++ b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/querydsl/container/SearchHitFieldRef.java @@ -0,0 +1,92 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +package org.elasticsearch.xpack.eql.querydsl.container; + +import org.elasticsearch.xpack.ql.execution.search.FieldExtraction; +import org.elasticsearch.xpack.ql.execution.search.QlSourceBuilder; +import org.elasticsearch.xpack.ql.type.DataType; + +import static org.elasticsearch.xpack.ql.type.DataTypes.DATETIME; +import static org.elasticsearch.xpack.ql.type.DataTypes.KEYWORD; + +// NB: this class is taken from SQL - it hasn't been ported over to QL +// since at this stage is unclear whether the whole FieldExtraction infrastructure +// needs porting or just the field extraction +public class SearchHitFieldRef implements FieldExtraction { + + private final String name; + private final String fullFieldName; // path included. 
If field full path is a.b.c, full field name is "a.b.c" and name is "c" + private final DataType dataType; + private final boolean docValue; + private final String hitName; + + public SearchHitFieldRef(String name, String fullFieldName, DataType dataType, boolean useDocValueInsteadOfSource, boolean isAlias) { + this(name, fullFieldName, dataType, useDocValueInsteadOfSource, isAlias, null); + } + + public SearchHitFieldRef(String name, String fullFieldName, DataType dataType, boolean useDocValueInsteadOfSource, boolean isAlias, + String hitName) { + this.name = name; + this.fullFieldName = fullFieldName; + this.dataType = dataType; + // these field types can only be extracted from docvalue_fields (ie, values already computed by Elasticsearch) + // because, for us to be able to extract them from _source, we would need the mapping of those fields (which we don't have) + this.docValue = isAlias ? useDocValueInsteadOfSource : (hasDocValues(dataType) ? useDocValueInsteadOfSource : false); + this.hitName = hitName; + } + + public String hitName() { + return hitName; + } + + public String name() { + return name; + } + + public String fullFieldName() { + return fullFieldName; + } + + public DataType getDataType() { + return dataType; + } + + public boolean useDocValue() { + return docValue; + } + + @Override + public void collectFields(QlSourceBuilder sourceBuilder) { + // nested fields are handled by inner hits + if (hitName != null) { + return; + } + if (docValue) { + sourceBuilder.addDocField(name, format(dataType)); + } else { + sourceBuilder.addSourceField(name); + } + } + + @Override + public final boolean supportedByAggsOnlyQuery() { + return false; + } + + @Override + public String toString() { + return name; + } + + private static boolean hasDocValues(DataType dataType) { + return dataType == KEYWORD || dataType == DATETIME; + } + + private static String format(DataType dataType) { + return dataType == DATETIME ? 
"epoch_millis" : null; + } +} \ No newline at end of file diff --git a/x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/planner/QueryFolderTests.java b/x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/planner/QueryFolderTests.java index 147cabbfdb527..ad2b42880c4df 100644 --- a/x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/planner/QueryFolderTests.java +++ b/x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/planner/QueryFolderTests.java @@ -13,12 +13,14 @@ import org.elasticsearch.xpack.eql.expression.function.EqlFunctionRegistry; import org.elasticsearch.xpack.eql.optimizer.Optimizer; import org.elasticsearch.xpack.eql.parser.EqlParser; +import org.elasticsearch.xpack.eql.plan.physical.EsQueryExec; import org.elasticsearch.xpack.eql.plan.physical.PhysicalPlan; -import org.elasticsearch.xpack.ql.QlClientException; import org.elasticsearch.xpack.ql.index.EsIndex; import org.elasticsearch.xpack.ql.index.IndexResolution; +import static org.elasticsearch.xpack.ql.type.DataTypes.KEYWORD; import static org.elasticsearch.xpack.ql.type.TypesTests.loadMapping; +import static org.hamcrest.Matchers.containsString; public class QueryFolderTests extends ESTestCase { @@ -38,8 +40,21 @@ private PhysicalPlan plan(IndexResolution resolution, String eql) { private PhysicalPlan plan(String eql) { return plan(index, eql); } - - public void testBasicPlan() throws Exception { - expectThrows(QlClientException.class, "not yet implemented", () -> plan("process where true")); + + public void testBasicPlan() { + PhysicalPlan p = plan("process where true"); + assertEquals(EsQueryExec.class, p.getClass()); + EsQueryExec eqe = (EsQueryExec) p; + assertEquals(22, eqe.output().size()); + assertEquals(KEYWORD, eqe.output().get(0).dataType()); + String query = eqe.queryContainer().toString().replaceAll("\\s+", ""); + // test query term + assertThat(query, containsString("\"term\":{\"event_type\":{\"value\":\"process\"")); + // test field source 
extraction + assertThat(query, containsString("\"_source\":{\"includes\":[")); + assertThat(query, containsString("\"pid\"")); + // test docvalue extraction + assertThat(query, containsString("{\"field\":\"command_line\"}")); + assertThat(query, containsString("{\"field\":\"timestamp\",\"format\":\"epoch_millis\"}")); } } diff --git a/x-pack/plugin/graph/qa/build.gradle b/x-pack/plugin/graph/qa/build.gradle index 012e25f5d4f9d..e69de29bb2d1d 100644 --- a/x-pack/plugin/graph/qa/build.gradle +++ b/x-pack/plugin/graph/qa/build.gradle @@ -1,17 +0,0 @@ -import org.elasticsearch.gradle.test.RestIntegTestTask - -subprojects { - // HACK: please fix this - // we want to add the rest api specs for xpack to qa tests, but we - // need to wait until after the project is evaluated to only apply - // to those that rest tests. this used to be done automatically - // when xpack was a plugin, but now there is no place with xpack as a module. - // instead, we should package these and make them easy to use for rest tests, - // but currently, they must be copied into the resources of the test runner. 
- project.tasks.withType(RestIntegTestTask) { - File xpackResources = new File(xpackProject('plugin').projectDir, 'src/test/resources') - project.copyRestSpec.from(xpackResources) { - include 'rest-api-spec/api/**' - } - } -} diff --git a/x-pack/plugin/graph/qa/with-security/build.gradle b/x-pack/plugin/graph/qa/with-security/build.gradle index 578f910146748..869e926102dcb 100644 --- a/x-pack/plugin/graph/qa/with-security/build.gradle +++ b/x-pack/plugin/graph/qa/with-security/build.gradle @@ -7,14 +7,15 @@ dependencies { } // bring in graph rest test suite -task copyGraphRestTests(type: Copy) { - into project.sourceSets.test.output.resourcesDir - from project(xpackProject('plugin').path).sourceSets.test.resources.srcDirs - include 'rest-api-spec/test/graph/**' +restResources { + restApi { + includeXpack 'graph' + } + restTests { + includeXpack 'graph' + } } -integTest.dependsOn copyGraphRestTests - testClusters.integTest { testDistribution = 'DEFAULT' setting 'xpack.security.enabled', 'true' diff --git a/x-pack/plugin/ilm/qa/build.gradle b/x-pack/plugin/ilm/qa/build.gradle index 46908e1d849b9..e69de29bb2d1d 100644 --- a/x-pack/plugin/ilm/qa/build.gradle +++ b/x-pack/plugin/ilm/qa/build.gradle @@ -1,11 +0,0 @@ -import org.elasticsearch.gradle.test.RestIntegTestTask - -subprojects { - project.tasks.withType(RestIntegTestTask) { - final File xPackResources = new File(xpackProject('plugin').projectDir, 'src/test/resources') - project.copyRestSpec.from(xPackResources) { - include 'rest-api-spec/api/**' - } - } -} - diff --git a/x-pack/plugin/ilm/qa/rest/build.gradle b/x-pack/plugin/ilm/qa/rest/build.gradle index 3f67169914928..6c5ea98a0b46d 100644 --- a/x-pack/plugin/ilm/qa/rest/build.gradle +++ b/x-pack/plugin/ilm/qa/rest/build.gradle @@ -8,6 +8,12 @@ dependencies { testCompile project(path: xpackModule('ilm'), configuration: 'runtime') } +restResources { + restApi { + includeXpack 'ilm', 'slm' + } +} + def clusterCredentials = [username: 
System.getProperty('tests.rest.cluster.username', 'test_admin'), password: System.getProperty('tests.rest.cluster.password', 'x-pack-test-password')] diff --git a/x-pack/plugin/ml/qa/build.gradle b/x-pack/plugin/ml/qa/build.gradle index 012e25f5d4f9d..e69de29bb2d1d 100644 --- a/x-pack/plugin/ml/qa/build.gradle +++ b/x-pack/plugin/ml/qa/build.gradle @@ -1,17 +0,0 @@ -import org.elasticsearch.gradle.test.RestIntegTestTask - -subprojects { - // HACK: please fix this - // we want to add the rest api specs for xpack to qa tests, but we - // need to wait until after the project is evaluated to only apply - // to those that rest tests. this used to be done automatically - // when xpack was a plugin, but now there is no place with xpack as a module. - // instead, we should package these and make them easy to use for rest tests, - // but currently, they must be copied into the resources of the test runner. - project.tasks.withType(RestIntegTestTask) { - File xpackResources = new File(xpackProject('plugin').projectDir, 'src/test/resources') - project.copyRestSpec.from(xpackResources) { - include 'rest-api-spec/api/**' - } - } -} diff --git a/x-pack/plugin/ml/qa/ml-with-security/build.gradle b/x-pack/plugin/ml/qa/ml-with-security/build.gradle index 8b4f118756d95..2ac378e472a7b 100644 --- a/x-pack/plugin/ml/qa/ml-with-security/build.gradle +++ b/x-pack/plugin/ml/qa/ml-with-security/build.gradle @@ -9,15 +9,15 @@ dependencies { } // bring in machine learning rest test suite -task copyMlRestTests(type: Copy) { - into project.sourceSets.test.output.resourcesDir - from project(xpackProject('plugin').path).sourceSets.test.resources.srcDirs - include 'rest-api-spec/test/ml/**' +restResources { + restApi { + includeXpack 'ml', 'cat' + } + restTests { + includeXpack 'ml' + } } -integTest.runner { - dependsOn copyMlRestTests -} integTest.runner { systemProperty 'tests.rest.blacklist', [ // Remove this test because it doesn't call an ML endpoint and we don't want diff --git 
a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportInternalInferModelAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportInternalInferModelAction.java index 248169b37f6c1..a32367a6766ce 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportInternalInferModelAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportInternalInferModelAction.java @@ -79,8 +79,8 @@ protected void doExecute(Task task, Request request, ActionListener li } else { trainedModelProvider.getTrainedModel(request.getModelId(), false, ActionListener.wrap( trainedModelConfig -> { - responseBuilder.setLicensed(trainedModelConfig.isAvailableWithLicense(licenseState)); - if (trainedModelConfig.isAvailableWithLicense(licenseState) || request.isPreviouslyLicensed()) { + responseBuilder.setLicensed(licenseState.isAllowedByLicense(trainedModelConfig.getLicenseLevel())); + if (licenseState.isAllowedByLicense(trainedModelConfig.getLicenseLevel()) || request.isPreviouslyLicensed()) { this.modelLoadingService.getModel(request.getModelId(), getModelListener); } else { listener.onFailure(LicenseUtils.newComplianceException(XPackField.MACHINE_LEARNING)); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/filestructurefinder/FileStructureUtils.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/filestructurefinder/FileStructureUtils.java index 14df58a35ce8f..6807dfd8e4b33 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/filestructurefinder/FileStructureUtils.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/filestructurefinder/FileStructureUtils.java @@ -43,7 +43,7 @@ public final class FileStructureUtils { private static final int KEYWORD_MAX_LEN = 256; private static final int KEYWORD_MAX_SPACES = 5; - private static final String BEAT_TIMEZONE_FIELD = "beat.timezone"; + private static final String 
BEAT_TIMEZONE_FIELD = "event.timezone"; private FileStructureUtils() { } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/cat/RestCatDatafeedsAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/cat/RestCatDatafeedsAction.java index c457f4bc2bdf5..f0bbc3f872e1d 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/cat/RestCatDatafeedsAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/cat/RestCatDatafeedsAction.java @@ -92,7 +92,7 @@ protected Table getTableWithHeader(RestRequest request) { .build()); table.addCell("search.bucket_avg", TableColumnAttributeBuilder.builder("the average search time per bucket (millisecond)", false) - .setAliases("sba", "bucketTimeMin") + .setAliases("sba", "searchBucketAvg") .build()); table.addCell("search.exp_avg_hour", TableColumnAttributeBuilder.builder("the exponential average search time per hour (millisecond)", false) diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/integration/MlDistributedFailureIT.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/integration/MlDistributedFailureIT.java index e1d696794c9da..048e8d8f91162 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/integration/MlDistributedFailureIT.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/integration/MlDistributedFailureIT.java @@ -245,20 +245,27 @@ public void testCloseUnassignedFailedJobAndStopUnassignedStoppingDatafeed() thro // stopping. PersistentTasksCustomMetaData tasks = clusterService().state().getMetaData().custom(PersistentTasksCustomMetaData.TYPE); PersistentTasksCustomMetaData.PersistentTask task = MlTasks.getDatafeedTask(datafeedId, tasks); + + // It is possible that the datafeed has already detected the job failure and + // terminated itself. 
In this happens there is no persistent task to stop + assumeFalse("The datafeed task is null most likely because the datafeed detected the job had failed. " + + "This is expected to happen extremely rarely but the test cannot continue in these circumstances.", task == null); + UpdatePersistentTaskStatusAction.Request updatePersistentTaskStatusRequest = - new UpdatePersistentTaskStatusAction.Request(task.getId(), task.getAllocationId(), DatafeedState.STOPPING); + new UpdatePersistentTaskStatusAction.Request(task.getId(), task.getAllocationId(), DatafeedState.STOPPING); PersistentTaskResponse updatePersistentTaskStatusResponse = - client().execute(UpdatePersistentTaskStatusAction.INSTANCE, updatePersistentTaskStatusRequest).actionGet(); + client().execute(UpdatePersistentTaskStatusAction.INSTANCE, updatePersistentTaskStatusRequest).actionGet(); assertNotNull(updatePersistentTaskStatusResponse.getTask()); // Confirm the datafeed state is now stopping - this may take a while to update in cluster state assertBusy(() -> { GetDatafeedsStatsAction.Request datafeedStatsRequest = new GetDatafeedsStatsAction.Request(datafeedId); GetDatafeedsStatsAction.Response datafeedStatsResponse = - client().execute(GetDatafeedsStatsAction.INSTANCE, datafeedStatsRequest).actionGet(); + client().execute(GetDatafeedsStatsAction.INSTANCE, datafeedStatsRequest).actionGet(); assertEquals(DatafeedState.STOPPING, datafeedStatsResponse.getResponse().results().get(0).getDatafeedState()); }); + // Stop the node running the failed job/stopping datafeed ensureGreen(); // replicas must be assigned, otherwise we could lose a whole index internalCluster().stopRandomNode(settings -> jobNode.getName().equals(settings.get("node.name"))); diff --git a/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/planner/ExpressionTranslators.java b/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/planner/ExpressionTranslators.java index 16ad0d4fd42af..ebbd84ef07c7c 100644 --- 
a/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/planner/ExpressionTranslators.java +++ b/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/planner/ExpressionTranslators.java @@ -69,6 +69,10 @@ public final class ExpressionTranslators { new Scalars() ); + public static Query toQuery(Expression e) { + return toQuery(e, new QlTranslatorHandler()); + } + public static Query toQuery(Expression e, TranslatorHandler handler) { Query translation = null; for (ExpressionTranslator translator : QUERY_TRANSLATORS) { diff --git a/x-pack/plugin/search-business-rules/src/test/java/org/elasticsearch/xpack/searchbusinessrules/PinnedQueryBuilderTests.java b/x-pack/plugin/search-business-rules/src/test/java/org/elasticsearch/xpack/searchbusinessrules/PinnedQueryBuilderTests.java index db3d46fc1a7dd..e91ef3a5d76f9 100644 --- a/x-pack/plugin/search-business-rules/src/test/java/org/elasticsearch/xpack/searchbusinessrules/PinnedQueryBuilderTests.java +++ b/x-pack/plugin/search-business-rules/src/test/java/org/elasticsearch/xpack/searchbusinessrules/PinnedQueryBuilderTests.java @@ -170,4 +170,13 @@ public void testRewrite() throws IOException { assertThat(rewritten, instanceOf(PinnedQueryBuilder.class)); } + @Override + public void testMustRewrite() throws IOException { + QueryShardContext context = createShardContext(); + context.setAllowUnmappedFields(true); + PinnedQueryBuilder queryBuilder = new PinnedQueryBuilder(new TermQueryBuilder("unmapped_field", "42")); + IllegalStateException e = expectThrows(IllegalStateException.class, + () -> queryBuilder.toQuery(context)); + assertEquals("Rewrite first", e.getMessage()); + } } diff --git a/x-pack/plugin/security/build.gradle b/x-pack/plugin/security/build.gradle index 1b5b380e8dcd8..5847270e0b7fa 100644 --- a/x-pack/plugin/security/build.gradle +++ b/x-pack/plugin/security/build.gradle @@ -56,8 +56,8 @@ dependencies { compile 'com.google.guava:guava:19.0' // Dependencies for oidc - compile 
"com.nimbusds:oauth2-oidc-sdk:6.16.5" - compile "com.nimbusds:nimbus-jose-jwt:8.2" + compile "com.nimbusds:oauth2-oidc-sdk:7.0.2" + compile "com.nimbusds:nimbus-jose-jwt:8.6" compile "com.nimbusds:lang-tag:1.4.4" compile "com.sun.mail:jakarta.mail:1.6.3" compile "net.jcip:jcip-annotations:1.0" @@ -82,7 +82,7 @@ dependencies { testCompile('org.apache.kerby:kerb-crypto:1.1.1') testCompile('org.apache.kerby:kerb-util:1.1.1') testCompile('org.apache.kerby:token-provider:1.1.1') - testCompile('com.nimbusds:nimbus-jose-jwt:8.2') + testCompile('com.nimbusds:nimbus-jose-jwt:8.6') testCompile('net.jcip:jcip-annotations:1.0') testCompile('org.apache.kerby:kerb-admin:1.1.1') testCompile('org.apache.kerby:kerb-server:1.1.1') @@ -100,7 +100,7 @@ dependencies { testCompile('org.apache.directory.api:api-ldap-extras-codec:1.0.0') testCompile('org.apache.directory.api:api-ldap-extras-codec-api:1.0.0') testCompile('commons-pool:commons-pool:1.6') - testCompile('commons-collections:commons-collections:3.2') + testCompile('commons-collections:commons-collections:3.2.2') testCompile('org.apache.mina:mina-core:2.0.17') testCompile('org.apache.directory.api:api-util:1.0.1') testCompile('org.apache.directory.api:api-i18n:1.0.1') @@ -458,6 +458,8 @@ thirdPartyAudit.ignoreMissingClasses( 'javax.xml.bind.JAXBException', 'javax.xml.bind.Unmarshaller', 'javax.xml.bind.UnmarshallerHandler', + // Optional dependencies of oauth2-oidc-sdk + 'com.nimbusds.common.contenttype.ContentType', 'javax.activation.ActivationDataFlavor', 'javax.activation.DataContentHandler', 'javax.activation.DataHandler', diff --git a/x-pack/plugin/security/licenses/nimbus-jose-jwt-8.2.jar.sha1 b/x-pack/plugin/security/licenses/nimbus-jose-jwt-8.2.jar.sha1 deleted file mode 100644 index e2d35d19dab1e..0000000000000 --- a/x-pack/plugin/security/licenses/nimbus-jose-jwt-8.2.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -3cc99de85969253f2f085c39d87124e21011ae74 \ No newline at end of file diff --git 
a/x-pack/plugin/security/licenses/nimbus-jose-jwt-8.6.jar.sha1 b/x-pack/plugin/security/licenses/nimbus-jose-jwt-8.6.jar.sha1 new file mode 100644 index 0000000000000..acc609990aa47 --- /dev/null +++ b/x-pack/plugin/security/licenses/nimbus-jose-jwt-8.6.jar.sha1 @@ -0,0 +1 @@ +93ae6d9f03a4160e5c3ca7d0c9e6b88efbfa26e7 \ No newline at end of file diff --git a/x-pack/plugin/security/licenses/oauth2-oidc-sdk-6.16.5.jar.sha1 b/x-pack/plugin/security/licenses/oauth2-oidc-sdk-6.16.5.jar.sha1 deleted file mode 100644 index bccf0afc4077d..0000000000000 --- a/x-pack/plugin/security/licenses/oauth2-oidc-sdk-6.16.5.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -690bf0290fe0c03dabfb43566dbd334f78ddce84 \ No newline at end of file diff --git a/x-pack/plugin/security/licenses/oauth2-oidc-sdk-7.0.2.jar.sha1 b/x-pack/plugin/security/licenses/oauth2-oidc-sdk-7.0.2.jar.sha1 new file mode 100644 index 0000000000000..a5c480a8ef152 --- /dev/null +++ b/x-pack/plugin/security/licenses/oauth2-oidc-sdk-7.0.2.jar.sha1 @@ -0,0 +1 @@ +3537c76a7ac72a1745f433cac63a254a45c57410 \ No newline at end of file diff --git a/x-pack/plugin/security/qa/build.gradle b/x-pack/plugin/security/qa/build.gradle index 46908e1d849b9..e69de29bb2d1d 100644 --- a/x-pack/plugin/security/qa/build.gradle +++ b/x-pack/plugin/security/qa/build.gradle @@ -1,11 +0,0 @@ -import org.elasticsearch.gradle.test.RestIntegTestTask - -subprojects { - project.tasks.withType(RestIntegTestTask) { - final File xPackResources = new File(xpackProject('plugin').projectDir, 'src/test/resources') - project.copyRestSpec.from(xPackResources) { - include 'rest-api-spec/api/**' - } - } -} - diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/oidc/OpenIdConnectRealmTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/oidc/OpenIdConnectRealmTests.java index 41de06c9afeb0..1bf30ac116220 100644 --- 
a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/oidc/OpenIdConnectRealmTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/oidc/OpenIdConnectRealmTests.java @@ -16,6 +16,7 @@ import org.elasticsearch.env.Environment; import org.elasticsearch.env.TestEnvironment; import org.elasticsearch.license.XPackLicenseState; +import org.elasticsearch.rest.RestUtils; import org.elasticsearch.xpack.core.security.action.oidc.OpenIdConnectLogoutResponse; import org.elasticsearch.xpack.core.security.action.oidc.OpenIdConnectPrepareAuthenticationResponse; import org.elasticsearch.xpack.core.security.authc.AuthenticationResult; @@ -34,6 +35,7 @@ import java.util.Collection; import java.util.Collections; import java.util.Date; +import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; @@ -47,6 +49,7 @@ import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.hasKey; import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.notNullValue; import static org.mockito.Matchers.any; @@ -254,9 +257,33 @@ public void testBuildLogoutResponse() throws Exception { // Random strings, as we will not validate the token here final JWT idToken = generateIdToken(randomAlphaOfLength(8), randomAlphaOfLength(8), randomAlphaOfLength(8)); final OpenIdConnectLogoutResponse logoutResponse = realm.buildLogoutResponse(idToken); - assertThat(logoutResponse.getEndSessionUrl(), containsString("https://op.example.org/logout?id_token_hint=")); - assertThat(logoutResponse.getEndSessionUrl(), - containsString("&post_logout_redirect_uri=https%3A%2F%2Frp.elastic.co%2Fsucc_logout&state=")); + final String endSessionUrl = logoutResponse.getEndSessionUrl(); + final Map parameters = new HashMap<>(); + RestUtils.decodeQueryString(endSessionUrl, endSessionUrl.indexOf("?") 
+ 1, parameters); + assertThat(parameters.size(), equalTo(3)); + assertThat(parameters, hasKey("id_token_hint")); + assertThat(parameters, hasKey("post_logout_redirect_uri")); + assertThat(parameters, hasKey("state")); + } + + public void testBuildLogoutResponseFromEndsessionEndpointWithExistingParameters() throws Exception { + final Settings.Builder realmSettingsWithFunkyEndpoint = getBasicRealmSettings(); + realmSettingsWithFunkyEndpoint.put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.OP_ENDSESSION_ENDPOINT), + "https://op.example.org/logout?parameter=123"); + final OpenIdConnectRealm realm = new OpenIdConnectRealm(buildConfig(realmSettingsWithFunkyEndpoint.build(), threadContext), null, + null); + + // Random strings, as we will not validate the token here + final JWT idToken = generateIdToken(randomAlphaOfLength(8), randomAlphaOfLength(8), randomAlphaOfLength(8)); + final OpenIdConnectLogoutResponse logoutResponse = realm.buildLogoutResponse(idToken); + final String endSessionUrl = logoutResponse.getEndSessionUrl(); + final Map parameters = new HashMap<>(); + RestUtils.decodeQueryString(endSessionUrl, endSessionUrl.indexOf("?") + 1, parameters); + assertThat(parameters.size(), equalTo(4)); + assertThat(parameters, hasKey("parameter")); + assertThat(parameters, hasKey("post_logout_redirect_uri")); + assertThat(parameters, hasKey("state")); + assertThat(parameters, hasKey("id_token_hint")); } public void testBuildingAuthenticationRequestWithExistingStateAndNonce() { diff --git a/x-pack/plugin/sql/qa/src/main/resources/agg.csv-spec b/x-pack/plugin/sql/qa/src/main/resources/agg.csv-spec index 5eb7e0054c429..9c23a832ec851 100644 --- a/x-pack/plugin/sql/qa/src/main/resources/agg.csv-spec +++ b/x-pack/plugin/sql/qa/src/main/resources/agg.csv-spec @@ -531,6 +531,30 @@ null |10 |null 1953-11-01T00:00:00.000Z|1 |1953-11-07T00:00:00.000Z ; +histogramOneDay +schema::h:ts|c:l|birth_date:ts +SELECT HISTOGRAM(birth_date, INTERVAL 1 DAY) AS h, COUNT(*) as c, 
birth_date FROM test_emp WHERE YEAR(birth_date) BETWEEN 1959 AND 1960 GROUP BY h, birth_date ORDER BY h ASC; + + h | c | birth_date +------------------------+---------------+------------------------ +1959-01-27T00:00:00.000Z|1 |1959-01-27T00:00:00.000Z +1959-04-07T00:00:00.000Z|1 |1959-04-07T00:00:00.000Z +1959-07-23T00:00:00.000Z|2 |1959-07-23T00:00:00.000Z +1959-08-10T00:00:00.000Z|1 |1959-08-10T00:00:00.000Z +1959-08-19T00:00:00.000Z|1 |1959-08-19T00:00:00.000Z +1959-10-01T00:00:00.000Z|1 |1959-10-01T00:00:00.000Z +1959-12-03T00:00:00.000Z|1 |1959-12-03T00:00:00.000Z +1959-12-25T00:00:00.000Z|1 |1959-12-25T00:00:00.000Z +1960-02-20T00:00:00.000Z|1 |1960-02-20T00:00:00.000Z +1960-03-09T00:00:00.000Z|1 |1960-03-09T00:00:00.000Z +1960-05-25T00:00:00.000Z|1 |1960-05-25T00:00:00.000Z +1960-07-20T00:00:00.000Z|1 |1960-07-20T00:00:00.000Z +1960-08-09T00:00:00.000Z|1 |1960-08-09T00:00:00.000Z +1960-09-06T00:00:00.000Z|1 |1960-09-06T00:00:00.000Z +1960-10-04T00:00:00.000Z|1 |1960-10-04T00:00:00.000Z +1960-12-17T00:00:00.000Z|1 |1960-12-17T00:00:00.000Z +; + histogramDateTimeWithMonthOnTop schema::h:i|c:l SELECT HISTOGRAM(MONTH(birth_date), 2) AS h, COUNT(*) as c FROM test_emp GROUP BY h ORDER BY h DESC; diff --git a/x-pack/plugin/sql/src/main/bin/elasticsearch-sql-cli b/x-pack/plugin/sql/src/main/bin/elasticsearch-sql-cli index 676b842beedaa..47916a7975ee3 100755 --- a/x-pack/plugin/sql/src/main/bin/elasticsearch-sql-cli +++ b/x-pack/plugin/sql/src/main/bin/elasticsearch-sql-cli @@ -6,9 +6,9 @@ source "`dirname "$0"`"/elasticsearch-env -source "`dirname "$0"`"/x-pack-env +source "$ES_HOME"/bin/x-pack-env -CLI_JAR=$(ls $ES_HOME/bin/elasticsearch-sql-cli-*.jar) +CLI_JAR=$(ls "$ES_HOME"/bin/elasticsearch-sql-cli-*.jar) exec \ "$JAVA" \ diff --git a/x-pack/plugin/sql/src/main/bin/elasticsearch-sql-cli.bat b/x-pack/plugin/sql/src/main/bin/elasticsearch-sql-cli.bat index 48daf97b515ab..e9812981517e5 100644 --- a/x-pack/plugin/sql/src/main/bin/elasticsearch-sql-cli.bat +++ 
b/x-pack/plugin/sql/src/main/bin/elasticsearch-sql-cli.bat @@ -9,7 +9,7 @@ setlocal enableextensions call "%~dp0elasticsearch-env.bat" || exit /b 1 -call "%~dp0x-pack-env.bat" || exit /b 1 +call "%ES_HOME%/bin/x-pack-env.bat" || exit /b 1 set CLI_JAR=%ES_HOME%/bin/* diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/grouping/Histogram.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/grouping/Histogram.java index e6ba5f6326cfa..623822ec713b3 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/grouping/Histogram.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/grouping/Histogram.java @@ -31,6 +31,7 @@ public class Histogram extends GroupingFunction { private final ZoneId zoneId; public static String YEAR_INTERVAL = DateHistogramInterval.YEAR.toString(); public static String MONTH_INTERVAL = DateHistogramInterval.MONTH.toString(); + public static String DAY_INTERVAL = DateHistogramInterval.DAY.toString(); public Histogram(Source source, Expression field, Expression interval, ZoneId zoneId) { super(source, field, Collections.singletonList(interval)); diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/planner/QueryFolder.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/planner/QueryFolder.java index f6987e88c9d0f..83b5ae9d626fd 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/planner/QueryFolder.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/planner/QueryFolder.java @@ -41,6 +41,7 @@ import org.elasticsearch.xpack.sql.expression.function.aggregate.TopHits; import org.elasticsearch.xpack.sql.expression.function.grouping.Histogram; import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DateTimeHistogramFunction; +import org.elasticsearch.xpack.sql.expression.literal.interval.IntervalDayTime; import 
org.elasticsearch.xpack.sql.expression.literal.interval.IntervalYearMonth; import org.elasticsearch.xpack.sql.expression.literal.interval.Intervals; import org.elasticsearch.xpack.sql.plan.logical.Pivot; @@ -80,6 +81,7 @@ import org.elasticsearch.xpack.sql.util.Check; import org.elasticsearch.xpack.sql.util.DateUtils; +import java.time.Duration; import java.time.Period; import java.util.ArrayList; import java.util.Arrays; @@ -90,6 +92,7 @@ import java.util.concurrent.atomic.AtomicReference; import static org.elasticsearch.xpack.ql.util.CollectionUtils.combine; +import static org.elasticsearch.xpack.sql.expression.function.grouping.Histogram.DAY_INTERVAL; import static org.elasticsearch.xpack.sql.expression.function.grouping.Histogram.MONTH_INTERVAL; import static org.elasticsearch.xpack.sql.expression.function.grouping.Histogram.YEAR_INTERVAL; import static org.elasticsearch.xpack.sql.planner.QueryTranslator.toAgg; @@ -332,14 +335,24 @@ else if (exp instanceof GroupingFunction) { // When the histogram is `INTERVAL '1' YEAR` or `INTERVAL '1' MONTH`, the interval used in // the ES date_histogram will be a calendar_interval with value "1y" or "1M" respectively. - // All other intervals will be fixed_intervals expressed in ms. 
if (field instanceof FieldAttribute) { key = new GroupByDateHistogram(aggId, QueryTranslator.nameOf(field), calendarInterval, h.zoneId()); } else if (field instanceof Function) { key = new GroupByDateHistogram(aggId, ((Function) field).asScript(), calendarInterval, h.zoneId()); } } - // typical interval + // interval of exactly 1 day + else if (value instanceof IntervalDayTime + && ((IntervalDayTime) value).interval().equals(Duration.ofDays(1))) { + // When the histogram is `INTERVAL '1' DAY` the interval used in + // the ES date_histogram will be a calendar_interval with value "1d" + if (field instanceof FieldAttribute) { + key = new GroupByDateHistogram(aggId, QueryTranslator.nameOf(field), DAY_INTERVAL, h.zoneId()); + } else if (field instanceof Function) { + key = new GroupByDateHistogram(aggId, ((Function) field).asScript(), DAY_INTERVAL, h.zoneId()); + } + } + // All other intervals will be fixed_intervals expressed in ms. else { long intervalAsMillis = Intervals.inMillis(h.interval()); diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/planner/QueryTranslatorTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/planner/QueryTranslatorTests.java index 8ac815febd1e0..c392e6cb5dd92 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/planner/QueryTranslatorTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/planner/QueryTranslatorTests.java @@ -1078,6 +1078,45 @@ public void testGroupByMoreMonthsHistogramQueryTranslator() { + "\"fixed_interval\":\"12960000000ms\",\"time_zone\":\"Z\"}}}]}}}")); } + public void testGroupByOneDayHistogramQueryTranslator() { + PhysicalPlan p = optimizeAndPlan("SELECT HISTOGRAM(date, INTERVAL 1 DAY) AS h FROM test GROUP BY h"); + assertEquals(EsQueryExec.class, p.getClass()); + EsQueryExec eqe = (EsQueryExec) p; + assertEquals(1, eqe.output().size()); + assertEquals("h", eqe.output().get(0).qualifiedName()); + assertEquals(DATETIME, 
eqe.output().get(0).dataType()); + assertThat(eqe.queryContainer().aggs().asAggBuilder().toString().replaceAll("\\s+", ""), + endsWith("\"date_histogram\":{\"field\":\"date\",\"missing_bucket\":true,\"value_type\":\"date\",\"order\":\"asc\"," + + "\"calendar_interval\":\"1d\",\"time_zone\":\"Z\"}}}]}}}")); + } + + public void testGroupByMoreDaysHistogramQueryTranslator() { + PhysicalPlan p = optimizeAndPlan("SELECT HISTOGRAM(date, INTERVAL '1 5' DAY TO HOUR) AS h FROM test GROUP BY h"); + assertEquals(EsQueryExec.class, p.getClass()); + EsQueryExec eqe = (EsQueryExec) p; + assertEquals(1, eqe.output().size()); + assertEquals("h", eqe.output().get(0).qualifiedName()); + assertEquals(DATETIME, eqe.output().get(0).dataType()); + assertThat(eqe.queryContainer().aggs().asAggBuilder().toString().replaceAll("\\s+", ""), + endsWith("\"date_histogram\":{\"field\":\"date\",\"missing_bucket\":true,\"value_type\":\"date\",\"order\":\"asc\"," + + "\"fixed_interval\":\"104400000ms\",\"time_zone\":\"Z\"}}}]}}}")); + } + + public void testGroupByMoreDaysHistogram_WithFunction_QueryTranslator() { + PhysicalPlan p = optimizeAndPlan("SELECT HISTOGRAM(date + INTERVAL 5 DAYS, INTERVAL 1 DAY) AS h FROM test GROUP BY h"); + assertEquals(EsQueryExec.class, p.getClass()); + EsQueryExec eqe = (EsQueryExec) p; + assertEquals(1, eqe.output().size()); + assertEquals("h", eqe.output().get(0).qualifiedName()); + assertEquals(DATETIME, eqe.output().get(0).dataType()); + assertThat(eqe.queryContainer().aggs().asAggBuilder().toString().replaceAll("\\s+", ""), + endsWith("\"date_histogram\":{\"script\":{\"source\":\"InternalSqlScriptUtils.add(" + + "InternalSqlScriptUtils.docValue(doc,params.v0),InternalSqlScriptUtils.intervalDayTime(params.v1,params.v2))\"," + + "\"lang\":\"painless\",\"params\":{\"v0\":\"date\",\"v1\":\"PT120H\",\"v2\":\"INTERVAL_DAY\"}}," + + "\"missing_bucket\":true,\"value_type\":\"long\",\"order\":\"asc\"," + + "\"calendar_interval\":\"1d\",\"time_zone\":\"Z\"}}}]}}}")); + } + 
public void testGroupByYearAndScalarsQueryTranslator() { PhysicalPlan p = optimizeAndPlan("SELECT YEAR(CAST(date + INTERVAL 5 months AS DATE)) FROM test GROUP BY 1"); assertEquals(EsQueryExec.class, p.getClass()); diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/api/cat.ml_data_frame_analytics.json b/x-pack/plugin/src/test/resources/rest-api-spec/api/cat.ml_data_frame_analytics.json index f8e3f4c46ea1e..072ba74f6f8e6 100644 --- a/x-pack/plugin/src/test/resources/rest-api-spec/api/cat.ml_data_frame_analytics.json +++ b/x-pack/plugin/src/test/resources/rest-api-spec/api/cat.ml_data_frame_analytics.json @@ -1,7 +1,7 @@ { "cat.ml_data_frame_analytics":{ "documentation":{ - "url":"http://www.elastic.co/guide/en/elasticsearch/reference/current/get-dfanalytics-stats.html" + "url":"http://www.elastic.co/guide/en/elasticsearch/reference/current/cat-dfanalytics.html" }, "stability":"stable", "url":{ diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/api/cat.ml_datafeeds.json b/x-pack/plugin/src/test/resources/rest-api-spec/api/cat.ml_datafeeds.json index b94ace85ee1b7..d750ff56b8972 100644 --- a/x-pack/plugin/src/test/resources/rest-api-spec/api/cat.ml_datafeeds.json +++ b/x-pack/plugin/src/test/resources/rest-api-spec/api/cat.ml_datafeeds.json @@ -1,7 +1,7 @@ { "cat.ml_datafeeds":{ "documentation":{ - "url":"http://www.elastic.co/guide/en/elasticsearch/reference/current/ml-get-datafeed-stats.html" + "url":"http://www.elastic.co/guide/en/elasticsearch/reference/current/cat-datafeeds.html" }, "stability":"stable", "url":{ diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/api/monitoring.bulk.json b/x-pack/plugin/src/test/resources/rest-api-spec/api/monitoring.bulk.json index 3e12be11ba75c..698b818b450af 100644 --- a/x-pack/plugin/src/test/resources/rest-api-spec/api/monitoring.bulk.json +++ b/x-pack/plugin/src/test/resources/rest-api-spec/api/monitoring.bulk.json @@ -1,7 +1,7 @@ { "monitoring.bulk":{ "documentation":{ - 
"url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/es-monitoring.html" + "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/monitor-elasticsearch-cluster.html" }, "stability":"experimental", "url":{ diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/test/analytics/top_metrics.yml b/x-pack/plugin/src/test/resources/rest-api-spec/test/analytics/top_metrics.yml index 9c009cd20b561..4f06bb4886343 100644 --- a/x-pack/plugin/src/test/resources/rest-api-spec/test/analytics/top_metrics.yml +++ b/x-pack/plugin/src/test/resources/rest-api-spec/test/analytics/top_metrics.yml @@ -1,8 +1,5 @@ --- "sort by long field": - - skip: - version: "all" - reason: "AwaitsFix https://github.com/elastic/elasticsearch/issues/52418" - do: bulk: index: test @@ -11,7 +8,7 @@ - '{"index": {}}' - '{"s": 1, "v": 3.1415}' - '{"index": {}}' - - '{"s": 2, "v": 1}' + - '{"s": 2, "v": 1.0}' - '{"index": {}}' - '{"s": 3, "v": 2.71828}' @@ -59,9 +56,6 @@ --- "sort by double field": - - skip: - version: "all" - reason: "AwaitsFix https://github.com/elastic/elasticsearch/issues/52418" - do: indices.create: index: test @@ -79,7 +73,7 @@ - '{"index": {}}' - '{"s": 1.0, "v": 3.1415}' - '{"index": {}}' - - '{"s": 2.0, "v": 1}' + - '{"s": 2.0, "v": 1.0}' - '{"index": {}}' - '{"s": 3.0, "v": 2.71828}' @@ -112,9 +106,6 @@ --- "sort by scaled float field": - - skip: - version: "all" - reason: "AwaitsFix https://github.com/elastic/elasticsearch/issues/52418" - do: indices.create: index: test @@ -132,7 +123,7 @@ - '{"index": {}}' - '{"s": 1, "v": 3.1415}' - '{"index": {}}' - - '{"s": 2, "v": 1}' + - '{"s": 2, "v": 1.0}' - '{"index": {}}' - '{"s": 3, "v": 2.71828}' @@ -165,9 +156,13 @@ --- "sort by keyword field fails": - - skip: - version: "all" - reason: "AwaitsFix https://github.com/elastic/elasticsearch/issues/52409" + - do: + indices.create: + index: test + body: + settings: + number_of_shards: 1 # The failure message isn't predictable with more than one shard + - 
do: bulk: index: test @@ -179,6 +174,7 @@ - do: catch: bad_request search: + index: test size: 0 body: aggs: @@ -191,9 +187,13 @@ --- "sort by score": - - skip: - version: "all" - reason: "AwaitsFix https://github.com/elastic/elasticsearch/issues/52409" + - do: + indices.create: + index: test + body: + settings: + number_of_shards: 1 # The score isn't predictable with more than one shard + - do: bulk: index: test @@ -202,7 +202,7 @@ - '{"index": {}}' - '{"s": "big cat", "v": 3.1415}' - '{"index": {}}' - - '{"s": "cat", "v": 1}' + - '{"s": "cat", "v": 1.0}' - '{"index": {}}' - '{"s": "the small dog", "v": 2.71828}' @@ -251,9 +251,13 @@ --- "sort by string script fails": - - skip: - version: "all" - reason: "AwaitsFix https://github.com/elastic/elasticsearch/issues/52409" + - do: + indices.create: + index: test + body: + settings: + number_of_shards: 1 # The failure message isn't predictable with more than one shard + - do: bulk: index: test @@ -266,6 +270,7 @@ catch: bad_request search: size: 0 + index: test body: aggs: tm: diff --git a/x-pack/qa/build.gradle b/x-pack/qa/build.gradle index 2555b0ef729dc..f933cac8d31cc 100644 --- a/x-pack/qa/build.gradle +++ b/x-pack/qa/build.gradle @@ -1,27 +1,9 @@ // this file must exist so that qa projects are found // by the elasticsearch x-plugins include mechanism -import org.elasticsearch.gradle.test.RestIntegTestTask - apply plugin: 'elasticsearch.build' test.enabled = false dependencies { compile project(':test:framework') } - -subprojects { - // HACK: please fix this - // we want to add the rest api specs for xpack to qa tests, but we - // need to wait until after the project is evaluated to only apply - // to those that rest tests. this used to be done automatically - // when xpack was a plugin, but now there is no place with xpack as a module. - // instead, we should package these and make them easy to use for rest tests, - // but currently, they must be copied into the resources of the test runner. 
- project.tasks.withType(RestIntegTestTask) { - File xpackResources = new File(xpackProject('plugin').projectDir, 'src/test/resources') - project.copyRestSpec.from(xpackResources) { - include 'rest-api-spec/api/**' - } - } -} diff --git a/x-pack/qa/core-rest-tests-with-security/build.gradle b/x-pack/qa/core-rest-tests-with-security/build.gradle index f273efaf5bcd2..4fc4428c8ed6f 100644 --- a/x-pack/qa/core-rest-tests-with-security/build.gradle +++ b/x-pack/qa/core-rest-tests-with-security/build.gradle @@ -6,8 +6,13 @@ dependencies { testCompile project(':x-pack:qa') } +restResources { + restTests { + includeCore '*' + } +} + integTest { - includePackaged = true runner { systemProperty 'tests.rest.blacklist', [ diff --git a/x-pack/qa/full-cluster-restart/build.gradle b/x-pack/qa/full-cluster-restart/build.gradle index 757d8cd298154..c0527f98766a6 100644 --- a/x-pack/qa/full-cluster-restart/build.gradle +++ b/x-pack/qa/full-cluster-restart/build.gradle @@ -1,8 +1,6 @@ import org.elasticsearch.gradle.Version -import org.elasticsearch.gradle.info.BuildParams import org.elasticsearch.gradle.testclusters.RestTestRunnerTask - apply plugin: 'elasticsearch.testclusters' apply plugin: 'elasticsearch.standalone-test' @@ -39,24 +37,6 @@ tasks.register("copyTestNodeKeyMaterial", Copy) { into outputDir } -configurations { - restSpec -} - -dependencies { - restSpec project(':rest-api-spec') -} - -processTestResources { - dependsOn configurations.restSpec - from({ zipTree(configurations.restSpec.singleFile) }) { - include 'rest-api-spec/api/**' - } - from(project(xpackModule('core')).sourceSets.test.resources) { - include 'rest-api-spec/api/**' - } -} - for (Version bwcVersion : bwcVersions.indexCompatible) { String baseName = "v${bwcVersion}" diff --git a/x-pack/qa/multi-cluster-search-security/build.gradle b/x-pack/qa/multi-cluster-search-security/build.gradle index 3f3d03a7e685e..715d8857bac1b 100644 --- a/x-pack/qa/multi-cluster-search-security/build.gradle +++ 
b/x-pack/qa/multi-cluster-search-security/build.gradle @@ -7,6 +7,12 @@ dependencies { testCompile project(':x-pack:qa') } +restResources { + restApi { + includeXpack 'security' + } +} + task 'remote-cluster'(type: RestIntegTestTask) { mustRunAfter(precommit) runner { diff --git a/x-pack/qa/multi-cluster-tests-with-security/build.gradle b/x-pack/qa/multi-cluster-tests-with-security/build.gradle index 74b15cd87a51f..b6a3088f250aa 100644 --- a/x-pack/qa/multi-cluster-tests-with-security/build.gradle +++ b/x-pack/qa/multi-cluster-tests-with-security/build.gradle @@ -8,6 +8,12 @@ dependencies { testCompile project(':client:rest-high-level') } +restResources { + restApi { + includeXpack 'security', 'transform' + } +} + task 'remote-cluster'(type: RestIntegTestTask) { mustRunAfter(precommit) runner { diff --git a/x-pack/qa/rolling-upgrade-basic/build.gradle b/x-pack/qa/rolling-upgrade-basic/build.gradle index 9367b74aae379..fd4c78f6ff8b8 100644 --- a/x-pack/qa/rolling-upgrade-basic/build.gradle +++ b/x-pack/qa/rolling-upgrade-basic/build.gradle @@ -1,5 +1,4 @@ import org.elasticsearch.gradle.Version -import org.elasticsearch.gradle.info.BuildParams import org.elasticsearch.gradle.testclusters.RestTestRunnerTask apply plugin: 'elasticsearch.testclusters' @@ -14,25 +13,6 @@ tasks.register("bwcTest") { group = 'verification' } -configurations { - restSpec -} - -dependencies { - restSpec project(':rest-api-spec') -} - -processTestResources { - dependsOn configurations.restSpec - from({ zipTree(configurations.restSpec.singleFile) }) { - include 'rest-api-spec/api/**' - } - from(project(xpackProject('plugin').path).sourceSets.test.resources) { - include 'rest-api-spec/api/**' - } -} - - for (Version bwcVersion : bwcVersions.wireCompatible) { String baseName = "v${bwcVersion}" diff --git a/x-pack/qa/rolling-upgrade-multi-cluster/build.gradle b/x-pack/qa/rolling-upgrade-multi-cluster/build.gradle index bba1d1fd96e24..5f7c9b6af5662 100644 --- 
a/x-pack/qa/rolling-upgrade-multi-cluster/build.gradle +++ b/x-pack/qa/rolling-upgrade-multi-cluster/build.gradle @@ -1,5 +1,4 @@ import org.elasticsearch.gradle.Version -import org.elasticsearch.gradle.info.BuildParams import org.elasticsearch.gradle.testclusters.RestTestRunnerTask apply plugin: 'elasticsearch.testclusters' @@ -14,24 +13,6 @@ tasks.register("bwcTest") { group = 'verification' } -configurations { - restSpec -} - -dependencies { - restSpec project(':rest-api-spec') -} - -processTestResources { - dependsOn configurations.restSpec - from({ zipTree(configurations.restSpec.singleFile) }) { - include 'rest-api-spec/api/**' - } - from(project(xpackProject('plugin').path).sourceSets.test.resources) { - include 'rest-api-spec/api/**' - } -} - for (Version bwcVersion : bwcVersions.wireCompatible) { String baseName = "v${bwcVersion}" diff --git a/x-pack/qa/rolling-upgrade/build.gradle b/x-pack/qa/rolling-upgrade/build.gradle index 6bda921a54596..4f69e0fb8a4a7 100644 --- a/x-pack/qa/rolling-upgrade/build.gradle +++ b/x-pack/qa/rolling-upgrade/build.gradle @@ -1,5 +1,4 @@ import org.elasticsearch.gradle.Version -import org.elasticsearch.gradle.info.BuildParams import org.elasticsearch.gradle.testclusters.RestTestRunnerTask apply plugin: 'elasticsearch.testclusters' @@ -10,6 +9,13 @@ dependencies { testCompile project(':client:rest-high-level') } +restResources { + restApi { + includeCore '*' + includeXpack '*' + } +} + forbiddenPatterns { exclude '**/system_key' } @@ -21,24 +27,6 @@ tasks.register("bwcTest") { group = 'verification' } -configurations { - restSpec -} - -dependencies { - restSpec project(':rest-api-spec') -} - -processTestResources { - dependsOn configurations.restSpec - from({ zipTree(configurations.restSpec.singleFile) }) { - include 'rest-api-spec/api/**' - } - from(project(xpackProject('plugin').path).sourceSets.test.resources) { - include 'rest-api-spec/api/**' - } -} - task copyTestNodeKeyMaterial(type: Copy) { from 
project(':x-pack:plugin:core').files('src/test/resources/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.pem', 'src/test/resources/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.crt') diff --git a/x-pack/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/mixed_cluster/40_ml_datafeed_crud.yml b/x-pack/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/mixed_cluster/40_ml_datafeed_crud.yml index 6ff05357887ef..e84bd217bd257 100644 --- a/x-pack/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/mixed_cluster/40_ml_datafeed_crud.yml +++ b/x-pack/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/mixed_cluster/40_ml_datafeed_crud.yml @@ -8,6 +8,9 @@ setup: --- "Test old cluster datafeed without aggs": + - skip: + version: "all" + reason: "Awaits fix: https://github.com/elastic/elasticsearch/issues/52739" - do: ml.get_datafeeds: datafeed_id: old-cluster-datafeed-without-aggs diff --git a/x-pack/qa/smoke-test-security-with-mustache/build.gradle b/x-pack/qa/smoke-test-security-with-mustache/build.gradle index 748252044c36c..3c38b9ae8f30f 100644 --- a/x-pack/qa/smoke-test-security-with-mustache/build.gradle +++ b/x-pack/qa/smoke-test-security-with-mustache/build.gradle @@ -6,6 +6,12 @@ dependencies { testCompile project(':x-pack:qa') } +restResources { + restApi { + includeXpack 'security' + } +} + testClusters.integTest { testDistribution = 'DEFAULT' setting 'xpack.watcher.enabled', 'false' diff --git a/x-pack/qa/smoke-test-watcher-with-security/build.gradle b/x-pack/qa/smoke-test-watcher-with-security/build.gradle index 236ec27cfa4e0..6fbbfa87ed557 100644 --- a/x-pack/qa/smoke-test-watcher-with-security/build.gradle +++ b/x-pack/qa/smoke-test-watcher-with-security/build.gradle @@ -6,14 +6,15 @@ dependencies { testCompile project(':x-pack:qa') } -// bring in watcher rest test suite -task copyWatcherRestTests(type: Copy) { - into project.sourceSets.test.output.resourcesDir - from 
project(xpackProject('plugin').path).sourceSets.test.resources.srcDirs - include 'rest-api-spec/test/watcher/**' +restResources { + restApi { + includeXpack 'watcher', 'security', 'xpack' + } + restTests { + includeXpack 'watcher' + } } -integTest.runner.dependsOn copyWatcherRestTests testClusters.integTest { testDistribution = 'DEFAULT' setting 'xpack.ilm.enabled', 'false' diff --git a/x-pack/qa/smoke-test-watcher/build.gradle b/x-pack/qa/smoke-test-watcher/build.gradle index b3b638e938343..51d56669b837e 100644 --- a/x-pack/qa/smoke-test-watcher/build.gradle +++ b/x-pack/qa/smoke-test-watcher/build.gradle @@ -6,6 +6,12 @@ dependencies { testCompile project(':x-pack:qa') } +restResources { + restApi { + includeXpack 'watcher' + } +} + testClusters.integTest { testDistribution = 'DEFAULT' setting 'xpack.slm.enabled', 'false' diff --git a/x-pack/qa/third-party/jira/build.gradle b/x-pack/qa/third-party/jira/build.gradle index f724d5f1f3674..f2642d7150049 100644 --- a/x-pack/qa/third-party/jira/build.gradle +++ b/x-pack/qa/third-party/jira/build.gradle @@ -12,22 +12,17 @@ dependencies { testCompile project(path: xpackModule('watcher'), configuration: 'runtime') } +restResources { + restApi { + includeXpack 'watcher' + } +} String jiraUrl = System.getenv('jira_url') String jiraUser = System.getenv('jira_user') String jiraPassword = System.getenv('jira_password') String jiraProject = System.getenv('jira_project') -testClusters.integTest { - setting 'xpack.security.enabled', 'false' - setting 'xpack.monitoring.enabled', 'false' - setting 'xpack.ml.enabled', 'false' - setting 'xpack.license.self_generated.type', 'trial' - setting 'logger.org.elasticsearch.xpack.watcher', 'DEBUG' - setting 'xpack.notification.jira.account.test.issue_defaults.issuetype.name', 'Bug' - setting 'xpack.notification.jira.account.test.issue_defaults.labels.0', 'integration-tests' -} - task cleanJira(type: DefaultTask) { doLast { List issues = jiraIssues(jiraProject) @@ -46,6 +41,14 @@ if (!jiraUrl 
&& !jiraUser && !jiraPassword && !jiraProject) { testingConventions.enabled = false } else { testClusters.integTest { + testDistribution = 'DEFAULT' + setting 'xpack.security.enabled', 'false' + setting 'xpack.monitoring.enabled', 'false' + setting 'xpack.ml.enabled', 'false' + setting 'xpack.license.self_generated.type', 'trial' + setting 'logger.org.elasticsearch.xpack.watcher', 'DEBUG' + setting 'xpack.notification.jira.account.test.issue_defaults.issuetype.name', 'Bug' + setting 'xpack.notification.jira.account.test.issue_defaults.labels.0', 'integration-tests' setting 'xpack.notification.jira.account.test.issue_defaults.project.key', jiraProject keystore 'xpack.notification.jira.account.test.secure_url', jiraUrl keystore 'xpack.notification.jira.account.test.secure_user', jiraUser diff --git a/x-pack/qa/third-party/pagerduty/build.gradle b/x-pack/qa/third-party/pagerduty/build.gradle index ebee73f9eaaed..c65f04118e0d3 100644 --- a/x-pack/qa/third-party/pagerduty/build.gradle +++ b/x-pack/qa/third-party/pagerduty/build.gradle @@ -9,11 +9,18 @@ dependencies { String pagerDutyServiceKey = System.getenv('pagerduty_service_api_key') +restResources { + restApi { + includeXpack 'watcher' + } +} + if (!pagerDutyServiceKey) { integTest.enabled = false testingConventions.enabled = false } else { testClusters.integTest { + testDistribution = 'DEFAULT' setting 'xpack.security.enabled', 'false' setting 'xpack.monitoring.enabled', 'false' setting 'xpack.ml.enabled', 'false' diff --git a/x-pack/qa/third-party/slack/build.gradle b/x-pack/qa/third-party/slack/build.gradle index 815d1cb3bbfff..a1b92050482bb 100644 --- a/x-pack/qa/third-party/slack/build.gradle +++ b/x-pack/qa/third-party/slack/build.gradle @@ -7,6 +7,12 @@ dependencies { testCompile project(path: xpackModule('watcher'), configuration: 'runtime') } +restResources { + restApi { + includeXpack 'watcher' + } +} + String slackUrl = System.getenv('slack_url') if (!slackUrl) { @@ -14,6 +20,7 @@ if (!slackUrl) { 
testingConventions.enabled = false } else { testClusters.integTest { + testDistribution = 'DEFAULT' setting 'xpack.security.enabled', 'false' setting 'xpack.monitoring.enabled', 'false' setting 'xpack.ml.enabled', 'false'