diff --git a/.buildkite/pipelines/intake.yml b/.buildkite/pipelines/intake.yml index abde05ec7919e..18a93c9b63a3e 100644 --- a/.buildkite/pipelines/intake.yml +++ b/.buildkite/pipelines/intake.yml @@ -48,7 +48,7 @@ steps: timeout_in_minutes: 300 matrix: setup: - BWC_VERSION: ["7.17.20", "8.13.2", "8.14.0"] + BWC_VERSION: ["7.17.21", "8.13.3", "8.14.0"] agents: provider: gcp image: family/elasticsearch-ubuntu-2004 diff --git a/.buildkite/pipelines/periodic-packaging.yml b/.buildkite/pipelines/periodic-packaging.yml index 53243c2c081eb..c306e1d9f63cb 100644 --- a/.buildkite/pipelines/periodic-packaging.yml +++ b/.buildkite/pipelines/periodic-packaging.yml @@ -305,8 +305,8 @@ steps: env: BWC_VERSION: 7.16.3 - - label: "{{matrix.image}} / 7.17.20 / packaging-tests-upgrade" - command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.17.20 + - label: "{{matrix.image}} / 7.17.21 / packaging-tests-upgrade" + command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.17.21 timeout_in_minutes: 300 matrix: setup: @@ -319,7 +319,7 @@ steps: machineType: custom-16-32768 buildDirectory: /dev/shm/bk env: - BWC_VERSION: 7.17.20 + BWC_VERSION: 7.17.21 - label: "{{matrix.image}} / 8.0.1 / packaging-tests-upgrade" command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.0.1 @@ -529,8 +529,8 @@ steps: env: BWC_VERSION: 8.12.2 - - label: "{{matrix.image}} / 8.13.2 / packaging-tests-upgrade" - command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.13.2 + - label: "{{matrix.image}} / 8.13.3 / packaging-tests-upgrade" + command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.13.3 timeout_in_minutes: 300 matrix: setup: @@ -543,7 +543,7 @@ steps: machineType: custom-16-32768 buildDirectory: /dev/shm/bk env: - BWC_VERSION: 8.13.2 + BWC_VERSION: 8.13.3 - label: "{{matrix.image}} / 
8.14.0 / packaging-tests-upgrade" command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.14.0 diff --git a/.buildkite/pipelines/periodic.yml b/.buildkite/pipelines/periodic.yml index c5b9bb830a8d6..3410436eda2bf 100644 --- a/.buildkite/pipelines/periodic.yml +++ b/.buildkite/pipelines/periodic.yml @@ -172,8 +172,8 @@ steps: buildDirectory: /dev/shm/bk env: BWC_VERSION: 7.16.3 - - label: 7.17.20 / bwc - command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true v7.17.20#bwcTest + - label: 7.17.21 / bwc + command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true v7.17.21#bwcTest timeout_in_minutes: 300 agents: provider: gcp @@ -181,7 +181,7 @@ steps: machineType: n1-standard-32 buildDirectory: /dev/shm/bk env: - BWC_VERSION: 7.17.20 + BWC_VERSION: 7.17.21 - label: 8.0.1 / bwc command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true v8.0.1#bwcTest timeout_in_minutes: 300 @@ -312,8 +312,8 @@ steps: buildDirectory: /dev/shm/bk env: BWC_VERSION: 8.12.2 - - label: 8.13.2 / bwc - command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true v8.13.2#bwcTest + - label: 8.13.3 / bwc + command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true v8.13.3#bwcTest timeout_in_minutes: 300 agents: provider: gcp @@ -321,7 +321,7 @@ steps: machineType: n1-standard-32 buildDirectory: /dev/shm/bk env: - BWC_VERSION: 8.13.2 + BWC_VERSION: 8.13.3 - label: 8.14.0 / bwc command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true v8.14.0#bwcTest timeout_in_minutes: 300 @@ -396,7 +396,7 @@ steps: setup: ES_RUNTIME_JAVA: - openjdk17 - BWC_VERSION: ["7.17.20", "8.13.2", "8.14.0"] + BWC_VERSION: ["7.17.21", "8.13.3", "8.14.0"] agents: provider: gcp image: family/elasticsearch-ubuntu-2004 @@ -438,7 +438,7 @@ steps: - graalvm-ce17 - openjdk17 - openjdk21 - BWC_VERSION: ["7.17.20", "8.13.2", "8.14.0"] + BWC_VERSION: ["7.17.21", "8.13.3", "8.14.0"] agents: provider: gcp image: family/elasticsearch-ubuntu-2004 diff --git a/.ci/bwcVersions 
b/.ci/bwcVersions index 2d8ace4845f4f..46165da472e74 100644 --- a/.ci/bwcVersions +++ b/.ci/bwcVersions @@ -16,7 +16,7 @@ BWC_VERSION: - "7.14.2" - "7.15.2" - "7.16.3" - - "7.17.20" + - "7.17.21" - "8.0.1" - "8.1.3" - "8.2.3" @@ -30,5 +30,5 @@ BWC_VERSION: - "8.10.4" - "8.11.4" - "8.12.2" - - "8.13.2" + - "8.13.3" - "8.14.0" diff --git a/.ci/snapshotBwcVersions b/.ci/snapshotBwcVersions index 89449ff7f9f2f..dfd238a041b1e 100644 --- a/.ci/snapshotBwcVersions +++ b/.ci/snapshotBwcVersions @@ -1,4 +1,4 @@ BWC_VERSION: - - "7.17.20" - - "8.13.2" + - "7.17.21" + - "8.13.3" - "8.14.0" diff --git a/.gitignore b/.gitignore index 03e2446ec4f13..d1af97cbaea3b 100644 --- a/.gitignore +++ b/.gitignore @@ -68,4 +68,4 @@ testfixtures_shared/ # Generated checkstyle_ide.xml -x-pack/plugin/esql/gen/ +x-pack/plugin/esql/src/main/generated-src/generated/ diff --git a/benchmarks/build.gradle b/benchmarks/build.gradle index 3519434e07d42..33f20df4eccca 100644 --- a/benchmarks/build.gradle +++ b/benchmarks/build.gradle @@ -12,6 +12,8 @@ apply plugin: org.elasticsearch.gradle.internal.ElasticsearchJavaBasePlugin apply plugin: 'java-library' apply plugin: 'application' +var os = org.gradle.internal.os.OperatingSystem.current() + application { mainClass = 'org.openjdk.jmh.Main' } @@ -39,6 +41,7 @@ dependencies { api(project(':x-pack:plugin:ql')) api(project(':x-pack:plugin:esql')) api(project(':x-pack:plugin:esql:compute')) + implementation project(path: ':libs:elasticsearch-vec') expression(project(path: ':modules:lang-expression', configuration: 'zip')) painless(project(path: ':modules:lang-painless', configuration: 'zip')) api "org.openjdk.jmh:jmh-core:$versions.jmh" @@ -73,6 +76,16 @@ tasks.named("run").configure { executable = "${BuildParams.runtimeJavaHome}/bin/java" args << "-Dplugins.dir=${buildDir}/plugins" << "-Dtests.index=${buildDir}/index" dependsOn "copyExpression", "copyPainless" + systemProperty 'java.library.path', 
file("../libs/native/libraries/build/platform/${platformName()}-${os.arch}") +} + +String platformName() { + String name = System.getProperty("os.name"); + if (name.startsWith("Mac")) { + return "darwin"; + } else { + return name.toLowerCase(Locale.ROOT); + } } spotless { diff --git a/benchmarks/src/main/java/org/elasticsearch/benchmark/vector/VectorScorerBenchmark.java b/benchmarks/src/main/java/org/elasticsearch/benchmark/vector/VectorScorerBenchmark.java new file mode 100644 index 0000000000000..47a8844658ff3 --- /dev/null +++ b/benchmarks/src/main/java/org/elasticsearch/benchmark/vector/VectorScorerBenchmark.java @@ -0,0 +1,188 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.benchmark.vector; + +import org.apache.lucene.index.VectorSimilarityFunction; +import org.apache.lucene.store.Directory; +import org.apache.lucene.store.IOContext; +import org.apache.lucene.store.IndexInput; +import org.apache.lucene.store.IndexOutput; +import org.apache.lucene.store.MMapDirectory; +import org.apache.lucene.util.quantization.ScalarQuantizedVectorSimilarity; +import org.elasticsearch.common.logging.LogConfigurator; +import org.elasticsearch.core.IOUtils; +import org.elasticsearch.vec.VectorScorer; +import org.elasticsearch.vec.VectorScorerFactory; +import org.openjdk.jmh.annotations.Benchmark; +import org.openjdk.jmh.annotations.BenchmarkMode; +import org.openjdk.jmh.annotations.Fork; +import org.openjdk.jmh.annotations.Measurement; +import org.openjdk.jmh.annotations.Mode; +import org.openjdk.jmh.annotations.OutputTimeUnit; +import org.openjdk.jmh.annotations.Param; +import org.openjdk.jmh.annotations.Scope; +import 
org.openjdk.jmh.annotations.Setup; +import org.openjdk.jmh.annotations.State; +import org.openjdk.jmh.annotations.TearDown; +import org.openjdk.jmh.annotations.Warmup; + +import java.io.IOException; +import java.nio.file.Files; +import java.util.concurrent.ThreadLocalRandom; +import java.util.concurrent.TimeUnit; + +import static org.elasticsearch.vec.VectorSimilarityType.DOT_PRODUCT; +import static org.elasticsearch.vec.VectorSimilarityType.EUCLIDEAN; + +@Fork(value = 1, jvmArgsPrepend = { "--add-modules=jdk.incubator.vector" }) +@Warmup(iterations = 3, time = 3) +@Measurement(iterations = 5, time = 3) +@BenchmarkMode(Mode.Throughput) +@OutputTimeUnit(TimeUnit.MICROSECONDS) +@State(Scope.Thread) +/** + * Benchmark that compares various scalar quantized vector similarity function + * implementations;: scalar, lucene's panama-ized, and Elasticsearch's native. + * Run with ./gradlew -p benchmarks run --args 'VectorScorerBenchmark' + */ +public class VectorScorerBenchmark { + + static { + LogConfigurator.configureESLogging(); // native access requires logging to be initialized + } + + @Param({ "96", "768", "1024" }) + int dims; + int size = 2; // there are only two vectors to compare + + Directory dir; + IndexInput in; + VectorScorerFactory factory; + + byte[] vec1; + byte[] vec2; + float vec1Offset; + float vec2Offset; + float scoreCorrectionConstant; + + ScalarQuantizedVectorSimilarity luceneDotScorer; + ScalarQuantizedVectorSimilarity luceneSqrScorer; + VectorScorer nativeDotScorer; + VectorScorer nativeSqrScorer; + + @Setup + public void setup() throws IOException { + var optionalVectorScorerFactory = VectorScorerFactory.instance(); + if (optionalVectorScorerFactory.isEmpty()) { + String msg = "JDK=[" + + Runtime.version() + + "], os.name=[" + + System.getProperty("os.name") + + "], os.arch=[" + + System.getProperty("os.arch") + + "]"; + throw new AssertionError("Vector scorer factory not present. Cannot run the benchmark. 
" + msg); + } + factory = optionalVectorScorerFactory.get(); + scoreCorrectionConstant = 1f; + vec1 = new byte[dims]; + vec2 = new byte[dims]; + + ThreadLocalRandom.current().nextBytes(vec1); + ThreadLocalRandom.current().nextBytes(vec2); + vec1Offset = ThreadLocalRandom.current().nextFloat(); + vec2Offset = ThreadLocalRandom.current().nextFloat(); + + dir = new MMapDirectory(Files.createTempDirectory("nativeScalarQuantBench")); + try (IndexOutput out = dir.createOutput("vector.data", IOContext.DEFAULT)) { + out.writeBytes(vec1, 0, vec1.length); + out.writeInt(Float.floatToIntBits(vec1Offset)); + out.writeBytes(vec2, 0, vec2.length); + out.writeInt(Float.floatToIntBits(vec2Offset)); + } + in = dir.openInput("vector.data", IOContext.DEFAULT); + + luceneDotScorer = ScalarQuantizedVectorSimilarity.fromVectorSimilarity( + VectorSimilarityFunction.DOT_PRODUCT, + scoreCorrectionConstant + ); + luceneSqrScorer = ScalarQuantizedVectorSimilarity.fromVectorSimilarity(VectorSimilarityFunction.EUCLIDEAN, scoreCorrectionConstant); + nativeDotScorer = factory.getScalarQuantizedVectorScorer(dims, size, scoreCorrectionConstant, DOT_PRODUCT, in).get(); + nativeSqrScorer = factory.getScalarQuantizedVectorScorer(dims, size, scoreCorrectionConstant, EUCLIDEAN, in).get(); + + // sanity + var f1 = dotProductLucene(); + var f2 = dotProductNative(); + var f3 = dotProductScalar(); + if (f1 != f2) { + throw new AssertionError("lucene[" + f1 + "] != " + "native[" + f2 + "]"); + } + if (f1 != f3) { + throw new AssertionError("lucene[" + f1 + "] != " + "scalar[" + f3 + "]"); + } + // square distance + f1 = squareDistanceLucene(); + f2 = squareDistanceNative(); + f3 = squareDistanceScalar(); + if (f1 != f2) { + throw new AssertionError("lucene[" + f1 + "] != " + "native[" + f2 + "]"); + } + if (f1 != f3) { + throw new AssertionError("lucene[" + f1 + "] != " + "scalar[" + f3 + "]"); + } + } + + @TearDown + public void teardown() throws IOException { + IOUtils.close(dir, in); + } + + @Benchmark + 
public float dotProductLucene() { + return luceneDotScorer.score(vec1, vec1Offset, vec2, vec2Offset); + } + + @Benchmark + public float dotProductNative() throws IOException { + return nativeDotScorer.score(0, 1); + } + + @Benchmark + public float dotProductScalar() { + int dotProduct = 0; + for (int i = 0; i < vec1.length; i++) { + dotProduct += vec1[i] * vec2[i]; + } + float adjustedDistance = dotProduct * scoreCorrectionConstant + vec1Offset + vec2Offset; + return (1 + adjustedDistance) / 2; + } + + // -- square distance + + @Benchmark + public float squareDistanceLucene() { + return luceneSqrScorer.score(vec1, vec1Offset, vec2, vec2Offset); + } + + @Benchmark + public float squareDistanceNative() throws IOException { + return nativeSqrScorer.score(0, 1); + } + + @Benchmark + public float squareDistanceScalar() { + int squareDistance = 0; + for (int i = 0; i < vec1.length; i++) { + int diff = vec1[i] - vec2[i]; + squareDistance += diff * diff; + } + float adjustedDistance = squareDistance * scoreCorrectionConstant; + return 1 / (1f + adjustedDistance); + } +} diff --git a/build-tools-internal/src/integTest/groovy/org/elasticsearch/gradle/internal/doc/DocsTestPluginFuncTest.groovy b/build-tools-internal/src/integTest/groovy/org/elasticsearch/gradle/internal/doc/DocsTestPluginFuncTest.groovy new file mode 100644 index 0000000000000..4c542d371c32c --- /dev/null +++ b/build-tools-internal/src/integTest/groovy/org/elasticsearch/gradle/internal/doc/DocsTestPluginFuncTest.groovy @@ -0,0 +1,132 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. 
+ */ + +package org.elasticsearch.gradle.internal.doc + +import org.elasticsearch.gradle.fixtures.AbstractGradleInternalPluginFuncTest +import org.elasticsearch.gradle.internal.conventions.precommit.PrecommitPlugin +import org.gradle.testkit.runner.TaskOutcome + +class DocsTestPluginFuncTest extends AbstractGradleInternalPluginFuncTest { + Class pluginClassUnderTest = DocsTestPlugin.class + + def setup() { + File docDir = new File(projectDir, 'doc'); + docDir.mkdirs() + addSampleDoc(docDir) + buildApiRestrictionsDisabled = true + configurationCacheCompatible = false; + buildFile << """ +tasks.named('listSnippets') { + docs = fileTree('doc') +} + +tasks.named('listConsoleCandidates') { + docs = fileTree('doc') +} +""" + } + + def "can list snippets"() { + when: + def result = gradleRunner("listSnippets").build() + then: + result.task(":listSnippets").outcome == TaskOutcome.SUCCESS + assertOutputContains(result.output, """ +> Task :listSnippets +mapper-annotated-text.asciidoc[37:39](Painless) +mapper-annotated-text.asciidoc[42:44](js) +mapper-annotated-text.asciidoc[51:69](console)// TEST[setup:seats] +""") + } + + def "can console candidates"() { + when: + def result = gradleRunner("listConsoleCandidates").build() + then: + result.task(":listConsoleCandidates").outcome == TaskOutcome.SUCCESS + assertOutputContains(result.output, """ +> Task :listConsoleCandidates +mapper-annotated-text.asciidoc[42:44](js) +""") + } + + void addSampleDoc(File docFolder) { + new File(docFolder, "mapper-annotated-text.asciidoc").text = """ +[[painless-filter-context]] +=== Filter context + +Use a Painless script as a {ref}/query-dsl-script-query.html[filter] in a +query to include and exclude documents. + + +*Variables* + +`params` (`Map`, read-only):: + User-defined parameters passed in as part of the query. + +`doc` (`Map`, read-only):: + Contains the fields of the current document where each field is a + `List` of values. 
+ +*Return* + +`boolean`:: + Return `true` if the current document should be returned as a result of + the query, and `false` otherwise. + + +*API* + +The standard <> is available. + +*Example* + +To run this example, first follow the steps in +<>. + +This script finds all unsold documents that cost less than \$25. + +[source,Painless] +---- +doc['sold'].value == false && doc['cost'].value < 25 +---- + +[source,js] +---- +curl 'hello world' +---- + +Defining `cost` as a script parameter enables the cost to be configured +in the script query request. For example, the following request finds +all available theatre seats for evening performances that are under \$25. + +[source,console] +---- +GET seats/_search +{ + "query": { + "bool": { + "filter": { + "script": { + "script": { + "source": "doc['sold'].value == false && doc['cost'].value < params.cost", + "params": { + "cost": 25 + } + } + } + } + } + } +} +---- +// TEST[setup:seats] +""" + } +} diff --git a/build-tools-internal/src/main/groovy/elasticsearch.ide.gradle b/build-tools-internal/src/main/groovy/elasticsearch.ide.gradle index ccbe9cd2f4a2b..6cb22dad9bc79 100644 --- a/build-tools-internal/src/main/groovy/elasticsearch.ide.gradle +++ b/build-tools-internal/src/main/groovy/elasticsearch.ide.gradle @@ -130,7 +130,8 @@ if (providers.systemProperty('idea.active').getOrNull() == 'true') { ':server:generateModulesList', ':server:generatePluginsList', ':generateProviderImpls', - ':libs:elasticsearch-native:elasticsearch-native-libraries:extractLibs'].collect { elasticsearchProject.right()?.task(it) ?: it }) + ':libs:elasticsearch-native:elasticsearch-native-libraries:extractLibs', + ':x-pack:libs:es-opensaml-security-api:shadowJar'].collect { elasticsearchProject.right()?.task(it) ?: it }) } // this path is produced by the extractLibs task above @@ -239,20 +240,22 @@ if (providers.systemProperty('idea.active').getOrNull() == 'true') { * but before the XML document, e.g. 
a doctype or comment */ void modifyXml(Object path, Action action, String preface = null) { - Node xml = parseXml(path) - action.execute(xml) + if (project.file(path).exists()) { + Node xml = parseXml(path) + action.execute(xml) - File xmlFile = project.file(path) - xmlFile.withPrintWriter { writer -> - def printer = new XmlNodePrinter(writer) - printer.namespaceAware = true - printer.preserveWhitespace = true - writer.write("\n") + File xmlFile = project.file(path) + xmlFile.withPrintWriter { writer -> + def printer = new XmlNodePrinter(writer) + printer.namespaceAware = true + printer.preserveWhitespace = true + writer.write("\n") - if (preface != null) { - writer.write(preface) + if (preface != null) { + writer.write(preface) + } + printer.print(xml) } - printer.print(xml) } } diff --git a/build-tools-internal/src/main/groovy/org/elasticsearch/gradle/internal/doc/DocsTestPlugin.groovy b/build-tools-internal/src/main/groovy/org/elasticsearch/gradle/internal/doc/DocsTestPlugin.groovy deleted file mode 100644 index 38b4cb499eeb9..0000000000000 --- a/build-tools-internal/src/main/groovy/org/elasticsearch/gradle/internal/doc/DocsTestPlugin.groovy +++ /dev/null @@ -1,99 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
- */ -package org.elasticsearch.gradle.internal.doc - -import org.elasticsearch.gradle.OS -import org.elasticsearch.gradle.Version -import org.elasticsearch.gradle.VersionProperties -import org.elasticsearch.gradle.internal.test.rest.CopyRestApiTask -import org.elasticsearch.gradle.internal.test.rest.CopyRestTestsTask -import org.gradle.api.Action -import org.gradle.api.Plugin -import org.gradle.api.Project -import org.gradle.api.file.Directory -import org.gradle.api.file.ProjectLayout -import org.gradle.api.internal.file.FileOperations -import org.gradle.api.provider.Provider -import org.gradle.api.tasks.TaskProvider - -import javax.inject.Inject - -/** - * Sets up tests for documentation. - */ -class DocsTestPlugin implements Plugin { - - private FileOperations fileOperations - private ProjectLayout projectLayout - - @Inject - DocsTestPlugin(FileOperations fileOperations, ProjectLayout projectLayout) { - this.projectLayout = projectLayout - this.fileOperations = fileOperations - } - - @Override - void apply(Project project) { - project.pluginManager.apply('elasticsearch.legacy-yaml-rest-test') - - String distribution = System.getProperty('tests.distribution', 'default') - // The distribution can be configured with -Dtests.distribution on the command line - project.testClusters.matching { it.name.equals("yamlRestTest") }.configureEach { testDistribution = distribution.toUpperCase() } - project.testClusters.matching { it.name.equals("yamlRestTest") }.configureEach { nameCustomization = { it.replace("yamlRestTest", "node") } } - // Docs are published separately so no need to assemble - project.tasks.named("assemble").configure {enabled = false } - Map commonDefaultSubstitutions = [ - /* These match up with the asciidoc syntax for substitutions but - * the values may differ. In particular {version} needs to resolve - * to the version being built for testing but needs to resolve to - * the last released version for docs. 
*/ - '\\{version\\}': Version.fromString(VersionProperties.elasticsearch).toString(), - '\\{version_qualified\\}': VersionProperties.elasticsearch, - '\\{lucene_version\\}' : VersionProperties.lucene.replaceAll('-snapshot-\\w+$', ''), - '\\{build_flavor\\}' : distribution, - '\\{build_type\\}' : OS.conditionalString().onWindows({"zip"}).onUnix({"tar"}).supply(), - ] - project.tasks.register('listSnippets', SnippetsTask) { - group 'Docs' - description 'List each snippet' - defaultSubstitutions = commonDefaultSubstitutions - perSnippet = new Action() { - @Override - void execute(SnippetsTask.Snippet snippet) { - println(snippet.toString()) - } - } - } - project.tasks.register('listConsoleCandidates', SnippetsTask) { - group 'Docs' - description - 'List snippets that probably should be marked // CONSOLE' - defaultSubstitutions = commonDefaultSubstitutions - perSnippet = new Action() { - @Override - void execute(SnippetsTask.Snippet snippet) { - if (RestTestsFromSnippetsTask.isConsoleCandidate(it)) { - println(it.toString()) - } - } - } - } - - Provider restRootDir = projectLayout.buildDirectory.dir("rest") - TaskProvider buildRestTests = project.tasks.register('buildRestTests', RestTestsFromSnippetsTask) { - defaultSubstitutions = commonDefaultSubstitutions - testRoot.convention(restRootDir) - doFirst { - getFileOperations().delete(testRoot.get()) - } - } - - // TODO: This effectively makes testRoot not customizable, which we don't do anyway atm - project.sourceSets.yamlRestTest.output.dir(restRootDir, builtBy: buildRestTests) - } -} diff --git a/build-tools-internal/src/main/groovy/org/elasticsearch/gradle/internal/doc/RestTestsFromSnippetsTask.groovy b/build-tools-internal/src/main/groovy/org/elasticsearch/gradle/internal/doc/RestTestsFromSnippetsTask.groovy deleted file mode 100644 index 81207181dc9a7..0000000000000 --- a/build-tools-internal/src/main/groovy/org/elasticsearch/gradle/internal/doc/RestTestsFromSnippetsTask.groovy +++ /dev/null @@ -1,503 +0,0 @@ -/* - 
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ - -package org.elasticsearch.gradle.internal.doc - -import groovy.transform.PackageScope -import org.elasticsearch.gradle.internal.doc.SnippetsTask.Snippet -import org.gradle.api.Action -import org.gradle.api.InvalidUserDataException -import org.gradle.api.file.DirectoryProperty -import org.gradle.api.internal.file.FileOperations -import org.gradle.api.tasks.Input -import org.gradle.api.tasks.Internal -import org.gradle.api.tasks.OutputDirectory -import org.gradle.api.model.ObjectFactory - -import javax.inject.Inject; -import java.nio.file.Files -import java.nio.file.Path - -/** - * Generates REST tests for each snippet marked // TEST. - */ -abstract class RestTestsFromSnippetsTask extends SnippetsTask { - /** - * These languages aren't supported by the syntax highlighter so we - * shouldn't use them. - */ - private static final List BAD_LANGUAGES = ['json', 'javascript'] - - /** - * Test setups defined in the build instead of the docs so they can be - * shared between many doc files. - */ - @Input - Map setups = new HashMap() - - /** - * Test teardowns defined in the build instead of the docs so they can be - * shared between many doc files. - */ - @Input - Map teardowns = new HashMap() - - /** - * A list of files that contain snippets that *probably* should be - * converted to `// CONSOLE` but have yet to be converted. If a file is in - * this list and doesn't contain unconverted snippets this task will fail. - * If there are unconverted snippets not in this list then this task will - * fail. All files are paths relative to the docs dir. 
- */ - @Input - List expectedUnconvertedCandidates = [] - - /** - * Root directory of the tests being generated. To make rest tests happy - * we generate them in a testRoot which is contained in this directory. - */ - private DirectoryProperty testRoot - - @Internal - Set names = new HashSet<>() - - @Inject - abstract FileOperations getFileOperations(); - - @Inject - RestTestsFromSnippetsTask(ObjectFactory objectFactory) { - testRoot = objectFactory.directoryProperty() - TestBuilder builder = new TestBuilder() - perSnippet = new Action() { - @Override - void execute(Snippet snippet) { - builder.handleSnippet(snippet) - } - } - doLast { - builder.checkUnconverted() - builder.finishLastTest() - } - } - - /** - * Root directory containing all the files generated by this task. It is - * contained within testRoot. - */ - File outputRoot() { - return new File(testRoot.get().asFile, '/rest-api-spec/test') - } - - @OutputDirectory - DirectoryProperty getTestRoot() { - return testRoot - } -/** - * Is this snippet a candidate for conversion to `// CONSOLE`? - */ - static isConsoleCandidate(Snippet snippet) { - /* Snippets that are responses or already marked as `// CONSOLE` or - * `// NOTCONSOLE` are not candidates. */ - if (snippet.console != null || snippet.testResponse) { - return false - } - /* js snippets almost always should be marked with `// CONSOLE`. js - * snippets that shouldn't be marked `// CONSOLE`, like examples for - * js client, should always be marked with `// NOTCONSOLE`. - * - * `sh` snippets that contain `curl` almost always should be marked - * with `// CONSOLE`. In the exceptionally rare cases where they are - * not communicating with Elasticsearch, like the examples in the ec2 - * and gce discovery plugins, the snippets should be marked - * `// NOTCONSOLE`. */ - return snippet.language == 'js' || snippet.curl - } - - /** - * Certain requests should not have the shard failure check because the - * format of the response is incompatible i.e. 
it is not a JSON object. - */ - static shouldAddShardFailureCheck(String path) { - return path.startsWith('_cat') == false && path.startsWith('_ml/datafeeds/') == false - } - - /** - * Converts Kibana's block quoted strings into standard JSON. These - * {@code """} delimited strings can be embedded in CONSOLE and can - * contain newlines and {@code "} without the normal JSON escaping. - * This has to add it. - */ - @PackageScope - static String replaceBlockQuote(String body) { - int start = body.indexOf('"""'); - if (start < 0) { - return body - } - /* - * 1.3 is a fairly wild guess of the extra space needed to hold - * the escaped string. - */ - StringBuilder result = new StringBuilder((int) (body.length() * 1.3)); - int startOfNormal = 0; - while (start >= 0) { - int end = body.indexOf('"""', start + 3); - if (end < 0) { - throw new InvalidUserDataException( - "Invalid block quote starting at $start in:\n$body") - } - result.append(body.substring(startOfNormal, start)); - result.append('"'); - result.append(body.substring(start + 3, end) - .replace('"', '\\"') - .replace("\n", "\\n")); - result.append('"'); - startOfNormal = end + 3; - start = body.indexOf('"""', startOfNormal); - } - result.append(body.substring(startOfNormal)); - return result.toString(); - } - - private class TestBuilder { - private static final String SYNTAX = { - String method = /(?GET|PUT|POST|HEAD|OPTIONS|DELETE)/ - String pathAndQuery = /(?[^\n]+)/ - String badBody = /GET|PUT|POST|HEAD|OPTIONS|DELETE|startyaml|#/ - String body = /(?(?:\n(?!$badBody)[^\n]+)+)/ - String rawRequest = /(?:$method\s+$pathAndQuery$body?)/ - String yamlRequest = /(?:startyaml(?s)(?.+?)(?-s)endyaml)/ - String nonComment = /(?:$rawRequest|$yamlRequest)/ - String comment = /(?#.+)/ - /(?:$comment|$nonComment)\n+/ - }() - - /** - * The file in which we saw the last snippet that made a test. - */ - Path lastDocsPath - - /** - * The file we're building. 
- */ - PrintWriter current - - /** - * Files containing all snippets that *probably* should be converted - * to `// CONSOLE` but have yet to be converted. All files are paths - * relative to the docs dir. - */ - Set unconvertedCandidates = new HashSet<>() - - /** - * The last non-TESTRESPONSE snippet. - */ - Snippet previousTest - - /** - * Called each time a snippet is encountered. Tracks the snippets and - * calls buildTest to actually build the test. - */ - - void handleSnippet(Snippet snippet) { - if (RestTestsFromSnippetsTask.isConsoleCandidate(snippet)) { - unconvertedCandidates.add(snippet.path.toString() - .replace('\\', '/')) - } - if (BAD_LANGUAGES.contains(snippet.language)) { - throw new InvalidUserDataException( - "$snippet: Use `js` instead of `${snippet.language}`.") - } - if (snippet.testSetup) { - testSetup(snippet) - previousTest = snippet - return - } - if (snippet.testTearDown) { - testTearDown(snippet) - previousTest = snippet - return - } - if (snippet.testResponse || snippet.language == 'console-result') { - if (previousTest == null) { - throw new InvalidUserDataException("$snippet: No paired previous test") - } - if (previousTest.path != snippet.path) { - throw new InvalidUserDataException("$snippet: Result can't be first in file") - } - response(snippet) - return - } - if ((snippet.language == 'js') && (snippet.console)) { - throw new InvalidUserDataException( - "$snippet: Use `[source,console]` instead of `// CONSOLE`.") - } - if (snippet.test || snippet.language == 'console') { - test(snippet) - previousTest = snippet - return - } - // Must be an unmarked snippet.... - } - - private void test(Snippet test) { - setupCurrent(test) - - if (test.continued) { - /* Catch some difficult to debug errors with // TEST[continued] - * and throw a helpful error message. 
*/ - if (previousTest == null || previousTest.path != test.path) { - throw new InvalidUserDataException("// TEST[continued] " + - "cannot be on first snippet in a file: $test") - } - if (previousTest != null && previousTest.testSetup) { - throw new InvalidUserDataException("// TEST[continued] " + - "cannot immediately follow // TESTSETUP: $test") - } - if (previousTest != null && previousTest.testTearDown) { - throw new InvalidUserDataException("// TEST[continued] " + - "cannot immediately follow // TEARDOWN: $test") - } - } else { - current.println('---') - if (test.name != null && test.name.isBlank() == false) { - if(names.add(test.name) == false) { - throw new InvalidUserDataException("Duplicated snippet name '$test.name': $test") - } - current.println("\"$test.name\":") - } else { - current.println("\"line_$test.start\":") - } - /* The Elasticsearch test runner doesn't support quite a few - * constructs unless we output this skip. We don't know if - * we're going to use these constructs, but we might so we - * output the skip just in case. 
*/ - current.println(" - skip:") - current.println(" features: ") - current.println(" - default_shards") - current.println(" - stash_in_key") - current.println(" - stash_in_path") - current.println(" - stash_path_replace") - current.println(" - warnings") - } - if (test.skip) { - if (test.continued) { - throw new InvalidUserDataException("Continued snippets " - + "can't be skipped") - } - current.println(" - always_skip") - current.println(" reason: $test.skip") - } - if (test.setup != null) { - setup(test) - } - - body(test, false) - - if (test.teardown != null) { - teardown(test) - } - } - - private void setup(final Snippet snippet) { - // insert a setup defined outside of the docs - for (final String name : snippet.setup.split(',')) { - final String setup = setups[name] - if (setup == null) { - throw new InvalidUserDataException( - "Couldn't find named setup $name for $snippet" - ) - } - current.println("# Named setup ${name}") - current.println(setup) - } - } - - private void teardown(final Snippet snippet) { - // insert a teardown defined outside of the docs - for (final String name : snippet.teardown.split(',')) { - final String teardown = teardowns[name] - if (teardown == null) { - throw new InvalidUserDataException( - "Couldn't find named teardown $name for $snippet" - ) - } - current.println("# Named teardown ${name}") - current.println(teardown) - } - } - - private void response(Snippet response) { - if (null == response.skip) { - current.println(" - match: ") - current.println(" \$body: ") - replaceBlockQuote(response.contents).eachLine { - current.println(" $it") - } - } - } - - void emitDo(String method, String pathAndQuery, String body, - String catchPart, List warnings, boolean inSetup, boolean skipShardFailures) { - def (String path, String query) = pathAndQuery.tokenize('?') - if (path == null) { - path = '' // Catch requests to the root... 
- } else { - path = path.replace('<', '%3C').replace('>', '%3E') - } - current.println(" - do:") - if (catchPart != null) { - current.println(" catch: $catchPart") - } - if (false == warnings.isEmpty()) { - current.println(" warnings:") - for (String warning in warnings) { - // Escape " because we're going to quote the warning - String escaped = warning.replaceAll('"', '\\\\"') - /* Quote the warning in case it starts with [ which makes - * it look too much like an array. */ - current.println(" - \"$escaped\"") - } - } - current.println(" raw:") - current.println(" method: $method") - current.println(" path: \"$path\"") - if (query != null) { - for (String param: query.tokenize('&')) { - def (String name, String value) = param.tokenize('=') - if (value == null) { - value = '' - } - current.println(" $name: \"$value\"") - } - } - if (body != null) { - // Throw out the leading newline we get from parsing the body - body = body.substring(1) - // Replace """ quoted strings with valid json ones - body = replaceBlockQuote(body) - current.println(" body: |") - body.eachLine { current.println(" $it") } - } - /* Catch any shard failures. These only cause a non-200 response if - * no shard succeeds. But we need to fail the tests on all of these - * because they mean invalid syntax or broken queries or something - * else that we don't want to teach people to do. The REST test - * framework doesn't allow us to have assertions in the setup - * section so we have to skip it there. We also omit the assertion - * from APIs that don't return a JSON object - */ - if (false == inSetup && skipShardFailures == false && shouldAddShardFailureCheck(path)) { - current.println(" - is_false: _shards.failures") - } - } - - private void testSetup(Snippet snippet) { - if (lastDocsPath == snippet.path) { - throw new InvalidUserDataException("$snippet: wasn't first. 
TESTSETUP can only be used in the first snippet of a document.") - } - setupCurrent(snippet) - current.println('---') - current.println("setup:") - if (snippet.setup != null) { - setup(snippet) - } - body(snippet, true) - } - - private void testTearDown(Snippet snippet) { - if (previousTest != null && previousTest.testSetup == false && lastDocsPath == snippet.path) { - throw new InvalidUserDataException("$snippet must follow test setup or be first") - } - setupCurrent(snippet) - current.println('---') - current.println('teardown:') - body(snippet, true) - } - - private void body(Snippet snippet, boolean inSetup) { - parse("$snippet", snippet.contents, SYNTAX) { matcher, last -> - if (matcher.group("comment") != null) { - // Comment - return - } - String yamlRequest = matcher.group("yaml"); - if (yamlRequest != null) { - current.println(yamlRequest) - return - } - String method = matcher.group("method") - String pathAndQuery = matcher.group("pathAndQuery") - String body = matcher.group("body") - String catchPart = last ? 
snippet.catchPart : null - if (pathAndQuery.startsWith('/')) { - // Leading '/'s break the generated paths - pathAndQuery = pathAndQuery.substring(1) - } - emitDo(method, pathAndQuery, body, catchPart, snippet.warnings, - inSetup, snippet.skipShardsFailures) - } - } - - private PrintWriter setupCurrent(Snippet test) { - if (lastDocsPath == test.path) { - return - } - names.clear() - finishLastTest() - lastDocsPath = test.path - - // Make the destination file: - // Shift the path into the destination directory tree - Path dest = outputRoot().toPath().resolve(test.path) - // Replace the extension - String fileName = dest.getName(dest.nameCount - 1) - dest = dest.parent.resolve(fileName.replace('.asciidoc', '.yml')) - - // Now setup the writer - Files.createDirectories(dest.parent) - current = dest.newPrintWriter('UTF-8') - } - - void finishLastTest() { - if (current != null) { - current.close() - current = null - } - } - - void checkUnconverted() { - List listedButNotFound = [] - for (String listed : expectedUnconvertedCandidates) { - if (false == unconvertedCandidates.remove(listed)) { - listedButNotFound.add(listed) - } - } - String message = "" - if (false == listedButNotFound.isEmpty()) { - Collections.sort(listedButNotFound) - listedButNotFound = listedButNotFound.collect {' ' + it} - message += "Expected unconverted snippets but none found in:\n" - message += listedButNotFound.join("\n") - } - if (false == unconvertedCandidates.isEmpty()) { - List foundButNotListed = - new ArrayList<>(unconvertedCandidates) - Collections.sort(foundButNotListed) - foundButNotListed = foundButNotListed.collect {' ' + it} - if (false == "".equals(message)) { - message += "\n" - } - message += "Unexpected unconverted snippets:\n" - message += foundButNotListed.join("\n") - } - if (false == "".equals(message)) { - throw new InvalidUserDataException(message); - } - } - } -} diff --git a/build-tools-internal/src/main/groovy/org/elasticsearch/gradle/internal/doc/SnippetsTask.groovy 
b/build-tools-internal/src/main/groovy/org/elasticsearch/gradle/internal/doc/SnippetsTask.groovy deleted file mode 100644 index 3e4ad91024082..0000000000000 --- a/build-tools-internal/src/main/groovy/org/elasticsearch/gradle/internal/doc/SnippetsTask.groovy +++ /dev/null @@ -1,438 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ - -package org.elasticsearch.gradle.internal.doc - -import com.fasterxml.jackson.core.JsonFactory; -import com.fasterxml.jackson.core.JsonParser; -import com.fasterxml.jackson.core.JsonParseException; -import com.fasterxml.jackson.core.JsonToken - -import org.gradle.api.Action; -import org.gradle.api.DefaultTask -import org.gradle.api.InvalidUserDataException -import org.gradle.api.file.ConfigurableFileTree -import org.gradle.api.tasks.Input -import org.gradle.api.tasks.InputFiles -import org.gradle.api.tasks.Internal -import org.gradle.api.tasks.TaskAction - -import java.nio.file.Path -import java.util.regex.Matcher - -/** - * A task which will run a closure on each snippet in the documentation. 
- */ -class SnippetsTask extends DefaultTask { - private static final String SCHAR = /(?:\\\/|[^\/])/ - private static final String SUBSTITUTION = /s\/($SCHAR+)\/($SCHAR*)\// - private static final String CATCH = /catch:\s*((?:\/[^\/]+\/)|[^ \]]+)/ - private static final String SKIP_REGEX = /skip:([^\]]+)/ - private static final String SETUP = /setup:([^ \]]+)/ - private static final String TEARDOWN = /teardown:([^ \]]+)/ - private static final String WARNING = /warning:(.+)/ - private static final String NON_JSON = /(non_json)/ - private static final String TEST_SYNTAX = - /(?:$CATCH|$SUBSTITUTION|$SKIP_REGEX|(continued)|$SETUP|$TEARDOWN|$WARNING|(skip_shard_failures)) ?/ - - /** - * Action to take on each snippet. Called with a single parameter, an - * instance of Snippet. - */ - @Internal - Action perSnippet - - /** - * The docs to scan. Defaults to every file in the directory exception the - * build.gradle file because that is appropriate for Elasticsearch's docs - * directory. - */ - @InputFiles - ConfigurableFileTree docs - - /** - * Substitutions done on every snippet's contents. - */ - @Input - Map defaultSubstitutions = [:] - - @TaskAction - void executeTask() { - /* - * Walks each line of each file, building snippets as it encounters - * the lines that make up the snippet. - */ - for (File file: docs) { - String lastLanguage - String name - int lastLanguageLine - Snippet snippet = null - StringBuilder contents = null - List substitutions = null - Closure emit = { - snippet.contents = contents.toString() - contents = null - Closure doSubstitution = { String pattern, String subst -> - /* - * $body is really common but it looks like a - * backreference so we just escape it here to make the - * tests cleaner. - */ - subst = subst.replace('$body', '\\$body') - subst = subst.replace('$_path', '\\$_path') - // \n is a new line.... 
- subst = subst.replace('\\n', '\n') - snippet.contents = snippet.contents.replaceAll( - pattern, subst) - } - defaultSubstitutions.each doSubstitution - if (substitutions != null) { - substitutions.each doSubstitution - substitutions = null - } - if (snippet.language == null) { - throw new InvalidUserDataException("$snippet: " - + "Snippet missing a language. This is required by " - + "Elasticsearch's doc testing infrastructure so we " - + "be sure we don't accidentally forget to test a " - + "snippet.") - } - // Try to detect snippets that contain `curl` - if (snippet.language == 'sh' || snippet.language == 'shell') { - snippet.curl = snippet.contents.contains('curl') - if (snippet.console == false && snippet.curl == false) { - throw new InvalidUserDataException("$snippet: " - + "No need for NOTCONSOLE if snippet doesn't " - + "contain `curl`.") - } - } - if (snippet.testResponse - && ('js' == snippet.language || 'console-result' == snippet.language) - && null == snippet.skip) { - String quoted = snippet.contents - // quote values starting with $ - .replaceAll(/([:,])\s*(\$[^ ,\n}]+)/, '$1 "$2"') - // quote fields starting with $ - .replaceAll(/(\$[^ ,\n}]+)\s*:/, '"$1":') - - JsonFactory jf = new JsonFactory(); - jf.configure(JsonParser.Feature.ALLOW_BACKSLASH_ESCAPING_ANY_CHARACTER,true); - JsonParser jsonParser; - - try { - jsonParser = jf.createParser(quoted); - while(jsonParser.isClosed() == false) { - jsonParser.nextToken(); - } - } catch (JsonParseException e) { - throw new InvalidUserDataException("Invalid json in " - + snippet.toString() + ". 
The error is:\n" + e.getMessage() + ".\n" - + "After substitutions and munging, the json looks like:\n" + quoted, e); - } - } - perSnippet.execute(snippet) - snippet = null - } - file.eachLine('UTF-8') { String line, int lineNumber -> - Matcher matcher - if (line ==~ /-{4,}\s*/) { // Four dashes looks like a snippet - if (snippet == null) { - Path path = docs.dir.toPath().relativize(file.toPath()) - snippet = new Snippet(path: path, start: lineNumber, name: name) - if (lastLanguageLine == lineNumber - 1) { - snippet.language = lastLanguage - } - name = null - } else { - snippet.end = lineNumber - } - return - } - def source = matchSource(line) - if (source.matches) { - lastLanguage = source.language - lastLanguageLine = lineNumber - name = source.name - return - } - if (line ==~ /\/\/\s*AUTOSENSE\s*/) { - throw new InvalidUserDataException("$file:$lineNumber: " - + "AUTOSENSE has been replaced by CONSOLE.") - } - if (line ==~ /\/\/\s*CONSOLE\s*/) { - if (snippet == null) { - throw new InvalidUserDataException("$file:$lineNumber: " - + "CONSOLE not paired with a snippet") - } - if (snippet.console != null) { - throw new InvalidUserDataException("$file:$lineNumber: " - + "Can't be both CONSOLE and NOTCONSOLE") - } - snippet.console = true - return - } - if (line ==~ /\/\/\s*NOTCONSOLE\s*/) { - if (snippet == null) { - throw new InvalidUserDataException("$file:$lineNumber: " - + "NOTCONSOLE not paired with a snippet") - } - if (snippet.console != null) { - throw new InvalidUserDataException("$file:$lineNumber: " - + "Can't be both CONSOLE and NOTCONSOLE") - } - snippet.console = false - return - } - matcher = line =~ /\/\/\s*TEST(\[(.+)\])?\s*/ - if (matcher.matches()) { - if (snippet == null) { - throw new InvalidUserDataException("$file:$lineNumber: " - + "TEST not paired with a snippet at ") - } - snippet.test = true - if (matcher.group(2) != null) { - String loc = "$file:$lineNumber" - parse(loc, matcher.group(2), TEST_SYNTAX) { - if (it.group(1) != null) { - 
snippet.catchPart = it.group(1) - return - } - if (it.group(2) != null) { - if (substitutions == null) { - substitutions = [] - } - substitutions.add([it.group(2), it.group(3)]) - return - } - if (it.group(4) != null) { - snippet.skip = it.group(4) - return - } - if (it.group(5) != null) { - snippet.continued = true - return - } - if (it.group(6) != null) { - snippet.setup = it.group(6) - return - } - if (it.group(7) != null) { - snippet.teardown = it.group(7) - return - } - if (it.group(8) != null) { - snippet.warnings.add(it.group(8)) - return - } - if (it.group(9) != null) { - snippet.skipShardsFailures = true - return - } - throw new InvalidUserDataException( - "Invalid test marker: $line") - } - } - return - } - matcher = line =~ /\/\/\s*TESTRESPONSE(\[(.+)\])?\s*/ - if (matcher.matches()) { - if (snippet == null) { - throw new InvalidUserDataException("$file:$lineNumber: " - + "TESTRESPONSE not paired with a snippet") - } - snippet.testResponse = true - if (matcher.group(2) != null) { - if (substitutions == null) { - substitutions = [] - } - String loc = "$file:$lineNumber" - parse(loc, matcher.group(2), /(?:$SUBSTITUTION|$NON_JSON|$SKIP_REGEX) ?/) { - if (it.group(1) != null) { - // TESTRESPONSE[s/adsf/jkl/] - substitutions.add([it.group(1), it.group(2)]) - } else if (it.group(3) != null) { - // TESTRESPONSE[non_json] - substitutions.add(['^', '/']) - substitutions.add(['\n$', '\\\\s*/']) - substitutions.add(['( +)', '$1\\\\s+']) - substitutions.add(['\n', '\\\\s*\n ']) - } else if (it.group(4) != null) { - // TESTRESPONSE[skip:reason] - snippet.skip = it.group(4) - } - } - } - return - } - if (line ==~ /\/\/\s*TESTSETUP\s*/) { - snippet.testSetup = true - return - } - if (line ==~ /\/\/\s*TEARDOWN\s*/) { - snippet.testTearDown = true - return - } - if (snippet == null) { - // Outside - return - } - if (snippet.end == Snippet.NOT_FINISHED) { - // Inside - if (contents == null) { - contents = new StringBuilder() - } - // We don't need the annotations - line = 
line.replaceAll(/<\d+>/, '') - // Nor any trailing spaces - line = line.replaceAll(/\s+$/, '') - contents.append(line).append('\n') - return - } - // Allow line continuations for console snippets within lists - if (snippet != null && line.trim() == '+') { - return - } - // Just finished - emit() - } - if (snippet != null) emit() - } - } - - static Source matchSource(String line) { - def matcher = line =~ /\["?source"?(?:\.[^,]+)?,\s*"?([-\w]+)"?(,((?!id=).)*(id="?([-\w]+)"?)?(.*))?].*/ - if(matcher.matches()){ - return new Source(matches: true, language: matcher.group(1), name: matcher.group(5)) - } - return new Source(matches: false) - } - - static class Source { - boolean matches - String language - String name - } - - static class Snippet { - static final int NOT_FINISHED = -1 - - /** - * Path to the file containing this snippet. Relative to docs.dir of the - * SnippetsTask that created it. - */ - Path path - int start - int end = NOT_FINISHED - String contents - - Boolean console = null - boolean test = false - boolean testResponse = false - boolean testSetup = false - boolean testTearDown = false - String skip = null - boolean continued = false - String language = null - String catchPart = null - String setup = null - String teardown = null - boolean curl - List warnings = new ArrayList() - boolean skipShardsFailures = false - String name - - @Override - public String toString() { - String result = "$path[$start:$end]" - if (language != null) { - result += "($language)" - } - if (console != null) { - result += console ? 
'// CONSOLE' : '// NOTCONSOLE' - } - if (test) { - result += '// TEST' - if (catchPart) { - result += "[catch: $catchPart]" - } - if (skip) { - result += "[skip=$skip]" - } - if (continued) { - result += '[continued]' - } - if (setup) { - result += "[setup:$setup]" - } - if (teardown) { - result += "[teardown:$teardown]" - } - for (String warning in warnings) { - result += "[warning:$warning]" - } - if (skipShardsFailures) { - result += '[skip_shard_failures]' - } - } - if (testResponse) { - result += '// TESTRESPONSE' - if (skip) { - result += "[skip=$skip]" - } - } - if (testSetup) { - result += '// TESTSETUP' - } - if (curl) { - result += '(curl)' - } - return result - } - } - - /** - * Repeatedly match the pattern to the string, calling the closure with the - * matchers each time there is a match. If there are characters that don't - * match then blow up. If the closure takes two parameters then the second - * one is "is this the last match?". - */ - protected parse(String location, String s, String pattern, Closure c) { - if (s == null) { - return // Silly null, only real stuff gets to match! 
- } - Matcher m = s =~ pattern - int offset = 0 - Closure extraContent = { message -> - StringBuilder cutOut = new StringBuilder() - cutOut.append(s[offset - 6..offset - 1]) - cutOut.append('*') - cutOut.append(s[offset..Math.min(offset + 5, s.length() - 1)]) - String cutOutNoNl = cutOut.toString().replace('\n', '\\n') - throw new InvalidUserDataException("$location: Extra content " - + "$message ('$cutOutNoNl') matching [$pattern]: $s") - } - while (m.find()) { - if (m.start() != offset) { - extraContent("between [$offset] and [${m.start()}]") - } - offset = m.end() - if (c.maximumNumberOfParameters == 1) { - c(m) - } else { - c(m, offset == s.length()) - } - } - if (offset == 0) { - throw new InvalidUserDataException("$location: Didn't match " - + "$pattern: $s") - } - if (offset != s.length()) { - extraContent("after [$offset]") - } - } -} diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/ElasticsearchBuildCompletePlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/ElasticsearchBuildCompletePlugin.java index d342ebc435197..4f9498c8f33a6 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/ElasticsearchBuildCompletePlugin.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/ElasticsearchBuildCompletePlugin.java @@ -142,10 +142,17 @@ public void execute(BuildFinishedFlowAction.Parameters parameters) throws FileNo uploadFile.getParentFile().mkdirs(); createBuildArchiveTar(parameters.getFilteredFiles().get(), parameters.getProjectDir().get(), uploadFile); if (uploadFile.exists() && "true".equals(System.getenv("BUILDKITE"))) { - String uploadFilePath = "build/" + uploadFile.getName(); + String uploadFilePath = uploadFile.getName(); + File uploadFileDir = uploadFile.getParentFile(); try { System.out.println("Uploading buildkite artifact: " + uploadFilePath + "..."); - new ProcessBuilder("buildkite-agent", "artifact", "upload", 
uploadFilePath).start().waitFor(); + ProcessBuilder pb = new ProcessBuilder("buildkite-agent", "artifact", "upload", uploadFilePath); + // If we don't switch to the build directory first, the uploaded file will have a `build/` prefix + // Buildkite will flip the `/` to a `\` at upload time on Windows, which will make the search command below fail + // So, if you change this such that the artifact will have a slash/directory in it, you'll need to update the logic + // below as well + pb.directory(uploadFileDir); + pb.start().waitFor(); System.out.println("Generating buildscan link for artifact..."); diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/InternalDistributionModuleCheckTaskProvider.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/InternalDistributionModuleCheckTaskProvider.java index f94cc2c133acd..da8cd783d0365 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/InternalDistributionModuleCheckTaskProvider.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/InternalDistributionModuleCheckTaskProvider.java @@ -63,6 +63,7 @@ public class InternalDistributionModuleCheckTaskProvider { "org.elasticsearch.securesm", "org.elasticsearch.server", "org.elasticsearch.tdigest", + "org.elasticsearch.vec", "org.elasticsearch.xcontent" ); diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/MrjarPlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/MrjarPlugin.java index 6524247c4c8f6..16c286bfdd3f2 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/MrjarPlugin.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/MrjarPlugin.java @@ -49,6 +49,7 @@ public class MrjarPlugin implements Plugin { private static final Pattern MRJAR_SOURCESET_PATTERN = Pattern.compile("main(\\d{2})"); + private static final String MRJAR_IDEA_ENABLED = 
"org.gradle.mrjar.idea.enabled"; private final JavaToolchainService javaToolchains; @@ -61,23 +62,30 @@ public class MrjarPlugin implements Plugin { public void apply(Project project) { project.getPluginManager().apply(ElasticsearchJavaBasePlugin.class); var javaExtension = project.getExtensions().getByType(JavaPluginExtension.class); - - List mainVersions = findSourceVersions(project); - List mainSourceSets = new ArrayList<>(); - mainSourceSets.add(SourceSet.MAIN_SOURCE_SET_NAME); - List testSourceSets = new ArrayList<>(mainSourceSets); - testSourceSets.add(SourceSet.TEST_SOURCE_SET_NAME); - for (int javaVersion : mainVersions) { - String mainSourceSetName = SourceSet.MAIN_SOURCE_SET_NAME + javaVersion; - SourceSet mainSourceSet = addSourceSet(project, javaExtension, mainSourceSetName, mainSourceSets, javaVersion); - configureSourceSetInJar(project, mainSourceSet, javaVersion); - mainSourceSets.add(mainSourceSetName); - testSourceSets.add(mainSourceSetName); - - String testSourceSetName = SourceSet.TEST_SOURCE_SET_NAME + javaVersion; - SourceSet testSourceSet = addSourceSet(project, javaExtension, testSourceSetName, testSourceSets, javaVersion); - testSourceSets.add(testSourceSetName); - createTestTask(project, testSourceSet, javaVersion, mainSourceSets); + var isIdeaSync = System.getProperty("idea.sync.active", "false").equals("true"); + var ideaSourceSetsEnabled = project.hasProperty(MRJAR_IDEA_ENABLED) && project.property(MRJAR_IDEA_ENABLED).equals("true"); + + // Ignore version-specific source sets if we are importing into IntelliJ and have not explicitly enabled this. 
+ // Avoids an IntelliJ bug: + // https://youtrack.jetbrains.com/issue/IDEA-285640/Compiler-Options-Settings-language-level-is-set-incorrectly-with-JDK-19ea + if (isIdeaSync == false || ideaSourceSetsEnabled) { + List mainVersions = findSourceVersions(project); + List mainSourceSets = new ArrayList<>(); + mainSourceSets.add(SourceSet.MAIN_SOURCE_SET_NAME); + List testSourceSets = new ArrayList<>(mainSourceSets); + testSourceSets.add(SourceSet.TEST_SOURCE_SET_NAME); + for (int javaVersion : mainVersions) { + String mainSourceSetName = SourceSet.MAIN_SOURCE_SET_NAME + javaVersion; + SourceSet mainSourceSet = addSourceSet(project, javaExtension, mainSourceSetName, mainSourceSets, javaVersion); + configureSourceSetInJar(project, mainSourceSet, javaVersion); + mainSourceSets.add(mainSourceSetName); + testSourceSets.add(mainSourceSetName); + + String testSourceSetName = SourceSet.TEST_SOURCE_SET_NAME + javaVersion; + SourceSet testSourceSet = addSourceSet(project, javaExtension, testSourceSetName, testSourceSets, javaVersion); + testSourceSets.add(testSourceSetName); + createTestTask(project, testSourceSet, javaVersion, mainSourceSets); + } } configureMrjar(project); diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/doc/AsciidocSnippetParser.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/doc/AsciidocSnippetParser.java new file mode 100644 index 0000000000000..7b35fd29fbd1a --- /dev/null +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/doc/AsciidocSnippetParser.java @@ -0,0 +1,306 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. 
+ */ + +package org.elasticsearch.gradle.internal.doc; + +import org.gradle.api.InvalidUserDataException; + +import java.io.File; +import java.io.IOException; +import java.nio.charset.StandardCharsets; +import java.nio.file.Files; +import java.nio.file.Path; +import java.util.ArrayList; +import java.util.Collection; +import java.util.List; +import java.util.Map; +import java.util.function.BiConsumer; +import java.util.regex.Matcher; +import java.util.regex.Pattern; +import java.util.stream.Collectors; +import java.util.stream.Stream; + +public class AsciidocSnippetParser implements SnippetParser { + public static final Pattern SNIPPET_PATTERN = Pattern.compile("-{4,}\\s*"); + + private static final String CATCH = "catch:\\s*((?:\\/[^\\/]+\\/)|[^ \\]]+)"; + private static final String SKIP_REGEX = "skip:([^\\]]+)"; + private static final String SETUP = "setup:([^ \\]]+)"; + private static final String TEARDOWN = "teardown:([^ \\]]+)"; + private static final String WARNING = "warning:(.+)"; + private static final String NON_JSON = "(non_json)"; + private static final String SCHAR = "(?:\\\\\\/|[^\\/])"; + private static final String SUBSTITUTION = "s\\/(" + SCHAR + "+)\\/(" + SCHAR + "*)\\/"; + private static final String TEST_SYNTAX = "(?:" + + CATCH + + "|" + + SUBSTITUTION + + "|" + + SKIP_REGEX + + "|(continued)|" + + SETUP + + "|" + + TEARDOWN + + "|" + + WARNING + + "|(skip_shard_failures)) ?"; + + private final Map defaultSubstitutions; + + public AsciidocSnippetParser(Map defaultSubstitutions) { + this.defaultSubstitutions = defaultSubstitutions; + } + + @Override + public List parseDoc(File rootDir, File docFile, List> substitutions) { + String lastLanguage = null; + Snippet snippet = null; + String name = null; + int lastLanguageLine = 0; + StringBuilder contents = null; + List snippets = new ArrayList<>(); + + try (Stream lines = Files.lines(docFile.toPath(), StandardCharsets.UTF_8)) { + List linesList = lines.collect(Collectors.toList()); + for (int 
lineNumber = 0; lineNumber < linesList.size(); lineNumber++) { + String line = linesList.get(lineNumber); + if (SNIPPET_PATTERN.matcher(line).matches()) { + if (snippet == null) { + Path path = rootDir.toPath().relativize(docFile.toPath()); + snippet = new Snippet(path, lineNumber + 1, name); + snippets.add(snippet); + if (lastLanguageLine == lineNumber - 1) { + snippet.language = lastLanguage; + } + name = null; + } else { + snippet.end = lineNumber + 1; + } + continue; + } + + Source source = matchSource(line); + if (source.matches) { + lastLanguage = source.language; + lastLanguageLine = lineNumber; + name = source.name; + continue; + } + if (consoleHandled(docFile.getName(), lineNumber, line, snippet)) { + continue; + } + if (testHandled(docFile.getName(), lineNumber, line, snippet, substitutions)) { + continue; + } + if (testResponseHandled(docFile.getName(), lineNumber, line, snippet, substitutions)) { + continue; + } + if (line.matches("\\/\\/\s*TESTSETUP\s*")) { + snippet.testSetup = true; + continue; + } + if (line.matches("\\/\\/\s*TEARDOWN\s*")) { + snippet.testTearDown = true; + continue; + } + if (snippet == null) { + // Outside + continue; + } + if (snippet.end == Snippet.NOT_FINISHED) { + // Inside + if (contents == null) { + contents = new StringBuilder(); + } + // We don't need the annotations + line = line.replaceAll("<\\d+>", ""); + // Nor any trailing spaces + line = line.replaceAll("\s+$", ""); + contents.append(line).append("\n"); + continue; + } + // Allow line continuations for console snippets within lists + if (snippet != null && line.trim().equals("+")) { + continue; + } + finalizeSnippet(snippet, contents.toString(), defaultSubstitutions, substitutions); + substitutions = new ArrayList<>(); + ; + snippet = null; + contents = null; + } + if (snippet != null) { + finalizeSnippet(snippet, contents.toString(), defaultSubstitutions, substitutions); + contents = null; + snippet = null; + substitutions = new ArrayList<>(); + } + } catch 
(IOException e) { + e.printStackTrace(); + } + return snippets; + } + + static Snippet finalizeSnippet( + final Snippet snippet, + String contents, + Map defaultSubstitutions, + Collection> substitutions + ) { + snippet.contents = contents.toString(); + snippet.validate(); + escapeSubstitutions(snippet, defaultSubstitutions, substitutions); + return snippet; + } + + private static void escapeSubstitutions( + Snippet snippet, + Map defaultSubstitutions, + Collection> substitutions + ) { + BiConsumer doSubstitution = (pattern, subst) -> { + /* + * $body is really common but it looks like a + * backreference so we just escape it here to make the + * tests cleaner. + */ + subst = subst.replace("$body", "\\$body"); + subst = subst.replace("$_path", "\\$_path"); + subst = subst.replace("\\n", "\n"); + snippet.contents = snippet.contents.replaceAll(pattern, subst); + }; + defaultSubstitutions.forEach(doSubstitution); + + if (substitutions != null) { + substitutions.forEach(e -> doSubstitution.accept(e.getKey(), e.getValue())); + } + } + + private boolean testResponseHandled( + String name, + int lineNumber, + String line, + Snippet snippet, + final List> substitutions + ) { + Matcher matcher = Pattern.compile("\\/\\/\s*TESTRESPONSE(\\[(.+)\\])?\s*").matcher(line); + if (matcher.matches()) { + if (snippet == null) { + throw new InvalidUserDataException(name + ":" + lineNumber + ": TESTRESPONSE not paired with a snippet at "); + } + snippet.testResponse = true; + if (matcher.group(2) != null) { + String loc = name + ":" + lineNumber; + ParsingUtils.parse( + loc, + matcher.group(2), + "(?:" + SUBSTITUTION + "|" + NON_JSON + "|" + SKIP_REGEX + ") ?", + (Matcher m, Boolean last) -> { + if (m.group(1) != null) { + // TESTRESPONSE[s/adsf/jkl/] + substitutions.add(Map.entry(m.group(1), m.group(2))); + } else if (m.group(3) != null) { + // TESTRESPONSE[non_json] + substitutions.add(Map.entry("^", "/")); + substitutions.add(Map.entry("\n$", "\\\\s*/")); + 
substitutions.add(Map.entry("( +)", "$1\\\\s+")); + substitutions.add(Map.entry("\n", "\\\\s*\n ")); + } else if (m.group(4) != null) { + // TESTRESPONSE[skip:reason] + snippet.skip = m.group(4); + } + } + ); + } + return true; + } + return false; + } + + private boolean testHandled(String name, int lineNumber, String line, Snippet snippet, List> substitutions) { + Matcher matcher = Pattern.compile("\\/\\/\s*TEST(\\[(.+)\\])?\s*").matcher(line); + if (matcher.matches()) { + if (snippet == null) { + throw new InvalidUserDataException(name + ":" + lineNumber + ": TEST not paired with a snippet at "); + } + snippet.test = true; + if (matcher.group(2) != null) { + String loc = name + ":" + lineNumber; + ParsingUtils.parse(loc, matcher.group(2), TEST_SYNTAX, (Matcher m, Boolean last) -> { + if (m.group(1) != null) { + snippet.catchPart = m.group(1); + return; + } + if (m.group(2) != null) { + substitutions.add(Map.entry(m.group(2), m.group(3))); + return; + } + if (m.group(4) != null) { + snippet.skip = m.group(4); + return; + } + if (m.group(5) != null) { + snippet.continued = true; + return; + } + if (m.group(6) != null) { + snippet.setup = m.group(6); + return; + } + if (m.group(7) != null) { + snippet.teardown = m.group(7); + return; + } + if (m.group(8) != null) { + snippet.warnings.add(m.group(8)); + return; + } + if (m.group(9) != null) { + snippet.skipShardsFailures = true; + return; + } + throw new InvalidUserDataException("Invalid test marker: " + line); + }); + } + return true; + } + return false; + } + + private boolean consoleHandled(String fileName, int lineNumber, String line, Snippet snippet) { + if (line.matches("\\/\\/\s*CONSOLE\s*")) { + if (snippet == null) { + throw new InvalidUserDataException(fileName + ":" + lineNumber + ": CONSOLE not paired with a snippet"); + } + if (snippet.console != null) { + throw new InvalidUserDataException(fileName + ":" + lineNumber + ": Can't be both CONSOLE and NOTCONSOLE"); + } + snippet.console = true; + return 
true; + } else if (line.matches("\\/\\/\s*NOTCONSOLE\s*")) { + if (snippet == null) { + throw new InvalidUserDataException(fileName + ":" + lineNumber + ": NOTCONSOLE not paired with a snippet"); + } + if (snippet.console != null) { + throw new InvalidUserDataException(fileName + ":" + lineNumber + ": Can't be both CONSOLE and NOTCONSOLE"); + } + snippet.console = false; + return true; + } + return false; + } + + static Source matchSource(String line) { + Pattern pattern = Pattern.compile("\\[\"?source\"?(?:\\.[^,]+)?,\\s*\"?([-\\w]+)\"?(,((?!id=).)*(id=\"?([-\\w]+)\"?)?(.*))?].*"); + Matcher matcher = pattern.matcher(line); + if (matcher.matches()) { + return new Source(true, matcher.group(1), matcher.group(5)); + } + return new Source(false, null, null); + } +} diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/doc/DocSnippetTask.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/doc/DocSnippetTask.java new file mode 100644 index 0000000000000..87f0621d53fba --- /dev/null +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/doc/DocSnippetTask.java @@ -0,0 +1,88 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. 
+ */ + +package org.elasticsearch.gradle.internal.doc; + +import org.apache.commons.collections.map.HashedMap; +import org.gradle.api.Action; +import org.gradle.api.DefaultTask; +import org.gradle.api.InvalidUserDataException; +import org.gradle.api.file.ConfigurableFileTree; +import org.gradle.api.tasks.Input; +import org.gradle.api.tasks.InputFiles; +import org.gradle.api.tasks.TaskAction; + +import java.io.File; +import java.util.ArrayList; +import java.util.List; +import java.util.Map; + +public abstract class DocSnippetTask extends DefaultTask { + + /** + * Action to take on each snippet. Called with a single parameter, an + * instance of Snippet. + */ + private Action perSnippet; + + /** + * The docs to scan. Defaults to every file in the directory exception the + * build.gradle file because that is appropriate for Elasticsearch's docs + * directory. + */ + private ConfigurableFileTree docs; + private Map defaultSubstitutions = new HashedMap(); + + @InputFiles + public ConfigurableFileTree getDocs() { + return docs; + } + + public void setDocs(ConfigurableFileTree docs) { + this.docs = docs; + } + + /** + * Substitutions done on every snippet's contents. 
+ */ + @Input + public Map getDefaultSubstitutions() { + return defaultSubstitutions; + } + + @TaskAction + void executeTask() { + for (File file : docs) { + List snippets = parseDocFile(docs.getDir(), file, new ArrayList<>()); + if (perSnippet != null) { + snippets.forEach(perSnippet::execute); + } + } + } + + List parseDocFile(File rootDir, File docFile, List> substitutions) { + SnippetParser parser = parserForFileType(docFile); + return parser.parseDoc(rootDir, docFile, substitutions); + } + + private SnippetParser parserForFileType(File docFile) { + if (docFile.getName().endsWith(".asciidoc")) { + return new AsciidocSnippetParser(defaultSubstitutions); + } + throw new InvalidUserDataException("Unsupported file type: " + docFile.getName()); + } + + public void setDefaultSubstitutions(Map defaultSubstitutions) { + this.defaultSubstitutions = defaultSubstitutions; + } + + public void setPerSnippet(Action perSnippet) { + this.perSnippet = perSnippet; + } + +} diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/doc/DocsTestPlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/doc/DocsTestPlugin.java new file mode 100644 index 0000000000000..bbb5102dd6699 --- /dev/null +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/doc/DocsTestPlugin.java @@ -0,0 +1,106 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. 
+ */ + +package org.elasticsearch.gradle.internal.doc; + +import org.elasticsearch.gradle.OS; +import org.elasticsearch.gradle.Version; +import org.elasticsearch.gradle.VersionProperties; +import org.elasticsearch.gradle.testclusters.ElasticsearchCluster; +import org.elasticsearch.gradle.testclusters.TestClustersPlugin; +import org.elasticsearch.gradle.testclusters.TestDistribution; +import org.gradle.api.NamedDomainObjectContainer; +import org.gradle.api.Plugin; +import org.gradle.api.Project; +import org.gradle.api.file.Directory; +import org.gradle.api.file.ProjectLayout; +import org.gradle.api.internal.file.FileOperations; +import org.gradle.api.plugins.JavaPluginExtension; +import org.gradle.api.provider.Provider; +import org.gradle.api.tasks.TaskProvider; + +import java.util.Map; + +import javax.inject.Inject; + +public class DocsTestPlugin implements Plugin { + private FileOperations fileOperations; + private ProjectLayout projectLayout; + + @Inject + DocsTestPlugin(FileOperations fileOperations, ProjectLayout projectLayout) { + this.projectLayout = projectLayout; + this.fileOperations = fileOperations; + } + + @Override + public void apply(Project project) { + project.getPluginManager().apply("elasticsearch.legacy-yaml-rest-test"); + + String distribution = System.getProperty("tests.distribution", "default"); + // The distribution can be configured with -Dtests.distribution on the command line + NamedDomainObjectContainer testClusters = (NamedDomainObjectContainer) project + .getExtensions() + .getByName(TestClustersPlugin.EXTENSION_NAME); + + testClusters.matching((c) -> c.getName().equals("yamlRestTest")).configureEach(c -> { + c.setTestDistribution(TestDistribution.valueOf(distribution.toUpperCase())); + c.setNameCustomization((name) -> name.replace("yamlRestTest", "node")); + }); + + project.getTasks().named("assemble").configure(task -> { task.setEnabled(false); }); + + Map commonDefaultSubstitutions = Map.of( + /* These match up with the asciidoc 
syntax for substitutions but + * the values may differ. In particular {version} needs to resolve + * to the version being built for testing but needs to resolve to + * the last released version for docs. */ + "\\{version\\}", + Version.fromString(VersionProperties.getElasticsearch()).toString(), + "\\{version_qualified\\}", + VersionProperties.getElasticsearch(), + "\\{lucene_version\\}", + VersionProperties.getLucene().replaceAll("-snapshot-\\w+$", ""), + "\\{build_flavor\\}", + distribution, + "\\{build_type\\}", + OS.conditionalString().onWindows(() -> "zip").onUnix(() -> "tar").supply() + ); + + project.getTasks().register("listSnippets", DocSnippetTask.class, task -> { + task.setGroup("Docs"); + task.setDescription("List each snippet"); + task.setDefaultSubstitutions(commonDefaultSubstitutions); + task.setPerSnippet(snippet -> System.out.println(snippet)); + }); + + project.getTasks().register("listConsoleCandidates", DocSnippetTask.class, task -> { + task.setGroup("Docs"); + task.setDescription("List snippets that probably should be marked // CONSOLE"); + task.setDefaultSubstitutions(commonDefaultSubstitutions); + task.setPerSnippet(snippet -> { + if (snippet.isConsoleCandidate()) { + System.out.println(snippet); + } + }); + }); + + Provider restRootDir = projectLayout.getBuildDirectory().dir("rest"); + TaskProvider buildRestTests = project.getTasks() + .register("buildRestTests", RestTestsFromDocSnippetTask.class, task -> { + task.setDefaultSubstitutions(commonDefaultSubstitutions); + task.getTestRoot().convention(restRootDir); + task.doFirst(task1 -> fileOperations.delete(restRootDir.get())); + }); + + // TODO: This effectively makes testRoot not customizable, which we don't do anyway atm + JavaPluginExtension byType = project.getExtensions().getByType(JavaPluginExtension.class); + byType.getSourceSets().getByName("yamlRestTest").getOutput().dir(Map.of("builtBy", buildRestTests), restRootDir); + } + +} diff --git 
a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/doc/ParsingUtils.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/doc/ParsingUtils.java new file mode 100644 index 0000000000000..b17dd4c7e21d3 --- /dev/null +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/doc/ParsingUtils.java @@ -0,0 +1,57 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.gradle.internal.doc; + +import org.gradle.api.InvalidUserDataException; + +import java.util.function.BiConsumer; +import java.util.regex.Matcher; +import java.util.regex.Pattern; + +public class ParsingUtils { + + static void extraContent(String message, String content, int offset, String location, String pattern) { + StringBuilder cutOut = new StringBuilder(); + cutOut.append(content.substring(offset - 6, offset)); + cutOut.append('*'); + cutOut.append(content.substring(offset, Math.min(offset + 5, content.length()))); + String cutOutNoNl = cutOut.toString().replace("\n", "\\n"); + throw new InvalidUserDataException( + location + ": Extra content " + message + " ('" + cutOutNoNl + "') matching [" + pattern + "]: " + content + ); + } + + /** + * Repeatedly match the pattern to the string, calling the closure with the + * matchers each time there is a match. If there are characters that don't + * match then blow up. If the closure takes two parameters then the second + * one is "is this the last match?". + */ + static void parse(String location, String content, String pattern, BiConsumer testHandler) { + if (content == null) { + return; // Silly null, only real stuff gets to match! 
+ } + Matcher m = Pattern.compile(pattern).matcher(content); + int offset = 0; + while (m.find()) { + if (m.start() != offset) { + extraContent("between [" + offset + "] and [" + m.start() + "]", content, offset, location, pattern); + } + offset = m.end(); + testHandler.accept(m, offset == content.length()); + } + if (offset == 0) { + throw new InvalidUserDataException(location + ": Didn't match " + pattern + ": " + content); + } + if (offset != content.length()) { + extraContent("after [" + offset + "]", content, offset, location, pattern); + } + } + +} diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/doc/RestTestsFromDocSnippetTask.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/doc/RestTestsFromDocSnippetTask.java new file mode 100644 index 0000000000000..c5b1d67627dd9 --- /dev/null +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/doc/RestTestsFromDocSnippetTask.java @@ -0,0 +1,526 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1.
+ */ + +package org.elasticsearch.gradle.internal.doc; + +import groovy.transform.PackageScope; + +import org.gradle.api.InvalidUserDataException; +import org.gradle.api.file.DirectoryProperty; +import org.gradle.api.internal.file.FileOperations; +import org.gradle.api.model.ObjectFactory; +import org.gradle.api.tasks.Input; +import org.gradle.api.tasks.Internal; +import org.gradle.api.tasks.OutputDirectory; + +import java.io.File; +import java.io.IOException; +import java.io.PrintWriter; +import java.nio.file.Files; +import java.nio.file.Path; +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.stream.Collectors; + +import javax.inject.Inject; + +public abstract class RestTestsFromDocSnippetTask extends DocSnippetTask { + + private Map setups = new HashMap<>(); + + private Map teardowns = new HashMap(); + + /** + * Test setups defined in the build instead of the docs so they can be + * shared between many doc files. + */ + @Input + public Map getSetups() { + return setups; + } + + public void setSetups(Map setups) { + this.setups = setups; + } + + /** + * Test teardowns defined in the build instead of the docs so they can be + * shared between many doc files. + */ + @Input + public Map getTeardowns() { + return teardowns; + } + + public void setTeardowns(Map teardowns) { + this.teardowns = teardowns; + } + + /** + * A list of files that contain snippets that *probably* should be + * converted to `// CONSOLE` but have yet to be converted. If a file is in + * this list and doesn't contain unconverted snippets this task will fail. + * If there are unconverted snippets not in this list then this task will + * fail. All files are paths relative to the docs dir. 
+ */ + private List expectedUnconvertedCandidates; + + @Input + public List getExpectedUnconvertedCandidates() { + return expectedUnconvertedCandidates; + } + + public void setExpectedUnconvertedCandidates(List expectedUnconvertedCandidates) { + this.expectedUnconvertedCandidates = expectedUnconvertedCandidates; + } + + /** + * Root directory of the tests being generated. To make rest tests happy + * we generate them in a testRoot which is contained in this directory. + */ + private DirectoryProperty testRoot; + + private Set names = new HashSet<>(); + + @Internal + public Set getNames() { + return names; + } + + public void setNames(Set names) { + this.names = names; + } + + @Inject + public abstract FileOperations getFileOperations(); + + /** + * Root directory containing all the files generated by this task. It is + * contained within testRoot. + */ + @OutputDirectory + File getOutputRoot() { + return new File(testRoot.get().getAsFile(), "/rest-api-spec/test"); + } + + @OutputDirectory + DirectoryProperty getTestRoot() { + return testRoot; + } + + @Inject + public RestTestsFromDocSnippetTask(ObjectFactory objectFactory) { + testRoot = objectFactory.directoryProperty(); + TestBuilder builder = new TestBuilder(); + + setPerSnippet(snippet -> builder.handleSnippet(snippet)); + doLast(task -> { + builder.finishLastTest(); + builder.checkUnconverted(); + }); + } + + /** + * Certain requests should not have the shard failure check because the + * format of the response is incompatible i.e. it is not a JSON object. + */ + static boolean shouldAddShardFailureCheck(String path) { + return path.startsWith("_cat") == false && path.startsWith("_ml/datafeeds/") == false; + } + + /** + * Converts Kibana's block quoted strings into standard JSON. These + * {@code """} delimited strings can be embedded in CONSOLE and can + * contain newlines and {@code "} without the normal JSON escaping. + * This has to add it. 
+ */ + @PackageScope + static String replaceBlockQuote(String body) { + int start = body.indexOf("\"\"\""); + if (start < 0) { + return body; + } + /* + * 1.3 is a fairly wild guess of the extra space needed to hold + * the escaped string. + */ + StringBuilder result = new StringBuilder((int) (body.length() * 1.3)); + int startOfNormal = 0; + while (start >= 0) { + int end = body.indexOf("\"\"\"", start + 3); + if (end < 0) { + throw new InvalidUserDataException("Invalid block quote starting at " + start + " in:\n" + body); + } + result.append(body.substring(startOfNormal, start)); + result.append('"'); + result.append(body.substring(start + 3, end).replace("\"", "\\\"").replace("\n", "\\n")); + result.append('"'); + startOfNormal = end + 3; + start = body.indexOf("\"\"\"", startOfNormal); + } + result.append(body.substring(startOfNormal)); + return result.toString(); + } + + private class TestBuilder { + /** + * These languages aren't supported by the syntax highlighter so we + * shouldn't use them. + */ + private static final List BAD_LANGUAGES = List.of("json", "javascript"); + + String method = "(?GET|PUT|POST|HEAD|OPTIONS|DELETE)"; + String pathAndQuery = "(?[^\\n]+)"; + + String badBody = "GET|PUT|POST|HEAD|OPTIONS|DELETE|startyaml|#"; + String body = "(?(?:\\n(?!" + badBody + ")[^\\n]+)+)"; + + String rawRequest = "(?:" + method + "\\s+" + pathAndQuery + body + "?)"; + + String yamlRequest = "(?:startyaml(?s)(?.+?)(?-s)endyaml)"; + String nonComment = "(?:" + rawRequest + "|" + yamlRequest + ")"; + String comment = "(?#.+)"; + + String SYNTAX = "(?:" + comment + "|" + nonComment + ")\\n+"; + + /** + * Files containing all snippets that *probably* should be converted + * to `// CONSOLE` but have yet to be converted. All files are paths + * relative to the docs dir. + */ + private Set unconvertedCandidates = new HashSet<>(); + + /** + * The last non-TESTRESPONSE snippet. 
+ */ + Snippet previousTest; + + /** + * The file in which we saw the last snippet that made a test. + */ + Path lastDocsPath; + + /** + * The file we're building. + */ + PrintWriter current; + + Set names = new HashSet<>(); + + /** + * Called each time a snippet is encountered. Tracks the snippets and + * calls buildTest to actually build the test. + */ + public void handleSnippet(Snippet snippet) { + if (snippet.isConsoleCandidate()) { + unconvertedCandidates.add(snippet.path.toString().replace('\\', '/')); + } + if (BAD_LANGUAGES.contains(snippet.language)) { + throw new InvalidUserDataException(snippet + ": Use `js` instead of `" + snippet.language + "`."); + } + if (snippet.testSetup) { + testSetup(snippet); + previousTest = snippet; + return; + } + if (snippet.testTearDown) { + testTearDown(snippet); + previousTest = snippet; + return; + } + if (snippet.testResponse || snippet.language.equals("console-result")) { + if (previousTest == null) { + throw new InvalidUserDataException(snippet + ": No paired previous test"); + } + if (previousTest.path.equals(snippet.path) == false) { + throw new InvalidUserDataException(snippet + ": Result can't be first in file"); + } + response(snippet); + return; + } + if (("js".equals(snippet.language)) && snippet.console != null && snippet.console) { + throw new InvalidUserDataException(snippet + ": Use `[source,console]` instead of `// CONSOLE`."); + } + if (snippet.test || snippet.language.equals("console")) { + test(snippet); + previousTest = snippet; + return; + } + // Must be an unmarked snippet.... + } + + private void test(Snippet test) { + setupCurrent(test); + + if (test.continued) { + /* Catch some difficult to debug errors with // TEST[continued] + * and throw a helpful error message. 
*/ + if (previousTest == null || previousTest.path.equals(test.path) == false) { + throw new InvalidUserDataException("// TEST[continued] " + "cannot be on first snippet in a file: " + test); + } + if (previousTest != null && previousTest.testSetup) { + throw new InvalidUserDataException("// TEST[continued] " + "cannot immediately follow // TESTSETUP: " + test); + } + if (previousTest != null && previousTest.testTearDown) { + throw new InvalidUserDataException("// TEST[continued] " + "cannot immediately follow // TEARDOWN: " + test); + } + } else { + current.println("---"); + if (test.name != null && test.name.isBlank() == false) { + if (names.add(test.name) == false) { + throw new InvalidUserDataException("Duplicated snippet name '" + test.name + "': " + test); + } + current.println("\"" + test.name + "\":"); + } else { + current.println("\"line_" + test.start + "\":"); + } + /* The Elasticsearch test runner doesn't support quite a few + * constructs unless we output this skip. We don't know if + * we're going to use these constructs, but we might so we + * output the skip just in case. 
*/ + current.println(" - skip:"); + current.println(" features:"); + current.println(" - default_shards"); + current.println(" - stash_in_key"); + current.println(" - stash_in_path"); + current.println(" - stash_path_replace"); + current.println(" - warnings"); + } + if (test.skip != null) { + if (test.continued) { + throw new InvalidUserDataException("Continued snippets " + "can't be skipped"); + } + current.println(" - always_skip"); + current.println(" reason: " + test.skip); + } + if (test.setup != null) { + setup(test); + } + + body(test, false); + + if (test.teardown != null) { + teardown(test); + } + } + + private void response(Snippet response) { + if (null == response.skip) { + current.println(" - match:"); + current.println(" $body:"); + replaceBlockQuote(response.contents).lines().forEach(line -> current.println(" " + line)); + } + } + + private void teardown(final Snippet snippet) { + // insert a teardown defined outside of the docs + for (final String name : snippet.teardown.split(",")) { + final String teardown = teardowns.get(name); + if (teardown == null) { + throw new InvalidUserDataException("Couldn't find named teardown " + name + " for " + snippet); + } + current.println("# Named teardown " + name); + current.println(teardown); + } + } + + private void testTearDown(Snippet snippet) { + if (previousTest != null && previousTest.testSetup == false && snippet.path.equals(lastDocsPath)) { + throw new InvalidUserDataException(snippet + " must follow test setup or be first"); + } + setupCurrent(snippet); + current.println("---"); + current.println("teardown:"); + body(snippet, true); + } + + void emitDo( + String method, + String pathAndQuery, + String body, + String catchPart, + List warnings, + boolean inSetup, + boolean skipShardFailures + ) { + String[] tokenized = pathAndQuery.split("\\?"); + String path = tokenized[0]; + String query = tokenized.length > 1 ? tokenized[1] : null; + if (path == null) { + path = ""; // Catch requests to the root...
+ } else { + path = path.replace("<", "%3C").replace(">", "%3E"); + } + current.println(" - do:"); + if (catchPart != null) { + current.println(" catch: " + catchPart); + } + if (false == warnings.isEmpty()) { + current.println(" warnings:"); + for (String warning : warnings) { + // Escape " because we're going to quote the warning + String escaped = warning.replaceAll("\"", "\\\\\""); + /* Quote the warning in case it starts with [ which makes + * it look too much like an array. */ + current.println(" - \"" + escaped + "\""); + } + } + current.println(" raw:"); + current.println(" method: " + method); + current.println(" path: \"" + path + "\""); + if (query != null) { + for (String param : query.split("&")) { + String[] tokenizedQuery = param.split("="); + String paramName = tokenizedQuery[0]; + String paramValue = tokenizedQuery.length > 1 ? tokenizedQuery[1] : null; + if (paramValue == null) { + paramValue = ""; + } + current.println(" " + paramName + ": \"" + paramValue + "\""); + } + } + if (body != null) { + // Throw out the leading newline we get from parsing the body + body = body.substring(1); + // Replace """ quoted strings with valid json ones + body = replaceBlockQuote(body); + current.println(" body: |"); + body.lines().forEach(line -> current.println(" " + line)); + } + /* Catch any shard failures. These only cause a non-200 response if + * no shard succeeds. But we need to fail the tests on all of these + * because they mean invalid syntax or broken queries or something + * else that we don't want to teach people to do. The REST test + * framework doesn't allow us to have assertions in the setup + * section so we have to skip it there. 
We also omit the assertion + * from APIs that don't return a JSON object + */ + if (false == inSetup && skipShardFailures == false && shouldAddShardFailureCheck(path)) { + current.println(" - is_false: _shards.failures"); + } + } + + private void body(Snippet snippet, boolean inSetup) { + ParsingUtils.parse(snippet.getLocation(), snippet.contents, SYNTAX, (matcher, last) -> { + if (matcher.group("comment") != null) { + // Comment + return; + } + String yamlRequest = matcher.group("yaml"); + if (yamlRequest != null) { + current.println(yamlRequest); + return; + } + String method = matcher.group("method"); + String pathAndQuery = matcher.group("pathAndQuery"); + String body = matcher.group("body"); + String catchPart = last ? snippet.catchPart : null; + if (pathAndQuery.startsWith("/")) { + // Leading '/'s break the generated paths + pathAndQuery = pathAndQuery.substring(1); + } + emitDo(method, pathAndQuery, body, catchPart, snippet.warnings, inSetup, snippet.skipShardsFailures); + }); + + } + + private PrintWriter setupCurrent(Snippet test) { + if (test.path.equals(lastDocsPath)) { + return current; + } + names.clear(); + finishLastTest(); + lastDocsPath = test.path; + + // Make the destination file: + // Shift the path into the destination directory tree + Path dest = getOutputRoot().toPath().resolve(test.path); + // Replace the extension + String fileName = dest.getName(dest.getNameCount() - 1).toString(); + dest = dest.getParent().resolve(fileName.replace(".asciidoc", ".yml")); + + // Now setup the writer + try { + Files.createDirectories(dest.getParent()); + current = new PrintWriter(dest.toFile(), "UTF-8"); + return current; + } catch (IOException e) { + throw new RuntimeException(e); + } + } + + private void testSetup(Snippet snippet) { + if (snippet.path.equals(lastDocsPath)) { + throw new InvalidUserDataException( + snippet + ": wasn't first. TESTSETUP can only be used in the first snippet of a document."
+ ); + } + setupCurrent(snippet); + current.println("---"); + current.println("setup:"); + if (snippet.setup != null) { + setup(snippet); + } + body(snippet, true); + } + + private void setup(final Snippet snippet) { + // insert a setup defined outside of the docs + for (final String name : snippet.setup.split(",")) { + final String setup = setups.get(name); + if (setup == null) { + throw new InvalidUserDataException("Couldn't find named setup " + name + " for " + snippet); + } + current.println("# Named setup " + name); + current.println(setup); + } + } + + public void checkUnconverted() { + List listedButNotFound = new ArrayList<>(); + for (String listed : expectedUnconvertedCandidates) { + if (false == unconvertedCandidates.remove(listed)) { + listedButNotFound.add(listed); + } + } + String message = ""; + if (false == listedButNotFound.isEmpty()) { + Collections.sort(listedButNotFound); + listedButNotFound = listedButNotFound.stream().map(notfound -> " " + notfound).collect(Collectors.toList()); + message += "Expected unconverted snippets but none found in:\n"; + message += listedButNotFound.stream().collect(Collectors.joining("\n")); + } + if (false == unconvertedCandidates.isEmpty()) { + List foundButNotListed = new ArrayList<>(unconvertedCandidates); + Collections.sort(foundButNotListed); + foundButNotListed = foundButNotListed.stream().map(f -> " " + f).collect(Collectors.toList()); + if (false == "".equals(message)) { + message += "\n"; + } + message += "Unexpected unconverted snippets:\n"; + message += foundButNotListed.stream().collect(Collectors.joining("\n")); + } + if (false == "".equals(message)) { + throw new InvalidUserDataException(message); + } + } + + public void finishLastTest() { + if (current != null) { + current.close(); + current = null; + } + } + } + +} diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/doc/Snippet.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/doc/Snippet.java 
new file mode 100644 index 0000000000000..b8aa864734f44 --- /dev/null +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/doc/Snippet.java @@ -0,0 +1,188 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.gradle.internal.doc; + +import com.fasterxml.jackson.core.JsonFactory; +import com.fasterxml.jackson.core.JsonParseException; +import com.fasterxml.jackson.core.JsonParser; + +import org.gradle.api.InvalidUserDataException; + +import java.io.IOException; +import java.nio.file.Path; +import java.util.ArrayList; +import java.util.List; + +public class Snippet { + static final int NOT_FINISHED = -1; + + /** + * Path to the file containing this snippet. Relative to docs.dir of the + * SnippetsTask that created it. + */ + Path path; + int start; + int end = NOT_FINISHED; + public String contents; + + Boolean console = null; + boolean test = false; + boolean testResponse = false; + boolean testSetup = false; + boolean testTearDown = false; + String skip = null; + boolean continued = false; + String language = null; + String catchPart = null; + String setup = null; + String teardown = null; + boolean curl; + List warnings = new ArrayList(); + boolean skipShardsFailures = false; + String name; + + public Snippet(Path path, int start, String name) { + this.path = path; + this.start = start; + this.name = name; + } + + public void validate() { + if (language == null) { + throw new InvalidUserDataException( + name + + ": " + + "Snippet missing a language. This is required by " + + "Elasticsearch's doc testing infrastructure so we " + + "be sure we don't accidentally forget to test a " + + "snippet." 
+ ); + } + assertValidCurlInput(); + assertValidJsonInput(); + } + + String getLocation() { + return path + "[" + start + ":" + end + "]"; + } + + private void assertValidCurlInput() { + // Try to detect snippets that contain `curl` + if ("sh".equals(language) || "shell".equals(language)) { + curl = contents.contains("curl"); + if (console == Boolean.FALSE && curl == false) { + throw new InvalidUserDataException(name + ": " + "No need for NOTCONSOLE if snippet doesn't " + "contain `curl`."); + } + } + } + + private void assertValidJsonInput() { + if (testResponse && ("js".equals(language) || "console-result".equals(language)) && null == skip) { + String quoted = contents + // quote values starting with $ + .replaceAll("([:,])\\s*(\\$[^ ,\\n}]+)", "$1 \"$2\"") + // quote fields starting with $ + .replaceAll("(\\$[^ ,\\n}]+)\\s*:", "\"$1\":"); + + JsonFactory jf = new JsonFactory(); + jf.configure(JsonParser.Feature.ALLOW_BACKSLASH_ESCAPING_ANY_CHARACTER, true); + JsonParser jsonParser; + + try { + jsonParser = jf.createParser(quoted); + while (jsonParser.isClosed() == false) { + jsonParser.nextToken(); + } + } catch (JsonParseException e) { + throw new InvalidUserDataException( + "Invalid json in " + + name + + ". The error is:\n" + + e.getMessage() + + ".\n" + + "After substitutions and munging, the json looks like:\n" + + quoted, + e + ); + } catch (IOException e) { + throw new RuntimeException(e); + } + } + } + + @Override + public String toString() { + String result = path + "[" + start + ":" + end + "]"; + if (language != null) { + result += "(" + language + ")"; + } + if (console != null) { + result += console ?
"// CONSOLE" : "// NOTCONSOLE"; + } + if (test) { + result += "// TEST"; + if (catchPart != null) { + result += "[catch: " + catchPart + "]"; + } + if (skip != null) { + result += "[skip=" + skip + "]"; + } + if (continued) { + result += "[continued]"; + } + if (setup != null) { + result += "[setup:" + setup + "]"; + } + if (teardown != null) { + result += "[teardown:" + teardown + "]"; + } + for (String warning : warnings) { + result += "[warning:" + warning + "]"; + } + if (skipShardsFailures) { + result += "[skip_shard_failures]"; + } + } + if (testResponse) { + result += "// TESTRESPONSE"; + if (skip != null) { + result += "[skip=" + skip + "]"; + } + } + if (testSetup) { + result += "// TESTSETUP"; + } + if (curl) { + result += "(curl)"; + } + return result; + } + + /** + * Is this snippet a candidate for conversion to `// CONSOLE`? + */ + boolean isConsoleCandidate() { + /* Snippets that are responses or already marked as `// CONSOLE` or + * `// NOTCONSOLE` are not candidates. */ + if (console != null || testResponse) { + return false; + } + /* js snippets almost always should be marked with `// CONSOLE`. js + * snippets that shouldn't be marked `// CONSOLE`, like examples for + * js client, should always be marked with `// NOTCONSOLE`. + * + * `sh` snippets that contain `curl` almost always should be marked + * with `// CONSOLE`. In the exceptionally rare cases where they are + * not communicating with Elasticsearch, like the examples in the ec2 + * and gce discovery plugins, the snippets should be marked + * `// NOTCONSOLE`. 
*/ + return language.equals("js") || curl; + } + +} diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/doc/SnippetParser.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/doc/SnippetParser.java new file mode 100644 index 0000000000000..064c1c460febf --- /dev/null +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/doc/SnippetParser.java @@ -0,0 +1,17 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.gradle.internal.doc; + +import java.io.File; +import java.util.List; +import java.util.Map; + +public interface SnippetParser { + List parseDoc(File rootDir, File docFile, List> substitutions); +} diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/doc/Source.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/doc/Source.java new file mode 100644 index 0000000000000..b7f2f01aa7987 --- /dev/null +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/doc/Source.java @@ -0,0 +1,21 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. 
+ */ + +package org.elasticsearch.gradle.internal.doc; + +public final class Source { + boolean matches; + String language; + String name; + + public Source(boolean matches, String language, String name) { + this.matches = matches; + this.language = language; + this.name = name; + } +} diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/toolchain/AdoptiumJdkToolchainResolver.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/toolchain/AdoptiumJdkToolchainResolver.java index 89a40711c9a19..0270ee22ca8c5 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/toolchain/AdoptiumJdkToolchainResolver.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/toolchain/AdoptiumJdkToolchainResolver.java @@ -11,6 +11,7 @@ import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.ObjectMapper; +import org.apache.commons.compress.utils.Lists; import org.gradle.jvm.toolchain.JavaLanguageVersion; import org.gradle.jvm.toolchain.JavaToolchainDownload; import org.gradle.jvm.toolchain.JavaToolchainRequest; @@ -20,17 +21,17 @@ import java.io.IOException; import java.net.URI; import java.net.URL; +import java.util.Comparator; import java.util.Map; import java.util.Optional; import java.util.concurrent.ConcurrentHashMap; -import java.util.stream.StreamSupport; import static org.gradle.jvm.toolchain.JavaToolchainDownload.fromUri; public abstract class AdoptiumJdkToolchainResolver extends AbstractCustomJavaToolchainResolver { // package protected for better testing - final Map> CACHED_RELEASES = new ConcurrentHashMap<>(); + final Map> CACHED_SEMVERS = new ConcurrentHashMap<>(); @Override public Optional resolve(JavaToolchainRequest request) { @@ -38,7 +39,7 @@ public Optional resolve(JavaToolchainRequest request) { return Optional.empty(); } AdoptiumVersionRequest versionRequestKey = toVersionRequest(request); - Optional versionInfo = 
CACHED_RELEASES.computeIfAbsent( + Optional versionInfo = CACHED_SEMVERS.computeIfAbsent( versionRequestKey, (r) -> resolveAvailableVersion(versionRequestKey) ); @@ -53,12 +54,12 @@ private AdoptiumVersionRequest toVersionRequest(JavaToolchainRequest request) { return new AdoptiumVersionRequest(platform, arch, javaLanguageVersion); } - private Optional resolveAvailableVersion(AdoptiumVersionRequest requestKey) { + private Optional resolveAvailableVersion(AdoptiumVersionRequest requestKey) { ObjectMapper mapper = new ObjectMapper(); try { int languageVersion = requestKey.languageVersion.asInt(); URL source = new URL( - "https://api.adoptium.net/v3/info/release_names?architecture=" + "https://api.adoptium.net/v3/info/release_versions?architecture=" + requestKey.arch + "&image_type=jdk&os=" + requestKey.platform @@ -70,8 +71,14 @@ private Optional resolveAvailableVersion(AdoptiumVersionRequest requestK + ")" ); JsonNode jsonNode = mapper.readTree(source); - JsonNode versionsNode = jsonNode.get("releases"); - return StreamSupport.stream(versionsNode.spliterator(), false).map(JsonNode::textValue).findFirst(); + JsonNode versionsNode = jsonNode.get("versions"); + return Optional.of( + Lists.newArrayList(versionsNode.iterator()) + .stream() + .map(this::toVersionInfo) + .max(Comparator.comparing(AdoptiumVersionInfo::semver)) + .get() + ); } catch (FileNotFoundException e) { // request combo not supported (e.g. 
aarch64 + windows return Optional.empty(); @@ -80,10 +87,21 @@ private Optional resolveAvailableVersion(AdoptiumVersionRequest requestK } } - private URI resolveDownloadURI(AdoptiumVersionRequest request, String version) { + private AdoptiumVersionInfo toVersionInfo(JsonNode node) { + return new AdoptiumVersionInfo( + node.get("build").asInt(), + node.get("major").asInt(), + node.get("minor").asInt(), + node.get("openjdk_version").asText(), + node.get("security").asInt(), + node.get("semver").asText() + ); + } + + private URI resolveDownloadURI(AdoptiumVersionRequest request, AdoptiumVersionInfo versionInfo) { return URI.create( - "https://api.adoptium.net/v3/binary/version/" - + version + "https://api.adoptium.net/v3/binary/version/jdk-" + + versionInfo.semver + "/" + request.platform + "/" @@ -100,5 +118,7 @@ private boolean requestIsSupported(JavaToolchainRequest request) { return anyVendorOr(request.getJavaToolchainSpec().getVendor().get(), JvmVendorSpec.ADOPTIUM); } + record AdoptiumVersionInfo(int build, int major, int minor, String openjdkVersion, int security, String semver) {} + record AdoptiumVersionRequest(String platform, String arch, JavaLanguageVersion languageVersion) {} } diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/toolchain/OracleOpenJdkToolchainResolver.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/toolchain/OracleOpenJdkToolchainResolver.java index 162895fd486cf..818cb040c172e 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/toolchain/OracleOpenJdkToolchainResolver.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/toolchain/OracleOpenJdkToolchainResolver.java @@ -39,7 +39,11 @@ record JdkBuild(JavaLanguageVersion languageVersion, String version, String buil ); // package private so it can be replaced by tests - List builds = List.of(getBundledJdkBuild()); + List builds = List.of( + getBundledJdkBuild(), + // 22 
release candidate + new JdkBuild(JavaLanguageVersion.of(22), "22", "36", "830ec9fcccef480bb3e73fb7ecafe059") + ); private JdkBuild getBundledJdkBuild() { String bundledJdkVersion = VersionProperties.getBundledJdkVersion(); diff --git a/build-tools-internal/src/test/groovy/org/elasticsearch/gradle/internal/doc/AsciidocParserSpec.groovy b/build-tools-internal/src/test/groovy/org/elasticsearch/gradle/internal/doc/AsciidocParserSpec.groovy new file mode 100644 index 0000000000000..b7ac363ef7ad3 --- /dev/null +++ b/build-tools-internal/src/test/groovy/org/elasticsearch/gradle/internal/doc/AsciidocParserSpec.groovy @@ -0,0 +1,184 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.gradle.internal.doc; + +import spock.lang.Specification +import spock.lang.Unroll + +import org.gradle.api.InvalidUserDataException + +import static org.elasticsearch.gradle.internal.doc.AsciidocSnippetParser.finalizeSnippet; +import static org.elasticsearch.gradle.internal.doc.AsciidocSnippetParser.matchSource; + +class AsciidocParserSpec extends Specification { + + def testMatchSource() { + expect: + with(matchSource("[source,console]")) { + matches == true + language == "console" + name == null + } + + with(matchSource("[source,console,id=snippet-name-1]")) { + matches == true + language == "console" + name == "snippet-name-1" + } + + with(matchSource("[source, console, id=snippet-name-1]")) { + matches == true + language == "console" + name == "snippet-name-1" + } + + with(matchSource("[source, console, id=snippet-name-1]")) { + matches == true + language == "console" + name == "snippet-name-1" + } + + 
with(matchSource("[source,console,attr=5,id=snippet-name-1,attr2=6]")) { + matches == true + language == "console" + name == "snippet-name-1" + } + + with(matchSource("[source,console, attr=5, id=snippet-name-1, attr2=6]")) { + matches == true + language == "console" + name == "snippet-name-1" + } + + with(matchSource("[\"source\",\"console\",id=\"snippet-name-1\"]")) { + matches == true + language == "console" + name == "snippet-name-1" + } + + with(matchSource("[source,console,id=\"snippet-name-1\"]")) { + matches == true + language == "console" + name == "snippet-name-1" + } + with(matchSource("[source.merge.styled,esql]")) { + matches == true + language == "esql" + } + + with(matchSource("[source.merge.styled,foo-bar]")) { + matches == true + language == "foo-bar" + } + } + + @Unroll + def "checks for valid json for #languageParam"() { + given: + def snippet = snippet() { + language = languageParam + testResponse = true + } + def json = """{ + "name": "John Doe", + "age": 30, + "isMarried": true, + "address": { + "street": "123 Main Street", + "city": "Springfield", + "state": "IL", + "zip": "62701" + }, + "hobbies": ["Reading", "Cooking", "Traveling"] +}""" + when: + def result = finalizeSnippet(snippet, json, [:], [:].entrySet()) + then: + result != null + + when: + finalizeSnippet(snippet, "some no valid json", [:], [:].entrySet()) + then: + def e = thrown(InvalidUserDataException) + e.message.contains("Invalid json in") + + when: + snippet.skip = "true" + result = finalizeSnippet(snippet, "some no valid json", [:], [:].entrySet()) + then: + result != null + + where: + languageParam << ["js", "console-result"] + } + + def "test finalized snippet handles substitutions"() { + given: + def snippet = snippet() { + language = "console" + } + when: + finalizeSnippet(snippet, "snippet-content substDefault subst", [substDefault: "\$body"], [subst: 'substValue'].entrySet()) + then: + snippet.contents == "snippet-content \$body substValue" + } + + def 
snippetMustHaveLanguage() { + given: + def snippet = snippet() + when: + finalizeSnippet(snippet, "snippet-content", [:], []) + then: + def e = thrown(InvalidUserDataException) + e.message.contains("Snippet missing a language.") + } + + def testEmit() { + given: + def snippet = snippet() { + language = "console" + } + when: + finalizeSnippet(snippet, "snippet-content", [:], []) + then: + snippet.contents == "snippet-content" + } + + def testSnippetsWithCurl() { + given: + def snippet = snippet() { + language = "sh" + name = "snippet-name-1" + } + when: + finalizeSnippet(snippet, "curl substDefault subst", [:], [:].entrySet()) + then: + snippet.curl == true + } + + def "test snippets with no curl no console"() { + given: + def snippet = snippet() { + console = false + language = "shell" + } + when: + finalizeSnippet(snippet, "hello substDefault subst", [:], [:].entrySet()) + then: + def e = thrown(InvalidUserDataException) + e.message.contains("No need for NOTCONSOLE if snippet doesn't contain `curl`") + } + + Snippet snippet(Closure configClosure = {}) { + def snippet = new Snippet(new File("SomePath").toPath(), 0, "snippet-name-1") + configClosure.delegate = snippet + configClosure() + return snippet + } +} diff --git a/build-tools-internal/src/test/groovy/org/elasticsearch/gradle/internal/doc/DocSnippetTaskSpec.groovy b/build-tools-internal/src/test/groovy/org/elasticsearch/gradle/internal/doc/DocSnippetTaskSpec.groovy new file mode 100644 index 0000000000000..96888357d8433 --- /dev/null +++ b/build-tools-internal/src/test/groovy/org/elasticsearch/gradle/internal/doc/DocSnippetTaskSpec.groovy @@ -0,0 +1,575 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. 
+ */ + +package org.elasticsearch.gradle.internal.doc + +import spock.lang.Specification +import spock.lang.TempDir + +import org.gradle.api.InvalidUserDataException +import org.gradle.testfixtures.ProjectBuilder + +import static org.elasticsearch.gradle.internal.test.TestUtils.normalizeString + +class DocSnippetTaskSpec extends Specification { + + @TempDir + File tempDir + + def "handling test parsing multiple snippets per file"() { + given: + def project = ProjectBuilder.builder().build() + def task = project.tasks.register("docSnippetTask", DocSnippetTask).get() + when: + def substitutions = [] + def snippets = task.parseDocFile( + tempDir, docFile( + """ +[[mapper-annotated-text]] +=== Mapper annotated text plugin + +experimental[] + +The mapper-annotated-text plugin provides the ability to index text that is a +combination of free-text and special markup that is typically used to identify +items of interest such as people or organisations (see NER or Named Entity Recognition +tools). + + +The elasticsearch markup allows one or more additional tokens to be injected, unchanged, into the token +stream at the same position as the underlying text it annotates. + +:plugin_name: mapper-annotated-text +include::install_remove.asciidoc[] + +[[mapper-annotated-text-usage]] +==== Using the `annotated-text` field + +The `annotated-text` tokenizes text content as per the more common {ref}/text.html[`text`] field (see +"limitations" below) but also injects any marked-up annotation tokens directly into +the search index: + +[source,console] +-------------------------- +PUT my-index-000001 +{ + "mappings": { + "properties": { + "my_field": { + "type": "annotated_text" + } + } + } +} +-------------------------- + +Such a mapping would allow marked-up text eg wikipedia articles to be indexed as both text +and structured tokens. The annotations use a markdown-like syntax using URL encoding of +one or more values separated by the `&` symbol. 
+ + +We can use the "_analyze" api to test how an example annotation would be stored as tokens +in the search index: + + +[source,js] +-------------------------- +GET my-index-000001/_analyze +{ + "field": "my_field", + "text":"Investors in [Apple](Apple+Inc.) rejoiced." +} +-------------------------- +// NOTCONSOLE + +Response: + +[source,js] +-------------------------------------------------- +{ + "tokens": [ + { + "token": "investors", + "start_offset": 0, + "end_offset": 9, + "type": "", + "position": 0 + }, + { + "token": "in", + "start_offset": 10, + "end_offset": 12, + "type": "", + "position": 1 + }, + { + "token": "Apple Inc.", <1> + "start_offset": 13, + "end_offset": 18, + "type": "annotation", + "position": 2 + }, + { + "token": "apple", + "start_offset": 13, + "end_offset": 18, + "type": "", + "position": 2 + }, + { + "token": "rejoiced", + "start_offset": 19, + "end_offset": 27, + "type": "", + "position": 3 + } + ] +} +-------------------------------------------------- +// NOTCONSOLE + +<1> Note the whole annotation token `Apple Inc.` is placed, unchanged as a single token in +the token stream and at the same position (position 2) as the text token (`apple`) it annotates. + + +We can now perform searches for annotations using regular `term` queries that don't tokenize +the provided search values. Annotations are a more precise way of matching as can be seen +in this example where a search for `Beck` will not match `Jeff Beck` : + +[source,console] +-------------------------- +# Example documents +PUT my-index-000001/_doc/1 +{ + "my_field": "[Beck](Beck) announced a new tour"<1> +} + +PUT my-index-000001/_doc/2 +{ + "my_field": "[Jeff Beck](Jeff+Beck&Guitarist) plays a strat"<2> +} + +# Example search +GET my-index-000001/_search +{ + "query": { + "term": { + "my_field": "Beck" <3> + } + } +} +-------------------------- + +<1> As well as tokenising the plain text into single words e.g. 
`beck`, here we +inject the single token value `Beck` at the same position as `beck` in the token stream. +<2> Note annotations can inject multiple tokens at the same position - here we inject both +the very specific value `Jeff Beck` and the broader term `Guitarist`. This enables +broader positional queries e.g. finding mentions of a `Guitarist` near to `strat`. +<3> A benefit of searching with these carefully defined annotation tokens is that a query for +`Beck` will not match document 2 that contains the tokens `jeff`, `beck` and `Jeff Beck` + +WARNING: Any use of `=` signs in annotation values eg `[Prince](person=Prince)` will +cause the document to be rejected with a parse failure. In future we hope to have a use for +the equals signs so wil actively reject documents that contain this today. + + +[[mapper-annotated-text-tips]] +==== Data modelling tips +===== Use structured and unstructured fields + +Annotations are normally a way of weaving structured information into unstructured text for +higher-precision search. + +`Entity resolution` is a form of document enrichment undertaken by specialist software or people +where references to entities in a document are disambiguated by attaching a canonical ID. +The ID is used to resolve any number of aliases or distinguish between people with the +same name. The hyperlinks connecting Wikipedia's articles are a good example of resolved +entity IDs woven into text. 
+ +These IDs can be embedded as annotations in an annotated_text field but it often makes +sense to include them in dedicated structured fields to support discovery via aggregations: + +[source,console] +-------------------------- +PUT my-index-000001 +{ + "mappings": { + "properties": { + "my_unstructured_text_field": { + "type": "annotated_text" + }, + "my_structured_people_field": { + "type": "text", + "fields": { + "keyword" : { + "type": "keyword" + } + } + } + } + } +} +-------------------------- + +Applications would then typically provide content and discover it as follows: + +[source,console] +-------------------------- +# Example documents +PUT my-index-000001/_doc/1 +{ + "my_unstructured_text_field": "[Shay](%40kimchy) created elasticsearch", + "my_twitter_handles": ["@kimchy"] <1> +} + +GET my-index-000001/_search +{ + "query": { + "query_string": { + "query": "elasticsearch OR logstash OR kibana",<2> + "default_field": "my_unstructured_text_field" + } + }, + "aggregations": { + \t"top_people" :{ + \t "significant_terms" : { <3> +\t "field" : "my_twitter_handles.keyword" + \t } + \t} + } +} +-------------------------- + +<1> Note the `my_twitter_handles` contains a list of the annotation values +also used in the unstructured text. (Note the annotated_text syntax requires escaping). +By repeating the annotation values in a structured field this application has ensured that +the tokens discovered in the structured field can be used for search and highlighting +in the unstructured field. +<2> In this example we search for documents that talk about components of the elastic stack +<3> We use the `my_twitter_handles` field here to discover people who are significantly +associated with the elastic stack. + +===== Avoiding over-matching annotations +By design, the regular text tokens and the annotation tokens co-exist in the same indexed +field but in rare cases this can lead to some over-matching. 
+ +The value of an annotation often denotes a _named entity_ (a person, place or company). +The tokens for these named entities are inserted untokenized, and differ from typical text +tokens because they are normally: + +* Mixed case e.g. `Madonna` +* Multiple words e.g. `Jeff Beck` +* Can have punctuation or numbers e.g. `Apple Inc.` or `@kimchy` + +This means, for the most part, a search for a named entity in the annotated text field will +not have any false positives e.g. when selecting `Apple Inc.` from an aggregation result +you can drill down to highlight uses in the text without "over matching" on any text tokens +like the word `apple` in this context: + + the apple was very juicy + +However, a problem arises if your named entity happens to be a single term and lower-case e.g. the +company `elastic`. In this case, a search on the annotated text field for the token `elastic` +may match a text document such as this: + + they fired an elastic band + +To avoid such false matches users should consider prefixing annotation values to ensure +they don't name clash with text tokens e.g. 
+ + [elastic](Company_elastic) released version 7.0 of the elastic stack today + + + + +[[mapper-annotated-text-highlighter]] +==== Using the `annotated` highlighter + +The `annotated-text` plugin includes a custom highlighter designed to mark up search hits +in a way which is respectful of the original markup: + +[source,console] +-------------------------- +# Example documents +PUT my-index-000001/_doc/1 +{ + "my_field": "The cat sat on the [mat](sku3578)" +} + +GET my-index-000001/_search +{ + "query": { + "query_string": { + "query": "cats" + } + }, + "highlight": { + "fields": { + "my_field": { + "type": "annotated", <1> + "require_field_match": false + } + } + } +} +-------------------------- + +<1> The `annotated` highlighter type is designed for use with annotated_text fields + +The annotated highlighter is based on the `unified` highlighter and supports the same +settings but does not use the `pre_tags` or `post_tags` parameters. Rather than using +html-like markup such as `cat` the annotated highlighter uses the same +markdown-like syntax used for annotations and injects a key=value annotation where `_hit_term` +is the key and the matched search term is the value e.g. + + The [cat](_hit_term=cat) sat on the [mat](sku3578) + +The annotated highlighter tries to be respectful of any existing markup in the original +text: + +* If the search term matches exactly the location of an existing annotation then the +`_hit_term` key is merged into the url-like syntax used in the `(...)` part of the +existing annotation. +* However, if the search term overlaps the span of an existing annotation it would break +the markup formatting so the original annotation is removed in favour of a new annotation +with just the search hit information in the results. 
+* Any non-overlapping annotations in the original text are preserved in highlighter +selections + + +[[mapper-annotated-text-limitations]] +==== Limitations + +The annotated_text field type supports the same mapping settings as the `text` field type +but with the following exceptions: + +* No support for `fielddata` or `fielddata_frequency_filter` +* No support for `index_prefixes` or `index_phrases` indexing + +""" + ), substitutions + ) + then: + snippets*.test == [false, false, false, false, false, false, false] + snippets*.catchPart == [null, null, null, null, null, null, null] + } + + def "handling test parsing"() { + when: + def substitutions = [] + def snippets = task().parseDocFile( + tempDir, docFile( + """ +[source,console] +---- +POST logs-my_app-default/_rollover/ +---- +// TEST[s/_explain\\/1/_explain\\/1?error_trace=false/ catch:/painless_explain_error/] +""" + ), substitutions + ) + then: + snippets*.test == [true] + snippets*.catchPart == ["/painless_explain_error/"] + substitutions.size() == 1 + substitutions[0].key == "_explain\\/1" + substitutions[0].value == "_explain\\/1?error_trace=false" + + when: + substitutions = [] + snippets = task().parseDocFile( + tempDir, docFile( + """ + +[source,console] +---- +PUT _snapshot/my_hdfs_repository +{ + "type": "hdfs", + "settings": { + "uri": "hdfs://namenode:8020/", + "path": "elasticsearch/repositories/my_hdfs_repository", + "conf.dfs.client.read.shortcircuit": "true" + } +} +---- +// TEST[skip:we don't have hdfs set up while testing this] +""" + ), substitutions + ) + then: + snippets*.test == [true] + snippets*.skip == ["we don't have hdfs set up while testing this"] + } + + def "handling testresponse parsing"() { + when: + def substitutions = [] + def snippets = task().parseDocFile( + tempDir, docFile( + """ +[source,console] +---- +POST logs-my_app-default/_rollover/ +---- +// TESTRESPONSE[s/\\.\\.\\./"script_stack": \$body.error.caused_by.script_stack, "script": \$body.error.caused_by.script, 
"lang": \$body.error.caused_by.lang, "position": \$body.error.caused_by.position, "caused_by": \$body.error.caused_by.caused_by, "reason": \$body.error.caused_by.reason/] +""" + ), substitutions + ) + then: + snippets*.test == [false] + snippets*.testResponse == [true] + substitutions.size() == 1 + substitutions[0].key == "\\.\\.\\." + substitutions[0].value == + "\"script_stack\": \$body.error.caused_by.script_stack, \"script\": \$body.error.caused_by.script, \"lang\": \$body.error.caused_by.lang, \"position\": \$body.error.caused_by.position, \"caused_by\": \$body.error.caused_by.caused_by, \"reason\": \$body.error.caused_by.reason" + + when: + snippets = task().parseDocFile( + tempDir, docFile( + """ +[source,console] +---- +POST logs-my_app-default/_rollover/ +---- +// TESTRESPONSE[skip:no setup made for this example yet] +""" + ), [] + ) + then: + snippets*.test == [false] + snippets*.testResponse == [true] + snippets*.skip == ["no setup made for this example yet"] + + when: + substitutions = [] + snippets = task().parseDocFile( + tempDir, docFile( + """ +[source,txt] +--------------------------------------------------------------------------- +my-index-000001 0 p RELOCATING 3014 31.1mb 192.168.56.10 H5dfFeA -> -> 192.168.56.30 bGG90GE +--------------------------------------------------------------------------- +// TESTRESPONSE[non_json] +""" + ), substitutions + ) + then: + snippets*.test == [false] + snippets*.testResponse == [true] + substitutions.size() == 4 + } + + + def "handling console parsing"() { + when: + def snippets = task().parseDocFile( + tempDir, docFile( + """ +[source,console] +---- + +// $firstToken +---- +""" + ), [] + ) + then: + snippets*.console == [firstToken.equals("CONSOLE")] + + + when: + task().parseDocFile( + tempDir, docFile( + """ +[source,console] +---- +// $firstToken +// $secondToken +---- +""" + ), [] + ) + then: + def e = thrown(InvalidUserDataException) + e.message == "mapping-charfilter.asciidoc:4: Can't be both CONSOLE 
and NOTCONSOLE" + + when: + task().parseDocFile( + tempDir, docFile( + """ +// $firstToken +// $secondToken +""" + ), [] + ) + then: + e = thrown(InvalidUserDataException) + e.message == "mapping-charfilter.asciidoc:1: $firstToken not paired with a snippet" + + where: + firstToken << ["CONSOLE", "NOTCONSOLE"] + secondToken << ["NOTCONSOLE", "CONSOLE"] + } + + def "test parsing snippet from doc"() { + def doc = docFile( + """ +[source,console] +---- +GET /_analyze +{ + "tokenizer": "keyword", + "char_filter": [ + { + "type": "mapping", + "mappings": [ + "٠ => 0", + "١ => 1", + "٢ => 2" + ] + } + ], + "text": "My license plate is empty" +} +---- +""" + ) + def snippets = task().parseDocFile(tempDir, doc, []) + expect: + snippets[0].start == 3 + snippets[0].language == "console" + normalizeString(snippets[0].contents, tempDir) == """GET /_analyze +{ + "tokenizer": "keyword", + "char_filter": [ + { + "type": "mapping", + "mappings": [ + "٠ => 0", + "١ => 1", + "٢ => 2" + ] + } + ], + "text": "My license plate is empty" +}""" + } + + File docFile(String docContent) { + def file = tempDir.toPath().resolve("mapping-charfilter.asciidoc").toFile() + file.text = docContent + return file + } + + + private DocSnippetTask task() { + ProjectBuilder.builder().build().tasks.register("docSnippetTask", DocSnippetTask).get() + } + +} diff --git a/build-tools-internal/src/test/groovy/org/elasticsearch/gradle/internal/doc/RestTestsFromDocSnippetTaskSpec.groovy b/build-tools-internal/src/test/groovy/org/elasticsearch/gradle/internal/doc/RestTestsFromDocSnippetTaskSpec.groovy new file mode 100644 index 0000000000000..6e86cba235886 --- /dev/null +++ b/build-tools-internal/src/test/groovy/org/elasticsearch/gradle/internal/doc/RestTestsFromDocSnippetTaskSpec.groovy @@ -0,0 +1,833 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.gradle.internal.doc + +import spock.lang.Specification +import spock.lang.TempDir + +import org.gradle.api.InvalidUserDataException +import org.gradle.testfixtures.ProjectBuilder + +import static org.elasticsearch.gradle.internal.doc.RestTestsFromDocSnippetTask.replaceBlockQuote +import static org.elasticsearch.gradle.internal.doc.RestTestsFromDocSnippetTask.shouldAddShardFailureCheck +import static org.elasticsearch.gradle.internal.test.TestUtils.normalizeString + +class RestTestsFromDocSnippetTaskSpec extends Specification { + + @TempDir + File tempDir; + + def "test simple block quote"() { + expect: + replaceBlockQuote("\"foo\": \"\"\"bort baz\"\"\"") == "\"foo\": \"bort baz\"" + } + + def "test multiple block quotes"() { + expect: + replaceBlockQuote("\"foo\": \"\"\"bort baz\"\"\", \"bar\": \"\"\"other\"\"\"") == "\"foo\": \"bort baz\", \"bar\": \"other\"" + } + + def "test escaping in block quote"() { + expect: + replaceBlockQuote("\"foo\": \"\"\"bort\" baz\"\"\"") == "\"foo\": \"bort\\\" baz\"" + replaceBlockQuote("\"foo\": \"\"\"bort\n baz\"\"\"") == "\"foo\": \"bort\\n baz\"" + } + + def "test invalid block quotes"() { + given: + String input = "\"foo\": \"\"\"bar\""; + when: + RestTestsFromDocSnippetTask.replaceBlockQuote(input); + then: + def e = thrown(InvalidUserDataException) + e.message == "Invalid block quote starting at 7 in:\n" + input + } + + def "test is doc write request"() { + expect: + shouldAddShardFailureCheck("doc-index/_search") == true + shouldAddShardFailureCheck("_cat") == false + shouldAddShardFailureCheck("_ml/datafeeds/datafeed-id/_preview") == false + } + + def "can create rest tests from docs"() { + def build = ProjectBuilder.builder().build() + given: + def task = 
build.tasks.create("restTestFromSnippet", RestTestsFromDocSnippetTask) + task.expectedUnconvertedCandidates = ["ml-update-snapshot.asciidoc", "reference/security/authorization/run-as-privilege.asciidoc"] + docs() + task.docs = build.fileTree(new File(tempDir, "docs")) + task.testRoot.convention(build.getLayout().buildDirectory.dir("rest-tests")); + + when: + task.getActions().forEach { it.execute(task) } + def restSpec = new File(task.getTestRoot().get().getAsFile(), "rest-api-spec/test/painless-debugging.yml") + + then: + restSpec.exists() + normalizeString(restSpec.text, tempDir) == """--- +"line_22": + - skip: + features: + - default_shards + - stash_in_key + - stash_in_path + - stash_path_replace + - warnings + - do: + raw: + method: PUT + path: "hockey/_doc/1" + refresh: "" + body: | + {"first":"johnny","last":"gaudreau","goals":[9,27,1],"assists":[17,46,0],"gp":[26,82,1]} + - is_false: _shards.failures + - do: + catch: /painless_explain_error/ + raw: + method: POST + path: "hockey/_explain/1" + error_trace: "false" + body: | + { + "query": { + "script": { + "script": "Debug.explain(doc.goals)" + } + } + } + - is_false: _shards.failures + - match: + \$body: + { + "error": { + "type": "script_exception", + "to_string": "[1, 9, 27]", + "painless_class": "org.elasticsearch.index.fielddata.ScriptDocValues.Longs", + "java_class": "org.elasticsearch.index.fielddata.ScriptDocValues\$Longs", + "script_stack": \$body.error.script_stack, "script": \$body.error.script, "lang": \$body.error.lang, "position": \$body.error.position, "caused_by": \$body.error.caused_by, "root_cause": \$body.error.root_cause, "reason": \$body.error.reason + }, + "status": 400 + } + - do: + catch: /painless_explain_error/ + raw: + method: POST + path: "hockey/_update/1" + error_trace: "false" + body: | + { + "script": "Debug.explain(ctx._source)" + } + - is_false: _shards.failures + - match: + \$body: + { + "error" : { + "root_cause": \$body.error.root_cause, + "type": 
"illegal_argument_exception", + "reason": "failed to execute script", + "caused_by": { + "type": "script_exception", + "to_string": \$body.error.caused_by.to_string, + "painless_class": "java.util.LinkedHashMap", + "java_class": "java.util.LinkedHashMap", + "script_stack": \$body.error.caused_by.script_stack, "script": \$body.error.caused_by.script, "lang": \$body.error.caused_by.lang, "position": \$body.error.caused_by.position, "caused_by": \$body.error.caused_by.caused_by, "reason": \$body.error.caused_by.reason + } + }, + "status": 400 + }""" + def restSpec2 = new File(task.testRoot.get().getAsFile(), "rest-api-spec/test/ml-update-snapshot.yml") + restSpec2.exists() + normalizeString(restSpec2.text, tempDir) == """--- +"line_50": + - skip: + features: + - default_shards + - stash_in_key + - stash_in_path + - stash_path_replace + - warnings + - always_skip + reason: todo + - do: + raw: + method: POST + path: "_ml/anomaly_detectors/it_ops_new_logs/model_snapshots/1491852978/_update" + body: | + { + "description": "Snapshot 1", + "retain": true + } + - is_false: _shards.failures""" + def restSpec3 = new File(task.testRoot.get().getAsFile(), "rest-api-spec/test/reference/sql/getting-started.yml") + restSpec3.exists() + normalizeString(restSpec3.text, tempDir) == """--- +"line_10": + - skip: + features: + - default_shards + - stash_in_key + - stash_in_path + - stash_path_replace + - warnings + - do: + raw: + method: PUT + path: "library/_bulk" + refresh: "" + body: | + {"index":{"_id": "Leviathan Wakes"}} + {"name": "Leviathan Wakes", "author": "James S.A. 
Corey", "release_date": "2011-06-02", "page_count": 561} + {"index":{"_id": "Hyperion"}} + {"name": "Hyperion", "author": "Dan Simmons", "release_date": "1989-05-26", "page_count": 482} + {"index":{"_id": "Dune"}} + {"name": "Dune", "author": "Frank Herbert", "release_date": "1965-06-01", "page_count": 604} + - is_false: _shards.failures + - do: + raw: + method: POST + path: "_sql" + format: "txt" + body: | + { + "query": "SELECT * FROM library WHERE release_date < '2000-01-01'" + } + - is_false: _shards.failures + - match: + \$body: + / /s+author /s+/| /s+name /s+/| /s+page_count /s+/| /s+release_date/s* + ---------------/+---------------/+---------------/+------------------------/s* + Dan /s+Simmons /s+/|Hyperion /s+/|482 /s+/|1989-05-26T00:00:00.000Z/s* + Frank /s+Herbert /s+/|Dune /s+/|604 /s+/|1965-06-01T00:00:00.000Z/s*/""" + def restSpec4 = new File(task.testRoot.get().getAsFile(), "rest-api-spec/test/reference/security/authorization/run-as-privilege.yml") + restSpec4.exists() + normalizeString(restSpec4.text, tempDir) == """--- +"line_51": + - skip: + features: + - default_shards + - stash_in_key + - stash_in_path + - stash_path_replace + - warnings + - do: + raw: + method: POST + path: "_security/role/my_director" + refresh: "true" + body: | + { + "cluster": ["manage"], + "indices": [ + { + "names": [ "index1", "index2" ], + "privileges": [ "manage" ] + } + ], + "run_as": [ "jacknich", "rdeniro" ], + "metadata" : { + "version" : 1 + } + } + - is_false: _shards.failures +--- +"line_114": + - skip: + features: + - default_shards + - stash_in_key + - stash_in_path + - stash_path_replace + - warnings + - do: + raw: + method: POST + path: "_security/role/my_admin_role" + refresh: "true" + body: | + { + "cluster": ["manage"], + "indices": [ + { + "names": [ "index1", "index2" ], + "privileges": [ "manage" ] + } + ], + "applications": [ + { + "application": "myapp", + "privileges": [ "admin", "read" ], + "resources": [ "*" ] + } + ], + "run_as": [ "analyst_user" 
], + "metadata" : { + "version" : 1 + } + } + - is_false: _shards.failures +--- +"line_143": + - skip: + features: + - default_shards + - stash_in_key + - stash_in_path + - stash_path_replace + - warnings + - do: + raw: + method: POST + path: "_security/role/my_analyst_role" + refresh: "true" + body: | + { + "cluster": [ "monitor"], + "indices": [ + { + "names": [ "index1", "index2" ], + "privileges": ["manage"] + } + ], + "applications": [ + { + "application": "myapp", + "privileges": [ "read" ], + "resources": [ "*" ] + } + ], + "metadata" : { + "version" : 1 + } + } + - is_false: _shards.failures +--- +"line_170": + - skip: + features: + - default_shards + - stash_in_key + - stash_in_path + - stash_path_replace + - warnings + - do: + raw: + method: POST + path: "_security/user/admin_user" + refresh: "true" + body: | + { + "password": "l0ng-r4nd0m-p@ssw0rd", + "roles": [ "my_admin_role" ], + "full_name": "Eirian Zola", + "metadata": { "intelligence" : 7} + } + - is_false: _shards.failures +--- +"line_184": + - skip: + features: + - default_shards + - stash_in_key + - stash_in_path + - stash_path_replace + - warnings + - do: + raw: + method: POST + path: "_security/user/analyst_user" + refresh: "true" + body: | + { + "password": "l0nger-r4nd0mer-p@ssw0rd", + "roles": [ "my_analyst_role" ], + "full_name": "Monday Jaffe", + "metadata": { "innovation" : 8} + } + - is_false: _shards.failures""" +} + + File docFile(String fileName, String docContent) { + def file = tempDir.toPath().resolve(fileName).toFile() + file.parentFile.mkdirs() + file.text = docContent + return file + } + + + void docs() { + docFile( + "docs/reference/sql/getting-started.asciidoc", """ +[role="xpack"] +[[sql-getting-started]] +== Getting Started with SQL + +To start using {es-sql}, create +an index with some data to experiment with: + +[source,console] +-------------------------------------------------- +PUT /library/_bulk?refresh +{"index":{"_id": "Leviathan Wakes"}} +{"name": "Leviathan 
Wakes", "author": "James S.A. Corey", "release_date": "2011-06-02", "page_count": 561} +{"index":{"_id": "Hyperion"}} +{"name": "Hyperion", "author": "Dan Simmons", "release_date": "1989-05-26", "page_count": 482} +{"index":{"_id": "Dune"}} +{"name": "Dune", "author": "Frank Herbert", "release_date": "1965-06-01", "page_count": 604} +-------------------------------------------------- + +And now you can execute SQL using the <>: + +[source,console] +-------------------------------------------------- +POST /_sql?format=txt +{ + "query": "SELECT * FROM library WHERE release_date < '2000-01-01'" +} +-------------------------------------------------- +// TEST[continued] + +Which should return something along the lines of: + +[source,text] +-------------------------------------------------- + author | name | page_count | release_date +---------------+---------------+---------------+------------------------ +Dan Simmons |Hyperion |482 |1989-05-26T00:00:00.000Z +Frank Herbert |Dune |604 |1965-06-01T00:00:00.000Z +-------------------------------------------------- +// TESTRESPONSE[s/\\|/\\\\|/ s/\\+/\\\\+/] +// TESTRESPONSE[non_json] + +You can also use the <>. 
There is a script to start it +shipped in x-pack's bin directory: + +[source,bash] +-------------------------------------------------- +\$ ./bin/elasticsearch-sql-cli +-------------------------------------------------- + +From there you can run the same query: + +[source,sqlcli] +-------------------------------------------------- +sql> SELECT * FROM library WHERE release_date < '2000-01-01'; + author | name | page_count | release_date +---------------+---------------+---------------+------------------------ +Dan Simmons |Hyperion |482 |1989-05-26T00:00:00.000Z +Frank Herbert |Dune |604 |1965-06-01T00:00:00.000Z +-------------------------------------------------- +""" + ) + docFile( + "docs/ml-update-snapshot.asciidoc", + """ +[role="xpack"] +[[ml-update-snapshot]] += Update model snapshots API +++++ +Update model snapshots +++++ + +Updates certain properties of a snapshot. + +[[ml-update-snapshot-request]] +== {api-request-title} + +`POST _ml/anomaly_detectors//model_snapshots//_update` + +[[ml-update-snapshot-prereqs]] +== {api-prereq-title} + +Requires the `manage_ml` cluster privilege. This privilege is included in the +`machine_learning_admin` built-in role. + +[[ml-update-snapshot-path-parms]] +== {api-path-parms-title} + +``:: +(Required, string) +include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=job-id-anomaly-detection] + +``:: +(Required, string) +include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=snapshot-id] + +[[ml-update-snapshot-request-body]] +== {api-request-body-title} + +The following properties can be updated after the model snapshot is created: + +`description`:: +(Optional, string) A description of the model snapshot. 
+ +`retain`:: +(Optional, Boolean) +include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=retain] + + +[[ml-update-snapshot-example]] +== {api-examples-title} + +[source,console] +-------------------------------------------------- +POST +_ml/anomaly_detectors/it_ops_new_logs/model_snapshots/1491852978/_update +{ + "description": "Snapshot 1", + "retain": true +} +-------------------------------------------------- +// TEST[skip:todo] + +When the snapshot is updated, you receive the following results: +[source,js] +---- +{ + "acknowledged": true, + "model": { + "job_id": "it_ops_new_logs", + "timestamp": 1491852978000, + "description": "Snapshot 1", +... + "retain": true + } +} +---- + +""" + ) + + docFile( + "docs/painless-debugging.asciidoc", + """ + +[[painless-debugging]] +=== Painless Debugging + +==== Debug.Explain + +Painless doesn't have a +{wikipedia}/Read%E2%80%93eval%E2%80%93print_loop[REPL] +and while it'd be nice for it to have one day, it wouldn't tell you the +whole story around debugging painless scripts embedded in Elasticsearch because +the data that the scripts have access to or "context" is so important. For now +the best way to debug embedded scripts is by throwing exceptions at choice +places. While you can throw your own exceptions +(`throw new Exception('whatever')`), Painless's sandbox prevents you from +accessing useful information like the type of an object. So Painless has a +utility method, `Debug.explain` which throws the exception for you. For +example, you can use {ref}/search-explain.html[`_explain`] to explore the +context available to a {ref}/query-dsl-script-query.html[script query]. 
+ +[source,console] +--------------------------------------------------------- +PUT /hockey/_doc/1?refresh +{"first":"johnny","last":"gaudreau","goals":[9,27,1],"assists":[17,46,0],"gp":[26,82,1]} + +POST /hockey/_explain/1 +{ + "query": { + "script": { + "script": "Debug.explain(doc.goals)" + } + } +} +--------------------------------------------------------- +// TEST[s/_explain\\/1/_explain\\/1?error_trace=false/ catch:/painless_explain_error/] +// The test system sends error_trace=true by default for easier debugging so +// we have to override it to get a normal shaped response + +Which shows that the class of `doc.first` is +`org.elasticsearch.index.fielddata.ScriptDocValues.Longs` by responding with: + +[source,console-result] +--------------------------------------------------------- +{ + "error": { + "type": "script_exception", + "to_string": "[1, 9, 27]", + "painless_class": "org.elasticsearch.index.fielddata.ScriptDocValues.Longs", + "java_class": "org.elasticsearch.index.fielddata.ScriptDocValues\$Longs", + ... 
+ }, + "status": 400 +} +--------------------------------------------------------- +// TESTRESPONSE[s/\\.\\.\\./"script_stack": \$body.error.script_stack, "script": \$body.error.script, "lang": \$body.error.lang, "position": \$body.error.position, "caused_by": \$body.error.caused_by, "root_cause": \$body.error.root_cause, "reason": \$body.error.reason/] + +You can use the same trick to see that `_source` is a `LinkedHashMap` +in the `_update` API: + +[source,console] +--------------------------------------------------------- +POST /hockey/_update/1 +{ + "script": "Debug.explain(ctx._source)" +} +--------------------------------------------------------- +// TEST[continued s/_update\\/1/_update\\/1?error_trace=false/ catch:/painless_explain_error/] + +The response looks like: + +[source,console-result] +--------------------------------------------------------- +{ + "error" : { + "root_cause": ..., + "type": "illegal_argument_exception", + "reason": "failed to execute script", + "caused_by": { + "type": "script_exception", + "to_string": "{gp=[26, 82, 1], last=gaudreau, assists=[17, 46, 0], first=johnny, goals=[9, 27, 1]}", + "painless_class": "java.util.LinkedHashMap", + "java_class": "java.util.LinkedHashMap", + ... + } + }, + "status": 400 +} +--------------------------------------------------------- +// TESTRESPONSE[s/"root_cause": \\.\\.\\./"root_cause": \$body.error.root_cause/] +// TESTRESPONSE[s/\\.\\.\\./"script_stack": \$body.error.caused_by.script_stack, "script": \$body.error.caused_by.script, "lang": \$body.error.caused_by.lang, "position": \$body.error.caused_by.position, "caused_by": \$body.error.caused_by.caused_by, "reason": \$body.error.caused_by.reason/] +// TESTRESPONSE[s/"to_string": ".+"/"to_string": \$body.error.caused_by.to_string/] + +Once you have a class you can go to <> to see a list of +available methods. 
+ +""" + ) + docFile( + "docs/reference/security/authorization/run-as-privilege.asciidoc", + """[role="xpack"] +[[run-as-privilege]] += Submitting requests on behalf of other users + +{es} roles support a `run_as` privilege that enables an authenticated user to +submit requests on behalf of other users. For example, if your external +application is trusted to authenticate users, {es} can authenticate the external +application and use the _run as_ mechanism to issue authorized requests as +other users without having to re-authenticate each user. + +To "run as" (impersonate) another user, the first user (the authenticating user) +must be authenticated by a mechanism that supports run-as delegation. The second +user (the `run_as` user) must be authorized by a mechanism that supports +delegated run-as lookups by username. + +The `run_as` privilege essentially operates like a secondary form of +<>. Delegated authorization applies +to the authenticating user, and the `run_as` privilege applies to the user who +is being impersonated. + +Authenticating user:: +-- +For the authenticating user, the following realms (plus API keys) all support +`run_as` delegation: `native`, `file`, Active Directory, JWT, Kerberos, LDAP and +PKI. + +Service tokens, the {es} Token Service, SAML 2.0, and OIDC 1.0 do not +support `run_as` delegation. +-- + +`run_as` user:: +-- +{es} supports `run_as` for any realm that supports user lookup. +Not all realms support user lookup. Refer to the list of <> +and ensure that the realm you wish to use is configured in a manner that +supports user lookup. + +The `run_as` user must be retrieved from a <> - it is not +possible to run as a +<>, +<> or +<>. +-- + +To submit requests on behalf of other users, you need to have the `run_as` +privilege in your <>. 
For example, the following request +creates a `my_director` role that grants permission to submit request on behalf +of `jacknich` or `redeniro`: + +[source,console] +---- +POST /_security/role/my_director?refresh=true +{ + "cluster": ["manage"], + "indices": [ + { + "names": [ "index1", "index2" ], + "privileges": [ "manage" ] + } + ], + "run_as": [ "jacknich", "rdeniro" ], + "metadata" : { + "version" : 1 + } +} +---- + +To submit a request as another user, you specify the user in the +`es-security-runas-user` request header. For example: + +[source,sh] +---- +curl -H "es-security-runas-user: jacknich" -u es-admin -X GET http://localhost:9200/ +---- + +The `run_as` user passed in through the `es-security-runas-user` header must be +available from a realm that supports delegated authorization lookup by username. +Realms that don't support user lookup can't be used by `run_as` delegation from +other realms. + +For example, JWT realms can authenticate external users specified in JWTs, and +execute requests as a `run_as` user in the `native` realm. {es} will retrieve the +indicated `runas` user and execute the request as that user using their roles. + +[[run-as-privilege-apply]] +== Apply the `run_as` privilege to roles +You can apply the `run_as` privilege when creating roles with the +<>. Users who are assigned +a role that contains the `run_as` privilege inherit all privileges from their +role, and can also submit requests on behalf of the indicated users. + +NOTE: Roles for the authenticated user and the `run_as` user are not merged. If +a user authenticates without specifying the `run_as` parameter, only the +authenticated user's roles are used. If a user authenticates and their roles +include the `run_as` parameter, only the `run_as` user's roles are used. + +After a user successfully authenticates to {es}, an authorization process determines whether the user behind an incoming request is allowed to run +that request. 
If the authenticated user has the `run_as` privilege in their list +of permissions and specifies the run-as header, {es} _discards_ the authenticated +user and associated roles. It then looks in each of the configured realms in the +realm chain until it finds the username that's associated with the `run_as` user, +and uses those roles to execute any requests. + +Consider an admin role and an analyst role. The admin role has higher privileges, +but might also want to submit requests as another user to test and verify their +permissions. + +First, we'll create an admin role named `my_admin_role`. This role has `manage` +<> on the entire cluster, and on a subset of +indices. This role also contains the `run_as` privilege, which enables any user +with this role to submit requests on behalf of the specified `analyst_user`. + +[source,console] +---- +POST /_security/role/my_admin_role?refresh=true +{ + "cluster": ["manage"], + "indices": [ + { + "names": [ "index1", "index2" ], + "privileges": [ "manage" ] + } + ], + "applications": [ + { + "application": "myapp", + "privileges": [ "admin", "read" ], + "resources": [ "*" ] + } + ], + "run_as": [ "analyst_user" ], + "metadata" : { + "version" : 1 + } +} +---- + +Next, we'll create an analyst role named `my_analyst_role`, which has more +restricted `monitor` cluster privileges and `manage` privileges on a subset of +indices. + +[source,console] +---- +POST /_security/role/my_analyst_role?refresh=true +{ + "cluster": [ "monitor"], + "indices": [ + { + "names": [ "index1", "index2" ], + "privileges": ["manage"] + } + ], + "applications": [ + { + "application": "myapp", + "privileges": [ "read" ], + "resources": [ "*" ] + } + ], + "metadata" : { + "version" : 1 + } +} +---- + +We'll create an administrator user and assign them the role named `my_admin_role`, +which allows this user to submit requests as the `analyst_user`. 
+ +[source,console] +---- +POST /_security/user/admin_user?refresh=true +{ + "password": "l0ng-r4nd0m-p@ssw0rd", + "roles": [ "my_admin_role" ], + "full_name": "Eirian Zola", + "metadata": { "intelligence" : 7} +} +---- + +We can also create an analyst user and assign them the role named +`my_analyst_role`. + +[source,console] +---- +POST /_security/user/analyst_user?refresh=true +{ + "password": "l0nger-r4nd0mer-p@ssw0rd", + "roles": [ "my_analyst_role" ], + "full_name": "Monday Jaffe", + "metadata": { "innovation" : 8} +} +---- + +You can then authenticate to {es} as the `admin_user` or `analyst_user`. However, the `admin_user` could optionally submit requests on +behalf of the `analyst_user`. The following request authenticates to {es} with a +`Basic` authorization token and submits the request as the `analyst_user`: + +[source,sh] +---- +curl -s -X GET -H "Authorization: Basic YWRtaW5fdXNlcjpsMG5nLXI0bmQwbS1wQHNzdzByZA==" -H "es-security-runas-user: analyst_user" https://localhost:9200/_security/_authenticate +---- + +The response indicates that the `analyst_user` submitted this request, using the +`my_analyst_role` that's assigned to that user. When the `admin_user` submitted +the request, {es} authenticated that user, discarded their roles, and then used +the roles of the `run_as` user. + +[source,sh] +---- +{"username":"analyst_user","roles":["my_analyst_role"],"full_name":"Monday Jaffe","email":null, +"metadata":{"innovation":8},"enabled":true,"authentication_realm":{"name":"native", +"type":"native"},"lookup_realm":{"name":"native","type":"native"},"authentication_type":"realm"} +% +---- + +The `authentication_realm` and `lookup_realm` in the response both specify +the `native` realm because both the `admin_user` and `analyst_user` are from +that realm. If the two users are in different realms, the values for +`authentication_realm` and `lookup_realm` are different (such as `pki` and +`native`). 
+""" + ) + + } +} diff --git a/build-tools-internal/src/test/groovy/org/elasticsearch/gradle/internal/toolchain/AdoptiumJdkToolchainResolverSpec.groovy b/build-tools-internal/src/test/groovy/org/elasticsearch/gradle/internal/toolchain/AdoptiumJdkToolchainResolverSpec.groovy index fe4a644ddfc1d..6383d577f027f 100644 --- a/build-tools-internal/src/test/groovy/org/elasticsearch/gradle/internal/toolchain/AdoptiumJdkToolchainResolverSpec.groovy +++ b/build-tools-internal/src/test/groovy/org/elasticsearch/gradle/internal/toolchain/AdoptiumJdkToolchainResolverSpec.groovy @@ -11,6 +11,7 @@ package org.elasticsearch.gradle.internal.toolchain import org.gradle.api.services.BuildServiceParameters import org.gradle.jvm.toolchain.JavaLanguageVersion import org.gradle.jvm.toolchain.JavaToolchainResolver +import org.gradle.platform.OperatingSystem import static org.elasticsearch.gradle.internal.toolchain.AbstractCustomJavaToolchainResolver.toArchString import static org.elasticsearch.gradle.internal.toolchain.AbstractCustomJavaToolchainResolver.toOsString @@ -37,7 +38,12 @@ class AdoptiumJdkToolchainResolverSpec extends AbstractToolchainResolverSpec { toOsString(it[2], it[1]), toArchString(it[3]), languageVersion); - resolver.CACHED_RELEASES.put(request, Optional.of('jdk-' + languageVersion.asInt() + '.1.1.1+37.1')) + resolver.CACHED_SEMVERS.put(request, Optional.of(new AdoptiumJdkToolchainResolver.AdoptiumVersionInfo(languageVersion.asInt(), + 1, + 1, + "" + languageVersion.asInt() + ".1.1.1+37", + 0, "" + languageVersion.asInt() + ".1.1.1+37.1" + ))) } return resolver diff --git a/build-tools-internal/src/test/java/org/elasticsearch/gradle/internal/doc/RestTestFromSnippetsTaskTests.java b/build-tools-internal/src/test/java/org/elasticsearch/gradle/internal/doc/RestTestFromSnippetsTaskTests.java deleted file mode 100644 index 534134e78d40b..0000000000000 --- a/build-tools-internal/src/test/java/org/elasticsearch/gradle/internal/doc/RestTestFromSnippetsTaskTests.java +++ 
/dev/null @@ -1,57 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.gradle.internal.doc; - -import org.gradle.api.InvalidUserDataException; -import org.junit.Rule; -import org.junit.Test; -import org.junit.rules.ExpectedException; - -import static org.elasticsearch.gradle.internal.doc.RestTestsFromSnippetsTask.replaceBlockQuote; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; - -public class RestTestFromSnippetsTaskTests { - @Rule - public ExpectedException expectedEx = ExpectedException.none(); - - @Test - public void testInvalidBlockQuote() { - String input = "\"foo\": \"\"\"bar\""; - expectedEx.expect(InvalidUserDataException.class); - expectedEx.expectMessage("Invalid block quote starting at 7 in:\n" + input); - replaceBlockQuote(input); - } - - @Test - public void testSimpleBlockQuote() { - assertEquals("\"foo\": \"bort baz\"", replaceBlockQuote("\"foo\": \"\"\"bort baz\"\"\"")); - } - - @Test - public void testMultipleBlockQuotes() { - assertEquals( - "\"foo\": \"bort baz\", \"bar\": \"other\"", - replaceBlockQuote("\"foo\": \"\"\"bort baz\"\"\", \"bar\": \"\"\"other\"\"\"") - ); - } - - @Test - public void testEscapingInBlockQuote() { - assertEquals("\"foo\": \"bort\\\" baz\"", replaceBlockQuote("\"foo\": \"\"\"bort\" baz\"\"\"")); - assertEquals("\"foo\": \"bort\\n baz\"", replaceBlockQuote("\"foo\": \"\"\"bort\n baz\"\"\"")); - } - - @Test - public void testIsDocWriteRequest() { - assertTrue((boolean) RestTestsFromSnippetsTask.shouldAddShardFailureCheck("doc-index/_search")); - assertFalse((boolean) 
RestTestsFromSnippetsTask.shouldAddShardFailureCheck("_cat")); - assertFalse((boolean) RestTestsFromSnippetsTask.shouldAddShardFailureCheck("_ml/datafeeds/datafeed-id/_preview")); - } -} diff --git a/build-tools-internal/src/test/java/org/elasticsearch/gradle/internal/doc/SnippetsTaskTests.java b/build-tools-internal/src/test/java/org/elasticsearch/gradle/internal/doc/SnippetsTaskTests.java deleted file mode 100644 index 0acae6ca03297..0000000000000 --- a/build-tools-internal/src/test/java/org/elasticsearch/gradle/internal/doc/SnippetsTaskTests.java +++ /dev/null @@ -1,63 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.gradle.internal.doc; - -import org.junit.Test; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNull; -import static org.junit.Assert.assertTrue; - -public class SnippetsTaskTests { - - @Test - public void testMatchSource() { - SnippetsTask.Source source = SnippetsTask.matchSource("[source,console]"); - assertTrue(source.getMatches()); - assertEquals("console", source.getLanguage()); - assertNull(source.getName()); - - source = SnippetsTask.matchSource("[source,console,id=snippet-name-1]"); - assertTrue(source.getMatches()); - assertEquals("console", source.getLanguage()); - assertEquals("snippet-name-1", source.getName()); - - source = SnippetsTask.matchSource("[source, console, id=snippet-name-1]"); - assertTrue(source.getMatches()); - assertEquals("console", source.getLanguage()); - assertEquals("snippet-name-1", source.getName()); - - source = SnippetsTask.matchSource("[source,console,attr=5,id=snippet-name-1,attr2=6]"); - assertTrue(source.getMatches()); - 
assertEquals("console", source.getLanguage()); - assertEquals("snippet-name-1", source.getName()); - - source = SnippetsTask.matchSource("[source,console, attr=5, id=snippet-name-1, attr2=6]"); - assertTrue(source.getMatches()); - assertEquals("console", source.getLanguage()); - assertEquals("snippet-name-1", source.getName()); - - source = SnippetsTask.matchSource("[\"source\",\"console\",id=\"snippet-name-1\"]"); - assertTrue(source.getMatches()); - assertEquals("console", source.getLanguage()); - assertEquals("snippet-name-1", source.getName()); - - source = SnippetsTask.matchSource("[source,console,id=\"snippet-name-1\"]"); - assertTrue(source.getMatches()); - assertEquals("console", source.getLanguage()); - assertEquals("snippet-name-1", source.getName()); - - source = SnippetsTask.matchSource("[source.merge.styled,esql]"); - assertTrue(source.getMatches()); - assertEquals("esql", source.getLanguage()); - - source = SnippetsTask.matchSource("[source.merge.styled,foo-bar]"); - assertTrue(source.getMatches()); - assertEquals("foo-bar", source.getLanguage()); - } -} diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index a2e8651810042..0883097e75aad 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -2,7 +2,7 @@ elasticsearch = 8.14.0 lucene = 9.10.0 bundled_jdk_vendor = openjdk -bundled_jdk = 22+36@830ec9fcccef480bb3e73fb7ecafe059 +bundled_jdk = 21.0.2+13@f2283984656d49d69e91c558476027ac # optional dependencies spatial4j = 0.7 jts = 1.15.0 diff --git a/dev-tools/publish_zstd_binaries.sh b/dev-tools/publish_zstd_binaries.sh new file mode 100755 index 0000000000000..ad41353856c87 --- /dev/null +++ b/dev-tools/publish_zstd_binaries.sh @@ -0,0 +1,110 @@ +#!/usr/bin/env bash +# +# Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +# or more contributor license agreements. 
Licensed under the Elastic License +# 2.0 and the Server Side Public License, v 1; you may not use this file except +# in compliance with, at your election, the Elastic License 2.0 or the Server +# Side Public License, v 1. +# + +set -e + +if [ "$#" -ne 1 ]; then + printf 'Usage: %s \n' "$(basename "$0")" + exit 0; +fi + +if [ $(docker buildx inspect --bootstrap | grep -c 'Platforms:.*linux/arm64') -ne 1 ]; then + echo 'Error: No Docker support for linux/arm64 detected' + echo 'For more information see https://docs.docker.com/build/building/multi-platform' + exit 1; +fi + +if [ -z "$ARTIFACTORY_API_KEY" ]; then + echo 'Error: The ARTIFACTORY_API_KEY environment variable must be set.' + exit 1; +fi + +VERSION="$1" +ARTIFACTORY_REPOSITORY="${ARTIFACTORY_REPOSITORY:-https://artifactory.elastic.dev/artifactory/elasticsearch-native/}" +TEMP=$(mktemp -d) + +fetch_homebrew_artifact() { + DIGEST=$(curl -sS --retry 3 -H "Accept: application/vnd.oci.image.index.v1+json" -H "Authorization: Bearer QQ==" \ + --location "https://ghcr.io/v2/homebrew/core/zstd/manifests/$VERSION" | jq -r \ + ".manifests[] | select(.platform.os == \"darwin\" and .platform.architecture == \"$1\" and .platform.\"os.version\" == \"macOS 13\") | .annotations.\"sh.brew.bottle.digest\"") + + OUTPUT_FILE="$TEMP/zstd-$VERSION-darwin-$1.tar.gz" + curl -sS --retry 3 -H "Authorization: Bearer QQ==" --output "$OUTPUT_FILE" --location "https://ghcr.io/v2/homebrew/core/zstd/blobs/sha256:$DIGEST" + echo $OUTPUT_FILE +} + +download_license() { + curl -sS --retry 3 --location https://raw.githubusercontent.com/facebook/zstd/v${VERSION}/LICENSE --output $1 +} + +echo 'Downloading MacOS zstd binaries...' 
+DARWIN_ARM_BREW=$(fetch_homebrew_artifact 'arm64') +DARWIN_X86_BREW=$(fetch_homebrew_artifact 'amd64') + +build_darwin_jar() { + ARTIFACT="$TEMP/zstd-$VERSION-darwin-$2.jar" + TAR_DIR="$TEMP/darwin-$2" + mkdir $TAR_DIR + tar zxf $1 --strip-components=2 --include="*/LICENSE" --include="*/libzstd.$VERSION.dylib" -C $TAR_DIR && rm $1 + mv $TAR_DIR/lib/libzstd.$VERSION.dylib $TAR_DIR/libzstd.dylib && rm -rf $TAR_DIR/lib + FILE_COUNT=$(ls -1 $TAR_DIR | wc -l | xargs) + if [ "$FILE_COUNT" -ne 2 ]; then + >&2 echo "ERROR: Expected 2 files in $TAR_DIR but found $FILE_COUNT" + exit 1 + fi + (cd $TAR_DIR/../ && zip -rq - $(basename $TAR_DIR)) > $ARTIFACT && rm -rf $TAR_DIR + echo $ARTIFACT +} + +echo 'Building MacOS jars...' +DARWIN_ARM_JAR=$(build_darwin_jar $DARWIN_ARM_BREW "aarch64") +DARWIN_X86_JAR=$(build_darwin_jar $DARWIN_X86_BREW "x86-64") + +build_linux_jar() { + ARTIFACT="$TEMP/zstd-$VERSION-linux-$2.jar" + OUTPUT_DIR="$TEMP/linux-$2" + mkdir $OUTPUT_DIR + DOCKER_IMAGE=$(docker build --build-arg="ZSTD_VERSION=1.5.5" --file zstd.Dockerfile --platform $1 --quiet .) + docker run --platform $1 $DOCKER_IMAGE > $OUTPUT_DIR/libzstd.so + download_license $OUTPUT_DIR/LICENSE + (cd $OUTPUT_DIR/../ && zip -rq - $(basename $OUTPUT_DIR)) > $ARTIFACT && rm -rf $OUTPUT_DIR + echo $ARTIFACT +} + +echo 'Building Linux jars...' 
+LINUX_ARM_JAR=$(build_linux_jar "linux/amd64" "x86-64") +LINUX_X86_JAR=$(build_linux_jar "linux/arm64" "aarch64") + +build_windows_jar() { + ARTIFACT="$TEMP/zstd-$VERSION-windows-x86-64.jar" + OUTPUT_DIR="$TEMP/win32-x86-64" + mkdir $OUTPUT_DIR + curl -sS --retry 3 --location https://github.com/facebook/zstd/releases/download/v${VERSION}/zstd-v${VERSION}-win64.zip --output $OUTPUT_DIR/zstd.zip + unzip -jq $OUTPUT_DIR/zstd.zip zstd-v${VERSION}-win64/dll/libzstd.dll -d $OUTPUT_DIR && rm $OUTPUT_DIR/zstd.zip + mv $OUTPUT_DIR/libzstd.dll $OUTPUT_DIR/zstd.dll + download_license $OUTPUT_DIR/LICENSE + (cd $OUTPUT_DIR/../ && zip -rq - $(basename $OUTPUT_DIR)) > $ARTIFACT && rm -rf $OUTPUT_DIR + echo $ARTIFACT +} + +echo 'Building Windows jar...' +WINDOWS_X86_JAR=$(build_windows_jar) + +upload_artifact() { + curl -sS -X PUT -H "X-JFrog-Art-Api: ${ARTIFACTORY_API_KEY}" --data-binary "@$1" --location "${ARTIFACTORY_REPOSITORY}/org/elasticsearch/zstd/${VERSION}/$(basename $1)" +} + +echo 'Uploading artifacts...' 
+upload_artifact ${DARWIN_ARM_JAR} +upload_artifact ${DARWIN_X86_JAR} +upload_artifact ${LINUX_ARM_JAR} +upload_artifact ${LINUX_X86_JAR} +upload_artifact ${WINDOWS_X86_JAR} + +rm -rf $TEMP diff --git a/dev-tools/zstd.Dockerfile b/dev-tools/zstd.Dockerfile new file mode 100644 index 0000000000000..0dbc8eb8a9023 --- /dev/null +++ b/dev-tools/zstd.Dockerfile @@ -0,0 +1,11 @@ +FROM centos:7 +ARG ZSTD_VERSION + +RUN yum install -y git gcc gcc-c++ make +RUN git clone --depth 1 --branch v${ZSTD_VERSION} https://github.com/facebook/zstd.git +WORKDIR zstd +RUN make lib-release && strip --strip-unneeded lib/libzstd.so.${ZSTD_VERSION} + +ENV ZSTD_VERSION=${ZSTD_VERSION} + +CMD cat lib/libzstd.so.${ZSTD_VERSION} diff --git a/distribution/tools/keystore-cli/src/test/java/org/elasticsearch/cli/keystore/KeyStoreWrapperTests.java b/distribution/tools/keystore-cli/src/test/java/org/elasticsearch/cli/keystore/KeyStoreWrapperTests.java index f6e3578811688..3004494262e6b 100644 --- a/distribution/tools/keystore-cli/src/test/java/org/elasticsearch/cli/keystore/KeyStoreWrapperTests.java +++ b/distribution/tools/keystore-cli/src/test/java/org/elasticsearch/cli/keystore/KeyStoreWrapperTests.java @@ -457,6 +457,26 @@ public void testLegacyV3() throws GeneralSecurityException, IOException { assertThat(toByteArray(wrapper.getFile("file_setting")), equalTo("file_value".getBytes(StandardCharsets.UTF_8))); } + public void testLegacyV5() throws GeneralSecurityException, IOException { + final Path configDir = createTempDir(); + final Path keystore = configDir.resolve("elasticsearch.keystore"); + try ( + InputStream is = KeyStoreWrapperTests.class.getResourceAsStream("/format-v5-with-password-elasticsearch.keystore"); + OutputStream os = Files.newOutputStream(keystore) + ) { + final byte[] buffer = new byte[4096]; + int readBytes; + while ((readBytes = is.read(buffer)) > 0) { + os.write(buffer, 0, readBytes); + } + } + final KeyStoreWrapper wrapper = KeyStoreWrapper.load(configDir); + 
assertNotNull(wrapper); + wrapper.decrypt("keystorepassword".toCharArray()); + assertThat(wrapper.getFormatVersion(), equalTo(5)); + assertThat(wrapper.getSettingNames(), equalTo(Set.of("keystore.seed"))); + } + public void testSerializationNewlyCreated() throws Exception { final KeyStoreWrapper wrapper = KeyStoreWrapper.create(); wrapper.setString("string_setting", "string_value".toCharArray()); diff --git a/distribution/tools/keystore-cli/src/test/java/org/elasticsearch/cli/keystore/UpgradeKeyStoreCommandTests.java b/distribution/tools/keystore-cli/src/test/java/org/elasticsearch/cli/keystore/UpgradeKeyStoreCommandTests.java index ae19fa0b94b83..979b118a887e5 100644 --- a/distribution/tools/keystore-cli/src/test/java/org/elasticsearch/cli/keystore/UpgradeKeyStoreCommandTests.java +++ b/distribution/tools/keystore-cli/src/test/java/org/elasticsearch/cli/keystore/UpgradeKeyStoreCommandTests.java @@ -14,6 +14,7 @@ import org.elasticsearch.cli.ProcessInfo; import org.elasticsearch.cli.UserException; import org.elasticsearch.common.settings.KeyStoreWrapper; +import org.elasticsearch.core.Nullable; import org.elasticsearch.env.Environment; import java.io.InputStream; @@ -46,8 +47,20 @@ public void testKeystoreUpgradeV4() throws Exception { assertKeystoreUpgrade("/format-v4-elasticsearch.keystore", KeyStoreWrapper.V4_VERSION); } + public void testKeystoreUpgradeV5() throws Exception { + assertKeystoreUpgradeWithPassword("/format-v5-with-password-elasticsearch.keystore", KeyStoreWrapper.LE_VERSION); + } + private void assertKeystoreUpgrade(String file, int version) throws Exception { assumeFalse("Cannot open unprotected keystore on FIPS JVM", inFipsJvm()); + assertKeystoreUpgrade(file, version, null); + } + + private void assertKeystoreUpgradeWithPassword(String file, int version) throws Exception { + assertKeystoreUpgrade(file, version, "keystorepassword"); + } + + private void assertKeystoreUpgrade(String file, int version, @Nullable String password) throws Exception { 
final Path keystore = KeyStoreWrapper.keystorePath(env.configFile()); try (InputStream is = KeyStoreWrapperTests.class.getResourceAsStream(file); OutputStream os = Files.newOutputStream(keystore)) { is.transferTo(os); @@ -56,11 +69,17 @@ private void assertKeystoreUpgrade(String file, int version) throws Exception { assertNotNull(beforeUpgrade); assertThat(beforeUpgrade.getFormatVersion(), equalTo(version)); } + if (password != null) { + terminal.addSecretInput(password); + terminal.addSecretInput(password); + } execute(); + terminal.reset(); + try (KeyStoreWrapper afterUpgrade = KeyStoreWrapper.load(env.configFile())) { assertNotNull(afterUpgrade); assertThat(afterUpgrade.getFormatVersion(), equalTo(KeyStoreWrapper.CURRENT_VERSION)); - afterUpgrade.decrypt(new char[0]); + afterUpgrade.decrypt(password != null ? password.toCharArray() : new char[0]); assertThat(afterUpgrade.getSettingNames(), hasItem(KeyStoreWrapper.SEED_SETTING.getKey())); } } @@ -69,5 +88,4 @@ public void testKeystoreDoesNotExist() { final UserException e = expectThrows(UserException.class, this::execute); assertThat(e, hasToString(containsString("keystore not found at [" + KeyStoreWrapper.keystorePath(env.configFile()) + "]"))); } - } diff --git a/distribution/tools/keystore-cli/src/test/resources/format-v5-with-password-elasticsearch.keystore b/distribution/tools/keystore-cli/src/test/resources/format-v5-with-password-elasticsearch.keystore new file mode 100644 index 0000000000000..0547db46eb1ef Binary files /dev/null and b/distribution/tools/keystore-cli/src/test/resources/format-v5-with-password-elasticsearch.keystore differ diff --git a/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/ServerProcess.java b/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/ServerProcess.java index 3972095a3a5c0..fa948572e7675 100644 --- a/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/ServerProcess.java +++ 
b/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/ServerProcess.java @@ -90,6 +90,15 @@ public synchronized void stop() { waitFor(); // ignore exit code, we are already shutting down } + /** + * Stop the subprocess, sending a SIGKILL. + */ + public void forceStop() { + assert detached == false; + jvmProcess.destroyForcibly(); + waitFor(); + } + private void sendShutdownMarker() { try { OutputStream os = jvmProcess.getOutputStream(); diff --git a/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/SystemJvmOptions.java b/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/SystemJvmOptions.java index 0e95021a3af7e..04079284b3ec9 100644 --- a/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/SystemJvmOptions.java +++ b/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/SystemJvmOptions.java @@ -73,6 +73,7 @@ static List systemJvmOptions(Settings nodeSettings, final Map sysprops) { } String archname = sysprops.get("os.arch"); String arch; - if (archname.equals("amd64")) { + if (archname.equals("amd64") || archname.equals("x86_64")) { arch = "x64"; } else if (archname.equals("aarch64")) { arch = archname; diff --git a/distribution/tools/server-cli/src/test/java/org/elasticsearch/server/cli/JvmOptionsParserTests.java b/distribution/tools/server-cli/src/test/java/org/elasticsearch/server/cli/JvmOptionsParserTests.java index c24623c75b5c2..87b7894a9135a 100644 --- a/distribution/tools/server-cli/src/test/java/org/elasticsearch/server/cli/JvmOptionsParserTests.java +++ b/distribution/tools/server-cli/src/test/java/org/elasticsearch/server/cli/JvmOptionsParserTests.java @@ -394,9 +394,12 @@ public void testCommandLineDistributionType() { public void testLibraryPath() { assertLibraryPath("Mac OS", "aarch64", "darwin-aarch64"); assertLibraryPath("Mac OS", "amd64", "darwin-x64"); + assertLibraryPath("Mac OS", "x86_64", "darwin-x64"); assertLibraryPath("Linux", 
"aarch64", "linux-aarch64"); assertLibraryPath("Linux", "amd64", "linux-x64"); + assertLibraryPath("Linux", "x86_64", "linux-x64"); assertLibraryPath("Windows", "amd64", "windows-x64"); + assertLibraryPath("Windows", "x86_64", "windows-x64"); assertLibraryPath("Unknown", "aarch64", "unsupported_os[Unknown]-aarch64"); assertLibraryPath("Mac OS", "Unknown", "darwin-unsupported_arch[Unknown]"); } diff --git a/distribution/tools/server-cli/src/test/java/org/elasticsearch/server/cli/ServerProcessTests.java b/distribution/tools/server-cli/src/test/java/org/elasticsearch/server/cli/ServerProcessTests.java index 6497b3ed8a432..b9f2eb73b30b5 100644 --- a/distribution/tools/server-cli/src/test/java/org/elasticsearch/server/cli/ServerProcessTests.java +++ b/distribution/tools/server-cli/src/test/java/org/elasticsearch/server/cli/ServerProcessTests.java @@ -37,6 +37,7 @@ import java.util.HashMap; import java.util.List; import java.util.Map; +import java.util.concurrent.CancellationException; import java.util.concurrent.CountDownLatch; import java.util.concurrent.ExecutionException; import java.util.concurrent.ExecutorService; @@ -66,6 +67,7 @@ public class ServerProcessTests extends ESTestCase { Settings.Builder nodeSettings; ProcessValidator processValidator; MainMethod mainCallback; + Runnable forceStopCallback; MockElasticsearchProcess process; SecureSettings secrets; @@ -93,6 +95,7 @@ public void resetEnv() { nodeSettings = Settings.builder(); processValidator = null; mainCallback = null; + forceStopCallback = null; secrets = KeyStoreWrapper.create(); } @@ -162,6 +165,8 @@ public int waitFor() throws InterruptedException { main.get(); } catch (ExecutionException e) { throw new AssertionError(e); + } catch (CancellationException e) { + return 137; // process killed } if (processException.get() != null) { throw new AssertionError("Process failed", processException.get()); @@ -187,6 +192,8 @@ public void destroy() { public Process destroyForcibly() { main.cancel(true); + 
IOUtils.closeWhileHandlingException(stdin, stderr); + forceStopCallback.run(); return this; } } @@ -361,6 +368,22 @@ public void testStop() throws Exception { assertThat(terminal.getErrorOutput(), containsString("final message")); } + public void testForceStop() throws Exception { + CountDownLatch blockMain = new CountDownLatch(1); + CountDownLatch inMain = new CountDownLatch(1); + mainCallback = (args, stdin, stderr, exitCode) -> { + stderr.println(SERVER_READY_MARKER); + inMain.countDown(); + nonInterruptibleVoid(blockMain::await); + }; + var server = startProcess(false, false); + nonInterruptibleVoid(inMain::await); + forceStopCallback = blockMain::countDown; + server.forceStop(); + + assertThat(process.main.isCancelled(), is(true)); // stop should have waited + } + public void testWaitFor() throws Exception { CountDownLatch mainReady = new CountDownLatch(1); mainCallback = (args, stdin, stderr, exitCode) -> { diff --git a/docs/changelog/103374.yaml b/docs/changelog/103374.yaml new file mode 100644 index 0000000000000..fcdee9185eb92 --- /dev/null +++ b/docs/changelog/103374.yaml @@ -0,0 +1,16 @@ +pr: 103374 +summary: Cut over stored fields to ZSTD for compression +area: Search +type: enhancement +issues: [] +highlight: + title: Stored fields are now compressed with ZStandard instead of LZ4/DEFLATE + body: |- + Stored fields are now compressed by splitting documents into blocks, which + are then compressed independently with ZStandard. `index.codec: default` + (default) uses blocks of at most 14kB or 128 documents compressed with level + 0, while `index.codec: best_compression` uses blocks of at most 240kB or + 2048 documents compressed at level 3. On most datasets that we tested + against, this yielded storage improvements in the order of 10%, slightly + faster indexing and similar retrieval latencies. 
+ notable: true diff --git a/docs/changelog/106077.yaml b/docs/changelog/106077.yaml new file mode 100644 index 0000000000000..eb987cd9617f8 --- /dev/null +++ b/docs/changelog/106077.yaml @@ -0,0 +1,7 @@ +pr: 106077 +summary: Fix merging component templates with a mix of dotted and nested object mapper + definitions +area: Mapping +type: bug +issues: + - 105482 diff --git a/docs/changelog/106133.yaml b/docs/changelog/106133.yaml new file mode 100644 index 0000000000000..fe71992a0f4f0 --- /dev/null +++ b/docs/changelog/106133.yaml @@ -0,0 +1,5 @@ +pr: 106133 +summary: Add an optimised vector distance function for aarch64 +area: Search +type: enhancement +issues: [] diff --git a/docs/changelog/106247.yaml b/docs/changelog/106247.yaml deleted file mode 100644 index 5895dffd685a4..0000000000000 --- a/docs/changelog/106247.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 106247 -summary: Fix a downsample persistent task assignment bug -area: Downsampling -type: bug -issues: [] diff --git a/docs/changelog/106378.yaml b/docs/changelog/106378.yaml new file mode 100644 index 0000000000000..b54760553d184 --- /dev/null +++ b/docs/changelog/106378.yaml @@ -0,0 +1,5 @@ +pr: 106378 +summary: Add Cohere rerank to `_inference` service +area: Machine Learning +type: feature +issues: [] diff --git a/docs/changelog/106514.yaml b/docs/changelog/106514.yaml new file mode 100644 index 0000000000000..5b25f40db2742 --- /dev/null +++ b/docs/changelog/106514.yaml @@ -0,0 +1,6 @@ +pr: 106514 +summary: Add granular error list to alias action response +area: Indices APIs +type: feature +issues: + - 94478 diff --git a/docs/changelog/106579.yaml b/docs/changelog/106579.yaml new file mode 100644 index 0000000000000..104ed3066a6f6 --- /dev/null +++ b/docs/changelog/106579.yaml @@ -0,0 +1,5 @@ +pr: 106579 +summary: "ESQL: Allow grouping key inside stats expressions" +area: ES|QL +type: enhancement +issues: [] diff --git a/docs/changelog/106720.yaml b/docs/changelog/106720.yaml new file mode 100644 index 
0000000000000..93358ed1d3dff --- /dev/null +++ b/docs/changelog/106720.yaml @@ -0,0 +1,5 @@ +pr: 106720 +summary: "ESQL: Fix treating all fields as MV in COUNT pushdown" +area: ES|QL +type: bug +issues: [] diff --git a/docs/changelog/106796.yaml b/docs/changelog/106796.yaml new file mode 100644 index 0000000000000..83eb99dba1603 --- /dev/null +++ b/docs/changelog/106796.yaml @@ -0,0 +1,5 @@ +pr: 106796 +summary: Bulk loading enrich fields in ESQL +area: ES|QL +type: enhancement +issues: [] diff --git a/docs/changelog/106824.yaml b/docs/changelog/106824.yaml new file mode 100644 index 0000000000000..0a2001df5039a --- /dev/null +++ b/docs/changelog/106824.yaml @@ -0,0 +1,5 @@ +pr: 106824 +summary: "ESQL: Introduce language versioning to REST API" +area: ES|QL +type: enhancement +issues: [] diff --git a/docs/changelog/106851.yaml b/docs/changelog/106851.yaml new file mode 100644 index 0000000000000..2ada6a6a4e088 --- /dev/null +++ b/docs/changelog/106851.yaml @@ -0,0 +1,5 @@ +pr: 106851 +summary: Catching `StackOverflowErrors` from bad regexes in `GsubProcessor` +area: Ingest Node +type: bug +issues: [] diff --git a/docs/changelog/106860.yaml b/docs/changelog/106860.yaml new file mode 100644 index 0000000000000..376f8753023b9 --- /dev/null +++ b/docs/changelog/106860.yaml @@ -0,0 +1,5 @@ +pr: 106860 +summary: "[Profiling] Add TopN Functions API" +area: Application +type: enhancement +issues: [] diff --git a/docs/changelog/106866.yaml b/docs/changelog/106866.yaml new file mode 100644 index 0000000000000..ffc34e5962850 --- /dev/null +++ b/docs/changelog/106866.yaml @@ -0,0 +1,5 @@ +pr: 106866 +summary: Add ES|QL signum function +area: ES|QL +type: enhancement +issues: [] diff --git a/docs/changelog/106873.yaml b/docs/changelog/106873.yaml deleted file mode 100644 index f823caff7aefe..0000000000000 --- a/docs/changelog/106873.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 106873 -summary: Query API Key Information API support for the `typed_keys` request parameter -area: 
Security -type: enhancement -issues: - - 106817 diff --git a/docs/changelog/106899.yaml b/docs/changelog/106899.yaml new file mode 100644 index 0000000000000..a2db24236a47e --- /dev/null +++ b/docs/changelog/106899.yaml @@ -0,0 +1,6 @@ +pr: 106899 +summary: Add ES|QL Locate function +area: ES|QL +type: enhancement +issues: + - 106818 diff --git a/docs/changelog/106975.yaml b/docs/changelog/106975.yaml new file mode 100644 index 0000000000000..bd32b3574c4f9 --- /dev/null +++ b/docs/changelog/106975.yaml @@ -0,0 +1,5 @@ +pr: 106975 +summary: GET /_all should return hidden indices with visible aliases +area: Indices APIs +type: bug +issues: [] diff --git a/docs/changelog/106989.yaml b/docs/changelog/106989.yaml new file mode 100644 index 0000000000000..47df5fe5b47d7 --- /dev/null +++ b/docs/changelog/106989.yaml @@ -0,0 +1,7 @@ +pr: 106989 +summary: Make force-stopping the transform always remove persistent task from cluster + state +area: Transform +type: bug +issues: + - 106811 diff --git a/docs/changelog/107007.yaml b/docs/changelog/107007.yaml new file mode 100644 index 0000000000000..b2a755171725b --- /dev/null +++ b/docs/changelog/107007.yaml @@ -0,0 +1,5 @@ +pr: 107007 +summary: "ESQL: Support ST_DISJOINT" +area: ES|QL +type: enhancement +issues: [] diff --git a/docs/changelog/107041.yaml b/docs/changelog/107041.yaml new file mode 100644 index 0000000000000..b8b4f3d7c5690 --- /dev/null +++ b/docs/changelog/107041.yaml @@ -0,0 +1,6 @@ +pr: 107041 +summary: '`DocumentParsingObserver` to accept an `indexName` to allow skipping system + indices' +area: Infra/Metrics +type: enhancement +issues: [] diff --git a/docs/changelog/107050.yaml b/docs/changelog/107050.yaml new file mode 100644 index 0000000000000..ecb375967ae44 --- /dev/null +++ b/docs/changelog/107050.yaml @@ -0,0 +1,5 @@ +pr: 107050 +summary: Fix support for infinite `?master_timeout` +area: Cluster Coordination +type: bug +issues: [] diff --git a/docs/changelog/107054.yaml b/docs/changelog/107054.yaml 
deleted file mode 100644 index 6511cb5185492..0000000000000 --- a/docs/changelog/107054.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 107054 -summary: Query API Keys support for both `aggs` and `aggregations` keywords -area: Security -type: enhancement -issues: - - 106839 diff --git a/docs/changelog/107107.yaml b/docs/changelog/107107.yaml new file mode 100644 index 0000000000000..5ca611befeb5d --- /dev/null +++ b/docs/changelog/107107.yaml @@ -0,0 +1,5 @@ +pr: 107107 +summary: Increase KDF iteration count in `KeyStoreWrapper` +area: Infra/CLI +type: enhancement +issues: [] diff --git a/docs/changelog/107122.yaml b/docs/changelog/107122.yaml new file mode 100644 index 0000000000000..e227bfd45b939 --- /dev/null +++ b/docs/changelog/107122.yaml @@ -0,0 +1,5 @@ +pr: 107122 +summary: Avoid unintentionally clearing the `DataStream.rolloverOnWrite` flag +area: Data streams +type: bug +issues: [] diff --git a/docs/changelog/107129.yaml b/docs/changelog/107129.yaml new file mode 100644 index 0000000000000..6c9b9094962c1 --- /dev/null +++ b/docs/changelog/107129.yaml @@ -0,0 +1,5 @@ +pr: 107129 +summary: Track ongoing search tasks +area: Search +type: enhancement +issues: [] diff --git a/docs/changelog/107131.yaml b/docs/changelog/107131.yaml new file mode 100644 index 0000000000000..ebb696931777b --- /dev/null +++ b/docs/changelog/107131.yaml @@ -0,0 +1,6 @@ +pr: 107131 +summary: "ESQL: Fix bug when combining projections" +area: ES|QL +type: bug +issues: + - 107083 diff --git a/docs/changelog/107158.yaml b/docs/changelog/107158.yaml new file mode 100644 index 0000000000000..9589fe7e7264b --- /dev/null +++ b/docs/changelog/107158.yaml @@ -0,0 +1,5 @@ +pr: 107158 +summary: "ESQL: allow sorting by expressions and not only regular fields" +area: ES|QL +type: feature +issues: [] diff --git a/docs/changelog/107183.yaml b/docs/changelog/107183.yaml new file mode 100644 index 0000000000000..226d036456858 --- /dev/null +++ b/docs/changelog/107183.yaml @@ -0,0 +1,5 @@ +pr: 107183 
+summary: ES|QL fix no-length substring with supplementary (4-byte) character +area: ES|QL +type: bug +issues: [] diff --git a/docs/changelog/107196.yaml b/docs/changelog/107196.yaml new file mode 100644 index 0000000000000..9892ccf71856f --- /dev/null +++ b/docs/changelog/107196.yaml @@ -0,0 +1,5 @@ +pr: 107196 +summary: Add metric for calculating index flush time excluding waiting on locks +area: Engine +type: enhancement +issues: [] diff --git a/docs/changelog/107224.yaml b/docs/changelog/107224.yaml new file mode 100644 index 0000000000000..b0d40c09b758a --- /dev/null +++ b/docs/changelog/107224.yaml @@ -0,0 +1,6 @@ +pr: 107224 +summary: "Enable 'encoder' and 'tags_schema' highlighting settings at field level" +area: Highlighting +type: enhancement +issues: + - 94028 diff --git a/docs/changelog/107232.yaml b/docs/changelog/107232.yaml new file mode 100644 index 0000000000000..1422848cb1c91 --- /dev/null +++ b/docs/changelog/107232.yaml @@ -0,0 +1,6 @@ +pr: 107232 +summary: Only trigger action once per thread +area: Transform +type: bug +issues: + - 107215 diff --git a/docs/changelog/107242.yaml b/docs/changelog/107242.yaml new file mode 100644 index 0000000000000..4a5e9821a1fa9 --- /dev/null +++ b/docs/changelog/107242.yaml @@ -0,0 +1,5 @@ +pr: 107242 +summary: Added a timeout parameter to the inference API +area: Machine Learning +type: enhancement +issues: [ ] diff --git a/docs/changelog/107253.yaml b/docs/changelog/107253.yaml new file mode 100644 index 0000000000000..6961b59231ea3 --- /dev/null +++ b/docs/changelog/107253.yaml @@ -0,0 +1,5 @@ +pr: 107253 +summary: "[Connector API] Support cleaning up sync jobs when deleting a connector" +area: Application +type: feature +issues: [] diff --git a/docs/changelog/107287.yaml b/docs/changelog/107287.yaml new file mode 100644 index 0000000000000..791f07fd1c729 --- /dev/null +++ b/docs/changelog/107287.yaml @@ -0,0 +1,6 @@ +pr: 107287 +summary: Add support for the 'Anonymous IP' database to the geoip processor 
+area: Ingest Node +type: enhancement +issues: + - 90789 diff --git a/docs/changelog/107291.yaml b/docs/changelog/107291.yaml new file mode 100644 index 0000000000000..3274fb77ef8c8 --- /dev/null +++ b/docs/changelog/107291.yaml @@ -0,0 +1,6 @@ +pr: 107291 +summary: Support data streams in enrich policy indices +area: Ingest Node +type: enhancement +issues: + - 98836 diff --git a/docs/changelog/107312.yaml b/docs/changelog/107312.yaml new file mode 100644 index 0000000000000..6ecd4179596e5 --- /dev/null +++ b/docs/changelog/107312.yaml @@ -0,0 +1,5 @@ +pr: 107312 +summary: Fix NPE in ML assignment notifier +area: Machine Learning +type: bug +issues: [] diff --git a/docs/changelog/107377.yaml b/docs/changelog/107377.yaml new file mode 100644 index 0000000000000..b9fea61d38a0a --- /dev/null +++ b/docs/changelog/107377.yaml @@ -0,0 +1,5 @@ +pr: 107377 +summary: Add support for the 'Enterprise' database to the geoip processor +area: Ingest Node +type: enhancement +issues: [] diff --git a/docs/changelog/107432.yaml b/docs/changelog/107432.yaml new file mode 100644 index 0000000000000..c492644c5baf2 --- /dev/null +++ b/docs/changelog/107432.yaml @@ -0,0 +1,6 @@ +pr: 107432 +summary: "Percolator named queries: rewrite for matched info" +area: Percolator +type: bug +issues: + - 107176 diff --git a/docs/reference/aggregations/bucket/datehistogram-aggregation.asciidoc b/docs/reference/aggregations/bucket/datehistogram-aggregation.asciidoc index 26774c7091d27..3511ec9e63b02 100644 --- a/docs/reference/aggregations/bucket/datehistogram-aggregation.asciidoc +++ b/docs/reference/aggregations/bucket/datehistogram-aggregation.asciidoc @@ -341,23 +341,24 @@ Response: rounding is also done in UTC. Use the `time_zone` parameter to indicate that bucketing should use a different time zone. 
-For example, if the interval is a calendar day and the time zone is -`America/New_York` then `2020-01-03T01:00:01Z` is : -# Converted to `2020-01-02T18:00:01` -# Rounded down to `2020-01-02T00:00:00` -# Then converted back to UTC to produce `2020-01-02T05:00:00:00Z` -# Finally, when the bucket is turned into a string key it is printed in - `America/New_York` so it'll display as `"2020-01-02T00:00:00"`. - -It looks like: +When you specify a time zone, the following logic is used to determine the bucket the document belongs in: [source,java] ---- bucket_key = localToUtc(Math.floor(utcToLocal(value) / interval) * interval)) ---- -You can specify time zones as an ISO 8601 UTC offset (e.g. `+01:00` or -`-08:00`) or as an IANA time zone ID, +For example, if the interval is a calendar day and the time zone is +`America/New_York`, then the date value `2020-01-03T01:00:01Z` is processed as follows: + +. Converted to EST: `2020-01-02T20:00:01` +. Rounded down to the nearest interval: `2020-01-02T00:00:00` +. Converted back to UTC: `2020-01-02T05:00:00:00Z` + +When a `key_as_string` is generated for the bucket, the key value is stored in `America/New_York` time, so it'll display as `"2020-01-02T00:00:00"`. + +You can specify time zones as an ISO 8601 UTC offset, such as `+01:00` or +`-08:00`, or as an IANA time zone ID, such as `America/Los_Angeles`. Consider the following example: @@ -618,7 +619,7 @@ For example, for `+50d` we see: -------------------------------------------------- // TESTRESPONSE[skip:no setup made for this example yet] -It is therefor always important when using `offset` with `calendar_interval` bucket sizes +It is therefore always important when using `offset` with `calendar_interval` bucket sizes to understand the consequences of using offsets larger than the interval size. More examples: @@ -633,7 +634,7 @@ but as soon as you push the start date into the second month by having an offset quarters will all start on different dates. 
[[date-histogram-keyed-response]] -==== Keyed Response +==== Keyed response Setting the `keyed` flag to `true` associates a unique string key with each bucket and returns the ranges as a hash rather than an array: diff --git a/docs/reference/alias.asciidoc b/docs/reference/alias.asciidoc index 6ddd3602e1467..5b30501ed7c9d 100644 --- a/docs/reference/alias.asciidoc +++ b/docs/reference/alias.asciidoc @@ -121,6 +121,77 @@ POST _aliases // TEST[s/^/PUT _data_stream\/logs-nginx.access-prod\nPUT _data_stream\/logs-my_app-default\n/] // end::alias-multiple-actions-example[] +[discrete] +[[multiple-action-results]] +=== Multiple action results + +When using multiple actions, if some succeed and some fail, a list of per-action results will be returned. + +Consider a similar action list to the previous example, but now with an alias `log-non-existing`, which does not yet exist. +In this case, the `remove` action will fail, but the `add` action will succeed. +The response will contain the list `action_results`, with a result for every requested action. 
+ +[source,console] +---- +POST _aliases +{ + "actions": [ + { + "remove": { + "index": "index1", + "alias": "logs-non-existing" + } + }, + { + "add": { + "index": "index2", + "alias": "logs-non-existing" + } + } + ] +} +---- +// TEST[s/^/PUT \/index1\nPUT \/index2\n/] + +The API returns the following result: + +[source,console-result] +-------------------------------------------------- +{ + "acknowledged": true, + "errors": true, + "action_results": [ + { + "action": { + "type": "remove", + "indices": [ "index1" ], + "aliases": [ "logs-non-existing" ], + }, + "status": 404, + "error": { + "type": "aliases_not_found_exception", + "reason": "aliases [logs-non-existing] missing", + "resource.type": "aliases", + "resource.id": "logs-non-existing" + } + }, + { + "action": { + "type": "add", + "indices": [ "index2" ], + "aliases": [ "logs-non-existing" ], + }, + "status": 200 + } + ] +} +-------------------------------------------------- + +Allowing the action list to succeed partially may not provide the desired result. +It may be more appropriate to set `must_exist` to `true`, which will cause the entire action +list to fail if a single action fails. + + [discrete] [[add-alias-at-creation]] === Add an alias at index creation diff --git a/docs/reference/ccr/apis/auto-follow/delete-auto-follow-pattern.asciidoc b/docs/reference/ccr/apis/auto-follow/delete-auto-follow-pattern.asciidoc index dffb20715a676..a78148388a931 100644 --- a/docs/reference/ccr/apis/auto-follow/delete-auto-follow-pattern.asciidoc +++ b/docs/reference/ccr/apis/auto-follow/delete-auto-follow-pattern.asciidoc @@ -57,10 +57,7 @@ This API deletes a configured collection of [[ccr-delete-auto-follow-pattern-query-params]] ==== {api-query-parms-title} -`master_timeout`:: -(Optional, <>) Specifies the period of time to wait for -a connection to the master node. If no response is received before the timeout -expires, the request fails and returns an error. Defaults to `30s`. 
+include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=master-timeout] [[ccr-delete-auto-follow-pattern-examples]] ==== {api-examples-title} diff --git a/docs/reference/ccr/apis/auto-follow/get-auto-follow-pattern.asciidoc b/docs/reference/ccr/apis/auto-follow/get-auto-follow-pattern.asciidoc index bcb0406fd43ba..4fa85d6ee638e 100644 --- a/docs/reference/ccr/apis/auto-follow/get-auto-follow-pattern.asciidoc +++ b/docs/reference/ccr/apis/auto-follow/get-auto-follow-pattern.asciidoc @@ -75,10 +75,7 @@ This API will return the specified auto-follow pattern collection. [[ccr-get-auto-follow-pattern-query-params]] ==== {api-query-parms-title} -`master_timeout`:: -(Optional, <>) Specifies the period of time to wait for -a connection to the master node. If no response is received before the timeout -expires, the request fails and returns an error. Defaults to `30s`. +include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=master-timeout] [[ccr-get-auto-follow-pattern-examples]] ==== {api-examples-title} diff --git a/docs/reference/ccr/apis/auto-follow/pause-auto-follow-pattern.asciidoc b/docs/reference/ccr/apis/auto-follow/pause-auto-follow-pattern.asciidoc index 0ca8a8de6e572..ed0f242640698 100644 --- a/docs/reference/ccr/apis/auto-follow/pause-auto-follow-pattern.asciidoc +++ b/docs/reference/ccr/apis/auto-follow/pause-auto-follow-pattern.asciidoc @@ -43,10 +43,7 @@ meantime. [[ccr-pause-auto-follow-pattern-query-params]] ==== {api-query-parms-title} -`master_timeout`:: -(Optional, <>) Specifies the period of time to wait for -a connection to the master node. If no response is received before the timeout -expires, the request fails and returns an error. Defaults to `30s`. 
+include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=master-timeout] [[ccr-pause-auto-follow-pattern-examples]] ==== {api-examples-title} diff --git a/docs/reference/ccr/apis/auto-follow/put-auto-follow-pattern.asciidoc b/docs/reference/ccr/apis/auto-follow/put-auto-follow-pattern.asciidoc index f38454a1ad024..4b5ff5a5eb930 100644 --- a/docs/reference/ccr/apis/auto-follow/put-auto-follow-pattern.asciidoc +++ b/docs/reference/ccr/apis/auto-follow/put-auto-follow-pattern.asciidoc @@ -74,10 +74,7 @@ the new patterns. [[ccr-put-auto-follow-pattern-query-params]] ==== {api-query-parms-title} -`master_timeout`:: -(Optional, <>) Specifies the period of time to wait for -a connection to the master node. If no response is received before the timeout -expires, the request fails and returns an error. Defaults to `30s`. +include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=master-timeout] [[ccr-put-auto-follow-pattern-request-body]] ==== {api-request-body-title} diff --git a/docs/reference/ccr/apis/auto-follow/resume-auto-follow-pattern.asciidoc b/docs/reference/ccr/apis/auto-follow/resume-auto-follow-pattern.asciidoc index 431137a6a9c18..5028b0f3d4775 100644 --- a/docs/reference/ccr/apis/auto-follow/resume-auto-follow-pattern.asciidoc +++ b/docs/reference/ccr/apis/auto-follow/resume-auto-follow-pattern.asciidoc @@ -38,10 +38,7 @@ have been deleted or closed in the meantime. [[ccr-resume-auto-follow-pattern-query-params]] ==== {api-query-parms-title} -`master_timeout`:: -(Optional, <>) Specifies the period of time to wait for -a connection to the master node. If no response is received before the timeout -expires, the request fails and returns an error. Defaults to `30s`. 
+include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=master-timeout] [[ccr-resume-auto-follow-pattern-examples]] ==== {api-examples-title} diff --git a/docs/reference/ccr/apis/follow/get-follow-info.asciidoc b/docs/reference/ccr/apis/follow/get-follow-info.asciidoc index 26bb9ac50d2e3..fd3d24e41be59 100644 --- a/docs/reference/ccr/apis/follow/get-follow-info.asciidoc +++ b/docs/reference/ccr/apis/follow/get-follow-info.asciidoc @@ -52,10 +52,7 @@ replication options and whether the follower indices are active or paused. [[ccr-get-follow-info-query-params]] ==== {api-query-parms-title} -`master_timeout`:: -(Optional, <>) Specifies the period of time to wait for -a connection to the master node. If no response is received before the timeout -expires, the request fails and returns an error. Defaults to `30s`. +include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=master-timeout] [role="child_attributes"] [[ccr-get-follow-info-response-body]] diff --git a/docs/reference/ccr/apis/follow/post-pause-follow.asciidoc b/docs/reference/ccr/apis/follow/post-pause-follow.asciidoc index b0a6752358cf7..58d5fbb03fa03 100644 --- a/docs/reference/ccr/apis/follow/post-pause-follow.asciidoc +++ b/docs/reference/ccr/apis/follow/post-pause-follow.asciidoc @@ -56,10 +56,7 @@ following task. [[ccr-post-pause-follow-query-params]] ==== {api-query-parms-title} -`master_timeout`:: -(Optional, <>) Specifies the period of time to wait for -a connection to the master node. If no response is received before the timeout -expires, the request fails and returns an error. Defaults to `30s`. 
+include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=master-timeout] [[ccr-post-pause-follow-examples]] ==== {api-examples-title} diff --git a/docs/reference/ccr/apis/follow/post-resume-follow.asciidoc b/docs/reference/ccr/apis/follow/post-resume-follow.asciidoc index 1f1996837cd8e..b762f049bde62 100644 --- a/docs/reference/ccr/apis/follow/post-resume-follow.asciidoc +++ b/docs/reference/ccr/apis/follow/post-resume-follow.asciidoc @@ -69,10 +69,7 @@ returns, the follower index will resume fetching operations from the leader inde [[ccr-post-resume-follow-query-params]] ==== {api-query-parms-title} -`master_timeout`:: -(Optional, <>) Specifies the period of time to wait for -a connection to the master node. If no response is received before the timeout -expires, the request fails and returns an error. Defaults to `30s`. +include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=master-timeout] [[ccr-post-resume-follow-request-body]] ==== {api-request-body-title} diff --git a/docs/reference/ccr/apis/follow/post-unfollow.asciidoc b/docs/reference/ccr/apis/follow/post-unfollow.asciidoc index 0851340a4da08..e8ca3526bbc88 100644 --- a/docs/reference/ccr/apis/follow/post-unfollow.asciidoc +++ b/docs/reference/ccr/apis/follow/post-unfollow.asciidoc @@ -63,10 +63,7 @@ irreversible operation. [[ccr-post-unfollow-query-params]] ==== {api-query-parms-title} -`master_timeout`:: -(Optional, <>) Specifies the period of time to wait for -a connection to the master node. If no response is received before the timeout -expires, the request fails and returns an error. Defaults to `30s`. 
+include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=master-timeout] [[ccr-post-unfollow-examples]] ==== {api-examples-title} diff --git a/docs/reference/ccr/apis/follow/put-follow.asciidoc b/docs/reference/ccr/apis/follow/put-follow.asciidoc index a7fdfc5ae96c9..11711432437ec 100644 --- a/docs/reference/ccr/apis/follow/put-follow.asciidoc +++ b/docs/reference/ccr/apis/follow/put-follow.asciidoc @@ -65,11 +65,7 @@ referenced leader index. When this API returns, the follower index exists, and follower shard requires transferring all the remote Lucene segment files to the follower index. -`master_timeout`:: -(Optional, <>) Specifies the period of time to wait for -a connection to the master node. If no response is received before the timeout -expires, the request fails and returns an error. Defaults to `30s`. - +include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=master-timeout] [[ccr-put-follow-request-body]] ==== {api-request-body-title} diff --git a/docs/reference/ccr/apis/get-ccr-stats.asciidoc b/docs/reference/ccr/apis/get-ccr-stats.asciidoc index 69836aec910df..2917e3f86372b 100644 --- a/docs/reference/ccr/apis/get-ccr-stats.asciidoc +++ b/docs/reference/ccr/apis/get-ccr-stats.asciidoc @@ -56,10 +56,7 @@ shard-level stats as in the <>. `timeout`:: (Optional, time) Controls the amount of time to wait for results. Defaults to unlimited. -`master_timeout`:: -(Optional, <>) Specifies the period of time to wait for -a connection to the master node. If no response is received before the timeout -expires, the request fails and returns an error. Defaults to `30s`. 
+include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=master-timeout] [role="child_attributes"] [[ccr-get-stats-response-body]] diff --git a/docs/reference/cluster/nodes-stats.asciidoc b/docs/reference/cluster/nodes-stats.asciidoc index c008b074acccd..07328ba98bcec 100644 --- a/docs/reference/cluster/nodes-stats.asciidoc +++ b/docs/reference/cluster/nodes-stats.asciidoc @@ -626,6 +626,7 @@ Total time spent performing flush operations. (integer) Total time in milliseconds spent performing flush operations. + ======= `warmer`:: diff --git a/docs/reference/cluster/stats.asciidoc b/docs/reference/cluster/stats.asciidoc index 6d8a8f748fa0e..bdd3e166c22d6 100644 --- a/docs/reference/cluster/stats.asciidoc +++ b/docs/reference/cluster/stats.asciidoc @@ -1821,7 +1821,8 @@ The API returns the following response: "all_in_bytes": 0, "coordinating_rejections": 0, "primary_rejections": 0, - "replica_rejections": 0 + "replica_rejections": 0, + "primary_document_rejections": 0 }, "limit" : "0b", "limit_in_bytes": 0 diff --git a/docs/reference/data-streams/data-streams.asciidoc b/docs/reference/data-streams/data-streams.asciidoc index 307930d64c4fb..9c7137563caef 100644 --- a/docs/reference/data-streams/data-streams.asciidoc +++ b/docs/reference/data-streams/data-streams.asciidoc @@ -18,6 +18,28 @@ automate the management of these backing indices. For example, you can use hardware and delete unneeded indices. {ilm-init} can help you reduce costs and overhead as your data grows. + +[discrete] +[[should-you-use-a-data-stream]] +== Should you use a data stream? + +To determine whether you should use a data stream for your data, you should consider the format of +the data, and your expected interaction. A good candidate for using a data stream will match the +following criteria: + +* Your data contains a timestamp field, or one could be automatically generated. +* You mostly perform indexing requests, with occasional updates and deletes. 
+* You index documents without an `_id`, or when indexing documents with an explicit `_id` you expect first-write-wins behavior. + +For most time series data use-cases, a data stream will be a good fit. However, if you find that +your data doesn't fit into these categories (for example, if you frequently send multiple documents +using the same `_id` expecting last-write-wins), you may want to use an index alias with a write +index instead. See documentation for <> for more information. + +Keep in mind that some features such as <> and +<> require a data stream. + [discrete] [[backing-indices]] == Backing indices @@ -116,19 +138,19 @@ You should not derive any intelligence from the backing indices names. [discrete] [[data-streams-append-only]] -== Append-only +== Append-only (mostly) -Data streams are designed for use cases where existing data is rarely, -if ever, updated. You cannot send update or deletion requests for existing -documents directly to a data stream. Instead, use the +Data streams are designed for use cases where existing data is rarely updated. You cannot send +update or deletion requests for existing documents directly to a data stream. However, you can still +<> in a data stream by submitting +requests directly to the document's backing index. + +If you need to update a larger number of documents in a data stream, you can use the <> and <> APIs. -If needed, you can <> by submitting requests directly to the document's backing index. - -TIP: If you frequently update or delete existing time series data, use an index -alias with a write index instead of a data stream. See +TIP: If you frequently send multiple documents using the same `_id` expecting last-write-wins, you +may want to use an index alias with a write index instead. See <>. 
include::set-up-a-data-stream.asciidoc[] diff --git a/docs/reference/docs/bulk.asciidoc b/docs/reference/docs/bulk.asciidoc index a055c278b41d9..1a32e64cedb1f 100644 --- a/docs/reference/docs/bulk.asciidoc +++ b/docs/reference/docs/bulk.asciidoc @@ -414,9 +414,7 @@ This parameter is only returned for successful actions. `result`:: (string) Result of the operation. Successful values are `created`, `deleted`, and -`updated`. -+ -This parameter is only returned for successful operations. +`updated`. Other valid values are `noop` and `not_found`. `_shards`:: (object) diff --git a/docs/reference/esql/esql-functions-operators.asciidoc b/docs/reference/esql/esql-functions-operators.asciidoc index a1ad512fbe512..ddc077f3b8ff8 100644 --- a/docs/reference/esql/esql-functions-operators.asciidoc +++ b/docs/reference/esql/esql-functions-operators.asciidoc @@ -1,40 +1,71 @@ [[esql-functions-operators]] === {esql} functions and operators - ++++ Functions and operators ++++ {esql} provides a comprehensive set of functions and operators for working with data. 
-The functions are divided into the following categories: +The reference documentation is divided into the following categories: [[esql-functions]] -<>:: +==== Functions overview + +.*Aggregate functions* +[%collapsible] +==== include::functions/aggregation-functions.asciidoc[tag=agg_list] +==== -<>:: +.*Math functions* +[%collapsible] +==== include::functions/math-functions.asciidoc[tag=math_list] +==== -<>:: +.*String functions* +[%collapsible] +==== include::functions/string-functions.asciidoc[tag=string_list] +==== -<>:: +.*Date and time functions* +[%collapsible] +==== include::functions/date-time-functions.asciidoc[tag=date_list] +==== -<>:: +.*Spatial functions* +[%collapsible] +==== include::functions/spatial-functions.asciidoc[tag=spatial_list] +==== -<>:: +.*Type conversion functions* +[%collapsible] +==== include::functions/type-conversion-functions.asciidoc[tag=type_list] +==== -<>:: +.*Conditional functions and expressions* +[%collapsible] +==== include::functions/conditional-functions-and-expressions.asciidoc[tag=cond_list] +==== -<>:: +.*Multi value functions* +[%collapsible] +==== include::functions/mv-functions.asciidoc[tag=mv_list] +==== + +[[esql-operators-overview]] +==== Operators overview -<>:: +.*Operators* +[%collapsible] +==== include::functions/operators.asciidoc[tag=op_list] +==== include::functions/aggregation-functions.asciidoc[] include::functions/math-functions.asciidoc[] diff --git a/docs/reference/esql/esql-get-started.asciidoc b/docs/reference/esql/esql-get-started.asciidoc index 29f61299cec30..421272f741602 100644 --- a/docs/reference/esql/esql-get-started.asciidoc +++ b/docs/reference/esql/esql-get-started.asciidoc @@ -240,7 +240,7 @@ include::{esql-specs}/eval.csv-spec[tag=gs-eval-stats-backticks] === Create a histogram To track statistics over time, {esql} enables you to create histograms using the -<> function. `AUTO_BUCKET` creates human-friendly bucket sizes +<> function. 
`BUCKET` creates human-friendly bucket sizes and returns a value for each row that corresponds to the resulting bucket the row falls into. @@ -248,22 +248,22 @@ For example, to create hourly buckets for the data on October 23rd: [source,esql] ---- -include::{esql-specs}/date.csv-spec[tag=gs-auto_bucket] +include::{esql-specs}/date.csv-spec[tag=gs-bucket] ---- -Combine `AUTO_BUCKET` with <> to create a histogram. For example, +Combine `BUCKET` with <> to create a histogram. For example, to count the number of events per hour: [source,esql] ---- -include::{esql-specs}/date.csv-spec[tag=gs-auto_bucket-stats-by] +include::{esql-specs}/date.csv-spec[tag=gs-bucket-stats-by] ---- Or the median duration per hour: [source,esql] ---- -include::{esql-specs}/date.csv-spec[tag=gs-auto_bucket-stats-by-median] +include::{esql-specs}/date.csv-spec[tag=gs-bucket-stats-by-median] ---- [discrete] diff --git a/docs/reference/esql/esql-language.asciidoc b/docs/reference/esql/esql-language.asciidoc index e4c873457b21b..77f5e79753fdd 100644 --- a/docs/reference/esql/esql-language.asciidoc +++ b/docs/reference/esql/esql-language.asciidoc @@ -1,11 +1,10 @@ [[esql-language]] -== Learning {esql} - +== {esql} reference ++++ -Learning {esql} +{esql} reference ++++ -Detailed information about the {esql} language: +Detailed reference documentation for the {esql} language: * <> * <> diff --git a/docs/reference/esql/functions/README.md b/docs/reference/esql/functions/README.md index 7be4c70fbe6b0..35b852ba060f1 100644 --- a/docs/reference/esql/functions/README.md +++ b/docs/reference/esql/functions/README.md @@ -5,6 +5,8 @@ The files in these subdirectories are generated by ESQL's test suite: * `signature` - railroad diagram of the syntax to invoke each function * `types` - a table of each combination of support type for each parameter. These are generated from tests. 
* `layout` - a fully generated description for each function +* `kibana/definition` - function definitions for kibana's ESQL editor +* `kibana/docs` - the inline docs for kibana Most functions can use the generated docs generated in the `layout` directory. If we need something more custom for the function we can make a file in this diff --git a/docs/reference/esql/functions/aggregation-functions.asciidoc b/docs/reference/esql/functions/aggregation-functions.asciidoc index c040e7fe01327..2fdc8582d6bfb 100644 --- a/docs/reference/esql/functions/aggregation-functions.asciidoc +++ b/docs/reference/esql/functions/aggregation-functions.asciidoc @@ -29,6 +29,6 @@ include::median.asciidoc[] include::median-absolute-deviation.asciidoc[] include::min.asciidoc[] include::percentile.asciidoc[] -include::st_centroid.asciidoc[] +include::st_centroid_agg.asciidoc[] include::sum.asciidoc[] include::values.asciidoc[] diff --git a/docs/reference/esql/functions/auto_bucket.asciidoc b/docs/reference/esql/functions/bucket.asciidoc similarity index 62% rename from docs/reference/esql/functions/auto_bucket.asciidoc rename to docs/reference/esql/functions/bucket.asciidoc index 651ac168aa83a..e436a79d0ec1e 100644 --- a/docs/reference/esql/functions/auto_bucket.asciidoc +++ b/docs/reference/esql/functions/bucket.asciidoc @@ -1,14 +1,12 @@ [discrete] -[[esql-auto_bucket]] -=== `AUTO_BUCKET` - -experimental::[] +[[esql-bucket]] +=== `BUCKET` *Syntax* [source,esql] ---- -AUTO_BUCKET(expression, buckets, from, to) +BUCKET(expression, buckets, from, to) ---- *Parameters* @@ -28,39 +26,39 @@ End of the range. Can be a number or a date expressed as a string. *Description* Creates human-friendly buckets and returns a value for each row that corresponds -to the resulting bucket the row falls into. +to the resulting bucket the row falls into. 
Using a target number of buckets, a start of a range, and an end of a range, -`AUTO_BUCKET` picks an appropriate bucket size to generate the target number of +`BUCKET` picks an appropriate bucket size to generate the target number of buckets or fewer. For example, asking for at most 20 buckets over a year results in monthly buckets: [source.merge.styled,esql] ---- -include::{esql-specs}/date.csv-spec[tag=docsAutoBucketMonth] +include::{esql-specs}/date.csv-spec[tag=docsBucketMonth] ---- [%header.monospaced.styled,format=dsv,separator=|] |=== -include::{esql-specs}/date.csv-spec[tag=docsAutoBucketMonth-result] +include::{esql-specs}/date.csv-spec[tag=docsBucketMonth-result] |=== The goal isn't to provide *exactly* the target number of buckets, it's to pick a range that people are comfortable with that provides at most the target number of buckets. -Combine `AUTO_BUCKET` with +Combine `BUCKET` with <> to create a histogram: [source.merge.styled,esql] ---- -include::{esql-specs}/date.csv-spec[tag=docsAutoBucketMonthlyHistogram] +include::{esql-specs}/date.csv-spec[tag=docsBucketMonthlyHistogram] ---- [%header.monospaced.styled,format=dsv,separator=|] |=== -include::{esql-specs}/date.csv-spec[tag=docsAutoBucketMonthlyHistogram-result] +include::{esql-specs}/date.csv-spec[tag=docsBucketMonthlyHistogram-result] |=== -NOTE: `AUTO_BUCKET` does not create buckets that don't match any documents. +NOTE: `BUCKET` does not create buckets that don't match any documents. That's why this example is missing `1985-03-01` and other dates. Asking for more buckets can result in a smaller range. 
For example, asking for @@ -68,28 +66,28 @@ at most 100 buckets in a year results in weekly buckets: [source.merge.styled,esql] ---- -include::{esql-specs}/date.csv-spec[tag=docsAutoBucketWeeklyHistogram] +include::{esql-specs}/date.csv-spec[tag=docsBucketWeeklyHistogram] ---- [%header.monospaced.styled,format=dsv,separator=|] |=== -include::{esql-specs}/date.csv-spec[tag=docsAutoBucketWeeklyHistogram-result] +include::{esql-specs}/date.csv-spec[tag=docsBucketWeeklyHistogram-result] |=== -NOTE: `AUTO_BUCKET` does not filter any rows. It only uses the provided range to +NOTE: `BUCKET` does not filter any rows. It only uses the provided range to pick a good bucket size. For rows with a value outside of the range, it returns a bucket value that corresponds to a bucket outside the range. Combine -`AUTO_BUCKET` with <> to filter rows. +`BUCKET` with <> to filter rows. -`AUTO_BUCKET` can also operate on numeric fields. For example, to create a +`BUCKET` can also operate on numeric fields. For example, to create a salary histogram: [source.merge.styled,esql] ---- -include::{esql-specs}/ints.csv-spec[tag=docsAutoBucketNumeric] +include::{esql-specs}/ints.csv-spec[tag=docsBucketNumeric] ---- [%header.monospaced.styled,format=dsv,separator=|] |=== -include::{esql-specs}/ints.csv-spec[tag=docsAutoBucketNumeric-result] +include::{esql-specs}/ints.csv-spec[tag=docsBucketNumeric-result] |=== Unlike the earlier example that intentionally filters on a date range, you @@ -104,7 +102,7 @@ per hour: [source.styled,esql] ---- -include::{esql-specs}/date.csv-spec[tag=docsAutoBucketLast24hr] +include::{esql-specs}/date.csv-spec[tag=docsBucketLast24hr] ---- Create monthly buckets for the year 1985, and calculate the average salary by @@ -112,9 +110,9 @@ hiring month: [source.merge.styled,esql] ---- -include::{esql-specs}/date.csv-spec[tag=auto_bucket_in_agg] +include::{esql-specs}/date.csv-spec[tag=bucket_in_agg] ---- [%header.monospaced.styled,format=dsv,separator=|] |=== 
-include::{esql-specs}/date.csv-spec[tag=auto_bucket_in_agg-result] +include::{esql-specs}/date.csv-spec[tag=bucket_in_agg-result] |=== diff --git a/docs/reference/esql/functions/date-time-functions.asciidoc b/docs/reference/esql/functions/date-time-functions.asciidoc index e9d6628c63894..149bdffb5ef07 100644 --- a/docs/reference/esql/functions/date-time-functions.asciidoc +++ b/docs/reference/esql/functions/date-time-functions.asciidoc @@ -8,7 +8,7 @@ {esql} supports these date-time functions: // tag::date_list[] -* experimental:[] <> +* <> * <> * <> * <> @@ -17,10 +17,10 @@ * <> // end::date_list[] -include::auto_bucket.asciidoc[] +include::bucket.asciidoc[] include::date_diff.asciidoc[] include::date_extract.asciidoc[] include::date_format.asciidoc[] include::date_parse.asciidoc[] -include::date_trunc.asciidoc[] +include::layout/date_trunc.asciidoc[] include::now.asciidoc[] diff --git a/docs/reference/esql/functions/description/auto_bucket.asciidoc b/docs/reference/esql/functions/description/bucket.asciidoc similarity index 100% rename from docs/reference/esql/functions/description/auto_bucket.asciidoc rename to docs/reference/esql/functions/description/bucket.asciidoc diff --git a/docs/reference/esql/functions/description/date_trunc.asciidoc b/docs/reference/esql/functions/description/date_trunc.asciidoc new file mode 100644 index 0000000000000..1fb874e3bd9cd --- /dev/null +++ b/docs/reference/esql/functions/description/date_trunc.asciidoc @@ -0,0 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Description* + +Rounds down a date to the closest interval. diff --git a/docs/reference/esql/functions/description/e.asciidoc b/docs/reference/esql/functions/description/e.asciidoc index 787de53c32ef6..dae6d770bf649 100644 --- a/docs/reference/esql/functions/description/e.asciidoc +++ b/docs/reference/esql/functions/description/e.asciidoc @@ -2,4 +2,4 @@ *Description* -Euler’s number. 
+Returns {wikipedia}/E_(mathematical_constant)[Euler's number]. diff --git a/docs/reference/esql/functions/description/floor.asciidoc b/docs/reference/esql/functions/description/floor.asciidoc index 139b8b57dafb9..f4e60cac3b0c2 100644 --- a/docs/reference/esql/functions/description/floor.asciidoc +++ b/docs/reference/esql/functions/description/floor.asciidoc @@ -3,3 +3,7 @@ *Description* Round a number down to the nearest integer. + +NOTE: This is a noop for `long` (including unsigned) and `integer`. +For `double` this picks the closest `double` value to the integer +similar to {javadoc}/java.base/java/lang/Math.html#floor(double)[Math.floor]. diff --git a/docs/reference/esql/functions/description/locate.asciidoc b/docs/reference/esql/functions/description/locate.asciidoc new file mode 100644 index 0000000000000..60a6d435e37b6 --- /dev/null +++ b/docs/reference/esql/functions/description/locate.asciidoc @@ -0,0 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Description* + +Returns an integer that indicates the position of a keyword substring within another string diff --git a/docs/reference/esql/functions/description/pi.asciidoc b/docs/reference/esql/functions/description/pi.asciidoc index c2b9b737126e1..90930678ded9f 100644 --- a/docs/reference/esql/functions/description/pi.asciidoc +++ b/docs/reference/esql/functions/description/pi.asciidoc @@ -2,4 +2,4 @@ *Description* -The ratio of a circle’s circumference to its diameter. +Returns the {wikipedia}/Pi[ratio] of a circle's circumference to its diameter. diff --git a/docs/reference/esql/functions/description/pow.asciidoc b/docs/reference/esql/functions/description/pow.asciidoc index fd05421eae005..7e6da7bab2e34 100644 --- a/docs/reference/esql/functions/description/pow.asciidoc +++ b/docs/reference/esql/functions/description/pow.asciidoc @@ -2,4 +2,6 @@ *Description* -Returns the value of a base raised to the power of an exponent. 
+Returns the value of `base` raised to the power of `exponent`. + +NOTE: It is still possible to overflow a double result here; in that case, null will be returned. diff --git a/docs/reference/esql/functions/description/round.asciidoc b/docs/reference/esql/functions/description/round.asciidoc index 3dfec1b30565d..aebed74b88485 100644 --- a/docs/reference/esql/functions/description/round.asciidoc +++ b/docs/reference/esql/functions/description/round.asciidoc @@ -2,4 +2,4 @@ *Description* -Rounds a number to the closest number with the specified number of digits. +Rounds a number to the closest number with the specified number of digits. Defaults to 0 digits if no number of digits is provided. If the specified number of digits is negative, rounds to the number of digits left of the decimal point. diff --git a/docs/reference/esql/functions/description/signum.asciidoc b/docs/reference/esql/functions/description/signum.asciidoc new file mode 100644 index 0000000000000..db44c019e247e --- /dev/null +++ b/docs/reference/esql/functions/description/signum.asciidoc @@ -0,0 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Description* + +Returns the sign of the given number. It returns `-1` for negative numbers, `0` for `0` and `1` for positive numbers. diff --git a/docs/reference/esql/functions/description/st_contains.asciidoc b/docs/reference/esql/functions/description/st_contains.asciidoc index ed79fe3d9c1f3..678fde7f5d98b 100644 --- a/docs/reference/esql/functions/description/st_contains.asciidoc +++ b/docs/reference/esql/functions/description/st_contains.asciidoc @@ -3,5 +3,3 @@ *Description* Returns whether the first geometry contains the second geometry. - -NOTE: The second parameter must also have the same coordinate system as the first. This means it is not possible to combine `geo_*` and `cartesian_*` parameters. 
diff --git a/docs/reference/esql/functions/description/st_disjoint.asciidoc b/docs/reference/esql/functions/description/st_disjoint.asciidoc new file mode 100644 index 0000000000000..95ab02a39614a --- /dev/null +++ b/docs/reference/esql/functions/description/st_disjoint.asciidoc @@ -0,0 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Description* + +Returns whether the two geometries or geometry columns are disjoint. diff --git a/docs/reference/esql/functions/description/st_intersects.asciidoc b/docs/reference/esql/functions/description/st_intersects.asciidoc index 3a36d79cbd123..b736ba29a6c8b 100644 --- a/docs/reference/esql/functions/description/st_intersects.asciidoc +++ b/docs/reference/esql/functions/description/st_intersects.asciidoc @@ -3,5 +3,3 @@ *Description* Returns whether the two geometries or geometry columns intersect. - -NOTE: The second parameter must also have the same coordinate system as the first. This means it is not possible to combine `geo_*` and `cartesian_*` parameters. diff --git a/docs/reference/esql/functions/description/st_within.asciidoc b/docs/reference/esql/functions/description/st_within.asciidoc index be52db3f694bf..890f28cb769b0 100644 --- a/docs/reference/esql/functions/description/st_within.asciidoc +++ b/docs/reference/esql/functions/description/st_within.asciidoc @@ -3,5 +3,3 @@ *Description* Returns whether the first geometry is within the second geometry. - -NOTE: The second parameter must also have the same coordinate system as the first. This means it is not possible to combine `geo_*` and `cartesian_*` parameters. 
diff --git a/docs/reference/esql/functions/date_trunc.asciidoc b/docs/reference/esql/functions/examples/date_trunc.asciidoc similarity index 68% rename from docs/reference/esql/functions/date_trunc.asciidoc rename to docs/reference/esql/functions/examples/date_trunc.asciidoc index 4aa228dc14e65..d7cece9aff58b 100644 --- a/docs/reference/esql/functions/date_trunc.asciidoc +++ b/docs/reference/esql/functions/examples/date_trunc.asciidoc @@ -1,26 +1,4 @@ -[discrete] -[[esql-date_trunc]] -=== `DATE_TRUNC` - -*Syntax* - -[source,esql] ----- -DATE_TRUNC(interval, date) ----- - -*Parameters* - -`interval`:: -Interval, expressed using the <>. If `null`, the function returns `null`. - -`date`:: -Date expression. If `null`, the function returns `null`. - -*Description* - -Rounds down a date to the closest interval. +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. *Examples* @@ -32,10 +10,8 @@ include::{esql-specs}/date.csv-spec[tag=docsDateTrunc] |=== include::{esql-specs}/date.csv-spec[tag=docsDateTrunc-result] |=== - Combine `DATE_TRUNC` with <> to create date histograms. 
For example, the number of hires per year: - [source.merge.styled,esql] ---- include::{esql-specs}/date.csv-spec[tag=docsDateTruncHistogram] @@ -44,9 +20,7 @@ include::{esql-specs}/date.csv-spec[tag=docsDateTruncHistogram] |=== include::{esql-specs}/date.csv-spec[tag=docsDateTruncHistogram-result] |=== - Or an hourly error rate: - [source.merge.styled,esql] ---- include::{esql-specs}/conditional.csv-spec[tag=docsCaseHourlyErrorRate] @@ -55,3 +29,4 @@ include::{esql-specs}/conditional.csv-spec[tag=docsCaseHourlyErrorRate] |=== include::{esql-specs}/conditional.csv-spec[tag=docsCaseHourlyErrorRate-result] |=== + diff --git a/docs/reference/esql/functions/e.asciidoc b/docs/reference/esql/functions/examples/e.asciidoc similarity index 51% rename from docs/reference/esql/functions/e.asciidoc rename to docs/reference/esql/functions/examples/e.asciidoc index ac082c1a68a07..509f6b17f53e8 100644 --- a/docs/reference/esql/functions/e.asciidoc +++ b/docs/reference/esql/functions/examples/e.asciidoc @@ -1,15 +1,4 @@ -[discrete] -[[esql-e]] -=== `E` - -*Syntax* - -[.text-center] -image::esql/functions/signature/e.svg[Embedded,opts=inline] - -*Description* - -Returns {wikipedia}/E_(mathematical_constant)[Euler's number]. +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. *Example* @@ -21,3 +10,4 @@ include::{esql-specs}/math.csv-spec[tag=e] |=== include::{esql-specs}/math.csv-spec[tag=e-result] |=== + diff --git a/docs/reference/esql/functions/examples/floor.asciidoc b/docs/reference/esql/functions/examples/floor.asciidoc new file mode 100644 index 0000000000000..ef77c0aa8e346 --- /dev/null +++ b/docs/reference/esql/functions/examples/floor.asciidoc @@ -0,0 +1,13 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. 
+ +*Example* + +[source.merge.styled,esql] +---- +include::{esql-specs}/math.csv-spec[tag=floor] +---- +[%header.monospaced.styled,format=dsv,separator=|] +|=== +include::{esql-specs}/math.csv-spec[tag=floor-result] +|=== + diff --git a/docs/reference/esql/functions/examples/pi.asciidoc b/docs/reference/esql/functions/examples/pi.asciidoc new file mode 100644 index 0000000000000..d15c460993e1b --- /dev/null +++ b/docs/reference/esql/functions/examples/pi.asciidoc @@ -0,0 +1,13 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Example* + +[source.merge.styled,esql] +---- +include::{esql-specs}/math.csv-spec[tag=pi] +---- +[%header.monospaced.styled,format=dsv,separator=|] +|=== +include::{esql-specs}/math.csv-spec[tag=pi-result] +|=== + diff --git a/docs/reference/esql/functions/pow.asciidoc b/docs/reference/esql/functions/examples/pow.asciidoc similarity index 50% rename from docs/reference/esql/functions/pow.asciidoc rename to docs/reference/esql/functions/examples/pow.asciidoc index 6618b728d7da9..5c24a043e606b 100644 --- a/docs/reference/esql/functions/pow.asciidoc +++ b/docs/reference/esql/functions/examples/pow.asciidoc @@ -1,27 +1,4 @@ -[discrete] -[[esql-pow]] -=== `POW` - -*Syntax* - -[.text-center] -image::esql/functions/signature/pow.svg[Embedded,opts=inline] - -*Parameters* - -`base`:: -Numeric expression. If `null`, the function returns `null`. - -`exponent`:: -Numeric expression. If `null`, the function returns `null`. - -*Description* - -Returns the value of `base` raised to the power of `exponent`. Both arguments -must be numeric. The output is always a double. Note that it is still possible -to overflow a double result here; in that case, null will be returned. - -include::types/pow.asciidoc[] +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. 
*Examples* @@ -33,10 +10,8 @@ include::{esql-specs}/math.csv-spec[tag=powDI] |=== include::{esql-specs}/math.csv-spec[tag=powDI-result] |=== - The exponent can be a fraction, which is similar to performing a root. For example, the exponent of `0.5` will give the square root of the base: - [source.merge.styled,esql] ---- include::{esql-specs}/math.csv-spec[tag=powID-sqrt] @@ -45,3 +20,4 @@ include::{esql-specs}/math.csv-spec[tag=powID-sqrt] |=== include::{esql-specs}/math.csv-spec[tag=powID-sqrt-result] |=== + diff --git a/docs/reference/esql/functions/examples/round.asciidoc b/docs/reference/esql/functions/examples/round.asciidoc new file mode 100644 index 0000000000000..c69eefccc6c95 --- /dev/null +++ b/docs/reference/esql/functions/examples/round.asciidoc @@ -0,0 +1,13 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Example* + +[source.merge.styled,esql] +---- +include::{esql-specs}/docs.csv-spec[tag=round] +---- +[%header.monospaced.styled,format=dsv,separator=|] +|=== +include::{esql-specs}/docs.csv-spec[tag=round-result] +|=== + diff --git a/docs/reference/esql/functions/examples/signum.asciidoc b/docs/reference/esql/functions/examples/signum.asciidoc new file mode 100644 index 0000000000000..190c1d0f71136 --- /dev/null +++ b/docs/reference/esql/functions/examples/signum.asciidoc @@ -0,0 +1,13 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. 
+ +*Example* + +[source.merge.styled,esql] +---- +include::{esql-specs}/math.csv-spec[tag=signum] +---- +[%header.monospaced.styled,format=dsv,separator=|] +|=== +include::{esql-specs}/math.csv-spec[tag=signum-result] +|=== + diff --git a/docs/reference/esql/functions/examples/st_disjoint.asciidoc b/docs/reference/esql/functions/examples/st_disjoint.asciidoc new file mode 100644 index 0000000000000..192553e528a24 --- /dev/null +++ b/docs/reference/esql/functions/examples/st_disjoint.asciidoc @@ -0,0 +1,13 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Example* + +[source.merge.styled,esql] +---- +include::{esql-specs}/spatial_shapes.csv-spec[tag=st_disjoint-airport_city_boundaries] +---- +[%header.monospaced.styled,format=dsv,separator=|] +|=== +include::{esql-specs}/spatial_shapes.csv-spec[tag=st_disjoint-airport_city_boundaries-result] +|=== + diff --git a/docs/reference/esql/functions/floor.asciidoc b/docs/reference/esql/functions/floor.asciidoc deleted file mode 100644 index 69d8657d008b2..0000000000000 --- a/docs/reference/esql/functions/floor.asciidoc +++ /dev/null @@ -1,34 +0,0 @@ -[discrete] -[[esql-floor]] -=== `FLOOR` - -*Syntax* - -[.text-center] -image::esql/functions/signature/floor.svg[Embedded,opts=inline] - -*Parameters* - -`n`:: -Numeric expression. If `null`, the function returns `null`. - -*Description* - -Rounds a number down to the nearest integer. - -NOTE: This is a noop for `long` (including unsigned) and `integer`. - For `double` this picks the closest `double` value to the integer - similar to {javadoc}/java.base/java/lang/Math.html#floor(double)[Math.floor]. 
- -include::types/floor.asciidoc[] - -*Example* - -[source.merge.styled,esql] ----- -include::{esql-specs}/math.csv-spec[tag=floor] ----- -[%header.monospaced.styled,format=dsv,separator=|] -|=== -include::{esql-specs}/math.csv-spec[tag=floor-result] -|=== diff --git a/docs/reference/esql/functions/kibana/definition/abs.json b/docs/reference/esql/functions/kibana/definition/abs.json new file mode 100644 index 0000000000000..82c3c205d7512 --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/abs.json @@ -0,0 +1,60 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "abs", + "description" : "Returns the absolute value.", + "signatures" : [ + { + "params" : [ + { + "name" : "number", + "type" : "double", + "optional" : false, + "description" : "Numeric expression. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "number", + "type" : "integer", + "optional" : false, + "description" : "Numeric expression. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "integer" + }, + { + "params" : [ + { + "name" : "number", + "type" : "long", + "optional" : false, + "description" : "Numeric expression. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "long" + }, + { + "params" : [ + { + "name" : "number", + "type" : "unsigned_long", + "optional" : false, + "description" : "Numeric expression. If `null`, the function returns `null`." 
+ } + ], + "variadic" : false, + "returnType" : "unsigned_long" + } + ], + "examples" : [ + "ROW number = -1.0 \n| EVAL abs_number = ABS(number)", + "FROM employees\n| KEEP first_name, last_name, height\n| EVAL abs_height = ABS(0.0 - height)" + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/acos.json b/docs/reference/esql/functions/kibana/definition/acos.json new file mode 100644 index 0000000000000..6a6ab59278639 --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/acos.json @@ -0,0 +1,59 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "acos", + "description" : "Returns the arccosine of `n` as an angle, expressed in radians.", + "signatures" : [ + { + "params" : [ + { + "name" : "number", + "type" : "double", + "optional" : false, + "description" : "Number between -1 and 1. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "number", + "type" : "integer", + "optional" : false, + "description" : "Number between -1 and 1. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "number", + "type" : "long", + "optional" : false, + "description" : "Number between -1 and 1. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "number", + "type" : "unsigned_long", + "optional" : false, + "description" : "Number between -1 and 1. If `null`, the function returns `null`." 
+ } + ], + "variadic" : false, + "returnType" : "double" + } + ], + "examples" : [ + "ROW a=.9\n| EVAL acos=ACOS(a)" + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/asin.json b/docs/reference/esql/functions/kibana/definition/asin.json new file mode 100644 index 0000000000000..f5ebb817fff33 --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/asin.json @@ -0,0 +1,59 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "asin", + "description" : "Returns the arcsine of the input\nnumeric expression as an angle, expressed in radians.", + "signatures" : [ + { + "params" : [ + { + "name" : "number", + "type" : "double", + "optional" : false, + "description" : "Number between -1 and 1. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "number", + "type" : "integer", + "optional" : false, + "description" : "Number between -1 and 1. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "number", + "type" : "long", + "optional" : false, + "description" : "Number between -1 and 1. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "number", + "type" : "unsigned_long", + "optional" : false, + "description" : "Number between -1 and 1. If `null`, the function returns `null`." 
+ } + ], + "variadic" : false, + "returnType" : "double" + } + ], + "examples" : [ + "ROW a=.9\n| EVAL asin=ASIN(a)" + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/atan.json b/docs/reference/esql/functions/kibana/definition/atan.json new file mode 100644 index 0000000000000..654a48b8ca76d --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/atan.json @@ -0,0 +1,59 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "atan", + "description" : "Returns the arctangent of the input\nnumeric expression as an angle, expressed in radians.", + "signatures" : [ + { + "params" : [ + { + "name" : "number", + "type" : "double", + "optional" : false, + "description" : "Numeric expression. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "number", + "type" : "integer", + "optional" : false, + "description" : "Numeric expression. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "number", + "type" : "long", + "optional" : false, + "description" : "Numeric expression. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "number", + "type" : "unsigned_long", + "optional" : false, + "description" : "Numeric expression. If `null`, the function returns `null`." 
+ } + ], + "variadic" : false, + "returnType" : "double" + } + ], + "examples" : [ + "ROW a=12.9\n| EVAL atan=ATAN(a)" + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/atan2.json b/docs/reference/esql/functions/kibana/definition/atan2.json new file mode 100644 index 0000000000000..63940831241f7 --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/atan2.json @@ -0,0 +1,299 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "atan2", + "description" : "The angle between the positive x-axis and the ray from the\norigin to the point (x , y) in the Cartesian plane, expressed in radians.", + "signatures" : [ + { + "params" : [ + { + "name" : "y_coordinate", + "type" : "double", + "optional" : false, + "description" : "y coordinate. If `null`, the function returns `null`." + }, + { + "name" : "x_coordinate", + "type" : "double", + "optional" : false, + "description" : "x coordinate. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "y_coordinate", + "type" : "double", + "optional" : false, + "description" : "y coordinate. If `null`, the function returns `null`." + }, + { + "name" : "x_coordinate", + "type" : "integer", + "optional" : false, + "description" : "x coordinate. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "y_coordinate", + "type" : "double", + "optional" : false, + "description" : "y coordinate. If `null`, the function returns `null`." + }, + { + "name" : "x_coordinate", + "type" : "long", + "optional" : false, + "description" : "x coordinate. If `null`, the function returns `null`." 
+ } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "y_coordinate", + "type" : "double", + "optional" : false, + "description" : "y coordinate. If `null`, the function returns `null`." + }, + { + "name" : "x_coordinate", + "type" : "unsigned_long", + "optional" : false, + "description" : "x coordinate. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "y_coordinate", + "type" : "integer", + "optional" : false, + "description" : "y coordinate. If `null`, the function returns `null`." + }, + { + "name" : "x_coordinate", + "type" : "double", + "optional" : false, + "description" : "x coordinate. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "y_coordinate", + "type" : "integer", + "optional" : false, + "description" : "y coordinate. If `null`, the function returns `null`." + }, + { + "name" : "x_coordinate", + "type" : "integer", + "optional" : false, + "description" : "x coordinate. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "y_coordinate", + "type" : "integer", + "optional" : false, + "description" : "y coordinate. If `null`, the function returns `null`." + }, + { + "name" : "x_coordinate", + "type" : "long", + "optional" : false, + "description" : "x coordinate. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "y_coordinate", + "type" : "integer", + "optional" : false, + "description" : "y coordinate. If `null`, the function returns `null`." + }, + { + "name" : "x_coordinate", + "type" : "unsigned_long", + "optional" : false, + "description" : "x coordinate. If `null`, the function returns `null`." 
+ } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "y_coordinate", + "type" : "long", + "optional" : false, + "description" : "y coordinate. If `null`, the function returns `null`." + }, + { + "name" : "x_coordinate", + "type" : "double", + "optional" : false, + "description" : "x coordinate. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "y_coordinate", + "type" : "long", + "optional" : false, + "description" : "y coordinate. If `null`, the function returns `null`." + }, + { + "name" : "x_coordinate", + "type" : "integer", + "optional" : false, + "description" : "x coordinate. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "y_coordinate", + "type" : "long", + "optional" : false, + "description" : "y coordinate. If `null`, the function returns `null`." + }, + { + "name" : "x_coordinate", + "type" : "long", + "optional" : false, + "description" : "x coordinate. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "y_coordinate", + "type" : "long", + "optional" : false, + "description" : "y coordinate. If `null`, the function returns `null`." + }, + { + "name" : "x_coordinate", + "type" : "unsigned_long", + "optional" : false, + "description" : "x coordinate. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "y_coordinate", + "type" : "unsigned_long", + "optional" : false, + "description" : "y coordinate. If `null`, the function returns `null`." + }, + { + "name" : "x_coordinate", + "type" : "double", + "optional" : false, + "description" : "x coordinate. If `null`, the function returns `null`." 
+ } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "y_coordinate", + "type" : "unsigned_long", + "optional" : false, + "description" : "y coordinate. If `null`, the function returns `null`." + }, + { + "name" : "x_coordinate", + "type" : "integer", + "optional" : false, + "description" : "x coordinate. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "y_coordinate", + "type" : "unsigned_long", + "optional" : false, + "description" : "y coordinate. If `null`, the function returns `null`." + }, + { + "name" : "x_coordinate", + "type" : "long", + "optional" : false, + "description" : "x coordinate. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "y_coordinate", + "type" : "unsigned_long", + "optional" : false, + "description" : "y coordinate. If `null`, the function returns `null`." + }, + { + "name" : "x_coordinate", + "type" : "unsigned_long", + "optional" : false, + "description" : "x coordinate. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + } + ], + "examples" : [ + "ROW y=12.9, x=.6\n| EVAL atan2=ATAN2(y, x)" + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/bucket.json b/docs/reference/esql/functions/kibana/definition/bucket.json new file mode 100644 index 0000000000000..050c334ac7e6e --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/bucket.json @@ -0,0 +1,848 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "bucket", + "description" : "Creates human-friendly buckets and returns a datetime value\nfor each row that corresponds to the resulting bucket the row falls into.", + "signatures" : [ + { + "params" : [ + { + "name" : "field", + "type" : "datetime", + "optional" : false, + "description" : "" + }, + { + "name" : "buckets", + "type" : "integer", + "optional" : false, + "description" : "" + }, + { + "name" : "from", + "type" : "datetime", + "optional" : false, + "description" : "" + }, + { + "name" : "to", + "type" : "datetime", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "datetime" + }, + { + "params" : [ + { + "name" : "field", + "type" : "double", + "optional" : false, + "description" : "" + }, + { + "name" : "buckets", + "type" : "integer", + "optional" : false, + "description" : "" + }, + { + "name" : "from", + "type" : "double", + "optional" : false, + "description" : "" + }, + { + "name" : "to", + "type" : "double", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "field", + "type" : "double", + "optional" : false, + "description" : "" + }, + { + "name" : "buckets", + "type" : "integer", + "optional" : false, + "description" : "" + }, + { + "name" : "from", + "type" : "double", + "optional" : false, + "description" : "" + }, + { + "name" : "to", + "type" : "integer", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "field", + "type" : "double", + "optional" : false, + "description" : "" + }, + { + "name" : "buckets", + "type" : "integer", + "optional" : false, + "description" : "" + }, + { + "name" : "from", + "type" : "double", + "optional" : false, + "description" : "" + }, + { + "name" : "to", + "type" : "long", + "optional" : false, + "description" : "" + } + 
], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "field", + "type" : "double", + "optional" : false, + "description" : "" + }, + { + "name" : "buckets", + "type" : "integer", + "optional" : false, + "description" : "" + }, + { + "name" : "from", + "type" : "integer", + "optional" : false, + "description" : "" + }, + { + "name" : "to", + "type" : "double", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "field", + "type" : "double", + "optional" : false, + "description" : "" + }, + { + "name" : "buckets", + "type" : "integer", + "optional" : false, + "description" : "" + }, + { + "name" : "from", + "type" : "integer", + "optional" : false, + "description" : "" + }, + { + "name" : "to", + "type" : "integer", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "field", + "type" : "double", + "optional" : false, + "description" : "" + }, + { + "name" : "buckets", + "type" : "integer", + "optional" : false, + "description" : "" + }, + { + "name" : "from", + "type" : "integer", + "optional" : false, + "description" : "" + }, + { + "name" : "to", + "type" : "long", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "field", + "type" : "double", + "optional" : false, + "description" : "" + }, + { + "name" : "buckets", + "type" : "integer", + "optional" : false, + "description" : "" + }, + { + "name" : "from", + "type" : "long", + "optional" : false, + "description" : "" + }, + { + "name" : "to", + "type" : "double", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "field", + "type" : "double", + "optional" : false, + "description" : "" + }, + { + "name" : "buckets", + "type" : 
"integer", + "optional" : false, + "description" : "" + }, + { + "name" : "from", + "type" : "long", + "optional" : false, + "description" : "" + }, + { + "name" : "to", + "type" : "integer", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "field", + "type" : "double", + "optional" : false, + "description" : "" + }, + { + "name" : "buckets", + "type" : "integer", + "optional" : false, + "description" : "" + }, + { + "name" : "from", + "type" : "long", + "optional" : false, + "description" : "" + }, + { + "name" : "to", + "type" : "long", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "field", + "type" : "integer", + "optional" : false, + "description" : "" + }, + { + "name" : "buckets", + "type" : "integer", + "optional" : false, + "description" : "" + }, + { + "name" : "from", + "type" : "double", + "optional" : false, + "description" : "" + }, + { + "name" : "to", + "type" : "double", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "field", + "type" : "integer", + "optional" : false, + "description" : "" + }, + { + "name" : "buckets", + "type" : "integer", + "optional" : false, + "description" : "" + }, + { + "name" : "from", + "type" : "double", + "optional" : false, + "description" : "" + }, + { + "name" : "to", + "type" : "integer", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "field", + "type" : "integer", + "optional" : false, + "description" : "" + }, + { + "name" : "buckets", + "type" : "integer", + "optional" : false, + "description" : "" + }, + { + "name" : "from", + "type" : "double", + "optional" : false, + "description" : "" + }, + { + "name" : "to", + "type" : "long", + "optional" : false, 
+ "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "field", + "type" : "integer", + "optional" : false, + "description" : "" + }, + { + "name" : "buckets", + "type" : "integer", + "optional" : false, + "description" : "" + }, + { + "name" : "from", + "type" : "integer", + "optional" : false, + "description" : "" + }, + { + "name" : "to", + "type" : "double", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "field", + "type" : "integer", + "optional" : false, + "description" : "" + }, + { + "name" : "buckets", + "type" : "integer", + "optional" : false, + "description" : "" + }, + { + "name" : "from", + "type" : "integer", + "optional" : false, + "description" : "" + }, + { + "name" : "to", + "type" : "integer", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "field", + "type" : "integer", + "optional" : false, + "description" : "" + }, + { + "name" : "buckets", + "type" : "integer", + "optional" : false, + "description" : "" + }, + { + "name" : "from", + "type" : "integer", + "optional" : false, + "description" : "" + }, + { + "name" : "to", + "type" : "long", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "field", + "type" : "integer", + "optional" : false, + "description" : "" + }, + { + "name" : "buckets", + "type" : "integer", + "optional" : false, + "description" : "" + }, + { + "name" : "from", + "type" : "long", + "optional" : false, + "description" : "" + }, + { + "name" : "to", + "type" : "double", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "field", + "type" : "integer", + "optional" : false, + "description" : "" + }, + { + 
"name" : "buckets", + "type" : "integer", + "optional" : false, + "description" : "" + }, + { + "name" : "from", + "type" : "long", + "optional" : false, + "description" : "" + }, + { + "name" : "to", + "type" : "integer", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "field", + "type" : "integer", + "optional" : false, + "description" : "" + }, + { + "name" : "buckets", + "type" : "integer", + "optional" : false, + "description" : "" + }, + { + "name" : "from", + "type" : "long", + "optional" : false, + "description" : "" + }, + { + "name" : "to", + "type" : "long", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "field", + "type" : "long", + "optional" : false, + "description" : "" + }, + { + "name" : "buckets", + "type" : "integer", + "optional" : false, + "description" : "" + }, + { + "name" : "from", + "type" : "double", + "optional" : false, + "description" : "" + }, + { + "name" : "to", + "type" : "double", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "field", + "type" : "long", + "optional" : false, + "description" : "" + }, + { + "name" : "buckets", + "type" : "integer", + "optional" : false, + "description" : "" + }, + { + "name" : "from", + "type" : "double", + "optional" : false, + "description" : "" + }, + { + "name" : "to", + "type" : "integer", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "field", + "type" : "long", + "optional" : false, + "description" : "" + }, + { + "name" : "buckets", + "type" : "integer", + "optional" : false, + "description" : "" + }, + { + "name" : "from", + "type" : "double", + "optional" : false, + "description" : "" + }, + { + "name" : "to", + "type" : 
"long", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "field", + "type" : "long", + "optional" : false, + "description" : "" + }, + { + "name" : "buckets", + "type" : "integer", + "optional" : false, + "description" : "" + }, + { + "name" : "from", + "type" : "integer", + "optional" : false, + "description" : "" + }, + { + "name" : "to", + "type" : "double", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "field", + "type" : "long", + "optional" : false, + "description" : "" + }, + { + "name" : "buckets", + "type" : "integer", + "optional" : false, + "description" : "" + }, + { + "name" : "from", + "type" : "integer", + "optional" : false, + "description" : "" + }, + { + "name" : "to", + "type" : "integer", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "field", + "type" : "long", + "optional" : false, + "description" : "" + }, + { + "name" : "buckets", + "type" : "integer", + "optional" : false, + "description" : "" + }, + { + "name" : "from", + "type" : "integer", + "optional" : false, + "description" : "" + }, + { + "name" : "to", + "type" : "long", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "field", + "type" : "long", + "optional" : false, + "description" : "" + }, + { + "name" : "buckets", + "type" : "integer", + "optional" : false, + "description" : "" + }, + { + "name" : "from", + "type" : "long", + "optional" : false, + "description" : "" + }, + { + "name" : "to", + "type" : "double", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "field", + "type" : "long", + "optional" : false, + "description" : "" + 
}, + { + "name" : "buckets", + "type" : "integer", + "optional" : false, + "description" : "" + }, + { + "name" : "from", + "type" : "long", + "optional" : false, + "description" : "" + }, + { + "name" : "to", + "type" : "integer", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "field", + "type" : "long", + "optional" : false, + "description" : "" + }, + { + "name" : "buckets", + "type" : "integer", + "optional" : false, + "description" : "" + }, + { + "name" : "from", + "type" : "long", + "optional" : false, + "description" : "" + }, + { + "name" : "to", + "type" : "long", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + } + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/case.json b/docs/reference/esql/functions/kibana/definition/case.json new file mode 100644 index 0000000000000..73bc215ac6ade --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/case.json @@ -0,0 +1,32 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "case", + "description" : "Accepts pairs of conditions and values.\nThe function returns the value that belongs to the first condition that evaluates to true.", + "signatures" : [ + { + "params" : [ + { + "name" : "condition", + "type" : "boolean", + "optional" : false, + "description" : "" + }, + { + "name" : "trueValue", + "type" : "keyword", + "optional" : false, + "description" : "" + }, + { + "name" : "falseValue", + "type" : "keyword", + "optional" : true, + "description" : "" + } + ], + "variadic" : true, + "returnType" : "keyword" + } + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/ceil.json b/docs/reference/esql/functions/kibana/definition/ceil.json new file mode 100644 index 0000000000000..b8ac9ad55f31a --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/ceil.json @@ -0,0 +1,60 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "ceil", + "description" : "Round a number up to the nearest integer.", + "note" : "This is a noop for `long` (including unsigned) and `integer`. For `double` this picks the closest `double` value to the integer similar to Math.ceil.", + "signatures" : [ + { + "params" : [ + { + "name" : "number", + "type" : "double", + "optional" : false, + "description" : "Numeric expression. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "number", + "type" : "integer", + "optional" : false, + "description" : "Numeric expression. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "integer" + }, + { + "params" : [ + { + "name" : "number", + "type" : "long", + "optional" : false, + "description" : "Numeric expression. If `null`, the function returns `null`." 
+ } + ], + "variadic" : false, + "returnType" : "long" + }, + { + "params" : [ + { + "name" : "number", + "type" : "unsigned_long", + "optional" : false, + "description" : "Numeric expression. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "unsigned_long" + } + ], + "examples" : [ + "ROW a=1.8\n| EVAL a=CEIL(a)" + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/coalesce.json b/docs/reference/esql/functions/kibana/definition/coalesce.json new file mode 100644 index 0000000000000..87feead06d091 --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/coalesce.json @@ -0,0 +1,161 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "coalesce", + "description" : "Returns the first of its arguments that is not null. If all arguments are null, it returns `null`.", + "signatures" : [ + { + "params" : [ + { + "name" : "first", + "type" : "boolean", + "optional" : false, + "description" : "Expression to evaluate" + } + ], + "variadic" : true, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "first", + "type" : "boolean", + "optional" : false, + "description" : "Expression to evaluate" + }, + { + "name" : "rest", + "type" : "boolean", + "optional" : true, + "description" : "Other expression to evaluate" + } + ], + "variadic" : true, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "first", + "type" : "integer", + "optional" : false, + "description" : "Expression to evaluate" + } + ], + "variadic" : true, + "returnType" : "integer" + }, + { + "params" : [ + { + "name" : "first", + "type" : "integer", + "optional" : false, + "description" : "Expression to evaluate" + }, + { + "name" : "rest", + "type" : "integer", + "optional" : true, + "description" : "Other expression to evaluate" + } + ], + "variadic" : true, + "returnType" : "integer" + }, + { + "params" : [ + 
{ + "name" : "first", + "type" : "keyword", + "optional" : false, + "description" : "Expression to evaluate" + } + ], + "variadic" : true, + "returnType" : "keyword" + }, + { + "params" : [ + { + "name" : "first", + "type" : "keyword", + "optional" : false, + "description" : "Expression to evaluate" + }, + { + "name" : "rest", + "type" : "keyword", + "optional" : true, + "description" : "Other expression to evaluate" + } + ], + "variadic" : true, + "returnType" : "keyword" + }, + { + "params" : [ + { + "name" : "first", + "type" : "long", + "optional" : false, + "description" : "Expression to evaluate" + } + ], + "variadic" : true, + "returnType" : "long" + }, + { + "params" : [ + { + "name" : "first", + "type" : "long", + "optional" : false, + "description" : "Expression to evaluate" + }, + { + "name" : "rest", + "type" : "long", + "optional" : true, + "description" : "Other expression to evaluate" + } + ], + "variadic" : true, + "returnType" : "long" + }, + { + "params" : [ + { + "name" : "first", + "type" : "text", + "optional" : false, + "description" : "Expression to evaluate" + } + ], + "variadic" : true, + "returnType" : "text" + }, + { + "params" : [ + { + "name" : "first", + "type" : "text", + "optional" : false, + "description" : "Expression to evaluate" + }, + { + "name" : "rest", + "type" : "text", + "optional" : true, + "description" : "Other expression to evaluate" + } + ], + "variadic" : true, + "returnType" : "text" + } + ], + "examples" : [ + "ROW a=null, b=\"b\"\n| EVAL COALESCE(a, b)" + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/concat.json b/docs/reference/esql/functions/kibana/definition/concat.json new file mode 100644 index 0000000000000..bb1b84f67aff9 --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/concat.json @@ -0,0 +1,44 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "concat", + "description" : "Concatenates two or more strings.", + "signatures" : [ + { + "params" : [ + { + "name" : "string1", + "type" : "keyword", + "optional" : false, + "description" : "" + }, + { + "name" : "string2", + "type" : "keyword", + "optional" : false, + "description" : "" + } + ], + "variadic" : true, + "returnType" : "keyword" + }, + { + "params" : [ + { + "name" : "string1", + "type" : "text", + "optional" : false, + "description" : "" + }, + { + "name" : "string2", + "type" : "text", + "optional" : false, + "description" : "" + } + ], + "variadic" : true, + "returnType" : "keyword" + } + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/cos.json b/docs/reference/esql/functions/kibana/definition/cos.json new file mode 100644 index 0000000000000..c7757fbd4071d --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/cos.json @@ -0,0 +1,59 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "cos", + "description" : "Returns the cosine of an angle.", + "signatures" : [ + { + "params" : [ + { + "name" : "angle", + "type" : "double", + "optional" : false, + "description" : "An angle, in radians. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "angle", + "type" : "integer", + "optional" : false, + "description" : "An angle, in radians. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "angle", + "type" : "long", + "optional" : false, + "description" : "An angle, in radians. If `null`, the function returns `null`." 
+ } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "angle", + "type" : "unsigned_long", + "optional" : false, + "description" : "An angle, in radians. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + } + ], + "examples" : [ + "ROW a=1.8 \n| EVAL cos=COS(a)" + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/cosh.json b/docs/reference/esql/functions/kibana/definition/cosh.json new file mode 100644 index 0000000000000..a34eee15be37e --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/cosh.json @@ -0,0 +1,59 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "cosh", + "description" : "Returns the hyperbolic cosine of an angle.", + "signatures" : [ + { + "params" : [ + { + "name" : "angle", + "type" : "double", + "optional" : false, + "description" : "An angle, in radians. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "angle", + "type" : "integer", + "optional" : false, + "description" : "An angle, in radians. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "angle", + "type" : "long", + "optional" : false, + "description" : "An angle, in radians. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "angle", + "type" : "unsigned_long", + "optional" : false, + "description" : "An angle, in radians. If `null`, the function returns `null`." 
+ } + ], + "variadic" : false, + "returnType" : "double" + } + ], + "examples" : [ + "ROW a=1.8 \n| EVAL cosh=COSH(a)" + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/date_diff.json b/docs/reference/esql/functions/kibana/definition/date_diff.json new file mode 100644 index 0000000000000..aa030ea163709 --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/date_diff.json @@ -0,0 +1,56 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "date_diff", + "description" : "Subtract 2 dates and return their difference in multiples of a unit specified in the 1st argument", + "signatures" : [ + { + "params" : [ + { + "name" : "unit", + "type" : "keyword", + "optional" : false, + "description" : "A valid date unit" + }, + { + "name" : "startTimestamp", + "type" : "datetime", + "optional" : false, + "description" : "A string representing a start timestamp" + }, + { + "name" : "endTimestamp", + "type" : "datetime", + "optional" : false, + "description" : "A string representing an end timestamp" + } + ], + "variadic" : false, + "returnType" : "integer" + }, + { + "params" : [ + { + "name" : "unit", + "type" : "text", + "optional" : false, + "description" : "A valid date unit" + }, + { + "name" : "startTimestamp", + "type" : "datetime", + "optional" : false, + "description" : "A string representing a start timestamp" + }, + { + "name" : "endTimestamp", + "type" : "datetime", + "optional" : false, + "description" : "A string representing an end timestamp" + } + ], + "variadic" : false, + "returnType" : "integer" + } + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/date_extract.json b/docs/reference/esql/functions/kibana/definition/date_extract.json new file mode 100644 index 0000000000000..c5edf5ac14109 --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/date_extract.json @@ -0,0 +1,44 @@ +{ + "comment" 
: "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "date_extract", + "description" : "Extracts parts of a date, like year, month, day, hour.", + "signatures" : [ + { + "params" : [ + { + "name" : "datePart", + "type" : "keyword", + "optional" : false, + "description" : "Part of the date to extract. Can be: aligned_day_of_week_in_month; aligned_day_of_week_in_year; aligned_week_of_month; aligned_week_of_year; ampm_of_day; clock_hour_of_ampm; clock_hour_of_day; day_of_month; day_of_week; day_of_year; epoch_day; era; hour_of_ampm; hour_of_day; instant_seconds; micro_of_day; micro_of_second; milli_of_day; milli_of_second; minute_of_day; minute_of_hour; month_of_year; nano_of_day; nano_of_second; offset_seconds; proleptic_month; second_of_day; second_of_minute; year; or year_of_era." + }, + { + "name" : "date", + "type" : "datetime", + "optional" : false, + "description" : "Date expression" + } + ], + "variadic" : false, + "returnType" : "long" + }, + { + "params" : [ + { + "name" : "datePart", + "type" : "text", + "optional" : false, + "description" : "Part of the date to extract. Can be: aligned_day_of_week_in_month; aligned_day_of_week_in_year; aligned_week_of_month; aligned_week_of_year; ampm_of_day; clock_hour_of_ampm; clock_hour_of_day; day_of_month; day_of_week; day_of_year; epoch_day; era; hour_of_ampm; hour_of_day; instant_seconds; micro_of_day; micro_of_second; milli_of_day; milli_of_second; minute_of_day; minute_of_hour; month_of_year; nano_of_day; nano_of_second; offset_seconds; proleptic_month; second_of_day; second_of_minute; year; or year_of_era." 
+ }, + { + "name" : "date", + "type" : "datetime", + "optional" : false, + "description" : "Date expression" + } + ], + "variadic" : false, + "returnType" : "long" + } + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/date_format.json b/docs/reference/esql/functions/kibana/definition/date_format.json new file mode 100644 index 0000000000000..8807e5d330f84 --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/date_format.json @@ -0,0 +1,44 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "date_format", + "description" : "Returns a string representation of a date, in the provided format.", + "signatures" : [ + { + "params" : [ + { + "name" : "dateFormat", + "type" : "keyword", + "optional" : true, + "description" : "A valid date pattern" + }, + { + "name" : "date", + "type" : "datetime", + "optional" : false, + "description" : "Date expression" + } + ], + "variadic" : false, + "returnType" : "keyword" + }, + { + "params" : [ + { + "name" : "dateFormat", + "type" : "text", + "optional" : true, + "description" : "A valid date pattern" + }, + { + "name" : "date", + "type" : "datetime", + "optional" : false, + "description" : "Date expression" + } + ], + "variadic" : false, + "returnType" : "keyword" + } + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/date_parse.json b/docs/reference/esql/functions/kibana/definition/date_parse.json new file mode 100644 index 0000000000000..85bce19532020 --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/date_parse.json @@ -0,0 +1,62 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "date_parse", + "description" : "Parses a string into a date value", + "signatures" : [ + { + "params" : [ + { + "name" : "datePattern", + "type" : "keyword", + "optional" : true, + "description" : "A valid date pattern" + }, + { + "name" : "dateString", + "type" : "keyword", + "optional" : false, + "description" : "A string representing a date" + } + ], + "variadic" : false, + "returnType" : "datetime" + }, + { + "params" : [ + { + "name" : "datePattern", + "type" : "keyword", + "optional" : true, + "description" : "A valid date pattern" + }, + { + "name" : "dateString", + "type" : "text", + "optional" : false, + "description" : "A string representing a date" + } + ], + "variadic" : false, + "returnType" : "datetime" + }, + { + "params" : [ + { + "name" : "datePattern", + "type" : "text", + "optional" : true, + "description" : "A valid date pattern" + }, + { + "name" : "dateString", + "type" : "text", + "optional" : false, + "description" : "A string representing a date" + } + ], + "variadic" : false, + "returnType" : "datetime" + } + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/date_trunc.json b/docs/reference/esql/functions/kibana/definition/date_trunc.json new file mode 100644 index 0000000000000..3d8658c496529 --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/date_trunc.json @@ -0,0 +1,49 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "date_trunc", + "description" : "Rounds down a date to the closest interval.", + "signatures" : [ + { + "params" : [ + { + "name" : "interval", + "type" : "date_period", + "optional" : false, + "description" : "Interval; expressed using the timespan literal syntax." 
+ }, + { + "name" : "date", + "type" : "datetime", + "optional" : false, + "description" : "Date expression" + } + ], + "variadic" : false, + "returnType" : "datetime" + }, + { + "params" : [ + { + "name" : "interval", + "type" : "time_duration", + "optional" : false, + "description" : "Interval; expressed using the timespan literal syntax." + }, + { + "name" : "date", + "type" : "datetime", + "optional" : false, + "description" : "Date expression" + } + ], + "variadic" : false, + "returnType" : "datetime" + } + ], + "examples" : [ + "FROM employees\n| KEEP first_name, last_name, hire_date\n| EVAL year_hired = DATE_TRUNC(1 year, hire_date)", + "FROM employees\n| EVAL year = DATE_TRUNC(1 year, hire_date)\n| STATS hires = COUNT(emp_no) BY year\n| SORT year", + "FROM sample_data\n| EVAL error = CASE(message LIKE \"*error*\", 1, 0)\n| EVAL hour = DATE_TRUNC(1 hour, @timestamp)\n| STATS error_rate = AVG(error) by hour\n| SORT hour" + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/e.json b/docs/reference/esql/functions/kibana/definition/e.json new file mode 100644 index 0000000000000..6bd0197797e59 --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/e.json @@ -0,0 +1,15 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "e", + "description" : "Returns Euler's number.", + "signatures" : [ + { + "params" : [ ], + "returnType" : "double" + } + ], + "examples" : [ + "ROW E()" + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/ends_with.json b/docs/reference/esql/functions/kibana/definition/ends_with.json new file mode 100644 index 0000000000000..66f4c7404905c --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/ends_with.json @@ -0,0 +1,44 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "ends_with", + "description" : "Returns a boolean that indicates whether a keyword string ends with another string", + "signatures" : [ + { + "params" : [ + { + "name" : "str", + "type" : "keyword", + "optional" : false, + "description" : "" + }, + { + "name" : "suffix", + "type" : "keyword", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "str", + "type" : "text", + "optional" : false, + "description" : "" + }, + { + "name" : "suffix", + "type" : "text", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "boolean" + } + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/floor.json b/docs/reference/esql/functions/kibana/definition/floor.json new file mode 100644 index 0000000000000..b61588559a928 --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/floor.json @@ -0,0 +1,60 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "floor", + "description" : "Round a number down to the nearest integer.", + "note" : "This is a noop for `long` (including unsigned) and `integer`.\nFor `double` this picks the closest `double` value to the integer\nsimilar to Math.floor.", + "signatures" : [ + { + "params" : [ + { + "name" : "number", + "type" : "double", + "optional" : false, + "description" : "Numeric expression. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "number", + "type" : "integer", + "optional" : false, + "description" : "Numeric expression. If `null`, the function returns `null`." 
+ } + ], + "variadic" : false, + "returnType" : "integer" + }, + { + "params" : [ + { + "name" : "number", + "type" : "long", + "optional" : false, + "description" : "Numeric expression. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "long" + }, + { + "params" : [ + { + "name" : "number", + "type" : "unsigned_long", + "optional" : false, + "description" : "Numeric expression. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "unsigned_long" + } + ], + "examples" : [ + "ROW a=1.8\n| EVAL a=FLOOR(a)" + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/greatest.json b/docs/reference/esql/functions/kibana/definition/greatest.json new file mode 100644 index 0000000000000..f72f54708c6b1 --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/greatest.json @@ -0,0 +1,212 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "greatest", + "description" : "Returns the maximum value from many columns.", + "signatures" : [ + { + "params" : [ + { + "name" : "first", + "type" : "boolean", + "optional" : false, + "description" : "" + } + ], + "variadic" : true, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "first", + "type" : "boolean", + "optional" : false, + "description" : "" + }, + { + "name" : "rest", + "type" : "boolean", + "optional" : true, + "description" : "" + } + ], + "variadic" : true, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "first", + "type" : "double", + "optional" : false, + "description" : "" + }, + { + "name" : "rest", + "type" : "double", + "optional" : true, + "description" : "" + } + ], + "variadic" : true, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "first", + "type" : "integer", + "optional" : false, + "description" : "" + } + ], + "variadic" : true, + "returnType" : 
"integer" + }, + { + "params" : [ + { + "name" : "first", + "type" : "integer", + "optional" : false, + "description" : "" + }, + { + "name" : "rest", + "type" : "integer", + "optional" : true, + "description" : "" + } + ], + "variadic" : true, + "returnType" : "integer" + }, + { + "params" : [ + { + "name" : "first", + "type" : "ip", + "optional" : false, + "description" : "" + }, + { + "name" : "rest", + "type" : "ip", + "optional" : true, + "description" : "" + } + ], + "variadic" : true, + "returnType" : "ip" + }, + { + "params" : [ + { + "name" : "first", + "type" : "keyword", + "optional" : false, + "description" : "" + } + ], + "variadic" : true, + "returnType" : "keyword" + }, + { + "params" : [ + { + "name" : "first", + "type" : "keyword", + "optional" : false, + "description" : "" + }, + { + "name" : "rest", + "type" : "keyword", + "optional" : true, + "description" : "" + } + ], + "variadic" : true, + "returnType" : "keyword" + }, + { + "params" : [ + { + "name" : "first", + "type" : "long", + "optional" : false, + "description" : "" + } + ], + "variadic" : true, + "returnType" : "long" + }, + { + "params" : [ + { + "name" : "first", + "type" : "long", + "optional" : false, + "description" : "" + }, + { + "name" : "rest", + "type" : "long", + "optional" : true, + "description" : "" + } + ], + "variadic" : true, + "returnType" : "long" + }, + { + "params" : [ + { + "name" : "first", + "type" : "text", + "optional" : false, + "description" : "" + } + ], + "variadic" : true, + "returnType" : "text" + }, + { + "params" : [ + { + "name" : "first", + "type" : "text", + "optional" : false, + "description" : "" + }, + { + "name" : "rest", + "type" : "text", + "optional" : true, + "description" : "" + } + ], + "variadic" : true, + "returnType" : "text" + }, + { + "params" : [ + { + "name" : "first", + "type" : "version", + "optional" : false, + "description" : "" + }, + { + "name" : "rest", + "type" : "version", + "optional" : true, + "description" : "" + } + ], 
+ "variadic" : true, + "returnType" : "version" + } + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/least.json b/docs/reference/esql/functions/kibana/definition/least.json new file mode 100644 index 0000000000000..66efedc0c9fe5 --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/least.json @@ -0,0 +1,212 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "least", + "description" : "Returns the minimum value from many columns.", + "signatures" : [ + { + "params" : [ + { + "name" : "first", + "type" : "boolean", + "optional" : false, + "description" : "" + } + ], + "variadic" : true, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "first", + "type" : "boolean", + "optional" : false, + "description" : "" + }, + { + "name" : "rest", + "type" : "boolean", + "optional" : true, + "description" : "" + } + ], + "variadic" : true, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "first", + "type" : "double", + "optional" : false, + "description" : "" + }, + { + "name" : "rest", + "type" : "double", + "optional" : true, + "description" : "" + } + ], + "variadic" : true, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "first", + "type" : "integer", + "optional" : false, + "description" : "" + } + ], + "variadic" : true, + "returnType" : "integer" + }, + { + "params" : [ + { + "name" : "first", + "type" : "integer", + "optional" : false, + "description" : "" + }, + { + "name" : "rest", + "type" : "integer", + "optional" : true, + "description" : "" + } + ], + "variadic" : true, + "returnType" : "integer" + }, + { + "params" : [ + { + "name" : "first", + "type" : "ip", + "optional" : false, + "description" : "" + }, + { + "name" : "rest", + "type" : "ip", + "optional" : true, + "description" : "" + } + ], + "variadic" : true, + "returnType" : "ip" + }, + { + "params" : [ + { + 
"name" : "first", + "type" : "keyword", + "optional" : false, + "description" : "" + } + ], + "variadic" : true, + "returnType" : "keyword" + }, + { + "params" : [ + { + "name" : "first", + "type" : "keyword", + "optional" : false, + "description" : "" + }, + { + "name" : "rest", + "type" : "keyword", + "optional" : true, + "description" : "" + } + ], + "variadic" : true, + "returnType" : "keyword" + }, + { + "params" : [ + { + "name" : "first", + "type" : "long", + "optional" : false, + "description" : "" + } + ], + "variadic" : true, + "returnType" : "long" + }, + { + "params" : [ + { + "name" : "first", + "type" : "long", + "optional" : false, + "description" : "" + }, + { + "name" : "rest", + "type" : "long", + "optional" : true, + "description" : "" + } + ], + "variadic" : true, + "returnType" : "long" + }, + { + "params" : [ + { + "name" : "first", + "type" : "text", + "optional" : false, + "description" : "" + } + ], + "variadic" : true, + "returnType" : "text" + }, + { + "params" : [ + { + "name" : "first", + "type" : "text", + "optional" : false, + "description" : "" + }, + { + "name" : "rest", + "type" : "text", + "optional" : true, + "description" : "" + } + ], + "variadic" : true, + "returnType" : "text" + }, + { + "params" : [ + { + "name" : "first", + "type" : "version", + "optional" : false, + "description" : "" + }, + { + "name" : "rest", + "type" : "version", + "optional" : true, + "description" : "" + } + ], + "variadic" : true, + "returnType" : "version" + } + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/left.json b/docs/reference/esql/functions/kibana/definition/left.json new file mode 100644 index 0000000000000..bcda92b887bb0 --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/left.json @@ -0,0 +1,47 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "left", + "description" : "Returns the substring that extracts 'length' chars from 'string' starting from the left.", + "signatures" : [ + { + "params" : [ + { + "name" : "string", + "type" : "keyword", + "optional" : false, + "description" : "The string from which to return a substring." + }, + { + "name" : "length", + "type" : "integer", + "optional" : false, + "description" : "The number of characters to return." + } + ], + "variadic" : false, + "returnType" : "keyword" + }, + { + "params" : [ + { + "name" : "string", + "type" : "text", + "optional" : false, + "description" : "The string from which to return a substring." + }, + { + "name" : "length", + "type" : "integer", + "optional" : false, + "description" : "The number of characters to return." + } + ], + "variadic" : false, + "returnType" : "keyword" + } + ], + "examples" : [ + "FROM employees\n| KEEP last_name\n| EVAL left = LEFT(last_name, 3)\n| SORT last_name ASC\n| LIMIT 5" + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/length.json b/docs/reference/esql/functions/kibana/definition/length.json new file mode 100644 index 0000000000000..a42656b71d471 --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/length.json @@ -0,0 +1,32 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "length", + "description" : "Returns the character length of a string.", + "signatures" : [ + { + "params" : [ + { + "name" : "string", + "type" : "keyword", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "integer" + }, + { + "params" : [ + { + "name" : "string", + "type" : "text", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "integer" + } + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/locate.json b/docs/reference/esql/functions/kibana/definition/locate.json new file mode 100644 index 0000000000000..9629b81820f8a --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/locate.json @@ -0,0 +1,176 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "locate", + "description" : "Returns an integer that indicates the position of a keyword substring within another string", + "signatures" : [ + { + "params" : [ + { + "name" : "string", + "type" : "keyword", + "optional" : false, + "description" : "An input string" + }, + { + "name" : "substring", + "type" : "keyword", + "optional" : false, + "description" : "A substring to locate in the input string" + } + ], + "variadic" : false, + "returnType" : "integer" + }, + { + "params" : [ + { + "name" : "string", + "type" : "keyword", + "optional" : false, + "description" : "An input string" + }, + { + "name" : "substring", + "type" : "keyword", + "optional" : false, + "description" : "A substring to locate in the input string" + }, + { + "name" : "start", + "type" : "integer", + "optional" : true, + "description" : "The start index" + } + ], + "variadic" : false, + "returnType" : "integer" + }, + { + "params" : [ + { + "name" : "string", + "type" : "keyword", + "optional" : false, + "description" : "An input string" + }, + { 
+ "name" : "substring", + "type" : "text", + "optional" : false, + "description" : "A substring to locate in the input string" + } + ], + "variadic" : false, + "returnType" : "integer" + }, + { + "params" : [ + { + "name" : "string", + "type" : "keyword", + "optional" : false, + "description" : "An input string" + }, + { + "name" : "substring", + "type" : "text", + "optional" : false, + "description" : "A substring to locate in the input string" + }, + { + "name" : "start", + "type" : "integer", + "optional" : true, + "description" : "The start index" + } + ], + "variadic" : false, + "returnType" : "integer" + }, + { + "params" : [ + { + "name" : "string", + "type" : "text", + "optional" : false, + "description" : "An input string" + }, + { + "name" : "substring", + "type" : "keyword", + "optional" : false, + "description" : "A substring to locate in the input string" + } + ], + "variadic" : false, + "returnType" : "integer" + }, + { + "params" : [ + { + "name" : "string", + "type" : "text", + "optional" : false, + "description" : "An input string" + }, + { + "name" : "substring", + "type" : "keyword", + "optional" : false, + "description" : "A substring to locate in the input string" + }, + { + "name" : "start", + "type" : "integer", + "optional" : true, + "description" : "The start index" + } + ], + "variadic" : false, + "returnType" : "integer" + }, + { + "params" : [ + { + "name" : "string", + "type" : "text", + "optional" : false, + "description" : "An input string" + }, + { + "name" : "substring", + "type" : "text", + "optional" : false, + "description" : "A substring to locate in the input string" + } + ], + "variadic" : false, + "returnType" : "integer" + }, + { + "params" : [ + { + "name" : "string", + "type" : "text", + "optional" : false, + "description" : "An input string" + }, + { + "name" : "substring", + "type" : "text", + "optional" : false, + "description" : "A substring to locate in the input string" + }, + { + "name" : "start", + "type" : 
"integer", + "optional" : true, + "description" : "The start index" + } + ], + "variadic" : false, + "returnType" : "integer" + } + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/log.json b/docs/reference/esql/functions/kibana/definition/log.json new file mode 100644 index 0000000000000..0edafefc4dd1a --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/log.json @@ -0,0 +1,348 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "log", + "description" : "Returns the logarithm of a value to a base. The input can be any numeric value, the return value is always a double.\n\nLogs of zero, negative numbers, and base of one return `null` as well as a warning.", + "signatures" : [ + { + "params" : [ + { + "name" : "base", + "type" : "double", + "optional" : true, + "description" : "Base of logarithm. If `null`, the function returns `null`. If not provided, this function returns the natural logarithm (base e) of a value." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "base", + "type" : "double", + "optional" : true, + "description" : "Base of logarithm. If `null`, the function returns `null`. If not provided, this function returns the natural logarithm (base e) of a value." + }, + { + "name" : "number", + "type" : "double", + "optional" : false, + "description" : "Numeric expression. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "base", + "type" : "double", + "optional" : true, + "description" : "Base of logarithm. If `null`, the function returns `null`. If not provided, this function returns the natural logarithm (base e) of a value." + }, + { + "name" : "number", + "type" : "integer", + "optional" : false, + "description" : "Numeric expression. If `null`, the function returns `null`." 
+ } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "base", + "type" : "double", + "optional" : true, + "description" : "Base of logarithm. If `null`, the function returns `null`. If not provided, this function returns the natural logarithm (base e) of a value." + }, + { + "name" : "number", + "type" : "long", + "optional" : false, + "description" : "Numeric expression. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "base", + "type" : "double", + "optional" : true, + "description" : "Base of logarithm. If `null`, the function returns `null`. If not provided, this function returns the natural logarithm (base e) of a value." + }, + { + "name" : "number", + "type" : "unsigned_long", + "optional" : false, + "description" : "Numeric expression. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "base", + "type" : "integer", + "optional" : true, + "description" : "Base of logarithm. If `null`, the function returns `null`. If not provided, this function returns the natural logarithm (base e) of a value." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "base", + "type" : "integer", + "optional" : true, + "description" : "Base of logarithm. If `null`, the function returns `null`. If not provided, this function returns the natural logarithm (base e) of a value." + }, + { + "name" : "number", + "type" : "double", + "optional" : false, + "description" : "Numeric expression. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "base", + "type" : "integer", + "optional" : true, + "description" : "Base of logarithm. If `null`, the function returns `null`. 
If not provided, this function returns the natural logarithm (base e) of a value." + }, + { + "name" : "number", + "type" : "integer", + "optional" : false, + "description" : "Numeric expression. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "base", + "type" : "integer", + "optional" : true, + "description" : "Base of logarithm. If `null`, the function returns `null`. If not provided, this function returns the natural logarithm (base e) of a value." + }, + { + "name" : "number", + "type" : "long", + "optional" : false, + "description" : "Numeric expression. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "base", + "type" : "integer", + "optional" : true, + "description" : "Base of logarithm. If `null`, the function returns `null`. If not provided, this function returns the natural logarithm (base e) of a value." + }, + { + "name" : "number", + "type" : "unsigned_long", + "optional" : false, + "description" : "Numeric expression. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "base", + "type" : "long", + "optional" : true, + "description" : "Base of logarithm. If `null`, the function returns `null`. If not provided, this function returns the natural logarithm (base e) of a value." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "base", + "type" : "long", + "optional" : true, + "description" : "Base of logarithm. If `null`, the function returns `null`. If not provided, this function returns the natural logarithm (base e) of a value." + }, + { + "name" : "number", + "type" : "double", + "optional" : false, + "description" : "Numeric expression. If `null`, the function returns `null`." 
+ } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "base", + "type" : "long", + "optional" : true, + "description" : "Base of logarithm. If `null`, the function returns `null`. If not provided, this function returns the natural logarithm (base e) of a value." + }, + { + "name" : "number", + "type" : "integer", + "optional" : false, + "description" : "Numeric expression. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "base", + "type" : "long", + "optional" : true, + "description" : "Base of logarithm. If `null`, the function returns `null`. If not provided, this function returns the natural logarithm (base e) of a value." + }, + { + "name" : "number", + "type" : "long", + "optional" : false, + "description" : "Numeric expression. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "base", + "type" : "long", + "optional" : true, + "description" : "Base of logarithm. If `null`, the function returns `null`. If not provided, this function returns the natural logarithm (base e) of a value." + }, + { + "name" : "number", + "type" : "unsigned_long", + "optional" : false, + "description" : "Numeric expression. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "base", + "type" : "unsigned_long", + "optional" : true, + "description" : "Base of logarithm. If `null`, the function returns `null`. If not provided, this function returns the natural logarithm (base e) of a value." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "base", + "type" : "unsigned_long", + "optional" : true, + "description" : "Base of logarithm. If `null`, the function returns `null`. 
If not provided, this function returns the natural logarithm (base e) of a value." + }, + { + "name" : "number", + "type" : "double", + "optional" : false, + "description" : "Numeric expression. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "base", + "type" : "unsigned_long", + "optional" : true, + "description" : "Base of logarithm. If `null`, the function returns `null`. If not provided, this function returns the natural logarithm (base e) of a value." + }, + { + "name" : "number", + "type" : "integer", + "optional" : false, + "description" : "Numeric expression. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "base", + "type" : "unsigned_long", + "optional" : true, + "description" : "Base of logarithm. If `null`, the function returns `null`. If not provided, this function returns the natural logarithm (base e) of a value." + }, + { + "name" : "number", + "type" : "long", + "optional" : false, + "description" : "Numeric expression. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "base", + "type" : "unsigned_long", + "optional" : true, + "description" : "Base of logarithm. If `null`, the function returns `null`. If not provided, this function returns the natural logarithm (base e) of a value." + }, + { + "name" : "number", + "type" : "unsigned_long", + "optional" : false, + "description" : "Numeric expression. If `null`, the function returns `null`." 
+ } + ], + "variadic" : false, + "returnType" : "double" + } + ], + "examples" : [ + "ROW base = 2.0, value = 8.0\n| EVAL s = LOG(base, value)", + "row value = 100\n| EVAL s = LOG(value);" + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/log10.json b/docs/reference/esql/functions/kibana/definition/log10.json new file mode 100644 index 0000000000000..ca506b0df33e2 --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/log10.json @@ -0,0 +1,59 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "log10", + "description" : "Returns the logarithm of a value to base 10. The input can be any numeric value, the return value is always a double.\n\nLogs of 0 and negative numbers return `null` as well as a warning.", + "signatures" : [ + { + "params" : [ + { + "name" : "number", + "type" : "double", + "optional" : false, + "description" : "Numeric expression. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "number", + "type" : "integer", + "optional" : false, + "description" : "Numeric expression. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "number", + "type" : "long", + "optional" : false, + "description" : "Numeric expression. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "number", + "type" : "unsigned_long", + "optional" : false, + "description" : "Numeric expression. If `null`, the function returns `null`." 
+ } + ], + "variadic" : false, + "returnType" : "double" + } + ], + "examples" : [ + "ROW d = 1000.0 \n| EVAL s = LOG10(d)" + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/ltrim.json b/docs/reference/esql/functions/kibana/definition/ltrim.json new file mode 100644 index 0000000000000..bcf51f6b9e9fb --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/ltrim.json @@ -0,0 +1,32 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "ltrim", + "description" : "Removes leading whitespaces from a string.", + "signatures" : [ + { + "params" : [ + { + "name" : "string", + "type" : "keyword", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "keyword" + }, + { + "params" : [ + { + "name" : "string", + "type" : "text", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "text" + } + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/mv_avg.json b/docs/reference/esql/functions/kibana/definition/mv_avg.json new file mode 100644 index 0000000000000..2fa14f0c91d51 --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/mv_avg.json @@ -0,0 +1,56 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "mv_avg", + "description" : "Converts a multivalued field into a single valued field containing the average of all of the values.", + "signatures" : [ + { + "params" : [ + { + "name" : "number", + "type" : "double", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "number", + "type" : "integer", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "number", + "type" : "long", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "number", + "type" : "unsigned_long", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + } + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/mv_concat.json b/docs/reference/esql/functions/kibana/definition/mv_concat.json new file mode 100644 index 0000000000000..1f6936857bcff --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/mv_concat.json @@ -0,0 +1,80 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "mv_concat", + "description" : "Reduce a multivalued string field to a single valued field by concatenating all values.", + "signatures" : [ + { + "params" : [ + { + "name" : "string", + "type" : "keyword", + "optional" : false, + "description" : "values to join" + }, + { + "name" : "delim", + "type" : "keyword", + "optional" : false, + "description" : "delimiter" + } + ], + "variadic" : false, + "returnType" : "keyword" + }, + { + "params" : [ + { + "name" : "string", + "type" : "keyword", + "optional" : false, + "description" : "values to join" + }, + { + "name" : "delim", + "type" : "text", + "optional" : false, + "description" : "delimiter" + } + ], + "variadic" : false, + "returnType" : "keyword" + }, + { + "params" : [ + { + "name" : "string", + "type" : "text", + "optional" : false, + "description" : "values to join" + }, + { + "name" : "delim", + "type" : "keyword", + "optional" : false, + "description" : "delimiter" + } + ], + "variadic" : false, + "returnType" : "keyword" + }, + { + "params" : [ + { + "name" : "string", + "type" : "text", + "optional" : false, + "description" : "values to join" + }, + { + "name" : "delim", + "type" : "text", + "optional" : false, + "description" : "delimiter" + } + ], + "variadic" : false, + "returnType" : "keyword" + } + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/mv_count.json b/docs/reference/esql/functions/kibana/definition/mv_count.json new file mode 100644 index 0000000000000..d27821451899b --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/mv_count.json @@ -0,0 +1,176 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "mv_count", + "description" : "Reduce a multivalued field to a single valued field containing the count of values.", + "signatures" : [ + { + "params" : [ + { + "name" : "field", + "type" : "boolean", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "integer" + }, + { + "params" : [ + { + "name" : "field", + "type" : "cartesian_point", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "integer" + }, + { + "params" : [ + { + "name" : "field", + "type" : "cartesian_shape", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "integer" + }, + { + "params" : [ + { + "name" : "field", + "type" : "datetime", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "integer" + }, + { + "params" : [ + { + "name" : "field", + "type" : "double", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "integer" + }, + { + "params" : [ + { + "name" : "field", + "type" : "geo_point", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "integer" + }, + { + "params" : [ + { + "name" : "field", + "type" : "geo_shape", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "integer" + }, + { + "params" : [ + { + "name" : "field", + "type" : "integer", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "integer" + }, + { + "params" : [ + { + "name" : "field", + "type" : "ip", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "integer" + }, + { + "params" : [ + { + "name" : "field", + "type" : "keyword", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "integer" + }, + { + "params" : [ + { + "name" : "field", + "type" : "long", + 
"optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "integer" + }, + { + "params" : [ + { + "name" : "field", + "type" : "text", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "integer" + }, + { + "params" : [ + { + "name" : "field", + "type" : "unsigned_long", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "integer" + }, + { + "params" : [ + { + "name" : "field", + "type" : "version", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "integer" + } + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/mv_dedupe.json b/docs/reference/esql/functions/kibana/definition/mv_dedupe.json new file mode 100644 index 0000000000000..c0f02d9febc42 --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/mv_dedupe.json @@ -0,0 +1,116 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "mv_dedupe", + "description" : "Remove duplicate values from a multivalued field.", + "signatures" : [ + { + "params" : [ + { + "name" : "field", + "type" : "boolean", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "field", + "type" : "datetime", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "datetime" + }, + { + "params" : [ + { + "name" : "field", + "type" : "double", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "field", + "type" : "integer", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "integer" + }, + { + "params" : [ + { + "name" : "field", + "type" : "ip", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "ip" + }, + { + "params" : [ + { + "name" : "field", + "type" : "keyword", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "keyword" + }, + { + "params" : [ + { + "name" : "field", + "type" : "long", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "long" + }, + { + "params" : [ + { + "name" : "field", + "type" : "text", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "text" + }, + { + "params" : [ + { + "name" : "field", + "type" : "version", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "version" + } + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/mv_first.json b/docs/reference/esql/functions/kibana/definition/mv_first.json new file mode 100644 index 0000000000000..d73b3ae002be3 --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/mv_first.json @@ -0,0 +1,176 @@ +{ 
+ "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "mv_first", + "description" : "Reduce a multivalued field to a single valued field containing the first value.", + "signatures" : [ + { + "params" : [ + { + "name" : "field", + "type" : "boolean", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "field", + "type" : "cartesian_point", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "cartesian_point" + }, + { + "params" : [ + { + "name" : "field", + "type" : "cartesian_shape", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "cartesian_shape" + }, + { + "params" : [ + { + "name" : "field", + "type" : "datetime", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "datetime" + }, + { + "params" : [ + { + "name" : "field", + "type" : "double", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "field", + "type" : "geo_point", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "geo_point" + }, + { + "params" : [ + { + "name" : "field", + "type" : "geo_shape", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "geo_shape" + }, + { + "params" : [ + { + "name" : "field", + "type" : "integer", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "integer" + }, + { + "params" : [ + { + "name" : "field", + "type" : "ip", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "ip" + }, + { + "params" : [ + { + "name" : "field", + "type" : "keyword", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, 
+ "returnType" : "keyword" + }, + { + "params" : [ + { + "name" : "field", + "type" : "long", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "long" + }, + { + "params" : [ + { + "name" : "field", + "type" : "text", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "text" + }, + { + "params" : [ + { + "name" : "field", + "type" : "unsigned_long", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "unsigned_long" + }, + { + "params" : [ + { + "name" : "field", + "type" : "version", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "version" + } + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/mv_last.json b/docs/reference/esql/functions/kibana/definition/mv_last.json new file mode 100644 index 0000000000000..0484bfa0b488b --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/mv_last.json @@ -0,0 +1,176 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "mv_last", + "description" : "Reduce a multivalued field to a single valued field containing the last value.", + "signatures" : [ + { + "params" : [ + { + "name" : "field", + "type" : "boolean", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "field", + "type" : "cartesian_point", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "cartesian_point" + }, + { + "params" : [ + { + "name" : "field", + "type" : "cartesian_shape", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "cartesian_shape" + }, + { + "params" : [ + { + "name" : "field", + "type" : "datetime", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "datetime" + }, + { + "params" : [ + { + "name" : "field", + "type" : "double", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "field", + "type" : "geo_point", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "geo_point" + }, + { + "params" : [ + { + "name" : "field", + "type" : "geo_shape", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "geo_shape" + }, + { + "params" : [ + { + "name" : "field", + "type" : "integer", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "integer" + }, + { + "params" : [ + { + "name" : "field", + "type" : "ip", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "ip" + }, + { + "params" : [ + { + "name" : "field", + "type" : "keyword", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "keyword" + }, + { + "params" : [ + { + "name" : "field", + "type" : 
"long", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "long" + }, + { + "params" : [ + { + "name" : "field", + "type" : "text", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "text" + }, + { + "params" : [ + { + "name" : "field", + "type" : "unsigned_long", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "unsigned_long" + }, + { + "params" : [ + { + "name" : "field", + "type" : "version", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "version" + } + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/mv_max.json b/docs/reference/esql/functions/kibana/definition/mv_max.json new file mode 100644 index 0000000000000..62a6e15f3346a --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/mv_max.json @@ -0,0 +1,128 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "mv_max", + "description" : "Reduce a multivalued field to a single valued field containing the maximum value.", + "signatures" : [ + { + "params" : [ + { + "name" : "field", + "type" : "boolean", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "field", + "type" : "datetime", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "datetime" + }, + { + "params" : [ + { + "name" : "field", + "type" : "double", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "field", + "type" : "integer", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "integer" + }, + { + "params" : [ + { + "name" : "field", + "type" : "ip", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "ip" + }, + { + "params" : [ + { + "name" : "field", + "type" : "keyword", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "keyword" + }, + { + "params" : [ + { + "name" : "field", + "type" : "long", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "long" + }, + { + "params" : [ + { + "name" : "field", + "type" : "text", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "text" + }, + { + "params" : [ + { + "name" : "field", + "type" : "unsigned_long", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "unsigned_long" + }, + { + "params" : [ + { + "name" : "field", + "type" : "version", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "version" + } + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/mv_median.json 
b/docs/reference/esql/functions/kibana/definition/mv_median.json new file mode 100644 index 0000000000000..a6d79f7e6f0a3 --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/mv_median.json @@ -0,0 +1,56 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "mv_median", + "description" : "Converts a multivalued field into a single valued field containing the median value.", + "signatures" : [ + { + "params" : [ + { + "name" : "number", + "type" : "double", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "number", + "type" : "integer", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "integer" + }, + { + "params" : [ + { + "name" : "number", + "type" : "long", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "long" + }, + { + "params" : [ + { + "name" : "number", + "type" : "unsigned_long", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "unsigned_long" + } + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/mv_min.json b/docs/reference/esql/functions/kibana/definition/mv_min.json new file mode 100644 index 0000000000000..8a6f485aedc57 --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/mv_min.json @@ -0,0 +1,128 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "mv_min", + "description" : "Reduce a multivalued field to a single valued field containing the minimum value.", + "signatures" : [ + { + "params" : [ + { + "name" : "field", + "type" : "boolean", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "field", + "type" : "datetime", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "datetime" + }, + { + "params" : [ + { + "name" : "field", + "type" : "double", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "field", + "type" : "integer", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "integer" + }, + { + "params" : [ + { + "name" : "field", + "type" : "ip", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "ip" + }, + { + "params" : [ + { + "name" : "field", + "type" : "keyword", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "keyword" + }, + { + "params" : [ + { + "name" : "field", + "type" : "long", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "long" + }, + { + "params" : [ + { + "name" : "field", + "type" : "text", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "text" + }, + { + "params" : [ + { + "name" : "field", + "type" : "unsigned_long", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "unsigned_long" + }, + { + "params" : [ + { + "name" : "field", + "type" : "version", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "version" + } + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/mv_slice.json 
b/docs/reference/esql/functions/kibana/definition/mv_slice.json new file mode 100644 index 0000000000000..6d3aa873d8d01 --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/mv_slice.json @@ -0,0 +1,320 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "mv_slice", + "description" : "Returns a subset of the multivalued field using the start and end index values.", + "signatures" : [ + { + "params" : [ + { + "name" : "field", + "type" : "boolean", + "optional" : false, + "description" : "A multivalued field" + }, + { + "name" : "start", + "type" : "integer", + "optional" : false, + "description" : "start index" + }, + { + "name" : "end", + "type" : "integer", + "optional" : true, + "description" : "end index (included)" + } + ], + "variadic" : false, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "field", + "type" : "cartesian_point", + "optional" : false, + "description" : "A multivalued field" + }, + { + "name" : "start", + "type" : "integer", + "optional" : false, + "description" : "start index" + }, + { + "name" : "end", + "type" : "integer", + "optional" : true, + "description" : "end index (included)" + } + ], + "variadic" : false, + "returnType" : "cartesian_point" + }, + { + "params" : [ + { + "name" : "field", + "type" : "cartesian_shape", + "optional" : false, + "description" : "A multivalued field" + }, + { + "name" : "start", + "type" : "integer", + "optional" : false, + "description" : "start index" + }, + { + "name" : "end", + "type" : "integer", + "optional" : true, + "description" : "end index (included)" + } + ], + "variadic" : false, + "returnType" : "cartesian_shape" + }, + { + "params" : [ + { + "name" : "field", + "type" : "datetime", + "optional" : false, + "description" : "A multivalued field" + }, + { + "name" : "start", + "type" : "integer", + "optional" : false, + "description" : "start index" + 
}, + { + "name" : "end", + "type" : "integer", + "optional" : true, + "description" : "end index (included)" + } + ], + "variadic" : false, + "returnType" : "datetime" + }, + { + "params" : [ + { + "name" : "field", + "type" : "double", + "optional" : false, + "description" : "A multivalued field" + }, + { + "name" : "start", + "type" : "integer", + "optional" : false, + "description" : "start index" + }, + { + "name" : "end", + "type" : "integer", + "optional" : true, + "description" : "end index (included)" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "field", + "type" : "geo_point", + "optional" : false, + "description" : "A multivalued field" + }, + { + "name" : "start", + "type" : "integer", + "optional" : false, + "description" : "start index" + }, + { + "name" : "end", + "type" : "integer", + "optional" : true, + "description" : "end index (included)" + } + ], + "variadic" : false, + "returnType" : "geo_point" + }, + { + "params" : [ + { + "name" : "field", + "type" : "geo_shape", + "optional" : false, + "description" : "A multivalued field" + }, + { + "name" : "start", + "type" : "integer", + "optional" : false, + "description" : "start index" + }, + { + "name" : "end", + "type" : "integer", + "optional" : true, + "description" : "end index (included)" + } + ], + "variadic" : false, + "returnType" : "geo_shape" + }, + { + "params" : [ + { + "name" : "field", + "type" : "integer", + "optional" : false, + "description" : "A multivalued field" + }, + { + "name" : "start", + "type" : "integer", + "optional" : false, + "description" : "start index" + }, + { + "name" : "end", + "type" : "integer", + "optional" : true, + "description" : "end index (included)" + } + ], + "variadic" : false, + "returnType" : "integer" + }, + { + "params" : [ + { + "name" : "field", + "type" : "ip", + "optional" : false, + "description" : "A multivalued field" + }, + { + "name" : "start", + "type" : "integer", + "optional" : false, + 
"description" : "start index" + }, + { + "name" : "end", + "type" : "integer", + "optional" : true, + "description" : "end index (included)" + } + ], + "variadic" : false, + "returnType" : "ip" + }, + { + "params" : [ + { + "name" : "field", + "type" : "keyword", + "optional" : false, + "description" : "A multivalued field" + }, + { + "name" : "start", + "type" : "integer", + "optional" : false, + "description" : "start index" + }, + { + "name" : "end", + "type" : "integer", + "optional" : true, + "description" : "end index (included)" + } + ], + "variadic" : false, + "returnType" : "keyword" + }, + { + "params" : [ + { + "name" : "field", + "type" : "long", + "optional" : false, + "description" : "A multivalued field" + }, + { + "name" : "start", + "type" : "integer", + "optional" : false, + "description" : "start index" + }, + { + "name" : "end", + "type" : "integer", + "optional" : true, + "description" : "end index (included)" + } + ], + "variadic" : false, + "returnType" : "long" + }, + { + "params" : [ + { + "name" : "field", + "type" : "text", + "optional" : false, + "description" : "A multivalued field" + }, + { + "name" : "start", + "type" : "integer", + "optional" : false, + "description" : "start index" + }, + { + "name" : "end", + "type" : "integer", + "optional" : true, + "description" : "end index (included)" + } + ], + "variadic" : false, + "returnType" : "text" + }, + { + "params" : [ + { + "name" : "field", + "type" : "version", + "optional" : false, + "description" : "A multivalued field" + }, + { + "name" : "start", + "type" : "integer", + "optional" : false, + "description" : "start index" + }, + { + "name" : "end", + "type" : "integer", + "optional" : true, + "description" : "end index (included)" + } + ], + "variadic" : false, + "returnType" : "version" + } + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/mv_sort.json b/docs/reference/esql/functions/kibana/definition/mv_sort.json new file mode 100644 index 
0000000000000..f647d51a2cfaf --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/mv_sort.json @@ -0,0 +1,170 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "mv_sort", + "description" : "Sorts a multivalued field in lexicographical order.", + "signatures" : [ + { + "params" : [ + { + "name" : "field", + "type" : "boolean", + "optional" : false, + "description" : "A multivalued field" + }, + { + "name" : "order", + "type" : "keyword", + "optional" : true, + "description" : "sort order" + } + ], + "variadic" : false, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "field", + "type" : "datetime", + "optional" : false, + "description" : "A multivalued field" + }, + { + "name" : "order", + "type" : "keyword", + "optional" : true, + "description" : "sort order" + } + ], + "variadic" : false, + "returnType" : "datetime" + }, + { + "params" : [ + { + "name" : "field", + "type" : "double", + "optional" : false, + "description" : "A multivalued field" + }, + { + "name" : "order", + "type" : "keyword", + "optional" : true, + "description" : "sort order" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "field", + "type" : "integer", + "optional" : false, + "description" : "A multivalued field" + }, + { + "name" : "order", + "type" : "keyword", + "optional" : true, + "description" : "sort order" + } + ], + "variadic" : false, + "returnType" : "integer" + }, + { + "params" : [ + { + "name" : "field", + "type" : "ip", + "optional" : false, + "description" : "A multivalued field" + }, + { + "name" : "order", + "type" : "keyword", + "optional" : true, + "description" : "sort order" + } + ], + "variadic" : false, + "returnType" : "ip" + }, + { + "params" : [ + { + "name" : "field", + "type" : "keyword", + "optional" : false, + "description" : "A multivalued field" + }, + { + "name" : 
"order", + "type" : "keyword", + "optional" : true, + "description" : "sort order" + } + ], + "variadic" : false, + "returnType" : "keyword" + }, + { + "params" : [ + { + "name" : "field", + "type" : "long", + "optional" : false, + "description" : "A multivalued field" + }, + { + "name" : "order", + "type" : "keyword", + "optional" : true, + "description" : "sort order" + } + ], + "variadic" : false, + "returnType" : "long" + }, + { + "params" : [ + { + "name" : "field", + "type" : "text", + "optional" : false, + "description" : "A multivalued field" + }, + { + "name" : "order", + "type" : "keyword", + "optional" : true, + "description" : "sort order" + } + ], + "variadic" : false, + "returnType" : "text" + }, + { + "params" : [ + { + "name" : "field", + "type" : "version", + "optional" : false, + "description" : "A multivalued field" + }, + { + "name" : "order", + "type" : "keyword", + "optional" : true, + "description" : "sort order" + } + ], + "variadic" : false, + "returnType" : "version" + } + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/mv_sum.json b/docs/reference/esql/functions/kibana/definition/mv_sum.json new file mode 100644 index 0000000000000..25f687efed675 --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/mv_sum.json @@ -0,0 +1,56 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "mv_sum", + "description" : "Converts a multivalued field into a single valued field containing the sum of all of the values.", + "signatures" : [ + { + "params" : [ + { + "name" : "number", + "type" : "double", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "number", + "type" : "integer", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "integer" + }, + { + "params" : [ + { + "name" : "number", + "type" : "long", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "long" + }, + { + "params" : [ + { + "name" : "number", + "type" : "unsigned_long", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "unsigned_long" + } + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/mv_zip.json b/docs/reference/esql/functions/kibana/definition/mv_zip.json new file mode 100644 index 0000000000000..7fabc0e56f12d --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/mv_zip.json @@ -0,0 +1,56 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "mv_zip", + "description" : "Combines the values from two multivalued fields with a delimiter that joins them together.", + "signatures" : [ + { + "params" : [ + { + "name" : "string1", + "type" : "keyword", + "optional" : false, + "description" : "A multivalued field" + }, + { + "name" : "string2", + "type" : "keyword", + "optional" : false, + "description" : "A multivalued field" + }, + { + "name" : "delim", + "type" : "keyword", + "optional" : true, + "description" : "delimiter" + } + ], + "variadic" : false, + "returnType" : "keyword" + }, + { + "params" : [ + { + "name" : "string1", + "type" : "text", + "optional" : false, + "description" : "A multivalued field" + }, + { + "name" : "string2", + "type" : "text", + "optional" : false, + "description" : "A multivalued field" + }, + { + "name" : "delim", + "type" : "text", + "optional" : true, + "description" : "delimiter" + } + ], + "variadic" : false, + "returnType" : "keyword" + } + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/pi.json b/docs/reference/esql/functions/kibana/definition/pi.json new file mode 100644 index 0000000000000..047067f852c80 --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/pi.json @@ -0,0 +1,15 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "pi", + "description" : "Returns the ratio of a circle's circumference to its diameter.", + "signatures" : [ + { + "params" : [ ], + "returnType" : "double" + } + ], + "examples" : [ + "ROW PI()" + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/pow.json b/docs/reference/esql/functions/kibana/definition/pow.json new file mode 100644 index 0000000000000..bef8dee142b23 --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/pow.json @@ -0,0 +1,301 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "pow", + "description" : "Returns the value of `base` raised to the power of `exponent`.", + "note" : "It is still possible to overflow a double result here; in that case, null will be returned.", + "signatures" : [ + { + "params" : [ + { + "name" : "base", + "type" : "double", + "optional" : false, + "description" : "Numeric expression for the base. If `null`, the function returns `null`." + }, + { + "name" : "exponent", + "type" : "double", + "optional" : false, + "description" : "Numeric expression for the exponent. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "base", + "type" : "double", + "optional" : false, + "description" : "Numeric expression for the base. If `null`, the function returns `null`." + }, + { + "name" : "exponent", + "type" : "integer", + "optional" : false, + "description" : "Numeric expression for the exponent. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "base", + "type" : "double", + "optional" : false, + "description" : "Numeric expression for the base. If `null`, the function returns `null`." 
+ }, + { + "name" : "exponent", + "type" : "long", + "optional" : false, + "description" : "Numeric expression for the exponent. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "base", + "type" : "double", + "optional" : false, + "description" : "Numeric expression for the base. If `null`, the function returns `null`." + }, + { + "name" : "exponent", + "type" : "unsigned_long", + "optional" : false, + "description" : "Numeric expression for the exponent. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "base", + "type" : "integer", + "optional" : false, + "description" : "Numeric expression for the base. If `null`, the function returns `null`." + }, + { + "name" : "exponent", + "type" : "double", + "optional" : false, + "description" : "Numeric expression for the exponent. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "base", + "type" : "integer", + "optional" : false, + "description" : "Numeric expression for the base. If `null`, the function returns `null`." + }, + { + "name" : "exponent", + "type" : "integer", + "optional" : false, + "description" : "Numeric expression for the exponent. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "base", + "type" : "integer", + "optional" : false, + "description" : "Numeric expression for the base. If `null`, the function returns `null`." + }, + { + "name" : "exponent", + "type" : "long", + "optional" : false, + "description" : "Numeric expression for the exponent. If `null`, the function returns `null`." 
+ } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "base", + "type" : "integer", + "optional" : false, + "description" : "Numeric expression for the base. If `null`, the function returns `null`." + }, + { + "name" : "exponent", + "type" : "unsigned_long", + "optional" : false, + "description" : "Numeric expression for the exponent. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "base", + "type" : "long", + "optional" : false, + "description" : "Numeric expression for the base. If `null`, the function returns `null`." + }, + { + "name" : "exponent", + "type" : "double", + "optional" : false, + "description" : "Numeric expression for the exponent. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "base", + "type" : "long", + "optional" : false, + "description" : "Numeric expression for the base. If `null`, the function returns `null`." + }, + { + "name" : "exponent", + "type" : "integer", + "optional" : false, + "description" : "Numeric expression for the exponent. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "base", + "type" : "long", + "optional" : false, + "description" : "Numeric expression for the base. If `null`, the function returns `null`." + }, + { + "name" : "exponent", + "type" : "long", + "optional" : false, + "description" : "Numeric expression for the exponent. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "base", + "type" : "long", + "optional" : false, + "description" : "Numeric expression for the base. If `null`, the function returns `null`." 
+ }, + { + "name" : "exponent", + "type" : "unsigned_long", + "optional" : false, + "description" : "Numeric expression for the exponent. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "base", + "type" : "unsigned_long", + "optional" : false, + "description" : "Numeric expression for the base. If `null`, the function returns `null`." + }, + { + "name" : "exponent", + "type" : "double", + "optional" : false, + "description" : "Numeric expression for the exponent. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "base", + "type" : "unsigned_long", + "optional" : false, + "description" : "Numeric expression for the base. If `null`, the function returns `null`." + }, + { + "name" : "exponent", + "type" : "integer", + "optional" : false, + "description" : "Numeric expression for the exponent. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "base", + "type" : "unsigned_long", + "optional" : false, + "description" : "Numeric expression for the base. If `null`, the function returns `null`." + }, + { + "name" : "exponent", + "type" : "long", + "optional" : false, + "description" : "Numeric expression for the exponent. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "base", + "type" : "unsigned_long", + "optional" : false, + "description" : "Numeric expression for the base. If `null`, the function returns `null`." + }, + { + "name" : "exponent", + "type" : "unsigned_long", + "optional" : false, + "description" : "Numeric expression for the exponent. If `null`, the function returns `null`." 
+ } + ], + "variadic" : false, + "returnType" : "double" + } + ], + "examples" : [ + "ROW base = 2.0, exponent = 2\n| EVAL result = POW(base, exponent)", + "ROW base = 4, exponent = 0.5\n| EVAL s = POW(base, exponent)" + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/replace.json b/docs/reference/esql/functions/kibana/definition/replace.json new file mode 100644 index 0000000000000..cf54b296555a4 --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/replace.json @@ -0,0 +1,200 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "replace", + "description" : "The function substitutes in the string any match of the regular expression with the replacement string.", + "signatures" : [ + { + "params" : [ + { + "name" : "string", + "type" : "keyword", + "optional" : false, + "description" : "" + }, + { + "name" : "regex", + "type" : "keyword", + "optional" : false, + "description" : "" + }, + { + "name" : "newString", + "type" : "keyword", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "keyword" + }, + { + "params" : [ + { + "name" : "string", + "type" : "keyword", + "optional" : false, + "description" : "" + }, + { + "name" : "regex", + "type" : "keyword", + "optional" : false, + "description" : "" + }, + { + "name" : "newString", + "type" : "text", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "keyword" + }, + { + "params" : [ + { + "name" : "string", + "type" : "keyword", + "optional" : false, + "description" : "" + }, + { + "name" : "regex", + "type" : "text", + "optional" : false, + "description" : "" + }, + { + "name" : "newString", + "type" : "keyword", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "keyword" + }, + { + "params" : [ + { + "name" : "string", + "type" : "keyword", + "optional" 
: false, + "description" : "" + }, + { + "name" : "regex", + "type" : "text", + "optional" : false, + "description" : "" + }, + { + "name" : "newString", + "type" : "text", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "keyword" + }, + { + "params" : [ + { + "name" : "string", + "type" : "text", + "optional" : false, + "description" : "" + }, + { + "name" : "regex", + "type" : "keyword", + "optional" : false, + "description" : "" + }, + { + "name" : "newString", + "type" : "keyword", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "keyword" + }, + { + "params" : [ + { + "name" : "string", + "type" : "text", + "optional" : false, + "description" : "" + }, + { + "name" : "regex", + "type" : "keyword", + "optional" : false, + "description" : "" + }, + { + "name" : "newString", + "type" : "text", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "keyword" + }, + { + "params" : [ + { + "name" : "string", + "type" : "text", + "optional" : false, + "description" : "" + }, + { + "name" : "regex", + "type" : "text", + "optional" : false, + "description" : "" + }, + { + "name" : "newString", + "type" : "keyword", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "keyword" + }, + { + "params" : [ + { + "name" : "string", + "type" : "text", + "optional" : false, + "description" : "" + }, + { + "name" : "regex", + "type" : "text", + "optional" : false, + "description" : "" + }, + { + "name" : "newString", + "type" : "text", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "keyword" + } + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/right.json b/docs/reference/esql/functions/kibana/definition/right.json new file mode 100644 index 0000000000000..58d081c3782bf --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/right.json @@ -0,0 +1,44 
@@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "right", + "description" : "Return the substring that extracts length chars from the string starting from the right.", + "signatures" : [ + { + "params" : [ + { + "name" : "string", + "type" : "keyword", + "optional" : false, + "description" : "" + }, + { + "name" : "length", + "type" : "integer", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "keyword" + }, + { + "params" : [ + { + "name" : "string", + "type" : "text", + "optional" : false, + "description" : "" + }, + { + "name" : "length", + "type" : "integer", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "keyword" + } + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/round.json b/docs/reference/esql/functions/kibana/definition/round.json new file mode 100644 index 0000000000000..f8a0c0ce31165 --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/round.json @@ -0,0 +1,113 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "round", + "description" : "Rounds a number to the closest number with the specified number of digits.\nDefaults to 0 digits if no number of digits is provided. If the specified number\nof digits is negative, rounds to the number of digits left of the decimal point.", + "signatures" : [ + { + "params" : [ + { + "name" : "number", + "type" : "double", + "optional" : false, + "description" : "The numeric value to round. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "number", + "type" : "double", + "optional" : false, + "description" : "The numeric value to round. If `null`, the function returns `null`." 
+ }, + { + "name" : "decimals", + "type" : "integer", + "optional" : true, + "description" : "The number of decimal places to round to. Defaults to 0. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "number", + "type" : "integer", + "optional" : false, + "description" : "The numeric value to round. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "integer" + }, + { + "params" : [ + { + "name" : "number", + "type" : "integer", + "optional" : false, + "description" : "The numeric value to round. If `null`, the function returns `null`." + }, + { + "name" : "decimals", + "type" : "integer", + "optional" : true, + "description" : "The number of decimal places to round to. Defaults to 0. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "integer" + }, + { + "params" : [ + { + "name" : "number", + "type" : "long", + "optional" : false, + "description" : "The numeric value to round. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "long" + }, + { + "params" : [ + { + "name" : "number", + "type" : "long", + "optional" : false, + "description" : "The numeric value to round. If `null`, the function returns `null`." + }, + { + "name" : "decimals", + "type" : "integer", + "optional" : true, + "description" : "The number of decimal places to round to. Defaults to 0. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "long" + }, + { + "params" : [ + { + "name" : "number", + "type" : "unsigned_long", + "optional" : false, + "description" : "The numeric value to round. If `null`, the function returns `null`." 
+ } + ], + "variadic" : false, + "returnType" : "unsigned_long" + } + ], + "examples" : [ + "FROM employees\n| KEEP first_name, last_name, height\n| EVAL height_ft = ROUND(height * 3.281, 1)" + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/rtrim.json b/docs/reference/esql/functions/kibana/definition/rtrim.json new file mode 100644 index 0000000000000..586d53a3f84da --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/rtrim.json @@ -0,0 +1,32 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "rtrim", + "description" : "Removes trailing whitespaces from a string.", + "signatures" : [ + { + "params" : [ + { + "name" : "string", + "type" : "keyword", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "keyword" + }, + { + "params" : [ + { + "name" : "string", + "type" : "text", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "text" + } + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/signum.json b/docs/reference/esql/functions/kibana/definition/signum.json new file mode 100644 index 0000000000000..b8343283f457e --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/signum.json @@ -0,0 +1,59 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "signum", + "description" : "Returns the sign of the given number.\nIt returns `-1` for negative numbers, `0` for `0` and `1` for positive numbers.", + "signatures" : [ + { + "params" : [ + { + "name" : "number", + "type" : "double", + "optional" : false, + "description" : "Numeric expression. If `null`, the function returns `null`." 
+ } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "number", + "type" : "integer", + "optional" : false, + "description" : "Numeric expression. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "number", + "type" : "long", + "optional" : false, + "description" : "Numeric expression. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "number", + "type" : "unsigned_long", + "optional" : false, + "description" : "Numeric expression. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + } + ], + "examples" : [ + "ROW d = 100.0\n| EVAL s = SIGNUM(d)" + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/sin.json b/docs/reference/esql/functions/kibana/definition/sin.json new file mode 100644 index 0000000000000..8d092bd0c15a3 --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/sin.json @@ -0,0 +1,59 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "sin", + "description" : "Returns the sine trigonometric function of an angle.", + "signatures" : [ + { + "params" : [ + { + "name" : "angle", + "type" : "double", + "optional" : false, + "description" : "An angle, in radians. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "angle", + "type" : "integer", + "optional" : false, + "description" : "An angle, in radians. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "angle", + "type" : "long", + "optional" : false, + "description" : "An angle, in radians. If `null`, the function returns `null`."
+ } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "angle", + "type" : "unsigned_long", + "optional" : false, + "description" : "An angle, in radians. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + } + ], + "examples" : [ + "ROW a=1.8 \n| EVAL sin=SIN(a)" + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/sinh.json b/docs/reference/esql/functions/kibana/definition/sinh.json new file mode 100644 index 0000000000000..2261b18134f6c --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/sinh.json @@ -0,0 +1,59 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "sinh", + "description" : "Returns the hyperbolic sine of an angle.", + "signatures" : [ + { + "params" : [ + { + "name" : "angle", + "type" : "double", + "optional" : false, + "description" : "An angle, in radians. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "angle", + "type" : "integer", + "optional" : false, + "description" : "An angle, in radians. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "angle", + "type" : "long", + "optional" : false, + "description" : "An angle, in radians. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "angle", + "type" : "unsigned_long", + "optional" : false, + "description" : "An angle, in radians. If `null`, the function returns `null`." 
+ } + ], + "variadic" : false, + "returnType" : "double" + } + ], + "examples" : [ + "ROW a=1.8 \n| EVAL sinh=SINH(a)" + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/split.json b/docs/reference/esql/functions/kibana/definition/split.json new file mode 100644 index 0000000000000..b64def1b813fc --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/split.json @@ -0,0 +1,44 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "split", + "description" : "Split a single valued string into multiple strings.", + "signatures" : [ + { + "params" : [ + { + "name" : "string", + "type" : "keyword", + "optional" : false, + "description" : "" + }, + { + "name" : "delim", + "type" : "keyword", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "keyword" + }, + { + "params" : [ + { + "name" : "string", + "type" : "text", + "optional" : false, + "description" : "" + }, + { + "name" : "delim", + "type" : "text", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "keyword" + } + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/sqrt.json b/docs/reference/esql/functions/kibana/definition/sqrt.json new file mode 100644 index 0000000000000..6036fcfd113f3 --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/sqrt.json @@ -0,0 +1,56 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "sqrt", + "description" : "Returns the square root of a number.", + "signatures" : [ + { + "params" : [ + { + "name" : "number", + "type" : "double", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "number", + "type" : "integer", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "number", + "type" : "long", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "number", + "type" : "unsigned_long", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + } + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/st_contains.json b/docs/reference/esql/functions/kibana/definition/st_contains.json new file mode 100644 index 0000000000000..f4f8003917908 --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/st_contains.json @@ -0,0 +1,155 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "st_contains", + "description" : "Returns whether the first geometry contains the second geometry.", + "signatures" : [ + { + "params" : [ + { + "name" : "geomA", + "type" : "cartesian_point", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + }, + { + "name" : "geomB", + "type" : "cartesian_point", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + } + ], + "variadic" : false, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "geomA", + "type" : "cartesian_point", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + }, + { + "name" : "geomB", + "type" : "cartesian_shape", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + } + ], + "variadic" : false, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "geomA", + "type" : "cartesian_shape", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + }, + { + "name" : "geomB", + "type" : "cartesian_point", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + } + ], + "variadic" : false, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "geomA", + "type" : "cartesian_shape", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + }, + { + "name" : "geomB", + "type" : "cartesian_shape", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + } + ], + "variadic" : false, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "geomA", + "type" : "geo_point", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + }, + { + "name" : "geomB", + "type" : "geo_point", + "optional" : false, + "description" : "Geometry column name or variable of 
geometry type" + } + ], + "variadic" : false, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "geomA", + "type" : "geo_point", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + }, + { + "name" : "geomB", + "type" : "geo_shape", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + } + ], + "variadic" : false, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "geomA", + "type" : "geo_shape", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + }, + { + "name" : "geomB", + "type" : "geo_point", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + } + ], + "variadic" : false, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "geomA", + "type" : "geo_shape", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + }, + { + "name" : "geomB", + "type" : "geo_shape", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + } + ], + "variadic" : false, + "returnType" : "boolean" + } + ], + "examples" : [ + "FROM airport_city_boundaries\n| WHERE ST_CONTAINS(city_boundary, TO_GEOSHAPE(\"POLYGON((109.35 18.3, 109.45 18.3, 109.45 18.4, 109.35 18.4, 109.35 18.3))\"))\n| KEEP abbrev, airport, region, city, city_location" + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/st_disjoint.json b/docs/reference/esql/functions/kibana/definition/st_disjoint.json new file mode 100644 index 0000000000000..98647b63ff18f --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/st_disjoint.json @@ -0,0 +1,155 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "st_disjoint", + "description" : "Returns whether the two geometries or geometry columns are disjoint.", + "signatures" : [ + { + "params" : [ + { + "name" : "geomA", + "type" : "cartesian_point", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + }, + { + "name" : "geomB", + "type" : "cartesian_point", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + } + ], + "variadic" : false, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "geomA", + "type" : "cartesian_point", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + }, + { + "name" : "geomB", + "type" : "cartesian_shape", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + } + ], + "variadic" : false, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "geomA", + "type" : "cartesian_shape", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + }, + { + "name" : "geomB", + "type" : "cartesian_point", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + } + ], + "variadic" : false, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "geomA", + "type" : "cartesian_shape", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + }, + { + "name" : "geomB", + "type" : "cartesian_shape", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + } + ], + "variadic" : false, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "geomA", + "type" : "geo_point", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + }, + { + "name" : "geomB", + "type" : "geo_point", + "optional" : false, + "description" : "Geometry column name or 
variable of geometry type" + } + ], + "variadic" : false, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "geomA", + "type" : "geo_point", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + }, + { + "name" : "geomB", + "type" : "geo_shape", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + } + ], + "variadic" : false, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "geomA", + "type" : "geo_shape", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + }, + { + "name" : "geomB", + "type" : "geo_point", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + } + ], + "variadic" : false, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "geomA", + "type" : "geo_shape", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + }, + { + "name" : "geomB", + "type" : "geo_shape", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + } + ], + "variadic" : false, + "returnType" : "boolean" + } + ], + "examples" : [ + "FROM airport_city_boundaries\n| WHERE ST_DISJOINT(city_boundary, TO_GEOSHAPE(\"POLYGON((-10 -60, 120 -60, 120 60, -10 60, -10 -60))\"))\n| KEEP abbrev, airport, region, city, city_location" + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/st_intersects.json b/docs/reference/esql/functions/kibana/definition/st_intersects.json new file mode 100644 index 0000000000000..ba619fe57ecf5 --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/st_intersects.json @@ -0,0 +1,155 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "st_intersects", + "description" : "Returns whether the two geometries or geometry columns intersect.", + "signatures" : [ + { + "params" : [ + { + "name" : "geomA", + "type" : "cartesian_point", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + }, + { + "name" : "geomB", + "type" : "cartesian_point", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + } + ], + "variadic" : false, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "geomA", + "type" : "cartesian_point", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + }, + { + "name" : "geomB", + "type" : "cartesian_shape", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + } + ], + "variadic" : false, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "geomA", + "type" : "cartesian_shape", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + }, + { + "name" : "geomB", + "type" : "cartesian_point", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + } + ], + "variadic" : false, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "geomA", + "type" : "cartesian_shape", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + }, + { + "name" : "geomB", + "type" : "cartesian_shape", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + } + ], + "variadic" : false, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "geomA", + "type" : "geo_point", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + }, + { + "name" : "geomB", + "type" : "geo_point", + "optional" : false, + "description" : "Geometry column name or variable 
of geometry type" + } + ], + "variadic" : false, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "geomA", + "type" : "geo_point", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + }, + { + "name" : "geomB", + "type" : "geo_shape", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + } + ], + "variadic" : false, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "geomA", + "type" : "geo_shape", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + }, + { + "name" : "geomB", + "type" : "geo_point", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + } + ], + "variadic" : false, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "geomA", + "type" : "geo_shape", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + }, + { + "name" : "geomB", + "type" : "geo_shape", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + } + ], + "variadic" : false, + "returnType" : "boolean" + } + ], + "examples" : [ + "FROM airports\n| WHERE ST_INTERSECTS(location, TO_GEOSHAPE(\"POLYGON((42 14, 43 14, 43 15, 42 15, 42 14))\"))" + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/st_within.json b/docs/reference/esql/functions/kibana/definition/st_within.json new file mode 100644 index 0000000000000..ee98337441ab7 --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/st_within.json @@ -0,0 +1,155 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "st_within", + "description" : "Returns whether the first geometry is within the second geometry.", + "signatures" : [ + { + "params" : [ + { + "name" : "geomA", + "type" : "cartesian_point", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + }, + { + "name" : "geomB", + "type" : "cartesian_point", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + } + ], + "variadic" : false, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "geomA", + "type" : "cartesian_point", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + }, + { + "name" : "geomB", + "type" : "cartesian_shape", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + } + ], + "variadic" : false, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "geomA", + "type" : "cartesian_shape", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + }, + { + "name" : "geomB", + "type" : "cartesian_point", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + } + ], + "variadic" : false, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "geomA", + "type" : "cartesian_shape", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + }, + { + "name" : "geomB", + "type" : "cartesian_shape", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + } + ], + "variadic" : false, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "geomA", + "type" : "geo_point", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + }, + { + "name" : "geomB", + "type" : "geo_point", + "optional" : false, + "description" : "Geometry column name or variable of 
geometry type" + } + ], + "variadic" : false, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "geomA", + "type" : "geo_point", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + }, + { + "name" : "geomB", + "type" : "geo_shape", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + } + ], + "variadic" : false, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "geomA", + "type" : "geo_shape", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + }, + { + "name" : "geomB", + "type" : "geo_point", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + } + ], + "variadic" : false, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "geomA", + "type" : "geo_shape", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + }, + { + "name" : "geomB", + "type" : "geo_shape", + "optional" : false, + "description" : "Geometry column name or variable of geometry type" + } + ], + "variadic" : false, + "returnType" : "boolean" + } + ], + "examples" : [ + "FROM airport_city_boundaries\n| WHERE ST_WITHIN(city_boundary, TO_GEOSHAPE(\"POLYGON((109.1 18.15, 109.6 18.15, 109.6 18.65, 109.1 18.65, 109.1 18.15))\"))\n| KEEP abbrev, airport, region, city, city_location" + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/st_x.json b/docs/reference/esql/functions/kibana/definition/st_x.json new file mode 100644 index 0000000000000..57598b3470e11 --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/st_x.json @@ -0,0 +1,32 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "st_x", + "description" : "Extracts the x-coordinate from a point geometry.", + "signatures" : [ + { + "params" : [ + { + "name" : "point", + "type" : "cartesian_point", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "point", + "type" : "geo_point", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + } + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/st_y.json b/docs/reference/esql/functions/kibana/definition/st_y.json new file mode 100644 index 0000000000000..0dacaa56bb8de --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/st_y.json @@ -0,0 +1,32 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "st_y", + "description" : "Extracts the y-coordinate from a point geometry.", + "signatures" : [ + { + "params" : [ + { + "name" : "point", + "type" : "cartesian_point", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "point", + "type" : "geo_point", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + } + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/starts_with.json b/docs/reference/esql/functions/kibana/definition/starts_with.json new file mode 100644 index 0000000000000..918940d110651 --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/starts_with.json @@ -0,0 +1,44 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "starts_with", + "description" : "Returns a boolean that indicates whether a keyword string starts with another string", + "signatures" : [ + { + "params" : [ + { + "name" : "str", + "type" : "keyword", + "optional" : false, + "description" : "" + }, + { + "name" : "prefix", + "type" : "keyword", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "str", + "type" : "text", + "optional" : false, + "description" : "" + }, + { + "name" : "prefix", + "type" : "text", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "boolean" + } + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/substring.json b/docs/reference/esql/functions/kibana/definition/substring.json new file mode 100644 index 0000000000000..89c62258f4516 --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/substring.json @@ -0,0 +1,56 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "substring", + "description" : "Returns a substring of a string, specified by a start position and an optional length", + "signatures" : [ + { + "params" : [ + { + "name" : "string", + "type" : "keyword", + "optional" : false, + "description" : "" + }, + { + "name" : "start", + "type" : "integer", + "optional" : false, + "description" : "" + }, + { + "name" : "length", + "type" : "integer", + "optional" : true, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "keyword" + }, + { + "params" : [ + { + "name" : "string", + "type" : "text", + "optional" : false, + "description" : "" + }, + { + "name" : "start", + "type" : "integer", + "optional" : false, + "description" : "" + }, + { + "name" : "length", + "type" : "integer", + "optional" : true, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "keyword" + } + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/tan.json b/docs/reference/esql/functions/kibana/definition/tan.json new file mode 100644 index 0000000000000..7498964dc1a2c --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/tan.json @@ -0,0 +1,59 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "tan", + "description" : "Returns the Tangent trigonometric function of an angle.", + "signatures" : [ + { + "params" : [ + { + "name" : "angle", + "type" : "double", + "optional" : false, + "description" : "An angle, in radians. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "angle", + "type" : "integer", + "optional" : false, + "description" : "An angle, in radians. If `null`, the function returns `null`." 
+ } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "angle", + "type" : "long", + "optional" : false, + "description" : "An angle, in radians. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "angle", + "type" : "unsigned_long", + "optional" : false, + "description" : "An angle, in radians. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + } + ], + "examples" : [ + "ROW a=1.8 \n| EVAL tan=TAN(a)" + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/tanh.json b/docs/reference/esql/functions/kibana/definition/tanh.json new file mode 100644 index 0000000000000..507f62d394be3 --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/tanh.json @@ -0,0 +1,59 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "tanh", + "description" : "Returns the Tangent hyperbolic function of an angle.", + "signatures" : [ + { + "params" : [ + { + "name" : "angle", + "type" : "double", + "optional" : false, + "description" : "An angle, in radians. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "angle", + "type" : "integer", + "optional" : false, + "description" : "An angle, in radians. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "angle", + "type" : "long", + "optional" : false, + "description" : "An angle, in radians. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "angle", + "type" : "unsigned_long", + "optional" : false, + "description" : "An angle, in radians. If `null`, the function returns `null`." 
+ } + ], + "variadic" : false, + "returnType" : "double" + } + ], + "examples" : [ + "ROW a=1.8 \n| EVAL tanh=TANH(a)" + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/tau.json b/docs/reference/esql/functions/kibana/definition/tau.json new file mode 100644 index 0000000000000..6ad20f86be4de --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/tau.json @@ -0,0 +1,12 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "tau", + "description" : "The ratio of a circle’s circumference to its radius.", + "signatures" : [ + { + "params" : [ ], + "returnType" : "double" + } + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/to_boolean.json b/docs/reference/esql/functions/kibana/definition/to_boolean.json new file mode 100644 index 0000000000000..314df3f7a4ca9 --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/to_boolean.json @@ -0,0 +1,92 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "to_boolean", + "description" : "Converts an input value to a boolean value.", + "signatures" : [ + { + "params" : [ + { + "name" : "field", + "type" : "boolean", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "field", + "type" : "double", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "field", + "type" : "integer", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "field", + "type" : "keyword", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "field", + "type" : "long", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "field", + "type" : "text", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "field", + "type" : "unsigned_long", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "boolean" + } + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/to_cartesianpoint.json b/docs/reference/esql/functions/kibana/definition/to_cartesianpoint.json new file mode 100644 index 0000000000000..59b0c0b38f850 --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/to_cartesianpoint.json @@ -0,0 +1,44 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "to_cartesianpoint", + "description" : "Converts an input value to a point value.", + "signatures" : [ + { + "params" : [ + { + "name" : "field", + "type" : "cartesian_point", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "cartesian_point" + }, + { + "params" : [ + { + "name" : "field", + "type" : "keyword", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "cartesian_point" + }, + { + "params" : [ + { + "name" : "field", + "type" : "text", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "cartesian_point" + } + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/to_cartesianshape.json b/docs/reference/esql/functions/kibana/definition/to_cartesianshape.json new file mode 100644 index 0000000000000..75c1f05bd7738 --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/to_cartesianshape.json @@ -0,0 +1,56 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "to_cartesianshape", + "description" : "Converts an input value to a shape value.", + "signatures" : [ + { + "params" : [ + { + "name" : "field", + "type" : "cartesian_point", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "cartesian_shape" + }, + { + "params" : [ + { + "name" : "field", + "type" : "cartesian_shape", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "cartesian_shape" + }, + { + "params" : [ + { + "name" : "field", + "type" : "keyword", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "cartesian_shape" + }, + { + "params" : [ + { + "name" : "field", + "type" : "text", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "cartesian_shape" + } + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/to_datetime.json b/docs/reference/esql/functions/kibana/definition/to_datetime.json new file mode 100644 index 0000000000000..e2b10e54f4a29 --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/to_datetime.json @@ -0,0 +1,92 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "to_datetime", + "description" : "Converts an input value to a date value.", + "signatures" : [ + { + "params" : [ + { + "name" : "field", + "type" : "datetime", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "datetime" + }, + { + "params" : [ + { + "name" : "field", + "type" : "double", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "datetime" + }, + { + "params" : [ + { + "name" : "field", + "type" : "integer", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "datetime" + }, + { + "params" : [ + { + "name" : "field", + "type" : "keyword", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "datetime" + }, + { + "params" : [ + { + "name" : "field", + "type" : "long", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "datetime" + }, + { + "params" : [ + { + "name" : "field", + "type" : "text", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "datetime" + }, + { + "params" : [ + { + "name" : "field", + "type" : "unsigned_long", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "datetime" + } + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/to_degrees.json b/docs/reference/esql/functions/kibana/definition/to_degrees.json new file mode 100644 index 0000000000000..7652254fcebe1 --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/to_degrees.json @@ -0,0 +1,56 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "to_degrees", + "description" : "Converts a number in radians to degrees.", + "signatures" : [ + { + "params" : [ + { + "name" : "number", + "type" : "double", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "number", + "type" : "integer", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "number", + "type" : "long", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "number", + "type" : "unsigned_long", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + } + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/to_double.json b/docs/reference/esql/functions/kibana/definition/to_double.json new file mode 100644 index 0000000000000..7fad85d7be129 --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/to_double.json @@ -0,0 +1,104 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "to_double", + "description" : "Converts an input value to a double value.", + "signatures" : [ + { + "params" : [ + { + "name" : "field", + "type" : "boolean", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "field", + "type" : "datetime", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "field", + "type" : "double", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "field", + "type" : "integer", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "field", + "type" : "keyword", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "field", + "type" : "long", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "field", + "type" : "text", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "field", + "type" : "unsigned_long", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + } + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/to_geopoint.json b/docs/reference/esql/functions/kibana/definition/to_geopoint.json new file mode 100644 index 0000000000000..b8a7ca9b9a19f --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/to_geopoint.json @@ -0,0 +1,44 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "to_geopoint", + "description" : "Converts an input value to a geo_point value.", + "signatures" : [ + { + "params" : [ + { + "name" : "field", + "type" : "geo_point", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "geo_point" + }, + { + "params" : [ + { + "name" : "field", + "type" : "keyword", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "geo_point" + }, + { + "params" : [ + { + "name" : "field", + "type" : "text", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "geo_point" + } + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/to_geoshape.json b/docs/reference/esql/functions/kibana/definition/to_geoshape.json new file mode 100644 index 0000000000000..d3dee5812510c --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/to_geoshape.json @@ -0,0 +1,56 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "to_geoshape", + "description" : "Converts an input value to a geo_shape value.", + "signatures" : [ + { + "params" : [ + { + "name" : "field", + "type" : "geo_point", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "geo_shape" + }, + { + "params" : [ + { + "name" : "field", + "type" : "geo_shape", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "geo_shape" + }, + { + "params" : [ + { + "name" : "field", + "type" : "keyword", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "geo_shape" + }, + { + "params" : [ + { + "name" : "field", + "type" : "text", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "geo_shape" + } + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/to_integer.json b/docs/reference/esql/functions/kibana/definition/to_integer.json new file mode 100644 index 0000000000000..3e8a7897bda7b --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/to_integer.json @@ -0,0 +1,104 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "to_integer", + "description" : "Converts an input value to an integer value.", + "signatures" : [ + { + "params" : [ + { + "name" : "field", + "type" : "boolean", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "integer" + }, + { + "params" : [ + { + "name" : "field", + "type" : "datetime", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "integer" + }, + { + "params" : [ + { + "name" : "field", + "type" : "double", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "integer" + }, + { + "params" : [ + { + "name" : "field", + "type" : "integer", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "integer" + }, + { + "params" : [ + { + "name" : "field", + "type" : "keyword", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "integer" + }, + { + "params" : [ + { + "name" : "field", + "type" : "long", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "integer" + }, + { + "params" : [ + { + "name" : "field", + "type" : "text", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "integer" + }, + { + "params" : [ + { + "name" : "field", + "type" : "unsigned_long", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "integer" + } + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/to_ip.json b/docs/reference/esql/functions/kibana/definition/to_ip.json new file mode 100644 index 0000000000000..f99ef65752559 --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/to_ip.json @@ -0,0 +1,44 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "to_ip", + "description" : "Converts an input string to an IP value.", + "signatures" : [ + { + "params" : [ + { + "name" : "field", + "type" : "ip", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "ip" + }, + { + "params" : [ + { + "name" : "field", + "type" : "keyword", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "ip" + }, + { + "params" : [ + { + "name" : "field", + "type" : "text", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "ip" + } + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/to_long.json b/docs/reference/esql/functions/kibana/definition/to_long.json new file mode 100644 index 0000000000000..56fd5dc83e721 --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/to_long.json @@ -0,0 +1,104 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "to_long", + "description" : "Converts an input value to a long value.", + "signatures" : [ + { + "params" : [ + { + "name" : "field", + "type" : "boolean", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "long" + }, + { + "params" : [ + { + "name" : "field", + "type" : "datetime", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "long" + }, + { + "params" : [ + { + "name" : "field", + "type" : "double", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "long" + }, + { + "params" : [ + { + "name" : "field", + "type" : "integer", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "long" + }, + { + "params" : [ + { + "name" : "field", + "type" : "keyword", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "long" + }, + { + "params" : [ + { + "name" : "field", + "type" : "long", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "long" + }, + { + "params" : [ + { + "name" : "field", + "type" : "text", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "long" + }, + { + "params" : [ + { + "name" : "field", + "type" : "unsigned_long", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "long" + } + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/to_lower.json b/docs/reference/esql/functions/kibana/definition/to_lower.json new file mode 100644 index 0000000000000..4b3121da437ed --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/to_lower.json @@ -0,0 +1,32 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "to_lower", + "description" : "Returns a new string representing the input string converted to lower case.", + "signatures" : [ + { + "params" : [ + { + "name" : "str", + "type" : "keyword", + "optional" : false, + "description" : "The input string" + } + ], + "variadic" : false, + "returnType" : "keyword" + }, + { + "params" : [ + { + "name" : "str", + "type" : "text", + "optional" : false, + "description" : "The input string" + } + ], + "variadic" : false, + "returnType" : "text" + } + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/to_radians.json b/docs/reference/esql/functions/kibana/definition/to_radians.json new file mode 100644 index 0000000000000..8b8fc287318ab --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/to_radians.json @@ -0,0 +1,56 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "to_radians", + "description" : "Converts a number in degrees to radians.", + "signatures" : [ + { + "params" : [ + { + "name" : "number", + "type" : "double", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "number", + "type" : "integer", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "number", + "type" : "long", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "number", + "type" : "unsigned_long", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + } + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/to_string.json b/docs/reference/esql/functions/kibana/definition/to_string.json new file mode 100644 index 
0000000000000..bb77c68bf59e4 --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/to_string.json @@ -0,0 +1,176 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "to_string", + "description" : "Converts a field into a string.", + "signatures" : [ + { + "params" : [ + { + "name" : "field", + "type" : "boolean", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "keyword" + }, + { + "params" : [ + { + "name" : "field", + "type" : "cartesian_point", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "keyword" + }, + { + "params" : [ + { + "name" : "field", + "type" : "cartesian_shape", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "keyword" + }, + { + "params" : [ + { + "name" : "field", + "type" : "datetime", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "keyword" + }, + { + "params" : [ + { + "name" : "field", + "type" : "double", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "keyword" + }, + { + "params" : [ + { + "name" : "field", + "type" : "geo_point", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "keyword" + }, + { + "params" : [ + { + "name" : "field", + "type" : "geo_shape", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "keyword" + }, + { + "params" : [ + { + "name" : "field", + "type" : "integer", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "keyword" + }, + { + "params" : [ + { + "name" : "field", + "type" : "ip", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "keyword" + }, + { + "params" : [ + { + "name" : "field", + "type" : "keyword", + 
"optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "keyword" + }, + { + "params" : [ + { + "name" : "field", + "type" : "long", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "keyword" + }, + { + "params" : [ + { + "name" : "field", + "type" : "text", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "keyword" + }, + { + "params" : [ + { + "name" : "field", + "type" : "unsigned_long", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "keyword" + }, + { + "params" : [ + { + "name" : "field", + "type" : "version", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "keyword" + } + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/to_unsigned_long.json b/docs/reference/esql/functions/kibana/definition/to_unsigned_long.json new file mode 100644 index 0000000000000..923294c19ffba --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/to_unsigned_long.json @@ -0,0 +1,104 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "to_unsigned_long", + "description" : "Converts an input value to an unsigned long value.", + "signatures" : [ + { + "params" : [ + { + "name" : "field", + "type" : "boolean", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "unsigned_long" + }, + { + "params" : [ + { + "name" : "field", + "type" : "datetime", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "unsigned_long" + }, + { + "params" : [ + { + "name" : "field", + "type" : "double", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "unsigned_long" + }, + { + "params" : [ + { + "name" : "field", + "type" : "integer", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "unsigned_long" + }, + { + "params" : [ + { + "name" : "field", + "type" : "keyword", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "unsigned_long" + }, + { + "params" : [ + { + "name" : "field", + "type" : "long", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "unsigned_long" + }, + { + "params" : [ + { + "name" : "field", + "type" : "text", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "unsigned_long" + }, + { + "params" : [ + { + "name" : "field", + "type" : "unsigned_long", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "unsigned_long" + } + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/to_upper.json b/docs/reference/esql/functions/kibana/definition/to_upper.json new file mode 100644 index 0000000000000..d5ecb1f47206f --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/to_upper.json @@ -0,0 +1,32 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "to_upper", + "description" : "Returns a new string representing the input string converted to upper case.", + "signatures" : [ + { + "params" : [ + { + "name" : "str", + "type" : "keyword", + "optional" : false, + "description" : "The input string" + } + ], + "variadic" : false, + "returnType" : "keyword" + }, + { + "params" : [ + { + "name" : "str", + "type" : "text", + "optional" : false, + "description" : "The input string" + } + ], + "variadic" : false, + "returnType" : "text" + } + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/to_version.json b/docs/reference/esql/functions/kibana/definition/to_version.json new file mode 100644 index 0000000000000..6076f8dfd70c0 --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/to_version.json @@ -0,0 +1,44 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "to_version", + "description" : "Converts an input string to a version value.", + "signatures" : [ + { + "params" : [ + { + "name" : "field", + "type" : "keyword", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "version" + }, + { + "params" : [ + { + "name" : "field", + "type" : "text", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "version" + }, + { + "params" : [ + { + "name" : "field", + "type" : "version", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "version" + } + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/trim.json b/docs/reference/esql/functions/kibana/definition/trim.json new file mode 100644 index 0000000000000..8e194df0eb84d --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/trim.json @@ -0,0 +1,32 @@ +{ + "comment" : "This is generated by ESQL's 
AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "trim", + "description" : "Removes leading and trailing whitespaces from a string.", + "signatures" : [ + { + "params" : [ + { + "name" : "string", + "type" : "keyword", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "keyword" + }, + { + "params" : [ + { + "name" : "string", + "type" : "text", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "text" + } + ] +} diff --git a/docs/reference/esql/functions/kibana/docs/abs.md b/docs/reference/esql/functions/kibana/docs/abs.md new file mode 100644 index 0000000000000..9dc2c5c76f4f6 --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/abs.md @@ -0,0 +1,11 @@ + + +### ABS +Returns the absolute value. + +``` +ROW number = -1.0 +| EVAL abs_number = ABS(number) +``` diff --git a/docs/reference/esql/functions/kibana/docs/acos.md b/docs/reference/esql/functions/kibana/docs/acos.md new file mode 100644 index 0000000000000..19ae2522d48b4 --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/acos.md @@ -0,0 +1,11 @@ + + +### ACOS +Returns the {wikipedia}/Inverse_trigonometric_functions[arccosine] of `n` as an angle, expressed in radians. + +``` +ROW a=.9 +| EVAL acos=ACOS(a) +``` diff --git a/docs/reference/esql/functions/kibana/docs/asin.md b/docs/reference/esql/functions/kibana/docs/asin.md new file mode 100644 index 0000000000000..c072ac19b5b92 --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/asin.md @@ -0,0 +1,12 @@ + + +### ASIN +Returns the {wikipedia}/Inverse_trigonometric_functions[arcsine] of the input +numeric expression as an angle, expressed in radians. 
+ +``` +ROW a=.9 +| EVAL asin=ASIN(a) +``` diff --git a/docs/reference/esql/functions/kibana/docs/atan.md b/docs/reference/esql/functions/kibana/docs/atan.md new file mode 100644 index 0000000000000..62686f2fbab2c --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/atan.md @@ -0,0 +1,12 @@ + + +### ATAN +Returns the {wikipedia}/Inverse_trigonometric_functions[arctangent] of the input +numeric expression as an angle, expressed in radians. + +``` +ROW a=12.9 +| EVAL atan=ATAN(a) +``` diff --git a/docs/reference/esql/functions/kibana/docs/atan2.md b/docs/reference/esql/functions/kibana/docs/atan2.md new file mode 100644 index 0000000000000..0000c532236d9 --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/atan2.md @@ -0,0 +1,12 @@ + + +### ATAN2 +The {wikipedia}/Atan2[angle] between the positive x-axis and the ray from the +origin to the point (x , y) in the Cartesian plane, expressed in radians. + +``` +ROW y=12.9, x=.6 +| EVAL atan2=ATAN2(y, x) +``` diff --git a/docs/reference/esql/functions/kibana/docs/bucket.md b/docs/reference/esql/functions/kibana/docs/bucket.md new file mode 100644 index 0000000000000..6ebfe7de5527d --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/bucket.md @@ -0,0 +1,8 @@ + + +### BUCKET +Creates human-friendly buckets and returns a datetime value +for each row that corresponds to the resulting bucket the row falls into. + diff --git a/docs/reference/esql/functions/kibana/docs/case.md b/docs/reference/esql/functions/kibana/docs/case.md new file mode 100644 index 0000000000000..e1494a5c2af8c --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/case.md @@ -0,0 +1,8 @@ + + +### CASE +Accepts pairs of conditions and values. +The function returns the value that belongs to the first condition that evaluates to true. 
+ diff --git a/docs/reference/esql/functions/kibana/docs/ceil.md b/docs/reference/esql/functions/kibana/docs/ceil.md new file mode 100644 index 0000000000000..812b139206c35 --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/ceil.md @@ -0,0 +1,12 @@ + + +### CEIL +Round a number up to the nearest integer. + +``` +ROW a=1.8 +| EVAL a=CEIL(a) +``` +Note: This is a noop for `long` (including unsigned) and `integer`. For `double` this picks the closest `double` value to the integer similar to {javadoc}/java.base/java/lang/Math.html#ceil(double)[Math.ceil]. diff --git a/docs/reference/esql/functions/kibana/docs/coalesce.md b/docs/reference/esql/functions/kibana/docs/coalesce.md new file mode 100644 index 0000000000000..89cca3f3a286a --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/coalesce.md @@ -0,0 +1,11 @@ + + +### COALESCE +Returns the first of its arguments that is not null. If all arguments are null, it returns `null`. + +``` +ROW a=null, b="b" +| EVAL COALESCE(a, b) +``` diff --git a/docs/reference/esql/functions/kibana/docs/concat.md b/docs/reference/esql/functions/kibana/docs/concat.md new file mode 100644 index 0000000000000..9c30d978370dc --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/concat.md @@ -0,0 +1,7 @@ + + +### CONCAT +Concatenates two or more strings. + diff --git a/docs/reference/esql/functions/kibana/docs/cos.md b/docs/reference/esql/functions/kibana/docs/cos.md new file mode 100644 index 0000000000000..9e8abebaddb89 --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/cos.md @@ -0,0 +1,11 @@ + + +### COS +Returns the {wikipedia}/Sine_and_cosine[cosine] of an angle. 
+ +``` +ROW a=1.8 +| EVAL cos=COS(a) +``` diff --git a/docs/reference/esql/functions/kibana/docs/cosh.md b/docs/reference/esql/functions/kibana/docs/cosh.md new file mode 100644 index 0000000000000..b8fae70ae2eed --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/cosh.md @@ -0,0 +1,11 @@ + + +### COSH +Returns the {wikipedia}/Hyperbolic_functions[hyperbolic cosine] of an angle. + +``` +ROW a=1.8 +| EVAL cosh=COSH(a) +``` diff --git a/docs/reference/esql/functions/kibana/docs/date_diff.md b/docs/reference/esql/functions/kibana/docs/date_diff.md new file mode 100644 index 0000000000000..8d33e21d2f92c --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/date_diff.md @@ -0,0 +1,7 @@ + + +### DATE_DIFF +Subtract 2 dates and return their difference in multiples of a unit specified in the 1st argument + diff --git a/docs/reference/esql/functions/kibana/docs/date_extract.md b/docs/reference/esql/functions/kibana/docs/date_extract.md new file mode 100644 index 0000000000000..49eb2391c188e --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/date_extract.md @@ -0,0 +1,7 @@ + + +### DATE_EXTRACT +Extracts parts of a date, like year, month, day, hour. + diff --git a/docs/reference/esql/functions/kibana/docs/date_format.md b/docs/reference/esql/functions/kibana/docs/date_format.md new file mode 100644 index 0000000000000..fbf7fcbf0cb48 --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/date_format.md @@ -0,0 +1,7 @@ + + +### DATE_FORMAT +Returns a string representation of a date, in the provided format. 
+ diff --git a/docs/reference/esql/functions/kibana/docs/date_parse.md b/docs/reference/esql/functions/kibana/docs/date_parse.md new file mode 100644 index 0000000000000..8cf0769c38f3b --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/date_parse.md @@ -0,0 +1,7 @@ + + +### DATE_PARSE +Parses a string into a date value + diff --git a/docs/reference/esql/functions/kibana/docs/date_trunc.md b/docs/reference/esql/functions/kibana/docs/date_trunc.md new file mode 100644 index 0000000000000..6aa81ebbac3c3 --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/date_trunc.md @@ -0,0 +1,12 @@ + + +### DATE_TRUNC +Rounds down a date to the closest interval. + +``` +FROM employees +| KEEP first_name, last_name, hire_date +| EVAL year_hired = DATE_TRUNC(1 year, hire_date) +``` diff --git a/docs/reference/esql/functions/kibana/docs/e.md b/docs/reference/esql/functions/kibana/docs/e.md new file mode 100644 index 0000000000000..7700101b9229e --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/e.md @@ -0,0 +1,10 @@ + + +### E +Returns Euler's number. + +``` +ROW E() +``` diff --git a/docs/reference/esql/functions/kibana/docs/ends_with.md b/docs/reference/esql/functions/kibana/docs/ends_with.md new file mode 100644 index 0000000000000..74f02c732edef --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/ends_with.md @@ -0,0 +1,7 @@ + + +### ENDS_WITH +Returns a boolean that indicates whether a keyword string ends with another string + diff --git a/docs/reference/esql/functions/kibana/docs/floor.md b/docs/reference/esql/functions/kibana/docs/floor.md new file mode 100644 index 0000000000000..116edc2d15ba7 --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/floor.md @@ -0,0 +1,14 @@ + + +### FLOOR +Round a number down to the nearest integer. + +``` +ROW a=1.8 +| EVAL a=FLOOR(a) +``` +Note: This is a noop for `long` (including unsigned) and `integer`. 
+For `double` this picks the closest `double` value to the integer +similar to {javadoc}/java.base/java/lang/Math.html#floor(double)[Math.floor]. diff --git a/docs/reference/esql/functions/kibana/docs/greatest.md b/docs/reference/esql/functions/kibana/docs/greatest.md new file mode 100644 index 0000000000000..3db0c9ed87aa5 --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/greatest.md @@ -0,0 +1,7 @@ + + +### GREATEST +Returns the maximum value from many columns. + diff --git a/docs/reference/esql/functions/kibana/docs/least.md b/docs/reference/esql/functions/kibana/docs/least.md new file mode 100644 index 0000000000000..ff2c19592c8e1 --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/least.md @@ -0,0 +1,7 @@ + + +### LEAST +Returns the minimum value from many columns. + diff --git a/docs/reference/esql/functions/kibana/docs/left.md b/docs/reference/esql/functions/kibana/docs/left.md new file mode 100644 index 0000000000000..73b79f7976512 --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/left.md @@ -0,0 +1,14 @@ + + +### LEFT +Returns the substring that extracts 'length' chars from 'string' starting from the left. + +``` +FROM employees +| KEEP last_name +| EVAL left = LEFT(last_name, 3) +| SORT last_name ASC +| LIMIT 5 +``` diff --git a/docs/reference/esql/functions/kibana/docs/length.md b/docs/reference/esql/functions/kibana/docs/length.md new file mode 100644 index 0000000000000..bb1cefd390c71 --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/length.md @@ -0,0 +1,7 @@ + + +### LENGTH +Returns the character length of a string. 
+ diff --git a/docs/reference/esql/functions/kibana/docs/locate.md b/docs/reference/esql/functions/kibana/docs/locate.md new file mode 100644 index 0000000000000..0b4d4c625c17e --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/locate.md @@ -0,0 +1,7 @@ + + +### LOCATE +Returns an integer that indicates the position of a keyword substring within another string + diff --git a/docs/reference/esql/functions/kibana/docs/log.md b/docs/reference/esql/functions/kibana/docs/log.md new file mode 100644 index 0000000000000..7ac136d31f720 --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/log.md @@ -0,0 +1,13 @@ + + +### LOG +Returns the logarithm of a value to a base. The input can be any numeric value, the return value is always a double. + +Logs of zero, negative numbers, and base of one return `null` as well as a warning. + +``` +ROW base = 2.0, value = 8.0 +| EVAL s = LOG(base, value) +``` diff --git a/docs/reference/esql/functions/kibana/docs/log10.md b/docs/reference/esql/functions/kibana/docs/log10.md new file mode 100644 index 0000000000000..23ec30643e51e --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/log10.md @@ -0,0 +1,13 @@ + + +### LOG10 +Returns the logarithm of a value to base 10. The input can be any numeric value, the return value is always a double. + +Logs of 0 and negative numbers return `null` as well as a warning. + +``` +ROW d = 1000.0 +| EVAL s = LOG10(d) +``` diff --git a/docs/reference/esql/functions/kibana/docs/ltrim.md b/docs/reference/esql/functions/kibana/docs/ltrim.md new file mode 100644 index 0000000000000..33fe7b8da1b6f --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/ltrim.md @@ -0,0 +1,7 @@ + + +### LTRIM +Removes leading whitespaces from a string. 
+ diff --git a/docs/reference/esql/functions/kibana/docs/mv_avg.md b/docs/reference/esql/functions/kibana/docs/mv_avg.md new file mode 100644 index 0000000000000..73636e07fa6e4 --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/mv_avg.md @@ -0,0 +1,7 @@ + + +### MV_AVG +Converts a multivalued field into a single valued field containing the average of all of the values. + diff --git a/docs/reference/esql/functions/kibana/docs/mv_concat.md b/docs/reference/esql/functions/kibana/docs/mv_concat.md new file mode 100644 index 0000000000000..f8092e47aaed0 --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/mv_concat.md @@ -0,0 +1,7 @@ + + +### MV_CONCAT +Reduce a multivalued string field to a single valued field by concatenating all values. + diff --git a/docs/reference/esql/functions/kibana/docs/mv_count.md b/docs/reference/esql/functions/kibana/docs/mv_count.md new file mode 100644 index 0000000000000..ceea555d0d05c --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/mv_count.md @@ -0,0 +1,7 @@ + + +### MV_COUNT +Reduce a multivalued field to a single valued field containing the count of values. + diff --git a/docs/reference/esql/functions/kibana/docs/mv_dedupe.md b/docs/reference/esql/functions/kibana/docs/mv_dedupe.md new file mode 100644 index 0000000000000..6968c4dd9b3a9 --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/mv_dedupe.md @@ -0,0 +1,7 @@ + + +### MV_DEDUPE +Remove duplicate values from a multivalued field. + diff --git a/docs/reference/esql/functions/kibana/docs/mv_first.md b/docs/reference/esql/functions/kibana/docs/mv_first.md new file mode 100644 index 0000000000000..6ed8bb7570a93 --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/mv_first.md @@ -0,0 +1,7 @@ + + +### MV_FIRST +Reduce a multivalued field to a single valued field containing the first value. 
+ diff --git a/docs/reference/esql/functions/kibana/docs/mv_last.md b/docs/reference/esql/functions/kibana/docs/mv_last.md new file mode 100644 index 0000000000000..5b68b84b4393f --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/mv_last.md @@ -0,0 +1,7 @@ + + +### MV_LAST +Reduce a multivalued field to a single valued field containing the last value. + diff --git a/docs/reference/esql/functions/kibana/docs/mv_max.md b/docs/reference/esql/functions/kibana/docs/mv_max.md new file mode 100644 index 0000000000000..acb29f7a592f6 --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/mv_max.md @@ -0,0 +1,7 @@ + + +### MV_MAX +Reduce a multivalued field to a single valued field containing the maximum value. + diff --git a/docs/reference/esql/functions/kibana/docs/mv_median.md b/docs/reference/esql/functions/kibana/docs/mv_median.md new file mode 100644 index 0000000000000..81de2c3b2c689 --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/mv_median.md @@ -0,0 +1,7 @@ + + +### MV_MEDIAN +Converts a multivalued field into a single valued field containing the median value. + diff --git a/docs/reference/esql/functions/kibana/docs/mv_min.md b/docs/reference/esql/functions/kibana/docs/mv_min.md new file mode 100644 index 0000000000000..637211487a972 --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/mv_min.md @@ -0,0 +1,7 @@ + + +### MV_MIN +Reduce a multivalued field to a single valued field containing the minimum value. + diff --git a/docs/reference/esql/functions/kibana/docs/mv_slice.md b/docs/reference/esql/functions/kibana/docs/mv_slice.md new file mode 100644 index 0000000000000..7bbf36f67079d --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/mv_slice.md @@ -0,0 +1,7 @@ + + +### MV_SLICE +Returns a subset of the multivalued field using the start and end index values. 
+ diff --git a/docs/reference/esql/functions/kibana/docs/mv_sort.md b/docs/reference/esql/functions/kibana/docs/mv_sort.md new file mode 100644 index 0000000000000..65a74d0455f4b --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/mv_sort.md @@ -0,0 +1,7 @@ + + +### MV_SORT +Sorts a multivalued field in lexicographical order. + diff --git a/docs/reference/esql/functions/kibana/docs/mv_sum.md b/docs/reference/esql/functions/kibana/docs/mv_sum.md new file mode 100644 index 0000000000000..a2b1bfb8ac481 --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/mv_sum.md @@ -0,0 +1,7 @@ + + +### MV_SUM +Converts a multivalued field into a single valued field containing the sum of all of the values. + diff --git a/docs/reference/esql/functions/kibana/docs/mv_zip.md b/docs/reference/esql/functions/kibana/docs/mv_zip.md new file mode 100644 index 0000000000000..b6de218ecb45b --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/mv_zip.md @@ -0,0 +1,7 @@ + + +### MV_ZIP +Combines the values from two multivalued fields with a delimiter that joins them together. + diff --git a/docs/reference/esql/functions/kibana/docs/pi.md b/docs/reference/esql/functions/kibana/docs/pi.md new file mode 100644 index 0000000000000..3e179cbeae5d7 --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/pi.md @@ -0,0 +1,10 @@ + + +### PI +Returns the {wikipedia}/Pi[ratio] of a circle's circumference to its diameter. + +``` +ROW PI() +``` diff --git a/docs/reference/esql/functions/kibana/docs/pow.md b/docs/reference/esql/functions/kibana/docs/pow.md new file mode 100644 index 0000000000000..d214504ce4b03 --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/pow.md @@ -0,0 +1,12 @@ + + +### POW +Returns the value of `base` raised to the power of `exponent`. + +``` +ROW base = 2.0, exponent = 2 +| EVAL result = POW(base, exponent) +``` +Note: It is still possible to overflow a double result here; in that case, null will be returned. 
diff --git a/docs/reference/esql/functions/kibana/docs/replace.md b/docs/reference/esql/functions/kibana/docs/replace.md new file mode 100644 index 0000000000000..9744a9ad7244b --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/replace.md @@ -0,0 +1,7 @@ + + +### REPLACE +The function substitutes in the string any match of the regular expression with the replacement string. + diff --git a/docs/reference/esql/functions/kibana/docs/right.md b/docs/reference/esql/functions/kibana/docs/right.md new file mode 100644 index 0000000000000..6e211ae079f62 --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/right.md @@ -0,0 +1,7 @@ + + +### RIGHT +Return the substring that extracts length chars from the string starting from the right. + diff --git a/docs/reference/esql/functions/kibana/docs/round.md b/docs/reference/esql/functions/kibana/docs/round.md new file mode 100644 index 0000000000000..f6e7ef5314a77 --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/round.md @@ -0,0 +1,14 @@ + + +### ROUND +Rounds a number to the closest number with the specified number of digits. +Defaults to 0 digits if no number of digits is provided. If the specified number +of digits is negative, rounds to the number of digits left of the decimal point. + +``` +FROM employees +| KEEP first_name, last_name, height +| EVAL height_ft = ROUND(height * 3.281, 1) +``` diff --git a/docs/reference/esql/functions/kibana/docs/rtrim.md b/docs/reference/esql/functions/kibana/docs/rtrim.md new file mode 100644 index 0000000000000..fc5636e40e804 --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/rtrim.md @@ -0,0 +1,7 @@ + + +### RTRIM +Removes trailing whitespaces from a string. 
+ diff --git a/docs/reference/esql/functions/kibana/docs/signum.md b/docs/reference/esql/functions/kibana/docs/signum.md new file mode 100644 index 0000000000000..f2e66b84c69c8 --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/signum.md @@ -0,0 +1,12 @@ + + +### SIGNUM +Returns the sign of the given number. +It returns `-1` for negative numbers, `0` for `0` and `1` for positive numbers. + +``` +ROW d = 100.0 +| EVAL s = SIGNUM(d) +``` diff --git a/docs/reference/esql/functions/kibana/docs/sin.md b/docs/reference/esql/functions/kibana/docs/sin.md new file mode 100644 index 0000000000000..a87b4e4f452af --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/sin.md @@ -0,0 +1,11 @@ + + +### SIN +Returns the {wikipedia}/Sine_and_cosine[Sine] trigonometric function of an angle. + +``` +ROW a=1.8 +| EVAL sin=SIN(a) +``` diff --git a/docs/reference/esql/functions/kibana/docs/sinh.md b/docs/reference/esql/functions/kibana/docs/sinh.md new file mode 100644 index 0000000000000..81e8d9fd473d5 --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/sinh.md @@ -0,0 +1,11 @@ + + +### SINH +Returns the {wikipedia}/Hyperbolic_functions[hyperbolic sine] of an angle. + +``` +ROW a=1.8 +| EVAL sinh=SINH(a) +``` diff --git a/docs/reference/esql/functions/kibana/docs/split.md b/docs/reference/esql/functions/kibana/docs/split.md new file mode 100644 index 0000000000000..d06d8857967f4 --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/split.md @@ -0,0 +1,7 @@ + + +### SPLIT +Split a single valued string into multiple strings. + diff --git a/docs/reference/esql/functions/kibana/docs/sqrt.md b/docs/reference/esql/functions/kibana/docs/sqrt.md new file mode 100644 index 0000000000000..6e52bfed4037b --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/sqrt.md @@ -0,0 +1,7 @@ + + +### SQRT +Returns the square root of a number.
+ diff --git a/docs/reference/esql/functions/kibana/docs/st_contains.md b/docs/reference/esql/functions/kibana/docs/st_contains.md new file mode 100644 index 0000000000000..6e23bb9b0f116 --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/st_contains.md @@ -0,0 +1,12 @@ + + +### ST_CONTAINS +Returns whether the first geometry contains the second geometry. + +``` +FROM airport_city_boundaries +| WHERE ST_CONTAINS(city_boundary, TO_GEOSHAPE("POLYGON((109.35 18.3, 109.45 18.3, 109.45 18.4, 109.35 18.4, 109.35 18.3))")) +| KEEP abbrev, airport, region, city, city_location +``` diff --git a/docs/reference/esql/functions/kibana/docs/st_disjoint.md b/docs/reference/esql/functions/kibana/docs/st_disjoint.md new file mode 100644 index 0000000000000..7cf66b168bd70 --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/st_disjoint.md @@ -0,0 +1,12 @@ + + +### ST_DISJOINT +Returns whether the two geometries or geometry columns are disjoint. + +``` +FROM airport_city_boundaries +| WHERE ST_DISJOINT(city_boundary, TO_GEOSHAPE("POLYGON((-10 -60, 120 -60, 120 60, -10 60, -10 -60))")) +| KEEP abbrev, airport, region, city, city_location +``` diff --git a/docs/reference/esql/functions/kibana/docs/st_intersects.md b/docs/reference/esql/functions/kibana/docs/st_intersects.md new file mode 100644 index 0000000000000..e4db33429dbe3 --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/st_intersects.md @@ -0,0 +1,11 @@ + + +### ST_INTERSECTS +Returns whether the two geometries or geometry columns intersect. 
+ +``` +FROM airports +| WHERE ST_INTERSECTS(location, TO_GEOSHAPE("POLYGON((42 14, 43 14, 43 15, 42 15, 42 14))")) +``` diff --git a/docs/reference/esql/functions/kibana/docs/st_within.md b/docs/reference/esql/functions/kibana/docs/st_within.md new file mode 100644 index 0000000000000..cbb3ae5ee9aca --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/st_within.md @@ -0,0 +1,12 @@ + + +### ST_WITHIN +Returns whether the first geometry is within the second geometry. + +``` +FROM airport_city_boundaries +| WHERE ST_WITHIN(city_boundary, TO_GEOSHAPE("POLYGON((109.1 18.15, 109.6 18.15, 109.6 18.65, 109.1 18.65, 109.1 18.15))")) +| KEEP abbrev, airport, region, city, city_location +``` diff --git a/docs/reference/esql/functions/kibana/docs/st_x.md b/docs/reference/esql/functions/kibana/docs/st_x.md new file mode 100644 index 0000000000000..af2f4de1487cd --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/st_x.md @@ -0,0 +1,7 @@ + + +### ST_X +Extracts the x-coordinate from a point geometry. + diff --git a/docs/reference/esql/functions/kibana/docs/st_y.md b/docs/reference/esql/functions/kibana/docs/st_y.md new file mode 100644 index 0000000000000..575a5bd3c7d33 --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/st_y.md @@ -0,0 +1,7 @@ + + +### ST_Y +Extracts the y-coordinate from a point geometry. 
+ diff --git a/docs/reference/esql/functions/kibana/docs/starts_with.md b/docs/reference/esql/functions/kibana/docs/starts_with.md new file mode 100644 index 0000000000000..5af544c855051 --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/starts_with.md @@ -0,0 +1,7 @@ + + +### STARTS_WITH +Returns a boolean that indicates whether a keyword string starts with another string + diff --git a/docs/reference/esql/functions/kibana/docs/substring.md b/docs/reference/esql/functions/kibana/docs/substring.md new file mode 100644 index 0000000000000..d1d9c696f7813 --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/substring.md @@ -0,0 +1,7 @@ + + +### SUBSTRING +Returns a substring of a string, specified by a start position and an optional length + diff --git a/docs/reference/esql/functions/kibana/docs/tan.md b/docs/reference/esql/functions/kibana/docs/tan.md new file mode 100644 index 0000000000000..edfb4210f7dd2 --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/tan.md @@ -0,0 +1,11 @@ + + +### TAN +Returns the {wikipedia}/Sine_and_cosine[Tangent] trigonometric function of an angle. + +``` +ROW a=1.8 +| EVAL tan=TAN(a) +``` diff --git a/docs/reference/esql/functions/kibana/docs/tanh.md b/docs/reference/esql/functions/kibana/docs/tanh.md new file mode 100644 index 0000000000000..d3d8c7d4e9196 --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/tanh.md @@ -0,0 +1,11 @@ + + +### TANH +Returns the {wikipedia}/Hyperbolic_functions[Tangent] hyperbolic function of an angle. + +``` +ROW a=1.8 +| EVAL tanh=TANH(a) +``` diff --git a/docs/reference/esql/functions/kibana/docs/tau.md b/docs/reference/esql/functions/kibana/docs/tau.md new file mode 100644 index 0000000000000..9a530e61dd342 --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/tau.md @@ -0,0 +1,7 @@ + + +### TAU +The ratio of a circle’s circumference to its radius. 
+ diff --git a/docs/reference/esql/functions/kibana/docs/to_boolean.md b/docs/reference/esql/functions/kibana/docs/to_boolean.md new file mode 100644 index 0000000000000..9c1bd747d168f --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/to_boolean.md @@ -0,0 +1,7 @@ + + +### TO_BOOLEAN +Converts an input value to a boolean value. + diff --git a/docs/reference/esql/functions/kibana/docs/to_cartesianpoint.md b/docs/reference/esql/functions/kibana/docs/to_cartesianpoint.md new file mode 100644 index 0000000000000..dbaa76d1d23e0 --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/to_cartesianpoint.md @@ -0,0 +1,7 @@ + + +### TO_CARTESIANPOINT +Converts an input value to a point value. + diff --git a/docs/reference/esql/functions/kibana/docs/to_cartesianshape.md b/docs/reference/esql/functions/kibana/docs/to_cartesianshape.md new file mode 100644 index 0000000000000..e3fd29e8f9907 --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/to_cartesianshape.md @@ -0,0 +1,7 @@ + + +### TO_CARTESIANSHAPE +Converts an input value to a shape value. + diff --git a/docs/reference/esql/functions/kibana/docs/to_datetime.md b/docs/reference/esql/functions/kibana/docs/to_datetime.md new file mode 100644 index 0000000000000..8326866c7166d --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/to_datetime.md @@ -0,0 +1,7 @@ + + +### TO_DATETIME +Converts an input value to a date value. + diff --git a/docs/reference/esql/functions/kibana/docs/to_degrees.md b/docs/reference/esql/functions/kibana/docs/to_degrees.md new file mode 100644 index 0000000000000..dc5e36a592b2c --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/to_degrees.md @@ -0,0 +1,7 @@ + + +### TO_DEGREES +Converts a number in radians to degrees. 
+ diff --git a/docs/reference/esql/functions/kibana/docs/to_double.md b/docs/reference/esql/functions/kibana/docs/to_double.md new file mode 100644 index 0000000000000..4f531e1c8fdde --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/to_double.md @@ -0,0 +1,7 @@ + + +### TO_DOUBLE +Converts an input value to a double value. + diff --git a/docs/reference/esql/functions/kibana/docs/to_geopoint.md b/docs/reference/esql/functions/kibana/docs/to_geopoint.md new file mode 100644 index 0000000000000..7f9b8ca59bc8f --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/to_geopoint.md @@ -0,0 +1,7 @@ + + +### TO_GEOPOINT +Converts an input value to a geo_point value. + diff --git a/docs/reference/esql/functions/kibana/docs/to_geoshape.md b/docs/reference/esql/functions/kibana/docs/to_geoshape.md new file mode 100644 index 0000000000000..cdfbdc5b6ffd9 --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/to_geoshape.md @@ -0,0 +1,7 @@ + + +### TO_GEOSHAPE +Converts an input value to a geo_shape value. + diff --git a/docs/reference/esql/functions/kibana/docs/to_integer.md b/docs/reference/esql/functions/kibana/docs/to_integer.md new file mode 100644 index 0000000000000..ad04ecbd1e304 --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/to_integer.md @@ -0,0 +1,7 @@ + + +### TO_INTEGER +Converts an input value to an integer value. + diff --git a/docs/reference/esql/functions/kibana/docs/to_ip.md b/docs/reference/esql/functions/kibana/docs/to_ip.md new file mode 100644 index 0000000000000..47d06e9ab755e --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/to_ip.md @@ -0,0 +1,7 @@ + + +### TO_IP +Converts an input string to an IP value. 
+ diff --git a/docs/reference/esql/functions/kibana/docs/to_long.md b/docs/reference/esql/functions/kibana/docs/to_long.md new file mode 100644 index 0000000000000..c19273376bd4b --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/to_long.md @@ -0,0 +1,7 @@ + + +### TO_LONG +Converts an input value to a long value. + diff --git a/docs/reference/esql/functions/kibana/docs/to_lower.md b/docs/reference/esql/functions/kibana/docs/to_lower.md new file mode 100644 index 0000000000000..f63926ba13825 --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/to_lower.md @@ -0,0 +1,7 @@ + + +### TO_LOWER +Returns a new string representing the input string converted to lower case. + diff --git a/docs/reference/esql/functions/kibana/docs/to_radians.md b/docs/reference/esql/functions/kibana/docs/to_radians.md new file mode 100644 index 0000000000000..071d9ff05e0b6 --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/to_radians.md @@ -0,0 +1,7 @@ + + +### TO_RADIANS +Converts a number in degrees to radians. + diff --git a/docs/reference/esql/functions/kibana/docs/to_string.md b/docs/reference/esql/functions/kibana/docs/to_string.md new file mode 100644 index 0000000000000..a066f488363aa --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/to_string.md @@ -0,0 +1,7 @@ + + +### TO_STRING +Converts a field into a string. + diff --git a/docs/reference/esql/functions/kibana/docs/to_unsigned_long.md b/docs/reference/esql/functions/kibana/docs/to_unsigned_long.md new file mode 100644 index 0000000000000..fbe9e22215ee8 --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/to_unsigned_long.md @@ -0,0 +1,7 @@ + + +### TO_UNSIGNED_LONG +Converts an input value to an unsigned long value. 
+ diff --git a/docs/reference/esql/functions/kibana/docs/to_upper.md b/docs/reference/esql/functions/kibana/docs/to_upper.md new file mode 100644 index 0000000000000..4c4f5fe02b646 --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/to_upper.md @@ -0,0 +1,7 @@ + + +### TO_UPPER +Returns a new string representing the input string converted to upper case. + diff --git a/docs/reference/esql/functions/kibana/docs/to_version.md b/docs/reference/esql/functions/kibana/docs/to_version.md new file mode 100644 index 0000000000000..23cd9fcb152a3 --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/to_version.md @@ -0,0 +1,7 @@ + + +### TO_VERSION +Converts an input string to a version value. + diff --git a/docs/reference/esql/functions/kibana/docs/trim.md b/docs/reference/esql/functions/kibana/docs/trim.md new file mode 100644 index 0000000000000..2911abbf5e1a9 --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/trim.md @@ -0,0 +1,7 @@ + + +### TRIM +Removes leading and trailing whitespaces from a string. + diff --git a/docs/reference/esql/functions/layout/auto_bucket.asciidoc b/docs/reference/esql/functions/layout/auto_bucket.asciidoc deleted file mode 100644 index 82e05ab5d215c..0000000000000 --- a/docs/reference/esql/functions/layout/auto_bucket.asciidoc +++ /dev/null @@ -1,14 +0,0 @@ -// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. 
- -[discrete] -[[esql-auto_bucket]] -=== `AUTO_BUCKET` - -*Syntax* - -[.text-center] -image::esql/functions/signature/auto_bucket.svg[Embedded,opts=inline] - -include::../parameters/auto_bucket.asciidoc[] -include::../description/auto_bucket.asciidoc[] -include::../types/auto_bucket.asciidoc[] diff --git a/docs/reference/esql/functions/layout/bucket.asciidoc b/docs/reference/esql/functions/layout/bucket.asciidoc new file mode 100644 index 0000000000000..0445007237c8c --- /dev/null +++ b/docs/reference/esql/functions/layout/bucket.asciidoc @@ -0,0 +1,14 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +[discrete] +[[esql-bucket]] +=== `BUCKET` + +*Syntax* + +[.text-center] +image::esql/functions/signature/bucket.svg[Embedded,opts=inline] + +include::../parameters/bucket.asciidoc[] +include::../description/bucket.asciidoc[] +include::../types/bucket.asciidoc[] diff --git a/docs/reference/esql/functions/layout/date_trunc.asciidoc b/docs/reference/esql/functions/layout/date_trunc.asciidoc new file mode 100644 index 0000000000000..0bd9ce4b4dbe4 --- /dev/null +++ b/docs/reference/esql/functions/layout/date_trunc.asciidoc @@ -0,0 +1,15 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. 
+ +[discrete] +[[esql-date_trunc]] +=== `DATE_TRUNC` + +*Syntax* + +[.text-center] +image::esql/functions/signature/date_trunc.svg[Embedded,opts=inline] + +include::../parameters/date_trunc.asciidoc[] +include::../description/date_trunc.asciidoc[] +include::../types/date_trunc.asciidoc[] +include::../examples/date_trunc.asciidoc[] diff --git a/docs/reference/esql/functions/layout/e.asciidoc b/docs/reference/esql/functions/layout/e.asciidoc index a0e1ca3830e30..2c353f4e4ae4f 100644 --- a/docs/reference/esql/functions/layout/e.asciidoc +++ b/docs/reference/esql/functions/layout/e.asciidoc @@ -12,3 +12,4 @@ image::esql/functions/signature/e.svg[Embedded,opts=inline] include::../parameters/e.asciidoc[] include::../description/e.asciidoc[] include::../types/e.asciidoc[] +include::../examples/e.asciidoc[] diff --git a/docs/reference/esql/functions/layout/floor.asciidoc b/docs/reference/esql/functions/layout/floor.asciidoc index f3f05260989dc..0c26c4fda36c2 100644 --- a/docs/reference/esql/functions/layout/floor.asciidoc +++ b/docs/reference/esql/functions/layout/floor.asciidoc @@ -12,3 +12,4 @@ image::esql/functions/signature/floor.svg[Embedded,opts=inline] include::../parameters/floor.asciidoc[] include::../description/floor.asciidoc[] include::../types/floor.asciidoc[] +include::../examples/floor.asciidoc[] diff --git a/docs/reference/esql/functions/layout/locate.asciidoc b/docs/reference/esql/functions/layout/locate.asciidoc new file mode 100644 index 0000000000000..1017c7f844dd6 --- /dev/null +++ b/docs/reference/esql/functions/layout/locate.asciidoc @@ -0,0 +1,14 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. 
+ +[discrete] +[[esql-locate]] +=== `LOCATE` + +*Syntax* + +[.text-center] +image::esql/functions/signature/locate.svg[Embedded,opts=inline] + +include::../parameters/locate.asciidoc[] +include::../description/locate.asciidoc[] +include::../types/locate.asciidoc[] diff --git a/docs/reference/esql/functions/layout/pi.asciidoc b/docs/reference/esql/functions/layout/pi.asciidoc index 402e7b28481d6..37fd1b80bac41 100644 --- a/docs/reference/esql/functions/layout/pi.asciidoc +++ b/docs/reference/esql/functions/layout/pi.asciidoc @@ -12,3 +12,4 @@ image::esql/functions/signature/pi.svg[Embedded,opts=inline] include::../parameters/pi.asciidoc[] include::../description/pi.asciidoc[] include::../types/pi.asciidoc[] +include::../examples/pi.asciidoc[] diff --git a/docs/reference/esql/functions/layout/pow.asciidoc b/docs/reference/esql/functions/layout/pow.asciidoc index 019c17b7a03c6..3adaff72d26be 100644 --- a/docs/reference/esql/functions/layout/pow.asciidoc +++ b/docs/reference/esql/functions/layout/pow.asciidoc @@ -12,3 +12,4 @@ image::esql/functions/signature/pow.svg[Embedded,opts=inline] include::../parameters/pow.asciidoc[] include::../description/pow.asciidoc[] include::../types/pow.asciidoc[] +include::../examples/pow.asciidoc[] diff --git a/docs/reference/esql/functions/layout/round.asciidoc b/docs/reference/esql/functions/layout/round.asciidoc index 8424432052750..2fbcffd814747 100644 --- a/docs/reference/esql/functions/layout/round.asciidoc +++ b/docs/reference/esql/functions/layout/round.asciidoc @@ -12,3 +12,4 @@ image::esql/functions/signature/round.svg[Embedded,opts=inline] include::../parameters/round.asciidoc[] include::../description/round.asciidoc[] include::../types/round.asciidoc[] +include::../examples/round.asciidoc[] diff --git a/docs/reference/esql/functions/layout/signum.asciidoc b/docs/reference/esql/functions/layout/signum.asciidoc new file mode 100644 index 0000000000000..f5b565993f392 --- /dev/null +++ 
b/docs/reference/esql/functions/layout/signum.asciidoc @@ -0,0 +1,15 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +[discrete] +[[esql-signum]] +=== `SIGNUM` + +*Syntax* + +[.text-center] +image::esql/functions/signature/signum.svg[Embedded,opts=inline] + +include::../parameters/signum.asciidoc[] +include::../description/signum.asciidoc[] +include::../types/signum.asciidoc[] +include::../examples/signum.asciidoc[] diff --git a/docs/reference/esql/functions/layout/st_disjoint.asciidoc b/docs/reference/esql/functions/layout/st_disjoint.asciidoc new file mode 100644 index 0000000000000..a1eef41006f3e --- /dev/null +++ b/docs/reference/esql/functions/layout/st_disjoint.asciidoc @@ -0,0 +1,15 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +[discrete] +[[esql-st_disjoint]] +=== `ST_DISJOINT` + +*Syntax* + +[.text-center] +image::esql/functions/signature/st_disjoint.svg[Embedded,opts=inline] + +include::../parameters/st_disjoint.asciidoc[] +include::../description/st_disjoint.asciidoc[] +include::../types/st_disjoint.asciidoc[] +include::../examples/st_disjoint.asciidoc[] diff --git a/docs/reference/esql/functions/math-functions.asciidoc b/docs/reference/esql/functions/math-functions.asciidoc index 8748b35443e8e..1789f53b3704c 100644 --- a/docs/reference/esql/functions/math-functions.asciidoc +++ b/docs/reference/esql/functions/math-functions.asciidoc @@ -23,6 +23,7 @@ * <> * <> * <> +* <> * <> * <> * <> @@ -39,13 +40,14 @@ include::layout/atan2.asciidoc[] include::layout/ceil.asciidoc[] include::layout/cos.asciidoc[] include::layout/cosh.asciidoc[] -include::e.asciidoc[] -include::floor.asciidoc[] +include::layout/e.asciidoc[] +include::layout/floor.asciidoc[] include::layout/log.asciidoc[] include::layout/log10.asciidoc[] -include::pi.asciidoc[] -include::pow.asciidoc[] -include::round.asciidoc[] 
+include::layout/pi.asciidoc[] +include::layout/pow.asciidoc[] +include::layout/round.asciidoc[] +include::layout/signum.asciidoc[] include::layout/sin.asciidoc[] include::layout/sinh.asciidoc[] include::sqrt.asciidoc[] diff --git a/docs/reference/esql/functions/parameters/auto_bucket.asciidoc b/docs/reference/esql/functions/parameters/bucket.asciidoc similarity index 100% rename from docs/reference/esql/functions/parameters/auto_bucket.asciidoc rename to docs/reference/esql/functions/parameters/bucket.asciidoc diff --git a/docs/reference/esql/functions/parameters/date_trunc.asciidoc b/docs/reference/esql/functions/parameters/date_trunc.asciidoc new file mode 100644 index 0000000000000..19f7cb6cd7c74 --- /dev/null +++ b/docs/reference/esql/functions/parameters/date_trunc.asciidoc @@ -0,0 +1,9 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Parameters* + +`interval`:: +Interval; expressed using the timespan literal syntax. + +`date`:: +Date expression diff --git a/docs/reference/esql/functions/parameters/floor.asciidoc b/docs/reference/esql/functions/parameters/floor.asciidoc index 91c56709d182a..65013f4c21265 100644 --- a/docs/reference/esql/functions/parameters/floor.asciidoc +++ b/docs/reference/esql/functions/parameters/floor.asciidoc @@ -3,4 +3,4 @@ *Parameters* `number`:: - +Numeric expression. If `null`, the function returns `null`. diff --git a/docs/reference/esql/functions/parameters/locate.asciidoc b/docs/reference/esql/functions/parameters/locate.asciidoc new file mode 100644 index 0000000000000..e48a7a891712c --- /dev/null +++ b/docs/reference/esql/functions/parameters/locate.asciidoc @@ -0,0 +1,12 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. 
+ +*Parameters* + +`string`:: +An input string + +`substring`:: +A substring to locate in the input string + +`start`:: +The start index diff --git a/docs/reference/esql/functions/parameters/pow.asciidoc b/docs/reference/esql/functions/parameters/pow.asciidoc index 8e94723086e2e..1576c96ee2277 100644 --- a/docs/reference/esql/functions/parameters/pow.asciidoc +++ b/docs/reference/esql/functions/parameters/pow.asciidoc @@ -3,7 +3,7 @@ *Parameters* `base`:: - +Numeric expression for the base. If `null`, the function returns `null`. `exponent`:: - +Numeric expression for the exponent. If `null`, the function returns `null`. diff --git a/docs/reference/esql/functions/parameters/round.asciidoc b/docs/reference/esql/functions/parameters/round.asciidoc index 788c99434fd29..ce3a43b56f497 100644 --- a/docs/reference/esql/functions/parameters/round.asciidoc +++ b/docs/reference/esql/functions/parameters/round.asciidoc @@ -3,7 +3,7 @@ *Parameters* `number`:: -The numeric value to round +The numeric value to round. If `null`, the function returns `null`. `decimals`:: -The number of decimal places to round to. Defaults to 0. +The number of decimal places to round to. Defaults to 0. If `null`, the function returns `null`. diff --git a/docs/reference/esql/functions/parameters/signum.asciidoc b/docs/reference/esql/functions/parameters/signum.asciidoc new file mode 100644 index 0000000000000..65013f4c21265 --- /dev/null +++ b/docs/reference/esql/functions/parameters/signum.asciidoc @@ -0,0 +1,6 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Parameters* + +`number`:: +Numeric expression. If `null`, the function returns `null`. 
diff --git a/docs/reference/esql/functions/parameters/st_disjoint.asciidoc b/docs/reference/esql/functions/parameters/st_disjoint.asciidoc new file mode 100644 index 0000000000000..e87a0d0eb94f0 --- /dev/null +++ b/docs/reference/esql/functions/parameters/st_disjoint.asciidoc @@ -0,0 +1,9 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Parameters* + +`geomA`:: +Geometry column name or variable of geometry type + +`geomB`:: +Geometry column name or variable of geometry type diff --git a/docs/reference/esql/functions/pi.asciidoc b/docs/reference/esql/functions/pi.asciidoc deleted file mode 100644 index fb88cbffc99d0..0000000000000 --- a/docs/reference/esql/functions/pi.asciidoc +++ /dev/null @@ -1,23 +0,0 @@ -[discrete] -[[esql-pi]] -=== `PI` - -*Syntax* - -[.text-center] -image::esql/functions/signature/pi.svg[Embedded,opts=inline] - -*Description* - -Returns the {wikipedia}/Pi[ratio] of a circle's circumference to its diameter. 
- -*Example* - -[source.merge.styled,esql] ----- -include::{esql-specs}/math.csv-spec[tag=pi] ----- -[%header.monospaced.styled,format=dsv,separator=|] -|=== -include::{esql-specs}/math.csv-spec[tag=pi-result] -|=== diff --git a/docs/reference/esql/functions/signature/auto_bucket.svg b/docs/reference/esql/functions/signature/auto_bucket.svg deleted file mode 100644 index 7da9a053825f1..0000000000000 --- a/docs/reference/esql/functions/signature/auto_bucket.svg +++ /dev/null @@ -1 +0,0 @@ -AUTO_BUCKET(field,buckets,from,to) \ No newline at end of file diff --git a/docs/reference/esql/functions/signature/bucket.svg b/docs/reference/esql/functions/signature/bucket.svg new file mode 100644 index 0000000000000..f6662910c010d --- /dev/null +++ b/docs/reference/esql/functions/signature/bucket.svg @@ -0,0 +1 @@ +BUCKET(field,buckets,from,to) \ No newline at end of file diff --git a/docs/reference/esql/functions/signature/case.svg b/docs/reference/esql/functions/signature/case.svg new file mode 100644 index 0000000000000..d6fd7da38aca6 --- /dev/null +++ b/docs/reference/esql/functions/signature/case.svg @@ -0,0 +1 @@ +CASE(condition,trueValue) \ No newline at end of file diff --git a/docs/reference/esql/functions/signature/date_trunc.svg b/docs/reference/esql/functions/signature/date_trunc.svg new file mode 100644 index 0000000000000..c82cd04ed5c88 --- /dev/null +++ b/docs/reference/esql/functions/signature/date_trunc.svg @@ -0,0 +1 @@ +DATE_TRUNC(interval,date) \ No newline at end of file diff --git a/docs/reference/esql/functions/signature/locate.svg b/docs/reference/esql/functions/signature/locate.svg new file mode 100644 index 0000000000000..2b7bc2dac0e86 --- /dev/null +++ b/docs/reference/esql/functions/signature/locate.svg @@ -0,0 +1 @@ +LOCATE(string,substring,start) \ No newline at end of file diff --git a/docs/reference/esql/functions/signature/signum.svg b/docs/reference/esql/functions/signature/signum.svg new file mode 100644 index 0000000000000..76d2972f18f42 
--- /dev/null +++ b/docs/reference/esql/functions/signature/signum.svg @@ -0,0 +1 @@ +SIGNUM(number) \ No newline at end of file diff --git a/docs/reference/esql/functions/signature/st_disjoint.svg b/docs/reference/esql/functions/signature/st_disjoint.svg new file mode 100644 index 0000000000000..becd0be37e441 --- /dev/null +++ b/docs/reference/esql/functions/signature/st_disjoint.svg @@ -0,0 +1 @@ +ST_DISJOINT(geomA,geomB) \ No newline at end of file diff --git a/docs/reference/esql/functions/spatial-functions.asciidoc b/docs/reference/esql/functions/spatial-functions.asciidoc index 739d6b2d6f58f..b6d178ddd624d 100644 --- a/docs/reference/esql/functions/spatial-functions.asciidoc +++ b/docs/reference/esql/functions/spatial-functions.asciidoc @@ -9,6 +9,7 @@ // tag::spatial_list[] * experimental:[] <> +* experimental:[] <> * experimental:[] <> * experimental:[] <> * experimental:[] <> @@ -16,6 +17,7 @@ // end::spatial_list[] include::st_intersects.asciidoc[] +include::st_disjoint.asciidoc[] include::st_contains.asciidoc[] include::st_within.asciidoc[] include::st_x.asciidoc[] diff --git a/docs/reference/esql/functions/st_centroid.asciidoc b/docs/reference/esql/functions/st_centroid_agg.asciidoc similarity index 69% rename from docs/reference/esql/functions/st_centroid.asciidoc rename to docs/reference/esql/functions/st_centroid_agg.asciidoc index e91a325a5597b..c980560f8f198 100644 --- a/docs/reference/esql/functions/st_centroid.asciidoc +++ b/docs/reference/esql/functions/st_centroid_agg.asciidoc @@ -1,6 +1,6 @@ [discrete] [[esql-agg-st-centroid]] -=== `ST_CENTROID` +=== `ST_CENTROID_AGG` experimental::[] @@ -8,11 +8,11 @@ Calculate the spatial centroid over a field with spatial point geometry type. 
[source.merge.styled,esql] ---- -include::{esql-specs}/spatial.csv-spec[tag=st_centroid-airports] +include::{esql-specs}/spatial.csv-spec[tag=st_centroid_agg-airports] ---- [%header.monospaced.styled,format=dsv,separator=|] |=== -include::{esql-specs}/spatial.csv-spec[tag=st_centroid-airports-result] +include::{esql-specs}/spatial.csv-spec[tag=st_centroid_agg-airports-result] |=== Supported types: diff --git a/docs/reference/esql/functions/st_contains.asciidoc b/docs/reference/esql/functions/st_contains.asciidoc index 07b1a11aa7247..110c4fe4ca9ec 100644 --- a/docs/reference/esql/functions/st_contains.asciidoc +++ b/docs/reference/esql/functions/st_contains.asciidoc @@ -20,7 +20,7 @@ The second parameter must also have the same coordinate system as the first. This means it is not possible to combine `geo_*` and `cartesian_*` parameters. include::description/st_contains.asciidoc[] -This is the inverse of the `<>` function. +This is the inverse of the <> function. include::types/st_contains.asciidoc[] include::examples/st_contains.asciidoc[] diff --git a/docs/reference/esql/functions/st_disjoint.asciidoc b/docs/reference/esql/functions/st_disjoint.asciidoc new file mode 100644 index 0000000000000..db89ca186a0ff --- /dev/null +++ b/docs/reference/esql/functions/st_disjoint.asciidoc @@ -0,0 +1,27 @@ +[discrete] +[[esql-st_disjoint]] +=== `ST_DISJOINT` + +experimental::[] + +*Syntax* + +[.text-center] +image::esql/functions/signature/st_disjoint.svg[Embedded,opts=inline] + +*Parameters* + +`geomA`:: +Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`. + +`geomB`:: +Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`. +The second parameter must also have the same coordinate system as the first. +This means it is not possible to combine `geo_*` and `cartesian_*` parameters. 
+ +include::description/st_disjoint.asciidoc[] +This is the inverse of the <> function. +In mathematical terms: ST_Disjoint(A, B) ⇔ A ⋂ B = ∅ + +include::types/st_disjoint.asciidoc[] +include::examples/st_disjoint.asciidoc[] diff --git a/docs/reference/esql/functions/st_intersects.asciidoc b/docs/reference/esql/functions/st_intersects.asciidoc index fbe313d10b0e7..d75a7f3a50e0f 100644 --- a/docs/reference/esql/functions/st_intersects.asciidoc +++ b/docs/reference/esql/functions/st_intersects.asciidoc @@ -24,6 +24,7 @@ This means it is not possible to combine `geo_*` and `cartesian_*` parameters. Returns true if two geometries intersect. They intersect if they have any point in common, including their interior points (points along lines or within polygons). +This is the inverse of the <> function. In mathematical terms: ST_Intersects(A, B) ⇔ A ⋂ B ≠ ∅ include::types/st_intersects.asciidoc[] diff --git a/docs/reference/esql/functions/st_within.asciidoc b/docs/reference/esql/functions/st_within.asciidoc index 64adb91219c4a..0f0190a9de638 100644 --- a/docs/reference/esql/functions/st_within.asciidoc +++ b/docs/reference/esql/functions/st_within.asciidoc @@ -20,7 +20,7 @@ The second parameter must also have the same coordinate system as the first. This means it is not possible to combine `geo_*` and `cartesian_*` parameters. include::description/st_within.asciidoc[] -This is the inverse of the `<>` function. +This is the inverse of the <> function. 
include::types/st_within.asciidoc[] include::examples/st_within.asciidoc[] diff --git a/docs/reference/esql/functions/string-functions.asciidoc b/docs/reference/esql/functions/string-functions.asciidoc index b568ae1061bb5..273c508fc6f63 100644 --- a/docs/reference/esql/functions/string-functions.asciidoc +++ b/docs/reference/esql/functions/string-functions.asciidoc @@ -11,6 +11,7 @@ * <> * <> * <> +* <> * <> * <> * <> @@ -25,6 +26,7 @@ include::concat.asciidoc[] include::layout/left.asciidoc[] include::length.asciidoc[] +include::layout/locate.asciidoc[] include::ltrim.asciidoc[] include::replace.asciidoc[] include::right.asciidoc[] diff --git a/docs/reference/esql/functions/types/bucket.asciidoc b/docs/reference/esql/functions/types/bucket.asciidoc new file mode 100644 index 0000000000000..c4b997d0e124d --- /dev/null +++ b/docs/reference/esql/functions/types/bucket.asciidoc @@ -0,0 +1,36 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. 
+ +*Supported types* + +[%header.monospaced.styled,format=dsv,separator=|] +|=== +field | buckets | from | to | result +datetime | integer | datetime | datetime | datetime +double | integer | double | double | double +double | integer | double | integer | double +double | integer | double | long | double +double | integer | integer | double | double +double | integer | integer | integer | double +double | integer | integer | long | double +double | integer | long | double | double +double | integer | long | integer | double +double | integer | long | long | double +integer | integer | double | double | double +integer | integer | double | integer | double +integer | integer | double | long | double +integer | integer | integer | double | double +integer | integer | integer | integer | double +integer | integer | integer | long | double +integer | integer | long | double | double +integer | integer | long | integer | double +integer | integer | long | long | double +long | integer | double | double | double +long | integer | double | integer | double +long | integer | double | long | double +long | integer | integer | double | double +long | integer | integer | integer | double +long | integer | integer | long | double +long | integer | long | double | double +long | integer | long | integer | double +long | integer | long | long | double +|=== diff --git a/docs/reference/esql/functions/types/case.asciidoc b/docs/reference/esql/functions/types/case.asciidoc index 44acf331a43dc..e7d627ab915a1 100644 --- a/docs/reference/esql/functions/types/case.asciidoc +++ b/docs/reference/esql/functions/types/case.asciidoc @@ -5,5 +5,5 @@ [%header.monospaced.styled,format=dsv,separator=|] |=== condition | trueValue | result - +keyword |=== diff --git a/docs/reference/esql/functions/types/date_trunc.asciidoc b/docs/reference/esql/functions/types/date_trunc.asciidoc new file mode 100644 index 0000000000000..8df45cfef54a8 --- /dev/null +++ 
b/docs/reference/esql/functions/types/date_trunc.asciidoc @@ -0,0 +1,10 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Supported types* + +[%header.monospaced.styled,format=dsv,separator=|] +|=== +interval | date | result +date_period | datetime | datetime +time_duration | datetime | datetime +|=== diff --git a/docs/reference/esql/functions/types/e.asciidoc b/docs/reference/esql/functions/types/e.asciidoc index 50e9c47238e34..aa22828b31bb1 100644 --- a/docs/reference/esql/functions/types/e.asciidoc +++ b/docs/reference/esql/functions/types/e.asciidoc @@ -5,5 +5,5 @@ [%header.monospaced.styled,format=dsv,separator=|] |=== result - +double |=== diff --git a/docs/reference/esql/functions/types/locate.asciidoc b/docs/reference/esql/functions/types/locate.asciidoc new file mode 100644 index 0000000000000..895dce1335813 --- /dev/null +++ b/docs/reference/esql/functions/types/locate.asciidoc @@ -0,0 +1,12 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. 
+ +*Supported types* + +[%header.monospaced.styled,format=dsv,separator=|] +|=== +string | substring | start | result +keyword | keyword | integer | integer +keyword | text | integer | integer +text | keyword | integer | integer +text | text | integer | integer +|=== diff --git a/docs/reference/esql/functions/types/pi.asciidoc b/docs/reference/esql/functions/types/pi.asciidoc index 50e9c47238e34..aa22828b31bb1 100644 --- a/docs/reference/esql/functions/types/pi.asciidoc +++ b/docs/reference/esql/functions/types/pi.asciidoc @@ -5,5 +5,5 @@ [%header.monospaced.styled,format=dsv,separator=|] |=== result - +double |=== diff --git a/docs/reference/esql/functions/types/round.asciidoc b/docs/reference/esql/functions/types/round.asciidoc index ea9ab93825d4f..8c13e14e73b01 100644 --- a/docs/reference/esql/functions/types/round.asciidoc +++ b/docs/reference/esql/functions/types/round.asciidoc @@ -6,4 +6,6 @@ |=== number | decimals | result double | integer | double +integer | integer | integer +long | integer | long |=== diff --git a/docs/reference/esql/functions/types/auto_bucket.asciidoc b/docs/reference/esql/functions/types/signum.asciidoc similarity index 69% rename from docs/reference/esql/functions/types/auto_bucket.asciidoc rename to docs/reference/esql/functions/types/signum.asciidoc index 535e2df29c353..7cda278abdb56 100644 --- a/docs/reference/esql/functions/types/auto_bucket.asciidoc +++ b/docs/reference/esql/functions/types/signum.asciidoc @@ -4,6 +4,9 @@ [%header.monospaced.styled,format=dsv,separator=|] |=== -field | buckets | from | to | result - +number | result +double | double +integer | double +long | double +unsigned_long | double |=== diff --git a/docs/reference/esql/functions/types/st_disjoint.asciidoc b/docs/reference/esql/functions/types/st_disjoint.asciidoc new file mode 100644 index 0000000000000..36bd9cc036ade --- /dev/null +++ b/docs/reference/esql/functions/types/st_disjoint.asciidoc @@ -0,0 +1,16 @@ +// This is generated by ESQL's 
AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Supported types* + +[%header.monospaced.styled,format=dsv,separator=|] +|=== +geomA | geomB | result +cartesian_point | cartesian_point | boolean +cartesian_point | cartesian_shape | boolean +cartesian_shape | cartesian_point | boolean +cartesian_shape | cartesian_shape | boolean +geo_point | geo_point | boolean +geo_point | geo_shape | boolean +geo_shape | geo_point | boolean +geo_shape | geo_shape | boolean +|=== diff --git a/docs/reference/esql/functions/types/tau.asciidoc b/docs/reference/esql/functions/types/tau.asciidoc index 50e9c47238e34..aa22828b31bb1 100644 --- a/docs/reference/esql/functions/types/tau.asciidoc +++ b/docs/reference/esql/functions/types/tau.asciidoc @@ -5,5 +5,5 @@ [%header.monospaced.styled,format=dsv,separator=|] |=== result - +double |=== diff --git a/docs/reference/ilm/ilm-tutorial.asciidoc b/docs/reference/ilm/ilm-tutorial.asciidoc index c7f2c3537b5e8..0885f685ed091 100644 --- a/docs/reference/ilm/ilm-tutorial.asciidoc +++ b/docs/reference/ilm/ilm-tutorial.asciidoc @@ -184,7 +184,7 @@ stream's write index. This process repeats each time a rollover condition is met. You can search across all of the data stream's backing indices, managed by the `timeseries_policy`, with the `timeseries` data stream name. -Write operations are routed to the current write index. Read operations will be handled by all +You will point ingest towards the alias which will route write operations to its current write index. Read operations will be handled by all backing indices. [discrete] @@ -282,14 +282,15 @@ DELETE /_index_template/timeseries_template [[manage-time-series-data-without-data-streams]] === Manage time series data without data streams -Even though <> are a convenient way to scale -and manage time series data, they are designed to be append-only. 
We recognise there -might be use-cases where data needs to be updated or deleted in place and the -data streams don't support delete and update requests directly, -so the index APIs would need to be used directly on the data stream's backing indices. +Even though <> are a convenient way to scale and manage time series +data, they are designed to be append-only. We recognise there might be use-cases where data needs to +be updated or deleted in place and the data streams don't support delete and update requests +directly, so the index APIs would need to be used directly on the data stream's backing indices. In +these cases we still recommend using a data stream. -In these cases, you can use an index alias to manage indices containing the time series data -and periodically roll over to a new index. +If you frequently send multiple documents using the same `_id` expecting last-write-wins, you can +use an index alias instead of a data stream to manage indices containing the time series data and +periodically roll over to a new index. To automate rollover and management of time series indices with {ilm-init} using an index alias, you: diff --git a/docs/reference/ilm/set-up-lifecycle-policy.asciidoc b/docs/reference/ilm/set-up-lifecycle-policy.asciidoc index 79be6205a8c88..b6310050a4f25 100644 --- a/docs/reference/ilm/set-up-lifecycle-policy.asciidoc +++ b/docs/reference/ilm/set-up-lifecycle-policy.asciidoc @@ -81,6 +81,8 @@ To use a policy that triggers the rollover action, you need to configure the policy in the index template used to create each new index. You specify the name of the policy and the alias used to reference the rolling indices. +TIP: An `index.lifecycle.rollover_alias` setting is only required if using {ilm} with an alias. It is unnecessary when using <>. + You can use the {kib} Create template wizard to create a template. To access the wizard, open the menu and go to *Stack Management > Index Management*. 
In the *Index Templates* tab, click *Create template*. @@ -128,8 +130,9 @@ DELETE _index_template/my_template [[create-initial-index]] ==== Create an initial managed index -When you set up policies for your own rolling indices, you need to manually create the first index -managed by a policy and designate it as the write index. +When you set up policies for your own rolling indices, if you are not using the recommended +<>, you need to manually create the first index managed by a policy and +designate it as the write index. IMPORTANT: When you enable {ilm} for {beats} or the {ls} {es} output plugin, the necessary policies and configuration changes are applied automatically. diff --git a/docs/reference/indices/aliases.asciidoc b/docs/reference/indices/aliases.asciidoc index 76698501fd416..34248cc5f98d3 100644 --- a/docs/reference/indices/aliases.asciidoc +++ b/docs/reference/indices/aliases.asciidoc @@ -145,10 +145,16 @@ the alias points to one data stream. + Only the `add` action supports this parameter. +// tag::alias-options[] `must_exist`:: (Optional, Boolean) -If `true`, the alias must exist to perform the action. Defaults to `false`. Only -the `remove` action supports this parameter. +Affects the behavior when attempting to remove an alias which does not exist. +If `true`, removing an alias which does not exist will cause all actions to fail. +If `false`, removing an alias which does not exist will only cause that removal to fail. +Defaults to `false`. +// end::alias-options[] ++ +Only the `remove` action supports this parameter. // tag::alias-options[] `routing`:: @@ -168,3 +174,51 @@ stream aliases don't support this parameter. Only the `add` action supports this parameter. ===== ==== + + + +[role="child_attributes"] +[[indices-aliases-api-response-body]] +==== {api-response-body-title} + +`acknowledged`:: +(Boolean) +If `true`, the request received a response from the master node within the +`timeout` period. 
+ +`errors`:: +(Boolean) +If `true`, at least one of the requested actions failed. + +`action_results`:: +(Optional, array of objects) Results for each requested action. ++ +.Properties of `action_results` objects +[%collapsible%open] +==== + +`action`:: +(object) +Description of the associated action request. ++ +.Properties of `action` object +[%collapsible%open] +===== +`type`:: +(string) The type of the associated action, one of `add`, `remove`, or `remove_index`. + +`indices`:: +(array of strings) List of indices in the associated action. + +`aliases`:: +(array of strings) List of aliases in the associated action. +===== + +`status`:: +(integer) HTTP status code returned for the action. + +`error`:: +(Optional, object) Contains additional information about the failed action. ++ +Only present if the action failed. +==== diff --git a/docs/reference/inference/delete-inference.asciidoc b/docs/reference/inference/delete-inference.asciidoc index 5b693f51d65da..72f752563491b 100644 --- a/docs/reference/inference/delete-inference.asciidoc +++ b/docs/reference/inference/delete-inference.asciidoc @@ -4,12 +4,14 @@ experimental[] -Deletes an {infer} model deployment. - -IMPORTANT: The {infer} APIs enable you to use certain services, such as ELSER, -OpenAI, or Hugging Face, in your cluster. This is not the same feature that you -can use on an ML node with custom {ml} models. If you want to train and use your -own model, use the <>. +Deletes an {infer} endpoint. + +IMPORTANT: The {infer} APIs enable you to use certain services, such as built-in +{ml} models (ELSER, E5), models uploaded through Eland, Cohere, OpenAI, or +Hugging Face. For built-in models and models uploaded through Eland, the {infer} +APIs offer an alternative way to use and manage trained models. However, if you +do not plan to use the {infer} APIs to use these models or if you want to use +non-NLP models, use the <>.
[discrete] diff --git a/docs/reference/inference/get-inference.asciidoc b/docs/reference/inference/get-inference.asciidoc index 1a11904a169ca..2cfc17a3b6203 100644 --- a/docs/reference/inference/get-inference.asciidoc +++ b/docs/reference/inference/get-inference.asciidoc @@ -4,12 +4,14 @@ experimental[] -Retrieves {infer} model information. +Retrieves {infer} endpoint information. -IMPORTANT: The {infer} APIs enable you to use certain services, such as ELSER, -OpenAI, or Hugging Face, in your cluster. This is not the same feature that you -can use on an ML node with custom {ml} models. If you want to train and use your -own model, use the <>. +IMPORTANT: The {infer} APIs enable you to use certain services, such as built-in +{ml} models (ELSER, E5), models uploaded through Eland, Cohere, OpenAI, or +Hugging Face. For built-in models and models uploaded through Eland, the {infer} +APIs offer an alternative way to use and manage trained models. However, if you +do not plan to use the {infer} APIs to use these models or if you want to use +non-NLP models, use the <>. [discrete] @@ -37,10 +39,10 @@ own model, use the <>. You can get information in a single API request for: -* a single {infer} model by providing the task type and the model ID, -* all of the {infer} models for a certain task type by providing the task type -and a wildcard expression, -* all of the {infer} models by using a wildcard expression. +* a single {infer} endpoint by providing the task type and the {infer} ID, +* all of the {infer} endpoints for a certain task type by providing the task +type and a wildcard expression, +* all of the {infer} endpoints by using a wildcard expression.
[discrete] diff --git a/docs/reference/inference/inference-apis.asciidoc b/docs/reference/inference/inference-apis.asciidoc index cdc6bfe254ea2..d700a396e08bf 100644 --- a/docs/reference/inference/inference-apis.asciidoc +++ b/docs/reference/inference/inference-apis.asciidoc @@ -4,12 +4,16 @@ experimental[] -IMPORTANT: The {infer} APIs enable you to use certain services, such as ELSER, -OpenAI, or Hugging Face, in your cluster. This is not the same feature that you -can use on an ML node with custom {ml} models. If you want to train and use your -own model, use the <>. +IMPORTANT: The {infer} APIs enable you to use certain services, such as built-in +{ml} models (ELSER, E5), models uploaded through Eland, Cohere, OpenAI, or +Hugging Face. For built-in models and models uploaded through Eland, the {infer} +APIs offer an alternative way to use and manage trained models. However, if you +do not plan to use the {infer} APIs to use these models or if you want to use +non-NLP models, use the <>. -You can use the following APIs to manage {infer} models and perform {infer}: +The {infer} APIs enable you to create {infer} endpoints and use {ml} models of +different providers - such as Cohere, OpenAI, or HuggingFace - as a service. Use +the following APIs to manage {infer} models and perform {infer}: * <> * <> diff --git a/docs/reference/inference/post-inference.asciidoc b/docs/reference/inference/post-inference.asciidoc index 08a58d7789e33..8c6a219f8e247 100644 --- a/docs/reference/inference/post-inference.asciidoc +++ b/docs/reference/inference/post-inference.asciidoc @@ -4,12 +4,14 @@ experimental[] -Performs an inference task on an input text by using an {infer} model. +Performs an inference task on an input text by using an {infer} endpoint. -IMPORTANT: The {infer} APIs enable you to use certain services, such as ELSER, -OpenAI, or Hugging Face, in your cluster. This is not the same feature that you -can use on an ML node with custom {ml} models.
If you want to train and use your -own model, use the <>. +IMPORTANT: The {infer} APIs enable you to use certain services, such as built-in +{ml} models (ELSER, E5), models uploaded through Eland, Cohere, OpenAI, or +Hugging Face. For built-in models and models uploaded through Eland, the {infer} +APIs offer an alternative way to use and manage trained models. However, if you +do not plan to use the {infer} APIs to use these models or if you want to use +non-NLP models, use the <>. [discrete] @@ -34,8 +36,9 @@ own model, use the <>. The perform {infer} API enables you to use {ml} models to perform specific tasks on data that you provide as an input. The API returns a response with the -results of the tasks. The {infer} model you use can perform one specific task -that has been defined when the model was created with the <>. +results of the tasks. The {infer} endpoint you use can perform one specific task +that has been defined when the endpoint was created with the +<>. [discrete] @@ -51,6 +54,13 @@ The unique identifier of the {infer} endpoint. (Optional, string) The type of {infer} task that the model performs. +[[post-inference-api-query-params]] +== {api-query-parms-title} + +`timeout`:: +(Optional, timeout) +Controls the amount of time to wait for the inference to complete. Defaults to 30 +seconds. [discrete] [[post-inference-api-request-body]] diff --git a/docs/reference/inference/put-inference.asciidoc b/docs/reference/inference/put-inference.asciidoc index 110ec9d6fa98c..9825e71adea0d 100644 --- a/docs/reference/inference/put-inference.asciidoc +++ b/docs/reference/inference/put-inference.asciidoc @@ -4,7 +4,7 @@ experimental[] -Creates a model to perform an {infer} task. +Creates an {infer} endpoint to perform an {infer} task. IMPORTANT: The {infer} APIs enable you to use certain services, such as built-in {ml} models (ELSER, E5), models uploaded through Eland, Cohere, OpenAI, or Hugging Face. For built-in models and models uploaded though Eland, the {infer} APIs offer an alternative way to use and manage trained models. However, if you do not plan to use the {infer} APIs to use these models or if you want to use non-NLP models, use the <>.
[[put-inference-api-request]] ==== {api-request-title} -`PUT /_inference//` +`PUT /_inference//` [discrete] @@ -33,8 +33,8 @@ or if you want to use non-NLP models, use the <>. [[put-inference-api-desc]] ==== {api-description-title} -The create {infer} API enables you to create and configure a {ml} model to -perform a specific {infer} task. +The create {infer} API enables you to create an {infer} endpoint and configure a +{ml} model to perform a specific {infer} task. The following services are available through the {infer} API: @@ -165,7 +165,7 @@ want to use a different API key, delete the {infer} model and recreate it with the same name and the updated API key. `model_id`::: -(Optional, string) +(Required, string) The name of the model to use for the {infer} task. Refer to the https://platform.openai.com/docs/guides/embeddings/what-are-embeddings[OpenAI documentation] for the list of available text embedding models. @@ -346,7 +346,7 @@ Example response: ===== Hugging Face service The following example shows how to create an {infer} endpoint called -`hugging-face_embeddings` to perform a `text_embedding` task type. +`hugging-face-embeddings` to perform a `text_embedding` task type. [source,console] ------------------------------------------------------------ @@ -371,6 +371,19 @@ endpoint URL. Select the model you want to use on the new endpoint creation page task under the Advanced configuration section. Create the endpoint. Copy the URL after the endpoint initialization has been finished. 
+[discrete] +[[inference-example-hugging-face-supported-models]] +The list of recommended models for the Hugging Face service: + +* https://huggingface.co/sentence-transformers/all-MiniLM-L6-v2[all-MiniLM-L6-v2] +* https://huggingface.co/sentence-transformers/all-MiniLM-L12-v2[all-MiniLM-L12-v2] +* https://huggingface.co/sentence-transformers/all-mpnet-base-v2[all-mpnet-base-v2] +* https://huggingface.co/intfloat/e5-base-v2[e5-base-v2] +* https://huggingface.co/intfloat/e5-small-v2[e5-small-v2] +* https://huggingface.co/intfloat/multilingual-e5-base[multilingual-e5-base] +* https://huggingface.co/intfloat/multilingual-e5-small[multilingual-e5-small] + + [discrete] [[inference-example-eland]] ===== Models uploaded by Eland via the elasticsearch service @@ -431,4 +444,3 @@ PUT _inference/completion/openai_completion } ------------------------------------------------------------ // TEST[skip:TBD] - diff --git a/docs/reference/ingest/apis/geoip-stats-api.asciidoc b/docs/reference/ingest/apis/geoip-stats-api.asciidoc index 6ef0db546342b..84a2b00737e5a 100644 --- a/docs/reference/ingest/apis/geoip-stats-api.asciidoc +++ b/docs/reference/ingest/apis/geoip-stats-api.asciidoc @@ -4,8 +4,8 @@ GeoIP stats ++++ -Gets download statistics for GeoIP2 databases used with the -<>. +Gets statistics about the <>, including +download statistics for GeoIP2 databases used with it. [source,console] ---- @@ -60,7 +60,7 @@ Total number of database updates skipped. `nodes`:: (object) -Downloaded GeoIP2 databases for each node. +Statistics for each node. + .Properties of `nodes` [%collapsible%open] @@ -90,4 +90,4 @@ Downloaded database files, including related license files. {es} stores these files in the node's <>: `$ES_TMPDIR/geoip-databases/`. 
===== -==== \ No newline at end of file +==== diff --git a/docs/reference/ingest/processors/geoip.asciidoc b/docs/reference/ingest/processors/geoip.asciidoc index 7e0e53747834a..12e7a5f10135c 100644 --- a/docs/reference/ingest/processors/geoip.asciidoc +++ b/docs/reference/ingest/processors/geoip.asciidoc @@ -9,7 +9,7 @@ IPv4 or IPv6 address. [[geoip-automatic-updates]] By default, the processor uses the GeoLite2 City, GeoLite2 Country, and GeoLite2 -ASN GeoIP2 databases from http://dev.maxmind.com/geoip/geoip2/geolite2/[MaxMind], shared under the +ASN IP geolocation databases from http://dev.maxmind.com/geoip/geoip2/geolite2/[MaxMind], shared under the CC BY-SA 4.0 license. It automatically downloads these databases if your nodes can connect to `storage.googleapis.com` domain and either: * `ingest.geoip.downloader.eager.download` is set to true @@ -38,7 +38,7 @@ field instead. | Name | Required | Default | Description | `field` | yes | - | The field to get the ip address from for the geographical lookup. | `target_field` | no | geoip | The field that will hold the geographical information looked up from the MaxMind database. -| `database_file` | no | GeoLite2-City.mmdb | The database filename referring to a database the module ships with (GeoLite2-City.mmdb, GeoLite2-Country.mmdb, or GeoLite2-ASN.mmdb) or a custom database in the `ingest-geoip` config directory. +| `database_file` | no | GeoLite2-City.mmdb | The database filename referring to one of the automatically downloaded GeoLite2 databases (GeoLite2-City.mmdb, GeoLite2-Country.mmdb, or GeoLite2-ASN.mmdb) or the name of a supported database file in the `ingest-geoip` config directory. | `properties` | no | [`continent_name`, `country_iso_code`, `country_name`, `region_iso_code`, `region_name`, `city_name`, `location`] * | Controls what properties are added to the `target_field` based on the geoip lookup. 
| `ignore_missing` | no | `false` | If `true` and `field` does not exist, the processor quietly exits without modifying the document | `first_only` | no | `true` | If `true` only first found geoip data will be returned, even if `field` contains array @@ -47,15 +47,22 @@ field instead. *Depends on what is available in `database_file`: -* If the GeoLite2 City database is used, then the following fields may be added under the `target_field`: `ip`, -`country_iso_code`, `country_name`, `continent_name`, `region_iso_code`, `region_name`, `city_name`, `timezone`, `latitude`, `longitude` +* If a GeoLite2 City or GeoIP2 City database is used, then the following fields may be added under the `target_field`: `ip`, +`country_iso_code`, `country_name`, `continent_name`, `region_iso_code`, `region_name`, `city_name`, `timezone`, and `location`. The fields actually added depend on what has been found and which properties were configured in `properties`. -* If the GeoLite2 Country database is used, then the following fields may be added under the `target_field`: `ip`, +* If a GeoLite2 Country or GeoIP2 Country database is used, then the following fields may be added under the `target_field`: `ip`, `country_iso_code`, `country_name` and `continent_name`. The fields actually added depend on what has been found and which properties were configured in `properties`. * If the GeoLite2 ASN database is used, then the following fields may be added under the `target_field`: `ip`, `asn`, `organization_name` and `network`. The fields actually added depend on what has been found and which properties were configured in `properties`. +* If the GeoIP2 Anonymous IP database is used, then the following fields may be added under the `target_field`: `ip`, +`hosting_provider`, `tor_exit_node`, `anonymous_vpn`, `anonymous`, `public_proxy`, and `residential_proxy`. The fields actually added +depend on what has been found and which properties were configured in `properties`. 
+* If the GeoIP2 Enterprise database is used, then the following fields may be added under the `target_field`: `ip`, +`country_iso_code`, `country_name`, `continent_name`, `region_iso_code`, `region_name`, `city_name`, `timezone`, `location`, `asn`, +`organization_name`, `network`, `hosting_provider`, `tor_exit_node`, `anonymous_vpn`, `anonymous`, `public_proxy`, and `residential_proxy`. +The fields actually added depend on what has been found and which properties were configured in `properties`. Here is an example that uses the default city database and adds the geographical information to the `geoip` field based on the `ip` field: @@ -109,7 +116,7 @@ Which returns: Here is an example that uses the default country database and adds the geographical information to the `geo` field based on the `ip` field. Note that -this database is included in the module. So this: +this database is downloaded automatically. So this: [source,console] -------------------------------------------------- @@ -316,14 +323,14 @@ GET /my_ip_locations/_search //// [[manage-geoip-database-updates]] -==== Manage your own GeoIP2 database updates +==== Manage your own IP geolocation database updates -If you can't <> your GeoIP2 -databases from the Elastic endpoint, you have a few other options: +If you can't <> your IP geolocation databases +from the Elastic endpoint, you have a few other options: * <> * <> -* <> +* <> [[use-proxy-geoip-endpoint]] **Use a proxy endpoint** @@ -375,7 +382,7 @@ settings API>> to set <>. [[manually-update-geoip-databases]] -**Manually update your GeoIP2 databases** +**Manually update your IP geolocation databases** . Use the <> to set `ingest.geoip.downloader.enabled` to `false`. 
This disables automatic updates @@ -414,14 +421,14 @@ Note that these settings are node settings and apply to all `geoip` processors, [[ingest-geoip-downloader-enabled]] `ingest.geoip.downloader.enabled`:: (<>, Boolean) -If `true`, {es} automatically downloads and manages updates for GeoIP2 databases +If `true`, {es} automatically downloads and manages updates for IP geolocation databases from the `ingest.geoip.downloader.endpoint`. If `false`, {es} does not download updates and deletes all downloaded databases. Defaults to `true`. [[ingest-geoip-downloader-eager-download]] `ingest.geoip.downloader.eager.download`:: (<>, Boolean) -If `true`, {es} downloads GeoIP2 databases immediately, regardless of whether a +If `true`, {es} downloads IP geolocation databases immediately, regardless of whether a pipeline exists with a geoip processor. If `false`, {es} only begins downloading the databases if a pipeline with a geoip processor exists or is added. Defaults to `false`. @@ -429,7 +436,7 @@ to `false`. [[ingest-geoip-downloader-endpoint]] `ingest.geoip.downloader.endpoint`:: (<>, string) -Endpoint URL used to download updates for GeoIP2 databases. For example, `https://myDomain.com/overview.json`. +Endpoint URL used to download updates for IP geolocation databases. For example, `https://myDomain.com/overview.json`. Defaults to `https://geoip.elastic.co/v1/database`. {es} stores downloaded database files in each node's <> at `$ES_TMPDIR/geoip-databases/`. Note that {es} will make a GET request to `${ingest.geoip.downloader.endpoint}?elastic_geoip_service_tos=agree`, @@ -440,6 +447,6 @@ The GeoIP downloader uses the JDK's builtin cacerts. If you're using a custom en [[ingest-geoip-downloader-poll-interval]] `ingest.geoip.downloader.poll.interval`:: (<>, <>) -How often {es} checks for GeoIP2 database updates at the +How often {es} checks for IP geolocation database updates at the `ingest.geoip.downloader.endpoint`. Must be greater than `1d` (one day). 
Defaults to `3d` (three days). diff --git a/docs/reference/mapping/types/sparse-vector.asciidoc b/docs/reference/mapping/types/sparse-vector.asciidoc index 17a193eef1d4d..6c7ad6550753e 100644 --- a/docs/reference/mapping/types/sparse-vector.asciidoc +++ b/docs/reference/mapping/types/sparse-vector.asciidoc @@ -26,6 +26,8 @@ PUT my-index See <> for a complete example on adding documents to a `sparse_vector` mapped field using ELSER. +NOTE: `sparse_vector` fields can not be included in indices that were *created* on {es} versions between 8.0 and 8.10 + NOTE: `sparse_vector` fields only support single-valued fields and strictly positive values. Multi-valued fields and negative values will be rejected. diff --git a/docs/reference/migration/migrate_8_13.asciidoc b/docs/reference/migration/migrate_8_13.asciidoc index c9e726d940b1d..dca10671e57bc 100644 --- a/docs/reference/migration/migrate_8_13.asciidoc +++ b/docs/reference/migration/migrate_8_13.asciidoc @@ -16,14 +16,17 @@ coming::[8.13.0] [[breaking-changes-8.13]] === Breaking changes -The following changes in {es} 8.13 might affect your applications -and prevent them from operating normally. -Before upgrading to 8.13, review these changes and take the described steps -to mitigate the impact. +There are no breaking changes in 8.13. - -There are no notable breaking changes in {es} 8.13. -But there are some less critical breaking changes. +[discrete] +[[migrate-notable-changes-8.13]] +=== Notable changes +The following are notable, non-breaking updates to be aware of: + +* Changes to features that are in Technical Preview. +* Changes to log formats. +* Changes to non-public APIs. +* Behaviour changes that repair critical bugs. 
[discrete] [[breaking_813_index_setting_changes]] diff --git a/docs/reference/ml/anomaly-detection/apis/put-job.asciidoc b/docs/reference/ml/anomaly-detection/apis/put-job.asciidoc index 1ab5de76a94b0..e4e10e2ae2fc5 100644 --- a/docs/reference/ml/anomaly-detection/apis/put-job.asciidoc +++ b/docs/reference/ml/anomaly-detection/apis/put-job.asciidoc @@ -537,4 +537,4 @@ The API returns the following results: // TESTRESPONSE[s/"job_version" : "8.4.0"/"job_version" : $body.job_version/] // TESTRESPONSE[s/1656087283340/$body.$_path/] // TESTRESPONSE[s/"superuser"/"_es_test_root"/] -// TESTRESPONSE[s/"ignore_throttled" : true/"ignore_throttled" : true,"failure_store":"false"/] +// TESTRESPONSE[s/"ignore_throttled" : true/"ignore_throttled" : true,"failure_store":"exclude"/] diff --git a/docs/reference/modules/cluster/remote-clusters-troubleshooting.asciidoc b/docs/reference/modules/cluster/remote-clusters-troubleshooting.asciidoc index f7b08b40bb7ef..df3c54794dc06 100644 --- a/docs/reference/modules/cluster/remote-clusters-troubleshooting.asciidoc +++ b/docs/reference/modules/cluster/remote-clusters-troubleshooting.asciidoc @@ -77,6 +77,46 @@ org.elasticsearch.transport.ConnectTransportException: [][192.168.0.42:9443] *co server is enabled>> on the remote cluster. * Ensure no firewall is blocking the communication. +[[remote-clusters-unreliable-network]] +===== Remote cluster connection is unreliable + +====== Symptom + +The local cluster can connect to the remote cluster, but the connection does +not work reliably. For example, some cross-cluster requests may succeed while +others report connection errors, time out, or appear to be stuck waiting for +the remote cluster to respond. 
+ +When {es} detects that the remote cluster connection is not working, it will +report the following message in its logs: +[source,txt,subs=+quotes] +---- +[2023-06-28T16:36:47,264][INFO ][o.e.t.ClusterConnectionManager] [local-node] transport connection to [{my-remote#192.168.0.42:9443}{...}] closed by remote +---- +This message will also be logged if the node of the remote cluster to which +{es} is connected is shut down or restarted. + +Note that with some network configurations it could take minutes or hours for +the operating system to detect that a connection has stopped working. Until the +failure is detected and reported to {es}, requests involving the remote cluster +may time out or may appear to be stuck. + +====== Resolution + +* Ensure that the network between the clusters is as reliable as possible. + +* Ensure that the network is configured to permit <>. + +* Ensure that the network is configured to detect faulty connections quickly. + In particular, you must enable and fully support TCP keepalives, and set a + short <>. + +* On Linux systems, execute `ss -tonie` to verify the details of the + configuration of each network connection between the clusters. + +* If the problems persist, capture network packets at both ends of the + connection and analyse the traffic to look for delays and lost messages. + [[remote-clusters-troubleshooting-tls-trust]] ===== TLS trust not established diff --git a/docs/reference/release-notes.asciidoc b/docs/reference/release-notes.asciidoc index e3c8da281f2a1..05c97d51a38e7 100644 --- a/docs/reference/release-notes.asciidoc +++ b/docs/reference/release-notes.asciidoc @@ -7,6 +7,8 @@ This section summarizes the changes in each release. * <> +* <> +* <> * <> * <> * <> @@ -63,6 +65,8 @@ This section summarizes the changes in each release. 
-- include::release-notes/8.14.0.asciidoc[] +include::release-notes/8.13.2.asciidoc[] +include::release-notes/8.13.1.asciidoc[] include::release-notes/8.13.0.asciidoc[] include::release-notes/8.12.2.asciidoc[] include::release-notes/8.12.1.asciidoc[] diff --git a/docs/reference/release-notes/8.13.0.asciidoc b/docs/reference/release-notes/8.13.0.asciidoc index 47855773d0543..bcb533049f27d 100644 --- a/docs/reference/release-notes/8.13.0.asciidoc +++ b/docs/reference/release-notes/8.13.0.asciidoc @@ -7,6 +7,9 @@ Also see <>. [float] === Known issues +* Due to a bug in the bundled JDK 22 nodes might crash abruptly under high memory pressure. + We recommend <> asap to mitigate the issue. + * Nodes upgraded to 8.13.0 fail to load downsampling persistent tasks. This prevents them from joining the cluster, blocking its upgrade (issue: {es-issue}106880[#106880]) + This affects clusters running version 8.10 or later, with an active downsampling @@ -386,7 +389,7 @@ Security:: Snapshot/Restore:: * Add s3 `HeadObject` request to request stats {es-pull}105105[#105105] -* Expose `OperationPurpose` via `CustomQueryParameter` to s3 logs {es-pull}105044[#105044] +* Expose `OperationPurpose` in S3 access logs using a https://docs.aws.amazon.com/AmazonS3/latest/userguide/LogFormat.html#LogFormatCustom[custom query-string parameter] {es-pull}105044[#105044] * Fix blob cache race, decay, time dependency {es-pull}104784[#104784] * Pause shard snapshots on graceful shutdown {es-pull}101717[#101717] * Retry indefinitely for s3 indices blob read errors {es-pull}103300[#103300] diff --git a/docs/reference/release-notes/8.13.1.asciidoc b/docs/reference/release-notes/8.13.1.asciidoc new file mode 100644 index 0000000000000..9f5f34d27eb79 --- /dev/null +++ b/docs/reference/release-notes/8.13.1.asciidoc @@ -0,0 +1,33 @@ +[[release-notes-8.13.1]] +== {es} version 8.13.1 + +Also see <>. 
+ +[[bug-8.13.1]] +[float] +=== Bug fixes + +Aggregations:: +* Add test to exercise reduction of terms aggregation order by key {es-pull}106799[#106799] + +Downsampling:: +* Gate reading of optional string array for bwc {es-pull}106878[#106878] + +Machine Learning:: +* Fix Array out of bounds exception in the XLM Roberta tokenizer {es-pull}106655[#106655] + +Search:: +* Fix concurrency bug in `AbstractStringScriptFieldAutomatonQuery` {es-pull}106678[#106678] (issue: {es-issue}105911[#105911]) +* Fix the background set of significant terms aggregations in case the data is in different shards than the foreground set {es-pull}106564[#106564] + +Transform:: +* Fail checkpoint on missing clusters {es-pull}106793[#106793] (issues: {es-issue}104533[#104533], {es-issue}106790[#106790]) + +[[enhancement-8.13.1]] +[float] +=== Enhancements + +Transform:: +* Raise loglevel of events related to transform lifecycle from DEBUG to INFO {es-pull}106602[#106602] + + diff --git a/docs/reference/release-notes/8.13.2.asciidoc b/docs/reference/release-notes/8.13.2.asciidoc new file mode 100644 index 0000000000000..1842c4d959ecb --- /dev/null +++ b/docs/reference/release-notes/8.13.2.asciidoc @@ -0,0 +1,34 @@ +[[release-notes-8.13.2]] +== {es} version 8.13.2 + +Also see <>. 
+ +[[bug-8.13.2]] +[float] +=== Bug fixes + +Aggregations:: +* Address concurrency issue in top hits aggregation {es-pull}106990[#106990] + +Application:: +* [Connector API] Support numeric for configuration select option value type {es-pull}107059[#107059] + +Downsampling:: +* Fix a downsample persistent task assignment bug {es-pull}106247[#106247] +* Fix downsample action request serialization {es-pull}106920[#106920] + +ES|QL:: +* ESQL: Fix fully pruned aggregates {es-pull}106673[#106673] (issue: {es-issue}106427[#106427]) + +Packaging:: +* Downgrade JDK to JDK 21.0.2 {es-pull}107137[#107137] (issue: {es-issue}106987[#106987]) + +[[enhancement-8.13.2]] +[float] +=== Enhancements + +Security:: +* Query API Key Information API support for the `typed_keys` request parameter {es-pull}106873[#106873] (issue: {es-issue}106817[#106817]) +* Query API Keys support for both `aggs` and `aggregations` keywords {es-pull}107054[#107054] (issue: {es-issue}106839[#106839]) + + diff --git a/docs/reference/release-notes/highlights.asciidoc b/docs/reference/release-notes/highlights.asciidoc index 25096779521e4..8d9d743a239f5 100644 --- a/docs/reference/release-notes/highlights.asciidoc +++ b/docs/reference/release-notes/highlights.asciidoc @@ -31,46 +31,20 @@ endif::[] // tag::notable-highlights[] [discrete] -[[improve_storage_efficiency_for_non_metric_fields_in_tsdb]] -=== Improve storage efficiency for non-metric fields in TSDB -Adds a new `doc_values` encoding for non-metric fields in TSDB that takes advantage of TSDB's index sorting. -While terms that are used in multiple documents (such as the host name) are already stored only once in the terms dictionary, -there are a lot of repetitions in the references to the terms dictionary that are stored in `doc_values` (ordinals). -In TSDB, documents (and therefore `doc_values`) are implicitly sorted by dimenstions and timestamp. -This means that for each time series, we are storing long consecutive runs of the same ordinal. 
-With this change, we are introducing an encoding that detects and efficiently stores runs of the same value (such as `1 1 1 2 2 2 …`), -and runs of cycling values (such as `1 2 1 2 …`). -In our testing, we have seen a reduction in storage size by about 13%. -The effectiveness of this encoding depends on how many non-metric fields, such as dimensions, are used. -The more non-metric fields, the more effective this improvement will be. +[[add_global_retention_in_data_stream_lifecycle]] +=== Add global retention in data stream lifecycle +Data stream lifecycle now supports configuring retention on a cluster level, namely global retention. Global retention +allows us to configure two different retentions: -{es-pull}99747[#99747] +- `default_retention` is applied to all data streams managed by the data stream lifecycle that do not have retention +defined on the data stream level. +- `max_retention` is applied to all data streams managed by the data stream lifecycle and it allows any data stream +data to be deleted after the `max_retention` has passed. -[discrete] -[[ga_release_of_synonyms_api]] -=== GA Release of Synonyms API -Removes the beta label for the Synonyms API to make it GA. - -{es-pull}103223[#103223] - -[discrete] -[[flag_in_field_caps_to_return_only_fields_with_values_in_index]] -=== Flag in `_field_caps` to return only fields with values in index -We added support for filtering the field capabilities API output by removing -fields that don't have a value. This can be done through the newly added -`include_empty_fields` parameter, which defaults to true. - -{es-pull}103651[#103651] - -[discrete] -[[new_lucene_9_10_release]] -=== New Lucene 9.10 release -- https://github.com/apache/lucene/pull/13090: Prevent humongous allocations in ScalarQuantizer when building quantiles. 
-- https://github.com/apache/lucene/pull/12962: Speedup concurrent multi-segment HNSW graph search -- https://github.com/apache/lucene/pull/13033: Range queries on numeric/date/ip fields now exit earlier on segments whose values don't intersect with the query range. This should especially help when there are other required clauses in the `bool` query and when the range filter is narrow, e.g. filtering on the last 5 minutes. -- https://github.com/apache/lucene/pull/13026: `bool` queries that mix `filter` and `should` clauses will now propagate minimum competitive scores through the `should` clauses. This should yield speedups when sorting by descending score. +Furthermore, we introduce the term `effective_retention` which is the retention applied at a certain moment to a data +stream considering all the available retention configurations. -{es-pull}105578[#105578] +{es-pull}105682[#105682] // end::notable-highlights[] diff --git a/docs/reference/rest-api/common-parms.asciidoc b/docs/reference/rest-api/common-parms.asciidoc index d0fd4087c275c..39d9e90079bff 100644 --- a/docs/reference/rest-api/common-parms.asciidoc +++ b/docs/reference/rest-api/common-parms.asciidoc @@ -1218,9 +1218,9 @@ tag::timeoutparms[] tag::master-timeout[] `master_timeout`:: (Optional, <>) -Period to wait for a connection to the master node. If no response is received -before the timeout expires, the request fails and returns an error. Defaults to -`30s`. +Period to wait for the master node. If the master node is not available before +the timeout expires, the request fails and returns an error. Defaults to `30s`. +Can also be set to `-1` to indicate that the request should never time out. 
end::master-timeout[] tag::timeout[] diff --git a/docs/reference/rest-api/watcher/start.asciidoc b/docs/reference/rest-api/watcher/start.asciidoc index c565ca8693331..10811ac0b8617 100644 --- a/docs/reference/rest-api/watcher/start.asciidoc +++ b/docs/reference/rest-api/watcher/start.asciidoc @@ -27,10 +27,7 @@ information, see <>. [[watcher-api-start-query-params]] ==== {api-query-parms-title} -`master_timeout`:: -(Optional, <>) Specifies the period of time to wait for -a connection to the master node. If no response is received before the timeout -expires, the request fails and returns an error. Defaults to `30s`. +include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=master-timeout] //[[watcher-api-start-request-body]] //==== {api-request-body-title} diff --git a/docs/reference/rest-api/watcher/stop.asciidoc b/docs/reference/rest-api/watcher/stop.asciidoc index a981b4ccb0f69..c06090a3cd999 100644 --- a/docs/reference/rest-api/watcher/stop.asciidoc +++ b/docs/reference/rest-api/watcher/stop.asciidoc @@ -27,10 +27,7 @@ information, see <>. [[watcher-api-stop-query-params]] ==== {api-query-parms-title} -`master_timeout`:: -(Optional, <>) Specifies the period of time to wait for -a connection to the master node. If no response is received before the timeout -expires, the request fails and returns an error. Defaults to `30s`. 
+include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=master-timeout] //[[watcher-api-stop-request-body]] //==== {api-request-body-title} diff --git a/docs/reference/search/search-your-data/semantic-search-inference.asciidoc b/docs/reference/search/search-your-data/semantic-search-inference.asciidoc index b5619f8dda7b9..53abf0f0458af 100644 --- a/docs/reference/search/search-your-data/semantic-search-inference.asciidoc +++ b/docs/reference/search/search-your-data/semantic-search-inference.asciidoc @@ -6,9 +6,11 @@ The instructions in this tutorial shows you how to use the {infer} API with various services to perform semantic search on your data. The following examples -use Cohere's `embed-english-v3.0` model and OpenAI's `text-embedding-ada-002` -second generation embedding model. You can use any Cohere and OpenAI models, -they are all supported by the {infer} API. +use Cohere's `embed-english-v3.0` model, the `all-mpnet-base-v2` model from +HuggingFace, and OpenAI's `text-embedding-ada-002` second generation embedding +model. You can use any Cohere and OpenAI models, they are all supported by the +{infer} API. For a list of supported models available on HuggingFace, refer to +<>. Click the name of the service you want to use on any of the widgets below to review the corresponding instructions. @@ -91,7 +93,7 @@ GET _tasks/ // TEST[skip:TBD] You can also cancel the reindexing process if you don't want to wait until the -reindexing process is fully complete which might take hours: +reindexing process is fully complete which might take hours for large data sets: [source,console] ---- @@ -104,7 +106,7 @@ POST _tasks//_cancel [[infer-semantic-search]] ==== Semantic search -After the dataset has been enriched with the embeddings, you can query the data +After the data set has been enriched with the embeddings, you can query the data using {ref}/knn-search.html#knn-semantic-search[semantic search]. 
Pass a `query_vector_builder` to the k-nearest neighbor (kNN) vector search API, and provide the query text and the model you have used to create the embeddings. diff --git a/docs/reference/setup/secure-settings.asciidoc b/docs/reference/setup/secure-settings.asciidoc index cb88be94e17b6..04113c949122b 100644 --- a/docs/reference/setup/secure-settings.asciidoc +++ b/docs/reference/setup/secure-settings.asciidoc @@ -25,7 +25,11 @@ are node-specific settings that must have the same value on every node. Just like the settings values in `elasticsearch.yml`, changes to the keystore contents are not automatically applied to the running {es} node. Re-reading settings requires a node restart. However, certain secure settings are marked as -*reloadable*. Such settings can be <>. +*reloadable*. Such settings can be re-read and applied on a running node. + +You can define these settings before the node is started, +or call the <> +after the settings are defined to apply them to a running node. The values of all secure settings, *reloadable* or not, must be identical across all cluster nodes. After making the desired secure settings changes, diff --git a/docs/reference/snapshot-restore/apis/clone-snapshot-api.asciidoc b/docs/reference/snapshot-restore/apis/clone-snapshot-api.asciidoc index 7c17d776e5024..fab734e8413c3 100644 --- a/docs/reference/snapshot-restore/apis/clone-snapshot-api.asciidoc +++ b/docs/reference/snapshot-restore/apis/clone-snapshot-api.asciidoc @@ -42,10 +42,7 @@ Name of the snapshot repository that both source and target snapshot belong to. [[clone-snapshot-api-query-params]] ==== {api-query-parms-title} -`master_timeout`:: -(Optional, <>) Specifies the period of time to wait for -a connection to the master node. If no response is received before the timeout -expires, the request fails and returns an error. Defaults to `30s`. 
+include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=master-timeout] `timeout`:: (Optional, <>) Specifies the period of time to wait for @@ -55,4 +52,4 @@ fails and returns an error. Defaults to `30s`. `indices`:: (Required, string) A comma-separated list of indices to include in the snapshot. -<> is supported. \ No newline at end of file +<> is supported. diff --git a/docs/reference/snapshot-restore/apis/delete-repo-api.asciidoc b/docs/reference/snapshot-restore/apis/delete-repo-api.asciidoc index 96000cb731a96..d3ebeeac3c036 100644 --- a/docs/reference/snapshot-restore/apis/delete-repo-api.asciidoc +++ b/docs/reference/snapshot-restore/apis/delete-repo-api.asciidoc @@ -51,10 +51,7 @@ supported. [[delete-snapshot-repo-api-query-params]] ==== {api-query-parms-title} -`master_timeout`:: -(Optional, <>) Specifies the period of time to wait for -a connection to the master node. If no response is received before the timeout -expires, the request fails and returns an error. Defaults to `30s`. +include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=master-timeout] `timeout`:: (Optional, <>) Specifies the period of time to wait for diff --git a/docs/reference/snapshot-restore/apis/get-repo-api.asciidoc b/docs/reference/snapshot-restore/apis/get-repo-api.asciidoc index ecc8d896a5b6c..1f03a44c5e49f 100644 --- a/docs/reference/snapshot-restore/apis/get-repo-api.asciidoc +++ b/docs/reference/snapshot-restore/apis/get-repo-api.asciidoc @@ -59,10 +59,7 @@ cluster, omit this parameter or use `*` or `_all`. only. If `false`, the request gets information from the master node. Defaults to `false`. -`master_timeout`:: -(Optional, <>) Specifies the period of time to wait for -a connection to the master node. If no response is received before the timeout -expires, the request fails and returns an error. Defaults to `30s`. 
+include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=master-timeout] [role="child_attributes"] [[get-snapshot-repo-api-response-body]] diff --git a/docs/reference/snapshot-restore/apis/put-repo-api.asciidoc b/docs/reference/snapshot-restore/apis/put-repo-api.asciidoc index a50d4e3311937..1154b970b907d 100644 --- a/docs/reference/snapshot-restore/apis/put-repo-api.asciidoc +++ b/docs/reference/snapshot-restore/apis/put-repo-api.asciidoc @@ -52,10 +52,7 @@ IMPORTANT: Several options for this API can be specified using a query parameter or a request body parameter. If both parameters are specified, only the query parameter is used. -`master_timeout`:: -(Optional, <>) Specifies the period of time to wait for -a connection to the master node. If no response is received before the timeout -expires, the request fails and returns an error. Defaults to `30s`. +include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=master-timeout] `timeout`:: (Optional, <>) Specifies the period of time to wait for diff --git a/docs/reference/snapshot-restore/apis/verify-repo-api.asciidoc b/docs/reference/snapshot-restore/apis/verify-repo-api.asciidoc index c0af86df2aca8..f0fc659df1a41 100644 --- a/docs/reference/snapshot-restore/apis/verify-repo-api.asciidoc +++ b/docs/reference/snapshot-restore/apis/verify-repo-api.asciidoc @@ -47,10 +47,7 @@ Name of the snapshot repository to verify. [[verify-snapshot-repo-api-query-params]] ==== {api-query-parms-title} -`master_timeout`:: -(Optional, <>) Specifies the period of time to wait for -a connection to the master node. If no response is received before the timeout -expires, the request fails and returns an error. Defaults to `30s`. 
+include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=master-timeout] `timeout`:: (Optional, <>) Specifies the period of time to wait for diff --git a/docs/reference/snapshot-restore/repository-azure.asciidoc b/docs/reference/snapshot-restore/repository-azure.asciidoc index 35cf454906050..f3d04159bc025 100644 --- a/docs/reference/snapshot-restore/repository-azure.asciidoc +++ b/docs/reference/snapshot-restore/repository-azure.asciidoc @@ -6,8 +6,10 @@ You can use https://docs.microsoft.com/en-us/azure/storage/blobs/storage-blobs-i [[repository-azure-usage]] ==== Setup -To enable Azure repositories, you have first to define your azure storage settings as -{ref}/secure-settings.html[secure settings], before starting up the node: +To enable Azure repositories, you must first define your Azure storage settings as +{ref}/secure-settings.html[secure settings]. + +You can define these settings before the node is started, or call the <> after the settings are defined to apply them to a running node. [source,sh] ---------------------------------------------------------------- diff --git a/docs/reference/snapshot-restore/repository-gcs.asciidoc b/docs/reference/snapshot-restore/repository-gcs.asciidoc index b359952715a73..551df223ba0d3 100644 --- a/docs/reference/snapshot-restore/repository-gcs.asciidoc +++ b/docs/reference/snapshot-restore/repository-gcs.asciidoc @@ -117,6 +117,10 @@ PUT _snapshot/my_gcs_repository // TEST[skip:we don't have gcs setup while testing this] The `credentials_file` settings are {ref}/secure-settings.html#reloadable-secure-settings[reloadable]. +You can define these settings before the node is started, +or call the <> +after the settings are defined to apply them to a running node. + After you reload the settings, the internal `gcs` clients, which are used to transfer the snapshot contents, utilize the latest settings from the keystore. 
diff --git a/docs/reference/snapshot-restore/repository-s3.asciidoc b/docs/reference/snapshot-restore/repository-s3.asciidoc index 0c79793ee6c5a..9ee630c37eee2 100644 --- a/docs/reference/snapshot-restore/repository-s3.asciidoc +++ b/docs/reference/snapshot-restore/repository-s3.asciidoc @@ -56,7 +56,7 @@ For more information about creating and updating the {es} keystore, see {ref}/secure-settings.html[Secure settings]. For example, if you want to use specific credentials to access S3 then run the -following commands to add these credentials to the keystore: +following commands to add these credentials to the keystore. [source,sh] ---- @@ -80,8 +80,12 @@ bin/elasticsearch-keystore remove s3.client.default.session_token ---- *All* client secure settings of this repository type are -{ref}/secure-settings.html#reloadable-secure-settings[reloadable]. After you -reload the settings, the internal `s3` clients, used to transfer the snapshot +{ref}/secure-settings.html#reloadable-secure-settings[reloadable]. +You can define these settings before the node is started, +or call the <> +after the settings are defined to apply them to a running node. + +After you reload the settings, the internal `s3` clients, used to transfer the snapshot contents, will utilize the latest settings from the keystore. Any existing `s3` repositories, as well as any newly created ones, will pick up the new values stored in the keystore. @@ -541,13 +545,17 @@ MinIO-backed repositories as well as repositories stored on AWS S3. Other S3-compatible storage systems may also work with {es}, but these are not covered by the {es} test suite. -Note that some storage systems claim to be S3-compatible but do not faithfully -emulate S3's behaviour in full. The `repository-s3` type requires full -compatibility with S3. 
In particular it must support the same set of API -endpoints, return the same errors in case of failures, and offer consistency and -performance at least as good as S3 even when accessed concurrently by multiple -nodes. You will need to work with the supplier of your storage system to address -any incompatibilities you encounter. Please do not report {es} issues involving +There are many systems, including some from very well-known storage vendors, +which claim to offer an S3-compatible API despite failing to emulate S3's +behaviour in full. If you are using such a system for your snapshots, consider +using a <> based +on a standardized protocol such as NFS to access your storage system instead. +The `repository-s3` type requires full compatibility with S3. In particular it +must support the same set of API endpoints, with the same parameters, return +the same errors in case of failures, and offer consistency and performance at +least as good as S3 even when accessed concurrently by multiple nodes. You will +need to work with the supplier of your storage system to address any +incompatibilities you encounter. Please do not report {es} issues involving storage systems which claim to be S3-compatible unless you can demonstrate that the same issue exists when using a genuine AWS S3 repository. diff --git a/docs/reference/tab-widgets/inference-api/infer-api-ingest-pipeline-widget.asciidoc b/docs/reference/tab-widgets/inference-api/infer-api-ingest-pipeline-widget.asciidoc index 44d2f60966caa..069dcb61f81b0 100644 --- a/docs/reference/tab-widgets/inference-api/infer-api-ingest-pipeline-widget.asciidoc +++ b/docs/reference/tab-widgets/inference-api/infer-api-ingest-pipeline-widget.asciidoc @@ -7,6 +7,12 @@ id="infer-api-ingest-cohere"> Cohere + + + + + +