From b84bd458b67ac457557f168d27e8aa519e90ae92 Mon Sep 17 00:00:00 2001 From: shainaraskas <58563081+shainaraskas@users.noreply.github.com> Date: Mon, 6 May 2024 09:18:17 -0400 Subject: [PATCH 001/117] [DOCS] clarify that the repo location setting accepts only one value (#108267) --- .../snapshot-restore/repository-shared-file-system.asciidoc | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/docs/reference/snapshot-restore/repository-shared-file-system.asciidoc b/docs/reference/snapshot-restore/repository-shared-file-system.asciidoc index be5347845a2fb..4387c2568c18c 100644 --- a/docs/reference/snapshot-restore/repository-shared-file-system.asciidoc +++ b/docs/reference/snapshot-restore/repository-shared-file-system.asciidoc @@ -33,7 +33,8 @@ in snapshots. Data files are not compressed. Defaults to `true`. (Required, string) Location of the shared filesystem used to store and retrieve snapshots. This location must be registered in the `path.repo` setting on all master and data -nodes in the cluster. +nodes in the cluster. +Unlike `path.repo`, this setting supports only a single file path. `max_number_of_snapshots`:: (Optional, integer) From 58729edc303ed5107649b1bd69dd899f13d0be23 Mon Sep 17 00:00:00 2001 From: shainaraskas <58563081+shainaraskas@users.noreply.github.com> Date: Mon, 6 May 2024 09:18:51 -0400 Subject: [PATCH 002/117] add gatekeeper workaround (#108265) --- docs/reference/setup/install/targz.asciidoc | 15 +++++++++++++++ 1 file changed, 15 insertions(+) diff --git a/docs/reference/setup/install/targz.asciidoc b/docs/reference/setup/install/targz.asciidoc index 470299abe9ac1..d40a4bfdd7e74 100644 --- a/docs/reference/setup/install/targz.asciidoc +++ b/docs/reference/setup/install/targz.asciidoc @@ -48,6 +48,21 @@ WARNING: Version {version} of {es} has not yet been released. 
endif::[] +[IMPORTANT] +.macOS Gatekeeper warnings +==== +Apple's rollout of stricter notarization requirements affected the notarization of the {version} {es} artifacts. If macOS displays a dialog when you first run {es} that interrupts it, then you need to take an action to allow it to run. + +To prevent Gatekeeper checks on the {es} files, run the following command on the downloaded .tar.gz archive or the directory to which it was extracted: + +[source,sh] +---- +xattr -d -r com.apple.quarantine <archive-or-directory> +---- + +Alternatively, you can add a security override by following the instructions in the _If you want to open an app that hasn't been notarized or is from an unidentified developer_ section of https://support.apple.com/en-us/HT202491[Safely open apps on your Mac]. +==== + The MacOS archive for {es} v{version} can be downloaded and installed as follows: ["source","sh",subs="attributes"] From e604a055e7f6a094c233b92ebcc20df484238070 Mon Sep 17 00:00:00 2001 From: Ignacio Vera Date: Mon, 6 May 2024 15:33:35 +0200 Subject: [PATCH 003/117] Optimize FieldData#toString methods for single value fields (#108291) This commit creates singleton instances when applicable in the FieldData#toString methods.
--- .../index/fielddata/FieldData.java | 138 +++++++++++++- .../AbstractFieldDataImplTestCase.java | 2 + .../index/fielddata/GeoFieldDataTests.java | 2 + .../SortedNumericDoubleFieldDataTests.java | 178 ++++++++++++++++++ .../SortedNumericFieldDataTests.java | 158 ++++++++++++++++ 5 files changed, 477 insertions(+), 1 deletion(-) create mode 100644 server/src/test/java/org/elasticsearch/index/fielddata/SortedNumericDoubleFieldDataTests.java create mode 100644 server/src/test/java/org/elasticsearch/index/fielddata/SortedNumericFieldDataTests.java diff --git a/server/src/main/java/org/elasticsearch/index/fielddata/FieldData.java b/server/src/main/java/org/elasticsearch/index/fielddata/FieldData.java index 4c1fb5e1e1502..8a32e50473218 100644 --- a/server/src/main/java/org/elasticsearch/index/fielddata/FieldData.java +++ b/server/src/main/java/org/elasticsearch/index/fielddata/FieldData.java @@ -11,9 +11,11 @@ import org.apache.lucene.index.BinaryDocValues; import org.apache.lucene.index.DocValues; import org.apache.lucene.index.NumericDocValues; +import org.apache.lucene.index.SortedDocValues; import org.apache.lucene.index.SortedNumericDocValues; import org.apache.lucene.index.SortedSetDocValues; import org.apache.lucene.util.BytesRef; +import org.apache.lucene.util.BytesRefBuilder; import org.elasticsearch.common.geo.SpatialPoint; import java.io.IOException; @@ -254,6 +256,12 @@ public static boolean isMultiValued(SortedSetDocValues values) { * NOTE: this is very slow! */ public static SortedBinaryDocValues toString(final SortedNumericDocValues values) { + { + final NumericDocValues singleton = DocValues.unwrapSingleton(values); + if (singleton != null) { + return FieldData.singleton(toString(singleton)); + } + } return toString(new ToStringValues() { @Override public boolean advanceExact(int doc) throws IOException { @@ -269,12 +277,37 @@ public void get(List list) throws IOException { }); } + /** + * Return a {@link String} representation of the provided values. 
That is + * typically used for scripts or for the `map` execution mode of terms aggs. + * NOTE: this is very slow! + */ + public static BinaryDocValues toString(final NumericDocValues values) { + return toString(new ToStringValue() { + @Override + public boolean advanceExact(int doc) throws IOException { + return values.advanceExact(doc); + } + + @Override + public CharSequence get() throws IOException { + return Long.toString(values.longValue()); + } + }); + } + /** * Return a {@link String} representation of the provided values. That is * typically used for scripts or for the `map` execution mode of terms aggs. * NOTE: this is very slow! */ public static SortedBinaryDocValues toString(final SortedNumericDoubleValues values) { + { + final NumericDoubleValues singleton = FieldData.unwrapSingleton(values); + if (singleton != null) { + return FieldData.singleton(toString(singleton)); + } + } return toString(new ToStringValues() { @Override public boolean advanceExact(int doc) throws IOException { @@ -290,12 +323,37 @@ public void get(List list) throws IOException { }); } + /** + * Return a {@link String} representation of the provided values. That is + * typically used for scripts or for the `map` execution mode of terms aggs. + * NOTE: this is very slow! + */ + public static BinaryDocValues toString(final NumericDoubleValues values) { + return toString(new ToStringValue() { + @Override + public boolean advanceExact(int doc) throws IOException { + return values.advanceExact(doc); + } + + @Override + public CharSequence get() throws IOException { + return Double.toString(values.doubleValue()); + } + }); + } + /** * Return a {@link String} representation of the provided values. That is * typically used for scripts or for the `map` execution mode of terms aggs. * NOTE: this is slow! 
*/ public static SortedBinaryDocValues toString(final SortedSetDocValues values) { + { + final SortedDocValues singleton = DocValues.unwrapSingleton(values); + if (singleton != null) { + return FieldData.singleton(toString(singleton)); + } + } return new SortedBinaryDocValues() { @Override @@ -312,7 +370,26 @@ public int docValueCount() { public BytesRef nextValue() throws IOException { return values.lookupOrd(values.nextOrd()); } + }; + } + + /** + * Return a {@link String} representation of the provided values. That is + * typically used for scripts or for the `map` execution mode of terms aggs. + * NOTE: this is slow! + */ + public static BinaryDocValues toString(final SortedDocValues values) { + return new AbstractBinaryDocValues() { + + @Override + public BytesRef binaryValue() throws IOException { + return values.lookupOrd(values.ordValue()); + } + @Override + public boolean advanceExact(int doc) throws IOException { + return values.advanceExact(doc); + } }; } @@ -322,6 +399,12 @@ public BytesRef nextValue() throws IOException { * NOTE: this is very slow! */ public static SortedBinaryDocValues toString(final MultiGeoPointValues values) { + { + final GeoPointValues singleton = FieldData.unwrapSingleton(values); + if (singleton != null) { + return FieldData.singleton(toString(singleton)); + } + } return toString(new ToStringValues() { @Override public boolean advanceExact(int doc) throws IOException { @@ -337,6 +420,25 @@ public void get(List list) throws IOException { }); } + /** + * Return a {@link String} representation of the provided values. That is + * typically used for scripts or for the `map` execution mode of terms aggs. + * NOTE: this is very slow! 
+ */ + public static BinaryDocValues toString(final GeoPointValues values) { + return toString(new ToStringValue() { + @Override + public boolean advanceExact(int doc) throws IOException { + return values.advanceExact(doc); + } + + @Override + public CharSequence get() throws IOException { + return values.pointValue().toString(); + } + }); + } + private static SortedBinaryDocValues toString(final ToStringValues toStringValues) { return new SortingBinaryDocValues() { @@ -362,6 +464,27 @@ public boolean advanceExact(int docID) throws IOException { }; } + private static BinaryDocValues toString(final ToStringValue toStringValue) { + return new AbstractBinaryDocValues() { + private final BytesRefBuilder builder = new BytesRefBuilder(); + + @Override + public BytesRef binaryValue() { + return builder.toBytesRef(); + } + + @Override + public boolean advanceExact(int docID) throws IOException { + if (toStringValue.advanceExact(docID)) { + builder.clear(); + builder.copyChars(toStringValue.get()); + return true; + } + return false; + } + }; + } + private interface ToStringValues { /** @@ -370,11 +493,24 @@ private interface ToStringValues { */ boolean advanceExact(int doc) throws IOException; - /** Fill the list of charsquences with the list of values for the current document. */ + /** Fill the list of {@link CharSequence} with the list of values for the current document. */ void get(List values) throws IOException; } + private interface ToStringValue { + + /** + * Advance this instance to the given document id + * @return true if there is a value for this document + */ + boolean advanceExact(int doc) throws IOException; + + /** return the {@link CharSequence} for the current document. 
*/ + CharSequence get() throws IOException; + + } + private static class DoubleCastedValues extends NumericDoubleValues { private final NumericDocValues values; diff --git a/server/src/test/java/org/elasticsearch/index/fielddata/AbstractFieldDataImplTestCase.java b/server/src/test/java/org/elasticsearch/index/fielddata/AbstractFieldDataImplTestCase.java index 68671f73372ba..8b6644b382bac 100644 --- a/server/src/test/java/org/elasticsearch/index/fielddata/AbstractFieldDataImplTestCase.java +++ b/server/src/test/java/org/elasticsearch/index/fielddata/AbstractFieldDataImplTestCase.java @@ -91,6 +91,7 @@ public void testSingleValueAllSet() throws Exception { assertThat(fieldData.ramBytesUsed(), greaterThanOrEqualTo(minRamBytesUsed())); SortedBinaryDocValues bytesValues = fieldData.getBytesValues(); + assertNotNull(FieldData.unwrapSingleton(bytesValues)); assertTrue(bytesValues.advanceExact(0)); assertThat(bytesValues.docValueCount(), equalTo(1)); @@ -183,6 +184,7 @@ public void testMultiValueAllSet() throws Exception { assertThat(fieldData.ramBytesUsed(), greaterThanOrEqualTo(minRamBytesUsed())); SortedBinaryDocValues bytesValues = fieldData.getBytesValues(); + assertNull(FieldData.unwrapSingleton(bytesValues)); assertValues(bytesValues, 0, two(), four()); assertValues(bytesValues, 1, one()); assertValues(bytesValues, 2, three()); diff --git a/server/src/test/java/org/elasticsearch/index/fielddata/GeoFieldDataTests.java b/server/src/test/java/org/elasticsearch/index/fielddata/GeoFieldDataTests.java index 9200aa0c236d9..8c14be344b48a 100644 --- a/server/src/test/java/org/elasticsearch/index/fielddata/GeoFieldDataTests.java +++ b/server/src/test/java/org/elasticsearch/index/fielddata/GeoFieldDataTests.java @@ -150,6 +150,7 @@ public void testSingleValueAllSet() throws Exception { assertThat(fieldData.ramBytesUsed(), greaterThanOrEqualTo(minRamBytesUsed())); MultiGeoPointValues fieldValues = ((LeafGeoPointFieldData) fieldData).getPointValues(); + 
assertNotNull(FieldData.unwrapSingleton(fieldValues)); assertValues(fieldValues, 0); assertValues(fieldValues, 1); assertValues(fieldValues, 2); @@ -182,6 +183,7 @@ public void testMultiValueAllSet() throws Exception { assertThat(fieldData.ramBytesUsed(), greaterThanOrEqualTo(minRamBytesUsed())); MultiGeoPointValues fieldValues = ((LeafGeoPointFieldData) fieldData).getPointValues(); + assertNull(FieldData.unwrapSingleton(fieldValues)); assertValues(fieldValues, 0); assertValues(fieldValues, 1); assertValues(fieldValues, 2); diff --git a/server/src/test/java/org/elasticsearch/index/fielddata/SortedNumericDoubleFieldDataTests.java b/server/src/test/java/org/elasticsearch/index/fielddata/SortedNumericDoubleFieldDataTests.java new file mode 100644 index 0000000000000..662b8bd5fd5ec --- /dev/null +++ b/server/src/test/java/org/elasticsearch/index/fielddata/SortedNumericDoubleFieldDataTests.java @@ -0,0 +1,178 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. 
+ */ + +package org.elasticsearch.index.fielddata; + +import org.apache.lucene.document.Document; +import org.apache.lucene.document.DoubleField; +import org.apache.lucene.document.Field.Store; +import org.apache.lucene.document.SortedSetDocValuesField; +import org.apache.lucene.document.StringField; +import org.apache.lucene.index.Term; +import org.apache.lucene.util.BytesRef; + +public class SortedNumericDoubleFieldDataTests extends AbstractFieldDataImplTestCase { + private void addField(Document d, String name, String value) { + d.add(new StringField(name, value, Store.YES)); + d.add(new SortedSetDocValuesField(name, new BytesRef(value))); + } + + private void addField(Document d, String name, double value) { + d.add(new DoubleField(name, value, Store.NO)); + } + + @Override + protected String one() { + return "1.0"; + } + + @Override + protected String two() { + return "2.0"; + } + + @Override + protected String three() { + return "3.0"; + } + + @Override + protected String four() { + return "4.0"; + } + + @Override + protected void fillSingleValueAllSet() throws Exception { + Document d = new Document(); + addField(d, "_id", "1"); + addField(d, "value", 2.0); + writer.addDocument(d); + + d = new Document(); + addField(d, "_id", "1"); + addField(d, "value", 1.0); + writer.addDocument(d); + + d = new Document(); + addField(d, "_id", "3"); + addField(d, "value", 3.0); + writer.addDocument(d); + } + + @Override + protected void add2SingleValuedDocumentsAndDeleteOneOfThem() throws Exception { + Document d = new Document(); + addField(d, "_id", "1"); + addField(d, "value", 2.0); + writer.addDocument(d); + + d = new Document(); + addField(d, "_id", "2"); + addField(d, "value", 4.0); + writer.addDocument(d); + + writer.commit(); + + writer.deleteDocuments(new Term("_id", "1")); + } + + @Override + protected void fillSingleValueWithMissing() throws Exception { + Document d = new Document(); + addField(d, "_id", "1"); + addField(d, "value", 2.0); + 
writer.addDocument(d); + + d = new Document(); + addField(d, "_id", "2"); + // d.add(new StringField("value", one(), Field.Store.NO)); // MISSING.... + writer.addDocument(d); + + d = new Document(); + addField(d, "_id", "3"); + addField(d, "value", 3.0); + writer.addDocument(d); + } + + @Override + protected void fillMultiValueAllSet() throws Exception { + Document d = new Document(); + addField(d, "_id", "1"); + addField(d, "value", 2.0); + addField(d, "value", 4.0); + writer.addDocument(d); + + d = new Document(); + addField(d, "_id", "2"); + addField(d, "value", 1.0); + writer.addDocument(d); + writer.commit(); // TODO: Have tests with more docs for sorting + + d = new Document(); + addField(d, "_id", "3"); + addField(d, "value", 3.0); + writer.addDocument(d); + } + + @Override + protected void fillMultiValueWithMissing() throws Exception { + Document d = new Document(); + addField(d, "_id", "1"); + addField(d, "value", 2.0); + addField(d, "value", 4.0); + writer.addDocument(d); + + d = new Document(); + addField(d, "_id", "2"); + // d.add(new StringField("value", one(), Field.Store.NO)); // MISSING + writer.addDocument(d); + + d = new Document(); + addField(d, "_id", "3"); + addField(d, "value", 3.0); + writer.addDocument(d); + } + + @Override + protected void fillAllMissing() throws Exception { + Document d = new Document(); + addField(d, "_id", "1"); + writer.addDocument(d); + + d = new Document(); + addField(d, "_id", "2"); + writer.addDocument(d); + + d = new Document(); + addField(d, "_id", "3"); + writer.addDocument(d); + } + + @Override + protected void fillExtendedMvSet() { + throw new UnsupportedOperationException(); + } + + @Override + protected String getFieldDataType() { + return "double"; + } + + protected boolean hasDocValues() { + return true; + } + + protected long minRamBytesUsed() { + // minimum number of bytes that this fielddata instance is expected to require + return 0L; + } + + public void testSortMultiValuesFields() { + assumeTrue("Does 
not apply for Numeric double doc values", false); + } +} diff --git a/server/src/test/java/org/elasticsearch/index/fielddata/SortedNumericFieldDataTests.java b/server/src/test/java/org/elasticsearch/index/fielddata/SortedNumericFieldDataTests.java new file mode 100644 index 0000000000000..aae3778d805a9 --- /dev/null +++ b/server/src/test/java/org/elasticsearch/index/fielddata/SortedNumericFieldDataTests.java @@ -0,0 +1,158 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.index.fielddata; + +import org.apache.lucene.document.Document; +import org.apache.lucene.document.Field.Store; +import org.apache.lucene.document.LongField; +import org.apache.lucene.document.SortedSetDocValuesField; +import org.apache.lucene.document.StringField; +import org.apache.lucene.index.Term; +import org.apache.lucene.util.BytesRef; + +public class SortedNumericFieldDataTests extends AbstractFieldDataImplTestCase { + private void addField(Document d, String name, String value) { + d.add(new StringField(name, value, Store.YES)); + d.add(new SortedSetDocValuesField(name, new BytesRef(value))); + } + + private void addField(Document d, String name, Long value) { + d.add(new LongField(name, value, Store.NO)); + } + + @Override + protected void fillSingleValueAllSet() throws Exception { + Document d = new Document(); + addField(d, "_id", "1"); + addField(d, "value", 2L); + writer.addDocument(d); + + d = new Document(); + addField(d, "_id", "1"); + addField(d, "value", 1L); + writer.addDocument(d); + + d = new Document(); + addField(d, "_id", "3"); + addField(d, "value", 3L); + writer.addDocument(d); + } + + @Override + protected void 
add2SingleValuedDocumentsAndDeleteOneOfThem() throws Exception { + Document d = new Document(); + addField(d, "_id", "1"); + addField(d, "value", 2L); + writer.addDocument(d); + + d = new Document(); + addField(d, "_id", "2"); + addField(d, "value", 4L); + writer.addDocument(d); + + writer.commit(); + + writer.deleteDocuments(new Term("_id", "1")); + } + + @Override + protected void fillSingleValueWithMissing() throws Exception { + Document d = new Document(); + addField(d, "_id", "1"); + addField(d, "value", 2L); + writer.addDocument(d); + + d = new Document(); + addField(d, "_id", "2"); + // d.add(new StringField("value", one(), Field.Store.NO)); // MISSING.... + writer.addDocument(d); + + d = new Document(); + addField(d, "_id", "3"); + addField(d, "value", 3L); + writer.addDocument(d); + } + + @Override + protected void fillMultiValueAllSet() throws Exception { + Document d = new Document(); + addField(d, "_id", "1"); + addField(d, "value", 2L); + addField(d, "value", 4L); + writer.addDocument(d); + + d = new Document(); + addField(d, "_id", "2"); + addField(d, "value", 1L); + writer.addDocument(d); + writer.commit(); // TODO: Have tests with more docs for sorting + + d = new Document(); + addField(d, "_id", "3"); + addField(d, "value", 3L); + writer.addDocument(d); + } + + @Override + protected void fillMultiValueWithMissing() throws Exception { + Document d = new Document(); + addField(d, "_id", "1"); + addField(d, "value", 2L); + addField(d, "value", 4L); + writer.addDocument(d); + + d = new Document(); + addField(d, "_id", "2"); + // d.add(new StringField("value", one(), Field.Store.NO)); // MISSING + writer.addDocument(d); + + d = new Document(); + addField(d, "_id", "3"); + addField(d, "value", 3L); + writer.addDocument(d); + } + + @Override + protected void fillAllMissing() throws Exception { + Document d = new Document(); + addField(d, "_id", "1"); + writer.addDocument(d); + + d = new Document(); + addField(d, "_id", "2"); + writer.addDocument(d); + + d 
= new Document(); + addField(d, "_id", "3"); + writer.addDocument(d); + } + + @Override + protected void fillExtendedMvSet() { + throw new UnsupportedOperationException(); + } + + @Override + protected String getFieldDataType() { + return "long"; + } + + protected boolean hasDocValues() { + return true; + } + + protected long minRamBytesUsed() { + // minimum number of bytes that this fielddata instance is expected to require + return 0L; + } + + public void testSortMultiValuesFields() { + assumeTrue("Does not apply for Numeric doc values", false); + } +} From 1be1110740c6b8331ea776d4715e51ce11fa4486 Mon Sep 17 00:00:00 2001 From: Liam Thompson <32779855+leemthompo@users.noreply.github.com> Date: Mon, 6 May 2024 15:52:25 +0200 Subject: [PATCH 004/117] [DOCS] Clarify `retriever` is not API (#108295) --- docs/reference/search/retriever.asciidoc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/reference/search/retriever.asciidoc b/docs/reference/search/retriever.asciidoc index 6301f439e9b5b..c47ccd60afc05 100644 --- a/docs/reference/search/retriever.asciidoc +++ b/docs/reference/search/retriever.asciidoc @@ -1,5 +1,5 @@ [[retriever]] -=== Retriever API +=== Retriever preview::["This functionality is in technical preview and may be changed or removed in a future release. The syntax will likely change before GA. 
Elastic will work to fix any issues, but features in technical preview are not subject to the support SLA of official GA features."] From 609f5f8015c899c8269cf5f2d66b021b8b943aa3 Mon Sep 17 00:00:00 2001 From: Moritz Mack Date: Mon, 6 May 2024 15:57:08 +0200 Subject: [PATCH 005/117] Extend timeout of assertBusy in DeprecationHttpIT for remaining places (#108229) --- .../elasticsearch/xpack/deprecation/DeprecationHttpIT.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/x-pack/plugin/deprecation/qa/rest/src/javaRestTest/java/org/elasticsearch/xpack/deprecation/DeprecationHttpIT.java b/x-pack/plugin/deprecation/qa/rest/src/javaRestTest/java/org/elasticsearch/xpack/deprecation/DeprecationHttpIT.java index a0ce8b628e662..a59ab89512b00 100644 --- a/x-pack/plugin/deprecation/qa/rest/src/javaRestTest/java/org/elasticsearch/xpack/deprecation/DeprecationHttpIT.java +++ b/x-pack/plugin/deprecation/qa/rest/src/javaRestTest/java/org/elasticsearch/xpack/deprecation/DeprecationHttpIT.java @@ -140,7 +140,7 @@ public void testDeprecatedSettingsReturnWarnings() throws Exception { List> documents = DeprecationTestUtils.getIndexedDeprecations(client(), xOpaqueId()); logger.warn(documents); assertThat(documents, hasSize(2)); - }); + }, 30, TimeUnit.SECONDS); } finally { cleanupSettings(); } @@ -260,7 +260,7 @@ private void doTestDeprecationWarningsAppearInHeaders(String xOpaqueId) throws E var documents = DeprecationTestUtils.getIndexedDeprecations(client(), xOpaqueId); logger.warn(documents); assertThat(documents, hasSize(headerMatchers.size())); - }); + }, 30, TimeUnit.SECONDS); } public void testDeprecationRouteThrottling() throws Exception { From c89de11e57afcf53fa13dbf5936536c5fdfdd84f Mon Sep 17 00:00:00 2001 From: Ignacio Vera Date: Mon, 6 May 2024 16:01:21 +0200 Subject: [PATCH 006/117] Optimise frequent item sets aggregation for single value fields (#108130) Similar to #107832, this commit optimizes frequent item sets aggregation for single value fields.
--- docs/changelog/108130.yaml | 5 + .../mr/ItemSetMapReduceValueSource.java | 101 ++++++++++++------ 2 files changed, 76 insertions(+), 30 deletions(-) create mode 100644 docs/changelog/108130.yaml diff --git a/docs/changelog/108130.yaml b/docs/changelog/108130.yaml new file mode 100644 index 0000000000000..5b431bdb0cc1b --- /dev/null +++ b/docs/changelog/108130.yaml @@ -0,0 +1,5 @@ +pr: 108130 +summary: Optimise frequent item sets aggregation for single value fields +area: Machine Learning +type: enhancement +issues: [] diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/frequentitemsets/mr/ItemSetMapReduceValueSource.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/frequentitemsets/mr/ItemSetMapReduceValueSource.java index 8a7d2afa958d9..c9ec772eb3321 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/frequentitemsets/mr/ItemSetMapReduceValueSource.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/frequentitemsets/mr/ItemSetMapReduceValueSource.java @@ -9,7 +9,11 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; +import org.apache.lucene.index.BinaryDocValues; +import org.apache.lucene.index.DocValues; import org.apache.lucene.index.LeafReaderContext; +import org.apache.lucene.index.NumericDocValues; +import org.apache.lucene.index.SortedDocValues; import org.apache.lucene.index.SortedNumericDocValues; import org.apache.lucene.index.SortedSetDocValues; import org.apache.lucene.util.BytesRef; @@ -19,6 +23,7 @@ import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.core.Tuple; +import org.elasticsearch.index.fielddata.FieldData; import org.elasticsearch.index.fielddata.SortedBinaryDocValues; import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.aggregations.bucket.terms.IncludeExclude; @@ -204,15 +209,19 @@ static class 
GlobalOrdinalsStrategy implements ExecutionStrategy { @Override public ValueCollector getValueCollector(LeafReaderContext ctx) throws IOException { this.docValues = source.globalOrdinalsValues(ctx); - ; - final Tuple> empty = new Tuple<>(field, Collections.emptyList()); final SortedSetDocValues values = this.docValues; + final SortedDocValues singleton = DocValues.unwrapSingleton(values); + final Tuple> empty = new Tuple<>(field, Collections.emptyList()); + return singleton != null ? getValueCollector(singleton, empty) : getValueCollector(values, empty); + } + + private ValueCollector getValueCollector(SortedSetDocValues values, Tuple> empty) { return doc -> { if (values.advanceExact(doc)) { - int valuesCount = values.docValueCount(); + final int valuesCount = values.docValueCount(); if (valuesCount == 1) { - long v = values.nextOrd(); + final long v = values.nextOrd(); assert v >= 0; if (bitSetFilter == null || bitSetFilter.get(v)) { return new Tuple<>(field, Collections.singletonList(v)); @@ -220,14 +229,9 @@ public ValueCollector getValueCollector(LeafReaderContext ctx) throws IOExceptio return empty; } - if (valuesCount == 0) { - return empty; - } - - List objects = new ArrayList<>(valuesCount); - + final List objects = new ArrayList<>(valuesCount); for (int i = 0; i < valuesCount; ++i) { - long v = values.nextOrd(); + final long v = values.nextOrd(); assert v >= 0; if (bitSetFilter == null || bitSetFilter.get(v)) { objects.add(v); @@ -239,6 +243,20 @@ public ValueCollector getValueCollector(LeafReaderContext ctx) throws IOExceptio }; } + private ValueCollector getValueCollector(SortedDocValues values, Tuple> empty) { + return doc -> { + if (values.advanceExact(doc)) { + final long v = values.ordValue(); + assert v >= 0; + if (bitSetFilter == null || bitSetFilter.get(v)) { + return new Tuple<>(field, Collections.singletonList(v)); + } + return empty; + } + return empty; + }; + } + @Override public boolean usesOrdinals() { return true; @@ -265,28 +283,27 @@ 
static class MapStrategy implements ExecutionStrategy { @Override public ValueCollector getValueCollector(LeafReaderContext ctx) throws IOException { final SortedBinaryDocValues values = source.bytesValues(ctx); + final BinaryDocValues singleton = FieldData.unwrapSingleton(values); final Tuple> empty = new Tuple<>(field, Collections.emptyList()); + return singleton != null ? getValueCollector(singleton, empty) : getValueCollector(values, empty); + } + private ValueCollector getValueCollector(SortedBinaryDocValues values, Tuple> empty) { return doc -> { if (values.advanceExact(doc)) { - int valuesCount = values.docValueCount(); + final int valuesCount = values.docValueCount(); if (valuesCount == 1) { - BytesRef v = values.nextValue(); + final BytesRef v = values.nextValue(); if (stringFilter == null || stringFilter.accept(v)) { return new Tuple<>(field, Collections.singletonList(BytesRef.deepCopyOf(v))); } return empty; } - if (valuesCount == 0) { - return empty; - } - - List objects = new ArrayList<>(valuesCount); - + final List objects = new ArrayList<>(valuesCount); for (int i = 0; i < valuesCount; ++i) { - BytesRef v = values.nextValue(); + final BytesRef v = values.nextValue(); if (stringFilter == null || stringFilter.accept(v)) { objects.add(BytesRef.deepCopyOf(v)); } @@ -297,6 +314,19 @@ public ValueCollector getValueCollector(LeafReaderContext ctx) throws IOExceptio }; } + private ValueCollector getValueCollector(BinaryDocValues values, Tuple> empty) { + return doc -> { + if (values.advanceExact(doc)) { + final BytesRef v = values.binaryValue(); + if (stringFilter == null || stringFilter.accept(v)) { + return new Tuple<>(field, Collections.singletonList(BytesRef.deepCopyOf(v))); + } + return empty; + } + return empty; + }; + } + @Override public boolean usesOrdinals() { return false; @@ -374,29 +404,28 @@ public NumericValueSource( @Override ValueCollector getValueCollector(LeafReaderContext ctx) throws IOException { final SortedNumericDocValues values = 
source.longValues(ctx); + final NumericDocValues singleton = DocValues.unwrapSingleton(values); final Field field = getField(); final Tuple> empty = new Tuple<>(field, Collections.emptyList()); + return singleton != null ? getValueCollector(singleton, empty, field) : getValueCollector(values, empty, field); + } + private ValueCollector getValueCollector(SortedNumericDocValues values, Tuple> empty, Field field) { return doc -> { if (values.advanceExact(doc)) { - int valuesCount = values.docValueCount(); + final int valuesCount = values.docValueCount(); if (valuesCount == 1) { - long v = values.nextValue(); + final long v = values.nextValue(); if (longFilter == null || longFilter.accept(v)) { - return new Tuple<>(getField(), Collections.singletonList(v)); + return new Tuple<>(field, Collections.singletonList(v)); } return empty; } - if (valuesCount == 0) { - return empty; - } - - List objects = new ArrayList<>(valuesCount); - + final List objects = new ArrayList<>(valuesCount); for (int i = 0; i < valuesCount; ++i) { - long v = values.nextValue(); + final long v = values.nextValue(); if (longFilter == null || longFilter.accept(v)) { objects.add(v); } @@ -407,5 +436,17 @@ ValueCollector getValueCollector(LeafReaderContext ctx) throws IOException { }; } + private ValueCollector getValueCollector(NumericDocValues values, Tuple> empty, Field field) { + return doc -> { + if (values.advanceExact(doc)) { + final long v = values.longValue(); + if (longFilter == null || longFilter.accept(v)) { + return new Tuple<>(field, Collections.singletonList(v)); + } + return empty; + } + return empty; + }; + } } } From 31afff92f87da18a284b7d4b66089c09769eccb6 Mon Sep 17 00:00:00 2001 From: Nikolaj Volgushev Date: Mon, 6 May 2024 16:02:14 +0200 Subject: [PATCH 007/117] Invalidate cross cluster API key docs (#108297) This PR documents privilege requirements for cross-cluster API key invalidation, which were updated in https://github.com/elastic/elasticsearch/pull/107411. 
--- .../rest-api/security/invalidate-api-keys.asciidoc | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/docs/reference/rest-api/security/invalidate-api-keys.asciidoc b/docs/reference/rest-api/security/invalidate-api-keys.asciidoc index e4cc91000c9c9..57a36a97634ac 100644 --- a/docs/reference/rest-api/security/invalidate-api-keys.asciidoc +++ b/docs/reference/rest-api/security/invalidate-api-keys.asciidoc @@ -15,9 +15,10 @@ Invalidates one or more API keys. [[security-api-invalidate-api-key-prereqs]] ==== {api-prereq-title} -* To use this API, you must have at least the `manage_api_key` or the `manage_own_api_key` cluster privilege. -The `manage_api_key` privilege allows deleting any API keys. -The `manage_own_api_key` only allows deleting API keys that are owned by the user. +* To use this API, you must have at least the `manage_security`, `manage_api_key`, or `manage_own_api_key` cluster privilege. +The `manage_security` privilege allows deleting any API key, including both REST and <>. +The `manage_api_key` privilege allows deleting any REST API key, but not cross cluster API keys. +The `manage_own_api_key` only allows deleting REST API keys owned by the user. In addition, with the `manage_own_api_key` privilege, an invalidation request must be issued in one of the three formats: 1. Set the parameter `owner=true` From 8c49dd20205a9861d68af598c49812b2f14242f2 Mon Sep 17 00:00:00 2001 From: Bogdan Pintea Date: Mon, 6 May 2024 16:52:27 +0200 Subject: [PATCH 008/117] Make ip.ImplictCastingEqual test reliable (#108314) This adds a sort to a test to make the results reliable. 
--- .../plugin/esql/qa/testFixtures/src/main/resources/ip.csv-spec | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/ip.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/ip.csv-spec index f987b27e4737a..8d3c0c9186c6c 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/ip.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/ip.csv-spec @@ -435,7 +435,7 @@ fe80::cae2:65ff:fece:feb9 | gamma implictCastingEqual required_feature: esql.string_literal_auto_casting_extended -from hosts | where mv_first(ip0) == "127.0.0.1" | keep host, ip0; +from hosts | where mv_first(ip0) == "127.0.0.1" | keep host, ip0 | sort host; host:keyword | ip0:ip alpha | 127.0.0.1 From 3913b6f72034e25dda364de9c204f88c89d98242 Mon Sep 17 00:00:00 2001 From: Nhat Nguyen Date: Mon, 6 May 2024 07:54:27 -0700 Subject: [PATCH 009/117] Fix random limit in AsyncOperatorTests (#108289) Adjust the lower bound to include the case where the number of positions is zero. 
Closes #107847 --- .../org/elasticsearch/compute/operator/AsyncOperatorTests.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/AsyncOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/AsyncOperatorTests.java index 9ff04a04f9eb3..ae4558d5f8f71 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/AsyncOperatorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/AsyncOperatorTests.java @@ -127,7 +127,7 @@ public void doClose() { intermediateOperators.add(asyncOperator); final Iterator it; if (randomBoolean()) { - int limit = between(1, ids.size()); + int limit = between(0, ids.size()); it = ids.subList(0, limit).iterator(); intermediateOperators.add(new LimitOperator(limit)); } else { From 089fd7d7da788d724f4e59a52dd5dba025f3d5d9 Mon Sep 17 00:00:00 2001 From: Nik Everett Date: Mon, 6 May 2024 11:06:50 -0400 Subject: [PATCH 010/117] ESQL: Rework integration-only csv testing (#108313) This reworks the integration-test-only csv testing for `metadata` to use the `required_feature:` syntax instead of the `-IT_tests_only` extension. This is a little more flexible and way nicer on the eyes. 
--- docs/reference/esql/metadata-fields.asciidoc | 8 ++-- ..._tests_only.csv-spec => metadata.csv-spec} | 47 ++++++++++++------- .../xpack/esql/plugin/EsqlFeatures.java | 8 +++- .../elasticsearch/xpack/esql/CsvTests.java | 9 ++-- 4 files changed, 47 insertions(+), 25 deletions(-) rename x-pack/plugin/esql/qa/testFixtures/src/main/resources/{metadata-IT_tests_only.csv-spec => metadata.csv-spec} (79%) diff --git a/docs/reference/esql/metadata-fields.asciidoc b/docs/reference/esql/metadata-fields.asciidoc index f06c9cad26f12..c4a416a593145 100644 --- a/docs/reference/esql/metadata-fields.asciidoc +++ b/docs/reference/esql/metadata-fields.asciidoc @@ -34,11 +34,11 @@ like other index fields: [source.merge.styled,esql] ---- -include::{esql-specs}/metadata-IT_tests_only.csv-spec[tag=multipleIndices] +include::{esql-specs}/metadata.csv-spec[tag=multipleIndices] ---- [%header.monospaced.styled,format=dsv,separator=|] |=== -include::{esql-specs}/metadata-IT_tests_only.csv-spec[tag=multipleIndices-result] +include::{esql-specs}/metadata.csv-spec[tag=multipleIndices-result] |=== Similar to index fields, once an aggregation is performed, a @@ -47,9 +47,9 @@ used as a grouping field: [source.merge.styled,esql] ---- -include::{esql-specs}/metadata-IT_tests_only.csv-spec[tag=metaIndexInAggs] +include::{esql-specs}/metadata.csv-spec[tag=metaIndexInAggs] ---- [%header.monospaced.styled,format=dsv,separator=|] |=== -include::{esql-specs}/metadata-IT_tests_only.csv-spec[tag=metaIndexInAggs-result] +include::{esql-specs}/metadata.csv-spec[tag=metaIndexInAggs-result] |=== diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/metadata-IT_tests_only.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/metadata.csv-spec similarity index 79% rename from x-pack/plugin/esql/qa/testFixtures/src/main/resources/metadata-IT_tests_only.csv-spec rename to x-pack/plugin/esql/qa/testFixtures/src/main/resources/metadata.csv-spec index 0e970cccd3ddf..bcb9718048085 100644 --- 
a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/metadata-IT_tests_only.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/metadata.csv-spec @@ -1,5 +1,5 @@ - -simpleKeep#[skip:-8.12.99] +simpleKeep +required_feature: esql.metadata_fields from employees metadata _index, _version | sort emp_no | limit 2 | keep emp_no, _index, _version; emp_no:integer |_index:keyword |_version:long @@ -7,7 +7,8 @@ emp_no:integer |_index:keyword |_version:long 10002 |employees |1 ; -aliasWithSameName#[skip:-8.12.99] +aliasWithSameName +required_feature: esql.metadata_fields from employees metadata _index, _version | sort emp_no | limit 2 | eval _index = _index, _version = _version | keep emp_no, _index, _version; emp_no:integer |_index:keyword |_version:long @@ -15,16 +16,17 @@ emp_no:integer |_index:keyword |_version:long 10002 |employees |1 ; -inComparison#[skip:-8.12.99] +inComparison +required_feature: esql.metadata_fields from employees metadata _index, _version | sort emp_no | where _index == "employees" | where _version == 1 | keep emp_no | limit 2; - emp_no:integer 10001 10002 ; -metaIndexInAggs#[skip:-8.12.99] +metaIndexInAggs +required_feature: esql.metadata_fields // tag::metaIndexInAggs[] FROM employees METADATA _index, _id | STATS max = MAX(emp_no) BY _index @@ -37,7 +39,8 @@ max:integer |_index:keyword // end::metaIndexInAggs-result[] ; -metaIndexAliasedInAggs#[skip:-8.12.99] +metaIndexAliasedInAggs +required_feature: esql.metadata_fields from employees metadata _index | eval _i = _index | stats max = max(emp_no) by _i; @@ -45,35 +48,40 @@ max:integer |_i:keyword 10100 |employees ; -metaVersionInAggs#[skip:-8.12.99] +metaVersionInAggs +required_feature: esql.metadata_fields from employees metadata _version | stats min = min(emp_no) by _version; min:integer |_version:long 10001 |1 ; -metaVersionAliasedInAggs#[skip:-8.12.99] +metaVersionAliasedInAggs +required_feature: esql.metadata_fields from employees metadata _version | eval _v = _version | 
stats min = min(emp_no) by _v; min:integer |_v:long 10001 |1 ; -inAggsAndAsGroups#[skip:-8.12.99] +inAggsAndAsGroups +required_feature: esql.metadata_fields from employees metadata _index, _version | stats max = max(_version) by _index; max:long |_index:keyword 1 |employees ; -inAggsAndAsGroupsAliased#[skip:-8.12.99] +inAggsAndAsGroupsAliased +required_feature: esql.metadata_fields from employees metadata _index, _version | eval _i = _index, _v = _version | stats max = max(_v) by _i; max:long |_i:keyword 1 |employees ; -inFunction#[skip:-8.12.99] +inFunction +required_feature: esql.metadata_fields from employees metadata _index, _version | sort emp_no | where length(_index) == length("employees") | where abs(_version) == 1 | keep emp_no | limit 2; emp_no:integer @@ -81,14 +89,16 @@ emp_no:integer 10002 ; -inArithmetics#[skip:-8.12.99] +inArithmetics +required_feature: esql.metadata_fields from employees metadata _index, _version | eval i = _version + 2 | stats min = min(emp_no) by i; min:integer |i:long 10001 |3 ; -inSort#[skip:-8.12.99] +inSort +required_feature: esql.metadata_fields from employees metadata _index, _version | sort _version, _index, emp_no | keep emp_no, _version, _index | limit 2; emp_no:integer |_version:long |_index:keyword @@ -96,14 +106,16 @@ emp_no:integer |_version:long |_index:keyword 10002 |1 |employees ; -withMvFunction#[skip:-8.12.99] +withMvFunction +required_feature: esql.metadata_fields from employees metadata _version | eval i = mv_avg(_version) + 2 | stats min = min(emp_no) by i; min:integer |i:double 10001 |3.0 ; -overwritten#[skip:-8.12.99] +overwritten +required_feature: esql.metadata_fields from employees metadata _index, _version | sort emp_no | eval _index = 3, _version = "version" | keep emp_no, _index, _version | limit 3; emp_no:integer |_index:integer |_version:keyword @@ -112,7 +124,8 @@ emp_no:integer |_index:integer |_version:keyword 10003 |3 |version ; -multipleIndices#[skip:-8.12.99] +multipleIndices +required_feature: 
esql.metadata_fields // tag::multipleIndices[] FROM ul_logs, apps METADATA _index, _version | WHERE id IN (13, 14) AND _version == 1 diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/EsqlFeatures.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/EsqlFeatures.java index 3a8a34b54ee7a..f6b534f7316df 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/EsqlFeatures.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/EsqlFeatures.java @@ -131,6 +131,11 @@ public class EsqlFeatures implements FeatureSpecification { */ public static final NodeFeature STRING_LITERAL_AUTO_CASTING_EXTENDED = new NodeFeature("esql.string_literal_auto_casting_extended"); + /** + * Support for metadata fields. + */ + public static final NodeFeature METADATA_FIELDS = new NodeFeature("esql.metadata_fields"); + @Override public Set getFeatures() { return Set.of( @@ -151,7 +156,8 @@ public Set getFeatures() { CASTING_OPERATOR, MV_ORDERING_SORTED_ASCENDING, METRICS_COUNTER_FIELDS, - STRING_LITERAL_AUTO_CASTING_EXTENDED + STRING_LITERAL_AUTO_CASTING_EXTENDED, + METADATA_FIELDS ); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java index 06a9319079087..c865b21723a9e 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java @@ -73,6 +73,7 @@ import org.elasticsearch.xpack.esql.planner.Mapper; import org.elasticsearch.xpack.esql.planner.PlannerUtils; import org.elasticsearch.xpack.esql.planner.TestPhysicalOperationProviders; +import org.elasticsearch.xpack.esql.plugin.EsqlFeatures; import org.elasticsearch.xpack.esql.plugin.QueryPragmas; import org.elasticsearch.xpack.esql.session.EsqlConfiguration; import org.elasticsearch.xpack.esql.stats.DisabledSearchStats; @@ 
-216,11 +217,13 @@ public CsvTests(String fileName, String groupName, String testName, Integer line public final void test() throws Throwable { try { + assumeTrue("Test " + testName + " is not enabled", isEnabled(testName, Version.CURRENT)); + /* - * We're intentionally not NodeFeatures here because we expect all - * of the features to be supported in this unit test. + * The csv tests support all but a few features. The unsupported features + * are tested in integration tests. */ - assumeTrue("Test " + testName + " is not enabled", isEnabled(testName, Version.CURRENT)); + assumeFalse("metadata fields aren't supported", testCase.requiredFeatures.contains(EsqlFeatures.METADATA_FIELDS.id())); doTest(); } catch (Throwable th) { throw reworkException(th); From 0378a77b43f4b69d41dd5c60f218c512437394c8 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Slobodan=20Adamovi=C4=87?= Date: Mon, 6 May 2024 17:18:52 +0200 Subject: [PATCH 011/117] Fix role descriptor's hashCode, equals and isEmpty implementations (#108255) Added missing `remoteClusterPermissions` to `hashCode`, `equals` and `isEmpty` implementations. Resolves https://github.com/elastic/elasticsearch/issues/108253, https://github.com/elastic/elasticsearch/issues/108285 Note: Marking as `>non-issue` since the PR that introduced `remoteClusterPermissions` is not released yet. 
--- .../xpack/core/security/authz/RoleDescriptor.java | 3 +++ .../permission/RemoteClusterPermissionGroup.java | 10 ++++------ .../xpack/core/security/support/StringMatcher.java | 13 ------------- .../action/user/GetUserPrivilegesResponseTests.java | 1 - 4 files changed, 7 insertions(+), 20 deletions(-) diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/RoleDescriptor.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/RoleDescriptor.java index d8a2900021783..caa5567364cd3 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/RoleDescriptor.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/RoleDescriptor.java @@ -357,6 +357,7 @@ public boolean equals(Object o) { if (metadata.equals(that.getMetadata()) == false) return false; if (Arrays.equals(runAs, that.runAs) == false) return false; if (Arrays.equals(remoteIndicesPrivileges, that.remoteIndicesPrivileges) == false) return false; + if (remoteClusterPermissions.equals(that.remoteClusterPermissions) == false) return false; return restriction.equals(that.restriction); } @@ -370,6 +371,7 @@ public int hashCode() { result = 31 * result + Arrays.hashCode(runAs); result = 31 * result + metadata.hashCode(); result = 31 * result + Arrays.hashCode(remoteIndicesPrivileges); + result = 31 * result + remoteClusterPermissions.hashCode(); result = 31 * result + restriction.hashCode(); return result; } @@ -382,6 +384,7 @@ public boolean isEmpty() { && runAs.length == 0 && metadata.size() == 0 && remoteIndicesPrivileges.length == 0 + && remoteClusterPermissions.groups().isEmpty() && restriction.isEmpty(); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/RemoteClusterPermissionGroup.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/RemoteClusterPermissionGroup.java index 
0f5a755e9fe01..1c34a7829fcbb 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/RemoteClusterPermissionGroup.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/RemoteClusterPermissionGroup.java @@ -18,7 +18,6 @@ import java.io.IOException; import java.util.Arrays; -import java.util.Objects; /** * Represents a group of permissions for a remote cluster. For example: @@ -107,15 +106,14 @@ public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; RemoteClusterPermissionGroup that = (RemoteClusterPermissionGroup) o; - return Arrays.equals(clusterPrivileges, that.clusterPrivileges) - && Arrays.equals(remoteClusterAliases, that.remoteClusterAliases) - && Objects.equals(remoteClusterAliasMatcher, that.remoteClusterAliasMatcher); + // remoteClusterAliasMatcher property is intentionally omitted + return Arrays.equals(clusterPrivileges, that.clusterPrivileges) && Arrays.equals(remoteClusterAliases, that.remoteClusterAliases); } @Override public int hashCode() { - int result = Objects.hash(remoteClusterAliasMatcher); - result = 31 * result + Arrays.hashCode(clusterPrivileges); + // remoteClusterAliasMatcher property is intentionally omitted + int result = Arrays.hashCode(clusterPrivileges); result = 31 * result + Arrays.hashCode(remoteClusterAliases); return result; } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/support/StringMatcher.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/support/StringMatcher.java index 5e3a39a6e16f5..ede11fe157487 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/support/StringMatcher.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/support/StringMatcher.java @@ -191,17 +191,4 @@ private static String getPatternsDescription(Collection patterns) { return 
description; } } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - StringMatcher that = (StringMatcher) o; - return Objects.equals(description, that.description) && Objects.equals(predicate, that.predicate); - } - - @Override - public int hashCode() { - return Objects.hash(description, predicate); - } } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/user/GetUserPrivilegesResponseTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/user/GetUserPrivilegesResponseTests.java index 3d2ce2b3e6251..437f58449b4de 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/user/GetUserPrivilegesResponseTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/user/GetUserPrivilegesResponseTests.java @@ -66,7 +66,6 @@ public void testSerialization() throws IOException { assertThat(copy.getRemoteIndexPrivileges(), equalTo(original.getRemoteIndexPrivileges())); } - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/108285") public void testSerializationForCurrentVersion() throws Exception { final TransportVersion version = TransportVersionUtils.randomCompatibleVersion(random()); final boolean canIncludeRemoteIndices = version.onOrAfter(TransportVersions.V_8_8_0); From d8d25ebdd756b243e37b5ac05651295cc3d69b7c Mon Sep 17 00:00:00 2001 From: Nik Everett Date: Mon, 6 May 2024 11:19:02 -0400 Subject: [PATCH 012/117] ESQL: Log queries at debug level (#108257) Previously we were logging all ESQL queries. That's a lot! Plus maybe there's PII in there or something. Let's not do that unless you ask for it. This changes the query logging to the `debug` log level you can still get at these if you want them, but you don't have them by default. you have to turn it on. 
--- docs/changelog/108257.yaml | 5 ++ .../xpack/esql/qa/single_node/RestEsqlIT.java | 56 +++++++++++++++++++ .../esql/action/EsqlResponseListener.java | 18 ++++-- .../esql/action/RestEsqlAsyncQueryAction.java | 2 +- .../esql/action/RestEsqlQueryAction.java | 2 +- 5 files changed, 75 insertions(+), 8 deletions(-) create mode 100644 docs/changelog/108257.yaml diff --git a/docs/changelog/108257.yaml b/docs/changelog/108257.yaml new file mode 100644 index 0000000000000..ce2c72353af82 --- /dev/null +++ b/docs/changelog/108257.yaml @@ -0,0 +1,5 @@ +pr: 108257 +summary: "ESQL: Log queries at debug level" +area: ES|QL +type: enhancement +issues: [] diff --git a/x-pack/plugin/esql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/RestEsqlIT.java b/x-pack/plugin/esql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/RestEsqlIT.java index 4de2a0f565c71..4f43e54a82546 100644 --- a/x-pack/plugin/esql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/RestEsqlIT.java +++ b/x-pack/plugin/esql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/RestEsqlIT.java @@ -14,15 +14,18 @@ import org.elasticsearch.client.Request; import org.elasticsearch.client.Response; import org.elasticsearch.client.ResponseException; +import org.elasticsearch.common.io.Streams; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.test.TestClustersThreadFilter; import org.elasticsearch.test.cluster.ElasticsearchCluster; +import org.elasticsearch.test.cluster.LogType; import org.elasticsearch.xpack.esql.qa.rest.RestEsqlTestCase; import org.hamcrest.Matchers; import org.junit.Assert; import org.junit.ClassRule; import java.io.IOException; +import java.io.InputStream; import java.nio.charset.StandardCharsets; import java.util.Arrays; import java.util.List; @@ -31,6 +34,7 @@ import static org.hamcrest.Matchers.containsString; import static 
org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.not; import static org.hamcrest.core.Is.is; @ThreadLeakFilters(filters = TestClustersThreadFilter.class) @@ -105,6 +109,58 @@ public void testPragmaNotAllowed() throws IOException { assertThat(EntityUtils.toString(re.getResponse().getEntity()), containsString("[pragma] only allowed in snapshot builds")); } + public void testDoNotLogWithInfo() throws IOException { + try { + setLoggingLevel("INFO"); + RequestObjectBuilder builder = requestObjectBuilder().query("ROW DO_NOT_LOG_ME = 1"); + Map result = runEsql(builder); + assertEquals(2, result.size()); + Map colA = Map.of("name", "DO_NOT_LOG_ME", "type", "integer"); + assertEquals(List.of(colA), result.get("columns")); + assertEquals(List.of(List.of(1)), result.get("values")); + try (InputStream log = cluster.getNodeLog(0, LogType.SERVER)) { + Streams.readAllLines(log, line -> { assertThat(line, not(containsString("DO_NOT_LOG_ME"))); }); + } + } finally { + setLoggingLevel(null); + } + } + + public void testDoLogWithDebug() throws IOException { + try { + setLoggingLevel("DEBUG"); + RequestObjectBuilder builder = requestObjectBuilder().query("ROW DO_LOG_ME = 1"); + Map result = runEsql(builder); + assertEquals(2, result.size()); + Map colA = Map.of("name", "DO_LOG_ME", "type", "integer"); + assertEquals(List.of(colA), result.get("columns")); + assertEquals(List.of(List.of(1)), result.get("values")); + try (InputStream log = cluster.getNodeLog(0, LogType.SERVER)) { + boolean[] found = new boolean[] { false }; + Streams.readAllLines(log, line -> { + if (line.contains("DO_LOG_ME")) { + found[0] = true; + } + }); + assertThat(found[0], equalTo(true)); + } + } finally { + setLoggingLevel(null); + } + } + + private void setLoggingLevel(String level) throws IOException { + Request request = new Request("PUT", "/_cluster/settings"); + request.setJsonEntity(""" + { + "persistent": { + "logger.org.elasticsearch.xpack.esql.action": $LEVEL$ + } + } + 
""".replace("$LEVEL$", level == null ? "null" : '"' + level + '"')); + client().performRequest(request); + } + public void testIncompatibleMappingsErrors() throws IOException { // create first index Request request = new Request("PUT", "/index1"); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlResponseListener.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlResponseListener.java index 0022866cf1742..3b6f612c658e8 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlResponseListener.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlResponseListener.java @@ -154,13 +154,20 @@ private RestResponse buildResponse(EsqlQueryResponse esqlResponse) throws IOExce } /** - * Log the execution time and query when handling an ES|QL response. + * Log internal server errors all the time and log queries if debug is enabled. */ public ActionListener wrapWithLogging() { + ActionListener listener = ActionListener.wrap(this::onResponse, ex -> { + logOnFailure(LOGGER, ex); + onFailure(ex); + }); + if (LOGGER.isDebugEnabled() == false) { + return listener; + } return ActionListener.wrap(r -> { - onResponse(r); + listener.onResponse(r); // At this point, the StopWatch should already have been stopped, so we log a consistent time. - LOGGER.info( + LOGGER.debug( "Finished execution of ESQL query.\nQuery string: [{}]\nExecution time: [{}]ms", esqlQuery, stopWatch.stop().getMillis() @@ -168,9 +175,8 @@ public ActionListener wrapWithLogging() { }, ex -> { // In case of failure, stop the time manually before sending out the response. 
long timeMillis = stopWatch.stop().getMillis(); - LOGGER.info("Failed execution of ESQL query.\nQuery string: [{}]\nExecution time: [{}]ms", esqlQuery, timeMillis); - logOnFailure(LOGGER, ex); - onFailure(ex); + LOGGER.debug("Failed execution of ESQL query.\nQuery string: [{}]\nExecution time: [{}]ms", esqlQuery, timeMillis); + listener.onFailure(ex); }); } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/RestEsqlAsyncQueryAction.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/RestEsqlAsyncQueryAction.java index 51baa900ce322..3f0289d49535a 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/RestEsqlAsyncQueryAction.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/RestEsqlAsyncQueryAction.java @@ -47,7 +47,7 @@ protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient cli } RestEsqlQueryAction.defaultVersionForOldClients(esqlRequest, request); - LOGGER.info("Beginning execution of ESQL async query.\nQuery string: [{}]", esqlRequest.query()); + LOGGER.debug("Beginning execution of ESQL async query.\nQuery string: [{}]", esqlRequest.query()); return channel -> { RestCancellableNodeClient cancellableClient = new RestCancellableNodeClient(client, request.getHttpChannel()); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/RestEsqlQueryAction.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/RestEsqlQueryAction.java index d8fbe4ae35c1d..97a7f8e0e9e7d 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/RestEsqlQueryAction.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/RestEsqlQueryAction.java @@ -47,7 +47,7 @@ protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient cli } defaultVersionForOldClients(esqlRequest, request); - LOGGER.info("Beginning execution of ESQL query.\nQuery string: [{}]", 
esqlRequest.query()); + LOGGER.debug("Beginning execution of ESQL query.\nQuery string: [{}]", esqlRequest.query()); return channel -> { RestCancellableNodeClient cancellableClient = new RestCancellableNodeClient(client, request.getHttpChannel()); From e6b43a17099eff099a05572ff0b2724485e54211 Mon Sep 17 00:00:00 2001 From: Nhat Nguyen Date: Mon, 6 May 2024 09:05:55 -0700 Subject: [PATCH 013/117] Fix BlockHash DirectEncoder (#108283) The DirectEncoder currently returns the incorrect value for the positionCount() method, which should be the number of positions ready in the current batch. We need to keep track of whether a position is loaded via encodeNextBatch() and consumed via the read() method. However, we can always return 1 for positionCount(), indicating that one position is already loaded. Our tests failed to catch this because mv_ordering wasn't enabled when generating test blocks, effectively disabling the DirectEncoders. Closes #108268 --- docs/changelog/108283.yaml | 6 ++++++ .../compute/operator/mvdedupe/BatchEncoder.java | 4 ++-- .../compute/data/BasicBlockTests.java | 14 ++++++++++++++ 3 files changed, 22 insertions(+), 2 deletions(-) create mode 100644 docs/changelog/108283.yaml diff --git a/docs/changelog/108283.yaml b/docs/changelog/108283.yaml new file mode 100644 index 0000000000000..6341a8775b729 --- /dev/null +++ b/docs/changelog/108283.yaml @@ -0,0 +1,6 @@ +pr: 108283 +summary: Fix `BlockHash` `DirectEncoder` +area: ES|QL +type: bug +issues: + - 108268 diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/mvdedupe/BatchEncoder.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/mvdedupe/BatchEncoder.java index 8c584f441f646..5460210b688eb 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/mvdedupe/BatchEncoder.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/mvdedupe/BatchEncoder.java @@ -276,13 +276,13 @@ 
public final void encodeNextBatch() { @Override public final int positionCount() { - return Math.max(valueCount, 1); + return 1; // always has one position already loaded } @Override public final int valueCount(int positionOffset) { assert positionOffset == 0 : positionOffset; - return positionCount(); + return Math.max(valueCount, 1); } @Override diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BasicBlockTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BasicBlockTests.java index 6852cd52862b2..92c471ee9e6b7 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BasicBlockTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BasicBlockTests.java @@ -1011,6 +1011,7 @@ public static RandomBlock randomBlock( int maxDupsPerPosition ) { List> values = new ArrayList<>(); + Block.MvOrdering mvOrdering = Block.MvOrdering.DEDUPLICATED_AND_SORTED_ASCENDING; try (var builder = elementType.newBlockBuilder(positionCount, blockFactory)) { boolean bytesRefFromPoints = randomBoolean(); Supplier pointSupplier = randomBoolean() ? 
GeometryTestUtils::randomPoint : ShapeTestUtils::randomPoint; @@ -1071,6 +1072,19 @@ public static RandomBlock randomBlock( if (valueCount != 1 || dupCount != 0) { builder.endPositionEntry(); } + if (dupCount > 0) { + mvOrdering = Block.MvOrdering.UNORDERED; + } else if (mvOrdering != Block.MvOrdering.UNORDERED) { + List dedupedAndSortedList = valuesAtPosition.stream().sorted().distinct().toList(); + if (dedupedAndSortedList.size() != valuesAtPosition.size()) { + mvOrdering = Block.MvOrdering.UNORDERED; + } else if (dedupedAndSortedList.equals(valuesAtPosition) == false) { + mvOrdering = Block.MvOrdering.DEDUPLICATED_UNORDERD; + } + } + } + if (randomBoolean()) { + builder.mvOrdering(mvOrdering); } return new RandomBlock(values, builder.build()); } From 0117ea158e8e842d68f2a6b4ae1b86fc2d834167 Mon Sep 17 00:00:00 2001 From: Nik Everett Date: Mon, 6 May 2024 12:08:15 -0400 Subject: [PATCH 014/117] ESQL: Add min and max to IntVector (#108216) This adds `min` and `max` methods to `IntVector` which returns the minimum and maximum values in the vector. It'll calculate that min and max and cache it if required. This is useful because we use `IntVector` as ordinals for things - and it's especially useful to know that the `max` is less than the size of thing we're looking up. 
--- .../compute/data/ConstantIntVector.java | 16 +++++++ .../compute/data/IntArrayVector.java | 40 +++++++++++++++++ .../compute/data/IntBigArrayVector.java | 40 +++++++++++++++++ .../elasticsearch/compute/data/IntVector.java | 10 +++++ .../compute/data/ConstantNullVector.java | 12 +++++ .../compute/data/X-ArrayVector.java.st | 44 +++++++++++++++++++ .../compute/data/X-BigArrayVector.java.st | 44 +++++++++++++++++++ .../compute/data/X-ConstantVector.java.st | 18 ++++++++ .../compute/data/X-Vector.java.st | 12 +++++ .../compute/data/BasicBlockTests.java | 34 ++++++++++++++ .../compute/data/BigArrayVectorTests.java | 5 +++ 11 files changed, 275 insertions(+) diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantIntVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantIntVector.java index 56573e985c387..c6d463af7cfad 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantIntVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantIntVector.java @@ -39,6 +39,22 @@ public IntVector filter(int... positions) { return blockFactory().newConstantIntVector(value, positions.length); } + /** + * The minimum value in the block. + */ + @Override + public int min() { + return value; + } + + /** + * The maximum value in the block. 
+ */ + @Override + public int max() { + return value; + } + @Override public ElementType elementType() { return ElementType.INT; diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntArrayVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntArrayVector.java index 5273ab0546151..a2b6697a38634 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntArrayVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntArrayVector.java @@ -27,6 +27,16 @@ final class IntArrayVector extends AbstractVector implements IntVector { private final int[] values; + /** + * The minimum value in the block. + */ + private Integer min; + + /** + * The minimum value in the block. + */ + private Integer max; + IntArrayVector(int[] values, int positionCount, BlockFactory blockFactory) { super(positionCount, blockFactory); this.values = values; @@ -92,6 +102,36 @@ public static long ramBytesEstimated(int[] values) { return BASE_RAM_BYTES_USED + RamUsageEstimator.sizeOf(values); } + /** + * The minimum value in the block. + */ + @Override + public int min() { + if (min == null) { + int v = Integer.MAX_VALUE; + for (int i = 0; i < getPositionCount(); i++) { + v = Math.min(v, values[i]); + } + min = v; + } + return min; + } + + /** + * The maximum value in the block. 
+ */ + @Override + public int max() { + if (max == null) { + int v = Integer.MIN_VALUE; + for (int i = 0; i < getPositionCount(); i++) { + v = Math.max(v, values[i]); + } + max = v; + } + return max; + } + @Override public long ramBytesUsed() { return ramBytesEstimated(values); diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntBigArrayVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntBigArrayVector.java index cab2baa9b00b1..fe89782bad0ec 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntBigArrayVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntBigArrayVector.java @@ -26,6 +26,16 @@ public final class IntBigArrayVector extends AbstractVector implements IntVector private final IntArray values; + /** + * The minimum value in the block. + */ + private Integer min; + + /** + * The minimum value in the block. + */ + private Integer max; + public IntBigArrayVector(IntArray values, int positionCount, BlockFactory blockFactory) { super(positionCount, blockFactory); this.values = values; @@ -61,6 +71,36 @@ public int getInt(int position) { return values.get(position); } + /** + * The minimum value in the block. + */ + @Override + public int min() { + if (min == null) { + int v = values.get(0); + for (int i = 1; i < getPositionCount(); i++) { + v = Math.min(v, values.get(i)); + } + min = v; + } + return min; + } + + /** + * The maximum value in the block. 
+ */ + @Override + public int max() { + if (max == null) { + int v = values.get(0); + for (int i = 1; i < getPositionCount(); i++) { + v = Math.max(v, values.get(i)); + } + max = v; + } + return max; + } + @Override public ElementType elementType() { return ElementType.INT; diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntVector.java index 2b1562860db15..8f6f42b66fbe6 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntVector.java @@ -27,6 +27,16 @@ public sealed interface IntVector extends Vector permits ConstantIntVector, IntA @Override IntVector filter(int... positions); + /** + * The minimum value in the Vector. An empty Vector will return {@link Integer#MAX_VALUE}. + */ + int min(); + + /** + * The maximum value in the Vector. An empty Vector will return {@link Integer#MIN_VALUE}. + */ + int max(); + /** * Compares the given object with this vector for equality. Returns {@code true} if and only if the * given object is a IntVector, and both vectors are {@link #equals(IntVector, IntVector) equal}. 
diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/ConstantNullVector.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/ConstantNullVector.java index e262259424fa2..4deededdf41c5 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/ConstantNullVector.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/ConstantNullVector.java @@ -75,6 +75,18 @@ public long getLong(int position) { throw new UnsupportedOperationException("null vector"); } + @Override + public int min() { + assert false : "null vector"; + throw new UnsupportedOperationException("null vector"); + } + + @Override + public int max() { + assert false : "null vector"; + throw new UnsupportedOperationException("null vector"); + } + @Override public ElementType elementType() { return ElementType.NULL; diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-ArrayVector.java.st b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-ArrayVector.java.st index dc95512b6439b..7eeb7765e3b1e 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-ArrayVector.java.st +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-ArrayVector.java.st @@ -47,6 +47,18 @@ $else$ private final $type$[] values; $endif$ +$if(int)$ + /** + * The minimum value in the block. + */ + private Integer min; + + /** + * The minimum value in the block. + */ + private Integer max; +$endif$ + $Type$ArrayVector($if(BytesRef)$BytesRefArray$else$$type$[]$endif$ values, int positionCount, BlockFactory blockFactory) { super(positionCount, blockFactory); this.values = values; @@ -158,6 +170,38 @@ $endif$ return BASE_RAM_BYTES_USED + RamUsageEstimator.sizeOf(values); } +$if(int)$ + /** + * The minimum value in the block. 
+ */ + @Override + public int min() { + if (min == null) { + int v = Integer.MAX_VALUE; + for (int i = 0; i < getPositionCount(); i++) { + v = Math.min(v, values[i]); + } + min = v; + } + return min; + } + + /** + * The maximum value in the block. + */ + @Override + public int max() { + if (max == null) { + int v = Integer.MIN_VALUE; + for (int i = 0; i < getPositionCount(); i++) { + v = Math.max(v, values[i]); + } + max = v; + } + return max; + } +$endif$ + @Override public long ramBytesUsed() { return ramBytesEstimated(values); diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-BigArrayVector.java.st b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-BigArrayVector.java.st index 6a20385604aa0..d6a8723748c1f 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-BigArrayVector.java.st +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-BigArrayVector.java.st @@ -26,6 +26,18 @@ public final class $Type$BigArrayVector extends AbstractVector implements $Type$ private final $Array$ values; +$if(int)$ + /** + * The minimum value in the block. + */ + private Integer min; + + /** + * The minimum value in the block. + */ + private Integer max; +$endif$ + public $Type$BigArrayVector($Array$ values, int positionCount, BlockFactory blockFactory) { super(positionCount, blockFactory); this.values = values; @@ -67,6 +79,38 @@ $endif$ return values.get(position); } +$if(int)$ + /** + * The minimum value in the block. + */ + @Override + public int min() { + if (min == null) { + int v = values.get(0); + for (int i = 1; i < getPositionCount(); i++) { + v = Math.min(v, values.get(i)); + } + min = v; + } + return min; + } + + /** + * The maximum value in the block. 
+ */ + @Override + public int max() { + if (max == null) { + int v = values.get(0); + for (int i = 1; i < getPositionCount(); i++) { + v = Math.max(v, values.get(i)); + } + max = v; + } + return max; + } +$endif$ + @Override public ElementType elementType() { return ElementType.$TYPE$; diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-ConstantVector.java.st b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-ConstantVector.java.st index b33f91f8c648f..37cb2d2412522 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-ConstantVector.java.st +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-ConstantVector.java.st @@ -58,6 +58,24 @@ $endif$ return blockFactory().newConstant$Type$Vector(value, positions.length); } +$if(int)$ + /** + * The minimum value in the block. + */ + @Override + public int min() { + return value; + } + + /** + * The maximum value in the block. + */ + @Override + public int max() { + return value; + } +$endif$ + @Override public ElementType elementType() { return ElementType.$TYPE$; diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-Vector.java.st b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-Vector.java.st index a7f805ea02570..746ccc97a2819 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-Vector.java.st +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-Vector.java.st @@ -54,6 +54,18 @@ $endif$ @Override $Type$Vector filter(int... positions); +$if(int)$ + /** + * The minimum value in the Vector. An empty Vector will return {@link Integer#MAX_VALUE}. + */ + int min(); + + /** + * The maximum value in the Vector. An empty Vector will return {@link Integer#MIN_VALUE}. + */ + int max(); +$endif$ + /** * Compares the given object with this vector for equality. 
Returns {@code true} if and only if the * given object is a $Type$Vector, and both vectors are {@link #equals($Type$Vector, $Type$Vector) equal}. diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BasicBlockTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BasicBlockTests.java index 92c471ee9e6b7..3d80e560cc4d2 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BasicBlockTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BasicBlockTests.java @@ -200,6 +200,8 @@ public void testIntBlock() { } assertLookup(block, positions(blockFactory, positionCount + 1000), singletonList(null)); assertEmptyLookup(blockFactory, block); + assertThat(block.asVector().min(), equalTo(0)); + assertThat(block.asVector().max(), equalTo(positionCount - 1)); try (IntBlock.Builder blockBuilder = blockFactory.newIntBlockBuilder(1)) { IntBlock copy = blockBuilder.copyFrom(block, 0, block.getPositionCount()).build(); @@ -228,6 +230,36 @@ public void testIntBlock() { IntStream.range(0, positionCount).forEach(vectorBuilder::appendInt); IntVector vector = vectorBuilder.build(); assertSingleValueDenseBlock(vector.asBlock()); + assertThat(vector.min(), equalTo(0)); + assertThat(vector.max(), equalTo(positionCount - 1)); + releaseAndAssertBreaker(vector.asBlock()); + } + } + } + + public void testIntBlockEmpty() { + for (int i = 0; i < 1000; i++) { + assertThat(breaker.getUsed(), is(0L)); + IntBlock block; + if (randomBoolean()) { + try (IntBlock.Builder blockBuilder = blockFactory.newIntBlockBuilder(0)) { + block = blockBuilder.build(); + } + } else { + block = blockFactory.newIntArrayVector(new int[] {}, 0).asBlock(); + } + + assertThat(block.getPositionCount(), equalTo(0)); + assertLookup(block, positions(blockFactory, 1000), singletonList(null)); + assertEmptyLookup(blockFactory, block); + assertThat(block.asVector().min(), equalTo(Integer.MAX_VALUE)); + 
assertThat(block.asVector().max(), equalTo(Integer.MIN_VALUE)); + releaseAndAssertBreaker(block); + + try (IntVector.Builder vectorBuilder = blockFactory.newIntVectorBuilder(0)) { + IntVector vector = vectorBuilder.build(); + assertThat(vector.min(), equalTo(Integer.MAX_VALUE)); + assertThat(vector.max(), equalTo(Integer.MIN_VALUE)); releaseAndAssertBreaker(vector.asBlock()); } } @@ -254,6 +286,8 @@ public void testConstantIntBlock() { } assertLookup(block, positions(blockFactory, positionCount + 1000), singletonList(null)); assertEmptyLookup(blockFactory, block); + assertThat(block.asVector().min(), equalTo(value)); + assertThat(block.asVector().max(), equalTo(value)); releaseAndAssertBreaker(block); } } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BigArrayVectorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BigArrayVectorTests.java index 067cff2feba08..af4c643a90625 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BigArrayVectorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BigArrayVectorTests.java @@ -17,7 +17,9 @@ import org.elasticsearch.test.EqualsHashCodeTestUtils; import java.io.IOException; +import java.util.Arrays; import java.util.List; +import java.util.OptionalInt; import java.util.stream.IntStream; import static java.util.Collections.singletonList; @@ -25,6 +27,7 @@ import static org.elasticsearch.compute.data.BasicBlockTests.assertLookup; import static org.elasticsearch.compute.data.BasicBlockTests.positions; import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.notNullValue; @@ -107,6 +110,8 @@ public void testInt() throws IOException { } assertLookup(vector.asBlock(), positions(blockFactory, positionCount + 1000), singletonList(null)); assertEmptyLookup(blockFactory, 
vector.asBlock()); + assertThat(OptionalInt.of(vector.min()), equalTo(Arrays.stream(values).min())); + assertThat(OptionalInt.of(vector.max()), equalTo(Arrays.stream(values).max())); assertSerialization(block); assertThat(vector.toString(), containsString("IntBigArrayVector[positions=" + positionCount)); } From 846b74f16572587ca897e18986ccf84017b117c8 Mon Sep 17 00:00:00 2001 From: Volodymyr Krasnikov <129072588+volodk85@users.noreply.github.com> Date: Mon, 6 May 2024 17:32:42 -0700 Subject: [PATCH 015/117] Fix coordination request and document rejection metrics (#107915) * Fix coordination request rejection ratio metric * change test * fix metrics * minor tuning * Publish plain counters, do aggregation in kibana --- .../metrics/NodeIndexingMetricsIT.java | 389 +++++++++++++----- .../org/elasticsearch/TransportVersions.java | 2 + .../cluster/stats/ClusterStatsNodes.java | 5 +- .../elasticsearch/index/IndexingPressure.java | 5 +- .../index/stats/IndexingPressureStats.java | 19 +- .../monitor/metrics/NodeMetrics.java | 49 +-- .../cluster/node/stats/NodeStatsTests.java | 1 + 7 files changed, 327 insertions(+), 143 deletions(-) diff --git a/server/src/internalClusterTest/java/org/elasticsearch/monitor/metrics/NodeIndexingMetricsIT.java b/server/src/internalClusterTest/java/org/elasticsearch/monitor/metrics/NodeIndexingMetricsIT.java index 97f052367fbc6..7b26cc5edf1bc 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/monitor/metrics/NodeIndexingMetricsIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/monitor/metrics/NodeIndexingMetricsIT.java @@ -14,8 +14,10 @@ import org.elasticsearch.action.delete.DeleteRequest; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.cluster.metadata.IndexMetadata; +import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException; +import 
org.elasticsearch.core.TimeValue; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.plugins.PluginsService; import org.elasticsearch.rest.RestStatus; @@ -38,12 +40,29 @@ @ESIntegTestCase.ClusterScope(scope = ESIntegTestCase.Scope.TEST, numDataNodes = 0, numClientNodes = 0) public class NodeIndexingMetricsIT extends ESIntegTestCase { + public static class TestAPMInternalSettings extends Plugin { + @Override + public List> getSettings() { + return List.of( + Setting.timeSetting("telemetry.agent.metrics_interval", TimeValue.timeValueSeconds(0), Setting.Property.NodeScope) + ); + } + } + @Override protected Collection> nodePlugins() { - return List.of(TestTelemetryPlugin.class); + return List.of(TestTelemetryPlugin.class, TestAPMInternalSettings.class); + } + + @Override + protected Settings nodeSettings(int nodeOrdinal, Settings otherSettings) { + return Settings.builder() + .put(super.nodeSettings(nodeOrdinal, otherSettings)) + .put("telemetry.agent.metrics_interval", TimeValue.timeValueSeconds(0)) // disable metrics cache refresh delay + .build(); } - public void testNodeIndexingMetricsArePublishing() throws Exception { + public void testNodeIndexingMetricsArePublishing() { final String dataNode = internalCluster().startNode(); ensureStableCluster(1); @@ -74,107 +93,108 @@ public void testNodeIndexingMetricsArePublishing() throws Exception { // simulate async apm `polling` call for metrics plugin.collect(); - assertBusy(() -> { - var indexingTotal = getRecordedMetric(plugin::getLongAsyncCounterMeasurement, "es.indexing.docs.total"); - assertThat(indexingTotal.getLong(), equalTo((long) docsCount)); + var indexingTotal = getSingleRecordedMetric(plugin::getLongAsyncCounterMeasurement, "es.indexing.docs.total"); + assertThat(indexingTotal.getLong(), equalTo((long) docsCount)); - var indexingCurrent = getRecordedMetric(plugin::getLongGaugeMeasurement, "es.indexing.docs.current.total"); - assertThat(indexingCurrent.getLong(), equalTo(0L)); + var 
indexingCurrent = getSingleRecordedMetric(plugin::getLongGaugeMeasurement, "es.indexing.docs.current.total"); + assertThat(indexingCurrent.getLong(), equalTo(0L)); - var indexingFailedTotal = getRecordedMetric(plugin::getLongAsyncCounterMeasurement, "es.indexing.indexing.failed.total"); - assertThat(indexingFailedTotal.getLong(), equalTo(0L)); + var indexingFailedTotal = getSingleRecordedMetric(plugin::getLongAsyncCounterMeasurement, "es.indexing.indexing.failed.total"); + assertThat(indexingFailedTotal.getLong(), equalTo(0L)); - var deletionTotal = getRecordedMetric(plugin::getLongAsyncCounterMeasurement, "es.indexing.deletion.docs.total"); - assertThat(deletionTotal.getLong(), equalTo((long) deletesCount)); + var deletionTotal = getSingleRecordedMetric(plugin::getLongAsyncCounterMeasurement, "es.indexing.deletion.docs.total"); + assertThat(deletionTotal.getLong(), equalTo((long) deletesCount)); - var deletionCurrent = getRecordedMetric(plugin::getLongGaugeMeasurement, "es.indexing.deletion.docs.current.total"); - assertThat(deletionCurrent.getLong(), equalTo(0L)); + var deletionCurrent = getSingleRecordedMetric(plugin::getLongGaugeMeasurement, "es.indexing.deletion.docs.current.total"); + assertThat(deletionCurrent.getLong(), equalTo(0L)); - var indexingTime = getRecordedMetric(plugin::getLongAsyncCounterMeasurement, "es.indexing.time"); - assertThat(indexingTime.getLong(), greaterThan(0L)); + var indexingTime = getSingleRecordedMetric(plugin::getLongAsyncCounterMeasurement, "es.indexing.time"); + assertThat(indexingTime.getLong(), greaterThan(0L)); - var deletionTime = getRecordedMetric(plugin::getLongAsyncCounterMeasurement, "es.deletion.time"); - assertThat(deletionTime.getLong(), greaterThanOrEqualTo(0L)); + var deletionTime = getSingleRecordedMetric(plugin::getLongAsyncCounterMeasurement, "es.deletion.time"); + assertThat(deletionTime.getLong(), greaterThanOrEqualTo(0L)); - var throttleTime = getRecordedMetric(plugin::getLongAsyncCounterMeasurement, 
"es.indices.throttle.time"); - assertThat(throttleTime.getLong(), equalTo(0L)); + var throttleTime = getSingleRecordedMetric(plugin::getLongAsyncCounterMeasurement, "es.indices.throttle.time"); + assertThat(throttleTime.getLong(), equalTo(0L)); - var noopTotal = getRecordedMetric(plugin::getLongAsyncCounterMeasurement, "es.indices.noop.total"); - assertThat(noopTotal.getLong(), equalTo(0L)); + var noopTotal = getSingleRecordedMetric(plugin::getLongAsyncCounterMeasurement, "es.indices.noop.total"); + assertThat(noopTotal.getLong(), equalTo(0L)); - var coordinatingOperationsSize = getRecordedMetric( - plugin::getLongAsyncCounterMeasurement, - "es.indexing.coordinating_operations.size" - ); - assertThat(coordinatingOperationsSize.getLong(), greaterThan(0L)); - - var coordinatingOperationsTotal = getRecordedMetric( - plugin::getLongAsyncCounterMeasurement, - "es.indexing.coordinating_operations.total" - ); - // Note: `delete` request goes thru `TransportBulkAction` invoking coordinating/primary limit checks - assertThat(coordinatingOperationsTotal.getLong(), equalTo((long) docsCount + deletesCount)); + var coordinatingOperationsSize = getSingleRecordedMetric( + plugin::getLongAsyncCounterMeasurement, + "es.indexing.coordinating_operations.size" + ); + assertThat(coordinatingOperationsSize.getLong(), greaterThan(0L)); - var coordinatingOperationsCurrentSize = getRecordedMetric( - plugin::getLongGaugeMeasurement, - "es.indexing.coordinating_operations.current.size" - ); - assertThat(coordinatingOperationsCurrentSize.getLong(), equalTo(0L)); + var coordinatingOperationsTotal = getSingleRecordedMetric( + plugin::getLongAsyncCounterMeasurement, + "es.indexing.coordinating_operations.total" + ); + // Note: `delete` request goes thru `TransportBulkAction` invoking coordinating/primary limit checks + assertThat(coordinatingOperationsTotal.getLong(), equalTo((long) docsCount + deletesCount)); - var coordinatingOperationsCurrentTotal = getRecordedMetric( - 
plugin::getLongGaugeMeasurement, - "es.indexing.coordinating_operations.current.total" - ); - assertThat(coordinatingOperationsCurrentTotal.getLong(), equalTo(0L)); + var coordinatingOperationsCurrentSize = getSingleRecordedMetric( + plugin::getLongGaugeMeasurement, + "es.indexing.coordinating_operations.current.size" + ); + assertThat(coordinatingOperationsCurrentSize.getLong(), equalTo(0L)); - var coordinatingOperationsRejectionsTotal = getRecordedMetric( - plugin::getLongAsyncCounterMeasurement, - "es.indexing.coordinating_operations.rejections.total" - ); - assertThat(coordinatingOperationsRejectionsTotal.getLong(), equalTo(0L)); + var coordinatingOperationsCurrentTotal = getSingleRecordedMetric( + plugin::getLongGaugeMeasurement, + "es.indexing.coordinating_operations.current.total" + ); + assertThat(coordinatingOperationsCurrentTotal.getLong(), equalTo(0L)); - var coordinatingOperationsRejectionsRatio = getRecordedMetric( - plugin::getDoubleGaugeMeasurement, - "es.indexing.coordinating_operations.rejections.ratio" - ); - assertThat(coordinatingOperationsRejectionsRatio.getDouble(), equalTo(0.0)); + var coordinatingOperationsRejectionsTotal = getSingleRecordedMetric( + plugin::getLongAsyncCounterMeasurement, + "es.indexing.coordinating_operations.rejections.total" + ); + assertThat(coordinatingOperationsRejectionsTotal.getLong(), equalTo(0L)); - var primaryOperationsSize = getRecordedMetric(plugin::getLongAsyncCounterMeasurement, "es.indexing.primary_operations.size"); - assertThat(primaryOperationsSize.getLong(), greaterThan(0L)); + var coordinatingOperationsRejectionsRatio = getSingleRecordedMetric( + plugin::getLongAsyncCounterMeasurement, + "es.indexing.coordinating_operations.requests.total" + ); + // Note: `delete` request goes thru `TransportBulkAction` invoking coordinating/primary limit checks + assertThat(coordinatingOperationsRejectionsRatio.getLong(), equalTo((long) docsCount + deletesCount)); - var primaryOperationsTotal = 
getRecordedMetric(plugin::getLongAsyncCounterMeasurement, "es.indexing.primary_operations.total"); - // Note: `delete` request goes thru `TransportBulkAction` invoking coordinating/primary limit checks - assertThat(primaryOperationsTotal.getLong(), equalTo((long) docsCount + deletesCount)); + var primaryOperationsSize = getSingleRecordedMetric(plugin::getLongAsyncCounterMeasurement, "es.indexing.primary_operations.size"); + assertThat(primaryOperationsSize.getLong(), greaterThan(0L)); - var primaryOperationsCurrentSize = getRecordedMetric( - plugin::getLongGaugeMeasurement, - "es.indexing.primary_operations.current.size" - ); - assertThat(primaryOperationsCurrentSize.getLong(), equalTo(0L)); + var primaryOperationsTotal = getSingleRecordedMetric( + plugin::getLongAsyncCounterMeasurement, + "es.indexing.primary_operations.total" + ); + // Note: `delete` request goes thru `TransportBulkAction` invoking coordinating/primary limit checks + assertThat(primaryOperationsTotal.getLong(), equalTo((long) docsCount + deletesCount)); - var primaryOperationsCurrentTotal = getRecordedMetric( - plugin::getLongGaugeMeasurement, - "es.indexing.primary_operations.current.total" - ); - assertThat(primaryOperationsCurrentTotal.getLong(), equalTo(0L)); + var primaryOperationsCurrentSize = getSingleRecordedMetric( + plugin::getLongGaugeMeasurement, + "es.indexing.primary_operations.current.size" + ); + assertThat(primaryOperationsCurrentSize.getLong(), equalTo(0L)); - var primaryOperationsRejectionsTotal = getRecordedMetric( - plugin::getLongAsyncCounterMeasurement, - "es.indexing.primary_operations.rejections.total" - ); - assertThat(primaryOperationsRejectionsTotal.getLong(), equalTo(0L)); + var primaryOperationsCurrentTotal = getSingleRecordedMetric( + plugin::getLongGaugeMeasurement, + "es.indexing.primary_operations.current.total" + ); + assertThat(primaryOperationsCurrentTotal.getLong(), equalTo(0L)); - var primaryOperationsDocumentRejectionsRatio = getRecordedMetric( - 
plugin::getDoubleGaugeMeasurement, - "es.indexing.primary_operations.document.rejections.ratio" - ); - assertThat(primaryOperationsDocumentRejectionsRatio.getDouble(), equalTo(0.0)); + var primaryOperationsRejectionsTotal = getSingleRecordedMetric( + plugin::getLongAsyncCounterMeasurement, + "es.indexing.primary_operations.rejections.total" + ); + assertThat(primaryOperationsRejectionsTotal.getLong(), equalTo(0L)); - }); + var primaryOperationsDocumentRejectionsRatio = getSingleRecordedMetric( + plugin::getLongAsyncCounterMeasurement, + "es.indexing.primary_operations.document.rejections.total" + ); + assertThat(primaryOperationsDocumentRejectionsRatio.getLong(), equalTo(0L)); } - public void testCoordinatingRejectionMetricsArePublishing() throws Exception { + public void testCoordinatingRejectionMetricsArePublishing() { // lower Indexing Pressure limits to trigger coordinating rejections final String dataNode = internalCluster().startNode(Settings.builder().put(MAX_INDEXING_BYTES.getKey(), "1KB")); @@ -200,23 +220,91 @@ public void testCoordinatingRejectionMetricsArePublishing() throws Exception { // simulate async apm `polling` call for metrics plugin.collect(); - // this bulk request is too big to pass coordinating limit check - assertBusy(() -> { - var coordinatingOperationsRejectionsTotal = getRecordedMetric( - plugin::getLongAsyncCounterMeasurement, - "es.indexing.coordinating_operations.rejections.total" - ); - assertThat(coordinatingOperationsRejectionsTotal.getLong(), equalTo(1L)); + // this bulk request is too big to pass coordinating limit check, it has to be reported towards `rejections` total metric + var coordinatingOperationsRejectionsTotal = getSingleRecordedMetric( + plugin::getLongAsyncCounterMeasurement, + "es.indexing.coordinating_operations.rejections.total" + ); + assertThat(coordinatingOperationsRejectionsTotal.getLong(), equalTo(1L)); - var coordinatingOperationsRejectionsRatio = getRecordedMetric( - plugin::getDoubleGaugeMeasurement, - 
"es.indexing.coordinating_operations.rejections.ratio" - ); - assertThat(coordinatingOperationsRejectionsRatio.getDouble(), equalTo(1.0)); - }); + // `requests` metric should remain to `0` + var coordinatingOperationsRequestsTotal = getSingleRecordedMetric( + plugin::getLongAsyncCounterMeasurement, + "es.indexing.coordinating_operations.requests.total" + ); + assertThat(coordinatingOperationsRequestsTotal.getLong(), equalTo(0L)); } - public void testPrimaryDocumentRejectionMetricsArePublishing() throws Exception { + public void testCoordinatingRejectionMetricsSpiking() throws Exception { + + // lower Indexing Pressure limits to trigger coordinating rejections + final String dataNode = internalCluster().startNode(Settings.builder().put(MAX_INDEXING_BYTES.getKey(), "1KB")); + ensureStableCluster(1); + + final TestTelemetryPlugin plugin = internalCluster().getInstance(PluginsService.class, dataNode) + .filterPlugins(TestTelemetryPlugin.class) + .findFirst() + .orElseThrow(); + plugin.resetMeter(); + + assertAcked(prepareCreate("test").get()); + + // simulate steady processing of bulk requests + // every request should pass thru coordinating limit check + int successfulBulkCount = randomIntBetween(10, 200); + for (int bulk = 0; bulk < successfulBulkCount; bulk++) { + final BulkRequestBuilder bulkRequestBuilder = new BulkRequestBuilder(client(dataNode)); + final int batchSize = randomIntBetween(1, 5); + for (int i = 0; i < batchSize; i++) { + bulkRequestBuilder.add(new IndexRequest("test").source("field", randomAlphaOfLength(10))); + } + BulkResponse bulkResponse = bulkRequestBuilder.get(); + assertFalse(bulkResponse.hasFailures()); + } + + // simulate async apm `polling` call for metrics + plugin.collect(); + + // assert no rejections were reported + assertThat( + getSingleRecordedMetric(plugin::getLongAsyncCounterMeasurement, "es.indexing.coordinating_operations.rejections.total") + .getLong(), + equalTo(0L) + ); + assertThat( + 
getSingleRecordedMetric(plugin::getLongAsyncCounterMeasurement, "es.indexing.coordinating_operations.requests.total").getLong(), + equalTo((long) successfulBulkCount) + ); + + // simulate spike of rejected coordinating operations after steady processing + int rejectedBulkCount = randomIntBetween(1, 20); + for (int bulk = 0; bulk < rejectedBulkCount; bulk++) { + final BulkRequestBuilder bulkRequestBuilder = new BulkRequestBuilder(client(dataNode)); + final int batchSize = randomIntBetween(100, 1000); + for (int i = 0; i < batchSize; i++) { + bulkRequestBuilder.add(new IndexRequest("test").source("field", randomAlphaOfLength(100))); + } + // big batch should not pass thru coordinating limit check + expectThrows(EsRejectedExecutionException.class, bulkRequestBuilder); + } + + // simulate async apm `polling` call for metrics + plugin.collect(); + + assertThat( + getLatestRecordedMetric(plugin::getLongAsyncCounterMeasurement, "es.indexing.coordinating_operations.rejections.total") + .getLong(), + equalTo((long) rejectedBulkCount) + ); + // number of successfully processed coordinating requests should remain as seen before + assertThat( + getLatestRecordedMetric(plugin::getLongAsyncCounterMeasurement, "es.indexing.coordinating_operations.requests.total").getLong(), + equalTo((long) successfulBulkCount) + ); + + } + + public void testPrimaryDocumentRejectionMetricsArePublishing() { // setting low Indexing Pressure limits to trigger primary rejections final String dataNode = internalCluster().startNode(Settings.builder().put(MAX_INDEXING_BYTES.getKey(), "2KB").build()); @@ -264,34 +352,119 @@ public void testPrimaryDocumentRejectionMetricsArePublishing() throws Exception plugin.collect(); // this bulk request is too big to pass coordinating limit check - assertBusy(() -> { - var primaryOperationsRejectionsTotal = getRecordedMetric( - plugin::getLongAsyncCounterMeasurement, - "es.indexing.primary_operations.rejections.total" + assertThat( + 
getSingleRecordedMetric(plugin::getLongAsyncCounterMeasurement, "es.indexing.primary_operations.rejections.total").getLong(), + equalTo((long) numberOfShards) + ); + + // all unsuccessful indexing operations (aka documents) should be reported towards `.document.rejections.total` metric + assertThat( + getSingleRecordedMetric(plugin::getLongAsyncCounterMeasurement, "es.indexing.primary_operations.document.rejections.total") + .getLong(), + equalTo((long) batchCountOne) + ); + + // all successful indexing operations (aka documents) should be reported towards `.primary_operations.total` metric + assertThat( + getSingleRecordedMetric(plugin::getLongAsyncCounterMeasurement, "es.indexing.primary_operations.total").getLong(), + equalTo((long) batchCountTwo) + ); + } + + public void testPrimaryDocumentRejectionMetricsFluctuatingOverTime() throws Exception { + + // setting low Indexing Pressure limits to trigger primary rejections + final String dataNode = internalCluster().startNode(Settings.builder().put(MAX_INDEXING_BYTES.getKey(), "4KB").build()); + // setting high Indexing Pressure limits to pass coordinating checks + final String coordinatingNode = internalCluster().startCoordinatingOnlyNode( + Settings.builder().put(MAX_INDEXING_BYTES.getKey(), "100MB").build() + ); + ensureStableCluster(2); + + // for simplicity do not mix small and big documents in single index/shard + assertAcked(prepareCreate("test-index-one", Settings.builder().put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1)).get()); + assertAcked(prepareCreate("test-index-two", Settings.builder().put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1)).get()); + + final TestTelemetryPlugin plugin = internalCluster().getInstance(PluginsService.class, dataNode) + .filterPlugins(TestTelemetryPlugin.class) + .findFirst() + .orElseThrow(); + plugin.resetMeter(); + + // how many times are we going to gauge metrics + // simulate time flow and assert that results of previous calls do not impact current metric readings + int 
numberOfMetricCollectionRounds = randomIntBetween(2, 10); + logger.debug("--> running {} rounds of gauging metrics", numberOfMetricCollectionRounds); + + // to simulate cumulative property of underneath metric counters + int prevRejectedDocumentsNumber = 0; + int prevAcceptedDocumentsNumber = 0; + + for (int i = 0; i < numberOfMetricCollectionRounds; i++) { + + final BulkRequest bulkRequestOne = new BulkRequest(); + + // construct bulk request of small and big documents (big are not supposed to pass thru a primary memory limit gate) + int acceptedDocumentsNumber = randomIntBetween(1, 5); + for (int j = 0; j < acceptedDocumentsNumber; j++) { + bulkRequestOne.add(new IndexRequest("test-index-one").source("field", randomAlphaOfLength(1))); + } + + final BulkRequest bulkRequestTwo = new BulkRequest(); + int rejectedDocumentsNumber = randomIntBetween(1, 20); + for (int j = 0; j < rejectedDocumentsNumber; j++) { + bulkRequestTwo.add(new IndexRequest("test-index-two").source("field", randomAlphaOfLength(5120))); + } + + logger.debug("--> round: {}, small docs: {}, big docs: {}", i, acceptedDocumentsNumber, rejectedDocumentsNumber); + + // requests are sent thru coordinating node + + final BulkResponse bulkResponseOne = client(coordinatingNode).bulk(bulkRequestOne).actionGet(); + assertThat(bulkResponseOne.hasFailures(), equalTo(false)); + + final BulkResponse bulkResponseTwo = client(coordinatingNode).bulk(bulkRequestTwo).actionGet(); + assertThat(bulkResponseTwo.hasFailures(), equalTo(true)); + assertThat( + Arrays.stream(bulkResponseTwo.getItems()).filter(r -> r.status() == RestStatus.TOO_MANY_REQUESTS).count(), + equalTo((long) rejectedDocumentsNumber) ); - assertThat(primaryOperationsRejectionsTotal.getLong(), equalTo((long) numberOfShards)); - var primaryOperationsDocumentRejectionsRatio = getRecordedMetric( - plugin::getDoubleGaugeMeasurement, - "es.indexing.primary_operations.document.rejections.ratio" + // simulate async apm `polling` call for metrics + 
plugin.collect(); + + // all unsuccessful indexing operations (aka documents) should be reported towards `.document.rejections.total` metric + assertThat( + getLatestRecordedMetric(plugin::getLongAsyncCounterMeasurement, "es.indexing.primary_operations.document.rejections.total") + .getLong(), + equalTo((long) rejectedDocumentsNumber + prevRejectedDocumentsNumber) ); - // ratio of rejected documents vs all indexing documents + prevRejectedDocumentsNumber += rejectedDocumentsNumber; + + // all successful indexing operations (aka documents) should be reported towards `.primary_operations.total` metric assertThat( - equals(primaryOperationsDocumentRejectionsRatio.getDouble(), (double) batchCountOne / (batchCountOne + batchCountTwo)), - equalTo(true) + getLatestRecordedMetric(plugin::getLongAsyncCounterMeasurement, "es.indexing.primary_operations.total").getLong(), + equalTo((long) acceptedDocumentsNumber + prevAcceptedDocumentsNumber) ); - }); + prevAcceptedDocumentsNumber += acceptedDocumentsNumber; + } } - private static Measurement getRecordedMetric(Function> metricGetter, String name) { + private static Measurement getSingleRecordedMetric(Function> metricGetter, String name) { final List measurements = metricGetter.apply(name); assertFalse("Indexing metric is not recorded", measurements.isEmpty()); assertThat(measurements.size(), equalTo(1)); return measurements.get(0); } - private static boolean equals(double expected, double actual) { + private static Measurement getLatestRecordedMetric(Function> metricGetter, String name) { + final List measurements = metricGetter.apply(name); + assertFalse("Indexing metric is not recorded", measurements.isEmpty()); + return measurements.get(measurements.size() - 1); + } + + private static boolean doublesEquals(double expected, double actual) { final double eps = .0000001; return Math.abs(expected - actual) < eps; } diff --git a/server/src/main/java/org/elasticsearch/TransportVersions.java 
b/server/src/main/java/org/elasticsearch/TransportVersions.java index f9ab7944714a4..3f66147181593 100644 --- a/server/src/main/java/org/elasticsearch/TransportVersions.java +++ b/server/src/main/java/org/elasticsearch/TransportVersions.java @@ -192,6 +192,8 @@ static TransportVersion def(int id) { public static final TransportVersion ROLE_REMOTE_CLUSTER_PRIVS = def(8_649_00_0); public static final TransportVersion NO_GLOBAL_RETENTION_FOR_SYSTEM_DATA_STREAMS = def(8_650_00_0); public static final TransportVersion SHUTDOWN_REQUEST_TIMEOUTS_FIX = def(8_651_00_0); + public static final TransportVersion INDEXING_PRESSURE_REQUEST_REJECTIONS_COUNT = def(8_652_00_0); + /* * STOP! READ THIS FIRST! No, really, * ____ _____ ___ ____ _ ____ _____ _ ____ _____ _ _ ___ ____ _____ ___ ____ ____ _____ _ diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsNodes.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsNodes.java index 6ffe7ac390260..70060fc834452 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsNodes.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsNodes.java @@ -786,6 +786,7 @@ static class IndexPressureStats implements ToXContentFragment { long memoryLimit = 0; long totalCoordinatingOps = 0; + long totalCoordinatingRequests = 0; long totalPrimaryOps = 0; long totalReplicaOps = 0; long currentCoordinatingOps = 0; @@ -813,6 +814,7 @@ static class IndexPressureStats implements ToXContentFragment { currentPrimaryOps += nodeStatIndexingPressureStats.getCurrentPrimaryOps(); currentReplicaOps += nodeStatIndexingPressureStats.getCurrentReplicaOps(); primaryDocumentRejections += nodeStatIndexingPressureStats.getPrimaryDocumentRejections(); + totalCoordinatingRequests += nodeStatIndexingPressureStats.getTotalCoordinatingRequests(); } } indexingPressureStats = new IndexingPressureStats( @@ -834,7 +836,8 @@ static class 
IndexPressureStats implements ToXContentFragment { currentCoordinatingOps, currentPrimaryOps, currentReplicaOps, - primaryDocumentRejections + primaryDocumentRejections, + totalCoordinatingRequests ); } diff --git a/server/src/main/java/org/elasticsearch/index/IndexingPressure.java b/server/src/main/java/org/elasticsearch/index/IndexingPressure.java index 7696cf99b75cd..7f07cdd1c3b1a 100644 --- a/server/src/main/java/org/elasticsearch/index/IndexingPressure.java +++ b/server/src/main/java/org/elasticsearch/index/IndexingPressure.java @@ -46,6 +46,7 @@ public class IndexingPressure { private final AtomicLong totalReplicaBytes = new AtomicLong(0); private final AtomicLong totalCoordinatingOps = new AtomicLong(0); + private final AtomicLong totalCoordinatingRequests = new AtomicLong(0); private final AtomicLong totalPrimaryOps = new AtomicLong(0); private final AtomicLong totalReplicaOps = new AtomicLong(0); @@ -109,6 +110,7 @@ public Releasable markCoordinatingOperationStarted(int operations, long bytes, b totalCombinedCoordinatingAndPrimaryBytes.getAndAdd(bytes); totalCoordinatingBytes.getAndAdd(bytes); totalCoordinatingOps.getAndAdd(operations); + totalCoordinatingRequests.getAndIncrement(); return wrapReleasable(() -> { logger.trace(() -> Strings.format("removing [%d] coordinating operations and [%d] bytes", operations, bytes)); this.currentCombinedCoordinatingAndPrimaryBytes.getAndAdd(-bytes); @@ -221,7 +223,8 @@ public IndexingPressureStats stats() { currentCoordinatingOps.get(), currentPrimaryOps.get(), currentReplicaOps.get(), - primaryDocumentRejections.get() + primaryDocumentRejections.get(), + totalCoordinatingRequests.get() ); } } diff --git a/server/src/main/java/org/elasticsearch/index/stats/IndexingPressureStats.java b/server/src/main/java/org/elasticsearch/index/stats/IndexingPressureStats.java index 608fa3128bf09..1316776ec39b2 100644 --- a/server/src/main/java/org/elasticsearch/index/stats/IndexingPressureStats.java +++ 
b/server/src/main/java/org/elasticsearch/index/stats/IndexingPressureStats.java @@ -37,6 +37,7 @@ public class IndexingPressureStats implements Writeable, ToXContentFragment { // These fields will be used for additional back-pressure and metrics in the future private final long totalCoordinatingOps; + private final long totalCoordinatingRequests; private final long totalPrimaryOps; private final long totalReplicaOps; private final long currentCoordinatingOps; @@ -77,6 +78,12 @@ public IndexingPressureStats(StreamInput in) throws IOException { } else { primaryDocumentRejections = -1L; } + + if (in.getTransportVersion().onOrAfter(TransportVersions.INDEXING_PRESSURE_REQUEST_REJECTIONS_COUNT)) { + totalCoordinatingRequests = in.readVLong(); + } else { + totalCoordinatingRequests = -1L; + } } public IndexingPressureStats( @@ -98,7 +105,8 @@ public IndexingPressureStats( long currentCoordinatingOps, long currentPrimaryOps, long currentReplicaOps, - long primaryDocumentRejections + long primaryDocumentRejections, + long totalCoordinatingRequests ) { this.totalCombinedCoordinatingAndPrimaryBytes = totalCombinedCoordinatingAndPrimaryBytes; this.totalCoordinatingBytes = totalCoordinatingBytes; @@ -121,6 +129,7 @@ public IndexingPressureStats( this.currentReplicaOps = currentReplicaOps; this.primaryDocumentRejections = primaryDocumentRejections; + this.totalCoordinatingRequests = totalCoordinatingRequests; } @Override @@ -146,6 +155,10 @@ public void writeTo(StreamOutput out) throws IOException { if (out.getTransportVersion().onOrAfter(TransportVersions.INDEXING_PRESSURE_DOCUMENT_REJECTIONS_COUNT)) { out.writeVLong(primaryDocumentRejections); } + + if (out.getTransportVersion().onOrAfter(TransportVersions.INDEXING_PRESSURE_REQUEST_REJECTIONS_COUNT)) { + out.writeVLong(totalCoordinatingRequests); + } } public long getTotalCombinedCoordinatingAndPrimaryBytes() { @@ -224,6 +237,10 @@ public long getPrimaryDocumentRejections() { return primaryDocumentRejections; } + public long 
getTotalCoordinatingRequests() { + return totalCoordinatingRequests; + } + private static final String COMBINED = "combined_coordinating_and_primary"; private static final String COMBINED_IN_BYTES = "combined_coordinating_and_primary_in_bytes"; private static final String COORDINATING = "coordinating"; diff --git a/server/src/main/java/org/elasticsearch/monitor/metrics/NodeMetrics.java b/server/src/main/java/org/elasticsearch/monitor/metrics/NodeMetrics.java index 68cbcdb5657f9..c46aa4181bf05 100644 --- a/server/src/main/java/org/elasticsearch/monitor/metrics/NodeMetrics.java +++ b/server/src/main/java/org/elasticsearch/monitor/metrics/NodeMetrics.java @@ -19,7 +19,6 @@ import org.elasticsearch.monitor.jvm.GcNames; import org.elasticsearch.monitor.jvm.JvmStats; import org.elasticsearch.node.NodeService; -import org.elasticsearch.telemetry.metric.DoubleWithAttributes; import org.elasticsearch.telemetry.metric.LongWithAttributes; import org.elasticsearch.telemetry.metric.MeterRegistry; @@ -529,23 +528,16 @@ private void registerAsyncMetrics(MeterRegistry registry) { ); metrics.add( - registry.registerDoubleGauge( - "es.indexing.coordinating_operations.rejections.ratio", - "Ratio of rejected coordinating operations", - "ratio", - () -> { - var totalCoordinatingOperations = Optional.ofNullable(stats.getOrRefresh()) - .map(NodeStats::getIndexingPressureStats) - .map(IndexingPressureStats::getTotalCoordinatingOps) - .orElse(0L); - var totalCoordinatingRejections = Optional.ofNullable(stats.getOrRefresh()) + registry.registerLongAsyncCounter( + "es.indexing.coordinating_operations.requests.total", + "Total number of coordinating requests", + "operations", + () -> new LongWithAttributes( + Optional.ofNullable(stats.getOrRefresh()) .map(NodeStats::getIndexingPressureStats) - .map(IndexingPressureStats::getCoordinatingRejections) - .orElse(0L); - // rejections do not count towards `totalCoordinatingOperations` - var totalOps = totalCoordinatingOperations + 
totalCoordinatingRejections; - return new DoubleWithAttributes(totalOps != 0 ? (double) totalCoordinatingRejections / totalOps : 0.0); - } + .map(IndexingPressureStats::getTotalCoordinatingRequests) + .orElse(0L) + ) ) ); @@ -620,23 +612,16 @@ private void registerAsyncMetrics(MeterRegistry registry) { ); metrics.add( - registry.registerDoubleGauge( - "es.indexing.primary_operations.document.rejections.ratio", - "Ratio of rejected primary operations", - "ratio", - () -> { - var totalPrimaryOperations = Optional.ofNullable(stats.getOrRefresh()) - .map(NodeStats::getIndexingPressureStats) - .map(IndexingPressureStats::getTotalPrimaryOps) - .orElse(0L); - var totalPrimaryDocumentRejections = Optional.ofNullable(stats.getOrRefresh()) + registry.registerLongAsyncCounter( + "es.indexing.primary_operations.document.rejections.total", + "Total number of rejected indexing documents", + "operations", + () -> new LongWithAttributes( + Optional.ofNullable(stats.getOrRefresh()) .map(NodeStats::getIndexingPressureStats) .map(IndexingPressureStats::getPrimaryDocumentRejections) - .orElse(0L); - // primary document rejections do not count towards `totalPrimaryOperations` - var totalOps = totalPrimaryOperations + totalPrimaryDocumentRejections; - return new DoubleWithAttributes(totalOps != 0 ? 
(double) totalPrimaryDocumentRejections / totalOps : 0.0); - } + .orElse(0L) + ) ) ); diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/node/stats/NodeStatsTests.java b/server/src/test/java/org/elasticsearch/action/admin/cluster/node/stats/NodeStatsTests.java index e502904004fef..e90f2ab8f50d2 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/cluster/node/stats/NodeStatsTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/node/stats/NodeStatsTests.java @@ -1039,6 +1039,7 @@ public static NodeStats createNodeStats() { randomLongBetween(0, maxStatValue), randomLongBetween(0, maxStatValue), randomLongBetween(0, maxStatValue), + randomLongBetween(0, maxStatValue), randomLongBetween(0, maxStatValue) ); } From 2ef9e1ab24807c18cf67fa4d178db9d366306735 Mon Sep 17 00:00:00 2001 From: Yang Wang Date: Tue, 7 May 2024 14:51:30 +1000 Subject: [PATCH 016/117] [Test mute] AwaitsFix #108321 See title --- .../java/org/elasticsearch/index/translog/TranslogTests.java | 1 + 1 file changed, 1 insertion(+) diff --git a/server/src/test/java/org/elasticsearch/index/translog/TranslogTests.java b/server/src/test/java/org/elasticsearch/index/translog/TranslogTests.java index 463f268657187..72abe322c702b 100644 --- a/server/src/test/java/org/elasticsearch/index/translog/TranslogTests.java +++ b/server/src/test/java/org/elasticsearch/index/translog/TranslogTests.java @@ -4000,6 +4000,7 @@ static boolean hasCircularReference(Exception cause) { return false; } + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/108321") public void testDisabledFsync() throws IOException { var config = new TranslogConfig( shardId, From 029624000b039516f60c14228492e3164bce6c61 Mon Sep 17 00:00:00 2001 From: Yang Wang Date: Tue, 7 May 2024 15:09:44 +1000 Subject: [PATCH 017/117] Rename snapshottedCommits to acquiredIndexCommits (#107934) As title says. Rename to be more accurate. 
--- .../index/engine/CombinedDeletionPolicy.java | 34 +++++++++---------- .../index/engine/InternalEngine.java | 4 +-- .../engine/CombinedDeletionPolicyTests.java | 4 +-- .../index/engine/EngineTestCase.java | 4 +-- .../test/InternalTestCluster.java | 6 ++-- .../CcrRestoreSourceServiceTests.java | 4 +-- 6 files changed, 28 insertions(+), 28 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/index/engine/CombinedDeletionPolicy.java b/server/src/main/java/org/elasticsearch/index/engine/CombinedDeletionPolicy.java index 270bcd2297a67..a69cc42163dd2 100644 --- a/server/src/main/java/org/elasticsearch/index/engine/CombinedDeletionPolicy.java +++ b/server/src/main/java/org/elasticsearch/index/engine/CombinedDeletionPolicy.java @@ -42,7 +42,7 @@ public class CombinedDeletionPolicy extends IndexDeletionPolicy { private final TranslogDeletionPolicy translogDeletionPolicy; private final SoftDeletesPolicy softDeletesPolicy; private final LongSupplier globalCheckpointSupplier; - private final Map snapshottedCommits; // Number of snapshots held against each commit point. + private final Map acquiredIndexCommits; // Number of references held against each commit point. 
interface CommitsListener { @@ -71,7 +71,7 @@ interface CommitsListener { this.softDeletesPolicy = softDeletesPolicy; this.globalCheckpointSupplier = globalCheckpointSupplier; this.commitsListener = commitsListener; - this.snapshottedCommits = new HashMap<>(); + this.acquiredIndexCommits = new HashMap<>(); } @Override @@ -120,7 +120,7 @@ public void onCommit(List commits) throws IOException { } for (int i = 0; i < keptPosition; i++) { final IndexCommit commit = commits.get(i); - if (snapshottedCommits.containsKey(commit) == false) { + if (acquiredIndexCommits.containsKey(commit) == false) { deleteCommit(commit); if (deletedCommits == null) { deletedCommits = new ArrayList<>(); @@ -213,7 +213,7 @@ synchronized IndexCommit acquireIndexCommit(boolean acquiringSafeCommit) { assert safeCommit != null : "Safe commit is not initialized yet"; assert lastCommit != null : "Last commit is not initialized yet"; final IndexCommit snapshotting = acquiringSafeCommit ? safeCommit : lastCommit; - snapshottedCommits.merge(snapshotting, 1, Integer::sum); // increase refCount + acquiredIndexCommits.merge(snapshotting, 1, Integer::sum); // increase refCount return wrapCommit(snapshotting); } @@ -224,27 +224,27 @@ protected IndexCommit wrapCommit(IndexCommit indexCommit) { /** * Releases an index commit that acquired by {@link #acquireIndexCommit(boolean)}. * - * @return true if the snapshotting commit can be clean up. + * @return true if the acquired commit can be clean up. 
*/ - synchronized boolean releaseCommit(final IndexCommit snapshotCommit) { - final IndexCommit releasingCommit = ((SnapshotIndexCommit) snapshotCommit).getIndexCommit(); - assert snapshottedCommits.containsKey(releasingCommit) - : "Release non-snapshotted commit;" - + "snapshotted commits [" - + snapshottedCommits + synchronized boolean releaseCommit(final IndexCommit acquiredCommit) { + final IndexCommit releasingCommit = ((SnapshotIndexCommit) acquiredCommit).getIndexCommit(); + assert acquiredIndexCommits.containsKey(releasingCommit) + : "Release non-acquired commit;" + + "acquired commits [" + + acquiredIndexCommits + "], releasing commit [" + releasingCommit + "]"; // release refCount - final Integer refCount = snapshottedCommits.compute(releasingCommit, (key, count) -> { + final Integer refCount = acquiredIndexCommits.compute(releasingCommit, (key, count) -> { if (count == 1) { return null; } return count - 1; }); - assert refCount == null || refCount > 0 : "Number of snapshots can not be negative [" + refCount + "]"; - // The commit can be clean up only if no pending snapshot and it is neither the safe commit nor last commit. + assert refCount == null || refCount > 0 : "Number of references for acquired commit can not be negative [" + refCount + "]"; + // The commit can be clean up only if no refCount and it is neither the safe commit nor last commit. 
return refCount == null && releasingCommit.equals(safeCommit) == false && releasingCommit.equals(lastCommit) == false; } @@ -296,10 +296,10 @@ private static Set listOfNewFileNames(IndexCommit previous, IndexCommit } /** - * Checks whether the deletion policy is holding on to snapshotted commits + * Checks whether the deletion policy is holding on to acquired index commits */ - synchronized boolean hasSnapshottedCommits() { - return snapshottedCommits.isEmpty() == false; + synchronized boolean hasAcquiredIndexCommits() { + return acquiredIndexCommits.isEmpty() == false; } /** diff --git a/server/src/main/java/org/elasticsearch/index/engine/InternalEngine.java b/server/src/main/java/org/elasticsearch/index/engine/InternalEngine.java index 69e3be9bb2113..80fed0f3092e5 100644 --- a/server/src/main/java/org/elasticsearch/index/engine/InternalEngine.java +++ b/server/src/main/java/org/elasticsearch/index/engine/InternalEngine.java @@ -664,8 +664,8 @@ Translog getTranslog() { } // Package private for testing purposes only - boolean hasSnapshottedCommits() { - return combinedDeletionPolicy.hasSnapshottedCommits(); + boolean hasAcquiredIndexCommits() { + return combinedDeletionPolicy.hasAcquiredIndexCommits(); } @Override diff --git a/server/src/test/java/org/elasticsearch/index/engine/CombinedDeletionPolicyTests.java b/server/src/test/java/org/elasticsearch/index/engine/CombinedDeletionPolicyTests.java index dfd4ad1fc0a45..176cb50f78e0f 100644 --- a/server/src/test/java/org/elasticsearch/index/engine/CombinedDeletionPolicyTests.java +++ b/server/src/test/java/org/elasticsearch/index/engine/CombinedDeletionPolicyTests.java @@ -349,8 +349,8 @@ protected int getDocCountOfCommit(IndexCommit indexCommit) { } @Override - synchronized boolean releaseCommit(IndexCommit indexCommit) { - return super.releaseCommit(wrapCommit(indexCommit)); + synchronized boolean releaseCommit(IndexCommit acquiredCommit) { + return super.releaseCommit(wrapCommit(acquiredCommit)); } }; diff --git 
a/test/framework/src/main/java/org/elasticsearch/index/engine/EngineTestCase.java b/test/framework/src/main/java/org/elasticsearch/index/engine/EngineTestCase.java index 02520c4ac723a..94a61e57be5b1 100644 --- a/test/framework/src/main/java/org/elasticsearch/index/engine/EngineTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/index/engine/EngineTestCase.java @@ -1444,10 +1444,10 @@ public static void waitForOpsToComplete(InternalEngine engine, long seqNo) throw assertBusy(() -> assertThat(engine.getLocalCheckpointTracker().getProcessedCheckpoint(), greaterThanOrEqualTo(seqNo))); } - public static boolean hasSnapshottedCommits(Engine engine) { + public static boolean hasAcquiredIndexCommits(Engine engine) { assert engine instanceof InternalEngine : "only InternalEngines have snapshotted commits, got: " + engine.getClass(); InternalEngine internalEngine = (InternalEngine) engine; - return internalEngine.hasSnapshottedCommits(); + return internalEngine.hasAcquiredIndexCommits(); } public static final class PrimaryTermSupplier implements LongSupplier { diff --git a/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java b/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java index 66e2664c7b8b9..03af54de96482 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java +++ b/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java @@ -1294,7 +1294,7 @@ public void beforeIndexDeletion() throws Exception { assertNoPendingIndexOperations(); assertAllPendingWriteLimitsReleased(); assertOpenTranslogReferences(); - assertNoSnapshottedIndexCommit(); + assertNoAcquiredIndexCommit(); } private void assertAllPendingWriteLimitsReleased() throws Exception { @@ -1357,7 +1357,7 @@ private void assertOpenTranslogReferences() throws Exception { }, 60, TimeUnit.SECONDS); } - private void assertNoSnapshottedIndexCommit() throws Exception { + private void 
assertNoAcquiredIndexCommit() throws Exception { assertBusy(() -> { for (NodeAndClient nodeAndClient : nodes.values()) { IndicesService indexServices = getInstance(IndicesService.class, nodeAndClient.name); @@ -1368,7 +1368,7 @@ private void assertNoSnapshottedIndexCommit() throws Exception { if (engine instanceof InternalEngine) { assertFalse( indexShard.routingEntry().toString() + " has unreleased snapshotted index commits", - EngineTestCase.hasSnapshottedCommits(engine) + EngineTestCase.hasAcquiredIndexCommits(engine) ); } } catch (AlreadyClosedException ignored) { diff --git a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/repository/CcrRestoreSourceServiceTests.java b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/repository/CcrRestoreSourceServiceTests.java index e8badfbee1e3e..ed5670a4bcc3b 100644 --- a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/repository/CcrRestoreSourceServiceTests.java +++ b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/repository/CcrRestoreSourceServiceTests.java @@ -214,9 +214,9 @@ public void testGetSessionDoesNotLeakFileIfClosed() throws IOException { sessionReader.readFileBytes(files.get(1).name(), new BytesArray(new byte[10])); } - assertTrue(EngineTestCase.hasSnapshottedCommits(IndexShardTestCase.getEngine(indexShard))); + assertTrue(EngineTestCase.hasAcquiredIndexCommits(IndexShardTestCase.getEngine(indexShard))); restoreSourceService.closeSession(sessionUUID); - assertFalse(EngineTestCase.hasSnapshottedCommits(IndexShardTestCase.getEngine(indexShard))); + assertFalse(EngineTestCase.hasAcquiredIndexCommits(IndexShardTestCase.getEngine(indexShard))); closeShards(indexShard); // Exception will be thrown if file is not closed. 
From b71fc0c561afe925f625436b0fd5453a2cf28d3d Mon Sep 17 00:00:00 2001 From: Moritz Mack Date: Tue, 7 May 2024 09:42:17 +0200 Subject: [PATCH 018/117] Migrate remaining usage of skip version in YAML specs to cluster_features (#108055) --- .../test/aggregations/date_histogram.yml | 6 +- .../test/aggregations/global_with_aliases.yml | 4 +- .../aggregations/ignored_metadata_field.yml | 34 +++++------ .../stats_metric_fail_formatting.yml | 4 +- .../data_stream/160_unsupported_setting.yml | 6 +- .../test/data_stream/lifecycle/20_basic.yml | 3 +- .../test/ingest/220_drop_processor.yml | 8 +-- .../test/ingest/310_reroute_processor.yml | 3 +- .../rest-api-spec/test/ingest/60_fail.yml | 4 +- .../rest-api-spec/test/30_inner_hits.yml | 6 +- .../rest-api-spec/test/info/10_info.yml | 4 +- .../test/ingest/80_ingest_simulate.yml | 4 +- .../test/cat.segments/10_basic.yml | 9 ++- .../test/field_caps/40_time_series.yml | 9 ++- .../test/field_caps/50_fieldtype_filter.yml | 4 +- .../rest-api-spec/test/health/10_basic.yml | 4 +- .../rest-api-spec/test/health/30_feature.yml | 4 +- .../test/health/40_diagnosis.yml | 4 +- .../test/indices.get_alias/10_basic.yml | 6 +- .../15_composition.yml | 24 ++++---- .../20_update_non_dynamic_settings.yml | 4 +- .../indices.put_settings/all_path_options.yml | 3 +- .../indices.simulate_template/10_basic.yml | 24 ++++---- .../test/indices.validate_query/10_basic.yml | 4 +- .../test/nodes.stats/30_discovery.yml | 18 +++--- .../test/range/20_synthetic_source.yml | 4 +- .../test/search.highlight/10_unified.yml | 4 +- .../test/search.highlight/issue69009.yml | 4 +- .../search.inner_hits/20_highlighting.yml | 4 +- .../10_standard_retriever.yml | 4 +- .../search.retrievers/20_knn_retriever.yml | 4 +- .../search.vectors/100_knn_nested_search.yml | 22 +++---- .../110_knn_query_with_filter.yml | 4 +- .../120_knn_query_multiple_shards.yml | 6 +- .../130_knn_query_nested_search.yml | 12 ++-- .../140_knn_query_with_other_queries.yml | 6 +- 
.../150_knn_search_missing_params.yml | 4 +- .../160_knn_query_missing_params.yml | 4 +- ...70_knn_search_hex_encoded_byte_vectors.yml | 4 +- ...175_knn_query_hex_encoded_byte_vectors.yml | 4 +- .../test/search.vectors/40_knn_search.yml | 58 +++++++++--------- .../search.vectors/40_knn_search_cosine.yml | 10 ++-- .../41_knn_search_byte_quantized.yml | 12 ++-- .../search.vectors/42_knn_search_flat.yml | 4 +- .../42_knn_search_int8_flat.yml | 4 +- .../search.vectors/45_knn_search_byte.yml | 12 ++-- .../50_dense_vector_field_usage.yml | 12 ++-- .../60_dense_vector_dynamic_mapping.yml | 16 ++--- .../60_knn_search_filter_alias.yml | 4 +- .../80_dense_vector_indexed_by_default.yml | 12 ++-- .../112_field_collapsing_with_rescore.yml | 4 +- .../search/140_pre_filter_search_shards.yml | 7 +-- .../test/search/160_exists_query.yml | 6 +- .../test/search/330_fetch_fields.yml | 60 +++++++++---------- .../test/search/350_binary_field.yml | 4 +- .../rest-api-spec/test/search/370_profile.yml | 28 ++++----- .../search/380_sort_segments_on_timestamp.yml | 6 +- .../test/search/510_fragment_trimming_fix.yml | 4 +- .../test/search/520_fetch_fields.yml | 3 +- .../test/simulate.ingest/10_basic.yml | 4 +- ...dimension_and_metric_in_non_tsdb_index.yml | 5 +- .../rest-api-spec/test/tsdb/100_composite.yml | 9 ++- .../rest-api-spec/test/tsdb/10_settings.yml | 4 +- .../test/tsdb/110_field_caps.yml | 9 ++- .../test/tsdb/15_timestamp_mapping.yml | 4 +- .../rest-api-spec/test/tsdb/20_mapping.yml | 4 +- .../test/tsdb/25_id_generation.yml | 4 +- .../rest-api-spec/test/tsdb/40_search.yml | 9 ++- .../rest-api-spec/test/tsdb/50_alias.yml | 9 ++- .../test/tsdb/80_index_resize.yml | 11 +++- .../test/tsdb/90_unsupported_operations.yml | 9 ++- .../test/update/85_fields_meta.yml | 3 +- .../test/rest/ESRestTestFeatureService.java | 7 ++- .../resources/rest-api-spec/test/10_apm.yml | 3 +- .../test/license/100_license.yml | 6 +- .../rest-api-spec/test/rrf/100_rank_rrf.yml | 4 +- 
.../test/rrf/150_rank_rrf_pagination.yml | 4 +- .../test/rrf/200_rank_rrf_script.yml | 6 +- .../rest-api-spec/test/10_analyze.yml | 20 +++---- .../test/aggregate-metrics/10_basic.yml | 4 +- .../test/constant_keyword/10_basic.yml | 10 +++- .../rest-api-spec/test/dlm/10_usage.yml | 6 +- .../test/esql/110_insensitive_equals.yml | 7 +-- .../rest-api-spec/test/health/10_usage.yml | 4 +- .../rest-api-spec/test/ml/inference_crud.yml | 15 ++--- .../test/ml/learning_to_rank_rescorer.yml | 9 +-- .../text_expansion_search_rank_features.yml | 6 +- .../text_expansion_search_sparse_vector.yml | 6 +- .../rest-api-spec/test/profiling/10_basic.yml | 8 +-- .../rest-api-spec/test/rollup/get_jobs.yml | 3 +- .../search-business-rules/10_pinned_query.yml | 4 +- .../test/spatial/20_geo_centroid.yml | 3 +- .../test/transform/transforms_cat_apis.yml | 3 +- .../test/painless/40_exception.yml | 6 +- .../92_put_watch_with_indices_options.yml | 21 +++++-- .../test/watcher/usage/10_basic.yml | 3 +- .../mixed_cluster/40_ml_datafeed_crud.yml | 6 +- 97 files changed, 432 insertions(+), 385 deletions(-) diff --git a/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/date_histogram.yml b/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/date_histogram.yml index b1b9623c8769c..9f30deebc9fbd 100644 --- a/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/date_histogram.yml +++ b/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/date_histogram.yml @@ -382,9 +382,9 @@ setup: --- "Daylight with offset date_histogram test": - - skip: - version: "- 7.16.99" - reason: Bug fixed before 7.16.99 + - requires: + cluster_features: "gte_v7.17.0" + reason: Bug fixed with 7.17 - do: search: diff --git a/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/global_with_aliases.yml 
b/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/global_with_aliases.yml index f1ec41bdfe622..864b122e72020 100644 --- a/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/global_with_aliases.yml +++ b/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/global_with_aliases.yml @@ -1,6 +1,6 @@ "global agg with a terms filtered alias": - - skip: - version: "- 8.9.99" + - requires: + cluster_features: "gte_v8.10.0" reason: Fixed in 8.10 - do: diff --git a/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/ignored_metadata_field.yml b/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/ignored_metadata_field.yml index fd15d24a5f3ca..34ae07c35bb2a 100644 --- a/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/ignored_metadata_field.yml +++ b/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/ignored_metadata_field.yml @@ -65,8 +65,8 @@ setup: --- "terms aggregation on _ignored metadata field": - - skip: - version: " - 8.14.99" + - requires: + cluster_features: "gte_v8.15.0" reason: "_ignored metadata field aggregation support added in 8.15" - do: search: @@ -100,8 +100,8 @@ setup: --- "terms aggregation on _ignored metadata field with top hits": - - skip: - version: " - 8.14.99" + - requires: + cluster_features: "gte_v8.15.0" reason: "_ignored metadata field aggregation support added in 8.15" - do: search: @@ -136,8 +136,8 @@ setup: --- "date histogram aggregation with terms on _ignored metadata field": - - skip: - version: " - 8.14.99" + - requires: + cluster_features: "gte_v8.15.0" reason: "_ignored metadata field aggregation support added in 8.15" - do: search: @@ -172,8 +172,8 @@ setup: --- "cardinality aggregation on _ignored metadata field": - - skip: - version: " - 8.14.99" + - requires: + cluster_features: "gte_v8.15.0" reason: "_ignored metadata field 
aggregation support added in 8.15" - do: search: @@ -189,8 +189,8 @@ setup: --- "value count aggregation on _ignored metadata field": - - skip: - version: " - 8.14.99" + - requires: + cluster_features: "gte_v8.15.0" reason: "_ignored metadata field aggregation support added in 8.15" - do: search: @@ -206,8 +206,8 @@ setup: --- "date range aggregation with terms on _ignored metadata field": - - skip: - version: " - 8.14.99" + - requires: + cluster_features: "gte_v8.15.0" reason: "_ignored metadata field aggregation support added in 8.15" - do: search: @@ -249,8 +249,8 @@ setup: --- "random sampler aggregation with terms on _ignored metadata field": - - skip: - version: " - 8.14.99" + - requires: + cluster_features: "gte_v8.15.0" reason: "_ignored metadata field aggregation support added in 8.15" - do: search: @@ -280,10 +280,10 @@ setup: --- "filter aggregation on _ignored metadata field": - - skip: - version: " - 8.14.99" + - requires: + cluster_features: "gte_v8.15.0" reason: "_ignored metadata field aggregation support added in 8.15" - features: close_to + test_runner_features: close_to - do: search: body: diff --git a/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/stats_metric_fail_formatting.yml b/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/stats_metric_fail_formatting.yml index 650c8447c5b10..d9298a832e650 100644 --- a/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/stats_metric_fail_formatting.yml +++ b/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/stats_metric_fail_formatting.yml @@ -26,8 +26,8 @@ setup: --- "fail formatting": - - skip: - version: "- 8.14.99" + - requires: + cluster_features: "gte_v8.15.0" reason: fixed in 8.15.0 - do: catch: /Cannot format stat \[sum\] with format \[DocValueFormat.DateTime\(format\[date_hour_minute_second_millis\] locale\[\], Z, MILLISECONDS\)\]/ diff --git 
a/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/160_unsupported_setting.yml b/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/160_unsupported_setting.yml index d74bd2e598a86..5b6ece610af32 100644 --- a/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/160_unsupported_setting.yml +++ b/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/160_unsupported_setting.yml @@ -1,8 +1,8 @@ bad setting fails: - skip: - version: all - reason: https://github.com/elastic/elasticsearch/issues/78677 - features: allowed_warnings + awaits_fix: https://github.com/elastic/elasticsearch/issues/78677 + - requires: + test_runner_features: allowed_warnings - do: allowed_warnings: diff --git a/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/lifecycle/20_basic.yml b/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/lifecycle/20_basic.yml index 18aee1bf77232..1cf44312ae7d5 100644 --- a/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/lifecycle/20_basic.yml +++ b/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/lifecycle/20_basic.yml @@ -52,8 +52,7 @@ setup: --- "Get data stream with default lifecycle": - skip: - version: all - reason: https://github.com/elastic/elasticsearch/pull/100187 + awaits_fix: https://github.com/elastic/elasticsearch/pull/100187 - do: indices.get_data_lifecycle: diff --git a/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/220_drop_processor.yml b/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/220_drop_processor.yml index c47dacacde3d8..68d1fa3da0dfc 100644 --- a/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/220_drop_processor.yml +++ b/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/220_drop_processor.yml @@ -98,8 +98,8 @@ 
teardown: --- "Test Drop Processor with Upsert (_bulk)": - - skip: - version: ' - 8.12.0' + - requires: + cluster_features: "gte_v8.12.1" reason: 'https://github.com/elastic/elasticsearch/issues/36746 fixed in 8.12.1' - do: ingest.put_pipeline: @@ -139,8 +139,8 @@ teardown: --- "Test Drop Processor with Upsert (_update)": - - skip: - version: ' - 8.12.0' + - requires: + cluster_features: "gte_v8.12.1" reason: 'https://github.com/elastic/elasticsearch/issues/36746 fixed in 8.12.1' - do: ingest.put_pipeline: diff --git a/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/310_reroute_processor.yml b/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/310_reroute_processor.yml index e2f4e32777a1f..b2cbb352448ab 100644 --- a/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/310_reroute_processor.yml +++ b/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/310_reroute_processor.yml @@ -24,8 +24,7 @@ teardown: --- "Test first matching router terminates pipeline": - skip: - version: all - reason: "AwaitsFix https://github.com/elastic/elasticsearch/issues/102144" + awaits_fix: "https://github.com/elastic/elasticsearch/issues/102144" - do: ingest.put_pipeline: id: "pipeline-with-two-data-stream-processors" diff --git a/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/60_fail.yml b/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/60_fail.yml index 0bf623e8ff263..58c59e6852306 100644 --- a/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/60_fail.yml +++ b/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/60_fail.yml @@ -76,8 +76,8 @@ teardown: --- "Test Fail Processor with Upsert (bulk)": - - skip: - version: ' - 8.12.0' + - requires: + cluster_features: "gte_v8.12.1" reason: 'https://github.com/elastic/elasticsearch/issues/36746 fixed in 8.12.1' - do: ingest.put_pipeline: diff 
--git a/modules/parent-join/src/yamlRestTest/resources/rest-api-spec/test/30_inner_hits.yml b/modules/parent-join/src/yamlRestTest/resources/rest-api-spec/test/30_inner_hits.yml index 40d646cc645f5..35b509eec9b45 100644 --- a/modules/parent-join/src/yamlRestTest/resources/rest-api-spec/test/30_inner_hits.yml +++ b/modules/parent-join/src/yamlRestTest/resources/rest-api-spec/test/30_inner_hits.yml @@ -119,9 +119,9 @@ teardown: --- profile fetch: - - skip: - version: ' - 8.14.99' - reason: fetch fields and stored_fields using ValueFetcher + - requires: + cluster_features: "gte_v8.15.0" + reason: "fetch fields and stored_fields using ValueFetcher" - do: search: diff --git a/modules/rest-root/src/yamlRestTest/resources/rest-api-spec/test/info/10_info.yml b/modules/rest-root/src/yamlRestTest/resources/rest-api-spec/test/info/10_info.yml index 91ae0a7160698..556f53357135f 100644 --- a/modules/rest-root/src/yamlRestTest/resources/rest-api-spec/test/info/10_info.yml +++ b/modules/rest-root/src/yamlRestTest/resources/rest-api-spec/test/info/10_info.yml @@ -11,7 +11,9 @@ --- "Info build flavor": - skip: - version: "8.3.0 - 8.3.2" + known_issues: + - cluster_feature: "gte_v8.3.0" + fixed_by: "gte_v8.3.3" reason: "build flavor in info was missing in 8.3.0 to 8.3.2" - do: {info: {}} - match: { version.build_flavor: default } diff --git a/qa/smoke-test-ingest-with-all-dependencies/src/yamlRestTest/resources/rest-api-spec/test/ingest/80_ingest_simulate.yml b/qa/smoke-test-ingest-with-all-dependencies/src/yamlRestTest/resources/rest-api-spec/test/ingest/80_ingest_simulate.yml index 6d6ee1f6bed41..a42b987a9bddd 100644 --- a/qa/smoke-test-ingest-with-all-dependencies/src/yamlRestTest/resources/rest-api-spec/test/ingest/80_ingest_simulate.yml +++ b/qa/smoke-test-ingest-with-all-dependencies/src/yamlRestTest/resources/rest-api-spec/test/ingest/80_ingest_simulate.yml @@ -1,6 +1,6 @@ setup: - - skip: - version: ' - 8.11.99' + - requires: + cluster_features: "gte_v8.12.0" reason: 'ingest 
simulate added in 8.12' --- diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cat.segments/10_basic.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cat.segments/10_basic.yml index 25f1230fb521e..646530214bf09 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cat.segments/10_basic.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cat.segments/10_basic.yml @@ -172,9 +172,14 @@ --- tsdb: + - requires: + cluster_features: "gte_v8.5.0" + reason: "Serialization for segment stats fixed in 8.5.0" - skip: - version: " - 8.4.99, 8.7.00 - 8.9.99" - reason: Serialization for segment stats fixed in 8.5.0, synthetic source shows up in the mapping in 8.10 and on, may trigger assert failures in mixed cluster tests + known_issues: + - cluster_feature: "gte_v8.7.0" + fixed_by: "gte_v8.10.0" + reason: "Synthetic source shows up in the mapping in 8.10 and on, may trigger assert failures in mixed cluster tests" - do: indices.create: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/field_caps/40_time_series.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/field_caps/40_time_series.yml index 2d43d22da4ccf..4af42f3e2dfbb 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/field_caps/40_time_series.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/field_caps/40_time_series.yml @@ -1,8 +1,13 @@ --- setup: + - requires: + cluster_features: "gte_v8.1.0" + reason: "Introduced in 8.1.0" - skip: - version: " - 8.0.99, 8.7.00 - 8.9.99" - reason: introduced in 8.1.0, synthetic source shows up in the mapping in 8.10 and on, may trigger assert failures in mixed cluster tests + known_issues: + - cluster_feature: "gte_v8.7.0" + fixed_by: "gte_v8.10.0" + reason: "Synthetic source shows up in the mapping in 8.10 and on, may trigger assert failures in mixed cluster tests" - do: indices.create: diff --git 
a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/field_caps/50_fieldtype_filter.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/field_caps/50_fieldtype_filter.yml index 667caf1ba92a7..e50ab9c65e0f7 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/field_caps/50_fieldtype_filter.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/field_caps/50_fieldtype_filter.yml @@ -1,7 +1,7 @@ --- setup: - - skip: - version: "- 8.1.99" + - requires: + cluster_features: "gte_v8.2.0" reason: Field type filters were added in 8.2 - do: indices.create: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/health/10_basic.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/health/10_basic.yml index a000a9eac16ad..4e97b9fd44109 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/health/10_basic.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/health/10_basic.yml @@ -1,7 +1,7 @@ --- "cluster health basic test": - - skip: - version: "- 8.6.99" + - requires: + cluster_features: "gte_v8.7.0" reason: "health was added in 8.2.0, master_is_stable in 8.4.0, and REST API updated in 8.7" - do: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/health/30_feature.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/health/30_feature.yml index 449954220a1ea..335d02421b0a1 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/health/30_feature.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/health/30_feature.yml @@ -1,7 +1,7 @@ --- "cluster health test drilling down into a feature": - - skip: - version: "- 8.6.99" + - requires: + cluster_features: "gte_v8.7.0" reason: "the API path changed in 8.7" - do: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/health/40_diagnosis.yml 
b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/health/40_diagnosis.yml index 76b81354b7413..0d9ac3017420c 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/health/40_diagnosis.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/health/40_diagnosis.yml @@ -1,7 +1,7 @@ --- "Diagnosis": - - skip: - version: "- 8.6.99" + - requires: + cluster_features: "gte_v8.7.0" reason: "the API path changed in 8.7" - do: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.get_alias/10_basic.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.get_alias/10_basic.yml index d765decda68a8..4f26a69712e83 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.get_alias/10_basic.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.get_alias/10_basic.yml @@ -333,9 +333,9 @@ setup: --- "Deprecated local parameter": - - skip: - version: "- 8.11.99" - features: ["warnings"] + - requires: + cluster_features: "gte_v8.12.0" + test_runner_features: ["warnings"] reason: verifying deprecation warnings from 8.12.0 onwards - do: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.put_index_template/15_composition.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.put_index_template/15_composition.yml index 280a645318dd9..45bcf64f98945 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.put_index_template/15_composition.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.put_index_template/15_composition.yml @@ -352,10 +352,10 @@ --- "Composable index templates that include subobjects: false at root": - - skip: - version: ' - 8.10.99' - reason: 'https://github.com/elastic/elasticsearch/issues/96768 fixed at 8.11.0' - features: allowed_warnings + - requires: + cluster_features: "gte_v8.11.0" + reason: 
"https://github.com/elastic/elasticsearch/issues/96768 fixed at 8.11.0" + test_runner_features: "allowed_warnings" - do: cluster.put_component_template: @@ -399,10 +399,10 @@ --- "Composable index templates that include subobjects: false on arbitrary field": - - skip: - version: ' - 8.10.99' - reason: 'https://github.com/elastic/elasticsearch/issues/96768 fixed at 8.11.0' - features: allowed_warnings + - requires: + cluster_features: "gte_v8.11.0" + reason: "https://github.com/elastic/elasticsearch/issues/96768 fixed at 8.11.0" + test_runner_features: "allowed_warnings" - do: cluster.put_component_template: @@ -494,10 +494,10 @@ - match: { test-generic.mappings.properties.field.ignore_above: 1024 } --- "Using deprecated component templates and pipelines in index template": - - skip: - version: ' - 8.11.99' - reason: 'The deprecated flags have been introduced in 8.12.0' - features: allowed_warnings + - requires: + cluster_features: "gte_v8.12.0" + reason: "The deprecated flags have been introduced in 8.12.0" + test_runner_features: "allowed_warnings" - do: cluster.put_component_template: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.put_settings/20_update_non_dynamic_settings.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.put_settings/20_update_non_dynamic_settings.yml index 07c0e8b7a8b2a..c75b437110413 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.put_settings/20_update_non_dynamic_settings.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.put_settings/20_update_non_dynamic_settings.yml @@ -1,6 +1,6 @@ setup: - - skip: - version: ' - 8.11.99' + - requires: + cluster_features: "gte_v8.12.0" reason: 'ability to update non-dynamic settings added in 8.12' - do: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.put_settings/all_path_options.yml 
b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.put_settings/all_path_options.yml index e0b8f56282c05..ae3eadded108b 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.put_settings/all_path_options.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.put_settings/all_path_options.yml @@ -81,8 +81,7 @@ setup: --- "put settings in list of indices": - skip: - version: "all" - reason: list of indices not implemented yet + awaits_fix: list of indices not implemented yet - do: indices.put_settings: index: test_index1, test_index2 diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.simulate_template/10_basic.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.simulate_template/10_basic.yml index 236653b7ca9ad..73ab9c18a8ec3 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.simulate_template/10_basic.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.simulate_template/10_basic.yml @@ -1,9 +1,9 @@ --- "Simulate template without a template in the body": - - skip: - version: " - 7.8.99" + - requires: + cluster_features: "gte_v7.9.0" reason: "only available in 7.9+" - features: ["default_shards"] + test_runner_features: ["default_shards"] - do: indices.put_index_template: @@ -30,10 +30,10 @@ --- "Simulate index template specifying a new template": - - skip: - version: " - 7.8.99" + - requires: + cluster_features: "gte_v7.9.0" reason: "only available in 7.9+" - features: ["default_shards"] + test_runner_features: ["default_shards"] - do: indices.put_index_template: @@ -84,10 +84,10 @@ --- "Simulate template matches overlapping legacy and composable templates": - - skip: - version: " - 7.8.99" + - requires: + cluster_features: "gte_v7.9.0" reason: "only available in 7.9+" - features: ["allowed_warnings", "default_shards"] + test_runner_features: ["allowed_warnings", "default_shards"] - do: 
indices.put_template: @@ -147,10 +147,10 @@ --- "Simulate replacing a template with a newer version": - - skip: - version: " - 7.99.99" + - requires: + cluster_features: "gte_v8.0.0" reason: "not yet backported" - features: ["allowed_warnings", "default_shards"] + test_runner_features: ["allowed_warnings", "default_shards"] - do: indices.put_index_template: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.validate_query/10_basic.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.validate_query/10_basic.yml index 2221d08c0b7e2..673d3877d356b 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.validate_query/10_basic.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.validate_query/10_basic.yml @@ -12,8 +12,8 @@ setup: --- "Validate query api": - - skip: - version: ' - 7.6.99' + - requires: + cluster_features: "gte_v7.7.0" reason: message changed in 7.7.0 - do: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/nodes.stats/30_discovery.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/nodes.stats/30_discovery.yml index 9b12a2713e19c..50c96dcee0621 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/nodes.stats/30_discovery.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/nodes.stats/30_discovery.yml @@ -43,9 +43,9 @@ --- "Master timing stats": - - skip: - features: [arbitrary_key] - version: "- 7.15.99" + - requires: + test_runner_features: ["arbitrary_key"] + cluster_features: ["gte_v7.16.0"] reason: "master timing stats added in 7.16.0" - do: @@ -139,9 +139,9 @@ --- "Master cluster applier stats": - - skip: - features: [arbitrary_key] - version: "- 7.15.99" + - requires: + test_runner_features: ["arbitrary_key"] + cluster_features: ["gte_v7.16.0"] reason: "Cluster state applier stats available since 7.16.0" - do: @@ -161,9 +161,9 @@ --- "Master serialization stats": - - skip: - 
features: [arbitrary_key] - version: "- 7.15.99" + - requires: + test_runner_features: ["arbitrary_key"] + cluster_features: ["gte_v7.16.0"] reason: "master serialization stats added in 7.16.0" - do: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/range/20_synthetic_source.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/range/20_synthetic_source.yml index 3551d022c2f4a..cdd1223d67f11 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/range/20_synthetic_source.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/range/20_synthetic_source.yml @@ -1,6 +1,6 @@ setup: - - skip: - version: " - 8.14.99" + - requires: + cluster_features: "gte_v8.15.0" reason: synthetic source support added in 8.15 - do: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.highlight/10_unified.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.highlight/10_unified.yml index a26bc22df8936..3ae8f8b09aa4a 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.highlight/10_unified.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.highlight/10_unified.yml @@ -95,8 +95,8 @@ teardown: - match: {hits.hits.0.highlight.text\.postings.0: "The quick brown fox is brown."} --- "Test hybrid search with knn where automatically disables weighted mode": - - skip: - version: ' - 8.11.99' + - requires: + cluster_features: "gte_v8.12.0" reason: 'kNN was not correctly skipped until 8.12' - do: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.highlight/issue69009.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.highlight/issue69009.yml index cd3751dbb9653..f66b6216e2426 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.highlight/issue69009.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.highlight/issue69009.yml @@ -1,6 
+1,6 @@ setup: - - skip: - version: ' - 8.13.99' + - requires: + cluster_features: "gte_v8.14.0" reason: 'check of preTags and postTags params for empty values was added in 8.14' - do: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.inner_hits/20_highlighting.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.inner_hits/20_highlighting.yml index 17f328046833e..1043d2881d2c3 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.inner_hits/20_highlighting.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.inner_hits/20_highlighting.yml @@ -86,8 +86,8 @@ setup: --- "Unified highlighter with stored fields and disabled source": - - skip: - version: "- 7.10.1" + - requires: + cluster_features: "gte_v7.10.2" reason: "bug fix introduced in 7.10.2" - do: indices.create: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.retrievers/10_standard_retriever.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.retrievers/10_standard_retriever.yml index 23682a19ea6f7..fcd5b49c984c9 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.retrievers/10_standard_retriever.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.retrievers/10_standard_retriever.yml @@ -1,6 +1,6 @@ setup: - - skip: - version: ' - 8.13.99' + - requires: + cluster_features: "gte_v8.14.0" reason: 'standard retriever added in 8.14' - do: indices.create: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.retrievers/20_knn_retriever.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.retrievers/20_knn_retriever.yml index 68755f80c428d..d08a8e2a6d39c 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.retrievers/20_knn_retriever.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.retrievers/20_knn_retriever.yml 
@@ -1,6 +1,6 @@ setup: - - skip: - version: ' - 8.13.99' + - requires: + cluster_features: "gte_v8.14.0" reason: 'kNN retriever added in 8.14' - do: indices.create: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/100_knn_nested_search.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/100_knn_nested_search.yml index 32558dbe5a8c0..72c6abab22600 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/100_knn_nested_search.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/100_knn_nested_search.yml @@ -1,6 +1,6 @@ setup: - - skip: - version: ' - 8.10.99' + - requires: + cluster_features: "gte_v8.11.0" reason: 'nested kNN search added in 8.11' - do: indices.create: @@ -143,8 +143,8 @@ setup: - match: {hits.hits.0.inner_hits.nested.hits.hits.0.fields.nested.0.paragraph_id.0: "0"} --- "nested kNN search inner_hits size > 1": - - skip: - version: ' - 8.12.99' + - requires: + cluster_features: "gte_v8.13.0" reason: 'inner_hits on nested kNN search added in 8.13' - do: @@ -265,10 +265,10 @@ setup: - match: { hits.hits.0.inner_hits.nested.hits.hits.1.fields.nested.0.paragraph_id.0: "1" } --- "nested kNN search inner_hits & boosting": - - skip: - version: ' - 8.12.99' + - requires: + cluster_features: "gte_v8.13.0" reason: 'inner_hits on nested kNN search added in 8.13' - features: close_to + test_runner_features: close_to - do: search: @@ -309,8 +309,8 @@ setup: - close_to: { hits.hits.2.inner_hits.nested.hits.hits.0._score: {value: 0.00002, error: 0.00001} } --- "nested kNN search inner_hits & profiling": - - skip: - version: ' - 8.12.99' + - requires: + cluster_features: "gte_v8.13.0" reason: 'bugfix for inner_hits and profiling in 8.13' - do: search: @@ -329,8 +329,8 @@ setup: - is_true : profile --- "nested kNN search with filter that might match nested docs": - - skip: - version: ' - 8.13.99' + - requires: + cluster_features: 
"gte_v8.14.0" reason: 'bugfix for matching non-nested docs in 8.14' - do: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/110_knn_query_with_filter.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/110_knn_query_with_filter.yml index 849df86a30568..618951711cffd 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/110_knn_query_with_filter.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/110_knn_query_with_filter.yml @@ -1,7 +1,7 @@ # test how knn query interacts with filters setup: - - skip: - version: ' - 8.11.99' + - requires: + cluster_features: "gte_v8.12.0" reason: 'knn as query added in 8.12' - do: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/120_knn_query_multiple_shards.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/120_knn_query_multiple_shards.yml index eb70e5b7bcf64..c6f3e187f7953 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/120_knn_query_multiple_shards.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/120_knn_query_multiple_shards.yml @@ -1,9 +1,9 @@ # test how knn query interacts with filters setup: - - skip: - version: ' - 8.11.99' + - requires: + cluster_features: "gte_v8.12.0" reason: 'knn as query added in 8.12' - features: close_to + test_runner_features: "close_to" - do: indices.create: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/130_knn_query_nested_search.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/130_knn_query_nested_search.yml index 53cc7eb064270..79ff3f61742f8 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/130_knn_query_nested_search.yml +++ 
b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/130_knn_query_nested_search.yml @@ -1,6 +1,6 @@ setup: - - skip: - version: ' - 8.11.99' + - requires: + cluster_features: "gte_v8.12.0" reason: 'knn as query added in 8.12' - do: indices.create: @@ -212,8 +212,8 @@ setup: - match: {hits.total.value: 0} --- "nested kNN search inner_hits size > 1": - - skip: - version: ' - 8.12.99' + - requires: + cluster_features: "gte_v8.13.0" reason: 'inner_hits on nested kNN search added in 8.13' - do: @@ -321,8 +321,8 @@ setup: - length: { hits.hits.4.inner_hits.nested.hits.hits: 1 } --- "nested kNN query search with filter that might match nested docs": - - skip: - version: ' - 8.13.99' + - requires: + cluster_features: "gte_v8.14.0" reason: 'bugfix for matching non-nested docs in 8.14' - do: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/140_knn_query_with_other_queries.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/140_knn_query_with_other_queries.yml index 0ea24686ff839..28ecd8ef59c02 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/140_knn_query_with_other_queries.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/140_knn_query_with_other_queries.yml @@ -1,9 +1,9 @@ # test how knn query interact with other queries setup: - - skip: - version: ' - 8.11.99' + - requires: + cluster_features: "gte_v8.12.0" reason: 'knn as query added in 8.12' - features: close_to + test_runner_features: close_to - do: indices.create: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/150_knn_search_missing_params.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/150_knn_search_missing_params.yml index 23c6b62842e9f..9716762a131b7 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/150_knn_search_missing_params.yml +++ 
b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/150_knn_search_missing_params.yml @@ -1,6 +1,6 @@ setup: - - skip: - version: ' - 8.12.99' + - requires: + cluster_features: "gte_v8.13.0" reason: '[k] and [num_candidates] were made optional for kNN search in 8.13.0' - do: indices.create: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/160_knn_query_missing_params.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/160_knn_query_missing_params.yml index 4a884b644c6a7..02962e049e267 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/160_knn_query_missing_params.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/160_knn_query_missing_params.yml @@ -1,6 +1,6 @@ setup: - - skip: - version: ' - 8.12.99' + - requires: + cluster_features: "gte_v8.13.0" reason: '[k] and [num_candidates] were made optional for kNN query in 8.13.0' - do: indices.create: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/170_knn_search_hex_encoded_byte_vectors.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/170_knn_search_hex_encoded_byte_vectors.yml index 71f65220eba1e..44f17e2269027 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/170_knn_search_hex_encoded_byte_vectors.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/170_knn_search_hex_encoded_byte_vectors.yml @@ -1,6 +1,6 @@ setup: - - skip: - version: ' - 8.13.99' + - requires: + cluster_features: "gte_v8.14.0" reason: 'hex encoding for byte vectors was added in 8.14' - do: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/175_knn_query_hex_encoded_byte_vectors.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/175_knn_query_hex_encoded_byte_vectors.yml index 
9f850400a09cd..e01f3ec18b8c3 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/175_knn_query_hex_encoded_byte_vectors.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/175_knn_query_hex_encoded_byte_vectors.yml @@ -1,6 +1,6 @@ setup: - - skip: - version: ' - 8.13.99' + - requires: + cluster_features: "gte_v8.14.0" reason: 'hex encoding for byte vectors was added in 8.14' - do: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/40_knn_search.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/40_knn_search.yml index 8471bd8cb5a9a..7f0c24e217d14 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/40_knn_search.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/40_knn_search.yml @@ -1,6 +1,6 @@ setup: - - skip: - version: ' - 7.99.99' + - requires: + cluster_features: "gte_v8.0.0" reason: 'kNN search added in 8.0' - do: indices.create: @@ -61,8 +61,8 @@ setup: --- "kNN search only": - - skip: - version: ' - 8.3.99' + - requires: + cluster_features: "gte_v8.4.0" reason: 'kNN added to search endpoint in 8.4' - do: search: @@ -82,8 +82,8 @@ setup: - match: {hits.hits.1.fields.name.0: "rabbit.jpg"} --- "kNN multi-field search only": - - skip: - version: ' - 8.6.99' + - requires: + cluster_features: "gte_v8.7.0" reason: 'multi-field kNN search added to search endpoint in 8.7' - do: search: @@ -101,8 +101,8 @@ setup: - match: {hits.hits.1.fields.name.0: "moose.jpg"} --- "kNN search plus query": - - skip: - version: ' - 8.3.99' + - requires: + cluster_features: "gte_v8.4.0" reason: 'kNN added to search endpoint in 8.4' - do: search: @@ -128,8 +128,8 @@ setup: - match: {hits.hits.2.fields.name.0: "rabbit.jpg"} --- "kNN multi-field search with query": - - skip: - version: ' - 8.6.99' + - requires: + cluster_features: "gte_v8.7.0" reason: 'multi-field kNN search added to 
search endpoint in 8.7' - do: search: @@ -153,8 +153,8 @@ setup: - match: {hits.hits.2.fields.name.0: "moose.jpg"} --- "kNN search with filter": - - skip: - version: ' - 8.3.99' + - requires: + cluster_features: "gte_v8.4.0" reason: 'kNN added to search endpoint in 8.4' - do: search: @@ -194,8 +194,8 @@ setup: --- "kNN search with explicit search_type": - - skip: - version: ' - 8.3.99' + - requires: + cluster_features: "gte_v8.4.0" reason: 'kNN added to search endpoint in 8.4' - do: catch: bad_request @@ -238,10 +238,10 @@ setup: --- "kNN search with filter in _knn_search endpoint": - - skip: - version: ' - 8.1.99' + - requires: + cluster_features: "gte_v8.2.0" reason: 'kNN with filtering added in 8.2' - features: ["allowed_warnings"] + test_runner_features: ["allowed_warnings"] - do: allowed_warnings: - "The kNN search API has been replaced by the `knn` option in the search API." @@ -284,8 +284,8 @@ setup: --- "Test nonexistent field": - - skip: - version: ' - 8.3.99' + - requires: + cluster_features: "gte_v8.4.0" reason: 'kNN added to search endpoint in 8.4' - do: catch: bad_request @@ -303,8 +303,8 @@ setup: --- "KNN Vector similarity search only": - - skip: - version: ' - 8.7.99' + - requires: + cluster_features: "gte_v8.8.0" reason: 'kNN similarity added in 8.8' - do: search: @@ -324,8 +324,8 @@ setup: - match: {hits.hits.0.fields.name.0: "moose.jpg"} --- "Vector similarity with filter only": - - skip: - version: ' - 8.7.99' + - requires: + cluster_features: "gte_v8.8.0" reason: 'kNN similarity added in 8.8' - do: search: @@ -361,10 +361,10 @@ setup: - length: {hits.hits: 0} --- "Knn search with mip": - - skip: - version: ' - 8.10.99' + - requires: + cluster_features: "gte_v8.11.0" reason: 'mip similarity added in 8.11' - features: close_to + test_runner_features: "close_to" - do: indices.create: @@ -450,10 +450,10 @@ setup: - close_to: {hits.hits.0._score: {value: 33686.29, error: 0.01}} --- "Knn search with _name": - - skip: - version: ' - 8.14.99' + - 
requires: + cluster_features: "gte_v8.15.0" reason: 'support for _name in knn was added in 8.15' - features: close_to + test_runner_features: "close_to" - do: search: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/40_knn_search_cosine.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/40_knn_search_cosine.yml index b1933ebde297d..842f71068a34b 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/40_knn_search_cosine.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/40_knn_search_cosine.yml @@ -1,6 +1,6 @@ setup: - - skip: - version: ' - 7.99.99' + - requires: + cluster_features: "gte_v8.0.0" reason: 'kNN search added in 8.0' - do: indices.create: @@ -96,10 +96,10 @@ setup: --- "kNN search only regular query": - - skip: - version: ' - 8.3.99' + - requires: + cluster_features: "gte_v8.4.0" reason: 'kNN added to search endpoint in 8.4' - features: close_to + test_runner_features: "close_to" - do: search: index: test diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/41_knn_search_byte_quantized.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/41_knn_search_byte_quantized.yml index b61bc939f8f88..e6c669ef7b534 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/41_knn_search_byte_quantized.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/41_knn_search_byte_quantized.yml @@ -1,6 +1,6 @@ setup: - - skip: - version: ' - 8.11.99' + - requires: + cluster_features: "gte_v8.12.0" reason: 'kNN float to byte quantization added in 8.12' - do: indices.create: @@ -394,8 +394,8 @@ setup: type: int8_hnsw --- "Test create, merge, and search cosine": - - skip: - version: ' - 8.11.99' + - requires: + cluster_features: "gte_v8.12.0" reason: 'kNN float to byte quantization added in 8.12' - do: indices.create: 
@@ -467,8 +467,8 @@ setup: - match: { hits.hits.2._id: "3"} --- "Test create, merge, and search dot_product": - - skip: - version: ' - 8.11.99' + - requires: + cluster_features: "gte_v8.12.0" reason: 'kNN float to byte quantization added in 8.12' - do: indices.create: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/42_knn_search_flat.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/42_knn_search_flat.yml index 7da00a02d4285..1b439967ba163 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/42_knn_search_flat.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/42_knn_search_flat.yml @@ -1,6 +1,6 @@ setup: - - skip: - version: ' - 8.12.99' + - requires: + cluster_features: "gte_v8.13.0" reason: 'kNN flat index added in 8.13' - do: indices.create: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/42_knn_search_int8_flat.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/42_knn_search_int8_flat.yml index 81d49dad21a70..880b8cab39684 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/42_knn_search_int8_flat.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/42_knn_search_int8_flat.yml @@ -1,6 +1,6 @@ setup: - - skip: - version: ' - 8.12.99' + - requires: + cluster_features: "gte_v8.13.0" reason: 'kNN int8_flat index added in 8.13' - do: indices.create: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/45_knn_search_byte.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/45_knn_search_byte.yml index ea21bb69a77b8..983ac2719e71b 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/45_knn_search_byte.yml +++ 
b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/45_knn_search_byte.yml @@ -1,6 +1,6 @@ setup: - - skip: - version: ' - 8.5.99' + - requires: + cluster_features: "gte_v8.6.0" reason: 'byte-sized kNN search added in 8.6' - do: @@ -164,8 +164,8 @@ setup: --- "Vector similarity search only": - - skip: - version: ' - 8.7.99' + - requires: + cluster_features: "gte_v8.8.0" reason: 'kNN similarity added in 8.8' - do: search: @@ -185,8 +185,8 @@ setup: - match: {hits.hits.0.fields.name.0: "rabbit.jpg"} --- "Vector similarity with filter only": - - skip: - version: ' - 8.7.99' + - requires: + cluster_features: "gte_v8.8.0" reason: 'kNN similarity added in 8.8' - do: search: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/50_dense_vector_field_usage.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/50_dense_vector_field_usage.yml index 854543f7b2144..db0437637fc20 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/50_dense_vector_field_usage.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/50_dense_vector_field_usage.yml @@ -1,8 +1,8 @@ setup: - - skip: - features: headers - version: ' - 7.99.99' + - requires: + cluster_features: "gte_v8.0.0" reason: 'kNN search added in 8.0' + test_runner_features: "headers" - do: indices.create: index: futest @@ -50,10 +50,10 @@ setup: --- "Field usage": - - skip: - version: ' - 8.0.99' + - requires: + cluster_features: "gte_v8.1.0" reason: 'dense_vector field usage was added in 8.1' - features: ["allowed_warnings"] + test_runner_features: ["allowed_warnings"] - do: allowed_warnings: - "The kNN search API has been replaced by the `knn` option in the search API." 
diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/60_dense_vector_dynamic_mapping.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/60_dense_vector_dynamic_mapping.yml index 545953d2645da..567d338da142c 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/60_dense_vector_dynamic_mapping.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/60_dense_vector_dynamic_mapping.yml @@ -1,11 +1,11 @@ setup: - - skip: - version: ' - 8.10.99' + - requires: + cluster_features: "gte_v8.11.0" reason: 'Dynamic mapping of floats to dense_vector was added in 8.11' --- "Fields indexed as strings won't be transformed into dense_vector": - - skip: - version: ' - 8.11.0' + - requires: + cluster_features: "gte_v8.11.1" reason: 'Bug fix was added in 8.11.1' - do: index: @@ -572,8 +572,8 @@ setup: --- "Fields mapped as dense_vector without dims or docs have correct cluster stats values": - - skip: - version: ' - 8.11.1' + - requires: + cluster_features: "gte_v8.11.2" reason: 'Bug fix was added in 8.11.2' - do: @@ -603,8 +603,8 @@ setup: --- "Fields mapped as dense_vector have correct cluster stats min max values": - - skip: - version: ' - 8.11.1' + - requires: + cluster_features: "gte_v8.11.2" reason: 'Bug fix was added in 8.11.2' - do: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/60_knn_search_filter_alias.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/60_knn_search_filter_alias.yml index 0672e27b43c67..4dcfa58e79830 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/60_knn_search_filter_alias.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/60_knn_search_filter_alias.yml @@ -1,6 +1,6 @@ setup: - - skip: - version: ' - 8.4.99' + - requires: + cluster_features: "gte_v8.5.0" reason: 'filtered alias for kNN 
search added in 8.5' - do: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/80_dense_vector_indexed_by_default.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/80_dense_vector_indexed_by_default.yml index 407313a59c5e8..0238a1781d278 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/80_dense_vector_indexed_by_default.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/80_dense_vector_indexed_by_default.yml @@ -1,6 +1,6 @@ setup: - - skip: - version: ' - 8.10.99' + - requires: + cluster_features: "gte_v8.11.0" reason: 'dense_vector indexed by default was added in 8.11' --- @@ -123,8 +123,8 @@ setup: ef_construction: 200 --- "Default index options for dense_vector": - - skip: - version: ' - 8.13.99' + - requires: + cluster_features: "gte_v8.14.0" reason: 'dense_vector indexed as int8_hnsw by default was added in 8.14' - do: indices.create: @@ -149,8 +149,8 @@ setup: - match: { test_default_index_options.mappings.properties.vector.index_options.type: int8_hnsw } --- "Default index options for dense_vector element type byte": - - skip: - version: ' - 8.13.99' + - requires: + cluster_features: "gte_v8.14.0" reason: 'dense_vector indexed as int8_hnsw by default was added in 8.14' - do: indices.create: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/112_field_collapsing_with_rescore.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/112_field_collapsing_with_rescore.yml index 5048bc8d4307c..67819881f1b50 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/112_field_collapsing_with_rescore.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/112_field_collapsing_with_rescore.yml @@ -1,6 +1,6 @@ setup: - - skip: - version: " - 8.14.99" + - requires: + cluster_features: "gte_v8.15.0" reason: Collapse with rescore added in 8.15.0 - 
do: indices.create: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/140_pre_filter_search_shards.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/140_pre_filter_search_shards.yml index a778fceee9476..0ae00dff6ce63 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/140_pre_filter_search_shards.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/140_pre_filter_search_shards.yml @@ -58,8 +58,7 @@ setup: --- "pre_filter_shard_size with shards that have no hit": - skip: - version: all - reason: "AwaitsFix https://github.com/elastic/elasticsearch/issues/92058" + awaits_fix: "https://github.com/elastic/elasticsearch/issues/92058" - do: index: index: index_1 @@ -240,8 +239,8 @@ setup: --- "prefilter on non-indexed date fields": - - skip: - version: "- 8.0.99" + - requires: + cluster_features: "gte_v8.1.0" reason: "doc values search was added in 8.1.0" - do: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/160_exists_query.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/160_exists_query.yml index 3d0e4347fef6a..40ea75b81d59e 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/160_exists_query.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/160_exists_query.yml @@ -1242,8 +1242,10 @@ setup: --- "Test exists query on text field with empty values": - skip: - version: '8.4.0 - 8.5.0' - reason: Regression introduced in 8.4.0, fixed in 8.5.1 + known_issues: + - cluster_feature: "gte_v8.4.0" + fixed_by: "gte_v8.5.1" + reason: "Regression introduced in 8.4.0, fixed in 8.5.1" - do: index: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/330_fetch_fields.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/330_fetch_fields.yml index fd3d31f8245ea..52b55098ec4db 100644 --- 
a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/330_fetch_fields.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/330_fetch_fields.yml @@ -298,8 +298,8 @@ - is_false: hits.hits.0.fields.count_without_dv --- Test unmapped field: - - skip: - version: ' - 7.10.99' + - requires: + cluster_features: "gte_v7.11.0" reason: support was introduced in 7.11 - do: indices.create: @@ -364,8 +364,8 @@ Test unmapped field: - some other text --- Test unmapped fields inside disabled objects: - - skip: - version: ' - 7.10.99' + - requires: + cluster_features: "gte_v7.11.0" reason: support was introduced in 7.11 - do: indices.create: @@ -405,8 +405,8 @@ Test unmapped fields inside disabled objects: - b --- Test nested fields: - - skip: - version: ' - 7.11.99' + - requires: + cluster_features: "gte_v7.12.0" reason: support was introduced in 7.12 - do: indices.create: @@ -479,8 +479,8 @@ Test nested fields: - is_false: hits.hits.0.fields --- Test nested field inside object structure: - - skip: - version: ' - 7.11.99' + - requires: + cluster_features: "gte_v7.12.0" reason: support was introduced in 7.12 - do: indices.create: @@ -594,8 +594,8 @@ Test nested field inside object structure: hits.hits.1.fields.obj\.products.1: { "manufacturer" : ["RealTec"]} --- Test doubly nested fields: - - skip: - version: ' - 7.11.99' + - requires: + cluster_features: "gte_v7.12.0" reason: support was introduced in 7.12 - do: indices.create: @@ -663,8 +663,8 @@ Test doubly nested fields: --- Test nested fields with unmapped subfields: - - skip: - version: ' - 7.11.99' + - requires: + cluster_features: "gte_v7.12.0" reason: support was introduced in 7.12 - do: indices.create: @@ -730,8 +730,8 @@ Test nested fields with unmapped subfields: hits.hits.0.fields.user.0: { "address.city" : ["Berlin"]} --- Test nested fields with ignored subfields: - - skip: - version: ' - 7.11.99' + - requires: + cluster_features: "gte_v7.12.0" reason: support was introduced in 
7.12 - do: indices.create: @@ -773,8 +773,8 @@ Test nested fields with ignored subfields: - { "first" : [ "John" ] } --- Test nested field with sibling field resolving to DocValueFetcher: - - skip: - version: ' - 7.11.99' + - requires: + cluster_features: "gte_v7.12.0" reason: support was introduced in 7.12 - do: indices.create: @@ -824,8 +824,8 @@ Test nested field with sibling field resolving to DocValueFetcher: hits.hits.0.fields.products.1: { "manufacturer" : ["HyperSmart"]} --- "Test ignores malformed values while returning valid ones": - - skip: - version: ' - 7.11.99' + - requires: + cluster_features: "gte_v7.12.0" reason: 'Behaviour changed in 7.12' - do: indices.create: @@ -859,8 +859,8 @@ Test nested field with sibling field resolving to DocValueFetcher: --- Test token_count inside nested field doesn't fail: - - skip: - version: ' - 7.11.99' + - requires: + cluster_features: "gte_v7.12.0" reason: 'fix introduced in 7.12.0' - do: indices.create: @@ -897,8 +897,8 @@ Test token_count inside nested field doesn't fail: --- error includes field name: - - skip: - version: ' - 7.15.99' + - requires: + cluster_features: "gte_v7.16.0" reason: 'error changed in 7.16.0' - do: @@ -934,8 +934,8 @@ error includes field name: --- error includes glob pattern: - - skip: - version: ' - 7.15.99' + - requires: + cluster_features: "gte_v7.16.0" reason: 'error changed in 7.16.0' - do: @@ -972,8 +972,8 @@ error includes glob pattern: --- error for flattened includes whole path: - - skip: - version: ' - 7.15.99' + - requires: + cluster_features: "gte_v7.16.0" reason: 'error changed in 7.16.0' - do: @@ -1011,8 +1011,8 @@ error for flattened includes whole path: --- test fetching metadata fields: - - skip: - version: ' - 7.99.99' + - requires: + cluster_features: "gte_v8.0.0" reason: 'fetching metadata via fields introduced in 8.0' - do: @@ -1123,8 +1123,8 @@ fetch geo_point: --- "Test with subobjects: false": - - skip: - version: ' - 8.9.99' + - requires: + cluster_features: 
"gte_v8.10.0" reason: 'https://github.com/elastic/elasticsearch/issues/96700 fixed in 8.10.0' - do: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/350_binary_field.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/350_binary_field.yml index 22f1e08ff5c29..455d06ba2a984 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/350_binary_field.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/350_binary_field.yml @@ -48,8 +48,8 @@ --- "binary synthetic source": - - skip: - version: ' - 8.14.99' + - requires: + cluster_features: "gte_v8.15.0" reason: synthetic source support introduced in 8.15 - do: indices.create: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/370_profile.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/370_profile.yml index 7625f19557e9b..dc79961ae78cd 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/370_profile.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/370_profile.yml @@ -21,8 +21,8 @@ setup: --- fetch fields: - - skip: - version: ' - 8.14.99' + - requires: + cluster_features: "gte_v8.15.0" reason: _ignored is returned only from 8.15 on - do: @@ -56,8 +56,8 @@ fetch fields: --- fetch source: - - skip: - version: ' - 8.14.99' + - requires: + cluster_features: "gte_v8.15.0" reason: _ignored is returned only from 8.15 on - do: @@ -87,8 +87,8 @@ fetch source: --- fetch nested source: - - skip: - version: ' - 8.14.99' + - requires: + cluster_features: "gte_v8.15.0" reason: _ignored is returned only from 8.15 on - do: @@ -156,8 +156,8 @@ fetch nested source: --- disabling stored fields removes fetch sub phases: - - skip: - version: ' - 7.15.99' + - requires: + cluster_features: "gte_v7.16.0" reason: fetch profiling implemented in 7.16.0 - do: @@ -173,8 +173,8 @@ disabling stored fields removes fetch sub phases: --- dfs knn vector 
profiling: - - skip: - version: ' - 8.6.99' + - requires: + cluster_features: "gte_v8.7.0" reason: multi-knn dfs profiling implemented in 8.7.0 - do: @@ -237,8 +237,8 @@ dfs knn vector profiling: --- dfs knn vector profiling with vector_operations_count: - - skip: - version: ' - 8.11.99' + - requires: + cluster_features: "gte_v8.12.0" reason: vector_operations_count in dfs profiling added in 8.12.0 - do: @@ -303,8 +303,8 @@ dfs knn vector profiling with vector_operations_count: --- dfs profile for search with dfs_query_then_fetch: - - skip: - version: ' - 8.5.99' + - requires: + cluster_features: "gte_v8.6.0" reason: dfs profiling implemented in 8.6.0 - do: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/380_sort_segments_on_timestamp.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/380_sort_segments_on_timestamp.yml index 34852a7b49624..4795b2096cfa0 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/380_sort_segments_on_timestamp.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/380_sort_segments_on_timestamp.yml @@ -111,10 +111,10 @@ --- "Test if segments are missing @timestamp field we don't get errors": - - skip: - version: "- 7.99.99" + - requires: + cluster_features: "gte_v8.0.0" reason: "sorting segments was added in 7.16" - features: allowed_warnings + test_runner_features: "allowed_warnings" - do: indices.create: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/510_fragment_trimming_fix.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/510_fragment_trimming_fix.yml index 355ffeebfb1d3..4c1adc3c6c528 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/510_fragment_trimming_fix.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/510_fragment_trimming_fix.yml @@ -1,6 +1,6 @@ setup: - - skip: - version: ' - 8.13.99' + - requires: + 
cluster_features: "gte_v8.14.0" reason: 'no trimming highlight snippets when number_of_fragments is 0 was introduced in 8.14' - do: indices.create: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/520_fetch_fields.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/520_fetch_fields.yml index d5f8eb4b0762d..2b309f502f0c2 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/520_fetch_fields.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/520_fetch_fields.yml @@ -144,8 +144,7 @@ fetch _seq_no via fields: --- fetch fields with none stored_fields: - skip: - version: "all" - reason: "AwaitsFix https://github.com/elastic/elasticsearch/issues/107466" + awaits_fix: "https://github.com/elastic/elasticsearch/issues/107466" - do: catch: "bad_request" diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/simulate.ingest/10_basic.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/simulate.ingest/10_basic.yml index 52e80887f6b95..d4cf3ade2aa4e 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/simulate.ingest/10_basic.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/simulate.ingest/10_basic.yml @@ -1,6 +1,6 @@ setup: - - skip: - version: ' - 8.11.99' + - requires: + cluster_features: "gte_v8.12.0" reason: 'ingest simulate added in 8.12' --- diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/05_dimension_and_metric_in_non_tsdb_index.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/05_dimension_and_metric_in_non_tsdb_index.yml index 3af4c1ff90394..322148f4e82ec 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/05_dimension_and_metric_in_non_tsdb_index.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/05_dimension_and_metric_in_non_tsdb_index.yml @@ -1,9 +1,10 @@ --- setup: - skip: - version: "8.7.00
- 8.9.99" + known_issues: + - cluster_feature: "gte_v8.7.0" + fixed_by: "gte_v8.10.0" reason: "Synthetic source shows up in the mapping in 8.10 and on, may trigger assert failures in mixed cluster tests" - --- add time series mappings: - requires: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/100_composite.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/100_composite.yml index 920111fafb07b..c5fe17b251d84 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/100_composite.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/100_composite.yml @@ -1,8 +1,13 @@ --- setup: + - requires: + cluster_features: "gte_v8.2.0" + reason: "tsdb indexing changed in 8.2.0" - skip: - version: " - 8.1.99,8.7.00 - 8.9.99" - reason: "tsdb indexing changed in 8.2.0, synthetic source shows up in the mapping in 8.10 and on, may trigger assert failures in mixed cluster tests" + known_issues: + - cluster_feature: "gte_v8.7.0" + fixed_by: "gte_v8.10.0" + reason: "Synthetic source shows up in the mapping in 8.10 and on, may trigger assert failures in mixed cluster tests" - do: indices.create: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/10_settings.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/10_settings.yml index 7efb5f5e56926..485b5b1796ec4 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/10_settings.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/10_settings.yml @@ -1,7 +1,9 @@ --- setup: - skip: - version: "8.7.00 - 8.9.99" + known_issues: + - cluster_feature: "gte_v8.7.0" + fixed_by: "gte_v8.10.0" reason: "Synthetic source shows up in the mapping in 8.10 and on, may trigger assert failures in mixed cluster tests" --- diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/110_field_caps.yml 
b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/110_field_caps.yml index 4192bdf0cf2fb..5dbd0682947c2 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/110_field_caps.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/110_field_caps.yml @@ -1,8 +1,13 @@ --- setup: + - requires: + cluster_features: "gte_v8.5.0" + reason: "metric params only on time series indexes introduced in 8.5.0" - skip: - version: " - 8.4.99,8.7.00 - 8.9.99" - reason: "metric params only on time series indexes introduced in 8.5.0, synthetic source shows up in the mapping in 8.10 and on, may trigger assert failures in mixed cluster tests" + known_issues: + - cluster_feature: "gte_v8.7.0" + fixed_by: "gte_v8.10.0" + reason: "Synthetic source shows up in the mapping in 8.10 and on, may trigger assert failures in mixed cluster tests" - do: indices.create: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/15_timestamp_mapping.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/15_timestamp_mapping.yml index 9f9d59317454b..5b90dcb705dba 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/15_timestamp_mapping.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/15_timestamp_mapping.yml @@ -1,7 +1,9 @@ --- setup: - skip: - version: "8.7.00 - 8.9.99" + known_issues: + - cluster_feature: "gte_v8.7.0" + fixed_by: "gte_v8.10.0" reason: "Synthetic source shows up in the mapping in 8.10 and on, may trigger assert failures in mixed cluster tests" --- diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/20_mapping.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/20_mapping.yml index b710f6b313da0..ade153d284548 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/20_mapping.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/20_mapping.yml @@ -1,7 +1,9 
@@ --- setup: - skip: - version: "8.7.00 - 8.9.99" + known_issues: + - cluster_feature: "gte_v8.7.0" + fixed_by: "gte_v8.10.0" reason: "Synthetic source shows up in the mapping in 8.10 and on, may trigger assert failures in mixed cluster tests" --- diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/25_id_generation.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/25_id_generation.yml index 621906820e4ad..973832cf3ca73 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/25_id_generation.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/25_id_generation.yml @@ -1,7 +1,7 @@ --- setup: - - skip: - version: "- 8.13.99" + - requires: + cluster_features: "gte_v8.14.0" reason: _tsid hashing introduced in 8.13 and tsid routing changed in 8.14 - do: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/40_search.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/40_search.yml index 962926ca81fad..3c76653960386 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/40_search.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/40_search.yml @@ -1,8 +1,13 @@ --- setup: + - requires: + cluster_features: "gte_v8.2.0" + reason: "_tsid hashing introduced in 8.13" - skip: - version: " - 8.1.99,8.7.00 - 8.12.99" - reason: _tsid hashing introduced in 8.13 + known_issues: + - cluster_feature: "gte_v8.7.0" + fixed_by: "gte_v8.13.0" + reason: "_tsid hashing introduced in 8.13" - do: indices.create: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/50_alias.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/50_alias.yml index 5c5dc02ad4d09..9b1783b852a9f 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/50_alias.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/50_alias.yml @@ -1,8 +1,13 @@ --- setup: + 
- requires: + cluster_features: "gte_v8.2.0" + reason: "tsdb indexing changed in 8.2.0" - skip: - version: " - 8.1.99,8.7.00 - 8.9.99" - reason: "tsdb indexing changed in 8.2.0, synthetic source shows up in the mapping in 8.10 and on, may trigger assert failures in mixed cluster tests" + known_issues: + - cluster_feature: "gte_v8.7.0" + fixed_by: "gte_v8.10.0" + reason: "Synthetic source shows up in the mapping in 8.10 and on, may trigger assert failures in mixed cluster tests" - do: indices.create: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/80_index_resize.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/80_index_resize.yml index 12b29f68050bd..c32d3c50b0784 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/80_index_resize.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/80_index_resize.yml @@ -1,9 +1,14 @@ --- setup: + - requires: + cluster_features: "gte_v8.2.0" + reason: "tsdb indexing changed in 8.2.0" + test_runner_features: "arbitrary_key" - skip: - version: " - 8.1.99,8.7.00 - 8.12.99" - reason: _tsid hashing introduced in 8.13 - features: "arbitrary_key" + known_issues: + - cluster_feature: "gte_v8.7.0" + fixed_by: "gte_v8.13.0" + reason: "_tsid hashing introduced in 8.13" # Force allocating all shards to a single node so that we can shrink later. 
# In production you can move the shards to the single node after they've been diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/90_unsupported_operations.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/90_unsupported_operations.yml index 5f1368abcf436..976ac8f08f795 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/90_unsupported_operations.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/90_unsupported_operations.yml @@ -1,8 +1,13 @@ --- setup: + - requires: + cluster_features: "gte_v8.2.0" + reason: "tsdb indexing changed in 8.2.0" - skip: - version: " - 8.1.99,8.7.00 - 8.9.99" - reason: "tsdb indexing changed in 8.2.0, synthetic source shows up in the mapping in 8.10 and on, may trigger assert failures in mixed cluster tests" + known_issues: + - cluster_feature: "gte_v8.7.0" + fixed_by: "gte_v8.10.0" + reason: "Synthetic source shows up in the mapping in 8.10 and on, may trigger assert failures in mixed cluster tests" - do: indices.create: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/update/85_fields_meta.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/update/85_fields_meta.yml index d9a0f65f36170..81be6f82d8a14 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/update/85_fields_meta.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/update/85_fields_meta.yml @@ -2,8 +2,7 @@ "Metadata Fields": - skip: - version: "all" - reason: "Update doesn't return metadata fields, waiting for #3259" + awaits_fix: "Update doesn't return metadata fields, waiting for #3259" - do: indices.create: diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestFeatureService.java b/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestFeatureService.java index c9c39b206ada8..0e761622f4d5f 100644 --- 
a/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestFeatureService.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestFeatureService.java @@ -81,15 +81,16 @@ public boolean clusterHasFeature(String featureId) { Matcher matcher = VERSION_FEATURE_PATTERN.matcher(featureId); if (matcher.matches()) { Version extractedVersion = Version.fromString(matcher.group(1)); - if (Version.V_8_14_0.before(extractedVersion)) { + if (Version.V_8_15_0.before(extractedVersion)) { // As of version 8.14.0 REST tests have been migrated to use features only. - // For migration purposes we provide a synthetic version feature gte_vX.Y.Z for any version at or before 8.14.0. + // For migration purposes we provide a synthetic version feature gte_vX.Y.Z for any version at or before 8.15.0 + // allowing for some transition period. throw new IllegalArgumentException( Strings.format( "Synthetic version features are only available before [%s] for migration purposes! " + "Please add a cluster feature to an appropriate FeatureSpecification; features only necessary for " + "testing can be supplied via ESRestTestCase#createAdditionalFeatureSpecifications()", - Version.V_8_14_0 + Version.V_8_15_0 ) ); } diff --git a/x-pack/plugin/apm-data/src/yamlRestTest/resources/rest-api-spec/test/10_apm.yml b/x-pack/plugin/apm-data/src/yamlRestTest/resources/rest-api-spec/test/10_apm.yml index f4397ca18c101..4b45fda66835c 100644 --- a/x-pack/plugin/apm-data/src/yamlRestTest/resources/rest-api-spec/test/10_apm.yml +++ b/x-pack/plugin/apm-data/src/yamlRestTest/resources/rest-api-spec/test/10_apm.yml @@ -56,8 +56,7 @@ setup: --- "Test traces-apm-* data stream indexing": - skip: - version: "all" - reason: "AwaitsFix https://github.com/elastic/elasticsearch/issues/102360" + awaits_fix: "https://github.com/elastic/elasticsearch/issues/102360" - do: index: diff --git a/x-pack/plugin/rank-rrf/src/yamlRestTest/resources/rest-api-spec/test/license/100_license.yml 
b/x-pack/plugin/rank-rrf/src/yamlRestTest/resources/rest-api-spec/test/license/100_license.yml index c84c66f8aa31d..cd227eec4e227 100644 --- a/x-pack/plugin/rank-rrf/src/yamlRestTest/resources/rest-api-spec/test/license/100_license.yml +++ b/x-pack/plugin/rank-rrf/src/yamlRestTest/resources/rest-api-spec/test/license/100_license.yml @@ -1,8 +1,8 @@ setup: - - skip: - features: close_to - version: ' - 8.7.99' + - requires: + cluster_features: "gte_v8.8.0" reason: 'rank added in 8.8' + test_runner_features: "close_to" - do: indices.create: diff --git a/x-pack/plugin/rank-rrf/src/yamlRestTest/resources/rest-api-spec/test/rrf/100_rank_rrf.yml b/x-pack/plugin/rank-rrf/src/yamlRestTest/resources/rest-api-spec/test/rrf/100_rank_rrf.yml index c9eaa01616175..a4972d0557dab 100644 --- a/x-pack/plugin/rank-rrf/src/yamlRestTest/resources/rest-api-spec/test/rrf/100_rank_rrf.yml +++ b/x-pack/plugin/rank-rrf/src/yamlRestTest/resources/rest-api-spec/test/rrf/100_rank_rrf.yml @@ -1,6 +1,6 @@ setup: - - skip: - version: ' - 8.7.99' + - requires: + cluster_features: "gte_v8.8.0" reason: 'rank added in 8.8' - do: diff --git a/x-pack/plugin/rank-rrf/src/yamlRestTest/resources/rest-api-spec/test/rrf/150_rank_rrf_pagination.yml b/x-pack/plugin/rank-rrf/src/yamlRestTest/resources/rest-api-spec/test/rrf/150_rank_rrf_pagination.yml index 1c950be5bfbf9..575723853f0aa 100644 --- a/x-pack/plugin/rank-rrf/src/yamlRestTest/resources/rest-api-spec/test/rrf/150_rank_rrf_pagination.yml +++ b/x-pack/plugin/rank-rrf/src/yamlRestTest/resources/rest-api-spec/test/rrf/150_rank_rrf_pagination.yml @@ -1,6 +1,6 @@ setup: - - skip: - version: ' - 8.14.99' + - requires: + cluster_features: "gte_v8.15.0" reason: 'pagination for rrf was added in 8.15' - do: diff --git a/x-pack/plugin/rank-rrf/src/yamlRestTest/resources/rest-api-spec/test/rrf/200_rank_rrf_script.yml b/x-pack/plugin/rank-rrf/src/yamlRestTest/resources/rest-api-spec/test/rrf/200_rank_rrf_script.yml index 0583e6d7ae51a..76cedf44d3dbe 100644 --- 
a/x-pack/plugin/rank-rrf/src/yamlRestTest/resources/rest-api-spec/test/rrf/200_rank_rrf_script.yml +++ b/x-pack/plugin/rank-rrf/src/yamlRestTest/resources/rest-api-spec/test/rrf/200_rank_rrf_script.yml @@ -1,8 +1,8 @@ setup: - - skip: - features: close_to - version: ' - 8.7.99' + - requires: + cluster_features: "gte_v8.8.0" reason: 'rank added in 8.8' + test_runner_features: "close_to" - do: indices.create: diff --git a/x-pack/plugin/snapshot-repo-test-kit/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/10_analyze.yml b/x-pack/plugin/snapshot-repo-test-kit/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/10_analyze.yml index 648eb3766fffb..e5babad76eb05 100644 --- a/x-pack/plugin/snapshot-repo-test-kit/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/10_analyze.yml +++ b/x-pack/plugin/snapshot-repo-test-kit/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/10_analyze.yml @@ -29,8 +29,8 @@ setup: --- "Analysis fails on readonly repositories": - - skip: - version: "- 7.13.99" + - requires: + cluster_features: "gte_v7.14.0" reason: "abortWrites flag introduced in 7.14, and mixed-cluster support not required" - do: @@ -45,8 +45,8 @@ setup: --- "Analysis without details": - - skip: - version: "- 7.13.99" + - requires: + cluster_features: "gte_v7.14.0" reason: "abortWrites flag introduced in 7.14, and mixed-cluster support not required" - do: @@ -100,8 +100,8 @@ setup: --- "Analysis with details": - - skip: - version: "- 7.13.99" + - requires: + cluster_features: "gte_v7.14.0" reason: "abortWrites flag introduced in 7.14, and mixed-cluster support not required" - do: @@ -131,8 +131,8 @@ setup: --- "Analysis with ?human=false": - - skip: - version: "- 7.13.99" + - requires: + cluster_features: "gte_v7.14.0" reason: "abortWrites flag introduced in 7.14, and mixed-cluster support not required" - do: @@ -157,8 +157,8 @@ setup: --- "Timeout with large blobs": - - skip: - version: "- 7.13.99" + - requires: + cluster_features: "gte_v7.14.0" reason: 
"abortWrites flag introduced in 7.14, and mixed-cluster support not required" - do: diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/aggregate-metrics/10_basic.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/aggregate-metrics/10_basic.yml index 44758422ff415..2487235a2383e 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/aggregate-metrics/10_basic.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/aggregate-metrics/10_basic.yml @@ -293,8 +293,8 @@ --- "Test fields api": - - skip: - version: "- 8.3.99" + - requires: + cluster_features: "gte_v8.4.0" reason: "Breaking change introduced in 8.4.0" - do: indices.create: diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/constant_keyword/10_basic.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/constant_keyword/10_basic.yml index 861247350c9f5..485d2c1d99f47 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/constant_keyword/10_basic.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/constant_keyword/10_basic.yml @@ -417,10 +417,14 @@ setup: --- Cardinality agg: + - requires: + cluster_features: "gte_v7.7.0" + reason: "constant_keyword was added in 7.7" - skip: - version: " - 7.6.99, 8.9.00 - 8.10.99" - reason: "constant_keyword was added in 7.7, bug introduced in 8.9 and fixed in 8.11" - + known_issues: + - cluster_feature: "gte_v8.9.0" + fixed_by: "gte_v8.11.0" + reason: "bug introduced in 8.9 and fixed in 8.11" - do: indices.create: index: test3 diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/dlm/10_usage.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/dlm/10_usage.yml index 3033c83af8e33..7ad16faae2314 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/dlm/10_usage.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/dlm/10_usage.yml @@ -1,9 +1,9 @@ --- "Test data stream 
lifecycle usage stats": - - skip: - version: "- 8.10.99" + - requires: + cluster_features: "gte_v8.11.0" reason: "the data stream lifecycle stats were updated to the usage api in 8.11" - features: allowed_warnings + test_runner_features: "allowed_warnings" - do: xpack.usage: {} diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/110_insensitive_equals.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/110_insensitive_equals.yml index e505d11cbe137..ba5de9765db17 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/110_insensitive_equals.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/110_insensitive_equals.yml @@ -1,10 +1,9 @@ --- setup: - skip: - version: "all" - reason: "waiting for final decisions on supporting generic expressions on the right https://github.com/elastic/elasticsearch/issues/103599" - - features: allowed_warnings_regex + awaits_fix: "waiting for final decisions on supporting generic expressions on the right https://github.com/elastic/elasticsearch/issues/103599" + - requires: + test_runner_features: allowed_warnings_regex - do: indices.create: index: test diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/health/10_usage.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/health/10_usage.yml index 207b703677661..f576b318c719f 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/health/10_usage.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/health/10_usage.yml @@ -17,8 +17,8 @@ setup: feature: disk --- "Usage stats on the health API": - - skip: - version: "- 8.6.99" + - requires: + cluster_features: "gte_v8.7.0" reason: "the health api stats were only added to the usage api in 8.7" - do: diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/ml/inference_crud.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/ml/inference_crud.yml index 
28bdf22453c0a..4a1b2379888da 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/ml/inference_crud.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/ml/inference_crud.yml @@ -564,8 +564,7 @@ setup: --- "Test delete given model referenced by pipeline": - skip: - version: all - reason: "@AwaitsFix https://github.com/elastic/elasticsearch/issues/80703" + awaits_fix: "https://github.com/elastic/elasticsearch/issues/80703" - do: ingest.put_pipeline: @@ -594,8 +593,7 @@ setup: --- "Test force delete given model referenced by pipeline": - skip: - version: all - reason: "@AwaitsFix https://github.com/elastic/elasticsearch/issues/80703" + awaits_fix: "https://github.com/elastic/elasticsearch/issues/80703" - do: ingest.put_pipeline: @@ -625,8 +623,7 @@ setup: --- "Test delete given model with alias referenced by pipeline": - skip: - version: all - reason: "@AwaitsFix https://github.com/elastic/elasticsearch/issues/80703" + awaits_fix: "https://github.com/elastic/elasticsearch/issues/80703" - do: ml.put_trained_model_alias: @@ -659,8 +656,7 @@ setup: --- "Test force delete given model with alias referenced by pipeline": - skip: - version: all - reason: "@AwaitsFix https://github.com/elastic/elasticsearch/issues/106652" + awaits_fix: "https://github.com/elastic/elasticsearch/issues/106652" - do: ml.put_trained_model_alias: model_alias: "alias-to-a-classification-model" @@ -1117,8 +1113,7 @@ setup: --- "Test put with defer_definition_decompression with invalid definition and no memory estimate": - skip: - version: all - reason: "@AwaitsFix https://github.com/elastic/elasticsearch/issues/94854" + awaits_fix: "https://github.com/elastic/elasticsearch/issues/94854" - do: catch: /Model \[my-regression-model\] inference config type \[classification\] does not support definition target type \[regression\]/ diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/ml/learning_to_rank_rescorer.yml 
b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/ml/learning_to_rank_rescorer.yml index e307e72d2ca4f..dac7b48617a2f 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/ml/learning_to_rank_rescorer.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/ml/learning_to_rank_rescorer.yml @@ -135,8 +135,7 @@ setup: --- "Test rescore with stored model": - skip: - version: all - reason: "@AwaitsFix https://github.com/elastic/elasticsearch/issues/80703" + awaits_fix: "https://github.com/elastic/elasticsearch/issues/80703" - do: search: @@ -171,8 +170,7 @@ setup: --- "Test rescore with stored model and smaller window_size": - skip: - version: all - reason: "@AwaitsFix https://github.com/elastic/elasticsearch/issues/80703" + awaits_fix: "https://github.com/elastic/elasticsearch/issues/80703" - do: search: @@ -193,8 +191,7 @@ setup: --- "Test rescore with stored model and chained rescorers": - skip: - version: all - reason: "@AwaitsFix https://github.com/elastic/elasticsearch/issues/80703" + awaits_fix: "https://github.com/elastic/elasticsearch/issues/80703" - do: search: diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/ml/text_expansion_search_rank_features.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/ml/text_expansion_search_rank_features.yml index 28a6ad826bc64..7991566bfe818 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/ml/text_expansion_search_rank_features.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/ml/text_expansion_search_rank_features.yml @@ -1,10 +1,10 @@ # This test uses the simple model defined in # TextExpansionQueryIT.java to create the token weights. 
setup: - - skip: - version: ' - 8.10.99' + - requires: + cluster_features: "gte_v8.11.0" reason: "sparse_vector field type reintroduced in 8.11" - features: headers + test_runner_features: headers - do: headers: Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/ml/text_expansion_search_sparse_vector.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/ml/text_expansion_search_sparse_vector.yml index 5a31af18f8269..50a3fa7e22d58 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/ml/text_expansion_search_sparse_vector.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/ml/text_expansion_search_sparse_vector.yml @@ -1,10 +1,10 @@ # This test uses the simple model defined in # TextExpansionQueryIT.java to create the token weights. setup: - - skip: - features: headers - version: ' - 8.7.99' + - requires: + cluster_features: "gte_v8.8.0" reason: "text_expansion query introduced in 8.8" + test_runner_features: "headers" - do: headers: Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. 
the test setup superuser diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/profiling/10_basic.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/profiling/10_basic.yml index 9df3731a09941..2aee382890c56 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/profiling/10_basic.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/profiling/10_basic.yml @@ -220,8 +220,8 @@ teardown: --- "Test topN functions from profiling-events": - - skip: - version: "- 8.13.99" + - requires: + cluster_features: "gte_v8.14.0" reason: "the topN functions API was added in 8.14.0" - do: @@ -251,8 +251,8 @@ teardown: --- "Test topN functions from test-events": - - skip: - version: "- 8.13.99" + - requires: + cluster_features: "gte_v8.14.0" reason: "the topN functions API was added in 8.14.0" - do: diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/rollup/get_jobs.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/rollup/get_jobs.yml index cb81fe483c278..bd40e29d0b675 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/rollup/get_jobs.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/rollup/get_jobs.yml @@ -103,8 +103,7 @@ setup: "Test get all jobs": - skip: - version: all - reason: Job ordering isn't guaranteed right now, cannot test + awaits_fix: "Job ordering isn't guaranteed right now, cannot test" - do: headers: diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/search-business-rules/10_pinned_query.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/search-business-rules/10_pinned_query.yml index eaa5b8b42a840..b91b9053eac7e 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/search-business-rules/10_pinned_query.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/search-business-rules/10_pinned_query.yml @@ -127,8 +127,8 @@ setup: --- "Test pinned query with 
knn query": - - skip: - version: ' - 8.11.99' + - requires: + cluster_features: "gte_v8.12.0" reason: 'knn as query added in 8.12' - do: diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/spatial/20_geo_centroid.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/spatial/20_geo_centroid.yml index 317a26cbfef52..0408167cbb656 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/spatial/20_geo_centroid.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/spatial/20_geo_centroid.yml @@ -210,8 +210,7 @@ setup: --- "Test geo_centroid aggregation on geo_shape shapes with grouping": - skip: - version: "all" - reason: "Awaits fix: https://github.com/elastic/elasticsearch/issues/95147" + awaits_fix: "https://github.com/elastic/elasticsearch/issues/95147" - do: search: rest_total_hits_as_int: true diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/transform/transforms_cat_apis.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/transform/transforms_cat_apis.yml index 640f5af7b58c7..109e002f0aaa3 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/transform/transforms_cat_apis.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/transform/transforms_cat_apis.yml @@ -141,8 +141,7 @@ teardown: --- "Test cat transform stats with batch transform": - skip: - version: "all" - reason: "Awaits fix: https://github.com/elastic/elasticsearch/issues/68350" + awaits_fix: "https://github.com/elastic/elasticsearch/issues/68350" - do: transform.put_transform: transform_id: "airline-transform-batch" diff --git a/x-pack/plugin/watcher/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/painless/40_exception.yml b/x-pack/plugin/watcher/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/painless/40_exception.yml index 7ecdc02eacd32..702b5eaafdba2 100644 --- 
a/x-pack/plugin/watcher/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/painless/40_exception.yml +++ b/x-pack/plugin/watcher/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/painless/40_exception.yml @@ -38,8 +38,10 @@ --- "Test painless exceptions are returned when logging a broken response": - skip: - version: "8.7.0 - 8.7.1" - reason: "self-referencing objects were in Painless instead of Mustache in 8.7.0 to 8.7.1" + known_issues: + - cluster_feature: "gte_v8.7.0" + fixed_by: "gte_v8.7.2" + reason: "self-referencing objects were in Painless instead of Mustache in 8.7.0 to 8.7.1" - do: cluster.health: diff --git a/x-pack/plugin/watcher/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/watcher/put_watch/92_put_watch_with_indices_options.yml b/x-pack/plugin/watcher/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/watcher/put_watch/92_put_watch_with_indices_options.yml index eed10bdc179d4..d4964997f8c91 100644 --- a/x-pack/plugin/watcher/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/watcher/put_watch/92_put_watch_with_indices_options.yml +++ b/x-pack/plugin/watcher/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/watcher/put_watch/92_put_watch_with_indices_options.yml @@ -6,9 +6,12 @@ setup: --- "Test put watch with allow no indices": + - requires: + test_runner_features: ["warnings"] - skip: - features: ["warnings"] - version: "7.10.1 - 7.10.2" + known_issues: + - cluster_feature: "gte_v7.10.1" + fixed_by: "gte_v7.10.3" reason: "watch parsing with partial indices options was broken in 7.10.1 and 7.10.2" - do: watcher.put_watch: @@ -63,9 +66,12 @@ setup: --- "Test put watch with expand wildcards": + - requires: + test_runner_features: ["warnings"] - skip: - features: ["warnings"] - version: "7.10.1 - 7.10.2" + known_issues: + - cluster_feature: "gte_v7.10.1" + fixed_by: "gte_v7.10.3" reason: "watch parsing with partial indices options was broken in 7.10.1 and 7.10.2" - do: watcher.put_watch: @@ -120,9 +126,12 @@ setup: --- "Test put 
watch with ignore unavailable": + - requires: + test_runner_features: ["warnings"] - skip: - features: ["warnings"] - version: "7.10.1 - 7.10.2" + known_issues: + - cluster_feature: "gte_v7.10.1" + fixed_by: "gte_v7.10.3" reason: "watch parsing with partial indices options was broken in 7.10.1 and 7.10.2" - do: watcher.put_watch: diff --git a/x-pack/plugin/watcher/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/watcher/usage/10_basic.yml b/x-pack/plugin/watcher/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/watcher/usage/10_basic.yml index 17031abf39e02..e37e78ab772ca 100644 --- a/x-pack/plugin/watcher/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/watcher/usage/10_basic.yml +++ b/x-pack/plugin/watcher/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/watcher/usage/10_basic.yml @@ -1,8 +1,7 @@ --- "Test watcher usage stats output": - skip: - version: "all" - reason: "AwaitsFix https://github.com/elastic/elasticsearch/issues/65547" + awaits_fix: "https://github.com/elastic/elasticsearch/issues/65547" - do: catch: missing watcher.delete_watch: diff --git a/x-pack/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/mixed_cluster/40_ml_datafeed_crud.yml b/x-pack/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/mixed_cluster/40_ml_datafeed_crud.yml index 0330b12663a41..079714ea3a886 100644 --- a/x-pack/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/mixed_cluster/40_ml_datafeed_crud.yml +++ b/x-pack/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/mixed_cluster/40_ml_datafeed_crud.yml @@ -17,11 +17,11 @@ --- "Test old cluster datafeed with aggs": + - requires: + test_runner_features: "warnings" - skip: - features: warnings #TODO remove skip when master is bumped to 9.0.0 - version: "all" - reason: "If we hit the old node we get a warning. If we hit the new node, we don't" + awaits_fix: "If we hit the old node we get a warning. 
If we hit the new node, we don't" - do: warnings: - '[interval] on [date_histogram] is deprecated, use [fixed_interval] or [calendar_interval] in the future.' From ee85f74e55b3c76304d87de812dfa28527de28e4 Mon Sep 17 00:00:00 2001 From: Andrew Wilkins Date: Tue, 7 May 2024 16:07:06 +0800 Subject: [PATCH 019/117] apm-data: increase version for templates (#108340) * apm-data: bump version of resources * Update docs/changelog/108340.yaml --- docs/changelog/108340.yaml | 5 +++++ x-pack/plugin/apm-data/src/main/resources/resources.yaml | 2 +- 2 files changed, 6 insertions(+), 1 deletion(-) create mode 100644 docs/changelog/108340.yaml diff --git a/docs/changelog/108340.yaml b/docs/changelog/108340.yaml new file mode 100644 index 0000000000000..fb2ea72c0a0f5 --- /dev/null +++ b/docs/changelog/108340.yaml @@ -0,0 +1,5 @@ +pr: 108340 +summary: "Apm-data: increase version for templates" +area: Data streams +type: enhancement +issues: [] diff --git a/x-pack/plugin/apm-data/src/main/resources/resources.yaml b/x-pack/plugin/apm-data/src/main/resources/resources.yaml index 71b54ae6297db..2f2025c37f70f 100644 --- a/x-pack/plugin/apm-data/src/main/resources/resources.yaml +++ b/x-pack/plugin/apm-data/src/main/resources/resources.yaml @@ -1,7 +1,7 @@ # "version" holds the version of the templates and ingest pipelines installed # by xpack-plugin apm-data. This must be increased whenever an existing template or # pipeline is changed, in order for it to be updated on Elasticsearch upgrade. -version: 1 +version: 2 component-templates: # Data lifecycle. From eba6a848dfde0332dec29278dc5b383062253cd1 Mon Sep 17 00:00:00 2001 From: Yang Wang Date: Tue, 7 May 2024 18:09:16 +1000 Subject: [PATCH 020/117] Unmute Azure 3rd party tests (#108336) Unmute Azure 3rd party tests (again) after re-generating credentials following the updated guide. 
Relates: #107928 Fixes: #107720 Fixes: #107502 --- .../azure/AzureStorageCleanupThirdPartyTests.java | 6 ------ .../blobstore/testkit/AzureSnapshotRepoTestKitIT.java | 1 - 2 files changed, 7 deletions(-) diff --git a/modules/repository-azure/src/internalClusterTest/java/org/elasticsearch/repositories/azure/AzureStorageCleanupThirdPartyTests.java b/modules/repository-azure/src/internalClusterTest/java/org/elasticsearch/repositories/azure/AzureStorageCleanupThirdPartyTests.java index 9ee714c3c4d59..d9ab689c05a5c 100644 --- a/modules/repository-azure/src/internalClusterTest/java/org/elasticsearch/repositories/azure/AzureStorageCleanupThirdPartyTests.java +++ b/modules/repository-azure/src/internalClusterTest/java/org/elasticsearch/repositories/azure/AzureStorageCleanupThirdPartyTests.java @@ -50,31 +50,26 @@ public class AzureStorageCleanupThirdPartyTests extends AbstractThirdPartyReposi System.getProperty("test.azure.container") ); - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/107720") @Override public void testCreateSnapshot() { super.testCreateSnapshot(); } - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/107720") @Override public void testIndexLatest() throws Exception { super.testIndexLatest(); } - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/107720") @Override public void testListChildren() { super.testListChildren(); } - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/107720") @Override public void testCleanup() throws Exception { super.testCleanup(); } - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/107720") @Override public void testReadFromPositionWithLength() { super.testReadFromPositionWithLength(); @@ -162,7 +157,6 @@ private void ensureSasTokenPermissions() { future.actionGet(); } - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/107720") public void testMultiBlockUpload() throws Exception { final BlobStoreRepository 
repo = getRepository(); // The configured threshold for this test suite is 1mb diff --git a/x-pack/plugin/snapshot-repo-test-kit/qa/azure/src/javaRestTest/java/org/elasticsearch/repositories/blobstore/testkit/AzureSnapshotRepoTestKitIT.java b/x-pack/plugin/snapshot-repo-test-kit/qa/azure/src/javaRestTest/java/org/elasticsearch/repositories/blobstore/testkit/AzureSnapshotRepoTestKitIT.java index 5638450bfec75..31c9639facd93 100644 --- a/x-pack/plugin/snapshot-repo-test-kit/qa/azure/src/javaRestTest/java/org/elasticsearch/repositories/blobstore/testkit/AzureSnapshotRepoTestKitIT.java +++ b/x-pack/plugin/snapshot-repo-test-kit/qa/azure/src/javaRestTest/java/org/elasticsearch/repositories/blobstore/testkit/AzureSnapshotRepoTestKitIT.java @@ -78,7 +78,6 @@ protected Settings repositorySettings() { return Settings.builder().put("client", "repository_test_kit").put("container", container).put("base_path", basePath).build(); } - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/107502") @Override public void testRepositoryAnalysis() throws Exception { super.testRepositoryAnalysis(); From fa196f72c14cb5379d190f8b7e0439af0ff81bd2 Mon Sep 17 00:00:00 2001 From: Moritz Mack Date: Tue, 7 May 2024 10:42:52 +0200 Subject: [PATCH 021/117] Extended feature validations in REST tests and improved wording. 
(#108292) --- .../features/FeatureSpecification.java | 4 ++++ .../test/rest/ESRestTestCase.java | 10 +++++++-- .../test/rest/ESRestTestFeatureService.java | 22 +++++++++++-------- 3 files changed, 25 insertions(+), 11 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/features/FeatureSpecification.java b/server/src/main/java/org/elasticsearch/features/FeatureSpecification.java index 817ccde4bad2e..4410ce69ddf54 100644 --- a/server/src/main/java/org/elasticsearch/features/FeatureSpecification.java +++ b/server/src/main/java/org/elasticsearch/features/FeatureSpecification.java @@ -26,6 +26,10 @@ * All feature checks should be done through {@code FeatureService} to ensure that Elasticsearch's * guarantees on the introduction of new functionality are followed; * that is, new functionality is not enabled until all nodes in the cluster support it. + *

+ * Note: {@link FeatureSpecification}s are loaded as service providers, however tests are not fully modularized yet. + * Make sure to also register new specifications in {@code META-INF/services/org.elasticsearch.features.FeatureSpecification}, + * so they are available in tests as well. */ public interface FeatureSpecification { /** diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java index 8997844bdbba1..fd3ba7d864f99 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java @@ -350,7 +350,13 @@ public void initClient() throws IOException { assert nodesVersions != null; } - protected List createAdditionalFeatureSpecifications() { + /** + * Override to provide additional test-only historical features. + * + * Note: This extension point cannot be used to add cluster features. The provided {@link FeatureSpecification}s + * must contain only historical features, otherwise an assertion error is thrown. 
+ */ + protected List additionalTestOnlyHistoricalFeatures() { return List.of(); } @@ -368,7 +374,7 @@ protected final TestFeatureService createTestFeatureService( ); } return new ESRestTestFeatureService( - createAdditionalFeatureSpecifications(), + additionalTestOnlyHistoricalFeatures(), semanticNodeVersions, ClusterFeatures.calculateAllNodeFeatures(clusterStateFeatures.values()) ); diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestFeatureService.java b/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestFeatureService.java index 0e761622f4d5f..78a4126ec09db 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestFeatureService.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestFeatureService.java @@ -58,10 +58,15 @@ class ESRestTestFeatureService implements TestFeatureService { if (MetadataHolder.HISTORICAL_FEATURES != null) { specs.add(MetadataHolder.HISTORICAL_FEATURES); } - var historicalFeatures = FeatureData.createFromSpecifications(specs).getHistoricalFeatures(); - this.knownHistoricalFeatureNames = historicalFeatures.lastEntry().getValue(); + FeatureData featureData = FeatureData.createFromSpecifications(specs); + assert featureData.getNodeFeatures().isEmpty() + : Strings.format( + "Only historical features can be injected via ESRestTestCase#additionalTestOnlyHistoricalFeatures(), rejecting %s", + featureData.getNodeFeatures().keySet() + ); + this.knownHistoricalFeatureNames = featureData.getHistoricalFeatures().lastEntry().getValue(); this.version = nodeVersions.stream().min(Comparator.naturalOrder()).orElse(Version.CURRENT); - this.allSupportedFeatures = Sets.union(clusterStateFeatures, historicalFeatures.floorEntry(version).getValue()); + this.allSupportedFeatures = Sets.union(clusterStateFeatures, featureData.getHistoricalFeatures().floorEntry(version).getValue()); } public static boolean hasFeatureMetadata() { @@ -88,8 +93,8 @@ public boolean 
clusterHasFeature(String featureId) { throw new IllegalArgumentException( Strings.format( "Synthetic version features are only available before [%s] for migration purposes! " - + "Please add a cluster feature to an appropriate FeatureSpecification; features only necessary for " - + "testing can be supplied via ESRestTestCase#createAdditionalFeatureSpecifications()", + + "Please add a cluster feature to an appropriate FeatureSpecification; test-only historical-features " + + "can be supplied via ESRestTestCase#additionalTestOnlyHistoricalFeatures()", Version.V_8_15_0 ) ); @@ -100,10 +105,9 @@ public boolean clusterHasFeature(String featureId) { if (hasFeatureMetadata()) { throw new IllegalArgumentException( Strings.format( - "Unknown feature %s: check the feature has been added to the correct FeatureSpecification in the relevant module or, " - + "if this is a legacy feature used only in tests, to a test-only FeatureSpecification such as %s.", - featureId, - RestTestLegacyFeatures.class.getCanonicalName() + "Unknown feature %s: check the respective FeatureSpecification is provided both in module-info.java " + + "as well as in META-INF/services and verify the module is loaded during tests.", + featureId ) ); } From 26db24317defcfce13acf99a3921b7ef4259eb70 Mon Sep 17 00:00:00 2001 From: David Turner Date: Tue, 7 May 2024 10:52:51 +0100 Subject: [PATCH 022/117] Revert "Cluster state role mapper file settings service (#107886)" (#108346) This reverts commit 391136c0899ca0a4f8874b89cd05d5ea44987497. 
--- docs/changelog/107886.yaml | 5 - .../rolemapping/PutRoleMappingRequest.java | 12 + .../PutRoleMappingRequestBuilder.java | 7 +- .../RoleMappingFileSettingsIT.java | 292 +++++++----------- .../FileSettingsRoleMappingsStartupIT.java | 148 +++++++++ .../xpack/security/Security.java | 3 +- .../ReservedRoleMappingAction.java | 136 ++++++-- .../TransportDeleteRoleMappingAction.java | 32 +- .../TransportPutRoleMappingAction.java | 28 +- .../rolemapping/RestPutRoleMappingAction.java | 21 +- ...dUnstableSecurityStateHandlerProvider.java | 28 ++ .../security/UnstableLocalStateSecurity.java | 97 ++++++ .../ReservedRoleMappingActionTests.java | 152 ++++++++- ...TransportDeleteRoleMappingActionTests.java | 45 +++ .../TransportPutRoleMappingActionTests.java | 41 ++- ...dstate.ReservedClusterStateHandlerProvider | 1 + .../ldap/AbstractAdLdapRealmTestCase.java | 18 +- 17 files changed, 798 insertions(+), 268 deletions(-) delete mode 100644 docs/changelog/107886.yaml create mode 100644 x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/FileSettingsRoleMappingsStartupIT.java create mode 100644 x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/LocalReservedUnstableSecurityStateHandlerProvider.java create mode 100644 x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/UnstableLocalStateSecurity.java create mode 100644 x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/rolemapping/TransportDeleteRoleMappingActionTests.java diff --git a/docs/changelog/107886.yaml b/docs/changelog/107886.yaml deleted file mode 100644 index a328bc2a2a208..0000000000000 --- a/docs/changelog/107886.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 107886 -summary: Cluster state role mapper file settings service -area: Authorization -type: enhancement -issues: [] diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/rolemapping/PutRoleMappingRequest.java 
b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/rolemapping/PutRoleMappingRequest.java index f85ca260c3fff..039ed8aa5fb64 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/rolemapping/PutRoleMappingRequest.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/rolemapping/PutRoleMappingRequest.java @@ -166,4 +166,16 @@ public void writeTo(StreamOutput out) throws IOException { public ExpressionRoleMapping getMapping() { return new ExpressionRoleMapping(name, rules, roles, roleTemplates, metadata, enabled); } + + public static PutRoleMappingRequest fromMapping(ExpressionRoleMapping mapping) { + var request = new PutRoleMappingRequest(); + request.setName(mapping.getName()); + request.setEnabled(mapping.isEnabled()); + request.setRoles(mapping.getRoles()); + request.setRoleTemplates(mapping.getRoleTemplates()); + request.setRules(mapping.getExpression()); + request.setMetadata(mapping.getMetadata()); + + return request; + } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/rolemapping/PutRoleMappingRequestBuilder.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/rolemapping/PutRoleMappingRequestBuilder.java index d46c21f080308..88a930063190b 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/rolemapping/PutRoleMappingRequestBuilder.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/rolemapping/PutRoleMappingRequestBuilder.java @@ -9,7 +9,8 @@ import org.elasticsearch.action.ActionRequestBuilder; import org.elasticsearch.action.support.WriteRequestBuilder; import org.elasticsearch.client.internal.ElasticsearchClient; -import org.elasticsearch.xcontent.XContentParser; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.xcontent.XContentType; import 
org.elasticsearch.xpack.core.security.authc.support.mapper.ExpressionRoleMapping; import org.elasticsearch.xpack.core.security.authc.support.mapper.TemplateRoleName; import org.elasticsearch.xpack.core.security.authc.support.mapper.expressiondsl.RoleMapperExpression; @@ -34,8 +35,8 @@ public PutRoleMappingRequestBuilder(ElasticsearchClient client) { /** * Populate the put role request from the source and the role's name */ - public PutRoleMappingRequestBuilder source(String name, XContentParser parser) throws IOException { - ExpressionRoleMapping mapping = ExpressionRoleMapping.parse(name, parser); + public PutRoleMappingRequestBuilder source(String name, BytesReference source, XContentType xContentType) throws IOException { + ExpressionRoleMapping mapping = ExpressionRoleMapping.parse(name, source, xContentType); request.setName(name); request.setEnabled(mapping.isEnabled()); request.setRoles(mapping.getRoles()); diff --git a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/RoleMappingFileSettingsIT.java b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/RoleMappingFileSettingsIT.java index 286a9cb736b1b..7c753692628cb 100644 --- a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/RoleMappingFileSettingsIT.java +++ b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/RoleMappingFileSettingsIT.java @@ -7,13 +7,11 @@ package org.elasticsearch.integration; -import org.apache.logging.log4j.Logger; import org.elasticsearch.action.admin.cluster.settings.ClusterUpdateSettingsRequest; import org.elasticsearch.action.admin.cluster.state.ClusterStateRequest; import org.elasticsearch.action.admin.cluster.state.ClusterStateResponse; import org.elasticsearch.action.admin.indices.close.CloseIndexRequest; -import org.elasticsearch.action.admin.indices.open.OpenIndexRequest; -import org.elasticsearch.action.support.PlainActionFuture; +import 
org.elasticsearch.action.admin.indices.close.CloseIndexResponse; import org.elasticsearch.cluster.ClusterChangedEvent; import org.elasticsearch.cluster.ClusterStateListener; import org.elasticsearch.cluster.metadata.ReservedStateErrorMetadata; @@ -27,15 +25,10 @@ import org.elasticsearch.reservedstate.service.FileSettingsService; import org.elasticsearch.test.NativeRealmIntegTestCase; import org.elasticsearch.xcontent.XContentParserConfiguration; -import org.elasticsearch.xpack.core.security.action.rolemapping.DeleteRoleMappingAction; -import org.elasticsearch.xpack.core.security.action.rolemapping.DeleteRoleMappingRequest; import org.elasticsearch.xpack.core.security.action.rolemapping.GetRoleMappingsAction; import org.elasticsearch.xpack.core.security.action.rolemapping.GetRoleMappingsRequest; import org.elasticsearch.xpack.core.security.action.rolemapping.PutRoleMappingAction; import org.elasticsearch.xpack.core.security.action.rolemapping.PutRoleMappingRequest; -import org.elasticsearch.xpack.core.security.action.rolemapping.PutRoleMappingRequestBuilder; -import org.elasticsearch.xpack.core.security.authc.RealmConfig; -import org.elasticsearch.xpack.core.security.authc.support.UserRoleMapper; import org.elasticsearch.xpack.core.security.authc.support.mapper.ExpressionRoleMapping; import org.elasticsearch.xpack.security.action.rolemapping.ReservedRoleMappingAction; import org.junit.After; @@ -46,31 +39,25 @@ import java.nio.file.Path; import java.nio.file.StandardCopyOption; import java.util.Arrays; -import java.util.List; -import java.util.Map; -import java.util.Set; import java.util.concurrent.CountDownLatch; import java.util.concurrent.ExecutionException; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicLong; -import java.util.function.Consumer; +import java.util.stream.Collectors; import static org.elasticsearch.indices.recovery.RecoverySettings.INDICES_RECOVERY_MAX_BYTES_PER_SEC_SETTING; import static 
org.elasticsearch.xcontent.XContentType.JSON; import static org.elasticsearch.xpack.core.security.test.TestRestrictedIndices.INTERNAL_SECURITY_MAIN_INDEX_7; import static org.hamcrest.Matchers.allOf; -import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.containsString; -import static org.hamcrest.Matchers.empty; -import static org.hamcrest.Matchers.emptyArray; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.notNullValue; -import static org.mockito.Mockito.mock; /** - * Tests that file settings service can properly add role mappings. + * Tests that file settings service can properly add role mappings and detect REST clashes + * with the reserved role mappings. */ public class RoleMappingFileSettingsIT extends NativeRealmIntegTestCase { @@ -148,21 +135,12 @@ public class RoleMappingFileSettingsIT extends NativeRealmIntegTestCase { } }"""; - @Override - protected Settings nodeSettings(int nodeOrdinal, Settings otherSettings) { - Settings.Builder builder = Settings.builder() - .put(super.nodeSettings(nodeOrdinal, otherSettings)) - // some tests make use of cluster-state based role mappings - .put("xpack.security.authc.cluster_state_role_mappings.enabled", true); - return builder.build(); - } - @After public void cleanUp() { updateClusterSettings(Settings.builder().putNull("indices.recovery.max_bytes_per_sec")); } - public static void writeJSONFile(String node, String json, Logger logger, AtomicLong versionCounter) throws Exception { + private void writeJSONFile(String node, String json) throws Exception { long version = versionCounter.incrementAndGet(); FileSettingsService fileSettingsService = internalCluster().getInstance(FileSettingsService.class, node); @@ -173,11 +151,10 @@ public static void writeJSONFile(String node, String json, Logger logger, Atomic 
Files.createDirectories(fileSettingsService.watchedFileDir()); Path tempFilePath = createTempFile(); - logger.info("--> before writing JSON config to node {} with path {}", node, tempFilePath); + logger.info("--> writing JSON config to node {} with path {}", node, tempFilePath); logger.info(Strings.format(json, version)); Files.write(tempFilePath, Strings.format(json, version).getBytes(StandardCharsets.UTF_8)); Files.move(tempFilePath, fileSettingsService.watchedFile(), StandardCopyOption.ATOMIC_MOVE); - logger.info("--> after writing JSON config to node {} with path {}", node, tempFilePath); } private Tuple setupClusterStateListener(String node, String expectedKey) { @@ -261,41 +238,49 @@ private void assertRoleMappingsSaveOK(CountDownLatch savedClusterState, AtomicLo expectThrows(ExecutionException.class, () -> clusterAdmin().updateSettings(req).get()).getMessage() ); - for (UserRoleMapper userRoleMapper : internalCluster().getInstances(UserRoleMapper.class)) { - PlainActionFuture> resolveRolesFuture = new PlainActionFuture<>(); - userRoleMapper.resolveRoles( - new UserRoleMapper.UserData("anyUsername", null, List.of(), Map.of(), mock(RealmConfig.class)), - resolveRolesFuture - ); - assertThat(resolveRolesFuture.get(), containsInAnyOrder("kibana_user", "fleet_user")); - } - - // the role mappings are not retrievable by the role mapping action (which only accesses "native" i.e. 
index-based role mappings) var request = new GetRoleMappingsRequest(); request.setNames("everyone_kibana", "everyone_fleet"); var response = client().execute(GetRoleMappingsAction.INSTANCE, request).get(); - assertFalse(response.hasMappings()); - assertThat(response.mappings(), emptyArray()); + assertTrue(response.hasMappings()); + assertThat( + Arrays.stream(response.mappings()).map(r -> r.getName()).collect(Collectors.toSet()), + allOf(notNullValue(), containsInAnyOrder("everyone_kibana", "everyone_fleet")) + ); - // role mappings (with the same names) can also be stored in the "native" store - var putRoleMappingResponse = client().execute(PutRoleMappingAction.INSTANCE, sampleRestRequest("everyone_kibana")).actionGet(); - assertTrue(putRoleMappingResponse.isCreated()); - putRoleMappingResponse = client().execute(PutRoleMappingAction.INSTANCE, sampleRestRequest("everyone_fleet")).actionGet(); - assertTrue(putRoleMappingResponse.isCreated()); + // Try using the REST API to update the everyone_kibana role mapping + // This should fail, we have reserved certain role mappings in operator mode + assertEquals( + "Failed to process request " + + "[org.elasticsearch.xpack.core.security.action.rolemapping.PutRoleMappingRequest/unset] " + + "with errors: [[everyone_kibana] set as read-only by [file_settings]]", + expectThrows( + IllegalArgumentException.class, + () -> client().execute(PutRoleMappingAction.INSTANCE, sampleRestRequest("everyone_kibana")).actionGet() + ).getMessage() + ); + assertEquals( + "Failed to process request " + + "[org.elasticsearch.xpack.core.security.action.rolemapping.PutRoleMappingRequest/unset] " + + "with errors: [[everyone_fleet] set as read-only by [file_settings]]", + expectThrows( + IllegalArgumentException.class, + () -> client().execute(PutRoleMappingAction.INSTANCE, sampleRestRequest("everyone_fleet")).actionGet() + ).getMessage() + ); } public void testRoleMappingsApplied() throws Exception { ensureGreen(); var savedClusterState = 
setupClusterStateListener(internalCluster().getMasterName(), "everyone_kibana"); - writeJSONFile(internalCluster().getMasterName(), testJSON, logger, versionCounter); + writeJSONFile(internalCluster().getMasterName(), testJSON); assertRoleMappingsSaveOK(savedClusterState.v1(), savedClusterState.v2()); logger.info("---> cleanup cluster settings..."); savedClusterState = setupClusterStateListenerForCleanup(internalCluster().getMasterName()); - writeJSONFile(internalCluster().getMasterName(), emptyJSON, logger, versionCounter); + writeJSONFile(internalCluster().getMasterName(), emptyJSON); boolean awaitSuccessful = savedClusterState.v1().await(20, TimeUnit.SECONDS); assertTrue(awaitSuccessful); @@ -307,65 +292,32 @@ public void testRoleMappingsApplied() throws Exception { clusterStateResponse.getState().metadata().persistentSettings().get(INDICES_RECOVERY_MAX_BYTES_PER_SEC_SETTING.getKey()) ); - // native role mappings are not affected by the removal of the cluster-state based ones - { - var request = new GetRoleMappingsRequest(); - request.setNames("everyone_kibana", "everyone_fleet"); - var response = client().execute(GetRoleMappingsAction.INSTANCE, request).get(); - assertTrue(response.hasMappings()); - assertThat( - Arrays.stream(response.mappings()).map(ExpressionRoleMapping::getName).toList(), - containsInAnyOrder("everyone_kibana", "everyone_fleet") - ); - } - - // and roles are resolved based on the native role mappings - for (UserRoleMapper userRoleMapper : internalCluster().getInstances(UserRoleMapper.class)) { - PlainActionFuture> resolveRolesFuture = new PlainActionFuture<>(); - userRoleMapper.resolveRoles( - new UserRoleMapper.UserData("anyUsername", null, List.of(), Map.of(), mock(RealmConfig.class)), - resolveRolesFuture - ); - assertThat(resolveRolesFuture.get(), contains("kibana_user_native")); - } - - { - var request = new DeleteRoleMappingRequest(); - request.setName("everyone_kibana"); - var response = 
client().execute(DeleteRoleMappingAction.INSTANCE, request).get(); - assertTrue(response.isFound()); - request = new DeleteRoleMappingRequest(); - request.setName("everyone_fleet"); - response = client().execute(DeleteRoleMappingAction.INSTANCE, request).get(); - assertTrue(response.isFound()); - } - - // no roles are resolved now, because both native and cluster-state based stores have been cleared - for (UserRoleMapper userRoleMapper : internalCluster().getInstances(UserRoleMapper.class)) { - PlainActionFuture> resolveRolesFuture = new PlainActionFuture<>(); - userRoleMapper.resolveRoles( - new UserRoleMapper.UserData("anyUsername", null, List.of(), Map.of(), mock(RealmConfig.class)), - resolveRolesFuture - ); - assertThat(resolveRolesFuture.get(), empty()); - } + var request = new GetRoleMappingsRequest(); + request.setNames("everyone_kibana", "everyone_fleet"); + var response = client().execute(GetRoleMappingsAction.INSTANCE, request).get(); + assertFalse(response.hasMappings()); } - public static Tuple setupClusterStateListenerForError( - ClusterService clusterService, - Consumer errorMetadataConsumer - ) { + private Tuple setupClusterStateListenerForError(String node) { + ClusterService clusterService = internalCluster().clusterService(node); CountDownLatch savedClusterState = new CountDownLatch(1); AtomicLong metadataVersion = new AtomicLong(-1); clusterService.addListener(new ClusterStateListener() { @Override public void clusterChanged(ClusterChangedEvent event) { ReservedStateMetadata reservedState = event.state().metadata().reservedStateMetadata().get(FileSettingsService.NAMESPACE); - if (reservedState != null && reservedState.errorMetadata() != null) { + if (reservedState != null + && reservedState.errorMetadata() != null + && reservedState.errorMetadata().errorKind() == ReservedStateErrorMetadata.ErrorKind.PARSING) { clusterService.removeListener(this); metadataVersion.set(event.state().metadata().version()); savedClusterState.countDown(); - 
errorMetadataConsumer.accept(reservedState.errorMetadata()); + assertEquals(ReservedStateErrorMetadata.ErrorKind.PARSING, reservedState.errorMetadata().errorKind()); + assertThat(reservedState.errorMetadata().errors(), allOf(notNullValue(), hasSize(1))); + assertThat( + reservedState.errorMetadata().errors().get(0), + containsString("failed to parse role-mapping [everyone_kibana_bad]. missing field [rules]") + ); } } }); @@ -373,13 +325,22 @@ public void clusterChanged(ClusterChangedEvent event) { return new Tuple<>(savedClusterState, metadataVersion); } + private void assertRoleMappingsNotSaved(CountDownLatch savedClusterState, AtomicLong metadataVersion) throws Exception { + boolean awaitSuccessful = savedClusterState.await(20, TimeUnit.SECONDS); + assertTrue(awaitSuccessful); + + // This should succeed, nothing was reserved + client().execute(PutRoleMappingAction.INSTANCE, sampleRestRequest("everyone_kibana_bad")).get(); + client().execute(PutRoleMappingAction.INSTANCE, sampleRestRequest("everyone_fleet_ok")).get(); + } + public void testErrorSaved() throws Exception { ensureGreen(); // save an empty file to clear any prior state, this ensures we don't get a stale file left over by another test var savedClusterState = setupClusterStateListenerForCleanup(internalCluster().getMasterName()); - writeJSONFile(internalCluster().getMasterName(), emptyJSON, logger, versionCounter); + writeJSONFile(internalCluster().getMasterName(), emptyJSON); boolean awaitSuccessful = savedClusterState.v1().await(20, TimeUnit.SECONDS); assertTrue(awaitSuccessful); @@ -392,94 +353,76 @@ public void testErrorSaved() throws Exception { ); // save a bad file - savedClusterState = setupClusterStateListenerForError( - internalCluster().getCurrentMasterNodeInstance(ClusterService.class), - errorMetadata -> { - assertEquals(ReservedStateErrorMetadata.ErrorKind.PARSING, errorMetadata.errorKind()); - assertThat(errorMetadata.errors(), allOf(notNullValue(), hasSize(1))); - assertThat( - 
errorMetadata.errors().get(0), - containsString("failed to parse role-mapping [everyone_kibana_bad]. missing field [rules]") - ); - } - ); + savedClusterState = setupClusterStateListenerForError(internalCluster().getMasterName()); - writeJSONFile(internalCluster().getMasterName(), testErrorJSON, logger, versionCounter); - awaitSuccessful = savedClusterState.v1().await(20, TimeUnit.SECONDS); - assertTrue(awaitSuccessful); + writeJSONFile(internalCluster().getMasterName(), testErrorJSON); + assertRoleMappingsNotSaved(savedClusterState.v1(), savedClusterState.v2()); + } - // no roles are resolved because both role mapping stores are empty - for (UserRoleMapper userRoleMapper : internalCluster().getInstances(UserRoleMapper.class)) { - PlainActionFuture> resolveRolesFuture = new PlainActionFuture<>(); - userRoleMapper.resolveRoles( - new UserRoleMapper.UserData("anyUsername", null, List.of(), Map.of(), mock(RealmConfig.class)), - resolveRolesFuture - ); - assertThat(resolveRolesFuture.get(), empty()); - } + private Tuple setupClusterStateListenerForSecurityWriteError(String node) { + ClusterService clusterService = internalCluster().clusterService(node); + CountDownLatch savedClusterState = new CountDownLatch(1); + AtomicLong metadataVersion = new AtomicLong(-1); + clusterService.addListener(new ClusterStateListener() { + @Override + public void clusterChanged(ClusterChangedEvent event) { + ReservedStateMetadata reservedState = event.state().metadata().reservedStateMetadata().get(FileSettingsService.NAMESPACE); + if (reservedState != null + && reservedState.errorMetadata() != null + && reservedState.errorMetadata().errorKind() == ReservedStateErrorMetadata.ErrorKind.VALIDATION) { + clusterService.removeListener(this); + metadataVersion.set(event.state().metadata().version()); + savedClusterState.countDown(); + assertEquals(ReservedStateErrorMetadata.ErrorKind.VALIDATION, reservedState.errorMetadata().errorKind()); + assertThat(reservedState.errorMetadata().errors(), 
allOf(notNullValue(), hasSize(1))); + assertThat(reservedState.errorMetadata().errors().get(0), containsString("closed")); + } + } + }); + + return new Tuple<>(savedClusterState, metadataVersion); } - public void testRoleMappingApplyWithSecurityIndexClosed() throws Exception { + public void testRoleMappingFailsToWriteToStore() throws Exception { ensureGreen(); - // expect the role mappings to apply even if the .security index is closed - var savedClusterState = setupClusterStateListener(internalCluster().getMasterName(), "everyone_kibana"); + var savedClusterState = setupClusterStateListenerForSecurityWriteError(internalCluster().getMasterName()); - try { - var closeIndexResponse = indicesAdmin().close(new CloseIndexRequest(INTERNAL_SECURITY_MAIN_INDEX_7)).get(); - assertTrue(closeIndexResponse.isAcknowledged()); - - writeJSONFile(internalCluster().getMasterName(), testJSON, logger, versionCounter); - boolean awaitSuccessful = savedClusterState.v1().await(20, TimeUnit.SECONDS); - assertTrue(awaitSuccessful); - - // no native role mappings exist - var request = new GetRoleMappingsRequest(); - request.setNames("everyone_kibana", "everyone_fleet"); - var response = client().execute(GetRoleMappingsAction.INSTANCE, request).get(); - assertFalse(response.hasMappings()); - - // cluster state settings are also applied - var clusterStateResponse = clusterAdmin().state(new ClusterStateRequest().waitForMetadataVersion(savedClusterState.v2().get())) - .get(); - assertThat( - clusterStateResponse.getState().metadata().persistentSettings().get(INDICES_RECOVERY_MAX_BYTES_PER_SEC_SETTING.getKey()), - equalTo("50mb") - ); - - ReservedStateMetadata reservedState = clusterStateResponse.getState() - .metadata() - .reservedStateMetadata() - .get(FileSettingsService.NAMESPACE); - - ReservedStateHandlerMetadata handlerMetadata = reservedState.handlers().get(ReservedRoleMappingAction.NAME); - assertThat(handlerMetadata.keys(), containsInAnyOrder("everyone_kibana", "everyone_fleet")); - - 
// and roles are resolved based on the cluster-state role mappings - for (UserRoleMapper userRoleMapper : internalCluster().getInstances(UserRoleMapper.class)) { - PlainActionFuture> resolveRolesFuture = new PlainActionFuture<>(); - userRoleMapper.resolveRoles( - new UserRoleMapper.UserData("anyUsername", null, List.of(), Map.of(), mock(RealmConfig.class)), - resolveRolesFuture - ); - assertThat(resolveRolesFuture.get(), containsInAnyOrder("kibana_user", "fleet_user")); - } - } finally { - savedClusterState = setupClusterStateListenerForCleanup(internalCluster().getMasterName()); - writeJSONFile(internalCluster().getMasterName(), emptyJSON, logger, versionCounter); - boolean awaitSuccessful = savedClusterState.v1().await(20, TimeUnit.SECONDS); - assertTrue(awaitSuccessful); - - var openIndexResponse = indicesAdmin().open(new OpenIndexRequest(INTERNAL_SECURITY_MAIN_INDEX_7)).get(); - assertTrue(openIndexResponse.isAcknowledged()); - } + final CloseIndexResponse closeIndexResponse = indicesAdmin().close(new CloseIndexRequest(INTERNAL_SECURITY_MAIN_INDEX_7)).get(); + assertTrue(closeIndexResponse.isAcknowledged()); + + writeJSONFile(internalCluster().getMasterName(), testJSON); + boolean awaitSuccessful = savedClusterState.v1().await(20, TimeUnit.SECONDS); + assertTrue(awaitSuccessful); + + var request = new GetRoleMappingsRequest(); + request.setNames("everyone_kibana", "everyone_fleet"); + + var response = client().execute(GetRoleMappingsAction.INSTANCE, request).get(); + assertFalse(response.hasMappings()); + + final ClusterStateResponse clusterStateResponse = clusterAdmin().state( + new ClusterStateRequest().waitForMetadataVersion(savedClusterState.v2().get()) + ).get(); + + assertNull( + clusterStateResponse.getState().metadata().persistentSettings().get(INDICES_RECOVERY_MAX_BYTES_PER_SEC_SETTING.getKey()) + ); + + ReservedStateMetadata reservedState = clusterStateResponse.getState() + .metadata() + .reservedStateMetadata() + .get(FileSettingsService.NAMESPACE); 
+ + ReservedStateHandlerMetadata handlerMetadata = reservedState.handlers().get(ReservedRoleMappingAction.NAME); + assertTrue(handlerMetadata == null || handlerMetadata.keys().isEmpty()); } private PutRoleMappingRequest sampleRestRequest(String name) throws Exception { var json = """ { - "enabled": true, - "roles": [ "kibana_user_native" ], + "enabled": false, + "roles": [ "kibana_user" ], "rules": { "field": { "username": "*" } }, "metadata": { "uuid" : "b9a59ba9-6b92-4be2-bb8d-02bb270cb3a7" @@ -490,7 +433,8 @@ private PutRoleMappingRequest sampleRestRequest(String name) throws Exception { var bis = new ByteArrayInputStream(json.getBytes(StandardCharsets.UTF_8)); var parser = JSON.xContent().createParser(XContentParserConfiguration.EMPTY, bis) ) { - return new PutRoleMappingRequestBuilder(null).source(name, parser).request(); + ExpressionRoleMapping mapping = ExpressionRoleMapping.parse(name, parser); + return PutRoleMappingRequest.fromMapping(mapping); } } } diff --git a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/FileSettingsRoleMappingsStartupIT.java b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/FileSettingsRoleMappingsStartupIT.java new file mode 100644 index 0000000000000..48e97b7afb897 --- /dev/null +++ b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/FileSettingsRoleMappingsStartupIT.java @@ -0,0 +1,148 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.security; + +import org.elasticsearch.analysis.common.CommonAnalysisPlugin; +import org.elasticsearch.cluster.ClusterChangedEvent; +import org.elasticsearch.cluster.ClusterStateListener; +import org.elasticsearch.cluster.metadata.ReservedStateErrorMetadata; +import org.elasticsearch.cluster.metadata.ReservedStateMetadata; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.core.Strings; +import org.elasticsearch.core.Tuple; +import org.elasticsearch.index.mapper.extras.MapperExtrasPlugin; +import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.reindex.ReindexPlugin; +import org.elasticsearch.reservedstate.service.FileSettingsService; +import org.elasticsearch.test.ESIntegTestCase; +import org.elasticsearch.test.InternalSettingsPlugin; +import org.elasticsearch.test.SecurityIntegTestCase; +import org.elasticsearch.test.junit.annotations.TestLogging; +import org.elasticsearch.transport.netty4.Netty4Plugin; +import org.elasticsearch.xpack.wildcard.Wildcard; + +import java.nio.charset.StandardCharsets; +import java.nio.file.Files; +import java.nio.file.Path; +import java.nio.file.StandardCopyOption; +import java.util.Arrays; +import java.util.Collection; +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.atomic.AtomicLong; + +import static org.hamcrest.Matchers.allOf; +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.hasSize; +import static org.hamcrest.Matchers.notNullValue; + +@ESIntegTestCase.ClusterScope(scope = ESIntegTestCase.Scope.TEST, numDataNodes = 0, autoManageMasterNodes = false) +public class FileSettingsRoleMappingsStartupIT extends SecurityIntegTestCase { + + private static AtomicLong versionCounter = new AtomicLong(1); + private static String testJSONForFailedCase = """ + { + "metadata": { + "version": "%s", + "compatibility": "8.4.0" + }, + "state": { + "role_mappings": 
{ + "everyone_kibana_2": { + "enabled": true, + "roles": [ "kibana_user" ], + "rules": { "field": { "username": "*" } }, + "metadata": { + "uuid" : "b9a59ba9-6b92-4be2-bb8d-02bb270cb3a7", + "_foo": "something" + } + } + } + } + }"""; + + @Override + protected void doAssertXPackIsInstalled() {} + + @Override + protected Path nodeConfigPath(int nodeOrdinal) { + return null; + } + + private void writeJSONFile(String node, String json) throws Exception { + long version = versionCounter.incrementAndGet(); + + FileSettingsService fileSettingsService = internalCluster().getInstance(FileSettingsService.class, node); + + Files.deleteIfExists(fileSettingsService.watchedFile()); + + Files.createDirectories(fileSettingsService.watchedFileDir()); + Path tempFilePath = createTempFile(); + + logger.info("--> writing JSON config to node {} with path {}", node, tempFilePath); + logger.info(Strings.format(json, version)); + Files.write(tempFilePath, Strings.format(json, version).getBytes(StandardCharsets.UTF_8)); + Files.move(tempFilePath, fileSettingsService.watchedFile(), StandardCopyOption.ATOMIC_MOVE); + } + + private Tuple setupClusterStateListenerForError(String node) { + ClusterService clusterService = internalCluster().clusterService(node); + CountDownLatch savedClusterState = new CountDownLatch(1); + AtomicLong metadataVersion = new AtomicLong(-1); + clusterService.addListener(new ClusterStateListener() { + @Override + public void clusterChanged(ClusterChangedEvent event) { + ReservedStateMetadata reservedState = event.state().metadata().reservedStateMetadata().get(FileSettingsService.NAMESPACE); + if (reservedState != null && reservedState.errorMetadata() != null) { + assertEquals(ReservedStateErrorMetadata.ErrorKind.VALIDATION, reservedState.errorMetadata().errorKind()); + assertThat(reservedState.errorMetadata().errors(), allOf(notNullValue(), hasSize(1))); + assertThat(reservedState.errorMetadata().errors().get(0), containsString("Fake exception")); + 
clusterService.removeListener(this); + metadataVersion.set(event.state().metadata().version()); + savedClusterState.countDown(); + } else if (reservedState != null) { + logger.debug(() -> "Got reserved state update without error metadata: " + reservedState); + } else { + logger.debug(() -> "Got cluster state update: " + event.source()); + } + } + }); + + return new Tuple<>(savedClusterState, metadataVersion); + } + + @TestLogging( + value = "org.elasticsearch.common.file:DEBUG,org.elasticsearch.xpack.security:DEBUG,org.elasticsearch.cluster.metadata:DEBUG", + reason = "https://github.com/elastic/elasticsearch/issues/98391" + ) + public void testFailsOnStartMasterNodeWithError() throws Exception { + internalCluster().setBootstrapMasterNodeIndex(0); + + internalCluster().startMasterOnlyNode(); + + logger.info("--> write some role mappings, no other file settings"); + writeJSONFile(internalCluster().getMasterName(), testJSONForFailedCase); + var savedClusterState = setupClusterStateListenerForError(internalCluster().getMasterName()); + + boolean awaitSuccessful = savedClusterState.v1().await(20, TimeUnit.SECONDS); + assertTrue(awaitSuccessful); + } + + public Collection> nodePlugins() { + return Arrays.asList( + UnstableLocalStateSecurity.class, + Netty4Plugin.class, + ReindexPlugin.class, + CommonAnalysisPlugin.class, + InternalSettingsPlugin.class, + MapperExtrasPlugin.class, + Wildcard.class + ); + } + +} diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java index 0ff4f1160af56..ef08f855a46cc 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java @@ -1103,7 +1103,8 @@ Collection createComponents( new SecurityUsageServices(realms, allRolesStore, nativeRoleMappingStore, ipFilter.get(), profileService, 
apiKeyService) ); - reservedRoleMappingAction.set(new ReservedRoleMappingAction()); + reservedRoleMappingAction.set(new ReservedRoleMappingAction(nativeRoleMappingStore)); + systemIndices.getMainIndexManager().onStateRecovered(state -> reservedRoleMappingAction.get().securityIndexRecovered()); cacheInvalidatorRegistry.validate(); diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/ReservedRoleMappingAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/ReservedRoleMappingAction.java index 73d1a1abcdb50..852887767578f 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/ReservedRoleMappingAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/ReservedRoleMappingAction.java @@ -7,18 +7,24 @@ package org.elasticsearch.xpack.security.action.rolemapping; -import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.support.GroupedActionListener; +import org.elasticsearch.common.util.concurrent.ListenableFuture; +import org.elasticsearch.reservedstate.NonStateTransformResult; import org.elasticsearch.reservedstate.ReservedClusterStateHandler; import org.elasticsearch.reservedstate.TransformState; import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xcontent.XContentParserConfiguration; +import org.elasticsearch.xpack.core.security.action.rolemapping.DeleteRoleMappingRequest; import org.elasticsearch.xpack.core.security.action.rolemapping.PutRoleMappingRequest; -import org.elasticsearch.xpack.core.security.action.rolemapping.PutRoleMappingRequestBuilder; import org.elasticsearch.xpack.core.security.authc.support.mapper.ExpressionRoleMapping; -import org.elasticsearch.xpack.core.security.authz.RoleMappingMetadata; +import 
org.elasticsearch.xpack.security.authc.support.mapper.NativeRoleMappingStore; import java.io.IOException; import java.util.ArrayList; +import java.util.Collection; +import java.util.Collections; +import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; @@ -32,59 +38,123 @@ * It is used by the ReservedClusterStateService to add/update or remove role mappings. Typical usage * for this action is in the context of file based settings. */ -public class ReservedRoleMappingAction implements ReservedClusterStateHandler> { +public class ReservedRoleMappingAction implements ReservedClusterStateHandler> { public static final String NAME = "role_mappings"; + private final NativeRoleMappingStore roleMappingStore; + private final ListenableFuture securityIndexRecoveryListener = new ListenableFuture<>(); + + /** + * Creates a ReservedRoleMappingAction + * + * @param roleMappingStore requires {@link NativeRoleMappingStore} for storing/deleting the mappings + */ + public ReservedRoleMappingAction(NativeRoleMappingStore roleMappingStore) { + this.roleMappingStore = roleMappingStore; + } + @Override public String name() { return NAME; } + private static Collection prepare(List roleMappings) { + List requests = roleMappings.stream().map(rm -> PutRoleMappingRequest.fromMapping(rm)).toList(); + + var exceptions = new ArrayList(); + for (var request : requests) { + // File based defined role mappings are allowed to use MetadataUtils.RESERVED_PREFIX + var exception = request.validate(false); + if (exception != null) { + exceptions.add(exception); + } + } + + if (exceptions.isEmpty() == false) { + var illegalArgumentException = new IllegalArgumentException("error on validating put role mapping requests"); + exceptions.forEach(illegalArgumentException::addSuppressed); + throw illegalArgumentException; + } + + return requests; + } + @Override public TransformState transform(Object source, TransformState prevState) throws Exception { + // We execute the 
prepare() call to catch any errors in the transform phase. + // Since we store the role mappings outside the cluster state, we do the actual save with a + // non cluster state transform call. @SuppressWarnings("unchecked") - Set roleMappings = validate((List) source); - RoleMappingMetadata newRoleMappingMetadata = new RoleMappingMetadata(roleMappings); - if (newRoleMappingMetadata.equals(RoleMappingMetadata.getFromClusterState(prevState.state()))) { - return prevState; - } else { - ClusterState newState = newRoleMappingMetadata.updateClusterState(prevState.state()); - Set entities = newRoleMappingMetadata.getRoleMappings() - .stream() - .map(ExpressionRoleMapping::getName) - .collect(Collectors.toSet()); - return new TransformState(newState, entities); + var requests = prepare((List) source); + return new TransformState( + prevState.state(), + prevState.keys(), + l -> securityIndexRecoveryListener.addListener( + ActionListener.wrap(ignored -> nonStateTransform(requests, prevState, l), l::onFailure) + ) + ); + } + + // Exposed for testing purposes + protected void nonStateTransform( + Collection requests, + TransformState prevState, + ActionListener listener + ) { + Set entities = requests.stream().map(r -> r.getName()).collect(Collectors.toSet()); + Set toDelete = new HashSet<>(prevState.keys()); + toDelete.removeAll(entities); + + final int tasksCount = requests.size() + toDelete.size(); + + // Nothing to do, don't start a group listener with 0 actions + if (tasksCount == 0) { + listener.onResponse(new NonStateTransformResult(ReservedRoleMappingAction.NAME, Set.of())); + return; + } + + GroupedActionListener taskListener = new GroupedActionListener<>(tasksCount, new ActionListener<>() { + @Override + public void onResponse(Collection booleans) { + listener.onResponse(new NonStateTransformResult(ReservedRoleMappingAction.NAME, Collections.unmodifiableSet(entities))); + } + + @Override + public void onFailure(Exception e) { + listener.onFailure(e); + } + }); + + for 
(var request : requests) { + roleMappingStore.putRoleMapping(request, taskListener); + } + + for (var mappingToDelete : toDelete) { + var deleteRequest = new DeleteRoleMappingRequest(); + deleteRequest.setName(mappingToDelete); + roleMappingStore.deleteRoleMapping(deleteRequest, taskListener); } } @Override - public List fromXContent(XContentParser parser) throws IOException { - List result = new ArrayList<>(); + public List fromXContent(XContentParser parser) throws IOException { + List result = new ArrayList<>(); + Map source = parser.map(); + for (String name : source.keySet()) { @SuppressWarnings("unchecked") Map content = (Map) source.get(name); try (XContentParser mappingParser = mapToXContentParser(XContentParserConfiguration.EMPTY, content)) { - result.add(new PutRoleMappingRequestBuilder(null).source(name, mappingParser).request()); + ExpressionRoleMapping mapping = ExpressionRoleMapping.parse(name, mappingParser); + result.add(mapping); } } + return result; } - private Set validate(List roleMappings) { - var exceptions = new ArrayList(); - for (var roleMapping : roleMappings) { - // File based defined role mappings are allowed to use MetadataUtils.RESERVED_PREFIX - var exception = roleMapping.validate(false); - if (exception != null) { - exceptions.add(exception); - } - } - if (exceptions.isEmpty() == false) { - var illegalArgumentException = new IllegalArgumentException("error on validating put role mapping requests"); - exceptions.forEach(illegalArgumentException::addSuppressed); - throw illegalArgumentException; - } - return roleMappings.stream().map(PutRoleMappingRequest::getMapping).collect(Collectors.toUnmodifiableSet()); + public void securityIndexRecovered() { + securityIndexRecoveryListener.onResponse(null); } } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/TransportDeleteRoleMappingAction.java 
b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/TransportDeleteRoleMappingAction.java index b4e8d5d6db83f..811d357b89f89 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/TransportDeleteRoleMappingAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/TransportDeleteRoleMappingAction.java @@ -8,9 +8,9 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; -import org.elasticsearch.action.support.HandledTransportAction; +import org.elasticsearch.action.support.ReservedStateAwareHandledTransportAction; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; -import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.tasks.Task; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.security.action.rolemapping.DeleteRoleMappingAction; @@ -18,7 +18,12 @@ import org.elasticsearch.xpack.core.security.action.rolemapping.DeleteRoleMappingResponse; import org.elasticsearch.xpack.security.authc.support.mapper.NativeRoleMappingStore; -public class TransportDeleteRoleMappingAction extends HandledTransportAction { +import java.util.Optional; +import java.util.Set; + +public class TransportDeleteRoleMappingAction extends ReservedStateAwareHandledTransportAction< + DeleteRoleMappingRequest, + DeleteRoleMappingResponse> { private final NativeRoleMappingStore roleMappingStore; @@ -26,20 +31,25 @@ public class TransportDeleteRoleMappingAction extends HandledTransportAction listener) { + protected void doExecuteProtected(Task task, DeleteRoleMappingRequest request, ActionListener listener) { roleMappingStore.deleteRoleMapping(request, listener.safeMap(DeleteRoleMappingResponse::new)); } + + @Override + public Optional reservedStateHandlerName() { + return 
Optional.of(ReservedRoleMappingAction.NAME); + } + + @Override + public Set modifiedKeys(DeleteRoleMappingRequest request) { + return Set.of(request.getName()); + } } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/TransportPutRoleMappingAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/TransportPutRoleMappingAction.java index 44c72bc13a54b..5e32e4f903f81 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/TransportPutRoleMappingAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/TransportPutRoleMappingAction.java @@ -8,9 +8,9 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; -import org.elasticsearch.action.support.HandledTransportAction; +import org.elasticsearch.action.support.ReservedStateAwareHandledTransportAction; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; -import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.tasks.Task; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.security.action.rolemapping.PutRoleMappingAction; @@ -18,7 +18,10 @@ import org.elasticsearch.xpack.core.security.action.rolemapping.PutRoleMappingResponse; import org.elasticsearch.xpack.security.authc.support.mapper.NativeRoleMappingStore; -public class TransportPutRoleMappingAction extends HandledTransportAction { +import java.util.Optional; +import java.util.Set; + +public class TransportPutRoleMappingAction extends ReservedStateAwareHandledTransportAction { private final NativeRoleMappingStore roleMappingStore; @@ -26,17 +29,32 @@ public class TransportPutRoleMappingAction extends HandledTransportAction listener) { + protected void doExecuteProtected( + Task task, + final PutRoleMappingRequest 
request, + final ActionListener listener + ) { roleMappingStore.putRoleMapping( request, ActionListener.wrap(created -> listener.onResponse(new PutRoleMappingResponse(created)), listener::onFailure) ); } + + @Override + public Optional reservedStateHandlerName() { + return Optional.of(ReservedRoleMappingAction.NAME); + } + + @Override + public Set modifiedKeys(PutRoleMappingRequest request) { + return Set.of(request.getName()); + } } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/rolemapping/RestPutRoleMappingAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/rolemapping/RestPutRoleMappingAction.java index 55562c8ee0138..e7e24037543fa 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/rolemapping/RestPutRoleMappingAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/rolemapping/RestPutRoleMappingAction.java @@ -8,8 +8,6 @@ import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.xcontent.LoggingDeprecationHandler; -import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.rest.RestRequest; @@ -19,7 +17,6 @@ import org.elasticsearch.rest.ServerlessScope; import org.elasticsearch.rest.action.RestBuilderListener; import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xpack.core.security.action.rolemapping.PutRoleMappingRequestBuilder; import org.elasticsearch.xpack.core.security.action.rolemapping.PutRoleMappingResponse; @@ -60,18 +57,12 @@ public String getName() { @Override public RestChannelConsumer innerPrepareRequest(RestRequest request, NodeClient client) throws IOException { - String name = 
request.param("name"); - String refresh = request.param("refresh"); - PutRoleMappingRequestBuilder requestBuilder; - try ( - XContentParser parser = XContentHelper.createParserNotCompressed( - LoggingDeprecationHandler.XCONTENT_PARSER_CONFIG, - request.requiredContent(), - request.getXContentType() - ) - ) { - requestBuilder = new PutRoleMappingRequestBuilder(client).source(name, parser).setRefreshPolicy(refresh); - } + final String name = request.param("name"); + PutRoleMappingRequestBuilder requestBuilder = new PutRoleMappingRequestBuilder(client).source( + name, + request.requiredContent(), + request.getXContentType() + ).setRefreshPolicy(request.param("refresh")); return channel -> requestBuilder.execute(new RestBuilderListener<>(channel) { @Override public RestResponse buildResponse(PutRoleMappingResponse response, XContentBuilder builder) throws Exception { diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/LocalReservedUnstableSecurityStateHandlerProvider.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/LocalReservedUnstableSecurityStateHandlerProvider.java new file mode 100644 index 0000000000000..b4a07093e49c3 --- /dev/null +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/LocalReservedUnstableSecurityStateHandlerProvider.java @@ -0,0 +1,28 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.security; + +import org.elasticsearch.reservedstate.ReservedClusterStateHandlerProvider; + +/** + * Mock Security Provider implementation for the {@link ReservedClusterStateHandlerProvider} service interface. 
This is used + * for {@link org.elasticsearch.test.ESIntegTestCase} because the Security Plugin is really LocalStateSecurity in those tests. + *

+ * Unlike {@link LocalReservedSecurityStateHandlerProvider} this implementation is mocked to implement the + * {@link UnstableLocalStateSecurity}. Separate implementation is needed, because the SPI creation code matches the constructor + * signature when instantiating. E.g. we need to match {@link UnstableLocalStateSecurity} instead of {@link LocalStateSecurity} + */ +public class LocalReservedUnstableSecurityStateHandlerProvider extends LocalReservedSecurityStateHandlerProvider { + public LocalReservedUnstableSecurityStateHandlerProvider() { + throw new IllegalStateException("Provider must be constructed using PluginsService"); + } + + public LocalReservedUnstableSecurityStateHandlerProvider(UnstableLocalStateSecurity plugin) { + super(plugin); + } +} diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/UnstableLocalStateSecurity.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/UnstableLocalStateSecurity.java new file mode 100644 index 0000000000000..5621bdced15b3 --- /dev/null +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/UnstableLocalStateSecurity.java @@ -0,0 +1,97 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.security; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.license.XPackLicenseState; +import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.reservedstate.NonStateTransformResult; +import org.elasticsearch.reservedstate.ReservedClusterStateHandler; +import org.elasticsearch.reservedstate.TransformState; +import org.elasticsearch.xpack.core.security.action.rolemapping.PutRoleMappingRequest; +import org.elasticsearch.xpack.core.ssl.SSLService; +import org.elasticsearch.xpack.security.action.rolemapping.ReservedRoleMappingAction; + +import java.nio.file.Path; +import java.util.Collection; +import java.util.List; +import java.util.Optional; + +/** + * A test class that allows us to Inject new type of Reserved Handler that can + * simulate errors in saving role mappings. + *

+ * We can't use our regular path to simply make an extension of LocalStateSecurity + * in an integration test class, because the reserved handlers are injected through + * SPI. (see {@link LocalReservedUnstableSecurityStateHandlerProvider}) + */ +public final class UnstableLocalStateSecurity extends LocalStateSecurity { + + public UnstableLocalStateSecurity(Settings settings, Path configPath) throws Exception { + super(settings, configPath); + // We reuse most of the initialization of LocalStateSecurity, we then just overwrite + // the security plugin with an extra method to give us a fake RoleMappingAction. + Optional security = plugins.stream().filter(p -> p instanceof Security).findFirst(); + if (security.isPresent()) { + plugins.remove(security.get()); + } + + UnstableLocalStateSecurity thisVar = this; + var action = new ReservedUnstableRoleMappingAction(); + + plugins.add(new Security(settings, super.securityExtensions()) { + @Override + protected SSLService getSslService() { + return thisVar.getSslService(); + } + + @Override + protected XPackLicenseState getLicenseState() { + return thisVar.getLicenseState(); + } + + @Override + List> reservedClusterStateHandlers() { + // pretend the security index is initialized after 2 seconds + var timer = new java.util.Timer(); + timer.schedule(new java.util.TimerTask() { + @Override + public void run() { + action.securityIndexRecovered(); + timer.cancel(); + } + }, 2_000); + return List.of(action); + } + }); + } + + public static class ReservedUnstableRoleMappingAction extends ReservedRoleMappingAction { + /** + * Creates a fake ReservedRoleMappingAction that doesn't actually use the role mapping store + */ + public ReservedUnstableRoleMappingAction() { + // we don't actually need a NativeRoleMappingStore + super(null); + } + + /** + * The nonStateTransform method is the only one that uses the native store, we simply pretend + * something has called the onFailure method of the listener. 
+ */ + @Override + protected void nonStateTransform( + Collection requests, + TransformState prevState, + ActionListener listener + ) { + listener.onFailure(new IllegalStateException("Fake exception")); + } + } +} diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/reservedstate/ReservedRoleMappingActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/reservedstate/ReservedRoleMappingActionTests.java index cac7c91f73ed1..6cdca0cb3b24d 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/reservedstate/ReservedRoleMappingActionTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/reservedstate/ReservedRoleMappingActionTests.java @@ -7,40 +7,77 @@ package org.elasticsearch.xpack.security.action.reservedstate; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.client.internal.Client; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.common.ParsingException; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.reservedstate.NonStateTransformResult; import org.elasticsearch.reservedstate.TransformState; +import org.elasticsearch.script.ScriptService; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xcontent.XContentParserConfiguration; import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xpack.security.action.rolemapping.ReservedRoleMappingAction; +import org.elasticsearch.xpack.security.authc.support.mapper.NativeRoleMappingStore; +import org.elasticsearch.xpack.security.support.SecurityIndexManager; import java.util.Collections; +import java.util.Set; +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.atomic.AtomicReference; import static org.hamcrest.Matchers.containsInAnyOrder; import 
static org.hamcrest.Matchers.empty; -import static org.hamcrest.Matchers.nullValue; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.Mockito.doAnswer; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.spy; /** * Tests that the ReservedRoleMappingAction does validation, can add and remove role mappings */ public class ReservedRoleMappingActionTests extends ESTestCase { - private TransformState processJSON(ReservedRoleMappingAction action, TransformState prevState, String json) throws Exception { try (XContentParser parser = XContentType.JSON.xContent().createParser(XContentParserConfiguration.EMPTY, json)) { var content = action.fromXContent(parser); var state = action.transform(content, prevState); - assertThat(state.nonStateTransform(), nullValue()); - return state; + + CountDownLatch latch = new CountDownLatch(1); + AtomicReference> updatedKeys = new AtomicReference<>(); + AtomicReference error = new AtomicReference<>(); + state.nonStateTransform().accept(new ActionListener<>() { + @Override + public void onResponse(NonStateTransformResult nonStateTransformResult) { + updatedKeys.set(nonStateTransformResult.updatedKeys()); + latch.countDown(); + } + + @Override + public void onFailure(Exception e) { + error.set(e); + latch.countDown(); + } + }); + + latch.await(); + if (error.get() != null) { + throw error.get(); + } + return new TransformState(state.state(), updatedKeys.get()); } } public void testValidation() { + var nativeRoleMappingStore = mockNativeRoleMappingStore(); + ClusterState state = ClusterState.builder(new ClusterName("elasticsearch")).build(); TransformState prevState = new TransformState(state, Collections.emptySet()); - ReservedRoleMappingAction action = new ReservedRoleMappingAction(); + ReservedRoleMappingAction action = new ReservedRoleMappingAction(nativeRoleMappingStore); + action.securityIndexRecovered(); + String badPolicyJSON = """ { "everyone_kibana": { @@ -60,6 +97,7 @@ public void 
testValidation() { } } }"""; + assertEquals( "failed to parse role-mapping [everyone_fleet]. missing field [rules]", expectThrows(ParsingException.class, () -> processJSON(action, prevState, badPolicyJSON)).getMessage() @@ -67,9 +105,13 @@ public void testValidation() { } public void testAddRemoveRoleMapping() throws Exception { + var nativeRoleMappingStore = mockNativeRoleMappingStore(); + ClusterState state = ClusterState.builder(new ClusterName("elasticsearch")).build(); TransformState prevState = new TransformState(state, Collections.emptySet()); - ReservedRoleMappingAction action = new ReservedRoleMappingAction(); + ReservedRoleMappingAction action = new ReservedRoleMappingAction(nativeRoleMappingStore); + action.securityIndexRecovered(); + String emptyJSON = ""; TransformState updatedState = processJSON(action, prevState, emptyJSON); @@ -105,4 +147,102 @@ public void testAddRemoveRoleMapping() throws Exception { updatedState = processJSON(action, prevState, emptyJSON); assertThat(updatedState.keys(), empty()); } + + @SuppressWarnings("unchecked") + public void testNonStateTransformWaitsOnAsyncActions() throws Exception { + var nativeRoleMappingStore = mockNativeRoleMappingStore(); + + doAnswer(invocation -> { + new Thread(() -> { + // Simulate put role mapping async action taking a while + try { + Thread.sleep(1_000); + ((ActionListener) invocation.getArgument(1)).onFailure(new IllegalStateException("err_done")); + } catch (InterruptedException e) { + throw new RuntimeException(e); + } + }).start(); + + return null; + }).when(nativeRoleMappingStore).putRoleMapping(any(), any()); + + doAnswer(invocation -> { + new Thread(() -> { + // Simulate delete role mapping async action taking a while + try { + Thread.sleep(1_000); + ((ActionListener) invocation.getArgument(1)).onFailure(new IllegalStateException("err_done")); + } catch (InterruptedException e) { + throw new RuntimeException(e); + } + }).start(); + + return null; + 
}).when(nativeRoleMappingStore).deleteRoleMapping(any(), any()); + + ClusterState state = ClusterState.builder(new ClusterName("elasticsearch")).build(); + TransformState updatedState = new TransformState(state, Collections.emptySet()); + ReservedRoleMappingAction action = new ReservedRoleMappingAction(nativeRoleMappingStore); + action.securityIndexRecovered(); + + String json = """ + { + "everyone_kibana": { + "enabled": true, + "roles": [ "kibana_user" ], + "rules": { "field": { "username": "*" } }, + "metadata": { + "uuid" : "b9a59ba9-6b92-4be2-bb8d-02bb270cb3a7", + "_reserved": true + } + }, + "everyone_fleet": { + "enabled": true, + "roles": [ "fleet_user" ], + "rules": { "field": { "username": "*" } }, + "metadata": { + "uuid" : "a9a59ba9-6b92-4be2-bb8d-02bb270cb3a7", + "_reserved": true + } + } + }"""; + + assertEquals( + "err_done", + expectThrows(IllegalStateException.class, () -> processJSON(action, new TransformState(state, Collections.emptySet()), json)) + .getMessage() + ); + + // Now that we've tested that we wait on putRoleMapping correctly, let it finish without exception, so we can test error on delete + doAnswer(invocation -> { + ((ActionListener) invocation.getArgument(1)).onResponse(true); + return null; + }).when(nativeRoleMappingStore).putRoleMapping(any(), any()); + + updatedState = processJSON(action, updatedState, json); + assertThat(updatedState.keys(), containsInAnyOrder("everyone_kibana", "everyone_fleet")); + + final TransformState currentState = new TransformState(updatedState.state(), updatedState.keys()); + + assertEquals("err_done", expectThrows(IllegalStateException.class, () -> processJSON(action, currentState, "")).getMessage()); + } + + @SuppressWarnings("unchecked") + private NativeRoleMappingStore mockNativeRoleMappingStore() { + final NativeRoleMappingStore nativeRoleMappingStore = spy( + new NativeRoleMappingStore(Settings.EMPTY, mock(Client.class), mock(SecurityIndexManager.class), mock(ScriptService.class)) + ); + + 
doAnswer(invocation -> { + ((ActionListener) invocation.getArgument(1)).onResponse(true); + return null; + }).when(nativeRoleMappingStore).putRoleMapping(any(), any()); + + doAnswer(invocation -> { + ((ActionListener) invocation.getArgument(1)).onResponse(true); + return null; + }).when(nativeRoleMappingStore).deleteRoleMapping(any(), any()); + + return nativeRoleMappingStore; + } } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/rolemapping/TransportDeleteRoleMappingActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/rolemapping/TransportDeleteRoleMappingActionTests.java new file mode 100644 index 0000000000000..038e673e07862 --- /dev/null +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/rolemapping/TransportDeleteRoleMappingActionTests.java @@ -0,0 +1,45 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.security.action.rolemapping; + +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.Transport; +import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xpack.core.security.action.rolemapping.DeleteRoleMappingRequest; +import org.elasticsearch.xpack.security.authc.support.mapper.NativeRoleMappingStore; + +import java.util.Collections; + +import static org.hamcrest.Matchers.containsInAnyOrder; +import static org.mockito.Mockito.mock; + +public class TransportDeleteRoleMappingActionTests extends ESTestCase { + public void testReservedStateHandler() { + var store = mock(NativeRoleMappingStore.class); + TransportService transportService = new TransportService( + Settings.EMPTY, + mock(Transport.class), + mock(ThreadPool.class), + TransportService.NOOP_TRANSPORT_INTERCEPTOR, + x -> null, + null, + Collections.emptySet() + ); + var action = new TransportDeleteRoleMappingAction(mock(ActionFilters.class), transportService, mock(ClusterService.class), store); + + assertEquals(ReservedRoleMappingAction.NAME, action.reservedStateHandlerName().get()); + + var deleteRequest = new DeleteRoleMappingRequest(); + deleteRequest.setName("kibana_all"); + assertThat(action.modifiedKeys(deleteRequest), containsInAnyOrder("kibana_all")); + } +} diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/rolemapping/TransportPutRoleMappingActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/rolemapping/TransportPutRoleMappingActionTests.java index 6f789a10a3a6c..58a8e8e3d4751 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/rolemapping/TransportPutRoleMappingActionTests.java +++ 
b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/rolemapping/TransportPutRoleMappingActionTests.java @@ -9,12 +9,16 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.PlainActionFuture; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.tasks.Task; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.Transport; import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xcontent.XContentParser; +import org.elasticsearch.xcontent.XContentParserConfiguration; +import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xpack.core.security.action.rolemapping.PutRoleMappingRequest; import org.elasticsearch.xpack.core.security.action.rolemapping.PutRoleMappingResponse; import org.elasticsearch.xpack.core.security.authc.support.mapper.ExpressionRoleMapping; @@ -29,6 +33,7 @@ import static org.hamcrest.Matchers.aMapWithSize; import static org.hamcrest.Matchers.contains; +import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.iterableWithSize; @@ -55,7 +60,7 @@ public void setupMocks() { null, Collections.emptySet() ); - action = new TransportPutRoleMappingAction(mock(ActionFilters.class), transportService, store); + action = new TransportPutRoleMappingAction(mock(ActionFilters.class), transportService, mock(ClusterService.class), store); requestRef = new AtomicReference<>(null); @@ -94,7 +99,39 @@ private PutRoleMappingResponse put(String name, FieldExpression expression, Stri request.setMetadata(metadata); request.setEnabled(true); final PlainActionFuture future = new PlainActionFuture<>(); - action.doExecute(mock(Task.class), request, future); 
+ action.doExecuteProtected(mock(Task.class), request, future); return future.get(); } + + public void testReservedStateHandler() throws Exception { + assertEquals(ReservedRoleMappingAction.NAME, action.reservedStateHandlerName().get()); + String json = """ + { + "everyone_kibana": { + "enabled": true, + "roles": [ "kibana_user" ], + "rules": { "field": { "username": "*" } }, + "metadata": { + "uuid" : "b9a59ba9-6b92-4be2-bb8d-02bb270cb3a7" + } + }, + "everyone_fleet": { + "enabled": true, + "roles": [ "fleet_user" ], + "rules": { "field": { "username": "*" } }, + "metadata": { + "uuid" : "b9a59ba9-6b92-4be3-bb8d-02bb270cb3a7" + } + } + }"""; + + try (XContentParser parser = XContentType.JSON.xContent().createParser(XContentParserConfiguration.EMPTY, json)) { + ReservedRoleMappingAction roleMappingAction = new ReservedRoleMappingAction(store); + var parsedResult = roleMappingAction.fromXContent(parser); + + for (var mapping : parsedResult) { + assertThat(action.modifiedKeys(PutRoleMappingRequest.fromMapping(mapping)), containsInAnyOrder(mapping.getName())); + } + } + } } diff --git a/x-pack/plugin/security/src/test/resources/META-INF/services/org.elasticsearch.reservedstate.ReservedClusterStateHandlerProvider b/x-pack/plugin/security/src/test/resources/META-INF/services/org.elasticsearch.reservedstate.ReservedClusterStateHandlerProvider index 3d17572429bac..77c38d302d9c9 100644 --- a/x-pack/plugin/security/src/test/resources/META-INF/services/org.elasticsearch.reservedstate.ReservedClusterStateHandlerProvider +++ b/x-pack/plugin/security/src/test/resources/META-INF/services/org.elasticsearch.reservedstate.ReservedClusterStateHandlerProvider @@ -6,3 +6,4 @@ # org.elasticsearch.xpack.security.LocalReservedSecurityStateHandlerProvider +org.elasticsearch.xpack.security.LocalReservedUnstableSecurityStateHandlerProvider diff --git a/x-pack/qa/third-party/active-directory/src/test/java/org/elasticsearch/xpack/security/authc/ldap/AbstractAdLdapRealmTestCase.java 
b/x-pack/qa/third-party/active-directory/src/test/java/org/elasticsearch/xpack/security/authc/ldap/AbstractAdLdapRealmTestCase.java index 17363d58545c2..3d9e7f3828bc7 100644 --- a/x-pack/qa/third-party/active-directory/src/test/java/org/elasticsearch/xpack/security/authc/ldap/AbstractAdLdapRealmTestCase.java +++ b/x-pack/qa/third-party/active-directory/src/test/java/org/elasticsearch/xpack/security/authc/ldap/AbstractAdLdapRealmTestCase.java @@ -20,14 +20,11 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.ssl.SslVerificationMode; import org.elasticsearch.common.util.Maps; -import org.elasticsearch.common.xcontent.LoggingDeprecationHandler; -import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.core.Nullable; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.SecurityIntegTestCase; import org.elasticsearch.test.fixtures.smb.SmbTestContainer; import org.elasticsearch.test.fixtures.testcontainers.TestContainersThreadFilter; -import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xpack.core.security.action.rolemapping.PutRoleMappingRequestBuilder; import org.elasticsearch.xpack.core.security.action.rolemapping.PutRoleMappingResponse; @@ -190,16 +187,11 @@ public void setupRoleMappings() throws Exception { Map> futures = Maps.newLinkedHashMapWithExpectedSize(content.size()); for (int i = 0; i < content.size(); i++) { final String name = "external_" + i; - final PutRoleMappingRequestBuilder builder; - try ( - XContentParser parser = XContentHelper.createParserNotCompressed( - LoggingDeprecationHandler.XCONTENT_PARSER_CONFIG, - new BytesArray(content.get(i)), - XContentType.JSON - ) - ) { - builder = new PutRoleMappingRequestBuilder(client()).source(name, parser); - } + final PutRoleMappingRequestBuilder builder = new PutRoleMappingRequestBuilder(client()).source( + name, + new BytesArray(content.get(i)), + 
XContentType.JSON + ); futures.put(name, builder.execute()); } for (String mappingName : futures.keySet()) { From de725aef80bc72cd0617f0ee27ab81e727d84e71 Mon Sep 17 00:00:00 2001 From: Bogdan Pintea Date: Tue, 7 May 2024 12:59:01 +0200 Subject: [PATCH 023/117] Add docs clarifications on DATE_DIFF args (#108301) This adds some clarifications on the time unit strings the function takes as arguments, noting the differences between these and the time span literals, as well as the abbreviations' source. --- .../esql/functions/description/date_diff.asciidoc | 6 ++++++ .../esql/expression/function/scalar/date/DateDiff.java | 8 +++++++- 2 files changed, 13 insertions(+), 1 deletion(-) diff --git a/docs/reference/esql/functions/description/date_diff.asciidoc b/docs/reference/esql/functions/description/date_diff.asciidoc index 3dd19b5885902..dbc03d59a2bf7 100644 --- a/docs/reference/esql/functions/description/date_diff.asciidoc +++ b/docs/reference/esql/functions/description/date_diff.asciidoc @@ -25,3 +25,9 @@ s|abbreviations | microsecond | microseconds, mcs | nanosecond | nanoseconds, ns |=== + +Note that while there is an overlap between the function's supported units and +{esql}'s supported time span literals, these sets are distinct and not +interchangeable. Similarly, the supported abbreviations are conveniently shared +with implementations of this function in other established products and not +necessarily common with the date-time nomenclature used by {es}. 
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiff.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiff.java index 6dc859afe37e3..55dff823806d2 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiff.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiff.java @@ -145,7 +145,13 @@ public static Part resolve(String dateTimeUnit) { | millisecond | milliseconds, ms | microsecond | microseconds, mcs | nanosecond | nanoseconds, ns - |===""", examples = @Example(file = "date", tag = "docsDateDiff")) + |=== + + Note that while there is an overlap between the function's supported units and + {esql}'s supported time span literals, these sets are distinct and not + interchangeable. Similarly, the supported abbreviations are conveniently shared + with implementations of this function in other established products and not + necessarily common with the date-time nomenclature used by {es}.""", examples = @Example(file = "date", tag = "docsDateDiff")) public DateDiff( Source source, @Param(name = "unit", type = { "keyword", "text" }, description = "Time difference unit") Expression unit, From b6ded320f0df69ba286560b36bfa75053174c670 Mon Sep 17 00:00:00 2001 From: Bogdan Pintea Date: Tue, 7 May 2024 13:06:59 +0200 Subject: [PATCH 024/117] Reenable JdbcCsvSpecIT inWithCompatibleDateTypes (#108017) Reenable org.elasticsearch.xpack.sql.qa.single_node.JdbcCsvSpecIT "test {filter.testInWithCompatibleDateTypes}", no longer reproducible. 
--- x-pack/plugin/sql/qa/server/src/main/resources/filter.csv-spec | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/x-pack/plugin/sql/qa/server/src/main/resources/filter.csv-spec b/x-pack/plugin/sql/qa/server/src/main/resources/filter.csv-spec index 1615ee3a64256..f6a6cec5dc65b 100644 --- a/x-pack/plugin/sql/qa/server/src/main/resources/filter.csv-spec +++ b/x-pack/plugin/sql/qa/server/src/main/resources/filter.csv-spec @@ -130,8 +130,7 @@ SELECT COUNT(*), TRUNCATE(emp_no, -2) t FROM test_emp WHERE 'aaabbb' RLIKE 'a{2, 1 |10100 ; -// AwaitsFix https://github.com/elastic/elasticsearch/issues/96805 -inWithCompatibleDateTypes-Ignore +inWithCompatibleDateTypes SELECT birth_date FROM test_emp WHERE birth_date IN ({d '1959-07-23'}, CAST('1959-12-25T00:00:00' AS TIMESTAMP), '1964-06-02T00:00:00.000Z') OR birth_date IS NULL ORDER BY birth_date; birth_date:ts From 9b7e9b5d591b872970c6f8e364ec508f8a232439 Mon Sep 17 00:00:00 2001 From: Liam Thompson <32779855+leemthompo@users.noreply.github.com> Date: Tue, 7 May 2024 14:12:50 +0200 Subject: [PATCH 025/117] [DOCS] ESQL goes GA (#108342) --- docs/reference/esql/esql-commands.asciidoc | 2 +- docs/reference/esql/esql-get-started.asciidoc | 3 --- docs/reference/esql/index.asciidoc | 2 -- docs/reference/esql/processing-commands/mv_expand.asciidoc | 2 ++ 4 files changed, 3 insertions(+), 6 deletions(-) diff --git a/docs/reference/esql/esql-commands.asciidoc b/docs/reference/esql/esql-commands.asciidoc index 708127718fe38..1f07361b89aac 100644 --- a/docs/reference/esql/esql-commands.asciidoc +++ b/docs/reference/esql/esql-commands.asciidoc @@ -39,7 +39,7 @@ image::images/esql/processing-command.svg[A processing command changing an input * <> * <> * <> -* <> +* experimental:[] <> * <> * <> * <> diff --git a/docs/reference/esql/esql-get-started.asciidoc b/docs/reference/esql/esql-get-started.asciidoc index 663b2f8ecd249..b7928898a3bbb 100644 --- a/docs/reference/esql/esql-get-started.asciidoc +++ 
b/docs/reference/esql/esql-get-started.asciidoc @@ -1,12 +1,9 @@ [[esql-getting-started]] == Getting started with {esql} queries - ++++ Getting started ++++ -preview::["Do not use {esql} on production environments. This functionality is in technical preview and may be changed or removed in a future release. Elastic will work to fix any issues, but features in technical preview are not subject to the support SLA of official GA features."] - This guide shows how you can use {esql} to query and aggregate your data. [TIP] diff --git a/docs/reference/esql/index.asciidoc b/docs/reference/esql/index.asciidoc index 5cb02064dc794..54627a6de3c62 100644 --- a/docs/reference/esql/index.asciidoc +++ b/docs/reference/esql/index.asciidoc @@ -6,8 +6,6 @@ [partintro] -preview::["Do not use {esql} on production environments. This functionality is in technical preview and may be changed or removed in a future release. Elastic will work to fix any issues, but features in technical preview are not subject to the support SLA of official GA features."] - The {es} Query Language ({esql}) provides a powerful way to filter, transform, and analyze data stored in {es}, and in the future in other runtimes. 
It is designed to be easy to learn and use, by end users, SRE teams, application diff --git a/docs/reference/esql/processing-commands/mv_expand.asciidoc b/docs/reference/esql/processing-commands/mv_expand.asciidoc index 46dc4fd0a33cf..9e1cb5573c381 100644 --- a/docs/reference/esql/processing-commands/mv_expand.asciidoc +++ b/docs/reference/esql/processing-commands/mv_expand.asciidoc @@ -2,6 +2,8 @@ [[esql-mv_expand]] === `MV_EXPAND` +preview::[] + **Syntax** [source,esql] From 1a7c46a19778ea0f8d74e29da4a56ccd6bddc1a4 Mon Sep 17 00:00:00 2001 From: Jonathan Buttner <56361221+jonathan-buttner@users.noreply.github.com> Date: Tue, 7 May 2024 08:23:40 -0400 Subject: [PATCH 026/117] Filtering out rate limit service settings field (#108261) --- .../inference/FilteredXContent.java | 21 +++++ .../inference/ServiceSettings.java | 7 +- .../AzureOpenAiEmbeddingsServiceSettings.java | 24 ++--- .../cohere/CohereServiceSettings.java | 18 ++-- .../CohereEmbeddingsServiceSettings.java | 11 ++- .../rerank/CohereRerankServiceSettings.java | 10 +- .../HuggingFaceServiceSettings.java | 20 ++-- .../HuggingFaceElserServiceSettings.java | 20 ++-- .../OpenAiChatCompletionServiceSettings.java | 40 ++++---- .../OpenAiEmbeddingsServiceSettings.java | 21 ++--- .../settings/FilteredXContentObject.java | 32 +++++++ ...eOpenAiEmbeddingsServiceSettingsTests.java | 4 +- .../CohereEmbeddingsServiceSettingsTests.java | 33 +++++++ .../CohereRerankServiceSettingsTests.java | 94 +++++++++++++++++++ .../HuggingFaceServiceSettingsTests.java | 12 +++ .../HuggingFaceElserServiceSettingsTests.java | 28 ++++++ ...nAiChatCompletionServiceSettingsTests.java | 13 +++ .../OpenAiEmbeddingsServiceSettingsTests.java | 4 +- 18 files changed, 320 insertions(+), 92 deletions(-) create mode 100644 server/src/main/java/org/elasticsearch/inference/FilteredXContent.java create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/settings/FilteredXContentObject.java create mode 100644 
x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/rerank/CohereRerankServiceSettingsTests.java diff --git a/server/src/main/java/org/elasticsearch/inference/FilteredXContent.java b/server/src/main/java/org/elasticsearch/inference/FilteredXContent.java new file mode 100644 index 0000000000000..1c63aea61b7c8 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/inference/FilteredXContent.java @@ -0,0 +1,21 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.inference; + +import org.elasticsearch.xcontent.ToXContentObject; + +/** + * Provides a contract for retrieving exposed fields. + */ +public interface FilteredXContent { + /** + * Returns a {@link ToXContentObject} that only writes the exposed fields. Any hidden fields are not written. + */ + ToXContentObject getFilteredXContentObject(); +} diff --git a/server/src/main/java/org/elasticsearch/inference/ServiceSettings.java b/server/src/main/java/org/elasticsearch/inference/ServiceSettings.java index 6c1a01acb0dab..b143f74c848c1 100644 --- a/server/src/main/java/org/elasticsearch/inference/ServiceSettings.java +++ b/server/src/main/java/org/elasticsearch/inference/ServiceSettings.java @@ -12,12 +12,7 @@ import org.elasticsearch.index.mapper.vectors.DenseVectorFieldMapper; import org.elasticsearch.xcontent.ToXContentObject; -public interface ServiceSettings extends ToXContentObject, VersionedNamedWriteable { - - /** - * Returns a {@link ToXContentObject} that only writes the exposed fields. Any hidden fields are not written. 
- */ - ToXContentObject getFilteredXContentObject(); +public interface ServiceSettings extends ToXContentObject, VersionedNamedWriteable, FilteredXContent { /** * Similarity used in the service. Will be null if not applicable. diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/embeddings/AzureOpenAiEmbeddingsServiceSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/embeddings/AzureOpenAiEmbeddingsServiceSettings.java index 7ed1a9c2d4786..478af679938d6 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/embeddings/AzureOpenAiEmbeddingsServiceSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/embeddings/AzureOpenAiEmbeddingsServiceSettings.java @@ -17,11 +17,11 @@ import org.elasticsearch.inference.ModelConfigurations; import org.elasticsearch.inference.ServiceSettings; import org.elasticsearch.inference.SimilarityMeasure; -import org.elasticsearch.xcontent.ToXContentObject; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xpack.inference.services.ConfigurationParseContext; import org.elasticsearch.xpack.inference.services.ServiceUtils; import org.elasticsearch.xpack.inference.services.azureopenai.AzureOpenAiRateLimitServiceSettings; +import org.elasticsearch.xpack.inference.services.settings.FilteredXContentObject; import org.elasticsearch.xpack.inference.services.settings.RateLimitSettings; import java.io.IOException; @@ -42,7 +42,10 @@ /** * Defines the service settings for interacting with OpenAI's text embedding models. 
*/ -public class AzureOpenAiEmbeddingsServiceSettings implements ServiceSettings, AzureOpenAiRateLimitServiceSettings { +public class AzureOpenAiEmbeddingsServiceSettings extends FilteredXContentObject + implements + ServiceSettings, + AzureOpenAiRateLimitServiceSettings { public static final String NAME = "azure_openai_embeddings_service_settings"; @@ -248,13 +251,15 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws toXContentFragmentOfExposedFields(builder, params); + rateLimitSettings.toXContent(builder, params); builder.field(DIMENSIONS_SET_BY_USER, dimensionsSetByUser); builder.endObject(); return builder; } - private void toXContentFragmentOfExposedFields(XContentBuilder builder, Params params) throws IOException { + @Override + protected XContentBuilder toXContentFragmentOfExposedFields(XContentBuilder builder, Params params) throws IOException { builder.field(RESOURCE_NAME, resourceName); builder.field(DEPLOYMENT_ID, deploymentId); builder.field(API_VERSION, apiVersion); @@ -268,19 +273,8 @@ private void toXContentFragmentOfExposedFields(XContentBuilder builder, Params p if (similarity != null) { builder.field(SIMILARITY, similarity); } - rateLimitSettings.toXContent(builder, params); - } - @Override - public ToXContentObject getFilteredXContentObject() { - return (builder, params) -> { - builder.startObject(); - - toXContentFragmentOfExposedFields(builder, params); - - builder.endObject(); - return builder; - }; + return builder; } @Override diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/CohereServiceSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/CohereServiceSettings.java index 33136c339e757..4c39d35e2ff03 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/CohereServiceSettings.java +++ 
b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/CohereServiceSettings.java @@ -18,9 +18,9 @@ import org.elasticsearch.inference.ModelConfigurations; import org.elasticsearch.inference.ServiceSettings; import org.elasticsearch.inference.SimilarityMeasure; -import org.elasticsearch.xcontent.ToXContentObject; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xpack.inference.services.ConfigurationParseContext; +import org.elasticsearch.xpack.inference.services.settings.FilteredXContentObject; import org.elasticsearch.xpack.inference.services.settings.RateLimitSettings; import java.io.IOException; @@ -38,7 +38,7 @@ import static org.elasticsearch.xpack.inference.services.ServiceUtils.extractSimilarity; import static org.elasticsearch.xpack.inference.services.ServiceUtils.removeAsType; -public class CohereServiceSettings implements ServiceSettings, CohereRateLimitServiceSettings { +public class CohereServiceSettings extends FilteredXContentObject implements ServiceSettings, CohereRateLimitServiceSettings { public static final String NAME = "cohere_service_settings"; public static final String OLD_MODEL_ID_FIELD = "model"; @@ -173,6 +173,14 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws } public XContentBuilder toXContentFragment(XContentBuilder builder, Params params) throws IOException { + toXContentFragmentOfExposedFields(builder, params); + rateLimitSettings.toXContent(builder, params); + + return builder; + } + + @Override + public XContentBuilder toXContentFragmentOfExposedFields(XContentBuilder builder, Params params) throws IOException { if (uri != null) { builder.field(URL, uri.toString()); } @@ -188,16 +196,10 @@ public XContentBuilder toXContentFragment(XContentBuilder builder, Params params if (modelId != null) { builder.field(MODEL_ID, modelId); } - rateLimitSettings.toXContent(builder, params); return builder; } - @Override - public ToXContentObject 
getFilteredXContentObject() { - return this; - } - @Override public TransportVersion getMinimalSupportedVersion() { return TransportVersions.ML_INFERENCE_COHERE_EMBEDDINGS_ADDED; diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/embeddings/CohereEmbeddingsServiceSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/embeddings/CohereEmbeddingsServiceSettings.java index 7d78091a20106..00a406a7a3efa 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/embeddings/CohereEmbeddingsServiceSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/embeddings/CohereEmbeddingsServiceSettings.java @@ -16,11 +16,11 @@ import org.elasticsearch.inference.ModelConfigurations; import org.elasticsearch.inference.ServiceSettings; import org.elasticsearch.inference.SimilarityMeasure; -import org.elasticsearch.xcontent.ToXContentObject; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xpack.inference.services.ConfigurationParseContext; import org.elasticsearch.xpack.inference.services.ServiceUtils; import org.elasticsearch.xpack.inference.services.cohere.CohereServiceSettings; +import org.elasticsearch.xpack.inference.services.settings.FilteredXContentObject; import java.io.IOException; import java.util.EnumSet; @@ -30,7 +30,7 @@ import static org.elasticsearch.xpack.inference.services.ServiceUtils.extractOptionalEnum; -public class CohereEmbeddingsServiceSettings implements ServiceSettings { +public class CohereEmbeddingsServiceSettings extends FilteredXContentObject implements ServiceSettings { public static final String NAME = "cohere_embeddings_service_settings"; static final String EMBEDDING_TYPE = "embedding_type"; @@ -160,8 +160,11 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws } @Override - public ToXContentObject 
getFilteredXContentObject() { - return this; + protected XContentBuilder toXContentFragmentOfExposedFields(XContentBuilder builder, Params params) throws IOException { + commonSettings.toXContentFragmentOfExposedFields(builder, params); + builder.field(EMBEDDING_TYPE, elementType()); + + return builder; } @Override diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/rerank/CohereRerankServiceSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/rerank/CohereRerankServiceSettings.java index 19538be3734ba..6a74fe533e3db 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/rerank/CohereRerankServiceSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/rerank/CohereRerankServiceSettings.java @@ -13,16 +13,16 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.inference.ServiceSettings; -import org.elasticsearch.xcontent.ToXContentObject; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xpack.inference.services.ConfigurationParseContext; import org.elasticsearch.xpack.inference.services.cohere.CohereServiceSettings; +import org.elasticsearch.xpack.inference.services.settings.FilteredXContentObject; import java.io.IOException; import java.util.Map; import java.util.Objects; -public class CohereRerankServiceSettings implements ServiceSettings { +public class CohereRerankServiceSettings extends FilteredXContentObject implements ServiceSettings { public static final String NAME = "cohere_rerank_service_settings"; public static CohereRerankServiceSettings fromMap(Map map, ConfigurationParseContext parseContext) { @@ -62,8 +62,10 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws } @Override - public ToXContentObject 
getFilteredXContentObject() { - return this; + protected XContentBuilder toXContentFragmentOfExposedFields(XContentBuilder builder, Params params) throws IOException { + commonSettings.toXContentFragmentOfExposedFields(builder, params); + + return builder; } @Override diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceServiceSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceServiceSettings.java index beb9035640024..af2c433663ac4 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceServiceSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceServiceSettings.java @@ -17,8 +17,8 @@ import org.elasticsearch.inference.ModelConfigurations; import org.elasticsearch.inference.ServiceSettings; import org.elasticsearch.inference.SimilarityMeasure; -import org.elasticsearch.xcontent.ToXContentObject; import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xpack.inference.services.settings.FilteredXContentObject; import org.elasticsearch.xpack.inference.services.settings.RateLimitSettings; import java.io.IOException; @@ -36,7 +36,7 @@ import static org.elasticsearch.xpack.inference.services.ServiceUtils.extractSimilarity; import static org.elasticsearch.xpack.inference.services.ServiceUtils.removeAsType; -public class HuggingFaceServiceSettings implements ServiceSettings, HuggingFaceRateLimitServiceSettings { +public class HuggingFaceServiceSettings extends FilteredXContentObject implements ServiceSettings, HuggingFaceRateLimitServiceSettings { public static final String NAME = "hugging_face_service_settings"; // At the time of writing HuggingFace hasn't posted the default rate limit for inference endpoints so the value here is only a guess @@ -118,6 +118,14 @@ public 
HuggingFaceServiceSettings(StreamInput in) throws IOException { @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); + toXContentFragmentOfExposedFields(builder, params); + rateLimitSettings.toXContent(builder, params); + builder.endObject(); + return builder; + } + + @Override + protected XContentBuilder toXContentFragmentOfExposedFields(XContentBuilder builder, Params params) throws IOException { builder.field(URL, uri.toString()); if (similarity != null) { builder.field(SIMILARITY, similarity); @@ -128,14 +136,8 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws if (maxInputTokens != null) { builder.field(MAX_INPUT_TOKENS, maxInputTokens); } - rateLimitSettings.toXContent(builder, params); - builder.endObject(); - return builder; - } - @Override - public ToXContentObject getFilteredXContentObject() { - return this; + return builder; } @Override diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/elser/HuggingFaceElserServiceSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/elser/HuggingFaceElserServiceSettings.java index a48ccd14fdb66..1f337de450ef9 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/elser/HuggingFaceElserServiceSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/elser/HuggingFaceElserServiceSettings.java @@ -14,9 +14,9 @@ import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.core.Nullable; import org.elasticsearch.inference.ServiceSettings; -import org.elasticsearch.xcontent.ToXContentObject; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xpack.inference.services.huggingface.HuggingFaceRateLimitServiceSettings; +import 
org.elasticsearch.xpack.inference.services.settings.FilteredXContentObject; import org.elasticsearch.xpack.inference.services.settings.RateLimitSettings; import java.io.IOException; @@ -28,7 +28,10 @@ import static org.elasticsearch.xpack.inference.services.ServiceUtils.createUri; import static org.elasticsearch.xpack.inference.services.huggingface.HuggingFaceServiceSettings.extractUri; -public class HuggingFaceElserServiceSettings implements ServiceSettings, HuggingFaceRateLimitServiceSettings { +public class HuggingFaceElserServiceSettings extends FilteredXContentObject + implements + ServiceSettings, + HuggingFaceRateLimitServiceSettings { public static final String NAME = "hugging_face_elser_service_settings"; static final String URL = "url"; @@ -56,7 +59,8 @@ public HuggingFaceElserServiceSettings(String url) { rateLimitSettings = DEFAULT_RATE_LIMIT_SETTINGS; } - private HuggingFaceElserServiceSettings(URI uri, @Nullable RateLimitSettings rateLimitSettings) { + // default for testing + HuggingFaceElserServiceSettings(URI uri, @Nullable RateLimitSettings rateLimitSettings) { this.uri = Objects.requireNonNull(uri); this.rateLimitSettings = Objects.requireNonNullElse(rateLimitSettings, DEFAULT_RATE_LIMIT_SETTINGS); } @@ -88,8 +92,7 @@ public int maxInputTokens() { @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); - builder.field(URL, uri.toString()); - builder.field(MAX_INPUT_TOKENS, ELSER_TOKEN_LIMIT); + toXContentFragmentOfExposedFields(builder, params); rateLimitSettings.toXContent(builder, params); builder.endObject(); @@ -97,8 +100,11 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws } @Override - public ToXContentObject getFilteredXContentObject() { - return this; + protected XContentBuilder toXContentFragmentOfExposedFields(XContentBuilder builder, Params params) throws IOException { + builder.field(URL, uri.toString()); + 
builder.field(MAX_INPUT_TOKENS, ELSER_TOKEN_LIMIT); + + return builder; } @Override diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/completion/OpenAiChatCompletionServiceSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/completion/OpenAiChatCompletionServiceSettings.java index 7703476a14dea..5105bb59e048f 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/completion/OpenAiChatCompletionServiceSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/completion/OpenAiChatCompletionServiceSettings.java @@ -15,9 +15,9 @@ import org.elasticsearch.core.Nullable; import org.elasticsearch.inference.ModelConfigurations; import org.elasticsearch.inference.ServiceSettings; -import org.elasticsearch.xcontent.ToXContentObject; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xpack.inference.services.openai.OpenAiRateLimitServiceSettings; +import org.elasticsearch.xpack.inference.services.settings.FilteredXContentObject; import org.elasticsearch.xpack.inference.services.settings.RateLimitSettings; import java.io.IOException; @@ -38,7 +38,7 @@ /** * Defines the service settings for interacting with OpenAI's chat completion models. 
*/ -public class OpenAiChatCompletionServiceSettings implements ServiceSettings, OpenAiRateLimitServiceSettings { +public class OpenAiChatCompletionServiceSettings extends FilteredXContentObject implements ServiceSettings, OpenAiRateLimitServiceSettings { public static final String NAME = "openai_completion_service_settings"; @@ -141,24 +141,29 @@ public Integer maxInputTokens() { public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); - { - builder.field(MODEL_ID, modelId); + toXContentFragmentOfExposedFields(builder, params); + rateLimitSettings.toXContent(builder, params); - if (uri != null) { - builder.field(URL, uri.toString()); - } + builder.endObject(); + return builder; + } - if (organizationId != null) { - builder.field(ORGANIZATION, organizationId); - } + @Override + protected XContentBuilder toXContentFragmentOfExposedFields(XContentBuilder builder, Params params) throws IOException { + builder.field(MODEL_ID, modelId); - if (maxInputTokens != null) { - builder.field(MAX_INPUT_TOKENS, maxInputTokens); - } + if (uri != null) { + builder.field(URL, uri.toString()); + } + + if (organizationId != null) { + builder.field(ORGANIZATION, organizationId); + } + + if (maxInputTokens != null) { + builder.field(MAX_INPUT_TOKENS, maxInputTokens); } - rateLimitSettings.toXContent(builder, params); - builder.endObject(); return builder; } @@ -184,11 +189,6 @@ public void writeTo(StreamOutput out) throws IOException { } } - @Override - public ToXContentObject getFilteredXContentObject() { - return this; - } - @Override public boolean equals(Object object) { if (this == object) return true; diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsServiceSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsServiceSettings.java index 8edbb7bc14f2c..690e8f0ddd947 
100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsServiceSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsServiceSettings.java @@ -17,10 +17,10 @@ import org.elasticsearch.inference.ModelConfigurations; import org.elasticsearch.inference.ServiceSettings; import org.elasticsearch.inference.SimilarityMeasure; -import org.elasticsearch.xcontent.ToXContentObject; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xpack.inference.services.ConfigurationParseContext; import org.elasticsearch.xpack.inference.services.openai.OpenAiRateLimitServiceSettings; +import org.elasticsearch.xpack.inference.services.settings.FilteredXContentObject; import org.elasticsearch.xpack.inference.services.settings.RateLimitSettings; import java.io.IOException; @@ -44,7 +44,7 @@ /** * Defines the service settings for interacting with OpenAI's text embedding models. 
*/ -public class OpenAiEmbeddingsServiceSettings implements ServiceSettings, OpenAiRateLimitServiceSettings { +public class OpenAiEmbeddingsServiceSettings extends FilteredXContentObject implements ServiceSettings, OpenAiRateLimitServiceSettings { public static final String NAME = "openai_service_settings"; @@ -261,6 +261,7 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws builder.startObject(); toXContentFragmentOfExposedFields(builder, params); + rateLimitSettings.toXContent(builder, params); if (dimensionsSetByUser != null) { builder.field(DIMENSIONS_SET_BY_USER, dimensionsSetByUser); @@ -270,7 +271,8 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws return builder; } - private void toXContentFragmentOfExposedFields(XContentBuilder builder, Params params) throws IOException { + @Override + protected XContentBuilder toXContentFragmentOfExposedFields(XContentBuilder builder, Params params) throws IOException { builder.field(MODEL_ID, modelId); if (uri != null) { builder.field(URL, uri.toString()); @@ -287,19 +289,8 @@ private void toXContentFragmentOfExposedFields(XContentBuilder builder, Params p if (maxInputTokens != null) { builder.field(MAX_INPUT_TOKENS, maxInputTokens); } - rateLimitSettings.toXContent(builder, params); - } - @Override - public ToXContentObject getFilteredXContentObject() { - return (builder, params) -> { - builder.startObject(); - - toXContentFragmentOfExposedFields(builder, params); - - builder.endObject(); - return builder; - }; + return builder; } @Override diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/settings/FilteredXContentObject.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/settings/FilteredXContentObject.java new file mode 100644 index 0000000000000..655e50e073972 --- /dev/null +++ 
b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/settings/FilteredXContentObject.java @@ -0,0 +1,32 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.services.settings; + +import org.elasticsearch.inference.FilteredXContent; +import org.elasticsearch.xcontent.ToXContent; +import org.elasticsearch.xcontent.ToXContentObject; +import org.elasticsearch.xcontent.XContentBuilder; + +import java.io.IOException; + +public abstract class FilteredXContentObject implements FilteredXContent { + @Override + public ToXContentObject getFilteredXContentObject() { + return (builder, params) -> { + builder.startObject(); + + toXContentFragmentOfExposedFields(builder, params); + + builder.endObject(); + return builder; + }; + } + + protected abstract XContentBuilder toXContentFragmentOfExposedFields(XContentBuilder builder, ToXContent.Params params) + throws IOException; +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureopenai/embeddings/AzureOpenAiEmbeddingsServiceSettingsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureopenai/embeddings/AzureOpenAiEmbeddingsServiceSettingsTests.java index 79bd28fd8b600..8882a97210de2 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureopenai/embeddings/AzureOpenAiEmbeddingsServiceSettingsTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureopenai/embeddings/AzureOpenAiEmbeddingsServiceSettingsTests.java @@ -390,7 +390,7 @@ public void testToXContent_WritesAllValues() throws IOException { 
"dimensions":1024,"max_input_tokens":512,"rate_limit":{"requests_per_minute":3},"dimensions_set_by_user":false}""")); } - public void testToFilteredXContent_WritesAllValues_ExceptDimensionsSetByUser() throws IOException { + public void testToFilteredXContent_WritesAllValues_Except_DimensionsSetByUser_RateLimit() throws IOException { var entity = new AzureOpenAiEmbeddingsServiceSettings( "resource", "deployment", @@ -409,7 +409,7 @@ public void testToFilteredXContent_WritesAllValues_ExceptDimensionsSetByUser() t assertThat(xContentResult, CoreMatchers.is(""" {"resource_name":"resource","deployment_id":"deployment","api_version":"apiVersion",""" + """ - "dimensions":1024,"max_input_tokens":512,"rate_limit":{"requests_per_minute":1}}""")); + "dimensions":1024,"max_input_tokens":512}""")); } @Override diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/embeddings/CohereEmbeddingsServiceSettingsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/embeddings/CohereEmbeddingsServiceSettingsTests.java index 24edb9bfe87f0..1ac97642f0b85 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/embeddings/CohereEmbeddingsServiceSettingsTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/embeddings/CohereEmbeddingsServiceSettingsTests.java @@ -16,6 +16,9 @@ import org.elasticsearch.index.mapper.vectors.DenseVectorFieldMapper; import org.elasticsearch.inference.SimilarityMeasure; import org.elasticsearch.test.AbstractWireSerializingTestCase; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xcontent.XContentFactory; +import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xpack.core.ml.inference.MlInferenceNamedXContentProvider; import org.elasticsearch.xpack.inference.InferenceNamedWriteablesProvider; import 
org.elasticsearch.xpack.inference.services.ConfigurationParseContext; @@ -23,6 +26,7 @@ import org.elasticsearch.xpack.inference.services.ServiceUtils; import org.elasticsearch.xpack.inference.services.cohere.CohereServiceSettings; import org.elasticsearch.xpack.inference.services.cohere.CohereServiceSettingsTests; +import org.elasticsearch.xpack.inference.services.settings.RateLimitSettings; import org.hamcrest.MatcherAssert; import java.io.IOException; @@ -314,6 +318,35 @@ public void testFromCohereOrDenseVectorEnumValues() { assertTrue(validation.validationErrors().isEmpty()); } + public void testToXContent_WritesAllValues() throws IOException { + var serviceSettings = new CohereEmbeddingsServiceSettings( + new CohereServiceSettings("url", SimilarityMeasure.COSINE, 5, 10, "model_id", new RateLimitSettings(3)), + CohereEmbeddingType.INT8 + ); + + XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON); + serviceSettings.toXContent(builder, null); + String xContentResult = Strings.toString(builder); + assertThat(xContentResult, is(""" + {"url":"url","similarity":"cosine","dimensions":5,"max_input_tokens":10,"model_id":"model_id",""" + """ + "rate_limit":{"requests_per_minute":3},"embedding_type":"byte"}""")); + } + + public void testToXContent_WritesAllValues_Except_RateLimit() throws IOException { + var serviceSettings = new CohereEmbeddingsServiceSettings( + new CohereServiceSettings("url", SimilarityMeasure.COSINE, 5, 10, "model_id", new RateLimitSettings(3)), + CohereEmbeddingType.INT8 + ); + + XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON); + var filteredXContent = serviceSettings.getFilteredXContentObject(); + filteredXContent.toXContent(builder, null); + String xContentResult = Strings.toString(builder); + assertThat(xContentResult, is(""" + {"url":"url","similarity":"cosine","dimensions":5,"max_input_tokens":10,"model_id":"model_id",""" + """ + "embedding_type":"byte"}""")); + } + @Override protected 
Writeable.Reader instanceReader() { return CohereEmbeddingsServiceSettings::new; diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/rerank/CohereRerankServiceSettingsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/rerank/CohereRerankServiceSettingsTests.java new file mode 100644 index 0000000000000..cb30077fec174 --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/rerank/CohereRerankServiceSettingsTests.java @@ -0,0 +1,94 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.services.cohere.rerank; + +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.core.Nullable; +import org.elasticsearch.inference.SimilarityMeasure; +import org.elasticsearch.test.AbstractWireSerializingTestCase; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xcontent.XContentFactory; +import org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.xpack.core.ml.inference.MlInferenceNamedXContentProvider; +import org.elasticsearch.xpack.inference.InferenceNamedWriteablesProvider; +import org.elasticsearch.xpack.inference.services.cohere.CohereServiceSettings; +import org.elasticsearch.xpack.inference.services.cohere.CohereServiceSettingsTests; +import org.elasticsearch.xpack.inference.services.settings.RateLimitSettings; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +import static org.hamcrest.Matchers.is; + +public class 
CohereRerankServiceSettingsTests extends AbstractWireSerializingTestCase { + public static CohereRerankServiceSettings createRandom() { + var commonSettings = CohereServiceSettingsTests.createRandom(); + + return new CohereRerankServiceSettings(commonSettings); + } + + public void testToXContent_WritesAllValues() throws IOException { + var serviceSettings = new CohereRerankServiceSettings( + new CohereServiceSettings("url", SimilarityMeasure.COSINE, 5, 10, "model_id", new RateLimitSettings(3)) + ); + + XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON); + serviceSettings.toXContent(builder, null); + String xContentResult = Strings.toString(builder); + // TODO we probably shouldn't allow configuring these fields for reranking + assertThat(xContentResult, is(""" + {"url":"url","similarity":"cosine","dimensions":5,"max_input_tokens":10,"model_id":"model_id",""" + """ + "rate_limit":{"requests_per_minute":3}}""")); + } + + public void testToXContent_WritesAllValues_Except_RateLimit() throws IOException { + var serviceSettings = new CohereRerankServiceSettings( + new CohereServiceSettings("url", SimilarityMeasure.COSINE, 5, 10, "model_id", new RateLimitSettings(3)) + ); + + XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON); + var filteredXContent = serviceSettings.getFilteredXContentObject(); + filteredXContent.toXContent(builder, null); + String xContentResult = Strings.toString(builder); + // TODO we probably shouldn't allow configuring these fields for reranking + assertThat(xContentResult, is(""" + {"url":"url","similarity":"cosine","dimensions":5,"max_input_tokens":10,"model_id":"model_id"}""")); + } + + @Override + protected Writeable.Reader instanceReader() { + return CohereRerankServiceSettings::new; + } + + @Override + protected CohereRerankServiceSettings createTestInstance() { + return createRandom(); + } + + @Override + protected CohereRerankServiceSettings mutateInstance(CohereRerankServiceSettings 
instance) throws IOException { + return null; + } + + @Override + protected NamedWriteableRegistry getNamedWriteableRegistry() { + List entries = new ArrayList<>(); + entries.addAll(new MlInferenceNamedXContentProvider().getNamedWriteables()); + entries.addAll(InferenceNamedWriteablesProvider.getNamedWriteables()); + return new NamedWriteableRegistry(entries); + } + + public static Map getServiceSettingsMap(@Nullable String url, @Nullable String model) { + return new HashMap<>(CohereServiceSettingsTests.getServiceSettingsMap(url, model)); + } +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceServiceSettingsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceServiceSettingsTests.java index 8ebf5b1dfd615..d81c94a0dedda 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceServiceSettingsTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceServiceSettingsTests.java @@ -173,6 +173,18 @@ public void testToXContent_WritesAllValues() throws IOException { {"url":"url","rate_limit":{"requests_per_minute":3}}""")); } + public void testToXContent_WritesAllValues_Except_RateLimit() throws IOException { + var serviceSettings = new HuggingFaceServiceSettings(ServiceUtils.createUri("url"), null, null, null, new RateLimitSettings(3)); + + XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON); + var filteredXContent = serviceSettings.getFilteredXContentObject(); + filteredXContent.toXContent(builder, null); + String xContentResult = org.elasticsearch.common.Strings.toString(builder); + + assertThat(xContentResult, is(""" + {"url":"url"}""")); + } + @Override protected Writeable.Reader instanceReader() { return HuggingFaceServiceSettings::new; diff --git 
a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/huggingface/elser/HuggingFaceElserServiceSettingsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/huggingface/elser/HuggingFaceElserServiceSettingsTests.java index 525f701323511..eadefddecce70 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/huggingface/elser/HuggingFaceElserServiceSettingsTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/huggingface/elser/HuggingFaceElserServiceSettingsTests.java @@ -11,6 +11,11 @@ import org.elasticsearch.common.ValidationException; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.test.AbstractWireSerializingTestCase; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xcontent.XContentFactory; +import org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.xpack.inference.services.ServiceUtils; +import org.elasticsearch.xpack.inference.services.settings.RateLimitSettings; import java.io.IOException; import java.util.HashMap; @@ -82,6 +87,29 @@ public void testFromMap_InvalidUrl_ThrowsError() { ); } + public void testToXContent_WritesAllValues() throws IOException { + var serviceSettings = new HuggingFaceElserServiceSettings(ServiceUtils.createUri("url"), new RateLimitSettings(3)); + + XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON); + serviceSettings.toXContent(builder, null); + String xContentResult = org.elasticsearch.common.Strings.toString(builder); + + assertThat(xContentResult, is(""" + {"url":"url","max_input_tokens":512,"rate_limit":{"requests_per_minute":3}}""")); + } + + public void testToXContent_WritesAllValues_Except_RateLimit() throws IOException { + var serviceSettings = new HuggingFaceElserServiceSettings(ServiceUtils.createUri("url"), new RateLimitSettings(3)); + + XContentBuilder builder = 
XContentFactory.contentBuilder(XContentType.JSON); + var filteredXContent = serviceSettings.getFilteredXContentObject(); + filteredXContent.toXContent(builder, null); + String xContentResult = org.elasticsearch.common.Strings.toString(builder); + + assertThat(xContentResult, is(""" + {"url":"url","max_input_tokens":512}""")); + } + @Override protected Writeable.Reader instanceReader() { return HuggingFaceElserServiceSettings::new; diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/completion/OpenAiChatCompletionServiceSettingsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/completion/OpenAiChatCompletionServiceSettingsTests.java index 5531f1c14ddff..b9b4310699d07 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/completion/OpenAiChatCompletionServiceSettingsTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/completion/OpenAiChatCompletionServiceSettingsTests.java @@ -211,6 +211,19 @@ public void testToXContent_DoesNotWriteOptionalValues() throws IOException { {"model_id":"model","rate_limit":{"requests_per_minute":500}}""")); } + public void testToXContent_WritesAllValues_Except_RateLimit() throws IOException { + var serviceSettings = new OpenAiChatCompletionServiceSettings("model", "url", "org", 1024, new RateLimitSettings(2)); + + XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON); + var filteredXContent = serviceSettings.getFilteredXContentObject(); + filteredXContent.toXContent(builder, null); + String xContentResult = org.elasticsearch.common.Strings.toString(builder); + + assertThat(xContentResult, is(""" + {"model_id":"model","url":"url","organization_id":"org",""" + """ + "max_input_tokens":1024}""")); + } + @Override protected Writeable.Reader instanceReader() { return OpenAiChatCompletionServiceSettings::new; diff --git 
a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsServiceSettingsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsServiceSettingsTests.java index 92fb00a4061e2..d6deaa7da79f6 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsServiceSettingsTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsServiceSettingsTests.java @@ -405,7 +405,7 @@ public void testToFilteredXContent_WritesAllValues_ExceptDimensionsSetByUser() t assertThat(xContentResult, CoreMatchers.is(""" {"model_id":"model","url":"url","organization_id":"org","similarity":"dot_product",""" + """ - "dimensions":1,"max_input_tokens":2,"rate_limit":{"requests_per_minute":3000}}""")); + "dimensions":1,"max_input_tokens":2}""")); } public void testToFilteredXContent_WritesAllValues_WithSpecifiedRateLimit() throws IOException { @@ -427,7 +427,7 @@ public void testToFilteredXContent_WritesAllValues_WithSpecifiedRateLimit() thro assertThat(xContentResult, CoreMatchers.is(""" {"model_id":"model","url":"url","organization_id":"org","similarity":"dot_product",""" + """ - "dimensions":1,"max_input_tokens":2,"rate_limit":{"requests_per_minute":2000}}""")); + "dimensions":1,"max_input_tokens":2}""")); } @Override From 6f8f1286c6cda29dae7d95ddebac2b7b3cbedba8 Mon Sep 17 00:00:00 2001 From: Tim Grein Date: Tue, 7 May 2024 15:06:44 +0200 Subject: [PATCH 027/117] [Inference API] Remove explicit CoreMatchers prefix (#108355) --- .../openai/OpenAiChatCompletionActionTests.java | 5 ++--- .../azureopenai/AzureOpenAiSecretSettingsTests.java | 5 ++--- .../AzureOpenAiEmbeddingsServiceSettingsTests.java | 7 +++---- .../services/cohere/CohereServiceSettingsTests.java | 3 +-- .../embeddings/CohereEmbeddingsTaskSettingsTests.java | 3 +-- 
.../OpenAiEmbeddingsServiceSettingsTests.java | 11 +++++------ 6 files changed, 14 insertions(+), 20 deletions(-) diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/openai/OpenAiChatCompletionActionTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/openai/OpenAiChatCompletionActionTests.java index b802403dcd28d..e28c3e817b351 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/openai/OpenAiChatCompletionActionTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/openai/OpenAiChatCompletionActionTests.java @@ -31,7 +31,6 @@ import org.elasticsearch.xpack.inference.external.http.sender.HttpRequestSenderTests; import org.elasticsearch.xpack.inference.external.http.sender.Sender; import org.elasticsearch.xpack.inference.logging.ThrottlerManager; -import org.hamcrest.CoreMatchers; import org.junit.After; import org.junit.Before; @@ -272,8 +271,8 @@ public void testExecute_ThrowsException_WhenInputIsGreaterThanOne() throws IOExc var thrownException = expectThrows(ElasticsearchStatusException.class, () -> listener.actionGet(TIMEOUT)); - assertThat(thrownException.getMessage(), CoreMatchers.is("OpenAI completions only accepts 1 input")); - assertThat(thrownException.status(), CoreMatchers.is(RestStatus.BAD_REQUEST)); + assertThat(thrownException.getMessage(), is("OpenAI completions only accepts 1 input")); + assertThat(thrownException.status(), is(RestStatus.BAD_REQUEST)); } } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureopenai/AzureOpenAiSecretSettingsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureopenai/AzureOpenAiSecretSettingsTests.java index 97fa6efc962bb..d2b83d7b14e2b 100644 --- 
a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureopenai/AzureOpenAiSecretSettingsTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureopenai/AzureOpenAiSecretSettingsTests.java @@ -16,7 +16,6 @@ import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentFactory; import org.elasticsearch.xcontent.XContentType; -import org.hamcrest.CoreMatchers; import java.io.IOException; import java.util.HashMap; @@ -119,7 +118,7 @@ public void testToXContext_WritesApiKeyOnlyWhenEntraIdIsNull() throws IOExceptio String xContentResult = Strings.toString(builder); var expectedResult = Strings.format("{\"%s\":\"apikey\"}", API_KEY); - assertThat(xContentResult, CoreMatchers.is(expectedResult)); + assertThat(xContentResult, is(expectedResult)); } public void testToXContext_WritesEntraIdOnlyWhenApiKeyIsNull() throws IOException { @@ -129,7 +128,7 @@ public void testToXContext_WritesEntraIdOnlyWhenApiKeyIsNull() throws IOExceptio String xContentResult = Strings.toString(builder); var expectedResult = Strings.format("{\"%s\":\"entraid\"}", ENTRA_ID); - assertThat(xContentResult, CoreMatchers.is(expectedResult)); + assertThat(xContentResult, is(expectedResult)); } @Override diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureopenai/embeddings/AzureOpenAiEmbeddingsServiceSettingsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureopenai/embeddings/AzureOpenAiEmbeddingsServiceSettingsTests.java index 8882a97210de2..7c56ffad27c80 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureopenai/embeddings/AzureOpenAiEmbeddingsServiceSettingsTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureopenai/embeddings/AzureOpenAiEmbeddingsServiceSettingsTests.java @@ -21,7 +21,6 @@ import 
org.elasticsearch.xpack.inference.services.azureopenai.AzureOpenAiServiceFields; import org.elasticsearch.xpack.inference.services.settings.RateLimitSettings; import org.elasticsearch.xpack.inference.services.settings.RateLimitSettingsTests; -import org.hamcrest.CoreMatchers; import org.hamcrest.MatcherAssert; import java.io.IOException; @@ -364,7 +363,7 @@ public void testToXContent_WritesDimensionsSetByUserTrue() throws IOException { entity.toXContent(builder, null); String xContentResult = Strings.toString(builder); - assertThat(xContentResult, CoreMatchers.is(""" + assertThat(xContentResult, is(""" {"resource_name":"resource","deployment_id":"deployment","api_version":"apiVersion",""" + """ "rate_limit":{"requests_per_minute":2},"dimensions_set_by_user":true}""")); } @@ -385,7 +384,7 @@ public void testToXContent_WritesAllValues() throws IOException { entity.toXContent(builder, null); String xContentResult = Strings.toString(builder); - assertThat(xContentResult, CoreMatchers.is(""" + assertThat(xContentResult, is(""" {"resource_name":"resource","deployment_id":"deployment","api_version":"apiVersion",""" + """ "dimensions":1024,"max_input_tokens":512,"rate_limit":{"requests_per_minute":3},"dimensions_set_by_user":false}""")); } @@ -407,7 +406,7 @@ public void testToFilteredXContent_WritesAllValues_Except_DimensionsSetByUser_Ra filteredXContent.toXContent(builder, null); String xContentResult = Strings.toString(builder); - assertThat(xContentResult, CoreMatchers.is(""" + assertThat(xContentResult, is(""" {"resource_name":"resource","deployment_id":"deployment","api_version":"apiVersion",""" + """ "dimensions":1024,"max_input_tokens":512}""")); } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/CohereServiceSettingsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/CohereServiceSettingsTests.java index cb224f4089c0a..a010f63802052 100644 --- 
a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/CohereServiceSettingsTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/CohereServiceSettingsTests.java @@ -21,7 +21,6 @@ import org.elasticsearch.xpack.inference.services.ServiceUtils; import org.elasticsearch.xpack.inference.services.settings.RateLimitSettings; import org.elasticsearch.xpack.inference.services.settings.RateLimitSettingsTests; -import org.hamcrest.CoreMatchers; import org.hamcrest.MatcherAssert; import java.io.IOException; @@ -260,7 +259,7 @@ public void testXContent_WritesModelId() throws IOException { entity.toXContent(builder, null); String xContentResult = Strings.toString(builder); - assertThat(xContentResult, CoreMatchers.is(""" + assertThat(xContentResult, is(""" {"model_id":"modelId","rate_limit":{"requests_per_minute":1}}""")); } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/embeddings/CohereEmbeddingsTaskSettingsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/embeddings/CohereEmbeddingsTaskSettingsTests.java index 64af547171af2..4f5d872f09eb8 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/embeddings/CohereEmbeddingsTaskSettingsTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/embeddings/CohereEmbeddingsTaskSettingsTests.java @@ -15,7 +15,6 @@ import org.elasticsearch.test.AbstractWireSerializingTestCase; import org.elasticsearch.xpack.inference.services.cohere.CohereServiceFields; import org.elasticsearch.xpack.inference.services.cohere.CohereTruncation; -import org.hamcrest.CoreMatchers; import org.hamcrest.MatcherAssert; import java.io.IOException; @@ -110,7 +109,7 @@ private static > String getValidValuesSortedAndCombined(EnumSe public void 
testXContent_ThrowsAssertionFailure_WhenInputTypeIsUnspecified() { var thrownException = expectThrows(AssertionError.class, () -> new CohereEmbeddingsTaskSettings(InputType.UNSPECIFIED, null)); - MatcherAssert.assertThat(thrownException.getMessage(), CoreMatchers.is("received invalid input type value [unspecified]")); + MatcherAssert.assertThat(thrownException.getMessage(), is("received invalid input type value [unspecified]")); } public void testOf_KeepsOriginalValuesWhenRequestSettingsAreNull_AndRequestInputTypeIsInvalid() { diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsServiceSettingsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsServiceSettingsTests.java index d6deaa7da79f6..c964d2643459d 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsServiceSettingsTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsServiceSettingsTests.java @@ -22,7 +22,6 @@ import org.elasticsearch.xpack.inference.services.openai.OpenAiServiceFields; import org.elasticsearch.xpack.inference.services.settings.RateLimitSettings; import org.elasticsearch.xpack.inference.services.settings.RateLimitSettingsTests; -import org.hamcrest.CoreMatchers; import java.io.IOException; import java.net.URI; @@ -366,7 +365,7 @@ public void testToXContent_WritesDimensionsSetByUserTrue() throws IOException { entity.toXContent(builder, null); String xContentResult = Strings.toString(builder); - assertThat(xContentResult, CoreMatchers.is(""" + assertThat(xContentResult, is(""" {"model_id":"model","url":"url","organization_id":"org",""" + """ "rate_limit":{"requests_per_minute":3000},"dimensions_set_by_user":true}""")); } @@ -378,7 +377,7 @@ public void 
testToXContent_WritesDimensionsSetByUserFalse() throws IOException { entity.toXContent(builder, null); String xContentResult = Strings.toString(builder); - assertThat(xContentResult, CoreMatchers.is(""" + assertThat(xContentResult, is(""" {"model_id":"model","url":"url","organization_id":"org",""" + """ "rate_limit":{"requests_per_minute":3000},"dimensions_set_by_user":false}""")); } @@ -390,7 +389,7 @@ public void testToXContent_WritesAllValues() throws IOException { entity.toXContent(builder, null); String xContentResult = Strings.toString(builder); - assertThat(xContentResult, CoreMatchers.is(""" + assertThat(xContentResult, is(""" {"model_id":"model","url":"url","organization_id":"org","similarity":"dot_product",""" + """ "dimensions":1,"max_input_tokens":2,"rate_limit":{"requests_per_minute":3000},"dimensions_set_by_user":false}""")); } @@ -403,7 +402,7 @@ public void testToFilteredXContent_WritesAllValues_ExceptDimensionsSetByUser() t filteredXContent.toXContent(builder, null); String xContentResult = Strings.toString(builder); - assertThat(xContentResult, CoreMatchers.is(""" + assertThat(xContentResult, is(""" {"model_id":"model","url":"url","organization_id":"org","similarity":"dot_product",""" + """ "dimensions":1,"max_input_tokens":2}""")); } @@ -425,7 +424,7 @@ public void testToFilteredXContent_WritesAllValues_WithSpecifiedRateLimit() thro filteredXContent.toXContent(builder, null); String xContentResult = Strings.toString(builder); - assertThat(xContentResult, CoreMatchers.is(""" + assertThat(xContentResult, is(""" {"model_id":"model","url":"url","organization_id":"org","similarity":"dot_product",""" + """ "dimensions":1,"max_input_tokens":2}""")); } From 4574f2a434a7196c5a24026ffa0e301bc18c702e Mon Sep 17 00:00:00 2001 From: Jonathan Buttner <56361221+jonathan-buttner@users.noreply.github.com> Date: Tue, 7 May 2024 09:46:53 -0400 Subject: [PATCH 028/117] Adding manage_ml (#108262) --- .../security/authz/store/KibanaOwnedReservedRoleDescriptors.java | 
1 + 1 file changed, 1 insertion(+) diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/KibanaOwnedReservedRoleDescriptors.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/KibanaOwnedReservedRoleDescriptors.java index 8e4f9108c3b9c..787463355f594 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/KibanaOwnedReservedRoleDescriptors.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/KibanaOwnedReservedRoleDescriptors.java @@ -69,6 +69,7 @@ static RoleDescriptor kibanaSystem(String name) { // For Fleet package upgrade "manage_pipeline", "manage_ilm", + "manage_inference", // For the endpoint package that ships a transform "manage_transform", InvalidateApiKeyAction.NAME, From 4c263e2ab452f89244e6c5392ce06829435667e1 Mon Sep 17 00:00:00 2001 From: Moritz Mack Date: Tue, 7 May 2024 16:09:52 +0200 Subject: [PATCH 029/117] CLI launcher / terminal improvements (#106470) This removes getWriter() and getErrorWriter() from Terminal in order to prepare for using log4j in server-cli. Additionally, ensure IO failures on the pump thread are handled once completed (closed) and catch throwables in Command#main to print them (formatted) via Terminal. 
--- .../launcher/CliToolLauncher.java | 2 +- .../server/cli/ErrorPumpThread.java | 34 ++++-- .../server/cli/KeystorePasswordTerminal.java | 2 +- .../elasticsearch/server/cli/ServerCli.java | 3 +- .../server/cli/ServerProcess.java | 17 ++- .../server/cli/ServerProcessBuilder.java | 2 +- .../server/cli/ServerCliTests.java | 19 +++- .../server/cli/ServerProcessTests.java | 16 ++- .../windows/service/WindowsServiceDaemon.java | 4 +- .../windows/service/ProcrunCommandTests.java | 8 +- .../java/org/elasticsearch/cli/Command.java | 17 ++- .../java/org/elasticsearch/cli/Terminal.java | 106 +++++++++++++++--- .../org/elasticsearch/cli/TerminalTests.java | 40 +++++++ .../RemoveCorruptedShardDataCommand.java | 11 +- 14 files changed, 222 insertions(+), 59 deletions(-) diff --git a/distribution/tools/cli-launcher/src/main/java/org/elasticsearch/launcher/CliToolLauncher.java b/distribution/tools/cli-launcher/src/main/java/org/elasticsearch/launcher/CliToolLauncher.java index 4fd2512f2cbbe..981033aeccd8c 100644 --- a/distribution/tools/cli-launcher/src/main/java/org/elasticsearch/launcher/CliToolLauncher.java +++ b/distribution/tools/cli-launcher/src/main/java/org/elasticsearch/launcher/CliToolLauncher.java @@ -91,7 +91,7 @@ static Thread createShutdownHook(Terminal terminal, Closeable closeable) { try { closeable.close(); } catch (final IOException e) { - e.printStackTrace(terminal.getErrorWriter()); + terminal.errorPrintln(e); } terminal.flush(); // make sure to flush whatever the close or error might have written }, "elasticsearch-cli-shutdown"); diff --git a/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/ErrorPumpThread.java b/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/ErrorPumpThread.java index a6eb32cb1bb38..94c7653a08e0e 100644 --- a/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/ErrorPumpThread.java +++ 
b/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/ErrorPumpThread.java @@ -9,12 +9,14 @@ package org.elasticsearch.server.cli; import org.elasticsearch.bootstrap.BootstrapInfo; +import org.elasticsearch.cli.Terminal; +import org.elasticsearch.cli.Terminal.Verbosity; import java.io.BufferedReader; +import java.io.Closeable; import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; -import java.io.PrintWriter; import java.nio.charset.StandardCharsets; import java.util.List; import java.util.concurrent.CountDownLatch; @@ -29,9 +31,9 @@ * {@link BootstrapInfo#SERVER_READY_MARKER} signals the server is ready and the cli may * detach if daemonizing. All other messages are passed through to stderr. */ -class ErrorPumpThread extends Thread { +class ErrorPumpThread extends Thread implements Closeable { private final BufferedReader reader; - private final PrintWriter writer; + private final Terminal terminal; // a latch which changes state when the server is ready or has had a bootstrap error private final CountDownLatch readyOrDead = new CountDownLatch(1); @@ -42,10 +44,24 @@ class ErrorPumpThread extends Thread { // an unexpected io failure that occurred while pumping stderr private volatile IOException ioFailure; - ErrorPumpThread(PrintWriter errOutput, InputStream errInput) { + ErrorPumpThread(Terminal terminal, InputStream errInput) { super("server-cli[stderr_pump]"); this.reader = new BufferedReader(new InputStreamReader(errInput, StandardCharsets.UTF_8)); - this.writer = errOutput; + this.terminal = terminal; + } + + private void checkForIoFailure() throws IOException { + IOException failure = ioFailure; + ioFailure = null; + if (failure != null) { + throw failure; + } + } + + @Override + public void close() throws IOException { + assert isAlive() == false : "Pump thread must be drained first"; + checkForIoFailure(); } /** @@ -56,9 +72,7 @@ class ErrorPumpThread extends Thread { */ boolean waitUntilReady() 
throws IOException { nonInterruptibleVoid(readyOrDead::await); - if (ioFailure != null) { - throw ioFailure; - } + checkForIoFailure(); return ready; } @@ -81,13 +95,13 @@ public void run() { ready = true; readyOrDead.countDown(); } else if (filter.contains(line) == false) { - writer.println(line); + terminal.errorPrintln(Verbosity.SILENT, line, false); } } } catch (IOException e) { ioFailure = e; } finally { - writer.flush(); + terminal.flush(); readyOrDead.countDown(); } } diff --git a/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/KeystorePasswordTerminal.java b/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/KeystorePasswordTerminal.java index bf03acaf7a5da..0fddf76caff59 100644 --- a/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/KeystorePasswordTerminal.java +++ b/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/KeystorePasswordTerminal.java @@ -23,7 +23,7 @@ class KeystorePasswordTerminal extends Terminal implements Closeable { private final SecureString password; KeystorePasswordTerminal(Terminal delegate, SecureString password) { - super(delegate.getReader(), delegate.getWriter(), delegate.getErrorWriter()); + super(delegate); this.delegate = delegate; this.password = password; setVerbosity(delegate.getVerbosity()); diff --git a/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/ServerCli.java b/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/ServerCli.java index 0505ab86127cf..7b904d4cb5a89 100644 --- a/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/ServerCli.java +++ b/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/ServerCli.java @@ -27,6 +27,7 @@ import org.elasticsearch.env.Environment; import org.elasticsearch.monitor.jvm.JvmInfo; +import java.io.IOException; import java.nio.file.Files; import java.nio.file.Path; import java.util.Arrays; @@ -231,7 +232,7 
@@ private ServerArgs createArgs(OptionSet options, Environment env, SecureSettings } @Override - public void close() { + public void close() throws IOException { if (server != null) { server.stop(); } diff --git a/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/ServerProcess.java b/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/ServerProcess.java index fa948572e7675..35b5d93b39933 100644 --- a/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/ServerProcess.java +++ b/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/ServerProcess.java @@ -61,16 +61,21 @@ public long pid() { */ public synchronized void detach() throws IOException { errorPump.drain(); - IOUtils.close(jvmProcess.getOutputStream(), jvmProcess.getInputStream(), jvmProcess.getErrorStream()); - detached = true; + try { + IOUtils.close(jvmProcess.getOutputStream(), jvmProcess.getInputStream(), jvmProcess.getErrorStream(), errorPump); + } finally { + detached = true; + } } /** * Waits for the subprocess to exit. */ - public int waitFor() { + public int waitFor() throws IOException { errorPump.drain(); - return nonInterruptible(jvmProcess::waitFor); + int exitCode = nonInterruptible(jvmProcess::waitFor); + errorPump.close(); + return exitCode; } /** @@ -81,7 +86,7 @@ public int waitFor() { * *

Note that if {@link #detach()} has been called, this method is a no-op. */ - public synchronized void stop() { + public synchronized void stop() throws IOException { if (detached) { return; } @@ -93,7 +98,7 @@ public synchronized void stop() { /** * Stop the subprocess, sending a SIGKILL. */ - public void forceStop() { + public void forceStop() throws IOException { assert detached == false; jvmProcess.destroyForcibly(); waitFor(); diff --git a/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/ServerProcessBuilder.java b/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/ServerProcessBuilder.java index b90ac25f5d57d..fcc290ebe9e72 100644 --- a/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/ServerProcessBuilder.java +++ b/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/ServerProcessBuilder.java @@ -154,7 +154,7 @@ ServerProcess start(ProcessStarter processStarter) throws UserException { boolean success = false; try { jvmProcess = createProcess(getCommand(), getJvmArgs(), jvmOptions, getEnvironment(), processStarter); - errorPump = new ErrorPumpThread(terminal.getErrorWriter(), jvmProcess.getErrorStream()); + errorPump = new ErrorPumpThread(terminal, jvmProcess.getErrorStream()); errorPump.start(); sendArgs(serverArgs, jvmProcess.getOutputStream()); diff --git a/distribution/tools/server-cli/src/test/java/org/elasticsearch/server/cli/ServerCliTests.java b/distribution/tools/server-cli/src/test/java/org/elasticsearch/server/cli/ServerCliTests.java index e469764590bd6..38a64a778fc27 100644 --- a/distribution/tools/server-cli/src/test/java/org/elasticsearch/server/cli/ServerCliTests.java +++ b/distribution/tools/server-cli/src/test/java/org/elasticsearch/server/cli/ServerCliTests.java @@ -33,6 +33,7 @@ import java.io.IOException; import java.nio.file.Files; import java.nio.file.Path; +import java.util.List; import java.util.Locale; import java.util.Optional; import 
java.util.concurrent.atomic.AtomicBoolean; @@ -43,8 +44,11 @@ import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.Matchers.allOf; import static org.hamcrest.Matchers.emptyString; +import static org.hamcrest.Matchers.greaterThan; import static org.hamcrest.Matchers.hasItem; +import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.matchesRegex; import static org.hamcrest.Matchers.not; public class ServerCliTests extends CommandTestCase { @@ -321,11 +325,16 @@ protected ServerProcess startServer(Terminal terminal, ProcessInfo processInfo, throw new InterruptedException("interrupted while get jvm options"); } }; - var e = expectThrows( - InterruptedException.class, - () -> command.main(new String[0], terminal, new ProcessInfo(sysprops, envVars, esHomeDir)) - ); - assertThat(e.getMessage(), equalTo("interrupted while get jvm options")); + + int exitCode = command.main(new String[0], terminal, new ProcessInfo(sysprops, envVars, esHomeDir)); + assertThat(exitCode, is(ExitCodes.CODE_ERROR)); + + String[] lines = terminal.getErrorOutput().split(System.lineSeparator()); + assertThat(List.of(lines), hasSize(greaterThan(10))); // at least decent sized stacktrace + assertThat(lines[0], is("java.lang.InterruptedException: interrupted while get jvm options")); + assertThat(lines[1], matchesRegex("\\tat org.elasticsearch.server.cli.ServerCliTests.+startServer\\(ServerCliTests.java:\\d+\\)")); + assertThat(lines[lines.length - 1], matchesRegex("\tat java.base/java.lang.Thread.run\\(Thread.java:\\d+\\)")); + command.close(); } diff --git a/distribution/tools/server-cli/src/test/java/org/elasticsearch/server/cli/ServerProcessTests.java b/distribution/tools/server-cli/src/test/java/org/elasticsearch/server/cli/ServerProcessTests.java index b9f2eb73b30b5..dc36485fb77ab 100644 --- a/distribution/tools/server-cli/src/test/java/org/elasticsearch/server/cli/ServerProcessTests.java +++ 
b/distribution/tools/server-cli/src/test/java/org/elasticsearch/server/cli/ServerProcessTests.java @@ -38,6 +38,7 @@ import java.util.List; import java.util.Map; import java.util.concurrent.CancellationException; +import java.util.concurrent.CompletableFuture; import java.util.concurrent.CountDownLatch; import java.util.concurrent.ExecutionException; import java.util.concurrent.ExecutorService; @@ -393,15 +394,24 @@ public void testWaitFor() throws Exception { stderr.println("final message"); }; var server = startProcess(false, false); + + CompletableFuture stopping = new CompletableFuture<>(); new Thread(() -> { - // simulate stop run as shutdown hook in another thread, eg from Ctrl-C - nonInterruptibleVoid(mainReady::await); - server.stop(); + try { + // simulate stop run as shutdown hook in another thread, eg from Ctrl-C + nonInterruptibleVoid(mainReady::await); + server.stop(); + stopping.complete(null); + } catch (Throwable e) { + stopping.completeExceptionally(e); + } }).start(); int exitCode = server.waitFor(); assertThat(process.main.isDone(), is(true)); assertThat(exitCode, equalTo(0)); assertThat(terminal.getErrorOutput(), containsString("final message")); + // rethrow any potential exception observed while stopping + stopping.get(); } public void testProcessDies() throws Exception { diff --git a/distribution/tools/windows-service-cli/src/main/java/org/elasticsearch/windows/service/WindowsServiceDaemon.java b/distribution/tools/windows-service-cli/src/main/java/org/elasticsearch/windows/service/WindowsServiceDaemon.java index 22474e63ab0df..66ae78470c55d 100644 --- a/distribution/tools/windows-service-cli/src/main/java/org/elasticsearch/windows/service/WindowsServiceDaemon.java +++ b/distribution/tools/windows-service-cli/src/main/java/org/elasticsearch/windows/service/WindowsServiceDaemon.java @@ -23,6 +23,8 @@ import org.elasticsearch.server.cli.ServerProcessBuilder; import org.elasticsearch.server.cli.ServerProcessUtils; +import java.io.IOException; + 
/** * Starts an Elasticsearch process, but does not wait for it to exit. *

@@ -55,7 +57,7 @@ public void execute(Terminal terminal, OptionSet options, Environment env, Proce } @Override - public void close() { + public void close() throws IOException { if (server != null) { server.stop(); } diff --git a/distribution/tools/windows-service-cli/src/test/java/org/elasticsearch/windows/service/ProcrunCommandTests.java b/distribution/tools/windows-service-cli/src/test/java/org/elasticsearch/windows/service/ProcrunCommandTests.java index e4b651fcb77af..8f44eaa80f23a 100644 --- a/distribution/tools/windows-service-cli/src/test/java/org/elasticsearch/windows/service/ProcrunCommandTests.java +++ b/distribution/tools/windows-service-cli/src/test/java/org/elasticsearch/windows/service/ProcrunCommandTests.java @@ -22,6 +22,8 @@ import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.emptyString; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.startsWith; public class ProcrunCommandTests extends WindowsServiceCliTestCase { @@ -111,8 +113,10 @@ protected String getDefaultFailureMessage() { public void testMissingExe() throws Exception { Files.delete(serviceExe); - var e = expectThrows(IllegalStateException.class, () -> executeMain("install")); - assertThat(e.getMessage(), containsString("Missing procrun exe")); + int exitCode = executeMain("install"); + + assertThat(exitCode, is(ExitCodes.CODE_ERROR)); + assertThat(terminal.getErrorOutput(), startsWith("java.lang.IllegalStateException: Missing procrun exe")); } public void testServiceId() throws Exception { diff --git a/libs/cli/src/main/java/org/elasticsearch/cli/Command.java b/libs/cli/src/main/java/org/elasticsearch/cli/Command.java index 201f0810f4d9b..32c4446e71dd2 100644 --- a/libs/cli/src/main/java/org/elasticsearch/cli/Command.java +++ b/libs/cli/src/main/java/org/elasticsearch/cli/Command.java @@ -17,6 +17,7 @@ import java.io.Closeable; import java.io.IOException; +import 
java.io.StringWriter; import java.util.Arrays; /** @@ -45,7 +46,7 @@ public Command(final String description) { } /** Parses options for this command from args and executes it. */ - public final int main(String[] args, Terminal terminal, ProcessInfo processInfo) throws Exception { + public final int main(String[] args, Terminal terminal, ProcessInfo processInfo) throws IOException { try { mainWithoutErrorHandling(args, terminal, processInfo); } catch (OptionException e) { @@ -59,6 +60,14 @@ public final int main(String[] args, Terminal terminal, ProcessInfo processInfo) } printUserException(terminal, e); return e.exitCode; + } catch (IOException ioe) { + terminal.errorPrintln(ioe); + return ExitCodes.IO_ERROR; + } catch (Throwable t) { + // It's acceptable to catch Throwable at this point: + // We're about to exit and only want to print the stacktrace with appropriate formatting (e.g. JSON). + terminal.errorPrintln(t); + return ExitCodes.CODE_ERROR; } return ExitCodes.OK; } @@ -96,15 +105,17 @@ public OptionSet parseOptions(String[] args) { /** Prints a help message for the command to the terminal. 
*/ private void printHelp(Terminal terminal, boolean toStdError) throws IOException { + StringWriter writer = new StringWriter(); + parser.printHelpOn(writer); if (toStdError) { terminal.errorPrintln(description); terminal.errorPrintln(""); - parser.printHelpOn(terminal.getErrorWriter()); + terminal.errorPrintln(writer.toString()); } else { terminal.println(description); terminal.println(""); printAdditionalHelp(terminal); - parser.printHelpOn(terminal.getWriter()); + terminal.println(writer.toString()); } } diff --git a/libs/cli/src/main/java/org/elasticsearch/cli/Terminal.java b/libs/cli/src/main/java/org/elasticsearch/cli/Terminal.java index 69cb76636a996..aaf233438f263 100644 --- a/libs/cli/src/main/java/org/elasticsearch/cli/Terminal.java +++ b/libs/cli/src/main/java/org/elasticsearch/cli/Terminal.java @@ -72,6 +72,13 @@ protected Terminal(Reader reader, PrintWriter outWriter, PrintWriter errWriter) this.errWriter = errWriter; } + /** + * Constructs a terminal instance from a delegate instance. + */ + protected Terminal(Terminal delegate) { + this(delegate.reader, delegate.outWriter, delegate.errWriter); + } + /** * Sets the verbosity of the terminal. * @@ -113,14 +120,12 @@ public final Reader getReader() { return reader; } - /** Returns a Writer which can be used to write to the terminal directly using standard output. */ - public final PrintWriter getWriter() { - return outWriter; - } - - /** Returns a Writer which can be used to write to the terminal directly using standard error. */ - public final PrintWriter getErrorWriter() { - return errWriter; + /** + * Returns a line based OutputStream wrapping this Terminal's println. + * Note, this OutputStream is not thread-safe! + */ + public final OutputStream asLineOutputStream(Charset charset) { + return new LineOutputStream(charset); } /** @@ -138,7 +143,7 @@ public InputStream getInputStream() { * Returns an OutputStream which can be used to write to the terminal directly using standard output. * *

May return {@code null} if this Terminal is not capable of binary output. - * This corresponds with the underlying stream of bytes written to by {@link #getWriter()}. + * This corresponds with the underlying stream of bytes written to by {@link #println(CharSequence)}. */ @Nullable public OutputStream getOutputStream() { @@ -152,12 +157,12 @@ public final void println(CharSequence msg) { /** Prints a line to the terminal at {@code verbosity} level. */ public final void println(Verbosity verbosity, CharSequence msg) { - print(verbosity, outWriter, msg, true); + print(verbosity, outWriter, msg, true, true); } /** Prints message to the terminal's standard output at {@code verbosity} level, without a newline. */ public final void print(Verbosity verbosity, String msg) { - print(verbosity, outWriter, msg, false); + print(verbosity, outWriter, msg, false, true); } /** @@ -165,30 +170,49 @@ public final void print(Verbosity verbosity, String msg) { * * Subclasses may override if the writers are not implemented. */ - protected void print(Verbosity verbosity, PrintWriter writer, CharSequence msg, boolean newline) { + protected void print(Verbosity verbosity, PrintWriter writer, CharSequence msg, boolean newline, boolean flush) { if (isPrintable(verbosity)) { if (newline) { writer.println(msg); } else { writer.print(msg); } - writer.flush(); + if (flush) { + writer.flush(); + } } } /** Prints a line to the terminal's standard error at {@link Verbosity#NORMAL} verbosity level, without a newline. */ public final void errorPrint(Verbosity verbosity, String msg) { - print(verbosity, errWriter, msg, false); + print(verbosity, errWriter, msg, false, true); } /** Prints a line to the terminal's standard error at {@link Verbosity#NORMAL} verbosity level. */ public final void errorPrintln(String msg) { - errorPrintln(Verbosity.NORMAL, msg); + print(Verbosity.NORMAL, errWriter, msg, true, true); } /** Prints a line to the terminal's standard error at {@code verbosity} level. 
*/ public final void errorPrintln(Verbosity verbosity, String msg) { - print(verbosity, errWriter, msg, true); + print(verbosity, errWriter, msg, true, true); + } + + /** Prints a line to the terminal's standard error at {@code verbosity} level, with an optional flush */ + public final void errorPrintln(Verbosity verbosity, String msg, boolean flush) { + print(verbosity, errWriter, msg, true, flush); + } + + /** Prints a stacktrace to the terminal's standard error at {@code verbosity} level. */ + public void errorPrintln(Verbosity verbosity, Throwable throwable) { + if (isPrintable(verbosity)) { + throwable.printStackTrace(errWriter); + } + } + + /** Prints a stacktrace to the terminal's standard error at {@link Verbosity#SILENT} verbosity level. */ + public void errorPrintln(Throwable throwable) { + errorPrintln(Verbosity.SILENT, throwable); } /** Checks if is enough {@code verbosity} level to be printed */ @@ -339,4 +363,54 @@ public OutputStream getOutputStream() { return System.out; } } + + /** A line based OutputStream wrapping this Terminal's println, not thread-safe! */ + private class LineOutputStream extends OutputStream { + static final int DEFAULT_BUFFER_LENGTH = 1024; + static final int MAX_BUFFER_LENGTH = DEFAULT_BUFFER_LENGTH * 8; + + private final Charset charset; + private byte[] bytes = new byte[DEFAULT_BUFFER_LENGTH]; + private int count = 0; + + LineOutputStream(Charset charset) { + this.charset = charset; + } + + @Override + public void write(int b) { + if (b == 0) return; + if (b == '\n') { + flush(true); + return; + } + if (count == bytes.length) { + if (count >= MAX_BUFFER_LENGTH) { + flush(false); + } else { + bytes = Arrays.copyOf(bytes, 2 * bytes.length); + } + } + bytes[count++] = (byte) b; + } + + private void flush(boolean newline) { + if (newline && count > 0 && bytes[count - 1] == '\r') { + --count; // drop CR on windows as well + } + String msg = count > 0 ? 
new String(bytes, 0, count, charset) : ""; + print(Verbosity.NORMAL, outWriter, msg, newline, true); + count = 0; + if (bytes.length > DEFAULT_BUFFER_LENGTH) { + bytes = new byte[DEFAULT_BUFFER_LENGTH]; + } + } + + @Override + public void flush() { + if (count > 0) { + flush(false); + } + } + } } diff --git a/libs/cli/src/test/java/org/elasticsearch/cli/TerminalTests.java b/libs/cli/src/test/java/org/elasticsearch/cli/TerminalTests.java index 9c1faf911a829..dffb93ebbf230 100644 --- a/libs/cli/src/test/java/org/elasticsearch/cli/TerminalTests.java +++ b/libs/cli/src/test/java/org/elasticsearch/cli/TerminalTests.java @@ -11,6 +11,17 @@ import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.ESTestCase.WithoutSecurityManager; +import java.io.IOException; +import java.io.OutputStream; +import java.io.PrintWriter; +import java.nio.charset.StandardCharsets; + +import static org.mockito.ArgumentMatchers.eq; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.verifyNoMoreInteractions; + @WithoutSecurityManager public class TerminalTests extends ESTestCase { @@ -20,4 +31,33 @@ public void testSystemTerminalIfRedirected() { // Otherwise, JDK 22 doesn't provide a console if redirected. 
assertEquals(Terminal.SystemTerminal.class, Terminal.DEFAULT.getClass()); } + + public void testTerminalAsLineOutputStream() throws IOException { + PrintWriter stdOut = mock("stdOut"); + PrintWriter stdErr = mock("stdErr"); + + OutputStream out = new Terminal(mock("reader"), stdOut, stdErr) { + }.asLineOutputStream(StandardCharsets.UTF_8); + + out.write("123".getBytes(StandardCharsets.UTF_8)); + out.write("456".getBytes(StandardCharsets.UTF_8)); + out.write("789\r\n".getBytes(StandardCharsets.UTF_8)); // CR is removed as well + + verify(stdOut).println(eq((CharSequence) "123456789")); + verify(stdOut).flush(); + verifyNoMoreInteractions(stdOut, stdErr); + + out.write("\n".getBytes(StandardCharsets.UTF_8)); + verify(stdOut).println(eq((CharSequence) "")); + verify(stdOut, times(2)).flush(); + verifyNoMoreInteractions(stdOut, stdErr); + + out.write("a".getBytes(StandardCharsets.UTF_8)); + out.flush(); + verify(stdOut).print(eq((CharSequence) "a")); + verify(stdOut, times(3)).flush(); + + out.flush(); + verifyNoMoreInteractions(stdOut, stdErr); + } } diff --git a/server/src/main/java/org/elasticsearch/index/shard/RemoveCorruptedShardDataCommand.java b/server/src/main/java/org/elasticsearch/index/shard/RemoveCorruptedShardDataCommand.java index b74abe3cc0790..ace891f9aead6 100644 --- a/server/src/main/java/org/elasticsearch/index/shard/RemoveCorruptedShardDataCommand.java +++ b/server/src/main/java/org/elasticsearch/index/shard/RemoveCorruptedShardDataCommand.java @@ -50,9 +50,7 @@ import org.elasticsearch.index.translog.TruncateTranslogAction; import java.io.IOException; -import java.io.OutputStream; import java.io.PrintStream; -import java.io.PrintWriter; import java.nio.file.Files; import java.nio.file.Path; import java.util.Arrays; @@ -60,6 +58,7 @@ import java.util.Map; import java.util.Objects; +import static java.nio.charset.StandardCharsets.UTF_8; import static org.elasticsearch.common.lucene.Lucene.indexWriterConfigWithNoMerging; public class 
RemoveCorruptedShardDataCommand extends ElasticsearchNodeCommand { @@ -249,13 +248,7 @@ public void processDataPaths(Terminal terminal, Path[] dataPaths, OptionSet opti throw new ElasticsearchException("translog directory [" + translogPath + "], must exist and be a directory"); } - final PrintWriter writer = terminal.getWriter(); - final PrintStream printStream = new PrintStream(new OutputStream() { - @Override - public void write(int b) { - writer.write(b); - } - }, false, "UTF-8"); + final PrintStream printStream = new PrintStream(terminal.asLineOutputStream(UTF_8), false, UTF_8); final boolean verbose = terminal.isPrintable(Terminal.Verbosity.VERBOSE); final Directory indexDirectory = getDirectory(indexPath); From 9623e522c371dec36b4893da41242ba18fe26163 Mon Sep 17 00:00:00 2001 From: Jonathan Buttner <56361221+jonathan-buttner@users.noreply.github.com> Date: Tue, 7 May 2024 10:19:08 -0400 Subject: [PATCH 030/117] [ML] Inference document configurable settings (#108273) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * Starting to document various inference settings * Finish settings * Update docs/reference/settings/inference-settings.asciidoc Co-authored-by: Max Hniebergall <137079448+maxhniebergall@users.noreply.github.com> * Update docs/reference/settings/inference-settings.asciidoc Co-authored-by: István Zoltán Szabó * Update docs/reference/settings/inference-settings.asciidoc Co-authored-by: István Zoltán Szabó * Update docs/reference/settings/inference-settings.asciidoc Co-authored-by: István Zoltán Szabó * Update docs/reference/settings/inference-settings.asciidoc Co-authored-by: István Zoltán Szabó * Update docs/reference/settings/inference-settings.asciidoc Co-authored-by: István Zoltán Szabó * Update docs/reference/settings/inference-settings.asciidoc Co-authored-by: István Zoltán Szabó * Update docs/reference/settings/inference-settings.asciidoc Co-authored-by: István Zoltán Szabó --------- Co-authored-by: Max 
Hniebergall <137079448+maxhniebergall@users.noreply.github.com> Co-authored-by: István Zoltán Szabó --- .../settings/inference-settings.asciidoc | 92 +++++++++++++++++++ docs/reference/setup.asciidoc | 2 + 2 files changed, 94 insertions(+) create mode 100644 docs/reference/settings/inference-settings.asciidoc diff --git a/docs/reference/settings/inference-settings.asciidoc b/docs/reference/settings/inference-settings.asciidoc new file mode 100644 index 0000000000000..fa0905cf0ef73 --- /dev/null +++ b/docs/reference/settings/inference-settings.asciidoc @@ -0,0 +1,92 @@ + +[role="xpack"] +[[inference-settings]] +=== Inference API settings in {es} +++++ +Inference settings +++++ + +[[inference-settings-description]] +// tag::inference-settings-description-tag[] +You do not need to configure any settings to use the {infer} APIs. Each setting has a default. +// end::inference-settings-description-tag[] + +[discrete] +[[xpack-inference-logging]] +// tag::inference-logging[] +==== Inference API logging settings + +When certain failures occur, a log message is emitted. In the case of a +reoccurring failure the logging throttler restricts repeated messages from being logged. + +`xpack.inference.logging.reset_interval`:: +(<>) Specifies the interval for when a cleanup thread will clear an internal +cache of the previously logged messages. Defaults to one day (`1d`). + +`xpack.inference.logging.wait_duration`:: +(<>) Specifies the amount of time to wait after logging a message before that +message can be logged again. Defaults to one hour (`1h`). +// end::inference-logging[] + +[[xpack-inference-http-settings]] +// tag::inference-http-settings[] +==== {infer-cap} API HTTP settings + +`xpack.inference.http.max_response_size`:: +(<>) Specifies the maximum size in bytes an HTTP response is allowed to have, +defaults to `10mb`, the maximum configurable value is `50mb`. 
+ +`xpack.inference.http.max_total_connections`:: +(<>) Specifies the maximum number of connections the internal connection pool can +lease. Defaults to `50`. + +`xpack.inference.http.max_route_connections`:: +(<>) Specifies the maximum number of connections a single route can lease from +the internal connection pool. If this setting is set to a value equal to or greater than +`xpack.inference.http.max_total_connections`, then a single third party service could lease all available +connections and other third party services would be unable to lease connections. Defaults to `20`. + +`xpack.inference.http.connection_eviction_interval`:: +(<>) Specifies the interval that an eviction thread will run to remove expired and +stale connections from the internal connection pool. Decreasing this time value can help improve throughput if +multiple third party service are contending for the available connections in the pool. Defaults to one minute (`1m`). + +`xpack.inference.http.connection_eviction_max_idle_time`:: +(<>) Specifies the maximum duration a connection can be unused before it is marked as +idle and can be closed and removed from the shared connection pool. Defaults to one minute (`1m`). + +`xpack.inference.http.request_executor.queue_capacity`:: +(<>) Specifies the size of the internal queue for requests waiting to be sent. If +the queue is full and a request is sent to the {infer} API, it will be rejected. Defaults to `2000`. + +[[xpack-inference-http-retry-settings]] +==== {infer-cap} API HTTP Retry settings + +When a third-party service returns a transient failure code (for example, 429), the request is retried by the {infer} +API. These settings govern the retry behavior. When a request is retried, exponential backoff is used. + +`xpack.inference.http.retry.initial_delay`:: +(<>) Specifies the initial delay before retrying a request. Defaults to one second +(`1s`). 
+ +`xpack.inference.http.retry.max_delay_bound`:: +(<>) Specifies the maximum delay for a request. Defaults to five seconds (`5s`). + +`xpack.inference.http.retry.timeout`:: +(<>) Specifies the maximum amount of time a request can be retried. +Once the request exceeds this time, the request will no longer be retried and a failure will be returned. +Defaults to 30 seconds (`30s`). +// end::inference-logging[] + +[[xpack-inference-input-text]] +// tag::inference-input-text[] +==== {infer-cap} API Input text + +For certain third-party service integrations, when the service returns an error indicating that the request +input was too large, the input will be truncated and the request is retried. These settings govern +how the truncation is performed. + +`xpack.inference.truncator.reduction_percentage`:: +(<>) Specifies the percentage to reduce the input text by if the 3rd party service +responds with an error indicating it is too long. Defaults to 50 percent (`0.5`). +// end::inference-input-text[] diff --git a/docs/reference/setup.asciidoc b/docs/reference/setup.asciidoc index e007b67a943b0..c886fe0feeb4a 100644 --- a/docs/reference/setup.asciidoc +++ b/docs/reference/setup.asciidoc @@ -70,6 +70,8 @@ include::setup/logging-config.asciidoc[] include::settings/ml-settings.asciidoc[] +include::settings/inference-settings.asciidoc[] + include::settings/monitoring-settings.asciidoc[] include::modules/node.asciidoc[] From c5d960875add2123ba995130a2051192b6c7930b Mon Sep 17 00:00:00 2001 From: Moritz Mack Date: Tue, 7 May 2024 16:27:26 +0200 Subject: [PATCH 031/117] Verify nodes are ready after restart despite bad file settings (#108024) --- .../readiness/ReadinessClusterIT.java | 47 +++++++++++++++---- 1 file changed, 38 insertions(+), 9 deletions(-) diff --git a/server/src/internalClusterTest/java/org/elasticsearch/readiness/ReadinessClusterIT.java b/server/src/internalClusterTest/java/org/elasticsearch/readiness/ReadinessClusterIT.java index 1f8d55516d508..b7a1dc12406d2 
100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/readiness/ReadinessClusterIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/readiness/ReadinessClusterIT.java @@ -215,10 +215,9 @@ public Settings onNodeStopped(String nodeName) throws Exception { } } - private Tuple setupClusterStateListenerForError(String node) { + private CountDownLatch setupClusterStateListenerForError(String node) { ClusterService clusterService = internalCluster().clusterService(node); CountDownLatch savedClusterState = new CountDownLatch(1); - AtomicLong metadataVersion = new AtomicLong(-1); clusterService.addListener(new ClusterStateListener() { @Override public void clusterChanged(ClusterChangedEvent event) { @@ -231,13 +230,16 @@ public void clusterChanged(ClusterChangedEvent event) { containsString("Missing handler definition for content key [not_cluster_settings]") ); clusterService.removeListener(this); - metadataVersion.set(event.state().metadata().version()); savedClusterState.countDown(); } } }); - return new Tuple<>(savedClusterState, metadataVersion); + // we need this after we setup the listener above, in case the node started and processed + // settings before we set our listener to cluster state changes. + causeClusterStateUpdate(); + + return savedClusterState; } private void writeFileSettings(String json) throws Exception { @@ -269,22 +271,49 @@ public void testNotReadyOnBadFileSettings() throws Exception { assertMasterNode(internalCluster().nonMasterClient(), masterNode); var savedClusterState = setupClusterStateListenerForError(masterNode); - // we need this after we setup the listener above, in case the node started and processed - // settings before we set our listener to cluster state changes. 
- causeClusterStateUpdate(); - FileSettingsService masterFileSettingsService = internalCluster().getInstance(FileSettingsService.class, masterNode); assertTrue(masterFileSettingsService.watching()); assertFalse(dataFileSettingsService.watching()); - boolean awaitSuccessful = savedClusterState.v1().await(20, TimeUnit.SECONDS); + boolean awaitSuccessful = savedClusterState.await(20, TimeUnit.SECONDS); assertTrue(awaitSuccessful); ReadinessService s = internalCluster().getInstance(ReadinessService.class, internalCluster().getMasterName()); assertNull(s.boundAddress()); } + public void testReadyAfterRestartWithBadFileSettings() throws Exception { + internalCluster().setBootstrapMasterNodeIndex(0); + writeFileSettings(testJSON); + + logger.info("--> start data node / non master node"); + String dataNode = internalCluster().startNode(Settings.builder().put(dataOnlyNode()).put("discovery.initial_state_timeout", "1s")); + String masterNode = internalCluster().startMasterOnlyNode(); + + assertMasterNode(internalCluster().nonMasterClient(), masterNode); + assertBusy(() -> assertTrue("master node ready", internalCluster().getInstance(ReadinessService.class, masterNode).ready())); + assertBusy(() -> assertTrue("data node ready", internalCluster().getInstance(ReadinessService.class, dataNode).ready())); + + logger.info("--> stop master node"); + Settings masterDataPathSettings = internalCluster().dataPathSettings(internalCluster().getMasterName()); + internalCluster().stopCurrentMasterNode(); + expectMasterNotFound(); + + logger.info("--> write bad file settings before restarting master node"); + writeFileSettings(testErrorJSON); + + logger.info("--> restart master node"); + String nextMasterNode = internalCluster().startNode(Settings.builder().put(nonDataNode(masterNode())).put(masterDataPathSettings)); + + assertMasterNode(internalCluster().nonMasterClient(), nextMasterNode); + + var savedClusterState = setupClusterStateListenerForError(nextMasterNode); + 
assertTrue(savedClusterState.await(20, TimeUnit.SECONDS)); + + assertTrue("master node ready on restart", internalCluster().getInstance(ReadinessService.class, nextMasterNode).ready()); + } + public void testReadyWhenMissingFileSettings() throws Exception { internalCluster().setBootstrapMasterNodeIndex(0); internalCluster().startNode(Settings.builder().put(dataOnlyNode()).put("discovery.initial_state_timeout", "1s")); From c19b7089d4e6dd903b158bc7dbdc95a1cc3545e2 Mon Sep 17 00:00:00 2001 From: Luigi Dell'Aquila Date: Tue, 7 May 2024 16:27:46 +0200 Subject: [PATCH 032/117] Mute RestEsqlIT testDoLogWithDebug (#108368) --- .../org/elasticsearch/xpack/esql/qa/single_node/RestEsqlIT.java | 1 + 1 file changed, 1 insertion(+) diff --git a/x-pack/plugin/esql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/RestEsqlIT.java b/x-pack/plugin/esql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/RestEsqlIT.java index 4f43e54a82546..072dc5265fe60 100644 --- a/x-pack/plugin/esql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/RestEsqlIT.java +++ b/x-pack/plugin/esql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/RestEsqlIT.java @@ -126,6 +126,7 @@ public void testDoNotLogWithInfo() throws IOException { } } + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/108367") public void testDoLogWithDebug() throws IOException { try { setLoggingLevel("DEBUG"); From e8cb8c67d5eb1697704f5f06f98eb928c9f92332 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Tim=20R=C3=BChsen?= Date: Tue, 7 May 2024 16:35:54 +0200 Subject: [PATCH 033/117] [Profiling] Decrease storage size by reordering fields for the events data stream (#108341) * [Profiling] Reorder events field sorting * Bump template version --------- Co-authored-by: Elastic Machine --- .../profiling/component-template/profiling-events.json | 5 ++--- 
.../persistence/ProfilingIndexTemplateRegistry.java | 3 ++- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/x-pack/plugin/core/template-resources/src/main/resources/profiling/component-template/profiling-events.json b/x-pack/plugin/core/template-resources/src/main/resources/profiling/component-template/profiling-events.json index c540a61c28f05..fbba399162ee0 100644 --- a/x-pack/plugin/core/template-resources/src/main/resources/profiling/component-template/profiling-events.json +++ b/x-pack/plugin/core/template-resources/src/main/resources/profiling/component-template/profiling-events.json @@ -10,11 +10,10 @@ "sort": { "field": [ "profiling.project.id", - "@timestamp", "orchestrator.resource.name", + "host.name", "container.name", - "process.thread.name", - "host.id" + "process.thread.name" ] } }, diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/persistence/ProfilingIndexTemplateRegistry.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/persistence/ProfilingIndexTemplateRegistry.java index 066a975d7de7d..86cefd71b2d5f 100644 --- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/persistence/ProfilingIndexTemplateRegistry.java +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/persistence/ProfilingIndexTemplateRegistry.java @@ -49,7 +49,8 @@ public class ProfilingIndexTemplateRegistry extends IndexTemplateRegistry { // version 6: Added 'host.arch' keyword mapping to profiling-hosts // version 7: Added 'host.type', 'cloud.provider', 'cloud.region' keyword mappings to profiling-hosts // version 8: Changed from disabled _source to synthetic _source for profiling-events-* and profiling-metrics - public static final int INDEX_TEMPLATE_VERSION = 8; + // version 9: Changed sort order for profiling-events-* + public static final int INDEX_TEMPLATE_VERSION = 9; // history for individual indices / index templates. 
Only bump these for breaking changes that require to create a new index public static final int PROFILING_EVENTS_VERSION = 3; From a079cdc17d7467088ddbe0cdf619cf478ffba1b3 Mon Sep 17 00:00:00 2001 From: Lisa Cawley Date: Tue, 7 May 2024 07:52:45 -0700 Subject: [PATCH 034/117] [DOCS] Update transform and anomaly detection rule creation steps (#107975) --- .../ml-configuring-alerts.asciidoc | 7 +------ docs/reference/ml/images/ml-rule.png | Bin 118882 -> 0 bytes .../transform/images/transform-rule.png | Bin 83160 -> 0 bytes .../transform/transform-alerts.asciidoc | 19 ++++++++---------- 4 files changed, 9 insertions(+), 17 deletions(-) delete mode 100644 docs/reference/ml/images/ml-rule.png delete mode 100644 docs/reference/transform/images/transform-rule.png diff --git a/docs/reference/ml/anomaly-detection/ml-configuring-alerts.asciidoc b/docs/reference/ml/anomaly-detection/ml-configuring-alerts.asciidoc index 2e678b929d296..89eb6e8559056 100644 --- a/docs/reference/ml/anomaly-detection/ml-configuring-alerts.asciidoc +++ b/docs/reference/ml/anomaly-detection/ml-configuring-alerts.asciidoc @@ -27,12 +27,7 @@ TIP: If you have created rules for specific {anomaly-jobs} and you want to monitor whether these jobs work as expected, {anomaly-jobs} health rules are ideal for this purpose. -In *{stack-manage-app} > {rules-ui}*, you can create both types of {ml} rules: - -[role="screenshot"] -image::images/ml-rule.png["Creating a new machine learning rule",500] -// NOTE: This is an autogenerated screenshot. Do not edit it directly. - +In *{stack-manage-app} > {rules-ui}*, you can create both types of {ml} rules. In the *{ml-app}* app, you can create only {anomaly-detect} alert rules; create them from the {anomaly-job} wizard after you start the job or from the {anomaly-job} list. 
diff --git a/docs/reference/ml/images/ml-rule.png b/docs/reference/ml/images/ml-rule.png deleted file mode 100644 index f7ebcb3716b81c9fc6d0f9567a6efb41e177c77d..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 118882 zcmeEuS5#Bm_pZGGf+C_+8=zE?-Vu(VC`F}1h;*gb(BUBMNLA?&+Cgf70HFtn5Q@~$ zL#PsZ?*tMcck}zV3;f1?xDWT`o`;N)k?gGOJ=a=u&G~)boL|&b6sfK;Tsw2-4ArZb zf4n<$<^mFUL9Sc^KIyY7{dVTeS&L8d@@lW-Z!)9WA=>2IZH%fC0T2U^64+pk3`=auO1D2 z-F}b6HlG*PSmo#g6_gKSLh@VNanr9_W8EYVhQ~|Y5tZjb7NDLUs@`*Ynxgh5cZ1;p zif`^JRb+~e<2f3Kc?v{41|sWF|+A5#n8w1aq`?DH)8~3LX=~|eI&CIllkohMVoHH zB_*^grXlij$`fV*H^n1e1J2`b-OE12th(4*PMd3I3p0c%>FMRy_j7NJmIz@aDaFzt z<{vW$qWUOsj&EIAqxmt0yBCRVe~0Hx^3DGZra*k3k^zh_YO4LpOj-F1C-DBtnX^Hk z&RhWAodsSD!0XJJbE)V4>yrz})bs!S9&+?$p<|BJnKQCyUj6Z0-SzC!sB;|Ci~7sI zgE?d=Lul^Ri7{O#$onxl1Tn3wtk~IUcl|CHpIGrnkB+i6+>`y&zrqK^Y*vhrK;Wz( z<}T)A?#WMi`bRZvmk;bwSyffIz8@C?&YV4W;R+3K^?$#Sy?RBE&sTcx+^KKQ_^Qhe zPE-5-_oe^!Ce`GEKZs2rN?h)LhWXF$?k#PI{BMK*=cj7V&ea!uzpKQ2|I~LhkUhr# z{dWH27mx3rHPmXM%m_brtN;4$3UZ(7^rumwe-^?_0^cbff4HMx_=65Fo&HowL3U8% znS9Q}6XpPZQP1mh{`9A85AQ8;#;5h&IBCE$XANUf|HgRnHwWEfz3O?9D~t-^OhWIm z8hK3cFoNh>>O(G;&vBqj=G=PLiEgMYcNRo&c|<7~;Xl&h!e z^6;ejqN;xiX@MDTgG?p_^Mcd*+b^U3 zZd)VvhiZ$bGe55anAOrrV47MEwq5k00qlw@6IFP5aBTZM1xV&gDKd2E=VWWKVejjuAwuW>! 
z>wwmUnksCxB(*{xEO7=Fo1a>hO8Th|U3A#FU~(|C>s6%EKD9F8)Joju`N}tEG435s zL#C5hy9wzhd_>BeS{42muU!Pi^U@z(VLEki^M9t`KPRm{>SyTZg7%K!yTCs^TkyVm z>TWE!)p}F89MMJ>frSxT@o%1<;cB~8S9Ikk{&R3(uD-jap1$VnIf4IJ*1$|9dVk{Q zm1ULBi+{q;Edfklk4Ih!eq&+*#v32s2FtfJ(<+rzIen{BwAxPJ0($v)I<30sRyFJc zwLq%qX9%wGk75cZZGErn(Z%*9i|^>n?$cXx57^s#>P)A01O%9GGYy`uKKJ!za#k3?vA9Z-Q2j3Y@DdQQJi_Qn;cT{a)FqdSp$o;B*09x4X%)$Vh`+h1Kxkex)A9LJPT@cIF zgJd^h$^EVx!xpxfxM@=KFqKhP>9+9j^zzPUigs25kL||lS*AxPorqwRY`Si$^d*qQ zQiWsE1<>8D?sUZE5(ZB~Vhu3nQ% zbK9I38*TVPig^e-8{%6l$repdpv0BB!0SFl=MHuACOZ2x3 zXq{?_M8D>%`94MtV)drDB0H3^qn53_I0bw6}k9pm9f z?K>EbIuBgW*DLRIam$bB?-!-7QnebqooIX8eMi6W1EbI3UhJc|8ot%NU6*RIH)%>r zi~cfWYYp$v3d?cxre>bgh)ho~&C(j1Ci-tugbx=>DUw&A?-o-oWFa11iqyv++sWGGAyT5M=LF zIBbJ@I1*JYepfzlp>^II8eJRi_S=^fgvv)5IBUS5q&0LzeLq&8p2chF3+L$Vw7&#G`h&GO(w^(!< z-?dn}_%UvTaTtOZyMV57&(W(vcuO$vFgIa?S zB)&(83U=j$sL_<}rXOD-wvp@Zj4Z^5I0PA2r3WHR}Ceb#M5UtQJjU!FS8X%#aqJ3r1oZ z;2#aVI<^hbVeUbMG4efviPTcR8FVr2uCsev=5K>6X2^*sxxe}m}qmJdBNOml9GE}~hBrF`5liw64|hPmn2c>cPiAT%W}OEdANd873n!5Y1Z zL1`zpS$cM~hdYMZj>ys_278=ecjZ2I8b=k%`k|V3m&?cLro!OP)~Hb#iex?~qvg^8 z%7bL@>aO{&8a=}2S)|spVO%j2F8DMpsWX>D7NzWD*yM?NA@x4d|$c^9<~3l2~0|x~>P9%axlXl@K2b z2y||*?^o5Dzqf}kdLvnn-5<@zwRn(y_l}%trD^PS>!J4j2h;Xxs~O)f;wTPa9a0pf zMfmkeRZ+J1d11TP}yzEazbpux`N z7U;Wnv(10r>+jE5u8fc-RwxKmX&x>r8SYKTQ`|nx1<6zDQsxf4{G4l0Ak9SKl}s2DAjSj z(aWJetHN>s=A5T6A-wZ6!}$*$}4~Ar#D?XH!Q4LR`kf+UXg?aIu+}@Q?}Z@wS31 z^k4^_PA|?C&%Nnb!+wn{soCs@)T(Cg%rVK{9G}JqhHbz_77&SojGa1CrJ?>bTrzn( z#j$vM(;DhLGjB*3g|zTS$v~C{e_h=TUE$Kr^`tiG41k(aJBsH-6rBUmnC=tvFg&CJZ#2Ytr7s+hqNHPTaWr@69If?z105rJXA^ zv*A+|QLt%!9B?eyZM@=m1O_kN>J%$i?W}C?JWnkN?aM(GS}IIJKPeg?Sm&;U^}k0R zpS$_1f^(Lkv>YmV5HFa`=z_UdW;}D|!|O7Sm>_d?7ySx=Lno->r(2rCt7qx37a_OZ zRGaTDj+;Xh2>Oz{TfLYmXo&URnqN8kx9@l$MtbjF^-e3r#JVCYrOW-)`i+i+5l3+2sQ74d zwW>op@u-}c_o(}?T`lEIDkM^43 zL;!v6%8r0n0sEw6uZZ=Z6bT6)=V{y?9rI056|#Oopi!g?*US9_0u?Jn$Trz0KR-Au zTp(-#M#-$}JfN*#a=3)RP`Hdat9nU6VpbSYx!Nhy<>NlE&0(wwba6k;*%O^PXV84X 
zM)COJCzpO5iMMn;K#`@l7I2Q4ftkGgv$Eywx2P_oedq1GocCD+{rO4@(j%`eM4bbVEiQ{EYSmGh zvgfh-#$k5bLRflH#(K6`eHPwDkGFN3dn%GUyR;Jxk=J}C)}mum+FW$rK4=a#B+`qt^1Cj+jp83Iac)d)y209C@JuUCS^Fzi+_N z@s!E4>(Fq$ksemGpX4$IDqI=5b|XuLMIgrVK+lu)hXOth*ttT?QKT}w`hr#*l9 z`n3pYP0_+&uVEXcH9AJmiX{dFiFdlKH*>gJO)@Y;JVsG$FWi+roa(K{h%fg=*7v;T zizev5ml6H&Gt~mQHtKQ!lahgPwDo&wj_Ru(o2ZP3UDavGo?H!BVJ`}jU3Ryb$wO{; zZx)=JA1UVcX`HM6GOKqK;0cf`Rg3Ck!m!I#&Ro?igNm$PH`!GSyvtOTQ^rwb!bKQw z#n1hTO7oO!Sx`?SZQl59Ixsd*JCO3pzH>wH_JW0kw`;1}-hK!lMUutMW*~~tD@zk#HZ;P#eomr2@++jTEZIKamHB!wZ*};#K9X)s7 z!Sae7#@r{HRj&ZK-usH*^&y+d7^ZzJstjW_fLs0knwzD{c{zaq`z~{xnPH`da9B7Q z9*{zM_3ZpYNiT+yS={MLlDPAmLuaLO2@X^Ucd?qb$8u>;+4TbULgK7UIh7-u0CFSx zDlNOe)`F?pj#zJ<*z5@((mAeZ)5Ka$HG@MYz_Oya7W ze+jQ|H+Q((-V}FP(O-{wF}l(ZGq0O%Tt8f8b_p1D8O~5jq6d45HN*#R>nP!Bq(?^* zk1cN{#iLB6jSr%Xi!nVYq)BIj&qwE(S81U`K#Dg} zY^l19<>GMdEDO>UUC@dsiTkf~K|3sd?}kl3qV6OW~xvMm(qDQ35dNajgy?yl3( z*p1Hc+`&*$fy^?5v5MrxU~qYE@3N>wzSC_lx4;Xb~gPTk=1{BJXNA0x5z!v3%T{;wW_-IrMQ+ zFGxm!=N1E{Jb2^Y!#6r(e8z?Mtq+`!5_S5f(7TD7_kBevEEDs|EG(XL-&$icNadr4 z<7-Wfwt>dsV$ZmXcJXO$Av5=D&+QIb+WH@5kE=rKAD}mc)2Ywhs;&rE;jZ|w&AZNT zp5o|DREXHfSDt3mr(BtJqMc>TMp@|T_ zg-92({=89VVt)uv*+)_Qn!KF8K%QD9z7;0Fuhm=J9mwd<_Fe0dfNA%OYeAr*&a@!r zXiX517nyXEao2chQYyzoy(cZB?7K+|X6;gh{Se$RbZ@n{2m4guGM;D%01agy1~>b$ zze28p_iFV*6_Z!rgkjK>5SqxV$mIP|TUv*ymRAXS!>N9~d1{2Sb4X_o&Na={ zR=x@3GYBRi^Fi(mS(*u(?~;yE@4hbrg#{H$-=W5cVpnJdU|#>q0|J(v6@TQgSm+!cYvRO#cVaEfd?1rkrw zpmmPbXDO`^n~cP||1qF%yuC(*@!bY;sz!w-9U;QV!Fb`Sw1(C zd$trD#VlSzZQo7glOaFWADkU2Z0~v9p(a|u=e%ne^g{q`^tq>)pHS+h{GY+kG!(Azlla^qYP~WT=~hD|u9; z$GcOBWWJR*lpHZJ?0b2MF?pWwN0pjS?Om63?Jb|_j9D|$Nw|Em4G0wb&IxY$kZ=sG z^zFI%F~;+Yt;PHKo(lU(bXS^uFQ4$?8k*V=-|*^wr(}XlvRl36JgU?v5p zWOD?y{n>2;DUU#w7A@ArsyFe^*pBk9Pd?t{WkOe`a^jYoeUnaYVOH0s5395!L4W(~4_mzv=uzsgd2*Ql zQQ+80>x+<$yoOu|`UyG>M=QNZUGp%{=#f7-g=Nz`W(4RT9A#$aLx=8O}w&r0)qs+ zZYV&`nFQ0&?1#l9IuV5khNSl{yMM{}gXny%XivJ~EB*@G43mwq z`nShC$mh|gMP9P0n&-kemx854r@o@mas0*>{deh4dGjk#<+6UYO>q(4KYu=-fI5== 
z$xvs7+Fk+4ox01c|574QG85m=4W`M@@sm9b^M$ZimWN8)Dj^GfITQ0)DOYJZl$9dj zx8j4gV_3gK zr#aVQCBMx`#oCb!;OzNJfnR7j0;XfZ!rbLc&kW0OW-$H!<5}?${2|MHMX;)nYX^Xm zY$zD5E1qkMTursmmEN1`wQKm+9Ed^f{@u#P-yl9cMOzQBwNNP$fGa8_)Lu3rq_{4n z*=_N2s8=5538+#7#Dzx%z^T={&4T0dSw-+(@Ty|lK|4RxG*g6xs`Q?d=>9L``nm3O z7<70ZEaC5n5#uVc!ad%Dm0(Y%Fn`LvzwuEW&d*!fa8suoeO-ogU)s|Zf~pF`NRuB} z^?0;#KG&E@xFee6+LJR;OJZ$j%H8nTzsfONbJ*8=$lNKl`kC{&R`v7h)#sO4S-cOn zu~1TDydi1H<-=GimuB^3Co=NGO5D?VjE<=o{qVn0PnF81UqkXDm+)4Kfox}cS~yZ- zMkCf9o4AmPQn&z|oMc*$L*U|^rfRJXYAjghv+;S`9zu5F6N*n-L`~lM3rgNUF zpjv!%{78>+1RZb#5H4<$f6X0`8X!nup9AGV8&Xm+iL0Q{a~Cyib6r*X&`xyS-x$%% z3cy}=6E9-T2-e53sv~o?SIMTkN7xj-pnrv35TEW7=t-6W3~UI1hS&70OHE6qM)L^l zhF{ojQy-9gSj$GuU|kZp2-C{x7%HEA{%0mD^k91?Aq(|508Gmr;kyLlt6h8Zam-Wq zu>9b_1^!IRXn^d)C^z@4@<0v zg{8`@iVd&;&`uXFz2omh&P=qWQe<^@{n($YvBSW4GQv4~u9W_xx}nxm(+YC=CUKht zcAcNr>M>yU*nedht)W#=79_D!;T64UuUz2UVno155j;C94bk2mjF9DuNeC{L%b;+3 zzz1^`iDxj1FJ7ys2r!K?;B3HsCq5WcpZMrFv_Kb@hs!Z5&3APHq`Q5kIm~S){xG_!{R=>%`HPHs zav>OPD%NMVz&otd-V>d_b5g{iSk*>u7X}oqPr$=JxRY zl){pCu)XbW3VlL{`m7DQEBVo`bAHcb>(`1Zpv@TZY_rB47+K|M-~YJiSl2iKAT@1n zR#upvwt`#k+=X`9#vRV+2>GBHXu14JlOK(*Acf$9SOQ*x3J{Ix)pc2PD}kSl*pmI~ zXA%k5`5z-mD<1WhE$xLyG1lcc&Jk}C^91tnptrdIF&1c84sy8;LPD+rOpBrGj1R^v zvG?KCnIlHz-K4os)o5U@}_YPM)MNrhFHWYR6oC40gF*lS|U zWA%!jO_5F~LO_DFg3xj$<}}N}lcfZL_D?}nSNeQq5hfs5jjnXWhrm1jLQY?-iR;a2hWq|k(~N_5(q6~%rkS=8sYe*9#*>e8Wq_afc*XboH6+8i zE`d|2*#(9OS+uH`h2(AB)2DP-1Au!ed!7+WIlV-y{w%bt3g9`N2D0o)pYwwDKte~$D%-(vYO%%v!zI`lhMc;C)GriQkO{kIEKVQ7Kb_7RVx1`T zPTzsDzU*Ls!)3M2(}u_dhRf6&JSAbK`G`M~f=d6HaLTUto`Pftg+}XdpFVAAfpk-7 z{cNA-X#>rl_Jc7%f&H}(@!5veTlM&nGX4o+Z}SS z7#_J{VIFy88IL@|tYcx>ZQFj*>{X#)skDuLqK8WSGoer>A?Z*iDQQ8TrlG9?p_4Z7 zZQ`MkPv1e5UtOO4NSh#H%kW7#{VK94EQQINQJP)C59YjhJIxsYY&BrFfi*Rwe5u5q z{KbO#79X)uIqAXT#C$7cz1ppeAhfUb11&~&UkSKG zJ{Yj+DztPertx~&Q@6Wd^vC;SeDA-;V91j7WaKjwwT~zA8)tTGScLLe7Z!5yx(Y)^ z8u}g;bZ4BNUO@w2Q8diB*i4I?zxNAH#h9DwHpKLEfo6*q{Ukj--N|P>159C;c#FMm 
zS;B1&)8eG20sJKjWYd0Swhk=jb3xONVw%*cS(iJPuUGm;6}Vh(n_gfm&mfWwU|f8KbhJ27xCSyansnw}gF(9dcB+f-dq1$la# z=3cpi%>62Q@_uRVb*UdYE9a?_<6QL79Hj;Wr5)G4{2O=v&mSEb|Nqi-siyC(ciT%y z0IB#2<}%lHe`~4oXS=>`NtJS1*bDU9y^||fmgi!Eva%8nJSn3sa9{L6O>JBCviZEU zP^D{_kjP7z^;-FMy^eX3ZshA=gRCbdMRSi?PEsy{O1>%HEb(QQwKmL*AX0VH;9TGz zey4Y$1=qc$+2Da4WLpYw+h}T;Kp+Tte0jfyAaf-a1aWrHGlj(*Y3Dn{QgD0t z5I7J_)ZDp5#lQqq-_YHDGq9)Ynr2{_9(H(ps66H2d!E(RwTXv-hPj#*C>~i{1Cqla zO&+~~l}%>~%lt}RZMW%YIlxO7V>^@>5h6JgO+cb6k1|iDNb%Wm1H`pyfb!6c`d{J= z`^ITBpkNe1Bi|%TvQ-}v87K0~$CTl=-Ni4oJ&RtT@gaQQM~nly3P8gat2a`}Z#MQZ zho0Vv!q)F%0cw>kuecQ~zd57fh}{ibrV<%rXky0#D$Hkw%cWRKR-gS(uB$cFXcDXE zotZ?qhqmc79a@PiKaM%}ZMs4bwaotGJ;JMvJU+Q{8&Ey?u-IhKC`4HvrkS5x!e0=* zQ%}K{)l3++#n24ztsqx}0NDZGJ|KgBH6BCZbFdivN`}6}fCMPWGYLTSNsS&Xnyjx6 zdE~>$c*w(DHCK-l-xKC!xPEuTSl2Wyz@437z@3LW97+S!ej2wm9&^wx zVWhwsY1c?cuTfcmA7AsoO52N?Kyg>PYTHzr)=O@-gEG7u&zF8OZ(OuKjyw1U-g=A^ zLK=OrpEGQ}Rgz>~2AS}|NX9sNw?;jgSVEBeIa1u&XI;&*9A{M~{z6l$$iv%HdU zV(UM8(Eua@cNIm}tcY6%IChFXWOVJ%48tXMG&BK73CndE{Gi~k78f1;wxvS!b?!T# zL+`9Biq54zlq0WRoC?{5-*d3Y;eBA)k{dhxYi(u_}R=rUb)7FKd` z!MQa*N=4A--{dH-3NmE2caxd?AEhiAI;_I8zr^zBA>PkVWDDKHZ|MfcFWEp znawM^tI89<<-=Pgyt9%W!nC`> z`q<02soG2{MBsZ5jr>!GzTK@N$M&&I0~Q&x_PD3q@$rZw>A&y!%Ndxz3^kx2x(FUb z`#Zx9lQNA)hixGd|H~|{k32q(nOtgjXyajQieRNz;2JnsDcg^;ZcAR>rD z^4XvaXZ-pd&OD%M#qi6chQ9YgOGqW@z#vR{Y_$5Qw`VAlS3%S93)#qoL z*mG;`fH!dR17z-i>?(eZKlr$bLGQHq)p>A%Hp;OTy7(YPNc~CL&B? 
z#(ByQ!6Q__)H}`fn2zNcO6~xXoB%3eZ5gM9D5NM@P;zE4vYsC7O$W&(@2oe20| zMy)!0hl<5*>r3I@Ok!r5L8gJJpUapVpA_A2h4wQ9Z)eZYgG%=IMt8K=ZAAk;+jx*~ z;u0=YK0Mrg=%tDB#su<$kdLr@-z&KflA<9qBfqX6i|~NDz`1VP9%;x&@Ij&h)!Dbj z0!U7yfvML)5t18!h_3E`g8Mc8`5O>gUXhB#(hE!%HK!<vD?Es)+Gp@n)-u8+LSPiX=Z%p}WYO~~ z)j$JqU)(ZbcYRWRe5$nTNO3DQ?$H}opOcmRwMIHnZGA63Uh^_*33$-H*~bio=l)wX zawS>|tP02F5t=5ZA|u*5Jz=7-Cz^k)VD(2uB>cE*&D?+b2F|40?_XYxtt}RH z+n5skC650!iLiEGXt^A@!b96d(;>7ZAp3k&|*YHc|aWR0M zy2aw#auS?zg1EGyyOrZl$gj!|-AurWonZcmjEVF~YIAn=NMQplaX*0#O7+l4!<&Kaf zS8jcm24Je>aJVvy!`fL~Lib^p{0^p|gAV&Mc+q#ca<%{`ZeLR@XL_(baP{&M%B3u9 zo#^%bH&>@ZTNLnrkAc57?p~{y4F3h~yc80DewAa#njue?=kaBkto(skrcq~r?SFSXd;)TaOBe!gyt~A@b1LDa^ zGu?%%q<9kXL^R!an+C#$U*AT&!8v1-a;t-5#mct_^nSoOnhp*RNaokGP>52?*uy65 z9NyA5`n7(@lTP>G=YzoBGT7UqB#?KFO^8Ea&uw@Y>W-^A_W|*MZ!`?yiVw~8WUIYi zKFiXJPHnHBFIoX=xuR-JP+o-gKv|q$ivplmMQ6nBT#Yqw@SaQDZwgoyn4Ztd%J;gR zV!u=rWPW{izv_Z0PH}Ipy7pATHj?UM|y%QNtOgkILJCnpS7FE5po^%CMc#pe~ zOW-mq)oo*ji}~f0>!sCfG1}a=XWp+T3*lCV39U<%X>!}fA8yL$Xydi68n4}Mdl146 zy{`t`p2_w8RZsxFhUw8mcyHTErx({7@I?~paP*Xdf@^9tB*gC%GC z>w>rOJHP>#D76J>k4?(CY`9UL{a>FX+m5;u>2Din$HGfU0)K_h0@E6xv|nse`!3SF z{m}duJ=iNx`$+n{WCIn_nzQAqeU|VfUTEeSDSkY_eY+jdsD^>b?NPvT5q`~$0I!SG zx--hxwPx@R>+Fr~9M6hm`GLge%MKhsauZlRMxNl77liSj7x=}_X+HU{UI3{83_Ube zQL{6|1H_hzxIZ}xjH7`{fv|op3Lpki*|1%HB$|+4HGErAi~lR9dXCYD4bm#g9M%|s zbs4ozS@L#fJb#Jux@8`JeiKGUHSwZ`8~@=#0R~E1E}zGMMRh`Js&1P5HOA}7a4dqU zULQ_#) zEn7$BKfjoA*KPM5Of#L724)i8&O^BeS5vv0w+`1!>ysd30JSyebrGxHD*U6TspmDBP3$JQKd)g8K-Mk#yb?LGlf|2Sv=qn z2wW%NLoo2SFiToZ4JXE!cJ(D7#$b@*dY_}c+%AdXJ9b3SWb*+?+ye>Bh@-jJWWwuB zj0_lp5@uZI^0wz+-?bgKCbI-yKX?N57<+NQR^?6`VEsg1b#S5wb}OMXNx~d_w=FfCkmC0Y|SrFzAcM!PFP;l@&piQ z#f%nI-=M$avsR~wo=x$IM|8PS{;J)@!|cYrr~~T!V4a^AhDu)#dX*;?m%}RVxbFN; zVGJ13t{e;9nao1~x^toYof$pbAUW`E2;EVDx;Em-=Rbi~oEm|22x+ygjC-t>G!hV( zIju$<+I-NymzlpElBO|2<+`O7bOXAMLzb}zDAuo=jcwOrGZS6LH)VIa7+U1W8R`St z#_m4EZI6Nv*uRG#Mo8WO(#m&*D$X37hg9PbQqo0O?amM*t|)hxuE(ep@c|7z!Kpew zQoC)aWfs#N#Of90uU-jm3kiSCTj@I2?^qYLwa$^O;+NuCt7YH@0;&mKqsK^0w>m{9 
zm#m^=!QMPer3Bvz{A*s-fvKv;pbBY^4NTbK!DTb*##s$s0Sn`4q7ZJ}p-mal-zh=< z+XOQ3YsuT}VIY?xlbgQibRVMLEYUWx);nH|p1vOw#b{e48S&ju%aK_eoz zbE4k+yLRow2<7)g43{Q^dc{BHNYn=_yH*{7Fo*w$CONu_+Zr^Ekcr+GetoD+wN3;IazynZdm5im#sN~0aUpKx z(mQ(r9{xA+-|u+>i$7;Egs&D_d+qM|X3@y`_2v{6gVpM%@1^wJ?Nhoe(~@xqyBAv% zZEK}Q!B*WbHZB>WgQ%&gCD&f+nrG0(Qgh`M>hBQSn|{~H%b{dQg>{z}1LnRz>7E&N z8CP{FC(R0{87DVxfn6suzV6B4lUv-a_G*K7Uv8A&z_tR)Xn{)A%%k=o44FHx56kf5 zih_w|qHF+#_^f&t&oz+?gR7cj8!e4~yYzf%Hk0vGqxEwI?K2uk#nWL|Pv~C zG_PR=>_o9P!m?Lc_zmjoo$c|XC!&U|Fz|2;zV_1kh<| zOIypYjxlOTN3UQ@!rb@@WTpOvXyo`7a#4mYn2Up_O7KMRPr}O;&K>KaX^kxK9v+7 zlk+}ud^gx?Uqat)dVaCXv>;LB?l^N9S+N4Y5pvCYyBIj*N?J~yahASjEtcMtvS}&$| z;9?3~ueqte@Q08oJw&MLM%QUXmX`KoyP;M`7NZcqQ^$$C1=w4UWq@#CFRYT~IMfEj z3`&}S(y~|{W_)Z10!z?$gBDO)j>j$XACD{m7~TX3RM{j-#835T`R8x`PeucHk^~~h zeAJWp{O>{or~1ywwp=kmK+QTBOjC{+m2RV8Hh==HttHOYo?Mz|&mkTIXr{j)`iqWAR`{~ny3MwWn;+1o1A>JnnZ5Q9F>Ci< zYyP!vxS$Rf92mDl9xp}#4ESN@f{~^GKEmFxJ?;~2v{6iNwcF(WP2(jU#E&|vWZuTF z0NiMw;@&t?r2|Fb$b$8HVu4 zCknTq*vtCV0nN+;ByJ?^^m^34z_4W*I{i5><@97V0(Av+&A5L=F@N|kNA&nJKmuO*#d3;xzNf|nI5~T$)allCsaF8%|G_OU?_wCT)Oek1N}2Y|13G#fA-K(Q1t)KDaf`xkVHQ;yLZHn1*ORc zWtPq)SQd1pH+u?H{B5Wj!6?Ejxvh>`Yb!Ja@XthidpTWj2QLobv~kV1@f@BINv}; zfmUQzu(dsU@Q)6uBh4ews%#)T(Yr2yKIk>c;9HsfG^*ehi~HJ`!swW17-h_R|gf1pDj=WIOKdrt#G zUUnIyqGf;c((mu3I`+c#HVkTN|G<=J_p9!*3A$jg9FU3gEmchJD5okWoD?MdarV6? 
z!`kYHsbAkF+7X>CWxpF}!>RryT@~qTx=3$uvs2kjs?mqlYyVZW8I$LluVI=&jU44H z@0_DeBt=!OQ+B=YE(4IKGbc$o4Ba&c1YwyMeQ*!%`Yk%-v14IPb=N!A9 z2L-@u580(^8)p&&9)^qlP)ds7Va&s63_atz%;G&&1Z^W1Eeng0!ko` zfu#f?%K_~~okhrU;Q?e~Uz$Dx8^IzaE_vXDD(u(P8z6e~o@~Mw%+d8B=Pke|)C0XK z-ezPmOF-IL7Dp=IkBQ^1wKV;C_Bh5KnxA&6mDti zIY(DxJOPfezRRSWq|_8#1a3RKme=%p|nR&j^}xV#&H7WEFGmM?d*y8Q~OsnP^78yF47 z{3U%PYbXAtXG@YX)H1aq9S;^#zg-j59u$JH@Lv&I*^R7^Y|%Qq84Ys5JvnMJ1t{}0 z#j+-0ejdv;;G=p5$8f%*HYS;jaEi-JqF(H{ZkgD0SXxoeYbugy2S4`-~qzlUu`dq0KM049UiPA(1l>1ddP0 zscqQ>vKET_UL>woY_3#4*Rq;53}Q{9z1d{!!V&U|)qU`-8$g9hhj)~bNBBM08)NOu z6*3(DBupr>V&}x8PwwqNx{u!twLJ6l@Ncrb2so}0pHA=G2i25xa zwLa)Z8dVY(77@tqeV}Y4X*2>+!CNy`zcw{V9n`BTA?6`zV`7#)&BR@ZP=hCnnM)J1 z7vv7a=$1}~@?mnRuWFkr`oJ-iK+(e=yN9H+X@Dy@I!BTlmlCRn%DbBqO$jo@waLPj zD$6VqnKbT*n_Mq#B&{O37#R+{+B*K#b7ZH#)T+_sVS&uH6N+p~yd6|@E*GX4Pl6qpftCZC-RC^rIn(OLqY4ghZt?#1s#>txHYh#o2NkIHqDLof#J zIT`f^q&E}=c$f#A-BQE4vT@5hi;ldHQ5Ml>6vmmPJJ}xf>)4xJ{g$+S8~t}^_eM+o zW>LdV!(xjlOWOXRC?N)*U1rq}0DjhC=Tsse_CEDh zAZVQq(F%`mY;5t_ooHUu4!Ah;a1Y}Xcnj!1^y_M|@TgNH&_;I=$neFjYTZ_$;y}Z~ z^$TwMKsQc`Oa+a6+3!MLVD1=5?RKH(6W*^@ZyI!3hV50O4vC9Uv}Jy& zLZ|nLPI^7p*JxFsy;AaojCo2TOE@C=sI?uDuk*7mhmI}GWmOrdSFnz0@3MHm*HcXb zu)r3U2=LcD{V^{WqTD(#b8p79uEgq?rU;Z^Do8A5ehiPps zFhz>&)=QZsFVpd((gg>B*ZR`4a8 zuS)++&mMRh?(t>wG${5#|9y~kL87}3B}?lR|f z&w0&jUhg)waE-JrPaFkpr{XCkoU)S-dM>snI-xk|%zSIlH^lJmJQv3eRDJU4!a%!X zijV~wMfxI``r^SNO`bGVF@SvxxP*k{z>8zJ;&QNvRzJ_fMGR~W&aoDkbIn??^tFxqzt*Tn>E3h%)#k^|={_0Vh2CEdm?LuIryU1Bp{3KoXwaPNVDv%h?Kw1Axj>mi-X z8ii(5g!=cAhi^Z`zo1%NHW7w zBGnBE{4`#ONiB0V`qV!cu5 z?nCpQq}6kX>ra!ouwP#*SzA+BFeOtxJ$VC%9&co7=)Th;?7BQ+Ria;ljR8u7BwTtD zMO|ayOkX`kQI!_qkZ3*w56`nae|A?4aV=FL!Nw#Jwk{d3g5Pj}cBfh>zIoiHnCuZc zlgY?$hOHb2KNByBg!JB^HhjpupWlxx^U5(D>2$RvGh14^RCFiST**BBrq6Q^x2{{J4F=%PEfFvo2O`bm<=l3A( zZd#7IqTQ3{n+)q5QJmt62W{;519u1L`SPi~%BK^CIjSohuxI|i&fR~evg|tDeKC!~ zs=g8_@uw;$xf?JlB|NDgo_inO42FR_8;5wnD5A?w+$&E@n<_HC+fu$ z#80i);f*5BG;EcgNWXP`6MdUcx1SE*#NNbi8~J=|Ff#nigm%5Nl{+ySiMmIPofJYe 
zr_2*8dFRFF=0DC9OF&#nU69W;qH(LDW{bj@Rc_@N;c0Y;qkW=NlE+Gi$bN;Op{$M* zeRJc%Fo&Y5DwIS3-6uvd4(AgqoBMHfuLY=SaBzH)&r3k4?;K83*XhH8i_5&J7hrWz zYb=Lj;?UX=5y|H*b%7p|w}6~%)V#$5Q^yvWJmItbw-uZQo7moe=}tk`6krQff8Uj z3$~cMzh}GMrX7C2VbW`e%?v00&TNR`1rS`@dYI^)1JNx*l3;}hMrkl?o@)s_r5!8yn z6_3$qbXGC5hHk!=(i02xZMhI>Zuy;I9`8I@BzQK()Z(TJh|({ zQsdUdm>%28PH9!6)qwd900Vj)zSd_TO3&Te?eD8AK%)mT^@s9hPuA?Cp2gk6yQZku zEWiwG-rds9Sv39kfL z#O?ul`bU8m7O$v5`=^6}!+xbs$O>@>@1b1aupZ@9W7vv{S2B=G2Nd=!_?GvxK!;dZ&&dpo2();(P1QsO02Ya)s zPuNT4PVqWFNk0u9psiHoiJ-mbeD`(ijMmQCdHlzT9lIhaY?Xve;q?0*!vK|kc)+t9 z>Q7I+lCscw_H;Aob*iVhAy6)PQm@WB+UZ>KW}{W)%Pdd>p^$Q^sz#?VMpbme+}{Ix z2;gA(`f*paMkE6^^oUVDdr4{PJynCJ3wmI{>>z+zqxz|yjR0>-Exw@T?vYpIpz}Tv z=sd#UT@%81?Z3S9|9aFJ9`(CTp|*1WG%EM^|`!Xo%>5ZFt3`%;OI<|EE=Z;RpRNtlc?(P^n2{bp%2(_uf z&>36q;oe&Qc(A3oa`Wj9W^*2`QSV%STsf?vEUnD*jn(DfyjqwK%BRXnCm=J&LN_Nb zO8;?ZlMc+%(xZOlK8 zhB77xSocmAHYa~+L40I^uH2*V4^cehGM|z0?C)Ry`;&?01BLHa0UrkjbFyYPH-cMZ z9`i4wDSOddrux^#6__ZF4&wt3C`DzUj=YdXEMt$e@K{jm#j4USn)--tO(tSc$fctr z1t0^)C-=luO*B-KOf4kJhQHKP~|^_zj0 zJucBzR*`%*Mf_?`u>MGv(3Z_jJv6ve53hFg+Q1tylvX6^syJsbLR(k)Kbrk^cijSZ zSBYyF^mir%2=PHF-mCi_wNxB*B`5qqKRoc3qDP<82S^_n>l~wG`1giK2It=){ddv) zyDk3R7Qe26{~fmU8C3&`ovsU29s4c%{qKx_V3<$jY}Ablkq~jfk3D>d%BmuEiN*b@LQsU!R@-6;f{O~d#kJufav7oA$$3+%8j*b~-9~66w zb@!h3u^n|{9BP2eQ&jBefNqT6QMwFxvS{&R&bRe^&2-(3Kv? z%8^mk_A{``mxIm4U%o_SP^N{g?z_Sg6dk@*u2xpee^NNYxv)B|IZ}UYQMZUh<4DH?H7TKvvu?M4KKc4w-WMz8R zMa6l^0Z=hdeK)1>q}wQ?ZinB+h=?Sv4n<1sb%w4W~Z($=KlNf|Nhi?0qFMmHNN0?3&@QFXdQa|ZtO2{-@o2v{jpDJ*JX?1*GET~W6JlK zaqO9P!xKlB@`F#8k2ecIlIqSKT`xX9X8;Vw#^FSHbl)8N_XYo*!9UmYFKhhYCHn6k zJZdfeH8THN(cd_RP@R_f&2!e!lV>EC--eKFYod-k84vzkBK!0BV^m*!M9WH9W_BOMx$&zkPr4yYF;=hK#csWDWj? zUw?mU2EIHO68`2W{yl!mL=kw8e_!(d>Y1dous`vq7Qp{n9RJShzx(rFBlLforRBI2 z!&33$^FzH#w;;ZIMos(W>W7DTb}Oi-rq%GWn-w-QD9KsFNo+CqT1ZyMKo0o^`M|Vx zY5V}}^$bYlD&?-Z4o)JNHwFg~vme7{ZmwrS z5Mt`$l})*y4$d{SNUYHa+0B0!x9!Mq*T~c0)&W)OXcumSs_k&A!XLGig0Hw5YFbD! 
zGG*8`0Ey=v3!*hs59Rd-DoRR@LuE$~O3D6qrf>u!YXR31D&kdl&;lhx_=%BBX+>n?PB3uTb6*--tIY#dR6dYJZU@0~(pe z;DRuQBi`+;w{LFms7tRDNRFo7b`^8_d{jx(&Mm_xwBWWC!Vo>t+gH9ICc;V_Efd6 zJ=D)Z_J|eD77F#!B{zD_=08_G_{QC&q^%vpb^SEbUxvJ|3`hhT=+!w!7aG)PcgFDZ zD6qNvRocU=liM zdGC=}H&*8Flw0GlVarJISiB>?OkIRc|6-92~tNujd6{38;KY7`x;RLLAf|O6#?JOwi0_Ye?MEV|B-An`x$|INRLupls2m!Bm*K7Y& z*GO@N(H?Rf=PA&sl&6uEiN8kMF(n$<{`jR;>|L|YC$ul8jGVpdn83?R`Ni5r3C_qp z8cOrAyaA#{gEl`iVL~@v!W9V!3zkGA=qo1NlNuf`;O)U@CoCC$))uBaJlvz)*uoW< z4P>jBZ_iFU0xb}`{F$ohU9ykN(}Us%FEpYWibi4&4_pmCiFBTE-%VHL0_dM5y3-Nx z2SFFPzj)ijLEhP0PIotxx0WXiOIjw$>egB%0+1%DC#u$788!yFyj?0&&a=-k9drB! zuh};Cso=AuG5Ze+Fw$*VC8a(%lJy{Ye*ER38G7cX>T`fc)ghu|v{2|x+KH8geO{SF zjJY@AtE6f^mD&(x3X%_2B8^NRM#5GRM#Du$5S@-ta}8I1fUq9(1E=P^cv;l?lJK&} z(n|XE&9RkbS3pFXL3}ybuV~;{1yIi;bYijqaX?SchLvwA&Qgx;c>Cew@~tIeKCfV` zepT43p`Jt;nW>~LXJi+aCvF80@VY73xL)ol3Qby)te@PP)tRZ%+3SebF`UDimn;KX zDB=^&iQNfOZN-iV8kYT-B!I)@4*$f<>2F^A<_N!Qii6>;^~m!Y)sC7` zj~6>6ESimvf@tDihbuTP>qn*#?ptu{nB8c_r?xNfs}XQXYr8maawMeBjJ;5e9cbH& z17z_gsp1}aZ$#zEz7g2d-K*M@3vbVV?%jD!s$RJxqE!N`by6Ic)@A=Dw&X)8bq<2O zqZa5>x}-oKvl9r?%sHfUh*uV`rp>M3P4PyS>{mPIAz0S2E53n+DFKK0zDbvwNvKYW zY%Dw@)x0|j%2XK}ME~{fG%~cA_rGI*pE&u|8*eHduWXCwgdNJmo=%J`Xah|@V>ZSK zfqNg_h z!(Dr73C$PbLT!!Y4P_iF3fr|3BfJ|pxjJo{ngS*5@H}l_nFSQegu8)` zdn1V_!33)MOK270_6`1Ba3xPH8HR2WqJ?^oxa!3G6WHH3rO04` zTJ`niY-!5=AfDO&3_;rY%Y`NiS9on>rg(*fgkF$|=Ix+V3nPain;WGs4sDsmz6r(O z-GS{)9k?2f5%+EP47ZI!UVP0H&&+)?N^_CwAV)dc7g zf=68@*cYt?#Y-mGm#_1B8{O*iGTnrWqM$aY z1UET9F?<#h04<@N7yQ`7%FXV5zq|W#jH{34pv{bnA(!o&WP?Da{H_Xk3e8BNRJ!xn zxR_9(op|^dL#)vvRxJ>+cm)LQuN#Wl>o&?J>KIN-f1Si^RRLrqv&p3R4NU&*#*PJ< z`s|SNHr`E$Cl>TIlsuUr(F4X{0G)jm^WNPKaj(hoc=W;NJ-BG1x?95hZS^Cuk52#RhU7eTae9bc+ue0;^#B< zN=i!QQ9@493o{$BMsqZ3j}vzg`}pflB3vhUS;WIGOJM7(X&9vNN!rI-wyrwFr{%o$ zv+JX0O4nc)m@=9}b;0%LHy31fATczo0yMdVBK0v&2yOM;AlkT&rJ06J_>(J>je8?y z?eaHrovqh5O7v2YLQ!wf>PNhLnt@Kg2kx#%w=VAa(dEQNks)po8@%MHRWpuO#8%tA zJdq`6_T}xf<5IbvYwu{NY;e;W%!dmK8YN(m&46GN-{wTC{;<`;MDLw?Nut_;Ql 
z%nQf{Vnuf9S8E(+mHH{04??7;dtEc2@Vl8P>gq3&Qs}1f!G+W0TMMHEPO%yln;fL+ z!`BSZ1;72)=YU|UdESdT#gNBAes-DIiHugoVLDmrn=5yF&MOT0D0Kl;<075CaQXyR z2(hpN^9C{LNeDfzc3vvSrNC#YQ1?fm({{K_U7+CNBO)XL{Q8hvLSpQBR;uFfrhKqP zVDI!5m%$I_mfE5N0}Ea@_hU$J6NIgTpT^zqm~3?)>KII2I4JZq4H&c1Yw}92=mGaB zVyJGhC|UJhWo?cz8m+a`{4`hg{X}2_#j@$Gd$uE%&r4+17aP=y^s78|ie?i;tZO~q zQ7o&UXQ(Sv%}C*r#usw zP91Ob0TU?<^eI=-xxrGQ78b4#w z*Kp~>FAR&%#R|*!@57bU>G$Hkd8#Nm(*79SDU6BRX%oIw-i0>nT4{aQ^tidYkEsWL zjQR&4KIOPT?zBOHJyK(K5N+xKw-+gqmnFc(b#u4yhV^NLD1K66OV8o?B_feB5$P?M zua$T8^PHS9e*9HVgN5Fw<7aMFp6Pw1 zl306>_K$UdESJ-;7$a!J*`sR7$y0M5giUe%VCX_5qLN+$C2I9HMj-ct(9aq4BV+Qt zm=aupw&2Z`n^{J{ggH@wxRq)l@c~!mSDu?0i8f(?c0J-R2%lsjVrpLuOJAf*?mP7Q z!1sC7Xh)_W$aC2e#9h6JP2>*}oQ-5J)`OJN?IQQ9*)$fL{3|G#URROcS;fel_AiQ& z$t&^k@f*udg5pc0W&nLz33gJ<-AqQxr=D75uU?R9sy*;7>>U%cL&bIpG+{kcPVcaN zJ;Fr|%zQBD0nLNj5VY#2z0#vdwrD}t;pa8>tnnf+)xOIEX0 z{3Ne;J>AY!LylF!Vu9N0QOT88epO8qHgoz-Y@)J)&S@I}?5%3P3u9-UgL*ZP0P#Jz z`F#VIogvB5SMBdzTfHJuPuuJ7A~zd}`eh~*1vT+V(FV&GuMR0zW`_;zBj9`GGlX7+ z`)y-;+h^Dnt~?uV8FBcyrmJc2j5-O zj3YYOQPsy(4>UQiP0FDpN{WOn)s2dqR#`CS@J@^Gk3%1&?lTt}S~lkbIOy{|ilT;v zxv|-eE`eyEP`Q0DZ{UQ7%Yv-O+Op%QS#0g~4$Ll1tf2s{%bNN1YRFaZ#NJ6j?e8XV zv@EA7lX1crt$DGNc8+Aer_?V3=rt?Sm%QFGw9%21N)fk2yH2A0as0AM>wp^3F`sA^ zd=lsOytuA7QZt824(Hyx>-c=PaGdp9BiQ5Z>6afL!&lDXgoT$pyj@d;>X}OFc9rsG z37IVxDP9_Yl-x^|V#r#aFXr;eGh+qCI@J3pLSjMQ4_}{N*EZm&8RWH4+Vdn3#Ih_2 zRHe(V`9u1$?0VL`cB(PnVi&6q@tYvXNUrZ%epR9IEN~33c*svKRd%i2=gVw{L z!hbRm9oi{t+rG$8uWT(GI6qOVsz}tXw$)?a{Az>FAJT(H6Fp)UCWuO8WO4nzw)Ex0 zGUltgqD>&fG3T(d8XOB(P71pDc`8P!fiTud=q0|gQ&m)~M)iN2i8LgB)~>{3N7j%u zai0z&!dOoPXkpuWMd?Wj^-;V!s$h>hYwzVtwsw6EP1L?XjGAr}vgDhGk@lq+Y?XEV zq|++L;G`N1LIKk--wP3}bft8Ys_79s?&AJ@d|;=z@tz)$M_2-!;(lGUw#pURv;=qT z>iIloyY{llx{o-^lo}j>Ji5Ij@j4Aa)&+g{nYAqwwmhT`{g=|)C<%f zUo%h(1~f{wdr~t}*X%3D-!+>#FkaJ@a@~~LVM@OIG*BdrWyhbH#CYHI8A95P46=EW zNiI87JHLrb!FZTN{M;2eayQ$;cJwz9JB)Tsr;V{f9Zg&e4ybRXM97=9UHWjD>)}$^ zYx8(zo_AY<3Rhk|>U%-xL}p@hqTh1c-6&Q`T8X<5E!KXN1J`u)D0h3-t-K;EW)StJ|i-@#GnQYP;R)Rd37SJpBL0$ 
zub{1B^&f@wrXb2ah}L=P>92>Do{oX8a2t*_Vl(OSOLaB26SvG%)^k%s8a*Dlc+Y?E z5t{{Cxy2#u8?FyTSj=*qO03cttVeDb|UcIgQo$Yz<1IR;ki@J8(aC$X=DA(H@3Qp9n`jq zrSj%b@gFRnJXarKaDv2~=KKc_z=7`EjEvU`hi6M8H$YkLt836txzLD-VIw%=m^RMW zZ;0~{|2gO!Q$-I3@yi>e6-SX%8;sjlU%Mqm7aL7U4qqf)O-}yv0na`H0U4OZ8lTf& zf*EdY%1HDLs9ap$Cy$8Wwo zzV?#~R57P?DE;NGLVE#@oy7&-2kbl^?dijKhM8EQ*n*-0Q(4)!28cB{W-&zDge^;|M#!x`)Ezz zc`YxX4q^ll?3Esy$Cv`;2vE{tbF$BWk*nzW1!&UU%=At9wM+|4iS5M-)m?d{*!#%o;gM<6@R`-lLn(u6p3eAscf_ zao)lvx8$)EqRQ2FYJ#1x=|6s*x*O)cgTYxv>|Vh;cTo$(*vY>NG7fTQ8z~rn&Ur0k zUtg~0=%t%R0`i-&96!`et}mm#zubdNtZYsxDXK|UgNYaEjWOVpTO~URPn91&R8%Zd?>tv$ z!WvHcWj-BlPy%GTu4>kWa)=RX{c~q@XWaVDbltPiWnn}ebfpXhib%Z>+!`18^MTXP zj^OqxXg$|T)R9p`UA$2*KUS#$2fgG(n}Hb6@SdQi_^1ze+k|eP<%DE$$lqJf2XjK4 zsuKlbj)+My2JNo5bjo`QlmIm#_}J;J9p=wV2HnJ;?{n<<9?W~!ckLirqw7gLTJiW^ ztCBoW=8>z5l-2RyxOoaxw$YKs`pecib)io&m`GmP4YB6mCGRYsRXu4Nx_ke2V99CF z{yVyv>*_ztQQi1nc&}mBy7Zmj)}iXy3b=s)_iE+SF8M%#MH!R-a*A(hkF$PACx>Gq zpi^Yx2g@CMp$mw(^{0UXz_@cg5<3HIKRU7F2a_eZRF{O^%E}}*NQDWFj= zXC92|m;W{0?Yn)U54WKsOsKXjBO9^XFS%&UY)XkW(cvxp97M!^ZuPf~K5JWE2VO~gDNg|w?z}aZ++K)U5G$|PHI@LnmWdWOqO0lP)(Yg0Vzu3`L}P7{*yaj z*Lcvh#dCf3iy~>uPOyZ%vkKs_X0dLEszkV(-u|y#`Zvu_UMw$nQ`YhJWjqKqCB76M z$*(!Li+*O~P1HEVZuVP89l#}q_5Y~Kvd#se%8wDikmIGCB^ z%k5hjcL=F2ZCRYyvnokg?rsVD}v)G*|2I{y4 zftpTQCEGwSVNS4=RPgL${rIcB9`xZisO(nGFZK4mPtGn2e;(eKS2o*WFCa)J2Y|KL zt7MZ7SO%pu64-I=elM~Jl3~<zPu}|`=*{XZ}`l}Oo z`2i~1q|HU*+d<(yjbE1W+bAE>OhS!iJl~Gp^ADym4YPw>idGNbuDx^%5YJ)!vr_dmqb@9!-~LFo*#Oj#ht|W z7;w$&`BMqu7B(;BUioOaP4a7{Xz{2EAPXFl+)l3yG;iPE@S*}tl19U(Oqy* z1yY-LI7`Pi=dow2h4!EeRP~koEC~x@#PO(g z3LsO(&V1*aO}`HCv|qA)Q2nzmeOVVJT(d*Ki5Fof&cJ&GSnJ(dLTd~-hgzAcB6Cuf znTJ7c*)`0l?FqMnKup{9V-h2Q!d~msIpwya=khdWZ@aJi!jgIJpeuL_!!Mlq>ufm8 zwaKDn7U%vV_#+n&cV|^>6)`hVr;xOqAY_r^N$7F){{{q=D>79QhllU%lBW#p5o7wp zGSd4mat8}^+VpV%(%xJW{WLm^faxfzgd!%XRZmjsj#OGzGy?4=P+(da&nB&0_0M`1 z$9;7fN9t_C@C=C=~)alE5=V8yv zwm^LURkz-M2?L1&f;5czih%BjBPmtL0$EW=C1inuHWvd@X7woe8p1U}%CV(b3Px(* 
zIQdcbB0c@=$ji%hC!csVPvm=?GLDY|1)#*g>vabP`r7INyGfWrZ#n8tj_N0}!RR)nn;S29N2TW5j|-A5!Zk1oVec#P+I-Sx3Cb-$zJD9 zxL!xhUNW72J$~C@NxOI-6yUvE_B8qKv)u%6;?1mwOGVG)Yy!uhK+hXYrO`A@x^JDN zdcyNj&kvz6ZxUblL+N(m1Nr?xX7aNXxYoa^Fo@tei{!E&>MJ@ z)ON}%mBz8E98IWhOctX4A$-xqSeKXtgm7^60pJ3_di#qd!T)xJG|zn9sbW12QN)Cz z-uwN8rLS0uL{XXaJ{7c)bB*OIjr8#E#{jb=ioKxMZ~&O^yT#@L)Jq@0RsB9 z55x_fi>oBv(OczHu{(~OhMuIO)Uf0rtRQ|+})B4F~kUP!jZSZcvx`P>h?zBHtv z=c@)EQg@1!j@LcRV8r|P9LW;66nvusJ&)?0$71P_4EOiVQpsyKO0Ci-Lw@#vzpj#C zovg-Q?lR$`Ly=^F*Gv>$W6~=#$=W~MW6z+yoC`+e59YhPGjyBYIlk6IKD9?$iDVY? zVoV;eJJ_Y3TYFwEd^?#v%m(|2Ma2F_mU7^|{T^~7$wOa1!BfUKc;DO$Y|sEj5aS0@ z{DFyx{oajF*E-&^G)M|ukm*PmvN0pX@JHyEWeiGY1XPOUL=!7g;mI#b5=9|*Qgh_X zn``&&Ju9_Jo~eY{jX%LwODO=cn>Va;gL!R$KJzTl`nq}!RqW`-Kw!4F25sej409lb ze0>OoRyQ8ZT}EZ$m`O%F^XSg+iOC!XV0Sq-kJ6B;-Uc~bl;~+%7t)rs?8&_2CA8H7 zIRkg4o_%R*I$VJdzk9l8zr3aGLbQIKwWH`yOFrmj?ppH28NaNn^0yq@-=Qi(7$k!{ z304b`0O4iyk8KRtNuE=mDIQeOBU2A7iFB8{2W=a@GUR(s=VlJglI*JAT_J`J-osVm zV35+x71cvFl;&-pHOeWh?N zEQ+|u?lvkLYd;95I@jPVgCN~enB9EQ=c4ragh91!G(c%-9(@MWyL1WKLpJmEBo5V} z?vR?-dy$n)v&9YeF8&}rJw0@R4tu=$YhK)4xW_6MP(kDqjdu?aegY)Kr}*~H=(0u- zzpKe~-5?10fbI$j0XY!F;s7&DdN*^QG0NY4Z-Oe-kf*3Y7tlR10%%~V&B(8Ru~db3vtoSz3wTodS_`DvEL|s≶}fg^ieU&I?NH_`4^xc8<^WSO+iOU@E7 z+#yBob(L`9W#OpoHr6$qR^3zvNp*zSgSj?4dsw2JhxXMzeuh!A)fE?7gSuirHGA24 zff5t1zVSjZUB-k1#RZM$xo(~JtYZ66R?tYmk>KOsB>|*{fdvU!%t>-AcTzfde7OA` zF^d$S)su}?eeeEtA*T-cc|rQ*849nLj;hUn%Zxl2IZ<#3C8O_@&|5?J8!V@W7B|On zpY~3J>ciKNq&uI%atLVY%e;_6fVER66JWJHfZKnt!tT6d^ds377km7qqX%poQh`x; zx8KR&JW7;zUzynR_ety8lI)ejVHQ8X#3XxIQ(=0USehw#0eLfr*8&q>pDFC#tZ4JO z06FK!#SMosk^mDWL$Gl~&1=^x8?o59I*Bl-m)W#@&^NpMf>HQ}$MO#g({f4*Duc>c zz0#i-Zz3Z#?3bMrb3a1*bKXokE={$yo^|a|rkc*ChHkyD< zdxqDWM4>IF1rKt|aP^%z!Dlu0TTfcg^($gO+O-q^be_K77)Q)Juz?ztIoiR@ELkm= zsz#(5m7Y-Ms4nU3mx*VBLG_Sa%rN|>Jkz}A){0{Y^J=cK8d(Qiqnb%p^Y$JY8!o4D zS5`$OOV1o^$;6a2F@HB)5AGsgv%^+Kh`U&K0a+ijQs&1Zy_=-|joG&)nPy#@-IzSo zA(CpB?8T2YP)g;BPe@>)v(}aHTMsU6U5Pza-u^5v{Y|=4qdxEBC0% 
z0?4n1msZ<6*_td3e?(4k0}Yq-9+=4y7ezK2_dh7iZRQTK-m@RS(KeW$j7PRsO#_`} z%2e&Pu!Em${5$TSM*avEl7A+P8BSXQwsgYxLkH+kT9iiD@sn9e!tWm=g?g289(~=&nVg@ zy$7jZ{;-Z0zs57QPTRQrU1Yv|t88fl*3;hJ2_bNn6%DAk&g@B#8F?Cy&_7)ABkg7# z4~B@1cY|h(HhuMX`*<71p-U=`V|K~Q&xK6O(;G?pMg*z_Hq|K+>%kiL0FX!8A(@^N_kj=B`oLZ)vLBWrf8^TLJ?CxpKH5L!3l z1`~s<4_b-I9VhOJld%*`Nfx5=C`p1dz;X;;pcqj8*-zT!bx=$tOfL-3u>smqb3QCaU(p)%Qe54G(;K+8$Mky=FZerKx6RT=nBy2)5&z6k922Kt{N)0Mrme54DG>DwaQW**`XZ zBOqnp)g-apLOZI11B-p8JKWt%eKFLkE$Xruj;bS)34r1vf|D~na~!^9Xa40YTze?o z+$}la>E}5^@eK3O(iZW(daO`8P5rVs95I)`Tco(@a@CEWrG8{O0o9+i){+u#vl}jC zXdFv3KuDI^-X~yn@o&Gf3-z?eCW?dB{O)2;ABfEtlt@!yuTU2+qtg%kRsS%>EEJHY z1|9%%)-^@eD~b$CBU!Ql1yr->mwe{QCwH zmiJMUV*pP*Y+wP2-BVr5IdFXdqa1B*+DZnl_+p^uaHC5i*lUMrMZ_EjnHc`4AR3#8 zAQLf0+^L?s@x;-g-nC}`kV2c0F@)RD8Tt|Kn|C_E=iO9Pm7t5H)gGy@d7BRDV+UJG z2Oh*nfz&pX-0SBUop*Z6@Y4owv01=mDM!BK$7<%)HWw$KnWG57P7vzWgWONqD@ubh zfkZ7)?5bjl%Y-3BRb7qiu@XgYzSGKEX~s>YXp-TL8^rsn+vjU6c*WIukVj@c&78 z$vG2F`c_W}Ge{cPUvD{Yq4%clWw_l;A*`%#dHZ{kd%N@fiPjatNZ#Vw!Ogegqb#ej zIL%8;eBJKH5~_tb@7(EyWyywt4)4gx%H0$<9NMY{nih)A8}DBF3Q(+X&);i6uLfnO zG9!?x`c3u|j|qOQ{;GXhd%IrQ0Arq4+_^uclH4Vz(6OxNG=o-_=%h6NBP+m^zp z=;iok@1(2W+x7kooB2NQbu?2>tgCSJ{)1+`1@v8p8G#r|Y3U;03?-3XlmtFE0S}iD z=Br-9vNAJALQ)zRbI&zcT4p-k^Vc|>*e$u@qV;0rxp+H0>7kfWDLnsq0ZSn^Ut%uq zBWSF$?cM1+`Y5TID!_&a3as>)(Y6h^H!h=*2lB-!a!~jy)UJ13vkqu?aV7PMjh${x*?Zu34__HUKuIX*SBZCxDlk>s zLIWSCdj+_#-7Qh>NvQL-@I1*+hT38)`}6Yks}d@%^!Tfv5M*U6Qd1`a5ft_1eluLo zc$?3N$eE4;=|%0XO39QH7lUf7QVrVFehsAGc#`H>ho)HhX1hgO?>wL3HQEY{BNPrU z>FD^dJ8KEnd3)fIEXvy?2zwHzv&O~GUwMXcyT$_Y6->^5TY_iWj}}QUf^@XEJ7~Lj z<$VpkEWQMV6wCAs;6xsjDUVebt;qL+UE%<3Z{#(m-R_}T*bjdyro_N==idXTArlWd zJBNGzLQ(ueLIYxIU@BzGl5?s?);aci>rbzM7Vllap@W*md7%{ZLe6OA-a>EX8e-2p zMRW6M%3T#U__9@hj`MD{C_ln7b^6`A*P!KU`Nb;hszp?uIcmEfP}%L{%#cw{ozr?w z5c`_<4*mKxm<2p4zqT{0V`$OSl(A1-cjJE@eq>wHZlE(JBG=ct+|A;=Li$EM`DtSe)g+ks@edkDY$g!JgooigOYSI_s< zzIp@7m-kyJszfnGHItT7$!O{M?eB@57Zm(NCA+&r*eY$ukW!WiP@aOtK-OwWBBbfS~_GRS`<0>#~u7 
zTW3T1IlVr6tnRFSZ%9f`=AU45km)D`f+mitOBF@w8P-LdYfiny=sRElh(6lepgruu1Z-~F z1cWRa8j+CNvz*&)vX`L?h{o@v#H?`lyMEI5s;d?jvee{*9`(N}^-$#x+&y#SVoK7< zKTYv{ty9+QAUW+qL(tTi9riw+(yjg@*>n>+Z*~S;54i#O(8P-dCx&chXg;8=6vH4f zdo6C22D^#w5#P$L-=@?Cy1?_K4cM_b%o~D`S-=@I(};XrJNh(tf%d^odLETm(Ei(n zrQCP!e0F(D^QK_>y9LCsv36=oXKN^xx>tJd>5?)=rv%LVru)l=ICU5!>+8G~U@~He zhn_r>CAwt~AFV}u=l-mbk!%rIarh8jrIvU@#74U&;K|{^mbHat_V5;LN31_bZa20k zk%|UO{9{EpLoL5DRlCk|`r=Pmo7`v9?5DV~F1KYMFqA(VATMrPvGD3cqA05AV7gc* zS6voRtbRUuWxw7fADkUKc==7BIPI%3q#Y1ZyIO-EWObdSU*!&&b?f0+Obi^DfZ^Vh z!?x^sX3!y4eIk}qyl5X9v2TaPT6{HEJ$QgK8J3{b#0h8!YkXnJz?Z{Zuu}>Geelu9T=nVG7pYf$ydFu;q8L7?1!zsJbQJ$MNSTI z36Da}Jl_xA{gav&DgEfY!}*VDpvaG7nyC8{ex+$rk)9#a(dZ`2aG{kfhC+ZM3IaNR z-D(Md^9eTg^)k@SA1bivY1TW}jk)9ofS^D>^D4xRR5&Ti=C!NE8@)Jjm-_QwPPy;c zWfXxB@PMmkN*4=sM|#I`1UMMLYyjK=hwd<7x`r^$umZ9{#gd zq$(A$fiWVt$KCl3ko$4b!0}~t2vrbJDmvl(rlOb-yC8S5a$9Xd2)caALIy{{T3(Pk zJ>Ak&d3_7>{yNKSfNIVC*aQ1+QAa{3rrld+_rat$B-apbrl42m8D9m0d4dL!A1fe* z{FZT9=`p7xq5WpB1q{A(uzj^`YUN4wd{o>t4dqS8ykG19hmRx%Zvur$CySh{O!D!+ z)&bGwKPY$CfF6h|Fpe}R!>kzw4U|s`HpJ@4VlXxujSRUOS>;Z$)MqMP-+A{IX?ANK ztFnlaGTs$Jq{dG6ZaNYdf5w?Nmq)u6tulEA-DAJLW7C_;#G_h6YSW|=oxOerdLC^$ z6_~XIgl-IK_ST_B=8`HvOQcQ;rb-5h_vmy8v_A+Ha#vK}#3W;${z?@{-`Qm6Wtg-+ zAcd+!O1u+F#Uv2H7QR_7NWTuG?CBA^_le>>qi<-TuZCTW5BJA;1QvIH%q@gMtuVz! 
z=S4YV8SUh7$P)dwrCxvNPAW5LYvjAySOL28l&@eT9kvwNCAb&fIJm{Iqzk|GjnLb~ z2RLt^%((RT`jrQQh^Pt>X6Ruct970xPYq9Qv7iWIW7)30*vVGv$0G>nyuL7p=Z~yI zRJDhTjdJ3>qGT89NdUgA6|%zzKMa>XDZ0O0+BxjdaZ}T~w7LnP9GY+2Fj)0}!0`J3MUm&s%-pM+z0#bF29)uItycTb&?KSF7*rIGGsB{f#JfbdskpM zyRync-&pZQ2Sa|IgfQt3EIo|MYZbY6V!45LZYik{xI(<(snMt{X^xL=@90@y=lbeA zlR4EDOE%V95{=!My(+|4o+}#u{qG(6O>HXEJoFqOWpq%ynQ#e0;f&(KIc`a9g#?^> zp08ClCy%z{IQVP<}F*M0AM?`vO|cYCS!sc#bxsQBr)wPWh>0Wsve z>)ny*vT5EQHZ)h|EK%=YgAs^UA4dD8*XH*HlTwsc-UYeJydik+-4}E24gYaG)Htv@ ze=LUgO_@R>_ca)jUuaD>EBZDA$$=^{EuH=xl0jkYBwdxoC*sm_hiY%=d&ZLP2v#ZO zfD8%MzS~WCxp0H{`{7Bqmun}Vp+T6*@N+0Lx6Za7bl;*5!&0ND*>^`~Hpg-%+7dx4 z4EUX1X`4-f*`fEJ+WOD}o&mzM7KwE;qvIDRfE30}n51i>(UKfgZN>MJuhE-y;%hMT zMM?I9(hE9~bxXKZ5%)y8jx~|xbgwf1f?eWD4o&BT=lVUxX6B7GI;N2tix8sCF~9rK zxI4H#({q$b_d>&lN}auL#$jd?@72h@7nT13A39Y zGsB2gCKWM>(ebtSPoK_<+2lCx_lmSlPDyW?L{1|oCrNS$#*GEB{zhA<>oQV@idDg2 zvxx*n^NbUq6?h=hHNS8jZB=Dcx=)P9Tc?JI)k?cuV!JVG1HL_*C`Wk?ND>-s4^f7s z?G+o#@2BB~M1l}8MjaL>a2>hh7veG>-s6?UzrS5ipBq1}(fSD^6kYKmvSVy%V27EB z$u%Xp#;2qtf-EIDCpMhCvQKXQA^Xcch{WXZ$FXvY9Hc1>A%7IFXpS$ncgdGX3Jxj@ zI3jlN=I^)@dOO1a<($z@Gskp$JfJb8=?ZAf7&Xr*!@rDELnXgG>TlM;&HH!P7yuFo zB$x});L9!dE?$5C-lJgT)fsrr)bmdgt!s@G}aVPX^AF(_)0AH>s6EpETw0p@}Ry= zqGGx=M+MQwn2HTZA^Mwyo413UHUH!SNcGzrPtwlP>_;ByV*Mt^m`d#dUUr&Eqvc09 zG{!7&a&=&4RBo7FK1$lSPo_H%k|b#IW#c4Nf3n9W+uDFpu-Pk+wGeRPZ#ZKE#{C6> z9B(4j*?YxedzJCiLe#5ntIsPqtDUjV%BP)t-q>D3<<*t0wTh~T4teFiACJx9Dr6SB74g)#cDD3zHw`MF0Wc0LL7J$>C}!Uljo z%jD5d2=7pER^IAElt$im1unW*aQBK&NpNa4ns5&uf#QDyR8PK)e7i(lZs*OAV&gEA zTN7&LeIO0V`h+w)bCyQ_c^#VfdCZML_OBni=6ZQ@R?R8C^JI)^via@=g;qMU*=BgB zQn4MDAeQtW&oucQIG##3mw`v+M@*#YrHh9a1c|v0wM^WlnRZTG56xfY9eWnwjFwsM zl`?~Nxz=r=9M)OorR2XT>#9)7%}3~`F`F5q`33C7dQY0R8z<|1$0P?6>3Q|;Z6a|( z_50a)jy@g2;6$)js`dz?aa#`^F%rlQK<7c^&HIr2y)2h?kg-9j@>s#MyIvKN^E>c8 z>}03oiLY+;p@lmBLz{T<@V56mZ0&^3BpyyvczmQ&8SaN5vVB-u3)(((*MZrTgq>{g zGhAtD@%m0>GyFB$rZ?|d9DLk`Ic?g~_JM!_mr_=={cNMu=>se2aQb3EKWw}T@ubP79J8D&aqS@y|8!&5vK^Ec=wKlbz>uL8XiNN!m}03 
z5hXLhJH2Cn4DD4nwR-#d-{C&ayq3On`|Va+ODo%nva7t1XLKvl>kF1s$BAwW*GQtE zVJ}i%x72Ov(T)(?muWkLjWu^&v4O<^J37;zQ4N4RHmY$*KnMB!s54f1x27#tpC$xR zIS>Z)x0teu_3|{_T29xbEhRBIyJGAi56&Py^!3Ro15y&H_4qBV70Z&n>u?S_2MQeX z`EHFN!YrVZ(AzB0XXi)FJWNUb%_@F5RpLa3gqx~I*Ta2EKawdkEz&wq15R!aevQuv z2`#m+HU7v%FFNWz?5lRB$ll&wyK3An2x-?o+P7d>YK)z#qUU?~lq}Hbf}R7r1;1gL zq{J1e4VH?EhKFUQ(l?YCvHRmjD0@6~yxQ6q3Vr9B>1M5wI#I+ijn$h8Q_U8FLu}2T6 zZud{oj@39um6#zrnM$sCMJ5MacK-M{$3dE>APLqEO0?v1FXyk=^@+PL_r=W)t#Xt! zZ3Po|pcbCY)pdMD{}Q-&N?0oX8;o%(=!lFtVratft|}-d5H!b6s}2*|+Z?&xEdl=h znbWqCdBbXisiTzr@|xs89v`sLWP*fTqOQ{2G_Ed>7dF(8bec-FY(L-qIEu+hh^3ab z+YOSXM1lOh71Y8{`*z_^F6GOnr=3%CTMIJ-Om0>IS#F{8*a$NcKrLObZ%ScldzOsM zv9}-y0}gaFFTBl~MA-Cq`s~JVEm|!|H1BQq9;bJ}mgWVEE zRyD29n9b-i$Dsx;|Mu^zm(n%+v*Ncs_bosRJ;&{8Ipm&|rb5%Igb}&(9@Z&QtiN;K z@+X#rrd3;R1KM#tt6qFhxF~;9D_2T*Yn^-NU`X0H`~gg8O=RbB?gD+5jbU)vJL>25 zabM)t@QTW|cO?G<{`BU0Q0BzDI$BZVVK;dv5pigEY+Pw7={n8_R35v*q#eZCzO{vG zjEm;w1xy?36|2ybnRNng5F;;myySdkqO)LAm+{y2% zSjkd&RK~gd=a6%1zqqtUXREFaG*~HsHY$2`-rpWp^FJ}?f(Q>RQqtMwd`eF?Dc;F zj+qToQ$dZtUmHs>+`h{Apa1>&>e^Sb zv&i4IHD3Q`uKx4Nn@g#jAF&`U^iR$CcrCq zy!vE~EKgs!+|NI9jq&UT;UN3|d|>rWJx=*3upGz2$AtOceoWrpxs7Fym>Xx2es8(n zGI}ndhxYap#X|lZCt!*WR9gZvh0O$xNd9tM!WMzQEi+wbQ`!459d4#AM;pSaW-+|& zG|6peKA+$<8EEXJgmrHmMSU#;pGB5j__^w2LFzQgAGLtO5w_xidN}2@RPffV${!rC z(eq@r28egqh5gf2&tJ$yzAq;6{|HMg%5yg^}hU+m7< zOnPm6W4hj7#z*l3;Va6ZcCsCpN=Dz$_V+YwFZ!t^VG}gqSRsAoQnk{JaL0 zOv(0q{l-OCXV>SudNxWf~vGBG9-YeTCX(3?3edOzZ;VkGf=*tE|_xfg0gA8`GL`U+$TiZ~w#< zd@Y8I=k^HhF-`61d3(Gm&7A<(OgT8@77!Wn5f0a#&a{U1wf z?LuxIRua@**Eu7aEX(Lz}DzVe=d{qfC^(Ac7iU{$#5J!Agf^CMXj21U+SkVEK zwYUkwiv)#WUkm(Q@33!exz8(ZXwUJdIMRRi2zL4bj^cdZvN`R`Dd-%vA+8Ky|Ik+b zDCKh!3ON!D8i{Jh>};{zV|f^_aU!c5raiF3iEjVKPj`hEO4}Ez?RD?7$u(6BmnA1_ zuZqdu^Nk@C3wm+MbGhVaBlRzX9~6HXFuk!!32{G<%Ktp$ zA{;RG!QRu-j(=~>H7>xq<}Fw+=Kd!h@b6ba!MsZ4#pY&SeawGvjoN>^_}^Ll??V0W zUi_PB_+QM;Tlj*}GrgZqsQ)pmN(z_9Qff_VU3KEHvO~ZHS7^M-%hl9Y_rrthyk;c@ z@dCP98i@in@bJ$GkbtrJ0kgR6yUM)(pb-__MFy+ghz6jN3uC+vbHz^%h5#X}u&c5v 
zqE=ev{7hwJ)_-%fgUAHSeXus;wf?qcSK@_?Op6SJVEpY};UNRIhGqrsT7Ng4=9w=> zSGr1-lGxmeFBGkc=u6OUXpHc8d;fg){oxQQ4N=;$gQdqkM3`G%zcT@4nj>>c`!L-X zpo{C>s$c(R&aWv_F4(*iLc~G+G7a1J5;=I__+7gQSJZA~1p7&P|6&(FURA4lb<>0) zE#>yJ*v37g399XgL`XpK?CuHT`x?df9~~T%R%h-HHTga zjD<4_CrX@@C42%RxRTDs>fH>2x$eu6dT_0j(CLun=YU3@dN@NbM=koCC5n|Fhav?b zHv#GDcr$t5+jv~l$@>VlBS50I!4VN_P(%`oJk8&5e?eAywa@F=%s4%9! zIZjg_2FDFfeDUuL(^mn7zRq2#sc%C4PT5b|E^hZ^@rX_Jn3QGT5ASgAzZ44uAgn+1 zSKFhlzjv=%cRl^&&p%n`3UCbhJR1g8xle~+dSLY#s?!agc>za0LxuVjg%CuBSB)4} z9=Bl@#Uy6iNhwSP#VZ`|TP{YRbv&1v7Wqv-MAy&IXaI2IY3C-oR(vx6o5<7632B0T zKvOaZ#M3Ae@7eUG3d)qOc5qDoAqCe2EvQy3O1~_cem(ZzhvEM$^F2zZ*$T%KEh_zD zgR*GaPt7s7d{B(NK96QzR{|iocFFDDBy@LZN#9h=s&<%~deZk?1hq#E%QyWm)j`?5CEPES>(b$H;tm(uP_UD9Xz zVn4phF^opH3Y`!pSln>_G{ne!vDuWCq+|5(*wvGF7)XN8?j^S+Y+$Rq9Kd^(O^yVr z1eWjvN~zS)BS5Jc<(V1<`oWjhrg>ajHB@d9QgAwpmP7Ua?UhL23h;dqXTxBf?U2By zJGue6Du5X+H4&qRmYUY^X)X?AnN-J5ud_AU3~pl<4Z+e6#J{gle>>MND~quluUu51 z!1>b)%mNg6y-frSyK4@cQ|x{RrvAJ1wE0#3q)YCBiq~+Bq=3&27N1l@kI}g}1P@yv zr+5KN1qbCXV;e+vqg+gI&m0qc4sYGUbYy+7ZC%!vDp9=Huej8S#5G=r1c=--yK$aO)Vl16QU;`ed@;ojINq>{m|n6b|0_%$=|E0;F_ zLlvEp%SXH+k5CkqsH}{ub)MwTPxDc+%i>R-O@dG-%=i<#fyEnAocwE%^~I7G2C-pW zf?#4)n#Ua)sezG#XLR**A2stlVH{J_?eADQmNR|fvYF#AC9SpE;t`I}`^anT{^CHI zZgXv5WFk$tc$FU?%_d0ADkI=na-Td<%EK3md0Btq~c#up2Xtv`=4SQdn+h~ z8?*w_Wc2~-_*JMsJZ!zJSsg{(*5@^Ad|rDA0TS(3SdS4dPcTB8=i$9PeykYleS*daLTA+pVI_W}6Z%wO(G%z35^g{e) zMSr;!0~f4E<{QbnmEL$8+oSO_eQ^|Q0UxVfG-=iaj2hL*(>&j^p)1&a2vWdaO#^;@sk zD?Ja-IACIZarOKrByKkzEF-_{V#lT@4L@l$Kb^0g71IsQlb}JePIosgH9YzDxs!<; z`K^g=Tsv1bA#QNSb0#;aDL>QajuZZYKRaNaVcXw1qL46qP$d_9 z+{xgKqYJs#PuB_HigE6N`OxPJn?|z%%T1`;FUf0N8dst9du!ck&Ty>^&U4aZ32wbt zc8eVFA6=VSmuJpGzridx{7#c2<#I6Q6#Mc~t6wQK?4CLpXU+Tcx=ww#K-)f~6f8B6 zH)+qat0TYa`bCm6WG;0x&SZ5}xxdQ^Eo`K$HQuyY@pz={Q8$&-(^^+F0LvYbVwa3V zW9~XKIhgZ}?u;)(h>|159thNSEX&GJ0w5ayn+6 zx&SEbCZ`3qa=DF2NU0rMj5*Rt5^fG0Z-%oYg46UWZIa9e>Sh8khA@?+rmZg~#Mr!@ zwj>CtR&nd|yfq%)o1HT~o(p1<_1rMGO~>fwwC^toBX^z@8dbe^O-YxAw-34z?ejyO 
z^NZ6H);&ZcOub5({GSbXyhmLccCObykU(PO4dN~a(ZGS)w9nO!+K?#HW}q4XuoVEM ziuYB^Wu^|^Z(2<>aiJ=uHyERuyS1OU3_@-n`L90eFgHKtnxXY)q2Iq;m0U_wPb@W) zp4LggTO`W9Y6ZOafhF(zJ&kinnQB)aeRu%QRETx)=~qZ#m?Z1)4jS09M24Y&s>aj& zwmB_aXz1i%rqG>zHM-PM^Wnm=#*!nD=_(EH;RQ>(mp{?6rFllHo11rf@{u zjUn!?-3MnD(2Lph^psyUG<#-rui0wLl<>H#$q^&uyB`XP>D3#Z?Pq92mR;@!2xsnXk0&)|ljo z(l}J!){W$)-;l9w8FwkyM6^=C-S+`Dz}|WQ0=3#Ny}Y3n0=8@?!tOZemjKBK>66CO zWX@f;pAB(J!&SWqLjd^YWBO>A48_$sSpQ}Z2F{Rj+}*je5ErE}8<1b$dkZ2Z@y#a% zl`LcXTl9rQRZxbX!-J<4va@i{V$%yy`{mWvOT*K5lEXBG>u=o{qwn`5EIH@5`^g&o zpiTqmD1W0yy%PaBjkLoeWs1_hMy1k2*S~h(y5;`r!oIztm*j(zqmb2`F;b?1&gR`m zRjo=%Pz?w`esaW;%*&eg6$1&x3s#94cW)K(Q%o3{{l#OeWgdzR#lAtaJ|y|U$S(k|){pZLg={2*@!WIPTXC`C4hD@Acccfw z*!0m?5P${|sm)Wpmn22cZ`LCV8%B>5KZ;vECRHOM48fl107!nYPT-O4xB5*BZkf>5 z-nuCvucO(f?L4M&u`4TUb}GtFsmfKYX(+b)Kx9aQGV?O^Ga~%Vb%tPdgESBvH%=a)7m>r zB?X)`g0YOk^4exROZD3yZXz<0qwI;@f3yas#xF(oO2a~wJ`JBb?^sOHe0dSNo=n8h*Q7iuq;E`5Tp)dgV z9dT**c+7&#dMDp8CV#UB}(X6K7hB=Is@Um)wQVY>Tu7H`|mvM-14)F#D>dop0!w#0( zBUyxbGM+Y8l|Zo*s3rGiSU~NBs(leWJXk5T(_@(&oil)rUk1)iYXaPnSlX?Ghj z@*7GNBk}|G#0CIsdNAL=V~3pn;kze3X?s$<%hM$VFY-*kUM**l)0!cWintWi(wE)mJ24b~y-Z?! z@sbpKa7Ht~aan#a5}N1^nte`0)U4+8!q$c_AVij?U)%q3rkx5>JI%VY(NHX$B$A_g zo1aDe_Tw4vK(!zoAXc8>1_Yq%(c16K&aB7Bk$n3VJ&a*jwKGH|JiscHo(n1_;XR|@ zD<>U6RmfvpM(B&AJsyUVcM|3;M(X<5?>EIh&d{F`uE~!XLWF(TWI9bRX#8-qLCha$ zdph)Nv(vb!ENu118O8@USO^zBltR0$kn_Nd-Dh$MK=IxEy|z4Dn2Bo>8VQKf#t|j0 z{-WK{LO<5zTQ}u4F9E-E-{)Mqag9SaiQS)jq-fCR%Y9KgUQC3pj<#$O&c-1j0&uk& z)g7}x5BWLlhsMlI-&4JGcvuzX-@T!gR3>k=72Aw0VY0`W(4Aeeb*V0L15l|QCuY8% z!1=5R{pqBfG>@S$y(S;e!Ap%bpSI1{W^uW5*>dU4fQx0_lUzXy`Yy^BzZVBkM&@u~ zFM}PAt++sGw~e|C&$H{^QAGk5M%{bRa0bD?@~aaIBa%&;pFN{)DwzjGIusJn8`AF6 z5;}6y9Ui}(yae3md(bjf8*d-XdO`M?J_Y??j%1&@!PU)aWSG}OJuq|`Oz5by+35uG zzTwc4X7jHQ(fvh34w*&8LLJxXv_A`f|K*DPmh%D{`nyaN8>Y@Y8;Ws~ElF zz0)E~51vYDcA6)dq1?&SSZM)O5KEtFLRCL%fKh6-BixvuMzZrJovS;)SJ@2r z2~9_Cm5!div2l$SMQ@{mAW`6&1?jWT3WiLIMg5$_0>a}$-v^Gox=FYutlQPW+H}7? 
z`{!+GT?xe<-ph~$7n1>Qy-KDWvwPTrnaQwPY_dWXW#i}Jq3zLzL2MLuL~PBaopl{2 za6DNte@}G>{jlgEulv0J!mwy~YLIOk3!-^#xL`{_BUK%H>m+j0RXuf^v*B7;<7P?% z2zCPsoSm*!KutoAROYxAumb>>qf1j*0cIqUYw?L3pmqP6o&Mc6ap$N&f|JG-EmH+D z`b+zN`)&Ul{yM-0`kE#|Sx26Mh{M*I;hjweY_>hii`8Oo=pqcazO{iM`bp9pVh;K> z-NMbEF(UpIHzjJsA1{IgLD8zP`zo5fFey6fTyHc0Yn~bhoS04%SCkD=*~b8I|CWUa z+ICg#F`t?ffe6WS#^nhhH~6(?H2B#0FdF_emE##ibhV;fR!~wCN}ExCmfXw?|p~Pu8k-JTknr<7_OxAdjiGr(=?S zQXseA(nQPpeZ7x)K?aEI*uW5Z-c@(~4n+QX&w{QP`z5{oO7%v%h^b8gW+AdHnji zluK{LAXCk1_UMYnqEeFC(_rwyT5W#$-@njBCX0r4{{R$_BEgkn&W2M#!qiXcg$*87 zi}_%4_#eT9d~g?n!DiI(zj&N~L{7GAPWk^|B8Wg%s1e8(H8TJ88uN#xTW|!@QwPLt z-RFNF&Qqrmr&@&&qLXata+`sX0&|7TsiyxX_1v?ClS)ZP2(nZQ4cd(c@* zao2~kt)wZoV=86zwXNa38h^VI_&^$YisZO`y8Ai>*NGb z7Aa-q)_?JC|9DcUCNQ<8ej5@0Gz(h5T8dqDc<_$`S|0$M$$x+HzXSW<<@5i%Ngjx& zT)g{7AtRzC`w?s*LASZ9ypuB5IefzHS^wQ!@I3PjDEY5=ffk+zb_jJ87q6(8&!(cN z-`=-@D(O7`yQ|ODdIvlOdgOyx%rdV&D`z3r#>zbEx+kCmKZkgC017aw95u{6J)PR| zmAZ(SC_>k)bn-^E0~c84CdQ{*S)k>;zA&N= z)R%f8tZUUS_)Edy_oDU4r~9AJ(NZ)P*jDAzKtV*$dMy|}3bFCd!LVo0-0-nl!-MM( z+U;JglI850hQ=Tv`Bv6Rug(?r;~13cXt&fdjOARf4leI8BGBc-Gpe8MpBgAAo(g(| z$0Zg72{lD97d&pZ&VMMNFDHTEvrTX#Blz9EtH87i!!;hG^Yf;lZjQ5+R+&taj$u># zhtDA*EGf*-AV;$oP}VZ;<|CDQHO5RvRX*XaVA$*7EPT*HRf^>%Q7=(&a*lsDT({?^(8YOVF`3Dj@+;=nPBdb3*8#Wy* z@A<0t^OOh+x~5c$mJ&WQpqrWhQ}p};l+r;N6kp7}wHwjF9U&78gF3NleXd=f5)DD@8kROfvAgR-FpCi}nuSuDGH$6fkK~7f zkfC0liI+oQqgJ)lxUG`q} zzDAf%gNhP_xo{DgJh&**CoD_;lRQ1QkGj-WbI*FGD_1|QP|%&kM>WHQ17WFRsnt=r ziPS}%y*zcX#F1{K_ZyJYbcLounm9zI_t+${Y-VMz^dS;@&fd+O%icIYHf=gD#<#It zE?dwyV61<;cjrE8#9D(PPRB)Fa@caFa@TE|KhAG*y=R`$<0%;zEq}Jam`ZTEJMDW*FDEat9l1R~BU9xvx@fBk&0P5KrH=4ldMPdOx37u(91BG}hF zzWL8~u+hDG>=OK;btkVLr6^!JFHmN!Qto%OmJF9K8VDg}1M9;% zyFqRaZ(a|LX|=~1l+E(7gN(0nfM!iJ4V|9ijGP_DK3>@sS$?KB8Io&98+i4oj{xs> z5-a*fd=~>^sL?5#$FZ)l9Z$3PY9#oB7@NAWW;1C%IwM67jZudIF^%%j zjKf`m@p9vC8Z3p-)yc~IdD7LrOd`8s=3pC7UUp4KQ6XF$Xj0|8lq=2TaOz&kiIj?H$r-S z&Cpw_S2-~|E~{GjUzw?+S%cy!q5dwx!mYz~nwIG6M^Gk-@!7^LJTIjf71Vh=hCx?z 
zqy1vuJ@$Q_`V*G%#?D#W*wLHlz)#%5(aMG%;S*mg$sf7|xWDz2-9?z|rXM8mOLd+< zvJJlYMxvNY*mS1Nf1j}Vov_wRyJ%8lfs~Wl;Tcp4m9kgbJo?()QEpFZ-2vuN(?zon z)$91Llh<$5GC#kPf6^0j#If|@Nb4H6fWO7)tYhDGfzSe)Dxwbl`Eo``;90~4gn8a?1J2A_1N>{<-)=2yE@mtNCLSlMaPG;+!X#UWTp{zSVuU%Z8|w(Raqn`S7#ylP{mb|f{lb9#UH z6>HwZts5xkCn=q)gxESNW8d{W+U5>-_BRk!|C6Z4O)Uo-(%%NoETn|X)?+@xbjCc5 z>MdYtq?Pltw!udc!|QalQN8-Jl+Ysx_XF);v2EXIzZ*7(^5k=l89I8NJwtC?LU&2= zfwp+fxnT7WD$TpWXIrt2c-fx2lh(poq31mgYDH*VFwM8kj;YUn@__Z)fM+RQ->3HQ z#t*Kt2Y`i|a0h%#ZUHt#eoFF&MsJ6Kbrg+l7syqMn!4qA-H3T2`Cjm9m3?3*Hl!IZ zKTKX%fhYdv+Z)*4H4jh^fZ4G%?p3MT5}CoPNJ!#2Mrr(fz+>i$#++3g?m!;s;tbDLh8q3ipeWOnDhMXMxoj8hHCo%5R!xPQS(rK66CtwU)gXW4Qm= zPArw3s{Y(NiNe z@Gvt0e-3JBifKhO;)rIp5`OI{)q`Dc8P*@&KOzs)tYM^jSusjErd(2tMP--mZ5p>XLW~I*}d^W`Y6K( zbC%3f9pS%qH<8S*e%Gwh8nb@8M;zRvG1Q;IqJ?dF96P$k@oU(k^V)Alp4?0t3GYPh z-o*>wNxiivSDA5H1hiM}Jvazg&lqj_&dplCRcQ(HW^5Dp&sud{UC^e^eLi?YwC8M8 zfXLDHbLcuT(22z|>!MIby?G5)=7*N_Iy&ZP-!btZ2Mgk%u@%v#VRpFjA~~PQ%p(_Zj=Tc>s3jl&W31dPU#Uc{gm+`kHt0Z5bRbW5 z_qKQ~JZ>T|Mv{qI^T=2H*)|ZWRTS;TS9clKi_^>Hn^+b;gnu}+OKqGv z*sBh8lTH(6eCop;4Eg4@Z6#w&aJ+(VW+;0> zbh6H=MG{{>4n0Jlm`!U}NTm1n67;Iq3y!E1p1R0NZ)EXhD_deGv0Uyn&*R9I18&*kheW{+rs!P?^vkBr6K>u!%g<&H#W}PYoKrt&z_OGbMKid zax&*;MJTYB7?SE}%AYB!l4nJVS%=n~zbt zWiDmk>W*<^_9>~l8GXdm6ngj) zC=g-CM6535$@i5RsarHQm+_5~SX}b8*Xr;bzLo>!gOok=nRSr4`2tFX$aL%vSiqsz z_3`8Mn&m)VSVr?C`h}l5-ZG@=OI!H;<8gp%T){xG_RmtWKcX$!7jm`4z96(p^Ua4+ zHLH7{1ifW5sMoE8C;TvR-5E#Lc2zGQ(dzIIG0z!;utyj+z&4(s# zNDf$I>=pGHlVeVT)m3j#nmagJ#}hpq7swktWU2ReE_&||U(@pyTT7MNb!=9AZjnTB z1~g%DbjG$Wff2O4%bt3wrGSEX@9bRlQuk=|sW9^6%Glu=*JTx2WCUSnGEai|rO)*D zie7b_hb@oe$oqrU{e`fLh%Livwd=O=U30aq-mEb~S9S({bwjdSbJIU$e!cKwd>7r@ z@oF4oK^Omt)y=ItN`voD8jRxMI*In3%v+EepmtNkkNZI(fh6k|$^ zjYR>88?LT>OQJwm;X_AC-I0@h7Ej6Ro@b>T?8%<0rT4{GLQ-hGg-q(&BHS3g{g4(R zsWFRo(^;v0@-q;00XTd@8?TMbMAhQYk~ekdfDj@zQA^3(?szRM;v9ENPR?BiP`UXF zfr;Y^0i0vmSKm*Epf6w6LP#$-(x-RFTIcP}J54Bgs^&{XI3APhd(m~2gAN+k*9?V) zCwBNBpXo#PDmvaE?F>229_#Ku{mD_J z+=M@aWYN=XBNQ_w% zI=j~Uz+WX4J_c*2x;_@X3Py`V)H 
zr<-FwMC)?ZSdvO|r+sgLat<6(uWFXDRC*Je(@Z7roD;|yD&w-JZH+oLRb#7pdp&yC zwO-wI2t8NV`R)Md2xLYd*eAdw8a1g*_BKW9mh%@MRLK9@XnnE+g|WQe*DCi`_vQKR zAUfb{QZR)#cZ_YVxnKp82K*2{k7*745LH=98n^`P1}te+a)kf>oFS&r(fbiR=t8 zB?Ec>EE2^g_fu17*HKb6SRi6za_8+}d&G?^R8afq^_@tH@W{Ru(8TKASNOP%fnLJJ zJPwsN;Rdprc<>hP*e1ZD{Q`1yf?`ZMNTKE9UQKsHsttTx{ZaR;S#}w>C)L*;e7`-z zT|ZKs7}pFcRdqG4a0%=c#%)h!Tyh^3U3f!y=+c~2lzeF}BqwBVV@!!Tbt0yMliDp| z78BzyF!|S+O8U`hiPquo7uG5Hiwh=t9LtzMWu8ZTM$-~-EPqN2{^L;mtU*VN&2X!E zs-)|qr!is>4w?{fc#qv%3;##pF~Y6-jWHY4o3D_>5^BLfD1(E0o4KNqv89eGH5EJ{ z{ZgM>OJya~?yY2$SPlj&jSme)6;`|EDS{NOnex_jQ=6>wARi*O`|ZGam2GzoGabmo z%J=q8ol-ibeoy%!dx4?6XaRFWg{=iO+DhcgMykW+_&R8Lsa4$nn9qA#KajeS12!5B z>p6;{<5g_VcOx}DM2m}1kt1a!OSxp+_-V~T=&V4J4utT=s;C% z=0u+_*;gr{_U}YW55%1#6_xNaE3;Zz1@#E;XsE-R@U|$cPjeKOwOM~BXty~HrG%N)QvPeeNycL7Ll0& zPSiqBd$aCw702XrFtQUX6+{}~CSM&KfP`qD0XOu))*kZDr&FiTSUEwlGqAjXQSqF^ z!;CI0L5~EN;J}5s1}5qEk~CC@cDS1mY)2lo@w`T+UF-4|DpaKpYje|VYI_TFm92i& zMvA@}el!_A zVayz;+}om&0_Uu9iYii@>FTQrDE8v`npdge`kBSH>+Uz+K1bu{t=GBIa{=eq%6CG@ z+v{WIcAwO7%ykZ^D5XX!H$>+5m2%t?$zXPiXvOQi*&X?2*dq}q;yjL$RK~oSNn4uK zw#{fpA%({t!S5}YJUZjST;gCJtk1kKL{im1$zi^CR#M^0arm$V>w2b_hh<&u1Wpsv zZM`&uCtvh9In7i3Gz8oP8rUx+*seT*H0*Z{imtqM=_^P&N!H#_%xJv!?I>4vMJ;P4 zmm`}zj_oA<0zQ{OyUXc}aVd08ooY)3qQiW3vzil8)0CM#dlf?B5=O56zC}}BOEKK# z2?5|y*KyvYRFcB}0XoYp;pI7GSG>Ry|C>H4k<6y_)RFi~8?ZIq;~dxW^EViNP8upP zoV}4LS5@wA<|?z7pZEU$?Lj$l+eB^g(-r1|m#H&)#6cC+1|aR+dC-GJF0lsla^9@O z9JBelS{`|6cVpH7thLVM>3dW18QB+Brr8Kb)^cr39vx(hlnMX%~;``I8GH9OEmN`7+XpW^-&roi!kFdQxz@8UW z=w16c!iLHD`b*QPRt@TU!xCp10cjoBT&M>7U!GM#!U98oLV`lB{ouu~4?}PZLx7K2|EC4%1B6CVspYw7J z<+K(_d&uT>EAu1hJPG*ED5|o%y3kt1zAT?RYxJ%6*Dja$Fa!h;GK5wuVcOsvq%&j^ z#hbTkSS8LUjO+}z2*KfzvqjK6Q9X$e_`3-|kNMi_WSRuXtiJb_S;NT&^!=U>yF-io zm6vslU(0Q3wA^PtpUaGNirDY_PkNyG`fAql1|IG`|BGjwpuaD zowIm;U4hugXJ{d_Xw@>ACeUzJ5;?EXEsmLY&vPNeN`_(43e_5~uL?irPN$iaDk*98 zUr%20y34+?E*^HVOrM4I%G}XpRNQNcZVJP8@l0K&YMmku(20>nVW#(sNGVol8R*IO zd(;p|E_sR>4YjHKQ|P1s>rzP{MYkahf!^8X3U+32(KyFpsLD3^R7~?hHo=^6&yFt& 
zhMKNsvU+Tc3J7=I`U;mE>v%)zY|%GXZR+=+aGu#~yUSyJ+u_M#CD#I&j;e8X$AW=@ zvz9_(qA_2g8R7N%jC{4|k)r?e{xEG-B2*E#-nBc~SVfDrAgGBp=2vIs-6a}?-wyJp z-2T?|@YC4?#roaH%VvEmG7jeM-$CsilbGG7#H?`WrI|$wOdE1WTlRFnrfm0!P0P!2 z!spao6D<5Mk%RHye`NBVu$|f$Xa}^r3`-{Yx_i$Y+n{Z=dGX~XHGS3fhJ!bbb%%a; zYdLFE**nZ>+~^ZmGF=TWG8EvVc6|6$r9*S>`i)ATxp|{))T)zwGKz#-)hqSuocrd~ z?<%6)&jp{~Wwl49v0p;5DIXZA+<=+&0{n!OnV>sl!GL0mh_T)M;#}F| zN4|EAQlfWRZTDhc_07NR#z_EdPzh>}tQX8`O_+B%xsi=1mV~Q}`ih7v_h7f(n(ObRcf9KF}{RROvMtkeG{kqnLKx6O?H@s3P3>!M2JDZ_f3LJN*M(a``HNf;qQr zyxc9uY7q>UaWk(TJj0R_P*{x+{MBjr1B#MG7SohJAFLF6ex)Fp&PFiIm>Da6oX3w; z0ry}0Ta#`+CmY`2dybKtP1~Bf?|6f_T=~j%c1fw86v2^pX$C9$>z(&s)nzgT%WP`K zr&*hx5;Kbz`WIQOuuH6Zjdm7=CZ%cO(pkacgcL4yS(wb%U=AU3FUck!O?#tl# z2fWyM&c{GMf|WZj{c9+H>COK(I;~}ALtg2`58ek{y;rlei&rk zZP@j(nZcH{L)e_=59~Y9G(a3nO?sFAQQrzMC|m}iZnQzNfs(DR?h{|tz0Gm{`)@DD z6qB1>YkO<$S44cX*(}`1jU0k@z88e4BP<&}L!vMDDwmr19SDxsIwQqpb1TxGni4L{ z>udAppHn#UOTHi^5Cb6ZJK7l263}nD-Hrnf>mQ()WxB=@v+C`gt<>w*(ow;cA>?m2 zeeI&%8xr^*#jCP6%0tqLp}O;nRl;v%tv6?CLeji_W?v=-4kzOl+A{+ZvX@*3Jq^KW z1C5;m?x&Bx*t*1HOgPbA;Z}T_W{@PS+W{(p;q9R08|;3EdHOw(*r@1}Gp7?e`835qyA-Z_hN@Qb^9?4k31m=u)vX>*+$B0Z=4rg#F&e0nlO!&{Eha7bpSa zW7|h;ownUe%j5_KS4`C@o)F)_+Nt8$TZNKBF8tW>5K7KvI-9gyfM{`d@zyc2FohiH zC(GH|Ba8Fr3kS7mhJZsdeK{{^+@l3P&2u&*VDAyn?B^g43A)-iW8h+=`9y}jdlkY$4xG*E4zf6M$Wz~4PnniIh! 
zJ9yAHU8?!UoT2BB4ZNWLBUxMexZZdtQ%dmhhsOYM6(@aH5?;vW559^2!`@p)#nEl+ z!byM-JOl#4iQw+8!GgO6cXzh{AwUS;xLf0H!7T)r&_D-moHW*0)6m%M_ub#O&)#R8 zdw<*?_tzeyMvbcOQEOGrHPQ9RWE8aeu_d5GMXPMGG36g3di#?kp%xLJ#M2H|*@|R3EH4Ax zztB1?=#6@#Rg4p6$03u0-V<{b!=wMtb9t68sW^W)Yr4&Fj55*Bd&AQ67z7~q1)e(1 zOqOr5=unB7jp>5~RCYP8l$s%!cW8l$6;B|IL$7Cx{8MH`B*)?RQTeEhS|TOauT@HF zDx^-DpwF~!UVwUrzLIj#BOt*khy1;Y-8XB`7p>*EjT)G{eX{N~;Y&S0zN~hiWaCng z(wQwkQvB;f3axDCVpOQSG%Ap?(Kl7Zx$PH5S?;X-SUSr=Z4;5@uo@RtbXkHM2*P_b zvqWJI%`Y9_X@fifOhazow>qsc&#UA!sAN;J<_b5bN=ITx9&NHKeTHfrEk$3BpTt6a zH_}b_Ms-2{J(r<#`!l;my~v@I3vxRNx84UXNCn?_?*o2NVSqnKyDH4Q>o!LsuGd=jwdTPEMwR>T!?aot%%`FjN_;18OR> z?*aHc5nPl3c05c*z~-iD5}xmab-U#Iy%?cvNtHUxN^e13wYCXUnU__ih_i17p4O%9 zzNqjjMr-9fOlozd4lnVskFiUI*0@IH#T#){5~Bw5$a<1lp!azFWR7(&DmxTVIeSC- zphdiEvQVQfuGOLkDsR8J4YR)fs?#|1aap*<@hi1nok8^nSW;$F2%sHk3QVn95bT^6 zDgZ)fdV0J#sdgQnk7PGrLInm<2HdE$=h|g=ZE%L{ftB94$Rh8cfy>!UV1rK$`7Td+ zyw~x{RCj#OH#8n(JDm@0N1@mqv47P%r6|;mFd9U{ON_vm zJza<24DBEEJwp`QW|JtYbZQPMO<{X>1QctI!BYlh8PnsdsuHot5OAOzBxQf5IIwJH znmh2;u1**mtOxn#q3gTvR-%dQj8R^9Hun00l11~DF+yI zJu+?KO5N#ml#GfJbARfqZze~(+|D!ErN{CC^b=DA);96f>U6ghwC0%p3sSxwa=H?0 zxQvtsA&TP*TJ26HH0N;Xi}OD$>8Vx?X35{K>bHeN1YHhsmOUdg&L`v9%bNavalbVs z0s9t>Fgc+NxjNO$6uQAQ%@2a8WYH_7v-$GZI36?I-Qv)9)4-S5c+s5fLZz36!C{zl zB+i;Th0+U-Nz1)0sm!{l5F0Aj_+IH|rvP>b&y4Gwg=xWMQ9p3)>h5_^&vo`qnO$f= z=|!i)s_<-;Em#?C%HX;>elPHYuDD}(kB@jBjD(-yMmT6&5G22iz1OtBe)A`#ZsmRv zB}<6lfjKTTY6gwxV#8@%X?m{r2NMv}ZW?_4&`tsuw?tmog?F2G#1lLs`Hub5aelpf zRtQ)3-?r~{VMZ`Wy4DRM9!ahGv~0b_@*g>kIwVmMqF&7;cM)|JaaeA4O+!Ex@+^*U zoE-cVc&sAT`AP0-iRtqeHG*LMb6_MrBj*%4sChaU=fM+rGc@W_Z2F8|8M&4@&r#ER z%dggx&jqgaK|Y(wT-_Z%*iIz3TXXL8Js{SqKTsq32Evq78O{*LZ5}jsU9!s}k3(LH z_$G3zcE&iJYgDO(eD{Cr)KI_i`O=do~OG!~>g|jFP%kleq9^}DiDK*^o|8g;cgOFOXwIJ7ILUfQ9oIv(Y{M$_v2PiV-3R-QCiX{?q*!BhVJt=@d<&sjyooz_ zKG3}{PfOuE1KAhB$|;!1}f7r$P3%y2b12sbfTPJwj0~pRe`X8rZML z$nU%XgsKM+?FSKzkYiPV6zNWqYeX2I0_Zv+o=)vNfpKZQR3Bw;)Vt2k&Wjgq9W=sG2pYi3jFr<=LfEG z%ZnMNde!=D99_3*u>OzdIyf+)9`kT>>gLO8yzbL( 
zm{WmL96o1Dy3=gmpw@KP8v?P*>hqJDmaomNMF{FS=QVtEUhjwIX5+JqP}56{4lfC| zV#F)*i|fs*q{~FI9tX*#1==~xfV)|$H2!SKD5_OeM>MM0i}m_R>2zrN#c*B#*PEHr z;a0v_-Vp>eeegY^4F|5&A?_6rArKIEu~L(NKX(74puZ=o+GA&|1LmEb-m#28nzIo! z#IEYW5l=gsa{_Xm{Q2BYYAd9KC-tM~YEC!1(+^LbR0vn)(PY=64i5o5hrc_NMQXv1 zQd=}>YEH}cs;z&ONW4R@$$1&}kjI-x{-@FZk8Oj>{>f|=EP9P5MmZ*c!yz*d31e|7 zP=a{}_Aa0G-1OVNxI`$*<8~da4F@BV*Bql-?e;@3rqJk44o>zL9LW#WGn zT-KUz46Iwct6s3w$8i-pY^;aTq2|D8noil)wF}|I_>=&blnh%NSV=cL$;*epcj ze=EY=qzai?#>kn977AV{sSKWiox^Hn`#FYT?<3XgkTjm0E(F-ZwpjTd6h$A**~-@u zKok1ykDc+-XAMxVBY4!4qi5rmxyn?#mt5Eoi4bGw)w>I?Bk&>354Dkrdx=NC8gi~P z6+$3}tQH|JHYUk32Shp>wg3_&sP?OKt;41I2_3@w*_^QpO@+Jo*Jurv`D>&Ttfp-X zDh%dkMco!J?!F1_f?))?26*Il>&n?LmjDV{!QVlSAv@L&?x8AR&ug2jHlTrKK%(Q1 zKc^m>r(!F8pV;J^^;RD;_a634Jny7Kdny)nn|AM`9AI^SX%OUmzmr$b>{AB8Yl;GM z9{bn!S|r)cyE7>fCO$oV4wD0@YCcyEw7-uo=1s~(T)UU>r0rD_qo@N>I+VZtEG-mc zFZXz*cnNo19`sEP4Ku4bru^Q+Xbs)LDd3@aS{CN7LjJ4^`4uPUQ&})05`_MV&#%ya z|CM zUV}dpNmAbS`Udkwh5JLr&Nf+G+xrEB^}$OC$djrM5-F(2;%JJMh1>h-xp+_sf!<|s zFf_GIc$_@L`j?y~CEu<{rl>ctDOWt&)E7?IgGgEEYJwq9YW!S9Zh?xi^X%^Hx}MXp zTt38^z3RDQrzBY78pURJwO|7nUa_rexJXEONxoXxx; zi)o_|G{}e z{1biyR;Tx?LpVl{ZR6O=f?E*gy1hof?*>h#DcmZX^y`wS;!aU%5yrfDe;%gMB|$s~ zrfi$q7uH}vV_|A0+uTJ+&15;^U`wrgyV+@GnzAMrIO*i9xV>lS)4TU>)3rg22Ilye z(uk_>Y2}8UhN>lon!RDa(J%FAzB=EOtUAPFc`ZhLx;#Qjmmt1+tL!&W8p|UHU6p+~ zR>tpL?qP(mq3ZrBD7bH?tC^t2$hBjjCYLj4PVl^sq}vDFqGv)(_-D)^lY80ZhM=JZ z<3i8luF)n%yyU@&+Kz9L{AEYTX>zF#(cV0&^zCl_;3cL(5Je`LGxs)1#su6YA3J|J zpYAdHx+?GsX!!K#?F2nu(1rNMD~1z`bo+c|gAcYD^yX|6bx7n)Xg3zSADVI2^yx6w zk$8EfyBw3q4yYhf)VxGGC(nNQK2YWPM*hOkWW72|-7N1)rruy)b_MWX4w!c~R^ zuw7{J3wMqwuE$;~NdIEg+%Hf{x2L_sKFwD9@TtNW--_q+WgKM_`)KUdWMOLSEy24CQA`~%S0HdR)g{dsDPNMp2zicb(SIbVz&Kg|g<@OqS!Z*)>9h^1v`YdlIR&^KLj;U`Xuc5%FqvGmlYrI01o=zg!&z zRwY$LU}esy>}4AKNuE&uKVY*rW3OQrX(P8X0GV^-q8B;3b}N7a@XsGiN?2f6b>S1 z1xZfx>6tJhR{XnRg-e|^RQ{@t*7{nofZQq7>*lB5YA}j>*Q4gSXS!OR7UR)FgX(|g 
zv7-PPY$(E(u}=lKH>(*Z;361C_E9Rs@r7rMFJL_(PKQr|`)1?k^8L2iO=%}#}j*p~8{wx3!vzmeN3%D0RI0b(Pq6~?SmbB&^0Wx$P$J9Zyx_F$I2xbD=bX?mdR(e&8Y;3^A%A`26yLx;wgO3)w zV{an9pcE9meVXm3j>UYTa2J|!uOdinz=UN2lmwaif;YqEY0wGq1>M*7CUbE^tz$W@ zWqGu_rNuxWO}WtG&2G{b(V+>5h6)mGHn|t%2oT1(yeoTrc2L{#8|zgnJC%RFU)$sn zbQ35cl?_zQw`K^gBVS*T%*~VcNokWyn;bM6RCxgUWgjm&cTJw?)}20ZCZSmF@;5ST zWIF3Gr0pb)K?AhNru3$F?{jXfb7O7(jj*4Fer2GUbJ^31klOsvW3prVnEQ|8Q-~ay zJ2oFRW=#6zekM%SSk}K>cTe8dKV%z;i|^o3@MjX{rU}{>@F{nhUU?N>?i3T;7&aZq zYYq14w#DxN;5h9y^Q8py{Yz^1qL&>_sM5!{!B=(G4)3;gXOp1KO&?Xr(Hn|o10oEC z1E$)x7=%>*wGZ@dtX$<_U^YB5 zc4cTuEua4_wo<1rjAq7Sv4?PNpf$GDsb^1+JUZHkaQ-Z;9vO@JQ%TEcMr3{H0?#B0s)5 zGaB#*QIuApazLUH3w(-ntKoYM7wxK$Z31#Tnx33%@o;Y_^E^{PEn+C1@5a4Hi(U)1 z%$@qR=BF_#NS3+51PNPPo*-j~FKsR?Nm4}v zk=VB1ArDQyByK)L#uPLKU*<9PkB@7K8JnABzYa!qF5`54@9gZ>Z?J%;rFC>__#!kN znlXCLt$Z6jJsI>oI|I}AF4|OFthldjZMZ!>c5`Tbbs6(Fy2!`GK6A={=7?^+4;vy8 z9>%kllsHWP6<_*I7kw%##adj=3!yHAd@~BGdJB%>8k6MKuG14 zaTZi=@2gL|Z|IwAP)L0u36MqZg{V9my-_#bxacst);t%+0t+LL){ili4qV|-a|MWz zeZ8wPcuA4w9pENluwPwq$X&+Y>Xf$u{+ag0IeWA`CkP(@j!yB4Mf2fzN~vgEi?O~f zr@JdwuzM*ojQ9!nH%vppj21c4en$rSp>v_Z)&|u@BSIBtIhYH}EyJvj%8cj;H z%L0GD`)0ykbS*gMcEP{DU20CYB%%VARN@8IRBhIwjCjljFil+Gwthvp^E~Ed_@79c zpjog1W*P9z{0S&i*%pc_PJJJH5wF_yng6bKmW@rS7M&a>Bc4fU-~CNj`bt zf71W$Q&VOZStMa@=x5%9cg1uLmephI%6m#OmoSOM5&N*b6!T2Y;f89dGD=hT1c_t8 z;ZZhrbOuxfz0CxiG4>H@tigDwk>{Q@Bd+-o5i~Z}Q#0qG6h})&KS(9VVHqIsD$#7c zNtTu4;=UNr3)jhs{(F_}_6e%N)OKEsNuq1bfSz=N!&9fEVt$vi zU3cBQz5Akxu4P^}#%qz2@Hph&|q(#M(HfN0*ItnojR$!pEAinZDge3LKN7=sKjQAK%0n zuoK+-kLdUmszhnsn99Yc^ftu9vsG=gmAT|-@ozJyCgJzAX`^Yf+mB%Y+q~NZA0)d8z`XTjXy z`5063SgRqV*g&tbs^*vf=&^O>jgSozgFAKObfxLKI@|%g*)ATBw{J`d^WG8?XK?$< z&?}mTuJ8$6ffgXS#L~HrhMD`_4}DMal<4R{c$W9=bicth$~Gbcm3jdhGVoP5q^@S` z%918CJ2`(~CgnA1S6KMlRz81P61}di3B=g1I}W)IP9F6T=8SBaWpNE!U!#9&7yVqn zMUie;0+4rxxR*EW(DGP`K-2#PeOrM?(RPJ=Hy8q<$?~VZEHyLGPeZs%Zwgy1(m*=X zw?UuW)GdUg#Sr+N-K0x89B75X^=b1@jID{zv>4oML_{f{#vWScm*Az^b+ZqyqD$g- zWcsfD=s{K~tpc@R&<;~k4*uEf12PZl0zX4uX}8Uf)crkhSXo9z(9({4OF8@14z_^K 
z{ceJD1C04v|2o1WUv=1~9r%MysOmaE`QD!$EAbK5?J`zLcO#BgS#!=C{4_53s2b<0 z`#=eF>iCohSdk~D3KyAMHPOEGo4#^VTK_-F_t=+asQ=X z%lXvtX{gN=i$UY(z*u04zprI73$)J*+W7L=SeI0O(03&uZvL?l09cV|G4!=#w3bjQ zO2I-{j6;|Api5y7y@3F`A1||~uwcS(^H-oWU=T^ZJIBz?wm_F7I_QBDVan*t<(I?B zR6I49t|dn1_LD_@z}|o&@~D!}pa*y%6cE$@6fkfV0WZ#v2nY@;Qqp1~crZO}m9jAi zC*b2zbSU!wiV(f}wbXn134~urHGh=IMy2I{K>BLX$E30~^ESLP)^UidGrtiBqz1>N z1e_{>xVgk=Le+n~)!Y<-z7|{9Dv@in^P6ukyR6g%B+>U?T)(Xi&}6~egFKYGh3eX5omLm)zW;y~#$zi{N?v{X1)pYzezn}3%2#Zj z#qd|;0dLwtmTBXp$Fy?!4db!ykh-h36&B@L|ICI57=Aq{ zS5Z2y+d(2Ohz~bSr(?YFeP|f==_t1IyE-MEf1l7@fV|@z3Bct??tl>Ji{_2woPS6m z?-@p$`Ee&+NAqS80mDj**zu_^t2odA?rh%MmXS>5o^Rz<+=4i>mK*RLD{yWuJOnt5 zTgQwE9OxDsqkU0XovY^h`rp21)%3n!cRW1ITfI;lworWF%uVJ(0xl1ls&U)ML@^H_Z{#eR{WB!h^ zB;i^1^Q%;DG=fY-*g6wO&si;~w)f;r#39)IdF709)A@1?pav7vke7?ZM*cTu`NEV;t8>34Zr6z-vFH^7o6zqqUfS=UX3NFk)VIhB( za|Cp#&!kd@-wicZf2`;V%y8~|k_HPo$qZDUVfiz{@2Cm+Kmrap&wUWWAMOT2Zzk1h z&oFn4SZun7OE-NpXBpZ@kwPXkOlZDW>mTIS-HgV4C2;4eD!Y~4m1WH4pT){Y4_>ai zwVwFG80Fk+&!v#LkY)2iwlv!*Gv$RUhnrg?xaC9PK1Tc6cwIM)I{YGAz|eD-fOr3=>jhi>+SVil)th(d0J-md_|UOycX)t}s_)TkL(N)h&A9fQEaNDH@+9td!@duI0xGCeZ-e4 z-yh*PJ5wyc(#q?Gq2|wMy-acdy^=8G`PqTN&RVuhGX!|>*9Ip;g3ybK^!efZ;4Q(D zHYU=`59sQ5AJt92zGQP!*;8=Et*nV(;J~V^`G5+dX0TC7;4j{P8oa8F@a!0Iddij` za+<5tj@+7lKmX$TQKP||583x@TEA+cT|YW-Q4;$o;MbUQ$+_nfhHZkQu|4h{zngBg zgmSpUJN^~#E*I;a5O@m|_Dt%+_)HdEed*O>#`yEpNP|Ml%|zat$i)@ETM9?k!T| zIcI0H)|^CQk_?=y&>Fmj>hDMzSbXVlU3%yEiPFREkwjyJz3Ac>S9Z4gf!{rrQ4Z>L ztvo7dXf+?WWf=|V;#DE8{)N?n4P_?ofE)E^D%nEvE%uAb{Y%y@C-N6_k+JFRi(*?B?8m~o?44I1_k=d` z%)*d=QaCC=oj%&7kt~!CYDsqK>yM~-<8L^@AsAFI%z^SoI{pp-)d$U9#ScH`*fyJY$GmZzxMEUGe5Jp8nfM4hgXA0H)&AFG`8WB-rN(@8FORY^Ls$=_ zx?+os8PYr`nBsE!pxLkx9|*yph@O*)(6`o^H-BlkpU}CnN@^N$lI$)dX%H`}T$4#A zDcZ+snLvBxN-`Y^fncx;Cd9UiBDyF zg+5DlXvkM31mfNLfa5QXsj6oEY4cHTKpk(g<37{3-+I$L^S=Uv8haCXL4If14AWfu zD{HpGBtn9&W6g_G`2u*jk}aXRw8dPT!dFFpZG!JYWrXYNWj;6ViMdC^Ug91?7dVz=-1n^DtW)0Rtam9X#e2llwSDEL*PgI6O1K3y^rPM zSKBOm%A$i(@8yLgPy3j*-Y-3A%e=$|hiD94R;q 
zFNn7|!GO-xMlSP*ZLGGLmgNPXve`mhanhM@7q^u)yzIyMx@kyA5ej7eX3x^H>x+<^ z2Q+XO-F4Vzp4TrNb|6yb_6(iE&$OAv;Y;=YlR zPJDXL%*wpQY^_Fw!L`r4WPviPl+JT2>acLmngVkxLU*IBpF4ZA-X+s6Woe_mzcH<_ z5CY@%mp7#3C=GCYtfMZ&2)IU}GJ8is+fuW2)ww%{^=zF=-}x{=R4d#GJmIxvK!5^d zXjTJYSz&JvT{WBE7G5|*^LhEng%%SEiBU%qR>={soG12U;IjE6!q~pL#MWKuVY4K2 zJVIztkWSBO$2gTEy4P;klSs52bP42)aV4rQ+1jf6gB&1>fu;ADW z9}AU`cO86T+S~j|qgoj*bjto0Z=FZk+1AT7xPP*M-Uqs*O<4~4rNkZhG0Ibahmkbg zmph9Qq){W*{DTKTq+3Uz|_&4brEk=OQy>6A@~NSi!ZCB(~WuN zIZe#&A2CywGPqEA$_PhZyGp@K&xa+(-aZQT00~-alA}_+nUL3a!}oz@GN9_i)uY>Z zzWjw09s!@DSyl6svNG$e;i9e_A4N>>NgdMcY|n{+MDNpZaWS3ga;*4wV^#h>=Z*hwqJBdWHKTgx_58lRB`Y!s8CoA-Gk zx8lS3+3i#ZR6k4?QXhQ7d_6z*PVcYj7{4QYGsz$73>)9Fs6R3>q{PKi#A_|vWL=U98WAAFo9NXa{G|NrdAOl+;?{SAVEs@gZ z;DjGxe*+Wk>N83@ah`!(o04T?%8lEBx+#*(sb@+Y3g%?n7K$kC>938IXVkbJuVvNc|X@2T=m)jz;G9vhPuJ zS$$uu#x_wk`{r~r!=rEHvmeW*JNLPall&I))&-Qv?x1PC>1TF=?mOENnPC`c-@5c) z_v)D$gmo473<`Gzwa>m5PwE=pD}hKg&o^8uB|AH6PU1QL+MQf}*EJzA8&9HQAc2oa z+7H3c!>TUVaM7S=0dNq3@6hLK>CPv@14wUnUN3kY_=>mMk}_N^$@uf7o+h1*2LEI3 zD{3qbX`)adaSJQxwfNr8ua=%8Iu_lJP3eeVJWGD`27SO?7@Z`h#*iP2+y5VavVT1I z*Vzpn>+|vkO@HjU=3lqZ5}DWc^BOTyE!PA4k@9a*Pr^1^PXXC@l4%ldriXt9|^u%dorXb`-i@SJ`q^x?$c*| zTqgqB{lgqtrsONCe__{u9f?iP)E85m@-RK$KY2%sPQq@uFY`v0SCegWs8BSgUrT{j z;R%I+vwKQJZ+TVScSg4dwPTF$(*M%JUtc7HYQ$TH^ZX5};nU|M`4YXA##LEcGi`2# z;hV)(4X(U{ezQ9b@7C$mYaHl?`z4Azd=6(6!_|n-s0Seb^)`RMnY|pU#w_SCjH~m% z{qfJ+d-OjAYx#HxlxyUFJW9wOGk4DZpIazDJhL4X`)8B??E>`ukf91n_6!ZOOf;{;T8UBW(|B&H7 zWcV*J_5Xq!MpA69t$jXxLcfXjpF}uaL%mtpA>` z;-jb)Md&-V0tD#&d{L><%I7oEkz)FuB?djl&vKK?1ujAUp95X>+YgFt;-xvPEI&z5 zUpsUlEg$kxg+M(GsdTE8HxiwNpiwn!u*IU}ef2Ug)iIk{fB(RdCoN7}wTWKI-P5T? 
zK2-uwr*J+rsa{(YY1fc7#!38B3;gSXwvBM4g&y4%FRP4%W7#3tL`HBk6x#&<#u(8llC@sy5_TQI$f8l3kqN$Cwb~TPCBoly zBwyR5eC`_{0TjcO!GMm3TbupQMCYA1b^G&@sVqL>HzK)u0kN_=mhYuB8l*M6l@I~Y zAf<^we!}0~4`%$vndx-@!hp14IUi{C41Ml+?Pax>(15ktq^(uta6xJmz$6BvPuXdj zTmbGfX|qQRw&Zp?4}uQ1^;)oRI>HOhfiItS1W$l*^9pInWnF3XKC#mrD4PD300EUDOzvXKmQf01JO|W2w{(m zrL3B=GcZ_n3!oBk4U8Ps+e)i+9cI^Z9P)VjgysXBIb9u*v&y;uC3L84yKdcdD*{S>8??Vn&M@3FmI1+;cV+bR(-Qlhd1#$ z=Z;_r1VYpjdA&s~#f(_X8%^fDjFi3E-<5-TY%&cSm?PNoe%2XZcReY2B6=diRn>EhpqFIvVPH&)?kq^B`&Ek?4~<(QAOx%J>v4sxx5Tyz@Q zaLw1(a_+xIxa&4yGf#6B23%nF=2Y4!Y7ZHA4wkVFFzB_(V|JuJY}VFR>hI3UdUlGh z#~mzJR5C1{XIBb)dn9emO>deWG?|iCr8@cwBH{vT*}QiS7gC8lG9FR^QPV$OcQdg$ z;lD5{7x2$zwk+Y~z1kpSH zJTY*bf8wud$AQ{u|IXnk)!4COSLf}`i^1#ODI*SR%S^)-cVC6TFCMzkZE4jtVK9qh zO|P8preSW2M`wWTND!anFM9B42p&_EbL5c9>VBo=Pyl-tx{kz2&~u?PBP`MS0FX;t zZQ)UkK|=s%fz5LB)wxR;@joqK^>00*x9HIytX+r(M5%&+{iKmzteo! z1in@u>2TGYdMr|%(=3lyMWWY+D6dl_om|v#&&}Qm{Z}Y_xBi|M%+whI=uguNXsu(% zhZ2-;5J{H_+zYzP%2s-gPd@MLr&5R@g>k<~ z@8mtGXuoBsnJfFk$`wuZRYHTAw>-Ep-~f4fybcj{J6aGkzg|AOdCV{;O8goq`5LSz zy-bkTB@MrUkufJ+z3;dvO{z^+qQMy&WV{{iyC-XR;n4W;dz)eLp?oDmB__K6)qUd6 z;TJE@BM+9UYVpRGl22E4%RXwr7kqgJFF61sPW+LeI!|y^L*J;I2X$37NnU#IMQoMK z%-)7S?r=UJf6wKxKw!98CJWpp&isu>^{I#q?hqu_RJG}B0b8Iq)}H03Gq)QYdu-7d zC3-!>YZC@}kd&bjyH!fl)m6twtDMRoN)v|8R?Ttd98IS=1={X?_2}~GnV3hwIFCjX9u2xlZA8114pVkn0 z@R<&z;~KA-B4h82ee60hH$MfDUEMFooOY}(P)A@Be{1UzSSW)&N+Kc)sVG2uH(p32^k%Uk<}5Jj(6j zDR#Ma?GyD*ar+^Uc~ugtlc=}c1l~K~{sdvMR0qD69KW8%_kIZzrV6^~gPe@A%%|wJ zXspV*%f0?LLF4uAxkE@&ysz0d85+Lxk(raOEiY5qRI}F-7o(x8#*2)-0Pq7qYS`N~ z4ok}`bJ%>{eR#Qe-iM4ez&hdZ;QEJ|scbspN~qUadzA)C<0&4R_&87Rmo+dZ3~Xtt zrso^VJzDvRDz=Rdt^w-FA6XDjaj3Fa*sS~2e(i`+Tet3Qp-~;k!+NxDRlRe= zG0OFR?Q|0``aoXb6l*^l`(;1?Pbfd+th<@4hs@D&Bt+XIxu*i5QH$nZ*?GjO-)r2L z$X32`>;#784^1v2aDs?m^BRoy^BV9or3my=k(;f`F&QgC0J6g3I zrv+T?Nl1w9?pZ3m&2$bO1>6{|(U1cFaD!wzgA=;;7F-~fF1y12<{~P=3;S4lNG|BC z^U6fB}tYJi6 zViN0PKV~eoKJJiFe1Th#_j;FpBb8_1MJi*Ljg4ydJXSBiS*`0DC*2Z#xkO#;kS+I< 
zDt-MtSt5O=;MZDy-7l=}{MJFEJ-A)q`Cifyz^3ca6G0JlWefXzIMQ!&AEX|Y7QmnBp!MiVWp>IxBXFmY9`=1OF%eZu@0`GpXoH%>y0Kbo{bzNGziTBf+ zgZqcChz0ATJ4|lB9OW@_hV6v$&iEM|>P6k~l)AYe`QtW&DVOnZkGGNnL49?(7W5EigyT;dh z$h+U~bl+rfJEW``I|xGi2(JXV#R#+(ERqsr?5(!^Ym{Ghl~{V~Y+nUA)GCVvo-~b` zPa|JyWCaBT+U{}R(O|_uM05N2=x4bGNhldFHGBaa;ED+X*wjMA7qbE9H_S|y#uNY@&ZmK)f5PiK zFiL;U7&zuss81`1eM)F^?{#(qI%Q!El>I96&EI!nHvUoj&Ygf$H(on;fKG#C-4#nq z%N)x#<-=xZUC{D&*&k@LIn=i4)xgEBNT7ha^^dyQiJj%S_uR(wRnH^NY7!W~jSmZ^ zBAzl)#8naNQOLI6hcUJm5TA~9p5;IUfM)?^U$(B&;>IfVY8?*rbp|ZXR5YCWxhDXx zXOG1_LO|bvXPJ8k@SkwSDlMz8y!(4#Ztvxv4gki>hdR$R*PpZpn1ImM1e%OWKh>(S zO^Jq2S&de)9e25Ij_a>yRR_wYI`$PCRBtrson?IDl3<@1zf>`noB3L zzyGo>Ki-SZ^4IC`>_vtu^hFU~;Wn$7#sT;dN&o7kmGmJR;Tx+pU}NgE?Iej`Iy=VJx#h9HQhq$T4_fXh@5;n}j9XG0YFj%e!+G2YPsXRhOtb}Lbo3PRCppa$eeU=8`3H}qk zSfU%hl1%+=DZ)Q3>lyEKKLQErHO>cLs+TEITez#n-Qp6Jt?>7Mg75O|cXX1yN>oaGUU^IG5Ka6wtWg)AM6BUc zV4ew6P*6yf{ecVUFM&e(Fy{-on_g_10pxuL(LIk(5LFv9=-jsGdL zM&ig8xLIc}#?pt)Rl(5s$D7G2r;K-shF5n|Kcq&uW4%qOE94u5N5Ejv_*2i4eC!lqFJRTdvKKI)&Hp94~{ zoh5m%Pu8?#4T6&cUjZ`q^nnaTaS>05y>XPXFOnr+1t2*A9f*98eho>!^UV)Ti~g;% z3hBn0b@%IB`yiBQ_>C-@@|@o>|E_dC+{KV zuCexg7uoG8{!IeonzPorz|o4lEu24kMebh>s$#6|EceQT zv&EKtDEoYilTmEm0`DoEW)-8_SYkmh+3_BZSI%G-}3`3BC#-v^mH-&dX;|krnXUT1b}0n=(2QN zz>{dUK`iycar7z&=~xwJ9c%1}RsI4sgWISfTyuTh})*lk>!dg(qf#fU%n97$no} z<81`r$t^2?o#5lUB+o-&*90~QGQA@l!EMxU@=9X*c5+vOL89ws7+-D%ydaIOS`HFM?z#tbf1&Hp z+tI{;A19VlTS@)0Y)TaB*0}mahSm4=Pih@7sw9$z!sj@Z(Km9i^#Ctb4SQO?c#LlD z)33rbIkx>Y{A}sD0d?H{i#KfMqntP(zaL1)c1FMFR4>xZOfU>ec#Dn>n5Iw0`wO*x zhD1Ku-Q1n3_}+>Lc=aqWl_;sUN#%`xKxq>r_l35&r)g<2hK{VVz{^-k34=Dal`_)2 z;JusB#ykrxyWdaH^wln%E?8{!dWALfe07ay%8ro1y(Zr2*(KlX4cL0xx;(P<9TtIn zaFSBy`*^_Bs^g4zrJ;Rb@ZjYsb2FRJEf^BEdXGFZnC@35*45(|>UPIqAP2U)7Awcb z)2|LGPSTgf>Kvk_QPm62&9-X@!6S!-#P;8YldTgdZUxLMR?_Pz34#u9-@M(wurN*& zsz_`JSz3}PxlSPR-(Vb`U0A?pFqHUTjJ;)4Tuaxko!}NAXmAhi?h-7xyA#~qAwY0< zcc+2I-Q5WutZ{dDIKB7tz31%xoN>nZ{;dH8-PNml)vP)1d0*o%y)!n&GO7|8P>A9h zb(No`yEMFC(hl*%ZnaoGt<(C>hM*83SIKw*NB&*1jHQ8S5m{n)8QEhBg8gZ;)S%z7 
zmBv#$$l@MiMzcT4}NyOyt8Fvj3bGQUyVK|MJCp+(#~9KwR-?J z)lJbVi6q~vMLAzoyvaHrc2&-I9Z&35cx8|*DyMjdKv1mze0~W*e1_XWFu}%eT7l{e zmGKj8G``^wHHlPRDw+y2^Z31g+$?+e%h^Va>yPh+!kw>44){3VdYAOMGf;bn+5C_i z^5}jbt(Dps+uAu%*IwWhldCaC;_Znyvy8~E(5qf%DS|xYt@fEE+&S7UCr6O{+yS)i zyF2csQ@9>VUbY*{^>#NpjfKd3&MdLjwBAvyopohwLM!3H>O;3QSwqcakI(f;JzV`x zMA`pgp%;4bnx?J3fhIaQr<6qg&e?m(Fdght(|eD03AnvTs+sSyg^aH{Ic+y*+mnRUtPPXoO0Q#7a4DjmTNA0vN z5s|{+TSz}Kt=aIevj ztoU$x8xk)akSIcRj51KQJk^(#qh?KGh~8O|&T?$W83jcnW?25HE1$Noy-L$7zF)~^ zFLu)1y8Kui(?riZ%r+}p_z-q?5s~v{?*b1Q;1uwOi%kDerG(Uh zPLpXhA1PU>n!Hw%!4^$gEu))Jhg>)pK4XK2Bcd`NtYzhHZ-!EOc!^e?dkDg#XzuZ3 z7p|bUiIbcFJ2JcH^GFxVA8ioqb*fas>I#AqsTX|1t8iId>S|;*`xAxROYEmG{uT2R z`;&bm2B&?D`+JA1JYwN^WQ^13_4n~qX_AKz6{o04*OPQnp|zsgJdVq zm6l3Rv?!)AY-aDPPL_mp-Lz~}FaA^r-v+S12~<{aVi}SZ(|W%C=rKZ3gzT6B*vvO$ zCX1O=pC#3x1znsYrh}VvdNHbswo2&Y6K~S)uwwL^B@ZQ*)pve*FRblLU3?ThiBMk1=ybehJ5be1kw0a}t_v@HnT!xbNluJ*33itp`aFpo!W{*UN&dmOzURJXH);e6y zmT?ePZn;OTdMvyc^Km;R$OKdBosz+JQ=E9Wqf|_qWX^<*`)8hy*F}$&ZCeuR3T!bI0A-2TJ#9tXL~Vhe)9%mYfJ%jt4iw#l+!60 zL4|j39vZJCzAxZ|Uuk%jh7dn-92T<;ns3WR2vy;!-7^PWly`$ZMw4cGK|?6EgE27J-EE}e=0h}x^RUJwTPX{ObXXhuxWR}sP}P& zQ%-$-FnbSD@3}~G21T(5him0hskMlKMPvFnWuTUtKqQQ7fDenkw?ZOU1pR2f(~7#Z zSfh~~YtDc^YQ+(1)`YX;r`;$wPs4HSOYZ76(GNG`B{#@y(h={uqAyihG@o}tXPmSr zqi$W?TBmKrl_EE7kRiwb#z8vO92A5oBplax>z@hMq7Vdp9CJt8{xOuL+l4xfWiIhf z%j+zR&OfPibDq|Bs(*mm3!Fl0j~`w(RWT8Gr~ot(yLd2{RD!V}dfE``wocqyp(N~~ zrxcg+Gc5qUR+qFcrOA{Pj>jdAOOl$a3wF_^v` zjGXFl1%0ACw^nc_>#Y(SKJng_1N4Y69V6d9TRD$A4a6+aa@MQMmM5wlyPYU9hR5(} z6I7{RlQ(6OXbc{&kN-|_Oi67t({sCBxCqVWYOY8eI*H9a3R(9us+psGV3}o+PrNzz zB?5IGyn?lwjIk!dfT>f;^Qy)Mc_qr>&U{J?xJUQ=n9cZq++uJwQx>XM;#VDhOhC#5pZ|w;??=S*_x^Elg>bAdKN43nA3IzT-9{bc9PFhesvG zU8dY^fr@~zig5AddK`7WMG;yxU*RLW;>%tCRw)mtUF4%ui8ytG!KLZCvR8x=@n?VN zB>iO+KPEl?OriY696d^zrJyU9Tkzp7(UCH2cPOApyB9;Y8H4VDXPy?_asEg^P4E4F zoN2$gPQI)xyS2Hc&bOEfX8lBvl01OImGAx$;}+E2QeI0%wjjwS-38~Y?p(H682E1R z-Q5;km(9^YPlJG-k7A5r;tZvyNh$VE+QzBy2nLR(Og-+py|(8vAot3H 
zx9TigL|YrE&*C)3a5rb03QUQALI#u*d64HX=a|)_KQOm(97C+;~=ZIsHcDbU__(_%XIuG1vU>IHV<^d-~8@TToWhVxm)B(SY}W zO_6V@pRu;!^pAT}kJ9$-@$!IIy3@ddk30~ml*vdk}VHY+80b|s3wHjUrRMTXegb56?C;gTzrzU z>r)w%WLc?(>c^;UvOO~A_gh#j|Nj|L^RJ#nO%AieBi&D`kn*{4>lL!vFYBNWv0 zrPKwL1$$~>AYTq=xH&syd1vhRw7jgPC2Q}wc0`BTr!D}BqBWAdFMs3EsENT$e(N7> zqnq5x!++T9GAo5?V102vw0fXd6nGNwEB1LK}p!Qt-#iD+-|4Aa!f8nQuXKBVW+nuVX@OOtGB)AZP z*)jEL=rIH!MzzkA7n1MFWW(;f7Xv;cig#_v(E@|_DDfX+CZY9bd39Aengf3J;`O+z z4>>BS=_Uz!EFWalc*2bDb8?Q#S~ci~FZgnsJ-6~r9d!44T~$@zOT1`gSZ`=~{~$+{ zUjlO&5%PZ@M(hn#ohZ#JVi@2S%5t1758?TYUc$#a}q&_{xu@Xo;k>E_e)OalVu9&)IR7URn;t4_` zG`p5koZn3-k4E7mGa}sbTvADKq~&l&}|n{9a`Ga zpIgjYBcf8z;C*f2@m{Y%6Q;nA@JSmpaX2KEMMH=5&j8EXxXiO#m@n+m#W2woR%QEv zW8?SHC8D%lr2X6q3Z+TZSG$DOp-Fz&xphBURh^dxRT@nu+o35MpC(5`P0r!&W3BjkYg|+&2)hSA%KVCUG{4eT zTk6($G%dH>|4f`f&+JBjLnyoeY`)m|onV(dKF_(r>nLg^ZmUXR$~>h7>r?-sZyhIB z76fY!n&w6#pRxq?34_ej6@rVDF780B+V0YtLb2fqh4_hd&ek-HBmWfy(f=h!#)=6` zgZfCNF4y&5!lbVrH9&ei_NACOo)mru1402I>(1T}dR?u!`{}R&uze4%NcFh<`-Jcj z?V$-h^vvRUT`u%KrxGf7T&E!W%`=^bQJ~=^A6sv&!L@NO4x7?n1Kt|lszjoQFD9pf zL&hS4jV25eBj{gS_TN$)ZU_}uilG>@lGJVmMOPBXT>YDF%dHfI&PFQoSY#+Q>!nI@ zIjzHtdE3wUG0)lk1pg}Z{w*FOfl@kFAm34X5r(+@+^qs+W?gv5hT2~`U!Ci56LgCe z;`!WMs!gC;;JoQP%5p9BkJR4Zf6pENfxgXg$Jnq~$l$KrW}!^)53B?TC1-uk;Uan+ z15HvL;xF-nEg!(YwK>p_tPtV$YYiNy8p`AP}l#|iu`kQ zEE`yO9WOro|9FL;P>u-T0gj^y1_J;6{{AQ0j{hoTqCNwc_Roy{Kd*)4NWcT+JkZ8r z|9wXP--Co^{?caSlA!-nTlUZEVQTPzQEK^s$AA9xpVyMq!|RZU{UMTQyp>Dgj6Aog_(O1b@G` zM+th}MkSlIR{qhr_o}s>t8-6K`%0$t#>HmejG#{8-RVk>6YEIkt`vlatisj3aCBK4wJEe5%eTUAmFSq4^-FA@|1dB4gDJH1p#PUe^xnN z_Y<10Qq|3J@Z1-gF(G{yfLMR$(nv(KQrYYcnwwYGN!|WHr`alUOT@K0q7JN+Q&$D3 zj5~OYQ{x2WANa8+^BSWFWH;a z-&uVE+u0SSb6CV$F_F&-PEF{{oPK1~sh9bL%R*XZt?%&&U2ZyVW&8W`vvBcCCl z9fiZWG_vQis8cS-xA~xA$}Lg&y14EKXgl)~ITf_;9+`5I=g$q7?0T0R3(4^=ay@<0 zRV`N$&YA^ii=<2)ZYTe;rAf)b(1c7Z<`gtJqHAy;LEyY+HJeffciTGpt*BkFyxMFk znW=YrKi29lHJF}yHn^Spy4*}l0C4ANS86kDR=O=G+k+SY)?;vtnRifoBkF`2-*-k5 zSTphocwNgVt{U)bo%YYEH#^KPStwMcvWN+?9ZlrrM%V)+*|Y@qCnwC&?JEg*-MFz} 
zD5NkDw`L^Bqrlb}t&3^QR(czwRoqV4rA1@vEsGmco@l88#?cv~0Qsr>{ zGenv1D`@vuvxSj+946}duh*;N13nuTwY5?e#V&p{3v^byk`+TGPtC-jZGalkiTmYp zDBCZn*6Q^sswy9G$~>EDZ>7m*Q=rP{`5rJ2Smf=JPNK_Zf!=pM+<&DHZwd@SZdr_W zqbP~d-s;Hl>$_}m#NAGHND#?hJE+O)`sTX#n<&TcvBPsW3MI1(KV8f;0<66J0KwRm ztvJ>7$k3-n{*zoeUbNo3(R^AGY!x%r;>uq0Q<_o?n`Ny*I+>l3-b-2Om!nvIiL>oK zC`8kAHA;uX|0g_nZYQfXQBJDf#HW8cUAnm?*LQ{~%mjFEFvBqzzT7f`TeY~aVNwCj zaEY|qT1nJC@lKO9&OI|Cxaxc+5344*DifMdhL3%nhXHI<%90zBpa0S&Zd1y*EU8whfOK=% zJc8RMG5_AP{Wsw8$<&|^OAltqF9zbm8Ds;c1tFjjcGOjzAE!6(PDp>2T`ae9v|Yub z1Em?bFUy$Z-^C#=X2rx7yDmq}&iVIg#u&Wgh*HW<0;uo72H?eKE$`n$n2K4leJzIy zEsWnDQfm_CYjHYxf;3$LqBw5(a8UyQsZL9s6jq+zwceK@5u$Dv!9a8 z9p(bE$$W%m$PzV96d~f#rGX|%l{d`^S=k!sp5sybxYFnog3E6juT^K;vCWH+)s!Gl zQ?}e-kw>FSx8Kozy^;|E?u!2Ip?$_xGf+oE{g#L;$rSu_YXv}$7M$cZ146ky$Yztg zB?EN|!m(+~%T+7MmD`uaZUI5Hd^UsD4Vt58Rl05Sm+_`8ER=w|Q?1cDDw~M{8?yY( zS02yyI)GweYq>*#XJcQcjlDY$xR1?@V5;@|_EYE3)2YT+-^4-L)O-%x<@~$RI(pjT zg=SkmeF8=C;;B@CX@q_;7!bpqLZiJ$?qII}y#b4IK>;ES;#e72uz=~f-dfccl9sb= zwd2;?UuIcQ=wGYxw4*cwlcajO++d$m;6Z=Rvs|fIKv~*w%vs8>q#hA=t-ezyeoIqs z6jVzRGL#@+*REQoqUdt+n}(eJ6|!Q3$Z8=INiB3$Bi~(d|A}hKH<0~WQ{Qoudq(eV z_d!$CMDG-?-}TZ%ZsSYc>c(rRXUIOc)tz^P_W(@^ zy~LxCc90Jo54!Bugy78Vc`IoUBA{--fj*zUmvRvwp2q%SL)aw)3R6h8FXfQP7x&}` zi4ynsoetCDwIXHSeS*dxhf@1ljSAk+n=NWo`f3dp^D;?{*d4O$-1zYuye3j9jz^wr z%k242qG3(45qC1y&si_B>B-Y~qiwJ*x$Ud^loq@%Pq=#G=5-2c7n34*p7k0?na)b^ zA)lX!r^(}iSfxMRSfN=Mk0X9{S~um9uw2tVk3Uq0RYgwlh>_SfZCnq# zxg{-%S(0-axMu-5U63S8fB;|6yBGEQU|f0!ZN5HBzP2?SbChn94RWOjyw1s;&52o8 zC%uB963Vk?lkk^eysj@l%0lq6*hZyukhf8Q2enG(2NLs@y2=f4HC8OIrJ^8OQ2V8uJ=7IcippKduVIVQ52)j?gY^UH z?B%6m5h=XL35um-qZ_=6!V=KF^$MY~nj}*Giup1C2C0wQJE&}E#)I+>a>9YN2Af9g zCRz)bKdAPI{RrNqWvexE(}rBn{&0LXSyWOSH0=JsE#^J#4olQ#Qa*hVXPou=fD%Zq zVheov;}_Z+f$h=$5-$K+n>>d{FDa83`))RBsmRy!{fN;q3T!01u((V{Rot8dwsm2t z%e`o)lubKK^w+j+X2}ail*4$xe{4EJ!6{9H2!gp1P#_)Caj+noFE>GVYMQ-BpW;h< z`GmtG^~z;nF8S0fP`n?FeHXPe;M^hVeCbPnnaq2^l*7%#qrgRsHjW+xDnLGYpA9Q6 zIYwoGGa2$${2KY-lKFK0+N4=fLd;0yyQ;pK<^}BniLW(6d>an7$vkU(8$=XO@j%ie 
zrEY^kej9LkS}PY*w(|7F%OiHbUX#zf>*0fuLb3N>{{Yngc+J%jJvj;X#>ZC^znGDD z{6+}u+#;2*D4kj&9lzl}vQaQN09#1YvyI|qQ<4r^N_0H&@fD=8{J|qOxeRdf;u=Nn zm};v}qDVv}u+IlL|DNK1mT96J`0I&6(i|+i6okC6jaD zO#)!gVU;`2&-QyHfZvMCGwAUl{Z#!ZHhKhROjY29qHz)~wc3Pa_<{*Rm7@%JT^dLE zoJ0n1y<_LC=d>phJPPJWVEdz1Q2~Y2N$rX2yo!D=xZOF7IWwjgiw&mRIpceZ!VBs8 zNCH~OCVdJi5~#;M)*A#2-KWPq{@(=O|9*1Bi1>6pQtvpgu2>&p+@n*>^1JDrQ)P)D z55L5jzd7*m{Gs~x;O>tCpCj33hp>z|JQ$_D`g}o48P>TtUVl7aR^%J+3QlZhy#-f` zW9;bh%dyrSR29TrJ*^OhWJPdT)Ot4yKL;Fup(zKV%WU^s%oPv2@{9h4U6E@0IbO*} zGt_-3Noj>6aDnmqKMdF>;28XkFBwEM14&P!wSkJ&f9k*I-yhP~`TFqqOo)QwF7IF` z?rK&nvidOSg_-4xb-pg=%;2MIifTDqpuap4U_(efQ-Xs4WZ`-$(j>agIMZjM)QoqphRd|hXJSm_pa z2MDAI8EGFPyOiGJ$D-ElKtWjM1U-u8O<9Dq+9ot&)a73wa}HsA_38pUfA=mnuf&y}r9&T`1-g6wS z*HjAW;^5@=`5@Zx`nKProk)DB`*y8shsb5@%;)!u!CWds6QjdJGOwP8NwKG?q*r2)RC9qb4-ooG>8ZcV1;hew4$>iZ(^{=)h|Cqeez@xt!e08 z(kt^Pamc2rPB_(HhsRI@&aI+yv*8R#D&1ozt0GI>2EQ4fYMB{1ZI;VHp69c%{@gRr zh}VS7RJ~(;^6?`?oj=pV8FUtpU+}W%HPl4W5_u~RAjnRs(7;B6fAJ0gCD~qUINk;5 zFjiu9NL#5>dKTNrujabO?aLnt%7fmzfsVtgsGEy%BEKiq zL%4IM1h+->iLQ*F6)y~sW`QUWLqi8Sw59jUDM~3WK0`w^b{Tx`2SCHql+Ku` zXW#Se2%BL+&_s1(P;Pd|`*g~r1d>_crGCCmdYtIoynNRK1G!(bU^{E9JiS76uMuth zMh6g6mx|K7%cU*i+MwoQrw+)gb`5e4Xq}Su;n888b|XLg)`c?eHM6pHh(byP{%+Rm zz|80Lc0f@Ge)>}E%LKZl`gVIX=iGd3g@n&CgVTMy?)ezdJ_hbHds6+h3mNe!Aol(l zi(6Q_)<%Zqp5Uf~JY6sU^@w=PF#r70{2ssGYy2Gd3yL8f08`go+n`jjO^v!Gp zuTT7$;`6~BixGVk#{}DOx0g_plS@Cs5=Jw*h~R>Xvmt(z*KZ%_)s;HN&fSM2jw$xV z#jtPQ;H3zuVUw{fepA3C{!@gN5dNqGZHnE)+8Lr{uoe*-Y2gzphC3BGaTM-?0}T_+ zU_Ui$ru|iM<1#nZsNWZrg5SrZ^BpVH2-B@hL7!`VhU9JsS8}*;4Uw#$QC`Gu&pcY$ zRaHcgPO9Fz5PRv=a%EcIvoo@;v*VF*>rYC5d1kN+=R`%}Ehw-13jSW={gx;UDpl0m zU4VE*gNFV&GN=;IZngT`lfp-kGt@*8Y@BGp%)Y09E93w(>qalkB3AIr%y-r$rA>d@ zo-wd6VMb7Sgk7Z*BZpf5z}dL;db6qj5!m^d_dGsb^CDPd-FE{=Uz{}b=Q6=Ni9T!( zMHyM}iR~Bwc{WwA)}#J}t?w~~!`4?@?S5%ws4wKKck9H`<+?#TtUT4R*?tp?1tm2@ z%W%A%s3=pAN*ky~g-o1E8qR0|Ev8riU3^D%joMCS+j5CurclJYFD>>FX&3s(TxwYP zqNb}f)c%LQO_fe6;l)J1&#&1 z1@^@HY{b1Ii)gZQdN1gbiNzI_EGYjq$WCWL#*ofwt;hP-x@%- 
za)t%K%s?VyUU(dh^f`cA^&JD-FT*FhwRZY_h18WP8&pxJX9{`WNC4i*Q;+R)&(hw) zocLYN-)CM$??={t6#LfxICtogV$KV0Phzu)`V|dd@!5q?QGmG;cR^S~LSmOWZMm{! z(Q`Ji7peFtMb9}}yM+0J`}ebt`U4IfB>6~O@|hg4B*F_~JFrrCNa1H4*D&f1(VoA` zDvenMN4=>RWN%H5vbix$8)bCo&PnD}SG#;s2(ZRB7T4c~6@LoBBbne7YYx$jBr?m; zx8IdZXWo-ng-kv7{U0v?Z^7=Rcd$Q}DrFi6 zphS~$wwN1!@M0b8s#sT{Ci=M7F!!ykYK5lV>mu{=tpWasXQzCOQ6aS$MVP;v&4&v2 zf^K?mL-!NdoS*nyxl7vy{ODkucKZ?g+C!X1sl?F3Tl$vh8!SwB3+s6E@vQTeCv8i6 zS{CbE)H~C^{qgL5E@J9s(FW1=WOdhf;~Q_Z6LXLhBF3}=Ro*!)1({Ac6oqg-a~Q}? z;T~0#C+r3g$Pg0NIYhSRr*O{PWfh5zXiASgB0<^Q_4`!t={|<hRDfwXU+UhLg158ac4Dj?i#N*v+5T7}(*s9O?mMy~Luxof z;q2*++?fz&B-wY;$G{m8X}imLQ<~Ol0ySn+%A+MP0fES7 z$GdmJ^ky|vTE!YQtD^T!(+SYd>9P`Vhoy|%02Sw#g_WF23bz}2_95e$v=a7FudhQy z5xOf(OTn$G>q)68D)?#&LnAIVnI=YxQkS;X9zV6WgaG3jwH}kzY|?4rQonqK?WUk! zpHBzH=Ic={^@M0xly5SY2DfW69s#(et7-t-TgbDtmoj~9R5jw?DkOUHhS(J z%?5|dOy5nq~f>+V#unv?ZV&>A4>`zPP@8Zm(+tn6i8cRuXFB|HJ7em%MA~ zmZoF}UMfVV5{Q=*;8VKQ{#vAs@b~|0=5zeS1oe>TJ`W!Kf^)Ny@seFU!;Yy3UaRNX z0lqOyRjjnLN4RR8_Uw7!w}nMiYlksF2SUNVJH(k0tSNFSY})*G4$aa*fYts*{U(k% z{cs(_=Zu!It$SkO3KOvEvV!m=wJ0UH?mg8%*HHh2s7ZJ8PRKi}cCr@jfbhZhs40Ki6bSzFn|uY4>!uFWx0VTJ!}d z(k5K2Sk;8spzi3?>asNcc+syB8SKXGvK-m?0tpRNsZf|)tLJwj>;V1r$G|V^BG%`Z z6-x)jm`syue#A6V721FfwxCD#?igCcQg(gqT%pg|*j5!yAM#vezF5E_SD<_FdGzm< z*0%7N?Y85>UW5oa+(T5h8$!#QYA^?7Q zBO*|PVu_^~2q`GtTKP-x8((1O`rFRQ;PbWPrQH`;YrsL z$~Ex)P=kE?{H4Rygg+w9Xz&x&432|CRJeu$v zNea~kLCp$Pw0L@^!U3`aw~_C0C*E-7CU`5YbNE7A;>d9U($sSDc42Wr`V{3;m*HDh zq=6O@hUw&ndZs3$X6q`ui9~D24sAHJsyr*L3L^qmW8MQg zS%PCEffCSX_C&%O!(J&-e_PnH-WF}qkv=UIrT`T(tYu(|57>xkZiGpvTF|Y2nKpVV zI{MSu0K%Vj>=>V441oLPyX727PE+98e#$@jVhJValWJ*_(DgQoU|vJQ+8cSw)nzjl z2~_@~@u&;Czdw~SRg=47uF+93ilpNITJ3p@z6#9QT0YOzXaZ_=Eb!N>O<10_xE_s7QH%LShOHp&&o&!{FK(-1uKoj+l{cjKyDf9F;4ooi&9K72k9?qk z+nx=xRd|<3Q`nwov13C?+vB($Qa$mCZma#1P0lmqxl-;Pqh z(XmIVu)J8Mx|S6UpJR865JaTkr8j{(24&~TUasm;X_v<~yGuFzEK?|tJ;yua)z!Ez z%qm}~vwW3g?l+pOM!Ld826>Bri)9?Ja_*txl6a^RnjPsCIk#VKdxLjDf}^-Df4toD 
z7`t=NOcTfFIR&2|^?k3iNMfJp`gWtYfp+X)k=IrcOPG)hwg~iY2&K;d@j+4F-I9@Ek7i4aLlTY^DYGTm!bKSsOf*LCZ2|fh%L$S z`}Q8$tkdO?U|HOoBq~AV;@p27u&SBo9ei!c24nvv5>6l0<`pu69!U;&^e5qTjDbr& zrhnbu56uKNTb(IRuwCvapXx@I>8@KA^Wv}_;eCo_v`(DCs2m#K6w}se;ry)awH(Q} zdLp{Qpu_Mus42yW5jDF$mMP{Q6&EL(P&pzK({K&$s0=36%A}8CbO_8owuKZh##G-V z?P}*Vm6uH9Y^>7`0TfDFdjbiHEsk+Z$yAD~eC$77WB4^kXEj!!xdtNDUC*A@_{E-U z9XKB6DX9*l_S+}l4}O*q}OKKhH=v8L=La$dJUrTJZZURGqBN}>7$L8MKsCc}2I z*g8_Fj&_~a7KPcgW*hNi+N7s-u?%`4E?y%DXj!fsf7QNj21DT23l1rrQ|iOC0@|lE z{(hz&J%`mJrF)R>D(y3dz9oVKC5mF7K21(2A~o?y-18ShVx;4d1vC>ptN>6aU4<#L zs?PJTfe@UB@qQwZLuyqNu_mZ2Ze(w%0(Ru@puhH!b1GzA!P{D11?^D%*a}(|8Qmx5 z*xNnDR9ChTxGy_(hxcm!U?SE95kQQ{^BcmHXM^lJAUP`G$=$TgHg<(6nTXXk9z9PJ zub@A}x8A_cz0vb!(@gWE!!-6B({A}C3*X;BynAGqMJC*P)7& z@Istu=cw0bR@`{Wp+ZmnU=t~Vp;{P5V{ci}FUADA5OOCZQT;^NiMpsbnf+ehb>)~Q?J*=PON+d-!`VM{5CgZHpj%cetl!NHSEiu zCs~5UN5Q*471Z>wD1>tvKDx-wuo`MxE%MhlDxG8##}AdT6RYPu(~V|kexr)c(a;_3#KBB6;V(%;QdmA5pt|SzA)asA|%gXc1o2rbY?^+0^R9lNLjxEgJ>;U zA9qc-K7XidrzVWOH8}5ds>SW&7rVMWj~p{`yR=X2p&L^ zuBraF^Co)Ptg!#US7d2p*luq=RP_B#UkRHp%`b=) z<7XOx-H`6e7UwWK6In!oVE~lSsg`Si(pvq)c_fN?lM@fqwT{pC*D2GiOCXiK!{+s0 z?1ApW#3BasG)b8j9G;^@q%Ns)1qqu**?`%h#{ZkXat!We48n%waZAMw1>T97@ZdDc zqDowuLM6Fh%?TG{3N1nVvxC>J=jd0k6GK__W%%K5G<}JV9?bEZ#`6n1?Hd(Vy3e_! 
zSYZp?wMBWay@o`jDIUCQjZW3Fv@qi=AK!JCNYXJA`=+6$M5Tqx+?MFScI6(Cc5aBL zpBb@m^%R_lQzA|OybNiLk&JtgWR~z|upHnOvFziNs5o`2H5vC7uWrU=dcbx{DEJvM zur+=oh#4=mhtl5Eka`|Rd<~q9akJQ?YF84eugZ(xb*;wDcg7#Q_D%*F?>D`mI;l z7;ZU*TY)YUL$XXJna~Lf4B!2o!qaN+>E**EaLRfuwKpW~%-rZgMZ43!CkN+EDw%E* z6ma+hw3fOf#ls|4A|dVK@F%oXqY*f~w;@eL)L)zW*&p#uiZtx-rd?w?KMwhmGZ7Cm z4=FD0BPC%6r6_j=qiMS7u}uxbrZMy=LAy|1y;untR1?V)1LV3bEWiN@!IzlWU^D_2 zemthn>>rqr;QY)}ju6xkTpkS(;7BVfOAL7jx~~w|(e?OAdy%!8vOI?G>!e1Wue*0E z4(9$1g!%M;17Rxu0ff;f{tpnQ!0B$n?N!%=3FKA-Rt(Y`e|-iACCjRwq+Mr zT^Fa6nH$Hq*ku8oHDPp-13hD`d?lXvgKJ^CR3E-Mdr-Nr`@XYsdS2nDZNY_Hpw9Aq zW(|7zad?&F+ZGQJ0E_fEKZz1a^#1_n$_*HigfK(rV?RFzu|DI1%mE;)qjia+j(p&d z$q7_+CS=*4YmDo-hj4y9JsPdn@6Gy1(XVgg=3&M|@(U$mI{CAeuk{8`Ih-_^9xV8( z(b&7%g~8o?0?96`WHfg90X<5$G7?^YaccHW7w@@3=9O{tP6-)ijfuH9&2u0nIor}m zQKtJNm1b%^i~*$E7n@x=PMU@HF)xTx3c-1Wzq%dA<;#MRXMb-Ntd4P5Y7FVh--d(J zNjy=zt$XSc*UJ~!DUtPKk;UO&1eeF|i(s+%<-Nf%l{mOIU@SEsxpX~`Oms}|Y`?kE zEsq-pkfLy;QRxJ9 z*Ue-t+2O5R?tg%tO+Q(xilV(Mn4n#3cS_--QN*2i7cLv_dIdUGC!|v<9Ty8K$+ZWa zpUeTyJNhz)zB^rRJhMNmiDtQ$XMmyMK5y#r=#w;fX7nvDZ{f%a!BFuUTo&MBR7MCX^_v7zRiGQJ+ z7?htgCKoNWaPZPR=~R2)2Tt8gb*CK6u~{j-zpwD3Z~?+jV`2~>!f$AFJrh7)*un}# zQlAkKLaENFNwMAprp>%u2Ld`srlP4JI8YCX;X-dLArFbee;$Bs1k+AdtsU=^S9R|| z>zj4tX&Ex6ooO;Gl>3>}k6SwO0@vZlTxSB){Y?V)iGt?q;>^b&#?LEv6*9j5i(Rq) zyf7`7aH~0y#Erpmjayq^A~vLp)8h5&2y)`+E2Q<%)T`}hz*>y`eXu@uSow4G^J$*M z5WtPnr(zKF8mcDF4NIWkV}Klh4L5#tfJFt6Sr7g~`e1oN0c?M3OuyN5zRk@%j~mKy zaut4)H92x7W0Vwf;6x@~rOd!M5%%aFRCNbw0PVYqDJWv-FFl?@h$ax7)|G zt06`05z>u^pv z@A$5~)^;FZzxt4pb+a%tg=^B0U|!j6ve>?;F><7NI5qxEvc<4OWQ?XOuN*yO8&Y%6M|%HTn}@iAzpZ5i z5{sa{a<3KIVRY5VNEe%C7_4gyUTisG>_yQen z;S;Mb{a!wFQb&6&Gcm;&jcwL90ey;koD41MS#b9UfcS2_n%U zCXXE`e1}x&C)=c&P2b+FFKUqM%;f8J=Vag98#iA$l!U#NwLe-$XYiXDN{&y9qw#W~ zjg;?8k(+D^zn8dcN#idT*syQ*qSmYyTcg0Fblo{^V~lroBZkq-|$2+yJ1T z`SF>av2BO6QmE&{m+;uOGas4T6VP|eI871X_g6P|s7ELguT$mq~xv_usUzrTcp6rRP5VTnE3aipq8C(mA4n@%jVA zr14YOkp8rr`d8B4lbZGkzx)3omc?64(NwxF2Uk6hl)r8AJq>OiO?nZ|2Xa#V!eZ2| 
z(qgyEXYyK8+l*OKRtK(aVBD@ciA=OHwiU*7%9Wybwnue58@kA)+#ShmhGQQGv8)t% z)JSk=gs~TBv(T1!`FY`0vGu{AsBG(}<|Z%?A1s$uM!N<4sszJ>HVO{qh{D%pWQ2Kh zPPF9vJ06bk&WBaDGgN(QR{Pk&QAe8A)r3?@cyp6AjJ+4zJ>K%@D~do7>TDHQ6WKb! z@a~$lQ`5`c+0tJdsuEys{Ej}*rpUt+$>W z!5HoFL+u`~?DKV&HC-)B(Cs>wi|XN?Az$OVIo=+}FH2!g7BP=7dD)$2+B7}>_H+Q8;0`8c5^ zRK8hMKz%gdkm2_LFQht&jLvGsKpqmJsLO0-7teGP#^nQg!Uj8uIk#g z_pVzm!QRM5pKZA*3AC&sRN6_@ejZpH>Q_64pY)K=NdU38DKdRGi`X?UBzkFGrWa4kkyg-r+#^(FI6jI-0(pFD*(g?RLYk@;wL)u$%uN*c?-u>4ZRO zhdPK!ea<8!<`?x>>t9L4`B-jMv?`*Scsv;{dLo$2nnaiXX#H@}CfvN=$ z8Y;Z2er^BuHivVms6(kwNDCA$BeJ$j04WgjLaIWHShZeKBgCKPVT9Rzy;(nA1{*Pc zbqa15@yggfYN4+JgW|UxC#Xze;0oXHV*nP=7@BDtnPL(Jw|Vl)-D1|TJ;8lx-xj>M z;$4aGr3&WUTKO055o&X01-Rx6AYQSP<5A!{D;g#*4Pg!FsaKz}q=y_>R7 zk!G$6BifA!(WN088-edWSax~7q9>*;$r-!}?(-(uuA@JP52-qYcj`KIDIzT~7nqoC z+u#wlv~m#CxoPj`n=j()iBwdIk6Fbb9Mj+Zw|C(o3zj%}tt@<-{8@ zM3Oarv7QjeZJcAEg);ZEd6~_$rs+h&;NdRPR1&3py^o`*mi(^436-6k!0l0acQ|Jj z%=}u*<)Iaxj7tjQzrHfA`$LoooPGI;_(>B(lbHd(;uV0 zGfKkvaKj8EW#>OnuEsGAPfw)5Vhsjo=} zVoiOXIh#lP3yTrHv0}IWGtkH19Xz9VNXT*zUb091unH9Swv@E6}VFw|3oM@RS5N>~?6Kr9-ML`vFTFqm+sfTCx2bv)sBTn_~ zvsD@jal5BuF>&ICLDxH^`3I3wb$hMY3h6D?s$|<}7OZa`M12h2Vjp z_Ayd{D8=0lQM+9dGClS-TTrqg1B7`u%%8m&MVy z{deC|ww-G(2!vd;M~ryjiqR@;; zmC#kJHdD||RkXiYo7$HnT##!99HcP4CPc8Pzx&j#4s;B-lx7zDXre?&@@Q5CgM~gJ zb^34cbUncw;=RnHFm>LUGwiF4X^iP$ViA%;kXc^(V30K6ZpmeoI?4|6+AA&Iv!vT- z1g59e-PLYk=!g66`aU^R<}NM6+M?`Nn9%-~`6#3^OP=#ZtQ`Yk zU}=kN^e*B=7|Oyb?0%j41ljlcY)N^ScmL!n7m6{`66QeYR_RXP@9!W6wx@LlpE&%U ziF*t41U0ok^l$#0PlR{ZWLqoXXg37B7jzx_WjX4_-k)L~hIv%CnWx|lyE5tL0CQS9 zcXJ&Qwu*iG#+H(T*@1w)zxg;M2=DltAz9MTqrvZfh`I@7!*? 
zO_BNc`Ps31wWjKv>O1EqF0LbGQ3-WTi+ z6|UoPwAAX>dU*3&wf)Scbv#Lnd_Xp!pyhCYRc(!`4myFQO0c;iAB^b_(jUo@Z?T3tBn zt)ZPb4uMX!3@Z60)j`A6q^DQb{D68CGmt?nqwXZPjE=j2PfgI`$4KOa-`GXUX43=s;#|D6Y?qU zsP-Xot}u>?G-GFDF*=f$rue-wXG{DzE$sFDsdttxq5bMyE(+LF3s-ozzG{d0(7$?w zc;*)&&Y*94#aa&1d>Lo&ZxLS{xJS&X-tp;3lJU2Y$c?!6yJ&0*f;c`~O`a;Ev%)Qf zrTGpqTZPqdNqom$;;|S`xmp_c%bMdp^3o=vWWri`*G+*(ovO70%&b#>)+uo$<@wi; zU_F4AN=qJygzY-kLags5DMDho2FFAbl@yF*97@@b64XsuD|s%(*TT(LyyFk4%2Ioo z!o`SmLl3U2LlwmThleUUMjk)$#BG_BSqPBuWw2mh@!H`U+;~sRn2M z4OTbq5-12}`ppsHYu}ChLOjCR|5&B}!>Dy(`NJhXYBc*HOKy#1yyti`u{r)Kh~e$Y z+Yq=Qsu~4TVFI!C$ev&XiPyxCH^bEb+`xaB!rc+?0UIftZQU9wU|fcf$iK4{JxZR; zh{L1EL|27&))QhAA@#q9;ru|+EfR!n)Cld+%B3G0b+rCX!T%52S;plLb^ibF zKLA)-lrilS5eB|QL@#sHssijL>{Y_hAM^h3=lC0JOCxdVadi-6o93z!1Jv}IpGs_I zOY#-@ph;;q*XK<}(_{e;d1Gbj6h)hAyX18r3d>3S4C3DKmCaTymXxWNmYhLHiTw}+ zgZ_R5+1G+)I=J-uGRpzil@$7v^J#pxId`jziI-IXSWz<-qn$3H54&@GHQ>3DG+Nv!WxU)B;jNN~zua#~1cc#>8))SV&ZqqSLA@hY zKqgUzHrKb#`=s+3rNRD49K5ZwtdBe|4w`-sKvqO>Hy_-)XulVyj4d-u1jaP!txq9$pGL9&mEH4kEbx3 z{s0}t7zGGfJtzIV#7Wi=XC$%Q`qI$DtAzDyef9YyvC{~60=Q~9dQDGX0leBxb=Q+> zxn^Y)Jy}=q)zNbGY{hcP;#1S%{Ifhjm6`fW&+8LhFOH1$aMg)b-IJEKx>%V;X@S%- z8`CgKjUjFTfvEV06uP2N!*_uGj-D_$-k79M#Y0d%zGArH0P#l$vU3+~Da9eZ2! zN1Ih$0O^Bn^hakU3QTxXy#LMB>W#rTF z6DU~`1`Y84Nyc9D#D0X7w8$PRc!YzFn824 zxg9k`9v`C6C-uekF$5ZH3n8fjoK#&S+=f1z25uKSDk0}YUfUgUS)VQdG>!`30?97I zCkI!bD-ck-0I+lK4x3jmVHyC|y=o|4XYhnfD=%$?lVqyJ3T2~$Cu#DheTy-s(lYg` zg1zaoqv^geWcH>lX6=eZgsbN;e)7OlXZsYEdrb<5@!Rd}w{8vu9*0gSx0jrEiMOR9pHIO07;| z2(B{sir=$Y<8${51H6V+r+PI-z;kp1P{*@$9R z1+H1Cs_sKh=i^EpNkB?%y0e^f*H^#_cBnkLQQSzNpds|sjK7+1loL>b7Wz3X)dRJ> znd^$q@!u=FI{IF6$qZGxjaGoCOLc;dMg#RKV4OM4Gu)LZY@Nza1NjZahCBPKMH-x| z!t|3(gbKtBD7FhaTz-X_XiJ%Mm8%tqkLem zzTVSdEI%FZsxojKITbKJLFt)1u=MYf52rzM5ICB@FABIT^bR=Vss|g2TZgtNyKVH?kj=m~n8XS02Dd(dPKeBJwn!4}2nhxeByNRWA>x z6!0jPvnkr z*(u*#8e=lxpp?WvLmfhfNXS#Y7%*rov)ttlH;eu>Baj~rw!e6#(Ijtux%O>5+=a;? 
zFzWGVmF4$?w6|5Q;*6N1wb~2YRchs-cL(P>Hnnu+c4F=H4a9W44maDFM)*b}dmgDH z8@rt}bkApwT0q}KL4#+NS6}0-=4*6=z0?$f0*UD%qdm8q?VLK9FPBi?WA(j+iO&%q ziKNcbXqGAoPT{8huGy05kw}3DZu728vcWk%%fulJ8Eku!!D&tNFkN$}_j}x?ttXzY zby88VD7f?VuH4Tu7_}{zdwctD-9Q;~SJ?DSUGp62rXzME1eHYdH4DaXk*}F6l9kgW zF=3$-K%I&jkKP(NS_TCbK0$9nqs-iS$OZ1!_|xNRSni}IvHz_it_88_h^R765{9OB zJm=W=YQ*-dulT{dcAM2X!`^Gbal(K&HbqxkqwQzZjAHqFaXpG19ElU7#ccp;q92Mg`)2fx{YxrMv*6;Ao%99WujRe{Hhd^7#0Y_) z#t^Z3@P!rrc*jlXo$sLC_~w_JofiD8tKiK%`9KYIUi!B$4NkVIH7!g$7e*Z*^TamKla&v-p+5L8$cMfhSnbD`nfw-la zg03$(E_R#^Tr5!SWY4+4O`vXc8>-E3il_8yp)2!wdYAJeI<$zTq^p(HE|doC+IGDc ze)PzhIoSO9c4R&_9|fQr{A3ZqO;xt?Zu};3G36Y%-t%8&eW>D1fwqhVeub6c>uZ9y z*#ZqI^C$czu<&R5RH))P{zn=@Y<=WAdbK51%VnkWLI-DGTcD5E>%}D0GUG?t8gpaz zjVN!?mpUw~iLNh(7kXiWk1>jUi&%F2Vl8u&n@sS!Y(w?S0D7$=!O61slpDYGP9x7g zR3~AJhj_DM9_2!q;A0GSIqZlc2(HSP!2JDJRL>nCl-mb^Vu^3Z^Socbill~4H4_yi`phI5h*(JkglGAk5eYnK7@$AQg18bji?e)1{ z7!;0ao?Q8)dlCVU2UFX8G>uQIgZd~f{+d=vi)5kstbt9OZ!ZdE$s+Ya<(4@lTlnX8 zSCf4PKGw_d?t?~5vT!G_bXG|UZj7SPGUp5bC)pXte41BgTiv8E{7snj2Q+)W)`Y$7B%pB1+K8c%0#qUNYvQtV+P-C}= ze4Y1h&bjArB=M1>#cq;QFI^#>&Qo}nLiNe?WC2B+$|hY$`J($X^-YsB=N7*75Xh4y9c(zv?elK7y|~=-@|PYLV&NJLtl6jiY?KIM1-N3s?JwsnK^a=q~&$raL1WaGh2;vkcOW( z9a;*@<7_>ha=ZcO1&1`ndaGIxwn)ZZ>(tmnc)so0rvP4HMPmoHgc_lHGYH9)Brp|jtcEzJwn-A=#z zfm->I>$ba49SkD}yC>xP3rcZ_Y~Oh-+0pP0H#TtN5JzRB+e)7^xm%dPrvVxF>#3qQ|8fps!1+Fr zV&KFo-tC0LwX_fH65hhJUiI4U5)3E<;z&U~ySE*ao8!NR%szQIG{?rm?%*SJ+JSS@ z{qo*)8*>`gGFxFLEe@9r_D4is>l+Te{RGC+=I>7UF6W&)r?FwdtWx6{TsZpl*i{w; zKV`5t!hY&hJ3OfZF=c!w|D=uHncvma+3P(JxdJ_0Xf^n?cFfDIQn%KU{08^ctI0#^ zS2sf5zu)c68DT{BzN@ZB!Szo2^dev#j#IW57Jlw!uuH?PCmj1&wB&NuZIt@dCWJL{ zTVIyb6fBg{K%QcVr&B1G>{J&zL!s}yY^uk2wA48KSYIFG1D1>$9Bb;BRK^`u&hJge z5HwNI(O_yJKgTQ3Au=UNv3!+(gs0C#*W*CyTK#0*lV7+;hM%>+qTq-)!ySCUNc_R4 z-oRX;hus-bKL?8zEAr>dNkf$IOm|J$p9juLL{TgrBy3`0iBgx-8`Q&Y23>lMV-ZHF z%a0A;Iqms#0 zD&61^)H40`gxowd85A1j~9C{3cR=--EnjwV%72Br|T^{TpSB<~# z^3d*)aD9~B#V{L4>jTB>z`3&_YkwQ$#c#kRh|h;;3)X|iwT3k-T$0pJ_2>NVF?hqw 
zq=tOR2)6CQ^lq&e-Df0bhHHjpX|uB^m}fudX&L4RZ*6*_#*5_gNk8}KA_iBdaBPw? zDY5Nf=w2Ny9-HgX;H1iH|0LKa%PdX|tmy9>V3GTguq(E4`0&<1sUhKK^_%-j_brQ% z-yT^4B+eb6aUH69I2Jch@W*4E3wHsPI)A#V&Uk7$P2SoQ&4>_`1?qxAItrW#^q0dp zQ-`-Qr>5(R>aBIVEI)v#(Bk)8u=NKQx+|42VfYJwMbYH)ZA} zLt$S9PXd#rf>}|Rk7Kx4hPnH|)#UMK9v35thj;v9IKJ!L;kRjlW2G-J>$X`HdB&ga zF`qEEw~giup(8Ro2e{jj1|lNEgs#XV9M)8tIa{99g$5~rK4^323ICOZY<=rzHS*7l ztRQV)ec1fIj33O($UU$&VcK5lt4gY0mR}yVFD=&4a6zi(IT*jC-}L07K7l6ogw~*P^k))muKPsJ+N5{0(Ir1k z%F&Dk1n#Y~a>f7BM=Vt&mu3Wa;ta%A?gaU0)YG83G0|(^r}cbC`*s`+*%)^35IZ>* z#=i#N&Hex+jZ&y&N!$Up{>3?HY4?p1TPqGY?rYCc9GDd5UCw|{N-~|bI6dHi7#t?ZL zq=$r#-@wj^_kS-_ZB0N|cidnQy51n(&~53RXAOT1H&#tP>yxH~0WpWu9nYkV(`Dhz z5jQTx9b0A&=b++%@xRN>1ss8IDi3sTmvy%nF83xhE(Mzt>+Ll$ z-+ym{<=9xDKQ@>D3PA+-?Vew3! zeSno5{{=~t>MYP9RVEl#1Ih>3ESVSYs+;<9{7)H z+&?8H857ufd}C^uohI=CN2x)D%qD&o{dHK?#R0JjlK!bc@|z0`ks=Jc!^fIfm}S>3 zmPxUq<*tD?zwfm1!c1P8;n0zSA5$|E-*(lOGtbql-BWB9ftWgJ?e{JOla@U;`R>w2 zKw2sNDFj{Wh)SFBPa?voS=6fCn>_6}E43eAqIFm&f476Py4jTiGuga)`}IlBW|U^v zNA!(|M7g@8K=3d~FY zBS%0Fwtad;0U+6k$ldzch*)0T$XbKGOoLAHl!8(QDMN_Kf_afa!XNaYjMv!-|J` zeD`qy>uzq!2rUjh#y7RJM#|L+zDmm<_m>y^es(BvEh?jmt*yYphv^yOFlNvWb>$cN z@vC(R?3!JV?>Qp1ph2^~d`*^5!}{4WXhMFoQc$|3K86h?<$1EO1iKB>#ESRJq8bkv zq|mF0jK?!pJq4;55bos?^|0P z`alL4aKb#MMym!JkX<}F+(D7&3<16xd+mzpay_-}Fm78A?dN12D)w?%MwuWEpft@x z5|ZQ>X_*_ajMR%^QFwT@RAvEm66f>bnwiT+Fj_PRDbcO77mAx-)GKif1I1O!dx#ap zW{Cb?_<*T-;&(=qBjvBIhr31O`l2={bZ*5(e{)^jejJ@hNdyggF*F6Tg1YA?pC9o% z|1HRy5Fr}tZYHRQu#oF3EN2uql;f+?nEL4#AWeu1g~-}GhBDg#P#*piWhQ*o7cD#v zD`jlPA*PlWX+9d&qz3ARk%LO2ilbL^|ZFe@^Z(NfZ}tB zk;_b;X-x(U1g<-Lw#^ zxj@ToD$yJzE{EM9y*Y%;*gW-8$Ew0wAWard;Z}Vt#pdUJv%yt~52!6j5bCt4%SL2s zsO0&9Xht)*l!+z2yt5q>U)Fr~n|r_4?kW)VP9MI}=bF?{fqnPW7nbaW3=NXZVLSOp z^U#G3%Q(7Ro6>yAC@b5lTa?aO_Qnz62<;}@De-0sr}G;AW<5j= z-6Kcs4=maZ>Wm}`k1QlVhbs1{mu$n(NJ|F0*u)&)^DS#srCX!uN%oxgNKa+^zwDY3 z0Bg#SPB=(RBH+wxNvRoXn`c}-{A$5hFGjZ}d)?--pSF9DoSsdKbue+gN^xx-ca+_@ zcqLVfYuIu_T%0SJUPa)kA~UhETZ}K0`_Ah_Mj-?4(sPGpG=n~!nP5#kj?2GPlF48G 
z*}uztHQ_^G&Ikl46lb3r=ZezZRf1vR9U_e)V9`0W^z%-(@wCIyh+Rqr_~Pxg z*`nM0OWL0EirxGHWV$+l;6bo@YavoipnBwIwj3%sfc6%l9C2U(19q6w%)op*!g#kY z%)o&bz72i9f_`d}X&Cb>NxDhLoT`1F6Bm!+smWcLqvyYB!u7<6LHS-U0 zfMUWH_xh-8&Vcmu+Uw>jli;?!d=#HOLbZuw`R4<<$L+BKlSh6mCI5Bd*^X=-V)>6o zyDV|5+zSYx1WBL3AkWB=B9uVJlI5ny*l{KynJWYSCKb-EjOyf{j-8zI=hOICM>%;* zrx#O;0rP||IR+fg@W}=gGA)0XY_YI-`SLpye`d4Q3|4%uiDjz|KFFV~06707oT}OW zy21i{8*bVF|M|`(yUUrq0b`I4tH9^rkY{gx`(V{nrL|`#rZ(_rWkFoc$h47D!lbn3*WquXf+aXvMHuI-mBg_K6*2Hvn3jCPAZ@*|&=NL?X>fE+1jQg>OZtO7mkYZ&yhelk$A6Yk5 zPV&C=dVF20tSWn|vQ6GlCYTy)Zx4^I)uCgT-x}rhvIu-rOkFdsE&p5Q3~puRUW-)E z({n~SmWcE%T8rVV(kH*gOJvk#Sd^aqYGNk5n9(H>zC{E6Miu2yuU6N5*4^3GY$BH# z>anNOD)U){9_lZC76njNW%8~zpMxD#=Lc^4C*esZj%8N5tAWVZDjtvbQc6+E4N>dw zS=F{b+8^uWXy04+=Wta)l$8whTjk*>?nw^g(^#cbUP6E9WaDZ5` zLJ0ukAosaGoU8S9Vm7r7s5ub#tzD&3#H`q0NMgltCVv0OE=O~;Q^(OGuBr2x+#jpr zE4gwNz}FkY7)Q!RFBk1!3%)p<8g{M?)>zU+(#{3Y9vXG#>>3R+>*4!>_qazL-(Swb zLvpYOqNvp=(*EZ}CZvDUaB0txE@`MTEnW*ZU`++9Wpr`5% zOVmCh{6{KmIguDMOxh7S6`~(}tr&l|%08(+6`}D(qa%C>Hwf;Mc!i4*F|uj^qWb_z zJ=dDe>kL;3?mEDw+h|X6o>wUXKnZjY7RB8`Or(q)sB0Y+)0CxZ?$cz*+(7N)H$0!l z#Oqkr3eR??_FfV*09S-OAX5Ora@7|c9$7HXzlX>S$2J{75r~dFN7lmQ(Bpmc^)YD0 z>ZHl=j;#!`4(c>&-a`$fZm`4)+fnzMh?AfECJ6>+06vF zAW9g?xm2Su*PwgLLRj_^em1hdQ3in(6!Bas20Fn}mViOYjT5AJFDl)2X8o=7n}L_6 zp(a7_$VgN{FdKM4s|siApo~{KGerT%nPE%zue^dP9`28zdr;JOhImm;Z0&aQ)*RXK zES#Qk>9{vlE@4PF4p-0wI8%xm%kfU2`%JT8Mb8QPKO3k~02abfJq@svS(E#!J5ztr zkJPF(TFH>YS`6Us4oAqheU#B}*16If8v!}mwt|63Dbmlvi0aHk`0N2sQi9*Ge6=zlZss>&bZk0dqW3gW&&KtN;95hTQEBn&iL#z<-axe~-Zb c=Ogfpv3l)=B2w154g-8-B^4#A#Epag2R)1AMgRZ+ diff --git a/docs/reference/transform/images/transform-rule.png b/docs/reference/transform/images/transform-rule.png deleted file mode 100644 index c43dd6c1be9297ca4ad2e98b7b71370aa34f8424..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 83160 zcmeFZcT|&0`!1{~qGFe#RFw@%7wH{AK&6WGDj>Z{M_NQTqEu0uAWftNLZpNaQKU!> zEd)parH2-QP(#QW_CCLLUc|q@^_{ccKUgb^$uoIo=C0R$-Sb*oQ}y&orjv&b9XfsQ 
z?j4;&hmN8T9Xir}{22I;9}MYx=+I$j2PGx#drC@Iv_0Hx9h_|r9l8VNkPf zopZ)z#jDMV{MCu1CAY4CTF<9P2J4{bIK$4~Fi_`s7s*?dYG$*^s`SK?P^ zbuuzt?h?D!=hoeuyoBKof%jz1ajC)OQNxR)M&pF7kW1XNdQo*vw?r+Ip1~9}(~}fe z=%2Rz(4Nehv`S3poTlgx6AUH6K5_k)aM zFU*w}zWv>RrGBWH`G}R?KIugwOUA=Px3?g{$N;SZBvpHqX0B&npBU2@o8y=2mK@SJ zA2pm1A%r^SCH!Uay{ILmJjFWgNRjxXLQQXDSIA-KT#J#ieEw3f{Sr2Y_iQyZ4)K7` z#}6G2b2xMqd^!yNn84qmLsS`5|MQ=t=!_%(^Z7{o-k|S~?aD)k6b{|Hb6eN%@WKdn zoGF@2^UFf!>xC=lu2$C2`}$(2n!;;pOk$~&PtsA8Xq!$d)6(&tY2rR}_5Sh0R3Yb3 zl#vs{M(>9fxJpw-hETG0!dirY4NrF2+%eCw8W%2GPCBZi$Io3+cy{RUe|(tZ?=Bxz zm3p)9pZ|U>#j%!M8kW(F^G;g|{HZB(n|)J^8sc{|hrP0VE&oHd{}E$_`HM+GQZP{;uHoBD_ZD#UH0 z&jY1)F_p_N3#_{&s~MZ@T|1YRfB|9_y z$fc6R=K;S9=JB#yA#~A;K%j)|e;FtrK|ooQke3;@3~tZNjqtkGE~A7<&ZYXYpIQ^6 zbe%jr82|eHyvtleULJM42c3I`plis!>FOV2coB+NvNN1N@w=(k;L#SgaKe|=lA9jH=82Lu*qIvU{TQ$@#OxJClW61tXOU5S^l!%~&I0q_&*4x=IQ==O zo-@Yr((f|xOu_oZaWs>fe%(P?qvyRz&be!Ut#^5%r3JFPs#v?Wjum?J<;x|BCy$7W zSi$CKR%Xu*Aywki2(LdeNm$?fY4+fg7i&P&?OZ-edtRoB;=O6v@OR~Mj)@1#zmwM+ z-^;l^Vh2x7f&FgP*68vEhvdUfZAmTF5!l`5VHGU~N-z80aab)P1|pTt$Xyv|3#O;y zH+nSq*|K`ze9(?g@!WVyPe zNVv+yb)-fqnS55}l9tSt?#^mRfYovnL(25Fu?aP|HrA&mHC!xU^yzeTqIqD^tQ_+J zdE&pFVa3IBH)L3#X30E3fq%?hp0eY)P(4rhNJzAp7T@B3@dYIx!>7a#J)k4%@*&|T z(7S&4spW0R*o~UW&u+-5RG(q~J*!`Wfhit5{seCA^O?h|g0+`0rldswjW0|Ea0Y2Y znNnf>Fptngq_F04tw_&T4e>FlrU{ncbqZKRsiVd_;^e-_k4H_m3I} z=B;mDq!bCCQX~6Ko)2phQe{8Ch{d?NhMi%x+8FVP>J%H1D91V0%wix4twqRHf^Umu z7@MgJ1!y2Up^g%aaB;|X!TK!M$Obd-$%5-xA4-NfMs6FvxloF-?$3MB11#L29YNMQ zm2ZJpuQ`UDow9|Zt=gtJyUTOz)TMLZ!YFT2VnsUBBnh>vjm^w1msTIM@1`fLOeK=X z(HQd>;2&p-gr5BPvcRU=!s6+L3Vo%ng(ry?-n(X7%8(m~LQ_=0pb^gCDQc7Cy#t?87CN!c^3+Zqgq#!S2*{?TV;9w9%H z+KS!%s}d(h(YN<+E|XG*Ph0;b1BXT_LVb)JjY{~=Iy#9I`HDG+|21)Da)>RD9G=O=jnAen9F% ziaCB{)wjkb$osC>`~YIuJI5#6lx%7sE59poO;d8~@3$2aUbEeK_yl7B% zmTs|M_`7jOxsWtNxTQk24Pn1uKm3-n6V=#;WCQd()9WAx9*|B}hrb3~kt5mD(+%6D zLwRNH6V3Dkxn(z97b=0bLwlPiJ9w3LR=i#ntb7t4VqBj_H%N9x1x*x|qS~Wwwz~&s1&HlqN86A94(TyR_C;u*-sVt$4^9V!S4Oue97e zVB~c1PnDFW6t_CFAUlpi4Gnqm2A4QQTfz_Xfnx=~*>(}_4COG#n8)yk1A4uA(5_Tz 
zjQ3qwP>PGb|0iQ9KZSM2$3gA#OB6fUc}MS5(p&+&i7Ul5o)$hQRn%Y4>{=^Hi&-xc z-c++}y=<3e=HX?QUtQp;1rJ}UZFh~Wx5FfHQYX={2Kt%C1rb~w?=wwRpa-xmKorD9 zb1|e3FV{Wmc+)g#%IP-njT0sNJ;jyl-5)I|J3Fg__kFDWSzTvRefX-7{n3a4Q;syh zomickEMNF2=LQGb9yPTtrL#I5KI+#8b0v41>3(JQ(jC9<-+Tf1IR#_{3|- zhS7kws+VRmMsoV^xA5>$k9n8kIVi6|fpBNC5vGwQ(R3$l(7%A+D(AgzTWX?WC2vu! zlt9f+o8y|U+1BPnjCH&om)T5GR!oOV5o;`#Jhre~J`y|w4P!{?t5_NhF7Tf<@YL_| zMAc+G2`movL`e#-4|~Qm{Cxtsv!dpcZqO)-F}GOwg{#<*+nLQW3@mO6UlPPLS8rFD zV(dHojiCY7$$2UJV-vcI_C1-#l*32D5!@9pVDUINMAxD(YDtKlk$OUz9W37bUPZIJ z^{Cb4K1bhD?L@hvU!43+YL~Neeinr_ODf8$mDw0BB)go9?cxAGlofMDeG_T~B_c1E#;v{WO%y(Z{70{QYqF)v|`?{90D;r`k?h_wcHbh)PSWf6Pr z)Ul5>GfUDXm$eLFKDLPhX5;-2HQQ`S3k-}qKgS(OE4-A=7WqSzJ6cKol!z4o)|V|Wx1<-A3y!o)yCK<*kRWXAwG^LPVngZjccqOMRtOo% z=PXq`T)Y)u?t*Rp;Mw)w0=6doSIVgj+*D>5JHOFXJ!!#+SG&Of*I^Q+e)v^Al(bY& z8if|vn5F5+C$3pV=^Ckdx&3t#y+8d6;XYY580+Xjr)OpZl1T4G*SA3qC4Y0AkuU6W zA`@Pmz5nusL)@^bXQ$rGRss4_vEJGbor{v69$1;xbiE~|_>Mk>rs){6$p;61ihXug z=9l&-0}ZwK6BoJShDFg>i@@dVw==F)P1Bqk*{t{!zxl#a{T_@}-|;&3;7uXSk`%AQ zkFIT^=aGUB0$~**8~EpwANL!S9|j@S@A7Rs;+|)Uq*&*>{eAkox!Q+O8}x#S^~2=B zN3$3uwbAJa!8(=*zH+Zt^26a}Z&vRa6sz1$*e0_oY1wWbKaOluJ= zNmfFXyF@@}VRLmku@9s4UY^RAJt?4J{$zKHu{jljtSJ_d)>z9il3^|l+#vhf!0YA6 zrt1+2RSgNV=C)Mm^ZP=_kYguwo%0fa@{*FkVcu{u17VYkG% zcg24m#w_a}xBCS0LLh+^S7g@beS2l_aUbk{@HbfI#^|M%#N@b4Q%tgiyHynwn&)3> zpzG^+YF}C*5~9$@DHYY71PrNcQQgwZx1tuoTYBzO^oa>L3+I+^F)~co({=n+IvLX- z)q@TX>tG-U^-inVvP~JNLKdaU2I!xq2vpC;tex!d6>r}&sD>V`oF2#+xplw45_4{_ zO8ayK3B&q%(;QY_7May(uEF|6-{Ex)jB_qtSHEOor-6)Q(wo_J7#d0*$!+Sa+9zS% z{?x`^dF9i|1(?fUa2qkCiEgDck;;r_pGIW^XHtFRWy(nlrQ&{*CarD3BSJGDJmX{@ zzl#!&WG+JHOzys1m>q*C$Pl04AMqhva#PD=VCA@L2}ZVfTwZM;nG!V7xnuQiN8lzU zaCwwO;q`|JYszyb7(2FpBpGiv2a}KW5B)7zKWhTsZ)C`LBp2Pewg3I3sE5lZZGC6E zulkS{1-!+6R)YK_Qwf>BnzYj;Z|3e@OYW-KLK`E2a{0hq`l>Sx;Jss^0UIM5);pWk zs)Fc^?I2RH(MtEznfKCZ!E+Yw!}-@+N`M(6COe^T*=ys6BnsFJil#&kE9b8O94T=a zeXZPg30E~4RKGTW-GeUPl7KExy#+yL*4vTxpIK-2M^@T;8OkxbYNv;MQefjRnca$p za~#{#9!$|fm(So9g>^cf7U0lwSD6MnX< zr!L^sUPGx 
zwlv^|1A3a0Ymqo#g3j1pdae?A={&_3YcX+_(Od`PU^8UUx@2=$=fpk@KbLn++jr%` z)R*a6za>e#W?I2oU9U0&G)a>qD95J^)q0)P0s$=%$nEjTRzwiL?sk?Ba-EST{`dzS zC|W>&KWy}wI6O~@sv*?FA7>=EK8YEDm{g)d8O`%Pn|fXi)#i4_Nibg(>r)q=@_}rO z_`2`qlZ%kLtm&WV0kzH?$+5syed(g@yA-oy*9Wolvd_2Mg^{n%4(hP?~dtS9#Jy3dRe1pGCJlT<1-9IRfwy`JdM zQoA~^_*TGkmzr^ExA*V!*<8*y`WAHya^8*v_9 z_}l9zDnz8^W$-kF^YXD{?7kXTwHLRrT&{Txw-fV`ocsRGy2|tQbG9gJOip2_?@P zcr-end&*yKuG)vQ27`7b?F*!yDJW6%Z~;>?X0_Tm{yvv2Mr0Vu!WpG4Jw40O{s2uk ze^>bZsMmzW9DN{K&*)_T5~XDXt&%?#gXE#j@7;ZwATsCT*mgNLJ((G%pYAypv{7dq z=2E--(tTN(v4429`*aPV+{tt^Y?aC1IEyviyh(QQ#}Q^z%F~%tSv@kQn^q4Rpv20e zwNJs1&RxjM)8-cysk^`R{oC0_5O3)j)Xd{5n&%3d+6Cn>e{u9fNAEImdTp3NBFG!q zin(cjK{xJ~$gO zw~SA*Z@Pc7k*#xExbw|$_gnVjM5?`LQItqjk(G=3OC4@Jko(C;<_Sf%I8cmR^!)DMH#9^D4FZy@&WpJ6If6p2x@sitqX@vs>Y_6FyjJOR0PtW*CX`!`y1S;%oj9gqrobaneK_suy{7I zR3Vr8=-5OPepb3qSbg~tB1D|e93n$lCz@j-#9W5&q&p9EnAiDF*_Nuqqct`5oYSd} zxux_@uI-0n?X$BzSsX!kA-h}C5pqL(SyKWRxzt|)zuStKArb0hG*hLfiV{`2>1pHd zn^8zVzgc&Gxh*s~54Rrt?3Sezw%&tq!px4;_!7I+dp9PGt+sdqd*Z{xe7%PX8W zw_lz1axWeWg*^SOpu*6DPuVb>gT`Oh^*%TfF9=EtJ8}bm01$7{9!IyGf=ca=0l-@T z!2`&0FuT*$gYW*H`fo>@V|uIyN2b7ZJjfWt!76D+2PlQ!dI62zpF!>wNmsyh@nbE< z2f*1Y+O~%?jo;taQ{At?U|Y1ez;tZ{YyRItZLs(orO&is9U0Ky-~up`7YL@C+9+n& zpE>Oa-aw8vT>L~%4;JBRt@z11E%ya)ztZ$Ixv}N$9=B-AKkH6 z26jxpe<=C>K@-OYtkqK%wBmu0?r=~Kgyofr#ve4MGGM+H325lS5qeM$G=5;`#PhGx z{zsId@D$89<*rM6aD?~5IVj>|*MRPURfVh^2d-lxkKy2m{$E#gos?5y`3GIm)Op~f zS|0Ep9D&_c$QuZkGI{t1JIevwVN90sAGSRf7-xF#@|-_dbX8!{*;Em~7Z8sgFS`ad zmUqrs<6sf?MhuwmzXt_`=f4N_zm*~5zZ&(wO-0=Q#Tr%SmhTd@R5de^lj7DnvI{Du zOpF#@x4l4h+um~zWKtnK@rjZ&_DbXdpQD{|@Te%1r zHm_skcK><_=oZyox73L51BwhiqtIDui2~@ZRDkWu>~6jj@&N>GDG&=mI$X+mu=$d2`~781WOp%1cDn)WCM zkfb$vwDVtV7bx}WxcTjr=vl-c2my_IBn!?O6W2O^vEe5#46oBx&zX!uC=mDl zo|cv{_8{H)IJ$&XpSgR*GXx*OJs{g+e1pWUYX~9!q!}Na(1d#LR?U34Oj;^6F!f#& zb^oah;bTc>(5b`hY=!TEPX#VoyfL!tek@Ok?#lr4!wgqo-F#omn?7+ZbF*+aGbn(G z9dHH71$&25wP_VW9~mRDlC&AT`K!U-jFvrc^MP~8kUoN{ct?_ZP--i#Z(@|RgmQzf z1&rUzWss#1g}b8nO5897e!VwyvXk8$6IeGSOy7bDUp{u2hVgWNZ-_7MsR6?wHpYoy 
zR!)P($GitmQsbJoQ08#Ffw}%6<^*cJ+%$g`OWC?2BdBRosJUJ{fcO|3CWqn;OquHZ z*ZCHzm#Udhwu>^322dcyUv^eo?p#X%sG$%xxI3 z(wcj6K0SEbXrXM?HFS+W!9rlyAIrZy$U%vGrzKVYnY>HG7P(iZZK$R`=*V;E^OAgz zGaC72;wt(zrOAaKe#o;2N{9{lWewVVuGxixpTdvG#$7Mm|F} z<1Or}ck1hjjpC#JYB8ap#k zG3pp$2ny)5Bst1QH_NS9Rf?XWCOmA^bAz>~%5w%+Zc{?Y_Xe~RiS^BQ9`oX3D-ibu zAypN?DLFY~xUVSkkM`O#?Gb7&V9W!S&TGjm6E;z+f|JB(`QGs?(y@ znEHIWhW{^OA4(T1?mSqbtb{Q09mzkJmwmB(#An_y6M*|B^?Qt%Vxo?k3e!kJ+gbUY zH)Hf}^;`OuTNOh=*#am zgszE<7n)QQg|3y|Y+-q4N8L$Cx@oCcyE5mrh5m<+1qnT^%uwIDWRjZ$qi3}63uf`(d>~nEmmVNzn_lPhD&ChoBhMZ#@{ynVNnh!w^G>Jh!qQdL z*_y;;2_uLDp&3jgV_Xr@AA({1aN zbvcs$#MwcQg2m*?_j~ZX=a_nX%vObGOSJ%G9Vp7@M@g8s`?m8DY*~U^;)hty@o42V{i`onY*SL(YV+pSWx7Rz( zd=|aMBV{2K0mOwN4)A!Y-^!R4y=$%(!=d%Lq0-N;AqU(@2L33Il&)H2?uS4{88T70 z2Tplr42|HgzU%yX*MC{JTs|H{A*n10LVk=7iMQ{S`*$3(C_aQy*`;mLV)2@oG=oYe zAOwv$npJ>;S&K4bs)bX{=|9*n4((z%WO~as$?|3898`YFpnwd@+G0hu<0Uxza;S9g z-_%&Q3KyK$gtwl!YS@{m^xNx8BhtVTM-Mu-cNUmQBTFd3q~-~Vzr=L9Tm25#AE>b! zeBE3mpTS11dfU=k9B_-JJ#%4c%Fe3tNnDQay$vGTK*@@jB-G=b@IFg5hOMVQ zXo4o7uV$p^H}LoPIr}A$B2AAfEyWz%xuFy4l%Iy3(T%6~ft3G#G0y~K6csUlAXFZ8 z1i6)C?BD^f56~@FkZg>|tshKK&gF3eSWemM$R8q&Y5<9OlLpleD1t&p0?1GpUjN}R zj?Mu95WQ&qTP0vm+Cz-Nl8yg5=l}sG_WzGXDUgg!j5lAtzIeS^Fsr#K?}4>-N9D4R zrD9_vG3E~$usxN_aCsC!)9-|>FMjmQUGK^EB-!`nXk?kW`5WN4XfwGq2`M*P)~>53 zUE&C4V&w>CV~xj}O}){f`F+1LmZ^1qh^$7YT1~%~QpT3YCl2V|$rE!VZf?46es6eH z7R`!PZaK3EkYFDmSJ?1{%4P`B?118FLqbG;IN&Nz2>C2*UJ~{(jl3)@=j*}rRm=$^ zP!;R628V624~;7!*i|2A^#wDAunH031AUXeS%wtyV)ghH*t&A)%;Xa#sfH4bGx)S8=+^A(e zo@miL$t2@caoOsn04vYOWqzM^RL{`41_ET06LW%FJH=*NlL`^;_m}AN-&7-yE=1(P z?KWo}pLh4S12K6$6FeWE@{8$`Bvam8DJc`KTX`>~`Ui9)jS9cm5cl&F{-jFu2N6fu z(@_U90jm7c5&z8>RPs}tY_?w}|KWH8se4}`iiX6|I``gdw)q(QJH%!h%W2QQ>0Ehl zyQzB+2uQ9_XyI#d7_-!&i%0n+Z?Iy5eEDzbGH=bdEJ~Fz=AlRG4ys^si~=}TPw{u) z2~?`+j;8VBJTb16x1#aUD*2+dEvfriBf$P8X;N+RT~A+B&&A$ow=sE~mylzv(NvIb z^YobRfo0fV2nkHsn|tzOlvganWoY*MYdQ{CF{FsY$REsHr@@uzx304uz?Z3p+$_ysUC zqQnxhgLW%U512uH&G>^Bvg{bJbxyN3E`I>!3&4DZA9xOe^7f~JZ5rrqAN~XUuMg%k 
zlGOV{7YEZG0+yMGi2Y4afx#^V^Obl&f1_IN6&geVVr8r(=*0261$d@lv{%xY(!G3e z#Oa*<7!y-|)D->Z&oK6hDDlxqkYR}g?>>D@T5x!PS-E)Sw=v?W|JoqJ^3ndCB@&)n21+KPX{+FR#0T+wkG>#M?qk` z@?9yJv{)^_Pj>8&9?l%5(+2fZGVc5y{h~vBOy;3Pb%}2bv$((+>7ArYk}HXdN_I(^ zY8@gIk;StgX&K8A)1YYYCpqf33IoxaE6^rD!XxaZzMbAbp#?>zyt*X7fm#Nh*Mr1p z^f+@CSLF#?O1o)kwX^zF)dMT-yA(&(H_uxicB1eTOSNGG9FO)nJLAx2+Ab$Y0=vJy zQ(<=byj$(QRF$jsbGB`i{FR2Co(f@;XeOm8^E1h`J4vw|)cJIMCJ+MGe-R+^%m!vb zF0NbfiZMD`zMJfq_pSFzHxISCZZ}U&&>R!lef=~daL9I^pO#~z*Ix7hFxs8GCrbGh zxy7)F<|x7b&)tf&bP4 z7K4;QYpv$o^k9Zkl$^;zJxL%|#j9?6ZhmY4d$l}>GB+Fp>BxKN|U4tX&NKw(-x$R%yVm#SRDBJUHA5~){OXj;0rNx8J7CKg~ zS2J5LBo!}z4P&Pl8}pyHD4p+&HL-BaZ?Yz@VLJH@$k#O-e-VXIP0x0ZSOX;E4m?e^ z1VhdEy^}6k?w5ICzq56+0}-U=5s9}7u3A7?NegwVg%zVxv2rgV6B-mo)Wp zyf5lxmGZjbBi&cK2$Jb`v(?3gh`ri(Dmim;rn4~!)>rt9>?~vrhB3xY&a#yOjUq_9uj=*TqQ6IC>jI9a^WApz) zSLdXE3r035m7CvZn7eC%&?#?o1ic~?d`C=Nw2~KZA^Gc;3`KPVh&?Ty?EG5n^MrSy z)YsEJ^bwvQpfxkp^0x5D>n-arB=qy4$TOjpQwi9K3gig)8To~bTS!={+t}s1NwyZC<6q$Yrg-vgFmmiGdP&}5(SqV{ zXZmEtWnkV36ZCIs7`k&@)h6?Y6?%QfV0Sr&#ItsQB`a6Q?)IpLcaQ_uKQ=(H{_If+H0XF*!BN&QS3 zcRt|>H4or}huEBDVz;)b2Rt>u`wLTQpHVIr-<_MJn z$cXh)rO=Q){Gq07fpJCxRrOkFtN|~q%qp`8z;4%(e{btMeSeJkITcAN=Qr&{PPzqpqc$324PK;Vubr+RWgt^b1Z!#r{Pj ztGXv1^{%b30np*`&%v@cilxvH*=+8z;iHh&AQMDdqIoa^fHPu+ zrR&KL*3dsisLF8b_-$}j&{3zVgj@ur&6|C0Uo;xhOb;q>uLz2?{FfNb{4`xrb5720 zNE%mr<7_D%F(;*_XTMq3ES5t>OQxDrTs!NSl6gK9_`(b8g+1U@Mg>tNB)sz+G(-)h>RNq%e&f+S5Fyi0h7ri~>cu9AO@~Y*U5t?<)tbTQ|P>_0_f8JZn#nLDP~6DWDxeR^QOl;};3En>4+?u92)E9`ykmv|(0? 
ziJ8gwpA+H7Yz)26iJ+5wYD|LPjTZyaC-!!9%KzL2@P86z1>wD;?qK!Yw4`Z?mUcDa z)mBRcfaPhc7+IgrsQ#*j5?X5jS++)};HdF>^~52Bmt}#|SkB&WI`7Q0w`cH}wGd zoHC$4qm^md>DCI1jT)5VqkX+HmCRSmtJgbQM7MyZ&znM#K^L|`K)g`hfZt%|JBv(> z!(%_gSd+k{lK6nfyzrfrLA&&^se)HuUxd*mv={#(1#LvSRE(4Wp_l3t?}?^}76Ql; z(j!i$Ycm=84%@kb6uGq!_b^%zsA~pbGfi*HMs42bq<%?2Qf|9SlHb*{`{IgKn|OHP zaRCYILv}HW842rJQj|AkNxbYqM2Fzwo?w*JYMdkJ($?NSh8-Q+SYDZG=le^*&f$CM z!>)NvCrbRPdB5`i*=G=VNAW)TWc)s;Vi@XmDc(Fl4Wfr^(O+Hh^dguK5|tS$L7aLk zbPY-igg~#%p)Y4H2j}5W7K$3a&*8njMzC;!zUdFv!bmR^0WTER)pG?@8i}`d+v#<%)KK zRkz)b<*L1F)1C2E&vyzGN;&R_wc&rbj#M{C3U?VU*6!R7N>ANcp4v_w*yY6p<%R9b zQJ5@14`25trf`t=)1y4cM-S3p=KSE&Fu&)F~QO+PgV*-w(f&Y2VSJ%48fzF9;x3F{Z6UTD$@*F2FgOA)qO{4_1Z^CEJ>b) z)v)Q6bOGcI@p0JndYO5f2(=Bj^Zl&zVU(M-!}*{s6WOp0D0t(msDhZs?F4mUqAFhg1N6;)FwK_Yi*0$xOm9^4dMn&~N`kqK@?!(PPROrq@MX$LQLsg@g=nypi$`a2yJ9iu$Wh(BZ`BTNQxVOE_&bt(H zr952<{V3cxAX8sFFn{trwmfxb&a-nq*X^A~eq2eqmpM5tmD?;UlhlreoUSW67(>LX zh>%k&zVPO}rizwX*>o(G(sxnu_g+nOF8{teq`J;ruu#_LQ$_I49#URq|D}bwkJ-Vp z`cCbxlgL1Ki+teg{GYdRC@py{wl$G#LWcLE)YI`0IuWs!xAoNe82hhYDDSAo3o11KjJaNLVXg}!r-%B*>kSpO7c3kG1;j|e+E7M z-fl^1Ln|hAY8HuiA1m6A*PjN&K~bDD6eU4gkf)p zqfHCm9@-1Na<`e*B!z&(`b@6T{gz19i!`j8q-|XT z30q*gc=^t>Sy;NHoG=YfI~^=RBdgmAwiU^c94uI~@X_B)UPJDUpc|6x zx>tmdGZiHHjPB(Tb^dFSL|k^sd{Ka?qhAU#J)dmmn9o`Auy2gW3p$3oe?(|GQA9VN zYwjx(gy6XV^m1)mglRrXe5}okpUh53ajoV`L3<<^8&CjT@8sfCc-o;~=VWxR#kLq) z7`1b~OJcPaH?_Mqc=7ZZw10=B-A{ee+Pr~XUILfp_rxK)^mRhj)Vw1eDeF)C9-4v= zaHO>71m2U8i5vsrwP=K8*~bYE%xEUlc_f(_!BJA>r1OACpFG7!$YDk0HiT3fh1o z&7YXRiW9lDC_fY#dsco0ZrqZcVfpPeCSou9u5hy#3sFqIlX=xU#4nI>rX-rK_l(b$2pfk&;<4H!>5mP%Ue?p zi^xFP;QbNy7ND5sbgOO|ag06P7v4Z1Vg1nYnihNeKR z&&X|lm%vX7d%n9^yWITgfu|nt_SQ`LM1k|)x;*(KdKNQ<{-*g;O$;V~k(vjoszwO) zq+nbyY0QZ=`FXh~DrLub!UyY`8UQXU2!VMioitsY0^!>$LF7WZ1^-UY2|7)wt;ot> zAJwcA%{C%II+^#`tw))WoA2z6TfDQNPIxJCXIU%MnyI=QhMhFJ!x>{>)yK>_zV1sHZ<yO`wOEKrQkVb=KBF;~;)3m-7GM)>_~O7#3Xl%Jc%3X)`7fxnqRf8@>v(hWct zvfAWNx8NY27y!*Fm`O(s_|v75)icF7kM=u{FY@OYJYx!9(mKWXVSS%;WErU2F9KZ)sB(~wmA@N^wkr4Sc^}x-aEmaqL#d~{?e+%5| 
zbW1=9HZNaKV~}3@Y_?9Wo$?6U>QPO9d>Hp4l(0>I#`#ObNnSn^38~QFZ($I2&;b(- z_C$C(Eug5Oe!BJzm))Rymh}Ux`2|~2<(y@npP#ED`Y|P zwpJHCu@A9(wEd50(WKbIyR)_VoYG!W-zjGDIApY78Ne=+itzbekB2?}28O{^gVr%-5lbn(&2&7bKKRgL zR6Whq8I7x&+XY6w>bam@!1%>!r-6sB=L_KVjvqYQmaEQ(<_C$7>_9kEp0+m4v~&og zbO({3mfXUS;&vfMX4|hQ#;tNWq4H;Q!xFAR#&e@J=pjF~%IQ?HaGlh9F8S{8)G zYx!eB`=qX0=IF3k4A#Ehw-}&?P5j~(jNC_|#(RK?H^RJKjjzfN=ik0o{CM32>;^yO z$ycLdz`B4|6*$kQO8a)HyiL7GB2@TVGJJ=|=boTjADZ!XZ=z?kD~;_k_BL>>vTiwp zD5uB1DRiyfoilVnw~o;i_?v>(3p_hTo5%)b6Nj~fYaBp>v|K2nv)9h-WATLG zdAhk$rUU}E;5PBMEy!){sQzVxwMC!o^hX-{0BYH69K4w z_FCTh0SNWDNHwSe^t?9Pmqq*|aT&b;5Q`tk8df?m27XQvp>o~l05pFnL9QJ?7_3v?cQ?`Dt)t20T?8no&OB*m${+aa?CXdF z^S51ngYuB_PL5lQ%j&}GsYaYaOHoeBI}$4|$&dP?b1>f~s((MbknNM7PF=T=%%3bx z-A==@#5jRfkk(x|IFs>H)L7t77D)gIB~0-){$gap2m`@m!&*y$N zz9V7frT$@7uRGk2NAy5N^>|1edgs?f9{6VOv%I*eG8gQM-L86SAD5G9gyu(`B(d?_ z2URpMjR9_lqb4oU{E%a>!#4$0IewGq5Pja8px(StYKClVJn`;hu958$i!iD`-%1AD z!js@Xp7>xQc=6MTs%Qsnk%rgY;8iIf`$XD79?rwCq+Cs<-=_<|cu783F_KmKYV%{u z;aMST>jwBcwQ>h(yvw()R34{uy2^h$;()!vk)JzS@`Kf+@Q%P)pgp zyRcW>R#sQn?n7EBA_Bju#N2b3t-SP(9M%53{KU|?Z#y@??Fj7;@cz-!Hr_XK?eU`w z&SOO`>+6q5v;Ewpf*)F5dZu~$qb;{xem@Lwu#U84;GQWb@uq`>^utsdfI;rWWM{As z6#Um(T>;jL+B+xbfEXSZi2~Nj;e*+Ml958jb6~CL```bmbS(m`RgSw4{r-y79tqN)}|5yC}x8ecL z-G6oJUj_O9%24PQeQA7saU!CgY}Jue9FP6Z&|k97KWk1~A!Xm4A+Z%Fd~*J@QRUS1 zd=@Cvo|bGX{>`W<@LsqCT6b%IjXN#3Ik`1=yVXbaST{Fz2zOqRonBy@hu5g^A#pl; zL{2WoDxwzTwU*pGGe`m><;^_wBih89qnOzfO@m!NJ$jNZ>-*YVcE;HQ>0$91ICXi7 z%&j#4?f3a5cT;l@So`o3;PN3x_oj5WaBx$Y9+vzqhT|GJTidDJsHW*e3kyqW=KyS2 zsIR(rqy6ymG?vg3DeU!HL)^SV-}}h1;CO6;BGO9`FK2xJ^4<+P>)9(4G4dzS#`jyQ zw0?I5U*e9AKHYe+xoWWse=@kU+R4f*fK>0ewW=<(fF+D?FfwX~`M46W=IBztWN;^Hl`xT{#C(gTeu!MQp`)E{tQ??=(V%t)szhqv51>4G6EU z-xq>!%vj-PbxT=(=c=uj?lWd*8-*!PCB@$^4$r&A^l3_cY&EAgvsw=2Aai#rWhis0 zgRC#SiP5Am;StDKXtB1PFBKT1bDk)acf&0xe&F&)>+T<4d&*sEK$nFG63zW3x6fWW2dYVT3Tu1a_HMfh4`WMXA;O3$^jY9C8FUwUuQ#BNk4V~l zr)3Q=_^NrpW~nW1YvV=aZDbUPw*BT_^A3TFUV}gWYMWWdI_vXlw~M=DQY&7=OHD!F 
zgM8HCBQz~0L-WbJd`Pcg-$3HB-7;vRXrZF*NC{{XVqG1Yv6wE;e&Ev1ml}1eV0n1< z^TK9K>8jk;LWHTTb*X`tMIe3+=K+$fMn&4LL=(0hG^VG*jWRpnl3?iZTGkXgp=CztpUn6}X>yD9JU!R3*$}f%rdU&8|77VS6!veI^E*1*9Ic4^PIq_p0s1kzAp%VEj2m(>4C9V{p%Tb^!zyB%I1TQh4FAz(;Ex9_lKpE!9&_6b2^ z>67O~A|SYg^6fyGpd;%Q0j1CIq(Mt;=fvv;y)H(J_quMoJJL(RSH!S>b%U-c8mlSe z;7%Pn-^Uz3Ryc!Dis#V+L|`QVlkXeAl$`?xeB%^X%vRABd*J&&M5 zcxfYdk9b&`ATB}+Kb?0bE!04Iz@^0N;2OYU5?jDbaKQ)L-|1)eR3V@zM|pb_!qYB* zXrzM=W-5^k{W7;aTIwiYTdFSeJEakSSyQ^G9bK^HQK6-=wyRyT20!1lr1^1?meuQQb|*X9j*Kh25yBkj^4O+x6}yXp@x4;NE? zTLP%zt~F~6_`%FN770HE^8lAZytyRCaptUi@|75wm5jI>X~l1w)i}%(-j~+Q`qmJohTABZ-%;mRHN$>o> z*n7*cDBHb#R1gf1Fc9emX(gpWL_kVG8U`h$m1c%UMMXloLAr+;kQ@+cn4ud+siB4e zhR%Jn{%gJOvmQA1mwoL0VIR*I4s@9NzUtTK@4N=+?SRy6!ws*Q7h&9EPcGpRkjwt% zhkxDO-tQ4^`8FT<7g)8SItnqzmHpz{flA_o{esrv>JXbHuJ0{h?v|2TBP%HtL!-B8 zL^>f1sukcQ?IO)SB0p<#yVh>8&5d8s!Vg5?X>h&OJADV=jiHzNbuJ%V5iQ2!hEf<) zQ`5p@c=yazBz!M{ViA1uLAPgy=^$6}&OiBf&qp_=WC7QjNY^RX)5uE7%F-QIX&AG@m(D`{@~p=nHZ@TM%SzW6 z{Vw7E^#{3;zj5uZxM<;OUq*MwjglAj1{NIdBWqGVa>7y`y3oXAzl1g0Fg`l*gWg%B zK~ug$aI9eykCYp;H3CUhKV6|XsgTUK#g+dtegd=1U8CQuQe`*AXIN<`I-u=Ic6o>x zJmr&d`}u%(vR2WA0%oq9pWUz^*(N1u83UHn2oJK$6{$M!J4I}3f;`tNakYqgL{-}NPI@-MdTrH%yi_< zJ9UFRCFJ*)>fYRD_pvA36zZL)c{z~c!Umj|hCG{Sv-%x?>kvyo>gr*A44!tbS%Oi^ z=)?aoctsjBL7Ol@CATaUor5|d6PDbCBX{rQRtq$b%HM7%VZ7ictCUA~x~&HCSR0*KAE zV!z^S>zp2C!*b&dP2`Za6lP9hOADtVl~w0GiiK&mXceW6fAkgEr|YtB1&(5}in^3A z4x)44WEi|4Y&)VH?2L3O?h8p*49WppBqYCo@4>+>H6`JcOU%M!coM{8P&U~u4DT>V zY7%_3WOT2I<^|=;!=4*`t3$PpmHt9VkzHEDqW$^KeSQwZ46rLwrLNMkcH2@}P3;@{ zBk6AE$fd+$*?ben`t4AlsAH=G6@f;M%Kb&!|0VqXk`(trg>hOZU_r7`PKqO$T?r1_ zsr6M0or*WFQOyyrp293*R>g)Z2Es@q2UASi5-aVy7D=)#7T5c`H9Ky#6s9HA|B$sr z&$F6z^?tbK!Y78xl$v(Kj-k7&7a)=mI~jbrA_qew_4qcJanOCb@v7+YoFNfz_ra+0 zgk)P@+TUaCIq_yG)(!emvS>@3>O?3h#dddj6-?y~s&*}kzG{+H=IDXqU0x6m@!caX z;=A;QI_@3)7Ms2DVI32*t>dv9U;bMHfg>nKo@;rRqc<(;%8%hfV}7*A?pg=M;!T%( zS=nE5bi(5<$(45Nj1StLsEw!d4|~~i+}YC-%40ao-4k3QRr>Xwx`?Z{o9cJ32cp0*Qfk61E)=<{h2Ok 
zvWVB*pQ~B!(23;Z%U4PF2nV-bd6ric6^~bYw~?+)>X$dfCOXzMRjZ3ipU$X2y-{=_ z(pX`Yk>%$a$Nc~eFZ+_St_;Ty5&MH1eOBi8a-sBxf0wekWLYn-q(PbbN>~CqDV0(Q zSDqLbDmo-k|Hoa+%CMPk`~6)s0cm3qs2pKX_Z)4otF{t)B}pjGJ-tIvDZ75vi;}c= z(^YSw5<|tEG%OTMm88`F;{%^Z+ERZpU6xkcuun^M9}NQ9OWytHQk5oUJ5kd zmw-9)M&=;O@Sg{!(lI4VRD64CH5kdPK-A?nFtC!7itgHVUJxT2#0{_1vE$rHX{S7n zdv>#|!fcDC&8)41=Bp^UEhZsUrB{s!()PvF>iGI7{Ka<;&UL-Miag zRQMzM%%)*Fcy!z26a7UhklQ5%Jg1mRixkiAM|%TaD-o7f!Vxnqp}d{H4b|0mT(>pp zO!MWMk?;eAi(bB65qmx{)W{Qt@a`Yon4z><}qYYgFBi$H$ zQ$pwBOd3a$tcy9`aO`4>1^tP~*vV|{u?Q@79wsTM#gGzc(X{jW4^VeQnQQ4)}cEA3;~BrIeEjQ z^j7ND77GTJRZj?6-=(-6J)9~p!k#bHXY;%MW@SJmoz?W6l)NQX ze8RUWQ+TU(e&|b|+64fue%mQIV0)9n>PXe2ITmZzCQs0QO-4Q1#iue)tKl29aQ-13 z$K}8IXLtkw9BKYAykl^uHSDtQQ%4^+GtbQpJSWSH1AXb2%?GYoyBJYDJPzb-SgbB; zS~S=SDs=e6?F#kYXnlJ9!s?bTDaR4K#Fd!4BwOmYkS8g_5WUUu!6f)`!PjwQ1{5=q z!arg%FvW=^&9#1jal36-Xv8_YZ>Gv$tE9dk8h59IqPx%`FE6j`-2l7T+uvz}59A*@ z*F5q=)3lV^8N2*mU1$riq<$iSragT(RCDj=LXWa#UqHPk%to6NbTnF4pE6$>a!y-w z=*kA!t?fDH2&aO-KPJq2zkh?_%^y5uj@RCXRguCCm-~5F7KK)Ums$#Pt@%Z9=|6rJ z%NK7?WstHHHSi}9j7)0f*%BwoUP>o`bg!${7u&`6>yy41vQ=lbpQ>!8rZy}yf5)j? z=@Po^#=J|MWm{is`k_B^exrPG(qc&UId{$y#(g@HO3<=XwR|&YN0Vlw{{d9A6EZ8a z&Xw;pZ#J7IMRtcrFPz>wh0%`X?;c+|e$6$%D__)Et;rGPXnpmretIj4&e)!%C4{H= zs#fa$lhg^$2Z ztMW)voaB6znXxd&3v^B#03*&U@0VskIqbp6uc^@|VQPmA{q$D+2lVf&cl0(Kk+#Bkc19qxX|-Kl^HY_HVfh zzp4N%%#VsY$cw&9E&$B@aW++OPfk z*d6&}i%~I@_FG_wbwpAU7pHX)o0Rd~r<1Vv7c5V>75@GvIyNrO>lP&q%jfH*nv4&1 z0F-L1)vm-CAzDx_mxbeyY4V3J_~7PPEjE00%2$cGs(La!{l@t9AMKCMB3$BOZrHve`eNbP1^;B^KRD*>cBlG)yqcCj{zmre85VG% zLuTDl?$vXndJ^P^5U+GM`9dohXR7_<$wq)?iLB9+m(q5+r;_G1ACSF8QY`fEf1m zb`IV=x5wnNjoQG6#ZH>{&YdAfKUpC_8wG#V$qv0}JjF!hFP)zrVF&+lR!ZGy?JA?{ON-6{La55C#);RdEP26hkXj!8P?1Bx(v5s6GGnyKha! 
z_BTJcD|8P8Sv==uXP+${=qC;GzWn?mb!I@Ys;91cLkypTU{coZY)CQ99YZu!2ZdKnzj!%y8e*jFXt z&Tvf&Oiy!uE}!WC^{pIP)1*<{e2_Dy8L+{>TY~SBV=~+x@fz=}N!&r$H}?((T$PyOWR4}3F6L|s~|x+L_8^#glsgZWBG3{vRJ&a2=svQr9H<{1;x z=)Y$LF8SwDD5*)=Wn;weEA|UM)=&s$efI6=^_#OdcjScaKLh@QB~ia~e8zQw2mU@oHeEj|&7<$vC%w4F5gF3}8@CqHtT#eJYMR}; z3D+yK68S#5iOL_+UKrR-QuJC^yHvcgQ}q!9KhU{#t6P^qqmSa9oUl8JH0mOEz|RjZ zJ6e3QWJ2L|Zcb;O&b3g$cK?3lOnGi207vxNEx;p?3sKg-=&J!|fy*PrB;onfBg+_Y zX4s#bX;NJD*xzR%TBxA=MG?;@M!=i8IT-iy!pA-W+?RmaZsUu_!>j~&Oa#L0+7~_c zpK1QHn`av0pF{hPYy!#y$gTgQg8tF8vW)*26oUWQO;ti^_y2GK{Qts+mNr#~)Vk}J z9+7Pvlpjz#bEEaoj#-k-lPyHO|Gz{0NJ#hb@?fg67bpO2_lt4Bfg z%S9QvPo!swqdvsotqYu_Mg`zQK2yy*7d%$hXsdIQTBuHEOB%*GW{YqDE%z= z?@}QMyD)L|0!SPM$KCmGK1^UFr#nj=WhCdZT`(TM;IqWhugv=wE#u-@;)o8T%6nnr z=njxLVrEykaA|`8=X3FmmCZ%z_}}S3;)wDgC(#8<$0&a0Un)vysa!A~Jj%1g(I@2> z7cE^M6X3OqkH;Ham;<~IY69n?JVSTq6x4&1%1WIW=mx)i)fY27+u;ERAP5SEA>4-iuDtTBZEq zgm(ED?i57bfANzWYmO}h)jxH^Ok*UUnm!Hyy5Lb`WGx|)_K8sy3MXSf`hv5+T_0?1 zY4Myd4LoO8`q;>arG`$X>hv9Ii(+~7Yj_lk+!UeGWlk0h)w?ORZ0zjpiX%_@m60TL z?2+)q<(GM2n9b%anP8H)$PppCuc!jOw_VzZKU?sEO1wZ;{pJNQwZ&W4R>X$DyK9NF zehE{B{2?9+4SSp1kXYS|%maZFFL|XLti_TPMo8?tU9iqVCUZX$zW-Q<$Akx4e;5O_ znX~zzzf$3{P3WmEG9uUJUNYJR0hghnx!~f`yQ>3EQ?OI#&4~&JHZ;tRspeDDxa&&N z(LB5;yrM$rbI3OI1OWNX;rJUOfNRh?7J5#feSiER%d|x+OS=F!e}pr5=H*qVh1jsm z%*<@BSQ0c}oAfLLa9(3@-_$666S?KVJYJwfvZK$GsMa0(4cc(>G=|rZFHyu^KwjT0 z>qUlO=6C(L?{dcnn#D%?P{l)l!f+m`T`<3kI$iHXmAMw&pcNYJEHJDJHFBMw2eo;- zyRD70v8ZauAYBeIQmoV6`~W%R4cj-?PEKW$?x_0ytwQ$U84~fZ#AKKDx#W6miF@)0 zC5V<2va|Yb2Px!Qy*E>g@KG0?EP-K#cVj&TEINg{MiJ#ELDDjlb^Juirl#qVdySW< zj+gI^+3;Xqmb{XyM%~+EPd;^QFPE<`>x$;)s9*b`hCep_wi%{bWTOqx2X>Ub{llzZ zYWm?0Kj`uDh|E;Q5XFMG7NVd~R)X3i1!!qKI^KOcR%xF|RcY`v|79Kz(78hH%6`tf zXxzBIrx+ayO-eI?IivaDkh|AQ#yURfFq$-Y&D2T1!_yH0%LFoiCNWx{>+nn#DA{Kw8BDTkOK`Z^SW-7WkS z+|UT>3xWilXxW`KZ%)FnlVs#QM1eSIyDd#JTHttka;)w>0jqy7kv87yD#(F%@VunVssh|E2umXL zb`UeOw%PHXY;JM^y+@C5It5s?It0=zO$rN)i}lN&PJ5$y;VpM85eak;fd93H?=Yock?MJQDpMTPVKk*32Xa!@g{O_v-j2eNik=jQcUSLpK-Jb_ytQl(-y#5!dlnRGY<^Gy(Br+zSpPd7 
zRF;%*Q)DKhIp|T!0Cn8=&<16QpRsQ{nlZCo@Dvd7(F6i^lbha`y=;C;iz8R_!#Q+zF8}H zH!jf;Olv*mlmMdI&OZ`xY;Xl{%(P^=NKT3u^D7GB=MMlZZV}_l zDX^8E+!d0NtnBQAeC|@W+rBC;YIixhs99jhVqNRqua#E>qJ%CA5a!>Ipwet}7~5cIFk9X8l`S+aPp_5~I0 zeoo1lpB#Wf-!xbq93*z6)Lw9=MDSlr_lU zE$M3`zfG37u2chd(v+lV@KtVdgJ`pdFnP9hYxmf2!7==)l~4tkX#CJ0XHLscqvPUu;pJ{09cJ}YGUOyL&I`NSY%L&&e0A87p{P;n9 zilMt<^JqoT$XBXz^YJaJ9Zp1ndcMqJN7H5sRdy~Xr**k=_jGT^Zn+BeQGbf2_!fUb zg)6{m-VCT|hIF)hM8{~EML~43%hnJsDPOw)yk;nrg&yLl$3 zE`6T6s}&Zut%pxgVJUL@QpNJ>&YM2dWi7Q=bBskUXx7kUEDZ8J&{2k%C%YNgSD)0_ zK0)6c@a#+RIJ$`%C=|jOA#)K`r57I!23MSZzQi$ zUVSKos>nPlF5{69M&V>9U6jf z9@V@7F2#k~jg>7Vm^E{|_8+H6IpVlD%9E1_D!p_K2g@8V@5*07LA>Rr$wvbkP+NNl z0&B@I2r}^B#LRx*tLc#}M{F#J(P~e$k`m4D!~;u?kt!F5%LN{!V;PVNoGyGY*Og(2 zakl*9u2_C_H02vKa90X(6fba?H#@87@JX8O%@%&xu9RQVBf+Ilr|1 z$yqsCe5?k+0&|-Z&oKaQKaT|n5%q9|V)2Tiw{G3?DmMojLUlkEw+S{>IXUxmG^)D6 z*F94bBQcwnWuBZQM95w1_>GOz0ddzx0wP3Z!2ww9#aV636o&ObLet2E&4c|X(Xde3<8Smw| z$nU8Jja893?L)dIOHo-g;$f3csbV{;yCu^mx-Q#C?k{Rwtgj25rs#fO{d>=opKLMV zPq#g*a!V^uZMVJ|w#{{xu*6pr)Ep*39ta`15GJHFc5aMjNOfIKt3U@mp+8)JsNUIV zOZAvvLIjdYah}wHD^V%cg&Oc#~c8#FUu4e{Hfl{C(G6uMzC|C!|kJ-;M0aVZyQSN*f zdTNr|OKcisy5n6AF4D{@L;cZxTE9@(SYzIySq(7G9wRoApP;bsGpHO!Ep^;<_H9qZ zDxr&1m|$Ec=091u%MNZ<-g^%)+k5=F?sv+6nN zdh$zBlyy_KNtUe>O$O(0tsD>6s6na=549JLg*tHpmFIG3j5Piwt9jY3HWoT^N9`PY zSyHeqm7wOK+!w|RJtpKO2Qb^AsDYK?(omo;Nd-Q1=j!BX_>pyhQBD6$!||axz*}kQ z+V^>Q-;F=*bbpJH$Ri^oegt$V!+qZ>HKY0Rg!>{ha1YtT=1W~&MCB2SaSt88)H`Kf z*%2b~UZYxp>6NDIV8x0OlZ77iH<(dY&o9u-um2H>fABipx!ya}(QkfGOQV!b3_1dG zj})?T`RKCR3qpwH#7AOVpYqPGCN`Wb81_MlvZvjoypbI}C!fbH-TA%MQG@W)>u`%Cfk0rx716D@9PEQus^7*^R zVjjoUg*OxmhhxMPT&x)&A7#EDza=HQz*5L99mwmw=g_)BH|di%byNoWECpJ1p-Z%H zp>UCB2#4W8Zkh6AQp`lBpoYx6>mw$nX|4>Q$)RrZGUkm&T?fkNPgQtR%T&z)i2@92 zVt&`*tpo+1kzNW!1otX*+HvmFA5;GJS11`#+*tK%Ta;+8Ei7MK%~2NXSKytp^OU$k zz&18md@N_L9yFQ_GXYsr)kC(sSHq)Gi(fwqyy-2yRO-Ds<$c)RBTDFFc}RPxec9%$ z=i<%V`13FfG0JNzX$GdI0bl6^4YE~Fp4Au)>I=QOd*50k7w;JRZ6DsfVaPNnZ9+*J zCyg=eqc@FbGgfw)o@B_fAU+jCctJmZ-dH2Z<#x4su(s3`_#_bIA8|#wwNARzE95Ro 
z`hIo*A#o=FpDzJ3TPJ%(BBrvNkYQUQ$Z5cA*t}gAJx?5Wt<|XjwGUtoBAE@nj@7@XQ<@sk9p~7{f z@yrj+w;6=iAI5J^jo_iUcRcIPrNU)j`Fkm<5_#R1bauy#^PW0P^FVf2zm?5*8aTTH z8JmTCftp5tQjN(Pi-KPXmp&k@v@2DqK7an)p)IDLT<5wX0?2W}>M&Z!LO1t0sKQED zUCY2A0kFdg5#55z`C&kIB&NVGL>&zGbdFe^s?q?=BGY_l%(lTGV-E6LA}%75tOFpT z$a>S4uEQ_xy;i5hVUsu}xP(t+uoHTE68V_DF%KKu4ffau0!LJmFAx-P6d3~AaBsfL zH!?K2){~BAcj2D3Bbe7_+-lq!GwvFbj0nl`FHg#zs@;uiVTveZs%-gMFYVqD!R8DW zL!Zka09E2o#QB7|Ec(|703bKseZ$JHJi@`3jNGbsf1QpC+WiSg6(sp+LHUyS8R3uX z=i>oA#+6ct7~jX+H$OSxB^wrb1~=CCzFgSayot~EFkNJ&&uZb_6z$ZZd<^#fyBl3JbffA32!HOt8Vc;|6pv8@*<1l|3=`Kla@+<}&!IO*4~xvrsYhm<6@y^_^Q@8_hnN-@pZzbgitBYfT?uQ^a_-33-54QK(G zj~tCViXf-tRv@Ab|M_8tjoo){2i@uZD^tn(U{2n;tm~+R^{-njyLkN$zfL2;Xuz(! z_c6<1B^|^9b#g~6g5Okn=RK%-^9e9AS$6z=w$wY^2G5^G-!~_m5D??FD`#QO&6et` zJgq`4eTyMy;C}b+A(ct#zQXCV|LS4z|9yQCyO?J=sFc4%k!=#B*sWua(H(ygmO?XEp`WZ9#7mrwbs9g2EXNRqCCkc87e=jU6OW3waJh@RE$%X@$_eIwmEQjR+;X(RNcSUO`l9RjSIR+wTbUDH(oZ=R8s zkd*#ebkT&nazcEzO1E#;vpX>YY{Wrk+W6AWdRK>B)++2JcJwK@-<)i|%zRrX>?8V0 zt>dpjjejIpnjms@Dwg?b=#J5St^AS|=OX)4$5cC1^aj=UH{YKOa2H=oy{T?f;OtC5 z{>cpwQ(b3`Ok92}jmezw+C8e~5h}Tcr0bKr{R03u%@otF@oMF!{;;xVBj0d+cM>&0~~i zJrOE}V9@=UMH(+<1-MN+3`(%&OnK?;tAhU(5dV~yRg4>TPM~{q0I8?ZXn=&pTaDeh zW3`~or(YOOqo8@^Obq>0@Y#BLk$UB~DiE&Lr+B}z(lrboo^{4lorZtNcFz6wS9xUg zBB*FHC}pOCw@tuDQWpnS9$&jkX2J9Ed9ll9Nt0`To}|XE0A92Io7?S<4ZAD5R%(Z& znMw)z-RzTEdRK<*_VzLgygou$){p+W%ZWgL^w;vnxZ}g)&-YdZ>IYq32Ry-1F*QZ0 z{i~9HP0Ih*d%cyz3Lp4%?dhvUsaJfd_kj@YYJ}iNADb$N|4IP8&+)52KsU{OrMKkJ z3_i{t3Cza-tV-g{9BLE{D8@OMtm)&{x&+8hymmsf|?4ZIcz3sU;Sn;*d=&{%2=R3`?e+}M~Cp<2E!&`j6_p>h+Ijh)Hx%cm0 znpL=+g`tZ2U(JMn`0Wzk)GA-N%(ttO>1Ll*pQ(P8`eyp$?wO8d3?sbN{lHsoXBQ*c zV4QN70q7Mnd=hKH_FrP}K{VHb#Xa0_K44Mz5Mk9gYtNi}%qSw_v`|0Cq0UF0qIE7{ zIkRfd0kvC4_SbFvA6wU+1r+^_0^W)LW!nF{F$6;%Wak3Nv#L_K3h>Bkf?1byq4U|4 zujOyh|8?sBV?`Ka0Y&X}y>;n-U-6gU$Y0J%{jYHR=<)fA^$U< zzrNPr2ta0ZvE9pIAxGf%Agp;06wbjy{BPg>Bvsm#vAYZ;_Btl3aX^dae3|_r=@^jp z`S7B_ON>*irb5VO@VP+}q*BPi>**Hr3qaog{T}}`Vp?^L*pS4^j&YJib98ZigxCF+ 
zgHDcp)`t&4w{Fj5d6fRt%vjFT+=wcRknwFulVK4(Q!|gX8HKk?R%POtt%Jx6O%(gX zl6o^11W{&YK;8`cm~e)Tk8k09=j==`;8(;1?e^-`+=k0wq?HC>tHaK*g5okld90M>q z$@M`or~Zw}CAKIyy*_F;lTThh(g^2F4_vS=l+t zHZ}>gs?w|!O$Fj1rDkTo1Re605=Nz%7|n5SUI{qDsP~O-Yzheo@*9$s+2+}pp>s$kRKcQpM?e9|5kx< zYD?9y)G3+Apus>>^%buuXNy<6;m)Cfe)R%_^<+uRphQu^QgtgsfU{Y9HP9`4f^TiT zw24~tUJwMj5p|yix&^6{&D_dvduzvoa>UU}hB<~~KPh{9k&OgvjMt2Cm3xgb7st}& zHZBQTb~VZ&_28LNT-ut&^$X9Ie8yTd(`1MH=DiPhy4r5D16fkFs42*f9$E&sC2Onq zChaghNESpqip%Rcwa@{fzHI*cd@^3cr@oUY0%Fe&iAHobh&BanZMZm zUqvX4Zn0X~nofq(80}=V7BFXjGknNqTF`iD|7Ax7NEi%$f{1%upuWB7@BfoEHqAhU zWE+RXWuofI46I;weMTwtM;?_;LMUc;>@*9Rl%&$#lbUw%t{e}9sQ3Io7H}UMD zgv6Aecho20r+$l)p2-rhNPt`+sQotmsTui82_K2#|6}hM2MGWU5DqQN9#P-ADeQ6v z1;2sbILyYTr*^@nDgMsg4mlVJ&B-^y1wq*X_K>1J-gxqGK=p9EkVQYAXb{YpRWD)4 zQ0+*6F=5E78DMkcw0p*+=V05nE0Bz=0`X&QydV>6>~8+fZ6W9j1vgiQB1tSdC?p)< za5JKR(5%!37u8Xh+K%expJ`XV*(*CfwJXU8+_kq^1HU52!j!w9&FS{128UA_Yb+AhSS;MO)=;pjGY9T*r*wQ9W#NI$3xKbT$ zQ=QdTiX_9a_ZV4K2*6?0)gEi&N)KuDtIg8u zOGil4Tzg;Eh*y5UspHup8Lhyn4pCC!gtG}HpyVw4_trB4S;fTkYuuJxCh>%kGgv_x zqn$;QJ3U6WVbVKMUm+`_Wb{Z}Ky|&#nBy>JD+n}{A06nq5?&k2_vD=-*2n30qa1pS z=q?ZA2V#l{@th&2&C&nSVd~kkHSt?u8+yXQRj_r5Lq3(xc$Rl?t}&eur*%PhlGp@$ zN1rXwu%C>(oA8Y+WhvHVLE@}8cLyBpZH1la$!iO+IiNBhS{5FjFD)%~0#AvjD14Z# zJULmlb{#RUf4oNJ$w(k@TS++evU4OUy(Hsp=>yrjd~qByr&}3!cnwq&lD$Kd2I{hm zysUSPdQ|ebzefBt1YB`;jHv54kAAsj(O~WiHG0U_i7KMgdq+z{n%VcLR8!}uLDMHm zP#0l4Cb|3JhewNj70M!dc<1NvX7@l?t)9~!MV7tk)j{gyzUTBlxp0a%gLPi}dN$HW z22mVO|E0|t1qfq~8=h9$q2kfe6J_=}`oCU<2<0OgD4oAx_PI0o-T+)R4-r|EMsHKf zlc!pN;ySB_^!8U*gU`2G=2xKXmhRWvZp!|CMA+|C`ltz@BASb#fqu(U2_@_icEpSe zZ}tVvTI>`q2@HOHuA-#c-8TSOi@Jy$oH z1X?F&&94rH!tZ`_&mujp7+{NaDaE&`fz}WGPkbhO{~9YtPW!F$POFms2-I-9wQzxw z|F6AQQv&8#+-CcqF=@k=XB5fKF8Q%ocHh?vOUGKvgJNo5?r^TsngK)-Ktpeit*8wd zOr_Q}3NBh$Mc)A6Nt{jy?Yku>59bcY1dHmB9Nsohb1>iDVSVB{tbSt7R zj#6g&kG1ei#3zn9V#=e3m;carODogtSxzVJkwQwKzsd$>a6#YAKCLc>Ug=K|vG6Z5 zYJP`@&vbviB@>zCXYhe2r#%*>8d)|kpi^vA>GIhH8t6At&Jd(YjWRge z?85&X78ctYGIiWggi~EWGO(y6%$-2~DEnXQSG%51=r_DJoG=cvz5RRfvsKHlx!B<% 
zWGp~HX0QcnBF(oya4%Pld(ct94Cgc8;cpNc?+gwOj+SozbRyHqqdn zsJc>stL;hJ57btNN;rILURugF#5n0!c;p?O9Hwek+F9Guet9G);GJxgx_ZSrzFG>yM_L;Gl%5Wcvg-R|Q~y;>6LBtCn7I|~LU5X@>~ zi>iDc3m5GyHm(Ke$($^IymhtB>g~@`5kq@A>=e#PHieL8RRqLU-ZO!xg9sPuQ0Q{?r7Xy>gWA zX|tq;XPz#!SR4K0$0KChtN(PD{d~^YLhwW9cw3`V0|a*vwtimqvCybq?ygmT0jB#3 zEK|LRt8A-kX!9_iL_el_85-i`774!P=%dIwZzP*T?ujdMpKI?+xi329k{l1oQ;H!} zO_iQFkOIhl<`JGXe#7nG+7?XLrVd5Q4bprKj~ue&;-wB1fxAjVA{=~T3jqolA-m@q z>N54kQ9{Xt_*o1-ac0MBxors|_I8AqDC4SsZ^`d}LFoXojrRPvpfE?SB>5oH&%=t4 zt7O4O&SHpD&nqduUtXvUMZe9%1%jV4Yh|4ANCiF6vZP{guM@V(7OQVOUmlh ziU=|t-vM30s}qO%ZWU)W{IW)svwFc-*I**$bYOqY=h(GWSi5&w*tdfocj#5UY0+~( zma9L{>`=7`BS4xcR^bEOKnkCVM-`tGg8b6(E62wh3?)DFMl2A=Gg!R>_fP< zC0B;b$)3E(X(FpaH>9|DiPJM7o~6d$Q-;-2kmm(TQRZkp(r$bUx;ok*|0SMKNL?%| zK;Lf`KhJ7fML!F(aKyHN`&3i-&?Mmu{Nmb^;O>RT~bM5Jj;)FaTT46pGO;G_D8l~t^-5L~a0dj#a1H90ln zyV(zLF^MDJw#>&W$&?eJQ46v7sIBR}u(>=AhDghWt8hngbDf{yjjR`&W10NkWcr60 zPs6^Int@^jsfXP|eK(g_t@wl`oIkc@R#;odf(eI66Cdtskoc%KBufrSPl1iggv@rM zUqBN*_vjxG8@X(W7fYnL4qlo~_Fmhrsj6Sjb-S#?t#+UiS^PxeV8{#=g--M;%D7^V z0^3&5j{3?vV8?WYUM5CAY*+jI2~q|41Y(RbKon zq4Ae~$g&a>4lg}~?m7Tta@_>))U|NuW(V6#?d)l>lMA$oDT=yak2A!i zz>}<^({Dp4O2iW^>H#W#4T1edY{dsPsQ9rwHWL|kOrIPPv+M~Dt*UZFIV^Da7@a%- zoR-XBs=9-5J?B-Q>cwTmi8?Df{x?IK%Oes~Vh)arL5D9gK#@h3uor6f;}rs@ zNa^_s-WWdPHAzK#+*q4vZZ6z3a!P=J&cH7^;WYOLzzHLU7r zohA!)8ULH0mw{3uMnl)do+rjHVAFV0eRbh-A~&4O7R=VYX~?2 zzs+9Apv+Gpk4}$bS}WmqxZRv|qK!8uoNT9syc-N)8j%Mi)Cbb}E~}F**YmyiTdX#Z z*1tvQG5*lH?&+2tw>O3^gCHF|XEuTaNi+|&e{#sfPXSyS)otLC2-?0z(WN-q6SG9S zG5m>cGCVb5TkWcHBi|xd7mq)~_7g_@61DgMRFn&y_`2x8f&)`;fp%T(MId&-LvXOv z`tQ<~8|qL<0>vOFz{}&Bva{1an}R2N+?3YTVcCOl>I9z#UKcnyzK-xo3pu$=Ux{x6 zKCbRT#<0^*VT9LRPA`XbPS+nE@p-R*=cvQCp}tnP6;aIoX}D2r_nYFC_;=X0m{b|4 z#YY8;FtG<)K9!2iAGeciUF8EUf!7!h%C0Pq%{~uwfOi`%x+Y^0^~VfOK8uU6gF-fy zgR2bXS)k^-}Nmt78>aFbONj6MCNio-k^n9)KANm_BLEy@N8_Ik8%}{KHDN2O*O(!jVclL-`!XgN|pCpRdJcOXypr zP7SW?jxdCV(mJU>s@ce)`T%&4LIq|)l?^#_V<$4Fb`ih@z+wY_3Es0-sWg@Wkce0kAz0Al{gIYer@(6lT>>{B$Ct 
zjV}Jm;Hooad>S@mVZWj(*{a{K%X?R5#;bBe($aCo>PG{6T5f%xS7ZYvKH;Dwav<#; z9D=1plG3HL;lL0aJTb}81>=@ij;Vf;sr15nVok5@3$I~>B-vF9LrSIU%`a<+`U-pc z2mU%F4QP=iVQ`U^ET)wP8VTIin=p~id~h`Ssd(z0j(X6hHP_3M-|u3=k1Ytpo)*G_ z?Mk$Y5`@i1dz*&s6O#wV!S?-o-2f5JpyOu3l`Smy6#vS?ZY-a%hn#P(wfKx+bpGONgoFrS!u3lC|2?7X<|{ z>8_mH@Nj_qaJb>gU_a@iH92OC>2f{T>Ws&E^Qa`Ykl`0UJl$8JrmaRt*L-N}S|-^&hLR}!|hR|Gg^P%*ww;)38Jgf2UlMts{tZg zLuA@j_`chht(B1p@vzf>C(2|!h>oh9HA3mVnj)gp<=fQ@^gG1iI@@0WiX~ex*IZ;L zMVFYxQG-qWQH@JS&jH*mKq1s(fA{gT{H`t=y=e$?f*`t2H65stN` ztxbR80A7Wn;y3Y$f5@t`>-s}Ck(fVxc_=?#=En&)G~ticI7B&e$H02JE`DEpVtn^F z^#qicrWG;tlqR13Aege{2Ogj=PwX&Ky4>>6vLm&%Ausyjn$gR-~vV`g^ z#GT~A6J5c4#nbiT?cuGDR67&t3eISk0?|Ka8lSJgR$X*cK~BT^R?h%nt9i>8lp23A zWwPJ*9ZAXNqd(F)ENU(_i9WlNSc*a5I$y|q_fry!wop0&+hMZo*(S>2qHX8Fyf7 zBh<`9qqNy%9xvhEaKm%QbERY&lD>b#J*VxZy!hl^UX%Q4R}oWttULTyo#S$cgJ8uD z4hfF{z;_j)P=Qoo6!mV(mcXfcKypMxXvj4#lZMCHRZzW$vxmhg0GdoEPZMU4jb0e? zuf?v}tHFgVR}F`GD=1&XAq?SvvU6TNe*8Gp0n%GB&CkEjAV znjFcX^@D>ixn=)EFZGAU6m^JpVl_8Bb-Je(82Y1i2|~Lkd^!6Wk>&DN#PApoUoR(v z+Nclm>-zUuho> zHvC%Iu8Tn%_)8(h!O6c%fynVSD23W+1)|G8Kp{GNHGB;3cXrhC^o+O zI#WCc|35W=pSr&ro8-lO_81nHuYPA?FVPvR$+nr_1dS9M9akL*Iacd}bR?>)-F4>Q z<4S29YIwWauud;HaF9(4{glWlIpfjB1Fn~&D}zAV2!dG&Rj=J_rE!$lmvX3n;qVZL z8_#>wH!#;i8I&Lvz2=T@8>#BYT%MKy%7L4JEKW)=zy+UMO19GGyegM@YhDE+zS-?6 z^4-d;6#GyQZ5PaDs6rY&);#CpAV%_ZZ0Q4KN@Tgz)6FE?rwMfy_&K^D*W}yx z5~a;_LNDKQjJGzE3^WDOKCPoxN3Y?}qMup*Jb0@4lYk%Amd;N15rXpbL0COB&f8hF zR4cMtwOgpGJw5q~xC&m=XMd|#Z=lVQo9!u%ssx}Reex-;1C7DPqL|G77S+$r_gijY zkKY6gN*7hW6mdQtQ-&BwZI z4jwx%xwUIlv)#G$^P`-3tZ{IJTZyPc-G)Jz{oeY`;UeKj!)>ZkTm7HV{U77H1{(yv?zuyfXVvK1?g#9fRv&634Du;t&X>U?b1$Rr|T1SWqSY`Z_gEzm93!G0}`55 zyOkm58I@yORUm=uNjE}AGDzoG26?}<4WyT~vZZ=Y+_xzJc>Ilmk!gP1T|Hzzay7A{ z%0wOC#-fM+g+I7|%?fVkQO`koVP_4?7hBN#*+2W^fDehb0 z6OLFji=~n$B$n3v{HRRd#)cV!zyP=S@d0HSn!W4-XwBmW(O}NSfJ+IM-EkO#As15N zq9Ca{9B@(I4pDy5!)6*OG}-X}XAPZ%!!Aid#VMHjx?*aEdh&R-;z%1FgZ07Jcy*sx zzT~8|$Vh9EE!@u@QPiu2uaK$E6nkmpT&8X~f>X+Q*e)lxptx@GdahW!@Jdl^l84OeRw~U3+k1T;mc?kqHaCI;%~YTcTxTmsG#BOU>YJ 
z_wb`UfTW*wHzgMkx|UsU7<8j!kuz7WOk!=38(MetINN#Pco(HEh~1U4eAr-yLel?V z?0xq?oLd)f5JUt?Bt%a_L>EShPDDf*qIV;@86{Dp3_(Py-e&YRy3s~wNYm@+qD8M0 zy>lPuzUQ30Ip_Wb_x;6ZF!Rjbd#$zi+H0-vcX=&HFFN-WKP`}Zu~k$Zbei?E$m^)q zQ-3ueZ=z`M2*cME0ELCPAM@&+=+hq6=6v+dToU#2J}FCx$u+6lM%x0+=G4b?hd_8$ zJa&odWxM7b+$LA4c9rHG?kAblAztiV{kAC8L_R>rxKBYcjL2kM`_b&e5ag01x}~H1 zMB$s70WWJlwsrwxvyxf_8!V)%TzU3YIcwyr)xH-BaarieyUm3INJ)lYw;H-Qm~Oe8 zuSoVkNgMx#t9vru9t3w4_DP-{5v(oTKe!MbM#6c0@fxv){R++32gSwLpJ-@5`09B2 zjvzpjE9GTW@14h-V@yp1r@IJ?nBn2!n!1{?Sx5ZthJpK!L=WqFP{sNR!^nJrM3rG* zl^!S=d_IOpO5^=QEVdo#me&7z^BR!3U$j{WASRLt9uSc$_+F(|?eN?&Ief9)`iTfS zBRR55Ah}~T1$mtB={7-&G)pjHRf1)@VUMjL0@(PQK%w3_{m~B`bRDj*7fru0zmSW+ z95OjET~agZ%~8x}%sXn-!#+SC_4pZh>XV(`jo^<+vn77F;0~<=R}4<15pLd?WrT z4~4I1-~)jRYE@Ka6%UgJCUZ>+ozkbN+hX=BBn>Ov6Bj;nU+2?q_DG5s&D!!`i{{sC zYZXWOI8G#K%6)@DfHu$f%fC2>=cG(yCd}G|ZlL5E<{=9RrC?f)r zVFeQ}E%s<&+w{IKcE|SosVM0D=1?)VnQ`;SvX8%d*Olv!t+Iijm1L4aG`F5Evyl7^ zs(bQuu-=AQa=0ea6uSACNdT3eqg#l*ZN(;bh?Q6^i$Yy%Y|YV}eMnL4z&?VHc2=51 z%bM>0_$=acEP(!D@FykD@WT|KrmAFcJyW9Kvtxf*J$gS2#covT41Uj!6`{p9zNsMYyjFUFi;b>o|WTAQI)Q*%rBQP_owUy_9p3 zoW6Y+Na7=eaT@$at6j<^7B^z&4*Iq}r|2SK(w$if0dshJvL&OjCr~=41FzC}gQeYs zF6qNwuLWO>`uq;l2+-_Z;-FJ87N$Tr?{S-{LzBhs&e+Wc?x8=q073^C$!Wq$DkjRu zuxWBt&Tn^eU8P`FZ=O&4h6`9U9THlm()=_-ALeU-SC8n!7+h=7P?Ldj2bfreypi45 z2=-3T&U8V=%Ibr<;EK$veI;!dcU|^3?h^KNHXyuZ00ZCQ<YhK_uo*Ab(87gpWLzuBsRs!yY&M`FvkF3%W9m;AcHNDKdsPe!nxA%*p@f(X}ZP z_i~ewYMs`Ppk;MD#QW28bCC6k-SzZi=vU?56x>JAF}v4WYZeqYCVe)$UHVY*?uv)u z;`5Hy*&Cw4wbmbREPW4lUqLfDk3YjP0l3O!Bvr&B8iN@WFt-)J70O_uCT}ba8|d7} zIOzYyvX#Lemein|hoX=qg1YYu=OuD=#P^lj;C$4&>Tx_g@z&jrs+!(I;KejIuS3d} zuI|F`!h?d^kLej^!I_|~2SG>iNl-r{_udmMv_Jo87t)_h6Pjhm%R$jMlp}vAvSZno zgz4|Rn_P<@j77T415mB7K~n~fF?_9_i2cxvk=s3L@VQmrN_dc`DxJXlRo?gh$B|>1li#`9k(=@y69B%zZYq2h$(p* zc<^IolWT^s#P(ZO@}kYhk(}Tx{^~)92c|DiuSuI!y8pwH)+c)6tysui1a05^Z7&iV zgof)@Dt3)WzfzxuN8gfy$bPpmB|VBAA&Rf@`W{m_%`v;+>byF#J>W^qZT2ypqFT=n z+E{KO_Ggj7W!032>uDvP%bjL<@HU1)^c2Is7A+s7N|zlBrXwf;Z@xC)E!M5^x+7*r 
zA`>U&hj1D-qpo(+9JtD%la`Sm7Cj)hI7uXG><4~Ksy}*(nP#!fVOf{c=vxhF_td!K z@ZB}O6_5LgU>D&UMCO264bYX9*(Av< z0N$AaW*rO65gV+@!jv=;)g38P_B*&%SwQ_2_H3OElFr#z=RlNt}B4ykSY(~0$q zY~ts>a34d;9-%svo06hKgHjB`^3wU;)n#ysre2G}`_@BQNr?@l!$SRE58Ar((5Md$ z&vD>E6jD^8pbx@$A|$T_>y@UIK6#OSjhlXNs%?zQkgZDt5@&>1$~~wA`V`L$l3`mS ztvO)egV$l>Dag`}eXdOyf`mLU^EygKS|XZK-p|q9K=MD#bh4e-?_(Yq^k@KF5hJ8( zfOqXsN{4k;;e=%1_8dHl_WiNRxo0jf0x3~8~Z2AK=X=d=GXZ=e@@TDUE2*MA)i1X{%Db0i!_mnPc#>c*1>4>c7Z zVH6b;W&BdNKuoivjhJM=eH~`6Y@2uQS%2nUzr8d-0m1t2tiM{W%HDSzoi03lsEFy` z(#9)V{xrhx|2SXcz9vL4NDVG>0-9SBY)3=y1Wv|~3T z3MC#P%So(1+F-~FCGsBM&v)osC7|01e1L&wN$7$8qKCJ9oMLRA^YYxQniLCEYh%D2 z&O|?cqwmGxyYFl->dSQL{4^iCnAWEA49ti?7J z2&9jnY~k+H9dweW@dkWXB6cp-cL;e+H9lp&Yy@qv`Z_7#*I^1PPY=D2rtR)oZ=juZ0n zP8U|S4Ebos_o%T<^vwfGiUZqw%B0p8GFjWrf-_j1N0bz=tt+SdGQzeoa~M9xHb=ZK z+IGSYd6k;FfIX$tdHbsefKwT`Err*-qjFy_OU$HA+isu-=6dct{PXb)?i8`KaCiJ> z**RgKovK(LD-$mzy}aQ(Cj#m}GtIIj#2d&ERGwZzoV5c6JI6~6&xk>F8s9v)aA&cB zWNqYOs=Z>;2l6N$w;wLKB)7`SLu=e|!`5YYpuXj+T_%QLUp<*a*c=|L78cl8}~ad#icM+!fqO3$)$FNw00Z`lZ3N(`f!}Qrmj}? 
zwfm~zWbfmR4un*flc^#(2Io2FQ_s9pAe`6KeHuNOIY_jtd<&G8nMf1g$PTomGw>eB zCEhj1b|d9Y-CMq*+N0jx963!Gxa7T;O3DVQuhsSRI^xIrq@}$n=gBoluf`NAM^h7s znH2T)3A$3Qpm-wyl*!(6pm8JwvL@5OrE)4!2ekqjS~7s5Bg#A%A*D8>rE@3ww!(M& ziSKmm76XMZ2(usffdf$z$`~Mk^L7AL66rXBp5U&RtUj6!9T&b%t9Jg?N^Q7r;fvOs zjh?%ZmJXn&#wv6JPp!bxnL&WzuRj?7zRS7O>1JS8U7fSHzTSjyj^f0D9cm=4wZ}`- zr1uncH=5gvFKeqPpq(>uGgUU;ZRw#>ya(hhNWar)+9Yqm{;3Ep&d#M|@ZjATUYMyY zIVwgNx;%oUEyf3jmOaX-+O1b?Z{{gzFTNhunJA80zOhAYL~0TzIZMU;Y-eb?OFz9v zTa@43@V&!8{_sXe#b=z3wHFBDZlJeJdueQ~B`YZwxbPd(i{$e=tLCF}4%&iEBmlr_ zCwMnl-a$=ZGTPS}$LnF}|M*z=kzz}a=yAX|`$#(fMJ;OEQokvvBb0UAeR+FPielq0 z{!}?)`av{jn_hl2E8PuOPTSO+{N_lBiiF{Y8afD9d@)xfp&OW`tFv#qFp0DAjb>)P zZ=P$SNWa(gwq8EF9sLWVon7ZV63rTShkWU#GTosvJCINw|CsL~Z@uh$r>C#jeFdV) zP7Yup+Gt&{d;Db5m^!jfVwJR$PO><5xm3@A)1)xD2886R+cKwS=AU(qEFCEmS_A6_<=M`YX zL&Y*}04O2D69CHR>kHsWw;n)-cOHq3U-r_IGpRAX*#_scGZWOXjcrNeDTo5dTg*K5N{|h>AN!4|nNCalg+XLcjvW&#}z&Imr=+%7i=t z2ZnwQ4viq8=nVS^K(2OEbbl0SHMxwAMhM(3b^xlF z_Ol?HeKfrB$x%S!rh8R9fm?G&H)xoTboLfx&(6Dx&mTizXw;Z^Uu zi0KNS-y#DP?{i*(XZXV4E}iGveyl3PevWg$uxMMPdYp_a?uxG$Zkfn5Y7}3or;E&O z@R%5D`=BY-fFFFj6+;_xlqjAEBknKbS*7k+qg|~HS;ZZb#|(@dz;+n9WksLL&OAYDh z?=8E}&eo5By?jRQMoR-+1`dP*3I*~5y4Q$EYT2Z`wzd)jM6|Y)X+7$m@PYg@Z8J>k zybM{NLhpUe8#cS6841zSD6ksLQ2JKWRkhVyDCo6vAB)ORd;jet zy)yUa7=C+@M2z+jmF%-5P&FOm?L&}s9ku;V`nyuRxwJK*!kX*JK9l9(nhAjDG8*8* z?5}4vIKo^rDPK?K^Um?L!A|^1*95L#DBpwb+3j~4`tFDf8jpbst5ES$FsTV|Cn_xR z#Yloai+eA9xUg!62nEnXXMoYCUt6-f6Td1ndg(_BmB&(dYJn^0I!i*%v#AHxhqRf& zbi)0GO4(q^?I|^JL3?}?k;B-2uLK(WJrp@?{PCB4A9mRs-00zRMT<%PKf z-^IZrODbg7R^WR*Vg~^Bt@#j75xy~dwc}?q!|a;37eYgjIcisCuP+ylT5V30CnA;L zDi}W~w9CqOSfl0NH(IM*CKxWYaVlETFfg4cdw#kS>Cy4tQW$$Y+`(~7gAzzjM*&O< zhbX4@tt$NWm!GX5vy3XZEf+=u zkD$(C6_~6pw|?8DzD1vP=aS8=?fm5J3E|_%c14;*uE2RvOWRKMBj{p+Eja3KEH>IV zcJJqBkNjy5N0|Dumz3yOlU-Kg0)3Myx@)Ykwmr94cWisCtuwvDXu{j$4LZ`#+moYf zBzViEOPaG{+)Jxre{n}V%?97ZA`mT69X}a*S_O~`d~CUCd@k~?-)qy?K&2z7I zDi-l`V=r*qJ2`Yh=&BbazK7!szX9PQEw171li^2>_93n|~`*9U@@qE0B{i!)Taj-PbLhk-s_tw=Gr 
z`A}=d&Htp_-_$qMACt}o3q&d;vxdK;;p8xpz(gH29a_HoBE2H`d3)G*vGt>FxQ;rn z1uYY(YoxeFB$daY#4;EDZgsp)5pbJ%yupx9#2d7ind+o(r4A{%b<5RmZ92dG@wEU? zd?5dVL#al)YPs+bZXo>ZMxh4THg}OPUP*-YX^a5Vaqz)(HT<5EQZQKbo{?qSK*7L5 zC=jE&iZ%;RoLDx%QZMZ&DJf}|J-)(eQK;k9_LEC5c6cJF96#i^eX3jKoEg@ofxkuZ zeMu}vq~l{C@dY;-QzfjHqX5ySgYd3>fZa&$Xx7~uxzRdhMob0$S#Yr!POWd+Z#b@8 zzOXgo@hEn6-20w6I@69xD>Yz0({`%+guR!GyQToLj<;S8gXt=@I|i?L)SmA@y8j>< zA-2hn0D6P+0Mr&2zpo4xfBwK@03o4LmdHkV_t5qLhlBbS7?r!&{(NE7=^@(^^8sBr z=q|wc*hZ#@aQUM~x|x#@U>u4GpGLJ#eQtRwWkr$Uw%Yf}r-$m4D#Dah`ALULkX5Mk zNtr4_vOl2P&{at0A0K(Q31NY>G!PqeL~_xqoHleXb474w`+*$%Z;_9jCVFCR<^LvpxU{Ndm?S zt0P_-s+uh3y9tgkmOq{%EV04+XEW=Ee!IPff3YEb4qYLW9eD+e;|rQbFdxY``XAOexUG>EboMxUQ|pB(`911J{iw>ZxDJ>_{Uq^Qmb#UxWA;qt*-$oLR`1mndHpFB)@O%FF$|zZ*)S* zo0lQFp6=-*Y3YH5Mpy;ieABx?3%ck)>q(9R|L5&TIHbb?hvOrv0%t>FvCIc_IJ)~f zxRT~O?LEO7KpAO|#(lR2&}t>Nny!1bc5_4}n}}h?R#Huxhwwl-JCNjBGcEriC7aSl zIO_mw=|qnUVsPj_=tYurnY*En&w`B4tJyCcY-vCi@OCSM*A%@z!^v@Ms*|kRUm8Mi zUh;eyZdr>v2E)vpXYfik=9=wLQT;;TtA-Yu?i9TTKK_yL8!IyLXPVAV`|k6@n~a2@ zEcHu@0#U!W43Z6@O;N`%`jMae1(kD#1N!6FFjFATzj&Qh=FdO;@+RPu0DfiKiQwGd zYxno7UTy#_T4z4R>OXUdq9 z(wR373ue=>BsYoIeQr^FAPFw{+sFVcr!IAoOjpxas}q<~p)sdSUg|9OPhw$4KC(BCYce;2q_4(+c+tt0{UrA)2UI898Pw|;HueM67 zo|Wy+o#5Fajz3zZ-4rAYrerC<7(y<8XSSK~=|ZM;SBhbMlxX{mB>>!oK%IX9r-xzA z12fwX^{ek{@A*0lw(D@d$H@u$TEu&%2C}ct5SV1Svt6;GpS<7ste;KKuhy&5^bM=x zf*$Wi1)29R;<;H%cpx@@VtzejF8S+4+9JwlrzdPm;;IgqpQgOZXISD-tk3R?82}*K z{q=PHo`5ONEw-2l-azP*PPUo!dxxgNoHxY!#kQz z{&mldW(_g+`0_R9LUZ~vT-wd|7wo^6w0R}+nN@F>)a-VV~rC4)w< zOLM3#Q@*S>qs9>Cxcu@I*ll071!Ww|q$d>Ot zrN%M4FEQ{I^B^`xxSjQa*{52Ka%QZMdXn+R*Ok?c;RT&017{^ikn@u@$zgHb8*?hE z&wtV51RsA3#n9F(Fw4#;mQQHBu>rFr_VEx^zR&P^CDRI%9Tw#XsYA-v#y}+ipZcKP z$Ddr{2Jx9zT)B4t#m^%9k#enw(A{A4F(ig7TvLuKpys1tPgOfwVYuFFa42YWW>G!n zac}B0Qv~vC1#?vgYiteyGtqR}qtzHQ#a{+vX)24gW&~lVh2q>-7On`GBpPs{ z=@v`z9hlkgBXdy)|{PPNxK!n!1zl#GpAfqVP4My@ll z`MX|ZV-dSeN{MW|K*?M}k}j5DZ+Z$AOr_&QJb9>0!ynaowbXql?m{l-`axpwOv}?Y z%W|@!|AxLUlL_S-X&x9Gn6%z~W%Dectn(Sx&bqoqNKD-FK%eywgH2%cC*1#Wr&wE~ 
z#U{yu_p9oYj{18zy_E==lVQ$H-PIThqusWUL?!3@Z$&jmgKd7*sauzmMMK%xW}-JT zFSgP}zf<0Xk~ypOoA(U4HAt5u<;Ztf?z6R^zI`vYsG500xALKWfG|$~Y*(~2lr_5e z4N+LF`dO}pHdgN1=FX#)O3XB`;-_sRdQ(El{y>`?fhtpJ2eHlwI$Pz1VX5H(%P7!zotFndao9m>@xF78ZVa z66`I2K4seqf_)mewRT+QG@kSV2*KaEl^Q1%zqHw#>DM;+xjtIVWv&&-3>VbeB*SBb z?DZi_{dwHHNYO~+Dre%T_2%?CtIavyfsOk(cCVi)gPF1fr<9~rjQ4Mb?)oabuYI2# zt@qh??nUD_>pgd0l^X9>pHaQ^=jvoQ&$jla!fRf-0B3TrWcTuw3G4pcL_Q$r)F+Bh zh})irY|O>W!{3X$iK|zecvcCYuHQ{=$xy-`4oGcH9&1)OGEMjtz3mGbgT=?1uCTs+ zYjv!L7bFL%yt4fSA!1J3S7YJ$>=!@R$TITrr?)vuVYr5YL=3eeF z*ZX31@%sCb9nC4pzFYOGiQ>aZnazCm-s!c&UY!^bqK-vQMF7FdD0Z8Olp>q}O>wD` zO5_6_cY23BF0gJb_N*47$|XyC#i-8fd~XpsySTa5BAOgdLB%>d$Eu zISWs2F92y%j|nQO=Cme97Qa|>@wR33b}YcRbub$*XqB74zbi9Qa(uXZNZjvgHl)Z5 ze<1W`L6!Bj-*f^+oO>)o*wkLYgP@BoMI=*c-@h`Kag(&DY%6*P{ zymg=T@Kb|+SJMpno2>o?R(|}^!CqtBn{y~_uHLcX^$^#Uk*$K5;+LjgJK_DCb3=2< zilNNWMou+2P|94rEs?3W{F>k7)7>9M?Nlr$fi_~SiXms!BLLPi+4x0|6_8Yy2t7WU zR!^TzD~xUPJ1xM_25lVy1wVv@P&CB?%M%y=j8Bq+i(*?|KdX?x@5b6yT5$ zaHq7Eax~W+m-)_33Ez&xFOgl%S;;<=72qNS8TDPI319yGg0<)KtMy1TU+&(xs(lng zKK(PG)aIJoM72Zw1Wg^YkG5gCZTOw|ho%6XHo%DCi=B4syvD2f1=&wqayRLx)3?0A zbcgr>)N5mhJi3Lr;%Rt1RCYZNjSZK+C8V_N@a6n3_oP1rOm}T>V^e)5`UDO)f^=u* z9c`t1D6l}-*)^{>7t3KdwN8Hq+6|Pw^#QH;)ms6!sajaK_NZqvCXrq309;1*v@M1= zKe)~~S=e^go;~N-L+Y?SvXh4ZCvz;sZiaAa9C|)I`+6De##e$8Hrv6XU+?qcU40cj z9U8bb%WRdy6k$K?y62`8nRjOo-99}WF0(7Weq)PwD^9=IJoEeTv?*42brcib#N46p zP)E9k)dL9D?4k!xXMJ@cHiKi&O3^+P_jrxV9wS9zOGYs|?99Fk1QW$s?y4iAiJ%L4 zo-+G;a62zG>IrKg@26nGzu6H=za80s)Sriy0OtAou6-e}K?6q|!@eq0aGFQdv5*){ zmTT23+(gnZR5_zMkLa8pTO77Yig-HVGjC)pd8<*BDda7_eiKi=8XoMebgE~%dHwi! 
zmsK(l`dRVA)@%=1!PDI}38#IG|G-W+kmKoqn(wt_B_D2t9<1N3S{_bsd!blCtPBZS4N20?}hbh9{xJ2Dme9uz_H2U(*}88cz|8QJWgHa zXa7s`A&x+DdEUZPUh6?NFYSqNlt{>zz z>K^jqNAXsOd+VZDD{*D3S>@=X7%TTj5=*D>jcZu*PP?l|j7NfxJgpZb@T4D*P?k@K zj|>M$0$Krh9!9VuAaOoZ_~Ld?$_ZUB3d{{VHn2&g6FLWtZ3F#odKWC={)`OT5~D&L z-bI7Q)>~boM}-k2gRV~q;cpg0S0uGuz_2j8du+x^>5XfsWcN*+T4LuWd9EGx6PqUv z%L4_z!KuP6LMhZCmWie5DL6GLg-*)Po_D#v^mHV{k0#Vw%X$S>7>3@1lPy=^B#-# zXgqXlcqI*oT)L2L*C~Oo9D{bPVs&=nxq)I2)f4!GT=;;)P-V}>8B@3I?X=^;e<;1| zP49~{3jmy^WT3#Pbx0sx_1=tL9<^ryVlKMOE_>%WO~@8-;2giRvqxTW6l$bdl>va_ z1wR}wE;8QHKD_P1?r_m-;YoyxhJF^rYhMQ-F;LQPbEOfnojlhwX{`$$aox-ZF?dV2 z#`4oX8xlWQ0y1-VxMTQf9K^j39V*^XZcVCr@sGjP2c(>K*9Aj=jfMWP@178fmG5WC z1nT=!QTW;_Zk#p4=&|_L`ksGh7uLGFjHWJ_eCIv7)eZ|j`mE~m*rs>n*};VvA*_gS z-;A*4j10H#o1MhD=q+Q{x{zwb^4?Bo$mZB+tw;*?665_LG`k5DQ1{!5ZW4VW(12CL+GQf_tDY zsR>2C+6B!;=czl+{+>QRr<&Gy>i0>#x1${OO!~6t4Ky`H3VPk1MlbtMBg80A;DD+rntm`&-`3n`R-xkC72Gbn5 zJfP#rwp)q^mk}btt|$O8z-{Q= zr3Ub#US756L?rp8CPSD_vgGo1$4|QvU@o298BvxF1W@YlyW1BF@x5Mj_`S*{KKvp1 zKCjG+pD7cP|M25(hDn`ww%qIjI4$hP^K(>>Es%IFJBPx&!fH*!GF$eR;K*5^AZ8{~ z9s9Fj@GVf9e%O)aG52%ov)~HDDJnS`evn!Q9lo8yv8o6@d|ryvgTO65>gIwO0msOJ zqA(@!#*h*z;SCA*+A;^+h#`O~ypP?)1sSR=wSW7#0@j54&PttfuS_~;L%l zIHTYbL(Jl2?ED}ZP#$Kgv@86@o*Ig^pudl^P7&Y8Ni3sVYC9%|)W({nG? zbfYy7z5i|jH?OehEO!@Dv=$TZbafT0)-|YRxU9y~7(w|Wg`mRBZZc)EL(gqXk178X z_ZS?1Z2BJF=DY7%W}E!{Be zb$C=FS@;=fE{i2YLmZ~yDNNMQ9*fKBa-~ULTO4()YjJ8Irm%%}@JZZZyB6t$-G?;n zk1mgOL3~e?kVMRt6=b`$B#(q`7lnJX2J2n5d-NclJ+SL-f1Wmf1p#g{T$mXGHg$$o zdHdEGGG~n4&Z2NGimwKs^0z!-!P(zz6`p7;Gz z5P2I=dbQGqrUnoPC&zaW@^MIKz}bu0@|-fzdSBooD+8my*>^H5R5UcwcbQyId3k8; z@NMY(U0$xFu$$DrOtAi+d8Vtb6VCIm2B6+;=O=t`tQYj#_f>`bL*dfyTxRk6acEz* zLkBx-acj@em&o~$)YX|-V|o0V0`)c{&f0VKAxzaOtIVd0dAQc|J)@LYAES4`f^D4V z#!QP^R@hSUQZ_)`xe}JEll!>-=5czap=^{Ml*J3^8xiAxVmDO+?L9CzH|RpuYe@6B z|Aqeezj27mtb{PpwR-|DBdG^k5+!1_Re6|MBj_Y30-$s*7To_Z5ZTF$i}vH?ij>Ua zKAIu7{OVtDYLzrA4SVPg+l4ruAawW$n|{xR0taD+-c2W;kumYiiuNa)$?fo9uMgp! 
zicffZKSllXoV@T*Hsx8P&lL{g)!UfS{7*q@o%J~B>V;V#f-VKy`6naxg;(I4e0&vb z*LZJ~Z#>}TNVUE7woU#qng0{~jg4RPg8w;^4OCNXr;e_6pyI00xBE0p*9b|tn$7(# zF;f1+U%MH`Ou%&NGPp(rbT0AVvX5D-RUoGgGglygV|T112&7yqDgL3f=lpM6z8@9E zbGUQ2Wom|XE}Ta(6k5qU&FQgbe3gaPhS@XYNn>pUKfZz5&tk! z;4CuNekt8=zV*r3@b}C4nE=D7fi)2`QvSabVE=V=U4Ekhq?rZ0o{I7QQ;O&On}K4$ z_X)gre$4>>>9Kst2Ez0=>WUK17X zn!y;*zcFaP2?M5bp;43P-)6FMCjK#48{%KZ{}`CcD>KeN=w|=c;Xhv8bUzdS8fLcr zU&YS`Oyxn*wKMMA->v6z4X_60yCcrl|M(ZM{8!F6(f=pT|3Ad(UpXWt+{u$i#-m?q z;%ii9s%t-VXA~%Jc=gFM^Fhm}-V84~JCiIQT?ylmvzmp!E&GpG7r4%DCQ2=ev&HIJ z#0pCIcQ8Y}SWsS!8s1ur{j@%?Wuq?^WZ6D*J!xLD!ci;$nk0Bzrptue!WzA9G>TE0 zjoVz>p3=s;^dU@57#@F(2qKILuD+Qy6tpBK{oh~b|MTKZbIM3~dQ$+&3&BsLDaxDy z(3}P-C}A8{&FH#N6QaMB#3UA{$752#I%&#ldfyL7qSq1@RwNlo+{|`g(Vt`r=hmAP zG>On!sqIS2{7w18X_~|3FptwUPssem^2#&mpWqT~64iLI)aI(h)I-+DOBI#)<(K`q z*T#3qf;LEaYzTfgt?)B`ekGIE7&{eFFD~sIZOzV{XbzYNKK!-LgS+CHJ{5f z^S5&hZZ-1bkAz2Ory;S;5q=)cufa71wfh@QN?@E&iPzWjb_RI~38N`#6Cyk9r|hS! z$tTqSva;UQ{@J_sBb(A%QODx;WADfnVYsFpA}d)}({PZR-y=y+@rM>!ifz_U zYMT5=bk-4Tp6E(Uhtcj{XR6P>xn`BIKvi}BVsD|T0#gB>zN*C#8c&JbVUkIRQp03J7@jws&)ziwU=~#t22I{!G+4_zHc!b zLbg<|I&IPKuiwT5ty)N`>Q1|SJu&SH>qkVI z2XP03*fjr0D!h|ZE2q7cgwKp^!}|S-!E<3u=biZ~vhK!j)_MGV2>ID+TG&-R)+-gS z5Wi4=@Kx1wYKO+zvy`ioXJD&dKA0%vRz=S3`xe9Oo?8VIM;kEN$B}~~oA0lO>BM`q z&8mlPj6SmI`zgIyIo(U*d>Ula96X|(xBVv<0DIAX>pX~PJE^p>NX;<_E zcH8LT47pTT(kW-k4R45ynh~cw-httAEk;z|Vr_7g6dZLrKqAuB@BM{uzMtQ^<-JFF z#Cz~dUVVB23ykF9I!n@ETfI6vQj+DQOxCn2JsNUZP#@7bYr0!8;YEGvI~ONGBUnN! 
z0NuI0w%9&W)n@VAzH!s$qGf%eUZ*ShV!ljr8PCm>x$n;^>?plDxTmFvpq)NDTWFka z%f5O@n6S~#(E9*sw1|I-w`w;VCd0q-a3Y?|khKG$iyW~UM2^U~gZbvg(RLjkW8Bl0 z%WHwAc{q&yP~Y|5^sqOJ=p4-8mrg?-hs}f;sRCO|U%sipwlyNUdnF5ak zpRFC?ePs0N_Z$!axg0`|NCzE1;)npptAZQW(5Delfq0Qs0lA(QbJ5SP)&no?2D22s zNcs}?R~DPV&$a_0F+-)kT@&h%JJsWbb@LUy!s(uC4EN;@sFIfrsq2Q@M-=dnhpGR2 z3zyXe3SVg&xrN}JBibq~;_l)3#;X&SLj4r&$_^y^$`|C`hEvA|Ut8#=hp>V{DmJ~= zA&miSnaZ@z*4tnhu2Bxfu4&!7K~Xb<*gcN7f2}!A$~yOUz)^+UE~UlsN1U#yciq6F z&D;(vbVc%^kK|aA>h?b1{ezf%`X-@J;o}ve?&*{7=MNK<4!_{&-!vAYL1-64Sbm4w zSf0f0ofc}ms=GD0-9dWzrUH7qKlG}UU%rA<%~<)0m6qaBq- zyxFRZ<0&<(mrR4dhT9#yb-wHH!*iE-JL9#TW|iftn&(=ZK}o|FVpf&@#(gMK3hMH$ ztao^uTiFQI5Fp}kbTogbzP=@46dZ>g*h|lx{*mzd`s}umkv=?b^yyCUngWfR=sPzp3g?VEdhKhd0((- zF~hn#&4L%_VIiJLglz*?SJy^`^O}9f^A+wkOa^>gSGwRUt6(rJUGwnp&S@HC%=M0V(Vmi#98c z-;@;Bl|-5Fv;T`#nw-1`(sl*IFDd~yJ6PS!^&dKpAaOQ{U(&37V%l?ZoV4faCA&F1 z2>!Vdc{n<(&=QMsl03>jfzPX?z%QkexN?x_42E}-U3wM zad7p(u2oekm|}77%bS-CBQncsee}I%$$Hg$3Dkjvwv)5#ZS*b34sS~=*??V9QJjHp z9}q3GKhVNysN}_`q_CT;sbxiTE*UwYag7w;;a!W>DiVEB^JwX|PHVS@a8`m6 zETzM+*q~Fo?t{zohB8XmpX;BQ6ddHiUYFo#snOQL*Yc_krJ`7}ODp~Py5IO0gihLV z8S@Pd?3H;RjBrQ0 zXS71eZ18jvE|vFJ0>-UsBF` zlr^HDR6(ON>xU{&5P`d)gstxdKG7nh-s)>Aml`Kx*XGsJzAAqSwI~gO(k`t>Q2pkY z*Z`&V?zuHhlRjUlso7bpOlWdSXr^m>stId_;Ei?edp=TsFg zera2^M$~|g-5Au*_Y0+D5m8mbK->F1ukDA5yXYx213kUe@DsRi20kbhW-danI?HA3 z<<~(>AzvUip1S+h^8`9i!Ge?#t-<8T!k5Ukqct_RqmU_cP(&7x>x{6DKHk9O7&X#6 z^pEv951z1LeHIgYV57@v)Qy^B(%33TDOK6LiaaqC34>ddI+L4rym2+G)Xtz2X}?pv zV0L3nH3b|$AdXYT8Se#rBI5k9TG5n&aPOMFIc_!@@@%H8 z{XBLWilKI|(lAcr@2_9@fpOi2gj^-uhMECD)_3D<^B2jc!);K?-U6N}W zpQG?P4C=5?7!iLak4P(;h@stf z| zTcEdbG0AflXJzEh^wS6fJALZ8K8l?o#bp_}Kc>U9QO^YZtrKFjmX)O0Q&(^rs)7KGQscVtNsC6)kq@ljYp^)omS$X>JX zI{7R!lUB<0*5#qPN0Qm56*VDj+hJ(Uq*S?WXhDO!Kvi5kkIJ2~xVF}{=S;Bk*&X`m zW&P1Aj{e1gx%$2gxvBAFRdU044aN5baHvv_1IeqnV*5xRkgrniY{}LCw()cRE9|mQ zR@qF7Sx$|aisoPaAf6dZTFo4{yW_J=NkaD#7kev!dZemuN6vaz9)*dbL66;{Rt<9) zdXa!ARfZ-Lo%SIZ7aVe9ICsA2 zrGiB80%+L5zH8rR4A9pV-Rp@v*WBj=!~dyuOvq7+M$E 
zfdU=MK~Al@4w7Jqm+g1vwRbv}Z8J@C%yT9K$zRM?#M<4{h_9;-v^tiFw+rwho*@K{ zWDO+57yCp?{v2TQ24UwZZ8EU06CeZv(&)Xg6gtL}|Co(7mHUtku{6+}Zu3V98it{l4Rm z317@R!OA>WhkQ_Q*wz;7($<5YN#7GIWZvzWl6LMkxE~j;-+ttmuDxo zKUpxswxkoN9MgUuEGW(pKFBiTLV!LnMb*L7Y zA8>zNVFOmgB&E2(hmkz?u; z>7UA+4hKa4)uZk#Sa{iv;MCnPe4JtS@VwBA*=TDd!KK%i;&~w<1~YU+KS$UT+BOjM zk9XrIWAqH20_+UiIQ3;$cU0(`Iq!gUwZOYsm>zLV{t{+cJGMU$!7+yG8TQQ$`nuN& zoqgA0Au6;ksZv*8N}EsRH6i5zr*28)j>R|>2dry;KCeG!w;Tf(v_CTU+>HM}?R{rd zliSv=ic&-c6ahiHf`#6DQRzsL-a&eoUK3P6lqy|1NQVH@Ye4C}Lx2F05-$y=*6d*%?~z{IT&8H4$R;uU~W;b)D>Om_mxg%`{x z#kuMJ-hMaCw*IsE?jBUUiYHHIa@B>=g?~fR?;R6#{&T6Pb6PMLWQo0hf#qAbrRe6k zp<$JB$STrAg&r__b)!2qx9w1K>ay(n;?@#vg?|Qp|BbOu5s!J-UVPOK5g}z z%pt1;kZ8y9X1d2MyIp(jo%&i9HmdAoGM zhle<@`yc4RZBPyGw%4x)NKhNPWqYPR4$>OT>x1HK464}+w=moD8JE7s*VP6`%pZdr zo{#181^;fgFt;kXu|q_LREefdp02OXPjI$*)wzw7er8~xl^n2B|0U1PGS4O=lqF1F z%DQ{Ml>f8F5@espaip506P{Ry-g`^&Z+P8(-p|M{YLcdGl-V8rH@X(Dk~E+%WLkYR z#;*4je^bdv+A+HvCfe5dOHZVbeh`crao7Cy=bx8czTjJixG>ZuOkF`z{*J&Cl zb>+W_>p!>G8)8VMPmI8Su!<5R^cac#nZEQ|^uOv!f7uy4h(;0*n*Zhy`wz5O^*M2D z85KbP$xf*7omW9;jJI3nKi29@)X6nSKK_^7oqt=3T8Jl1tJ0VM6ppIEc9Ym2=S#K! 
z-Kpk}7yW}wx|tmPuLJsbt^Y5RxjteAzh+-Rq8d$^g;QjMkDV?$clZ-lct;e zgdgj2u)E>&*{-_1J7y9a`Vmf6tPY>qRg&R+^XghWvozNQ$e5R+FIpWj{ZMcYg`~;bFNvv*GY5K|4oH{>H6PG>MvAHjm?M zB`>pdgLmpMh&SKR6O8_JSEt;fvdvenD+e6C%|JiNpHy=*SUV{>nK0gPey@?D;`Eci zom=ldbrIMvm@wf6`sfnJ;W&r|P5kS}B=0mSIwX&5GrHcC8z^JVsuP8oAQSpQCe9n9 zp>)yq=^)=Y3A(@58+q5IF?ov;VTbX=uJo!D`wLLgLt>7j`fa{9aPsUqs-&27p5`VK zmnO+Y1ngpdD5?0uc-yNkp${C^nAvRo$-Y=I%~;FiRO$s(R`T>?2d-Zlc)5}2Wu!s$ zEGu@o>q4W158jL*5KbX7Fz4~y`bqJzt5*_llfy;B*4FJiS0=od3p*nh1385AB3~~) zWU_j7i}|lT^^95-eBF@zTCZsuj7u9WU98rnoy_GZdSsS1=O~9?mo~Az;>y^-Aa&9Q z0=5W-gqla)N%wE1c*RQed@$8%(tL~nBK^G8`@Frsi;Rt=K81XW)nupUdUtEjbog-N z9o`437b2kvqD_ugcvN$`w`g7^^5@Ys62j-Sr zoMV3VWEg_XIothFiLaGnBBZ8@Rni5jx2IW%KO=!wt zRi&Cah^)$2r~0s?21M9tNd_?%=!lXSaJddGBPLu5ozY~YR{JA8F2&_W)$4lx%2%(` z^Sw|QBe_LSMN9XWwNfFCKEGt?K%)roPE(FWf5stMwhhKcCBHggTKQ680=2+3L$8w946zcjlAY{9FRlY6gzsMm_MK1OeSS05?JfyqwX&4?t%R=aDh$@IZ)cx7`ojDK>y(CB%E{W zNqaVLPNZvBgK@7W-27QAg8J;~)u%WjNe*9rIf??Ag8NN@Y7nhgUt6%`39b-#0l;_O zbR~_np-wQqIM$iPxV|4T{dLjAS?uPGD1_uo#!r~Y`oSZ=dZz*yF;h?Dd^g>%gN2Z<&Ye<-8o_(s%%3OK|9{zETDnA!hF?XT%?3u0)zhMey z&ot0dWBW45tZT}xeN<}R4=JujgTAI)V*;HYgU{*>OVm=dil`bt2bRW`(FMFGVMvEXTo?Xpql{RW%Wtmqu&#+ zeS5x_xq%4uaCN1X#~wh@QjdDFTVrAmy6U>kp+!YKuPW@krqCX9huI=zinSvY7&l3R zpq|(l#wI+L_1o{e;CNm$vBd7k>!fa&R76V{?3^%v$d@y5*F74k6@vYpT+_vjFbnNz zG%_AQ0C}gon7{$E#;zPYu$PEe4~shQfs`N!1%Bt^BbKhT?auUotjylvxMNYTfp`n= zgUIcykR460RVv=h99|;l%dt#$Y{v(Qk$EDG3V#}}l|2~fWR)$|61U1amX&*VUwqAv z8V3~cn$|z{TnS!uwItEVoO+{OW4b7{rCw@qwA9A~TWs}drWFd)DAj-DNEpj~HFY7%2d+6C$72sQ+Ei;_uQvYdvB}NAzXCTVEEaG1 zX}j(tkujax`jw_i#BdCbzA;}3y+?dYF4mohWCIwtQH_Z!*bs^AxP_U_2j=L@^E;i6 zT$nho#+;|_K?6)tio}-qmo%4=j1k^4!RPX^jGl56?!q<2#l;%iw{jocYYoV`R=h0> zX=R@I>rA9(e%JDAmkWuqUp!1TywD0Riqm9M$_kkW>CT0|vt%c_e9_)K(-&c_5Y9Y$ zjzfmS(8o{h)9bzGx`d{#?p5Sjns~GxJhA%r!?MQeIxtJ9Cv%P;ACq>*E9@}VXkS0bX`8`RNd5V4jf0yyt=)K=E<9v9cV_?F-DyiMn4)ico8EvAuhW;egd=BD`;2 zktLiEE;q8#@tN+ldrA@IF0KpHem6MP&Ab?SOU?DvV*n z+zzld-C0?ph1YBIb>XM?L;Nzkh-&3Nri>IeA(&UGns{#x>UQO4ZZRLtN6pttG*4N- 
zBYoOWkWp*1046qgLYG^YGFQLZ)T8dZ>Q_E_4#SV)ppC*Fax8BqgGxh=T?{~grPCu5 zvL~ngSJOk}y=#sGE{2hw_;apZV$!BNqvXk0OpDsi?5!KouK6!6wnWKyGz~9JgbLn2 z*pNWrmkXUq1#_TFGq|>V&Ra-PA1=b7H1sN;a@?s>b=%pqr{q7)t@0HvBft1%u)qqb zR^jIEFI^SJfGbWe?`<)^27W_0ZXSi59+P+N9WQTX{@iTlYB}8}2_U^1u{U~87Pzt+ z#NX`V6@P=_q#7n$qA2!q9A7Z)TpCO|Cxl~M=V`ahF%^0}yYSZ42Hcc|f370YdD4vr z*&gCNM;AT`ExVyKalk^7HYX`aD1yk>MJYo<`iCO5jqBtJ(PW#=5cvqIv67EJH}Pmc zWh{4CwGv9|KE@h1`@cLrO^4Z~AsaQUnvX1J-a<*RD=ZC?fyxszfCc3 zpmE;lV;mp#+R7 zt4YP-B*&N2!K2`9A|s@qjK58nX0{t(d}>0?Qn4|1wcY;|0hnlP{u>1P%KiB??^|I2 zh-M{u{;XDSWa}kpC~fRjuAjrv+v>-dxhU0IW}mBpy{yGRsH zMDtZ!X2M*>jafzfT1mM#)~~o$@mMr<)9cKzm9P}nqon-sM?M#Cv%q<8GR~b{S{8_@ zuizHoY(l8Bj^C`dJhc5KTCxyglYZ*Gd!x*GjHl|d;ouZ~$~?}SY_?%O0-fo4X?qS5 zRT7-tW=Y}_3~B?f#`sTX8hB<|&swT{AF~vHJu5-X^%TEsQls_K^};`wb>Pg!g}18K zu{gMKcs{raVZ*(W2e6v=onygmqCL;v-@z#ro6f1HMS{VkfQEhz7lei$r$+c%=8?p) zr;_VT>&z`ZO8dddgsQ$wZP$3I3n1x@P!&toqw2dlGjK-ZfZQ11Wgs@3zlzamybjAM z!KN(-h4JSb7KDDy)@?39oln0-OZQ80s-dnC?T>Q)fcYZGfsbB~UmFETLN9%M4} zB;~v(<2cQjMa-Ku(uNz3$*Ba)pvPR_GVAEk%Zi7jIJD#0xjd^RrR zi#z^-!e4R0VZVm;m}YUM&{jh}^T*Z$c#nX7u;+yXTHk=#2T~;_VdKY?Wqo~pCSgqe zH`P{@Nmdx0{JC$^IemlNJDG%NgXu7`AT0Lj7HWlg!7u%;o1A%A0*=v@Z1zZQs*4lP z;0H5BmQ84cgBT&dZ7Co@C7APayiBNsiKTEDim|9RnWeDvysSJ~q6rbNiR6czq> zG)N0mchrG(DIjivUw~Gx4Z0w7j`rE_rvU9)tk*WazSGJ%|0$yyEb3S|rL)@kl_P<@ z&cDi~6R#ScYdi3=ZeIE#r0?wQ=8ZCh>kAUSJBz2{CEJyv2i-YBAt;AUS@GpE zu}arsF0zd>G@Q|1af4!*g-)sxKuhYlYfhU9@*@u|Ca^rZ@;W=|)liQ%bw*LpcDhQq zVS;>ULL;c=)cdQ|jic~V@F>3zqi`JKY_biE-~^v`F!-H_URD%wzg;SNCA*7SI^4@q z)a7TZ3O+P=83=a1Vm6cR*bkE2N{82gP9sZ#zLNfGxEGVt?OgHhCU`3V=2eJFcacrc z(|qH!d0@R{tS7y5MUQ0Up2)X^K0@{sG9UY;B!=(TqnvTqJCnXz;m*Qi^lzkBUc0C} z&FJI5G)S7}(UX~4MI0?50e3-$Ux5_z_)$`Cf@u0_6Xa6QTk+LeQuw6pJ=-gnjlSG! 
z%o(~Rg9IE;5(2(#*{6n9HkK#N180g9D~zl{r=_ASv3|{Ih;)*hd`@k9Ta_=9TdvPH zpu1RAE?nlSr|cP9ydl2RmwH23i$xsgzr$>)R@|?>&aq#Z3HC&!bAg+uD{LyZLn=?R z>QqkW@W_k39Eui;jn;v5%ZOS1nX1+@womMzs1-Q`wQn+_W#*R|H0JIq={|2%Pc6GH zE?TM|IXnB{V+3_d=6fBPh$O?tsY)!NDmh@!v!)&+8FJ!JiocMXkaR^gLU5R7pz9JW z4BTVCMTlso9w1|J{`im^fR)Z*izI&&Nx}T>LFk(biV;9uIaHYL5P_#nWSox(v5iPN z->pEtDu`^N*`+A0nD3Vp;5)u<7Zg!sGr+3r57aT4oPZ@3{#Na=95RTYNm7FwRI4$p z&nvA=jJ`l2RnlZ0QppGAfhG{$Kji7XNsYU((2(Zc<*1Pwj~2;eOFO*E!uf0Y$a&R^ z9{<2Gpu^kHpTOsLVEt!8N+?^)=L;7RPOOeBF(Z64n9t!sUH858xSCc%p%z2?tl9eW zcgOF^=brxjNx-U-Lwq;T8oSvMoCS!$%WW+^qB|9?9)sPoH1zwVn9nl5oyFtZWbdff zG<9bC^#_0v6(xcbD&3fGJ)=w^$#KvMXCD@7OhWz-F^;pf#;AQHosTpDsONjJiC10` zT6*@uvBWCwqwkquR1t)p7+qo=tMEzoY+xYCqHAsoV2l!#cYlQC=Rya^YXn-Iqmy*v z*aE*X8$}@6B6CwWxMXrc?ONLf?VTRrnS-sVy`~xp;zWcP3+HY*=7l&v+1xm2Eg*RG zAelCPM8^~@Q?+;k+{>qXlzyu%XSIxTIwWB@)A}U*k*(YPJ6%gOmuaZ~QOa@^hKkE^ zex1G=>&NRP)`zcJB_iG`J$lriz;~!U=Ystruwmmb?Iu0=Y@_KhQyE8SC@RJFRc&n3 zKrBFl+1Ch{i(&~emR78qt~(yLS~@%pJQvPA7h>?*h+0RBeed}hesG#KqoIw>*yeax zBfgL?o&1b*R)BWanA|J;M;jDMH%x2lR&vCCgJ6Aer~k*>?zl7iF|qDm1|2&1*RTB2 zCJS8Q4y3S6khfwSP*8`BXPTHN6LDU*LVNiV&0}*`RBd5`44WBd4B_cm6Z8Xv_gWsf zx}u}A;j@aW#gD7OR*~E{fj{Z>^gv?gMU(WbZ$RbfcH0 zZzVh$r>CDaerKK)mGNi?BG9dPfGMAUotF@bdgtHSlHVUPPEt)!A>CdAW93ui^4ENL zF#i6&7x!EGN)SQs#OIy4S#u7t8(%u9pK1K!{$Q>2`^Oi$tRV0J+>%agb*6if=8d+< z4>{V|g;nMz)|4QS7jS|iB)Cb1m&&ZEp(B&8>|jNMr_f=S0KJ3tf!IN zB3+7|NXqYTTlNvJm1zto#c0=LjmFr=v9ynMZuc)~HF93zVEae0wmf=ot(`Lsp z!L3B;@+5ZeTZ{J4&9CSQH}MmUU-$kh1Qt5t*qr4d9H@{%y-o?L#kpjVEBwx$WGHG+ z9A(x1(&1h|M>j;nrGr^<3h&0CN1{*%AAN1#ZoGawfqs0$T;_Z7ZnE~0jP;pMo{&y= zOeG_w>}Y5Z*7v9EfGPtAawKBapLO)n_FS>XCrGcFUTCUYG`VkGP$&Cpl2f~B$WpX% z%bF}=y_r~Bq=7kgAac+9N0vRcwT1pe`P=xA*{ZIWM%$)-pW(!4x(~Y@fHjVYC1407 zi9uwsdw%8hF$@azN4^tfbpk~dEK5IiH$00!PiFN6Yf~_PVp}#CIsIO_ga3K+d}gCbHj6iT}~W`O$jWY=6=xrZp#L zMV`j#v7L8q&|;fPKDMV+BdkjQ7x%@_AL-=*j9Edl0}roAvP^Q{4{`FwWNITFPzz4g6-7_8^hVC5EbLS0Gp=_>zI?G^(VhSPJHw_ zCSJ-g=o#gN6$h9PnlKmgmpG$-hoeXVdd!-Kfx}gEIP!u!1Rrmp&x6zLdRBeiuIbqw%OgSEn5&%(Yq^#=BD53x 
zX~Fx66o|LkE~$ZQHu?cRNN9UX-q`l3Z{*!HZ2Dfw$oTl(0W!-f0RVHxV|;N^P^Xj} zw>c=|!{>K_dNFH{-;s?*rM=0$ENoNnQP1-!^da)2boc#mg0|18{eD?d*!_uYk{&JQ zTsP;rrxZpT#y7#KPXL}O=<;}ka&D9Q2c22KiigPwr{wo}vpT||(zdp4R)dk+?7DF8 zXZ3aSUjR%WeK9&6#V&}~tH1V5n#cJf*)Wjga+ykkhnmq-I-MG@)LxvM!HX;1yie~L z*c_AviSN^*?Ge)Ky%AG&agI4YwP7jutydjWn-=NU(H&$lVf`Za^FMsG1fAmpBG*eN zkZz3lw&kf>x-ECyKxv-6(&N=H7UUl~*O@JbBx<5rWIE{XlWneneDCX!_wMBctRAQ5 zXxZi^AZwbnG6HlvNoeW3CZv14<$7N2+zD?HD^6CqPf0TTocU$MTE6%{^ zQSOA7p+ZPXZ7Z+E2__P8)&@ZDAKq6+)9p**kPrT$1Dfo8_6&)I0h4*^s?E{AzEb*gnvu_)lrH3|eP z$rBlGoY_MO(x<5yi%#&qtmN9VVV!Ly=kt@2=|^-dRBKX{`a#vRGm!m|ItU#dGvn)x zhyiX#C8j~7&)#w2w9@|O(qQivuNbt2$Hcz{SPWq=B?`AHA&#&%QuS#t6wF3wO3mq|T?* zhRnErdyVDj5f2dAfnv|6t%LpAU+kz{N{S5L1J0K-e>V;^Pj|eRdGP&_%+EfuM>nP? zA`ma}#~#@4@5#6#7D9mf-uOh%BzczGG+SCW6`@k$4A6TU3+`L8;{9S3H~$s^Vpn+i ziEjMHZv^&>?Lt&AA1N5;qFo{s=WX)<^O&C6TtC~Y9_!T7A2J!+QIE8DyOtVVGdprn z`Cl&2j~o`STtul%LmuR;PR<8mTE%Q`Pjto0{FqL4ITM!`uJY0wp;PJodc~R{nu$*# z@NVnA40l@j&9wnt%ZZ@ZfWuR_$}(V05LTX^0-yA% z#0KskXNkhkB=?F6ljUV3R?qv3w?BtLZlHOJPOgR7yt{hQQRqBaUI;(T(_Iv}!F@OZ zG4+WP(yfAuS5oq$G-J&lk+(CD0$YB!H#xSu@ zLM1C`f_JBW>(g8EkN+sMD*44qq8*hd>=rS#S}Lh`dzw`9k;?0^f_BuVr*!swrf+*s zKjR6tCnHrr25vXTgzstQ(dVMMu38sH+z%?%Cv1G4Yboa(PN!wB)wWs>e1{+r0 zyU^A6;j5z8uYe#vD|QCxN9G=j_P1ZeCdlC=BncqO9YS=)Jm+RYMPQ(B$Kg&xgSIvl zh&|H@^v!iOQ013F#a1k#BV|;mP(uajtPH2#ny`!<%{+((Z>vOd&@@B7E+SpN3Gook z_aZRY<&+Gdpq^@Wc+DWBIwHF)-n1(4UbOiwO3!lG3Z5)1ON$0qaga}m4o?*oz~6+Z zHt$O@)L*Out7g8*`yd%_bLnw+nO1%HIPRDBIMSn{mP5C3I(Q8j?Iky!ZF0E8ha2C| zw(P!2+w9<38b&pfXR&BFmJa!-LeA@&MaGvKtYKy>gJuD^d3%Q&jJkj5bFkEAZLMw> z7ee}Edq`UAp*KxsZ(=bUm`vokIOPCnJ7mX%tmcp~Y70bx{a>y&WEb#*MaEb*&m^nH zHA6nyZCkzx$_$uAd*HC3uGKb*up<0Vsk7~LtCzW%hvpbFID6%mI)kfmXg_4n?Rq^& zjNh&pGS_U@aqQ0Dmy84jjTo1Rz!rCRiUNwL3cH9;!GnttD%_=N8&7$W==AIA&3C+M zKfE{60^w_*U=ngez}Y=pndyh+Wb_MMwUIR92a z$+5>d`kmtfR!gqmdk8dNT`mw>wi1=-_~wSr&v){@>*c+`Pdu)!lY-V~B2D5~{ z5~yF)=nTEhsQAi!FX(I^`1Cf?7{#wx#?F%BA&5u0&r9XdSF?}s)cel6dpk$Y#n)3w z1=bdQ8v5vR=9TNWMKQ0a7s=Wu$143K`xzN|qc3Gb`X3X>-Tl$1T0FnB0)28` 
zq4?6cs*g9r$bCN+St0F4F^07Kj7v_IGZRTFhzuEhb4R zMq3|P-1Ha0?2s$n*(FPb-ZC`J*&Iwg@7~GY;L<#rlgbwQX>Jd{J7cffc{UF%e)k-x z)Q#i%GN@8!&tuD^CS+cd__PvgT3K4zbvBGNxs`nE^J!C-HsxDJE#1zCS`XpFlu`E* zME~4L%@pA~gvKNM;A*F;&PKh!gP9~J!Kaymzlv1*oX}sMYYs6PIFlL23pA%&J-#^} zYN?{v{bA*IE9yt|z+MOn@Hx7fXGGAjT)nJWw7KNQBx~r2E_yYzz=D3iWx;X6qW-j| z1E-8>tEXoIS*?rs`?Dv0^*bk^E;Ra=FTMrc>CB~YT=Uk}3KBzyZhn!%lW`XpwDUZF zG$%pngBp}5>500%R=4TdzP(CTXM)U9DK-P0hlCt!Zb!P3Lkw7qtz|&vo2OCM`||P~ z4|pGV;d8ns#G2FLb-wtG6Yewn_QwVNwyX^46v}&aWdfX-(~o48F@~o#o1HYPF%XN= zs@so7L_?&({5G&)&xJxA9oxmP=tU@b>9IN5R+tK}w}<>z=g<`wLVw-G+{}{dy89XT zr;wTZba z7bjpOUR|h!8J)#I0<5a1Y?~Lc=BUz=zphhkSK{tCndt6m^LJ#!;R;2+csy^&FUu5& zgkP#*SX$&;7=EJPY9YJc9}yiFv_VFI%d;lN-uF{6%^8m2*i5g)5Y`nJ5J!lIHMuSj zs&pjOvFC_4{3>wMCMcmZH*FU(h93KTNx}hm+cOWbY|RJ^Ypk*TO@>v)*=@n}v`a%- z8GM(;F^OlpPAC=A!I_Dgb3Y-6-JYSnLXGf{iz^Yd5w^{yeSBAqsd%5Mw%8tlSM|k; zhu7{JS>=IY9bqudFM|%_B>elcOu$w9A>L+Kjv?g6osAGIdAW}n;Mm~IzErQ5q02C_ z=azO12mUy&nX3@h$=`ayZN!~UILfS3+V^N%D}mMpMX}a-nr0k)3&##^#kCz*i|9Ye z(%pZ3v|@Yv#dwF;eU4EZa+Qe3?R~$xn&$?6J*=KnM~JVM>Bl-m+^ronO5V)=D)p2l zA`ti?{A_G@A)x&@9(Gsm+gdQ-(_YxPQM}5Fd6%kt+nfdCKq?CSfd*80LS9L$UqGgL z5sDy?Keab}@@xO+^E-7LOOO$5rq(U-@Sx7H957W4wBY+D@<96Mj;jg|Dk+p>7i19@ zy2uY&_`a}7zm?b@lqf#!-4aQK{*V`Da#)$zTog&sJq+*Ml?M$sTIES{r&DyeTtGVl zR({``v7F2jIlVVBjQ!eJ?fg&g$vC_1dZ-PgDg zds-XKo-6RpsW)(a>@8FKSv1X%Vfa!6n7N6`T+qC78ap)~$AyWUFC17}gA!vQehp$; z-DgWhtdD|fOMJqeWdRATtYXGBAx})bva@YTHZNy+W`gQ5na(U3@?A$R&$JliKkOh} z9g#N}7vwX}W@mkCqxRYzOuqW{-hPG{yz^l@IjWlt7ZE-?zO?0YbwQ7}Rz5t8bRm!~ z)Gnx%)NPgD#CJ+5>7KBhI++15R`%KSz`9CH(M%JtGCd>a!mnd@9jp64V=p*!Dia2#4v zV2tXzcn7!6PJ&CkPL=hCNmg;}Ru25R>MUD6+;4$R9x*72^ymeh1f_F9J?ri3)}yVo zEBobaQcEn#l(Uz6Wulg;7vg1nVVjv&aq0;YNIycK43>Ibqp(_2k|37QqH} zYA{PN&%1ZxVdAI-`pRMU`9aaHd!qn+MNX3R*xo9hweGBz_2F^|xlzCLo)@J-z>np0 zS4=|!oC+>AAo|G%K0Oka@jJ_4mu3G0+;Ag!`1z}5xirdo_s@|~&W|=m2`!Juk|8s| zuF!+NFi;XEX6S|VfcdYznM%jSa0MXDi+_Xt32dwu>bYS#)J@5oqSJ)}b*A#r zMpW)?b&~}yVsd*;@Z-bH>>)FLycQK|>ix_+ z=sfTl&?u`j%kQS{1E&1UliM?aIPJjgmc=0Q^6&jttAR^lvovIzjz-71ii3}P;x+j? 
zHY*7u}1&=A$36 zR5P}<(@lwbUi)c;o~;>3+)?ak3IPzd!2=Yu<|pQ8A46~$L%+E(al@89(sDyL<{ zU*pd*Lpilmv-zWq>;{ zbqwm&SB7x}viMJ~4mEC#ePX|TU}P795-ie`SsH(_Q_4HQNEO;bhh}>Zb@TKH%EwqU#p8)J;oN(ACXe7)OVc9+2FmDsYMAo*W3>&bNbBkroBzA0mG$ zDO^FXIp$>Zv}-=52?Dmd!1!x>Uk?TKXpi7_kOoY3J0tj%cU@**xO=w+;S^w|*b4g{ z&U<|740X9ZIE7)-x#!fu<2fhlF;r+PmEl&7!^%Wo@|ueT9i%C{hq*a zi#NX!k==XGDEVmlEDuD7N@-$c=xu~Jakbam!GeeeD1&41VSKrX@}jdpy@#ck)07X z`M=rMkte=uk@Q+MM!V<^_BIsJ(QYU)YR_ovl!nj9^P)$L5g^NN{yfm>fS{M4(Lx1} z+o00gUhsM`-pv7B8po6Db$~>yOWF_@p7@-a;SbbXiCEXA7L0tc-+|E9M z61BG5CA=GGfy{!H74-Gf@3|ICbF+lK=)+(6x_~geCeFW(mfk$li+`!7OlvXGYIw7u z!(|uy)#u|gnTD(j3*~k^3T#$qauFxB(xM!{5{zo4b=e=DED|q2ws9$CD!%p?Jfv`S zn4HJ@^zjmR*(0+Tp097S@zbhpADH)~3Dq#=YM~3k_dc9ptKqVyy}+xtS^50Z9=Ihbl>`U5uw{AS+Z!HZTE zBAk;6->xv{t5Z!EDP^BI9ovs}m)@4wqO&@y6caEn-xAo3Z_3y&qKYN5?g7(24D&Vb z3Q?qPzh+|BQDF{KUgCK@MlryW*;$f_XmFmZN9a( z_9i2?2GH;#;w%yRdhl`Lt4%Ti_0t>o)@srKicG}mX%yNHv1QEttZ%zzv_5R;Tdo=y zeF~Us5S=-UUVkXRd2*(PowIMuJ(Hs^-$A zjzDBg_@}7<2Zk`JbY-sn&S;<3PNmYHVBdfLwzlM8({;C8V%mxMFVFsoLj3XlJycQQ z>r!ZK)~wn;g~LB@vVR)QsNV&BQt+wUn9wjQ>-_UM5zV)~%a-oGjRr;@EwZBfaIQ(> z$-8)vRFiuGU-fp+`Ioj7p3>+Pyf<8eE*bux>VI^V_Wa(pLdL)CbbmGc&we;PymEWa z)Y;S3?EmzhU#UNr(fv;iSN|`^0n_R+dFj%XQneR~`v0|1*;R d|Hk1Y?0Gk-Q_-~S!%M`Env&Lw>SxyP{}*j&)EEE& diff --git a/docs/reference/transform/transform-alerts.asciidoc b/docs/reference/transform/transform-alerts.asciidoc index e3ea82d34ec2e..988dc5effe956 100644 --- a/docs/reference/transform/transform-alerts.asciidoc +++ b/docs/reference/transform/transform-alerts.asciidoc @@ -18,19 +18,20 @@ refer to You can create {transform} rules under **{stack-manage-app} > {rules-ui}**. -. On the *Create rule* window, give a name to the rule and optionally provide -tags. Select the {transform} health rule type: +. Click *Create rule* and select the {transform} health rule type. + +. Give a name to the rule and optionally provide tags. + +. Select the {transform} or {transforms} to include. 
You can also use a special +character (`*`) to apply the rule to all your {transforms}. {transforms-cap} +created after the rule are automatically included. + -- [role="screenshot"] -image::images/transform-rule.png["Creating a transform health rule",500] +image::images/transform-check-config.png["Selecting health check",500] // NOTE: This is screenshot is automatically generated. Do not edit it directly. -- -. Select the {transform} or {transforms} to include. You can also use a special -character (`*`) to apply the rule to all your {transforms}. {transforms-cap} -created after the rule are automatically included. - . The following health checks are available and enabled by default: + -- @@ -41,10 +42,6 @@ _{transform-cap} is not started_:: _Unhealthy {transform}_:: Get alerts when a {transform} has an unhealthy status. The notification message contains status details and related issues. - -[role="screenshot"] -image::images/transform-check-config.png["Selecting health check",500] -// NOTE: This is screenshot is automatically generated. Do not edit it directly. -- . Set the check interval, which defines how often to evaluate the rule conditions. From b0fb722cc5d8f8e60ac49112f229811ce2cdf7cf Mon Sep 17 00:00:00 2001 From: Ignacio Vera Date: Tue, 7 May 2024 17:44:55 +0200 Subject: [PATCH 035/117] Small refactor of ESQL lucene operator (#108350) This refactor removes duplicated code on all implementations of LuceneOperator.Factory by moving the factory from an interface to an abstract class. In addition it introduces a getCheckedOutput() to centralize the catching of IOExceptions thrown by the different implementations of getOutput(). 
--- .../compute/lucene/LuceneCountOperator.java | 27 ++---------- .../compute/lucene/LuceneOperator.java | 42 ++++++++++++++++++- .../compute/lucene/LuceneSourceOperator.java | 28 ++----------- .../lucene/LuceneTopNSourceOperator.java | 30 +++---------- ...TimeSeriesSortedSourceOperatorFactory.java | 42 +++++++++++-------- 5 files changed, 77 insertions(+), 92 deletions(-) diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneCountOperator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneCountOperator.java index d05593015211b..c7f12d1099cc1 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneCountOperator.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneCountOperator.java @@ -11,7 +11,6 @@ import org.apache.lucene.search.LeafCollector; import org.apache.lucene.search.Query; import org.apache.lucene.search.Scorable; -import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.Weight; import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.BooleanBlock; @@ -22,7 +21,6 @@ import org.elasticsearch.core.Releasables; import java.io.IOException; -import java.io.UncheckedIOException; import java.util.List; import java.util.function.Function; @@ -41,11 +39,7 @@ public class LuceneCountOperator extends LuceneOperator { private final LeafCollector leafCollector; - public static class Factory implements LuceneOperator.Factory { - private final DataPartitioning dataPartitioning; - private final int taskConcurrency; - private final int limit; - private final LuceneSliceQueue sliceQueue; + public static class Factory extends LuceneOperator.Factory { public Factory( List contexts, @@ -54,11 +48,7 @@ public Factory( int taskConcurrency, int limit ) { - this.limit = limit; - this.dataPartitioning = dataPartitioning; - var weightFunction = weightFunction(queryFunction, 
ScoreMode.COMPLETE_NO_SCORES); - this.sliceQueue = LuceneSliceQueue.create(contexts, weightFunction, dataPartitioning, taskConcurrency); - this.taskConcurrency = Math.min(sliceQueue.totalSlices(), taskConcurrency); + super(contexts, queryFunction, dataPartitioning, taskConcurrency, limit); } @Override @@ -66,15 +56,6 @@ public SourceOperator get(DriverContext driverContext) { return new LuceneCountOperator(driverContext.blockFactory(), sliceQueue, limit); } - @Override - public int taskConcurrency() { - return taskConcurrency; - } - - public int limit() { - return limit; - } - @Override public String describe() { return "LuceneCountOperator[dataPartitioning = " + dataPartitioning + ", limit = " + limit + "]"; @@ -118,7 +99,7 @@ public void finish() { } @Override - public Page getOutput() { + protected Page getCheckedOutput() throws IOException { if (isFinished()) { assert remainingDocs <= 0 : remainingDocs; return null; @@ -170,8 +151,6 @@ public Page getOutput() { } } return page; - } catch (IOException e) { - throw new UncheckedIOException(e); } finally { processingNanos += System.nanoTime() - start; } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneOperator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneOperator.java index 9a7abb2aafc58..fae0a86762b92 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneOperator.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneOperator.java @@ -23,6 +23,7 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.compute.data.BlockFactory; +import org.elasticsearch.compute.data.Page; import org.elasticsearch.compute.operator.Operator; import org.elasticsearch.compute.operator.SourceOperator; import org.elasticsearch.core.TimeValue; @@ -34,6 +35,7 @@ import java.io.UncheckedIOException; 
import java.util.Collections; import java.util.HashSet; +import java.util.List; import java.util.Objects; import java.util.Set; import java.util.TreeSet; @@ -72,10 +74,46 @@ protected LuceneOperator(BlockFactory blockFactory, int maxPageSize, LuceneSlice this.sliceQueue = sliceQueue; } - public interface Factory extends SourceOperator.SourceOperatorFactory { - int taskConcurrency(); + public abstract static class Factory implements SourceOperator.SourceOperatorFactory { + protected final DataPartitioning dataPartitioning; + protected final int taskConcurrency; + protected final int limit; + protected final LuceneSliceQueue sliceQueue; + + protected Factory( + List contexts, + Function queryFunction, + DataPartitioning dataPartitioning, + int taskConcurrency, + int limit + ) { + this.limit = limit; + this.dataPartitioning = dataPartitioning; + var weightFunction = weightFunction(queryFunction, ScoreMode.COMPLETE_NO_SCORES); + this.sliceQueue = LuceneSliceQueue.create(contexts, weightFunction, dataPartitioning, taskConcurrency); + this.taskConcurrency = Math.min(sliceQueue.totalSlices(), taskConcurrency); + } + + public final int taskConcurrency() { + return taskConcurrency; + } + + public final int limit() { + return limit; + } } + @Override + public final Page getOutput() { + try { + return getCheckedOutput(); + } catch (IOException ioe) { + throw new UncheckedIOException(ioe); + } + } + + protected abstract Page getCheckedOutput() throws IOException; + @Override public void close() {} diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneSourceOperator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneSourceOperator.java index 9b942114e61f2..64836b00a7e1b 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneSourceOperator.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneSourceOperator.java @@ -10,7 +10,6 @@ 
import org.apache.lucene.search.LeafCollector; import org.apache.lucene.search.Query; import org.apache.lucene.search.Scorable; -import org.apache.lucene.search.ScoreMode; import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.DocVector; import org.elasticsearch.compute.data.IntBlock; @@ -21,7 +20,6 @@ import org.elasticsearch.core.Releasables; import java.io.IOException; -import java.io.UncheckedIOException; import java.util.List; import java.util.function.Function; @@ -37,12 +35,9 @@ public class LuceneSourceOperator extends LuceneOperator { private final LeafCollector leafCollector; private final int minPageSize; - public static class Factory implements LuceneOperator.Factory { - private final DataPartitioning dataPartitioning; - private final int taskConcurrency; + public static class Factory extends LuceneOperator.Factory { + private final int maxPageSize; - private final int limit; - private final LuceneSliceQueue sliceQueue; public Factory( List contexts, @@ -52,12 +47,8 @@ public Factory( int maxPageSize, int limit ) { + super(contexts, queryFunction, dataPartitioning, taskConcurrency, limit); this.maxPageSize = maxPageSize; - this.limit = limit; - this.dataPartitioning = dataPartitioning; - var weightFunction = weightFunction(queryFunction, ScoreMode.COMPLETE_NO_SCORES); - this.sliceQueue = LuceneSliceQueue.create(contexts, weightFunction, dataPartitioning, taskConcurrency); - this.taskConcurrency = Math.min(sliceQueue.totalSlices(), taskConcurrency); } @Override @@ -65,19 +56,10 @@ public SourceOperator get(DriverContext driverContext) { return new LuceneSourceOperator(driverContext.blockFactory(), maxPageSize, sliceQueue, limit); } - @Override - public int taskConcurrency() { - return taskConcurrency; - } - public int maxPageSize() { return maxPageSize; } - public int limit() { - return limit; - } - @Override public String describe() { return "LuceneSourceOperator[dataPartitioning = " @@ -123,7 +105,7 @@ public void 
finish() { } @Override - public Page getOutput() { + public Page getCheckedOutput() throws IOException { if (isFinished()) { assert currentPagePos == 0 : currentPagePos; return null; @@ -162,8 +144,6 @@ public Page getOutput() { currentPagePos = 0; } return page; - } catch (IOException e) { - throw new UncheckedIOException(e); } finally { processingNanos += System.nanoTime() - start; } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneTopNSourceOperator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneTopNSourceOperator.java index 2c22d850daf0c..e9fb15d265fbe 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneTopNSourceOperator.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneTopNSourceOperator.java @@ -13,7 +13,6 @@ import org.apache.lucene.search.LeafCollector; import org.apache.lucene.search.Query; import org.apache.lucene.search.ScoreDoc; -import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.TopFieldCollector; import org.elasticsearch.common.Strings; import org.elasticsearch.compute.data.BlockFactory; @@ -28,7 +27,6 @@ import org.elasticsearch.search.sort.SortBuilder; import java.io.IOException; -import java.io.UncheckedIOException; import java.util.Arrays; import java.util.List; import java.util.Optional; @@ -39,13 +37,10 @@ * Source operator that builds Pages out of the output of a TopFieldCollector (aka TopN) */ public final class LuceneTopNSourceOperator extends LuceneOperator { - public static final class Factory implements LuceneOperator.Factory { - private final int taskConcurrency; + public static final class Factory extends LuceneOperator.Factory { + ; private final int maxPageSize; private final List> sorts; - private final int limit; - private final DataPartitioning dataPartitioning; - private final LuceneSliceQueue sliceQueue; public Factory( List contexts, @@ 
-56,13 +51,9 @@ public Factory( int limit, List> sorts ) { + super(contexts, queryFunction, dataPartitioning, taskConcurrency, limit); this.maxPageSize = maxPageSize; this.sorts = sorts; - this.limit = limit; - this.dataPartitioning = dataPartitioning; - var weightFunction = weightFunction(queryFunction, ScoreMode.TOP_DOCS); - this.sliceQueue = LuceneSliceQueue.create(contexts, weightFunction, dataPartitioning, taskConcurrency); - this.taskConcurrency = Math.min(sliceQueue.totalSlices(), taskConcurrency); } @Override @@ -70,19 +61,10 @@ public SourceOperator get(DriverContext driverContext) { return new LuceneTopNSourceOperator(driverContext.blockFactory(), maxPageSize, sorts, limit, sliceQueue); } - @Override - public int taskConcurrency() { - return taskConcurrency; - } - public int maxPageSize() { return maxPageSize; } - public int limit() { - return limit; - } - @Override public String describe() { String notPrettySorts = sorts.stream().map(Strings::toString).collect(Collectors.joining(",")); @@ -136,7 +118,7 @@ public void finish() { } @Override - public Page getOutput() { + public Page getCheckedOutput() throws IOException { if (isFinished()) { return null; } @@ -152,7 +134,7 @@ public Page getOutput() { } } - private Page collect() { + private Page collect() throws IOException { assert doneCollecting == false; var scorer = getCurrentOrLoadNextScorer(); if (scorer == null) { @@ -169,8 +151,6 @@ private Page collect() { } catch (CollectionTerminatedException cte) { // Lucene terminated early the collection (doing topN for an index that's sorted and the topN uses the same sorting) scorer.markAsDone(); - } catch (IOException e) { - throw new UncheckedIOException(e); } if (scorer.isDone()) { var nextScorer = getCurrentOrLoadNextScorer(); diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/TimeSeriesSortedSourceOperatorFactory.java 
b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/TimeSeriesSortedSourceOperatorFactory.java index 58f2c8de67b61..899060dae5fbb 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/TimeSeriesSortedSourceOperatorFactory.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/TimeSeriesSortedSourceOperatorFactory.java @@ -13,7 +13,6 @@ import org.apache.lucene.index.SortedNumericDocValues; import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.search.Query; -import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.Weight; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.PriorityQueue; @@ -48,13 +47,23 @@ * This operator currently only supports shard level concurrency. A new concurrency mechanism should be introduced at the time serie level * in order to read tsdb indices in parallel. */ -public record TimeSeriesSortedSourceOperatorFactory( - int limit, - int maxPageSize, - int taskConcurrency, - TimeValue timeSeriesPeriod, - LuceneSliceQueue sliceQueue -) implements LuceneOperator.Factory { +public class TimeSeriesSortedSourceOperatorFactory extends LuceneOperator.Factory { + + private final int maxPageSize; + private final TimeValue timeSeriesPeriod; + + private TimeSeriesSortedSourceOperatorFactory( + List contexts, + Function queryFunction, + int taskConcurrency, + int maxPageSize, + TimeValue timeSeriesPeriod, + int limit + ) { + super(contexts, queryFunction, DataPartitioning.SHARD, taskConcurrency, limit); + this.maxPageSize = maxPageSize; + this.timeSeriesPeriod = timeSeriesPeriod; + } @Override public SourceOperator get(DriverContext driverContext) { @@ -62,11 +71,6 @@ public SourceOperator get(DriverContext driverContext) { return new Impl(driverContext.blockFactory(), sliceQueue, maxPageSize, limit, rounding); } - @Override - public int taskConcurrency() { - return taskConcurrency; - } - @Override public String 
describe() { return "TimeSeriesSortedSourceOperator[maxPageSize = " + maxPageSize + ", limit = " + limit + "]"; @@ -80,10 +84,14 @@ public static TimeSeriesSortedSourceOperatorFactory create( List searchContexts, Function queryFunction ) { - var weightFunction = LuceneOperator.weightFunction(queryFunction, ScoreMode.COMPLETE_NO_SCORES); - var sliceQueue = LuceneSliceQueue.create(searchContexts, weightFunction, DataPartitioning.SHARD, taskConcurrency); - taskConcurrency = Math.min(sliceQueue.totalSlices(), taskConcurrency); - return new TimeSeriesSortedSourceOperatorFactory(limit, maxPageSize, taskConcurrency, timeSeriesPeriod, sliceQueue); + return new TimeSeriesSortedSourceOperatorFactory( + searchContexts, + queryFunction, + taskConcurrency, + maxPageSize, + timeSeriesPeriod, + limit + ); } static final class Impl extends SourceOperator { From 6e7afa04b465bd4422c4feb1f976ddd69b638054 Mon Sep 17 00:00:00 2001 From: Moritz Mack Date: Tue, 7 May 2024 18:00:59 +0200 Subject: [PATCH 036/117] Introduce constants for special ReservedClusterStateMetadata versions (#107995) --- .../cluster/metadata/ReservedStateMetadata.java | 2 ++ .../reservedstate/service/FileSettingsService.java | 2 +- .../service/ReservedClusterStateService.java | 7 ++++--- .../reservedstate/service/ReservedStateErrorTask.java | 9 +++++++-- .../reservedstate/service/ReservedStateUpdateTask.java | 5 ++--- .../elasticsearch/readiness/ReadinessServiceTests.java | 3 ++- 6 files changed, 18 insertions(+), 10 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/ReservedStateMetadata.java b/server/src/main/java/org/elasticsearch/cluster/metadata/ReservedStateMetadata.java index ec8200bf2d701..5df045df4ecd8 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/ReservedStateMetadata.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/ReservedStateMetadata.java @@ -47,6 +47,8 @@ public record ReservedStateMetadata( ) implements SimpleDiffable, 
ToXContentFragment { public static final Long NO_VERSION = Long.MIN_VALUE; // use min long as sentinel for uninitialized version + public static final Long EMPTY_VERSION = -1L; // use -1 as sentinel for empty metadata + public static final Long RESTORED_VERSION = 0L; // use 0 as sentinel for metadata restored from snapshot private static final ParseField VERSION = new ParseField("version"); private static final ParseField HANDLERS = new ParseField("handlers"); diff --git a/server/src/main/java/org/elasticsearch/reservedstate/service/FileSettingsService.java b/server/src/main/java/org/elasticsearch/reservedstate/service/FileSettingsService.java index 8719c8cbf8730..f765ee591fb40 100644 --- a/server/src/main/java/org/elasticsearch/reservedstate/service/FileSettingsService.java +++ b/server/src/main/java/org/elasticsearch/reservedstate/service/FileSettingsService.java @@ -102,7 +102,7 @@ protected boolean shouldRefreshFileState(ClusterState clusterState) { // We check if the version was reset to 0, and force an update if a file exists. This can happen in situations // like snapshot restores. 
ReservedStateMetadata fileSettingsMetadata = clusterState.metadata().reservedStateMetadata().get(NAMESPACE); - return fileSettingsMetadata != null && fileSettingsMetadata.version() == 0L; + return fileSettingsMetadata != null && fileSettingsMetadata.version().equals(ReservedStateMetadata.RESTORED_VERSION); } /** diff --git a/server/src/main/java/org/elasticsearch/reservedstate/service/ReservedClusterStateService.java b/server/src/main/java/org/elasticsearch/reservedstate/service/ReservedClusterStateService.java index d2aea19417787..a281db9f02383 100644 --- a/server/src/main/java/org/elasticsearch/reservedstate/service/ReservedClusterStateService.java +++ b/server/src/main/java/org/elasticsearch/reservedstate/service/ReservedClusterStateService.java @@ -42,6 +42,7 @@ import java.util.stream.Collectors; import static org.elasticsearch.ExceptionsHelper.stackTrace; +import static org.elasticsearch.cluster.metadata.ReservedStateMetadata.EMPTY_VERSION; import static org.elasticsearch.core.Strings.format; import static org.elasticsearch.reservedstate.service.ReservedStateErrorTask.checkErrorVersion; import static org.elasticsearch.reservedstate.service.ReservedStateErrorTask.isNewError; @@ -112,7 +113,7 @@ ReservedStateChunk parse(String namespace, XContentParser parser) { try { return stateChunkParser.apply(parser, null); } catch (Exception e) { - ErrorState errorState = new ErrorState(namespace, -1L, e, ReservedStateErrorMetadata.ErrorKind.PARSING); + ErrorState errorState = new ErrorState(namespace, EMPTY_VERSION, e, ReservedStateErrorMetadata.ErrorKind.PARSING); updateErrorState(errorState); logger.debug("error processing state change request for [{}] with the following errors [{}]", namespace, errorState); @@ -134,7 +135,7 @@ public void process(String namespace, XContentParser parser, Consumer try { stateChunk = parse(namespace, parser); } catch (Exception e) { - ErrorState errorState = new ErrorState(namespace, -1L, e, ReservedStateErrorMetadata.ErrorKind.PARSING); 
+ ErrorState errorState = new ErrorState(namespace, EMPTY_VERSION, e, ReservedStateErrorMetadata.ErrorKind.PARSING); updateErrorState(errorState); logger.debug("error processing state change request for [{}] with the following errors [{}]", namespace, errorState); @@ -148,7 +149,7 @@ public void process(String namespace, XContentParser parser, Consumer } public void initEmpty(String namespace, ActionListener listener) { - var missingVersion = new ReservedStateVersion(-1L, Version.CURRENT); + var missingVersion = new ReservedStateVersion(EMPTY_VERSION, Version.CURRENT); var emptyState = new ReservedStateChunk(Map.of(), missingVersion); updateTaskQueue.submitTask( "empty initial cluster state [" + namespace + "]", diff --git a/server/src/main/java/org/elasticsearch/reservedstate/service/ReservedStateErrorTask.java b/server/src/main/java/org/elasticsearch/reservedstate/service/ReservedStateErrorTask.java index 0be4a7972d05c..1a45a357fe621 100644 --- a/server/src/main/java/org/elasticsearch/reservedstate/service/ReservedStateErrorTask.java +++ b/server/src/main/java/org/elasticsearch/reservedstate/service/ReservedStateErrorTask.java @@ -18,6 +18,9 @@ import org.elasticsearch.cluster.metadata.ReservedStateErrorMetadata; import org.elasticsearch.cluster.metadata.ReservedStateMetadata; +import static org.elasticsearch.cluster.metadata.ReservedStateMetadata.EMPTY_VERSION; +import static org.elasticsearch.cluster.metadata.ReservedStateMetadata.NO_VERSION; +import static org.elasticsearch.cluster.metadata.ReservedStateMetadata.RESTORED_VERSION; import static org.elasticsearch.core.Strings.format; /** @@ -50,8 +53,10 @@ ActionListener listener() { static boolean isNewError(ReservedStateMetadata existingMetadata, Long newStateVersion) { return (existingMetadata == null || existingMetadata.errorMetadata() == null - || newStateVersion <= 0 // version will be -1 when we can't even parse the file, it might be 0 on snapshot restore - || existingMetadata.errorMetadata().version() < 
newStateVersion); + || existingMetadata.errorMetadata().version() < newStateVersion + || newStateVersion.equals(RESTORED_VERSION) + || newStateVersion.equals(EMPTY_VERSION) + || newStateVersion.equals(NO_VERSION)); } static boolean checkErrorVersion(ClusterState currentState, ErrorState errorState) { diff --git a/server/src/main/java/org/elasticsearch/reservedstate/service/ReservedStateUpdateTask.java b/server/src/main/java/org/elasticsearch/reservedstate/service/ReservedStateUpdateTask.java index 2ee9aa0d86a0e..535758ed71eac 100644 --- a/server/src/main/java/org/elasticsearch/reservedstate/service/ReservedStateUpdateTask.java +++ b/server/src/main/java/org/elasticsearch/reservedstate/service/ReservedStateUpdateTask.java @@ -169,12 +169,11 @@ static boolean checkMetadataVersion( return false; } - // Version -1 is special, it means "empty" - if (reservedStateVersion.version() == -1L) { + if (reservedStateVersion.version().equals(ReservedStateMetadata.EMPTY_VERSION)) { return true; } - // Version 0 is special, snapshot restores will reset to 0. 
+ // require a regular positive version, reject any special version if (reservedStateVersion.version() <= 0L) { logger.warn( () -> format( diff --git a/server/src/test/java/org/elasticsearch/readiness/ReadinessServiceTests.java b/server/src/test/java/org/elasticsearch/readiness/ReadinessServiceTests.java index 1ad790ae31804..88661abf5f1fe 100644 --- a/server/src/test/java/org/elasticsearch/readiness/ReadinessServiceTests.java +++ b/server/src/test/java/org/elasticsearch/readiness/ReadinessServiceTests.java @@ -49,6 +49,7 @@ import java.util.Set; import static org.elasticsearch.cluster.metadata.ReservedStateErrorMetadata.ErrorKind.TRANSIENT; +import static org.elasticsearch.cluster.metadata.ReservedStateMetadata.EMPTY_VERSION; public class ReadinessServiceTests extends ESTestCase implements ReadinessClientProbe { private ClusterService clusterService; @@ -59,7 +60,7 @@ public class ReadinessServiceTests extends ESTestCase implements ReadinessClient private static Metadata emptyReservedStateMetadata; static { - var fileSettingsState = new ReservedStateMetadata.Builder(FileSettingsService.NAMESPACE).version(-1L); + var fileSettingsState = new ReservedStateMetadata.Builder(FileSettingsService.NAMESPACE).version(EMPTY_VERSION); emptyReservedStateMetadata = new Metadata.Builder().put(fileSettingsState.build()).build(); } From d0f4966431b29664074f45cb15e1782e8a62d599 Mon Sep 17 00:00:00 2001 From: Liam Thompson <32779855+leemthompo@users.noreply.github.com> Date: Tue, 7 May 2024 18:10:48 +0200 Subject: [PATCH 037/117] [DOCS] Add local dev setup instructions (#107913) * [DOCS] Add local dev setup instructions - Replace existing Run ES in Docker locally page, with simpler no-security local dev setup - Move this file into Quickstart folder, along with existing quickstart guide - Update self-managed instructions in Quickstart guide to use local dev approach --- docs/reference/index.asciidoc | 2 +- .../{ => quickstart}/getting-started.asciidoc | 5 +- 
docs/reference/quickstart/index.asciidoc | 10 + .../run-elasticsearch-locally.asciidoc | 177 +++++++++++++++++ docs/reference/setup.asciidoc | 2 - docs/reference/setup/install.asciidoc | 2 +- docs/reference/setup/install/docker.asciidoc | 6 + .../setup/run-elasticsearch-locally.asciidoc | 183 ------------------ .../tab-widgets/api-call-widget.asciidoc | 2 +- docs/reference/tab-widgets/api-call.asciidoc | 2 +- .../quick-start-install-widget.asciidoc | 4 +- .../tab-widgets/quick-start-install.asciidoc | 61 +----- 12 files changed, 202 insertions(+), 254 deletions(-) rename docs/reference/{ => quickstart}/getting-started.asciidoc (98%) create mode 100644 docs/reference/quickstart/index.asciidoc create mode 100644 docs/reference/quickstart/run-elasticsearch-locally.asciidoc delete mode 100644 docs/reference/setup/run-elasticsearch-locally.asciidoc diff --git a/docs/reference/index.asciidoc b/docs/reference/index.asciidoc index e47304f1e1337..2057519719177 100644 --- a/docs/reference/index.asciidoc +++ b/docs/reference/index.asciidoc @@ -10,7 +10,7 @@ include::intro.asciidoc[] include::release-notes/highlights.asciidoc[] -include::getting-started.asciidoc[] +include::quickstart/index.asciidoc[] include::setup.asciidoc[] diff --git a/docs/reference/getting-started.asciidoc b/docs/reference/quickstart/getting-started.asciidoc similarity index 98% rename from docs/reference/getting-started.asciidoc rename to docs/reference/quickstart/getting-started.asciidoc index 2a5dbc2f0d031..6b3095e07f9d4 100644 --- a/docs/reference/getting-started.asciidoc +++ b/docs/reference/quickstart/getting-started.asciidoc @@ -1,10 +1,9 @@ -[chapter] [[getting-started]] -= Quick start +== Quick start guide This guide helps you learn how to: -* install and run {es} and {kib} (using {ecloud} or Docker), +* Run {es} and {kib} (using {ecloud} or in a local Docker dev environment), * add simple (non-timestamped) dataset to {es}, * run basic searches. 
diff --git a/docs/reference/quickstart/index.asciidoc b/docs/reference/quickstart/index.asciidoc new file mode 100644 index 0000000000000..e517d039e620b --- /dev/null +++ b/docs/reference/quickstart/index.asciidoc @@ -0,0 +1,10 @@ +[[quickstart]] += Quickstart + +Get started quickly with {es}. + +* Learn how to run {es} (and {kib}) for <>. +* Follow our <> to add data to {es} and query it. + +include::run-elasticsearch-locally.asciidoc[] +include::getting-started.asciidoc[] diff --git a/docs/reference/quickstart/run-elasticsearch-locally.asciidoc b/docs/reference/quickstart/run-elasticsearch-locally.asciidoc new file mode 100644 index 0000000000000..cfad434b890db --- /dev/null +++ b/docs/reference/quickstart/run-elasticsearch-locally.asciidoc @@ -0,0 +1,177 @@ +[[run-elasticsearch-locally]] +== Run {es} locally in Docker (without security) +++++ +Local dev setup (Docker) +++++ + +[WARNING] +==== +*DO NOT USE THESE INSTRUCTIONS FOR PRODUCTION DEPLOYMENTS* + +The instructions on this page are for *local development only*. Do not use these instructions for production deployments, because they are not secure. +While this approach is convenient for experimenting and learning, you should never run the service in this way in a production environment. + +Refer to https://www.elastic.co/guide/en/elasticsearch/reference/current/install-elasticsearch.html[Install {es}] to learn about the various options for installing {es} in a production environment, including using Docker. +==== + +The following commands help you very quickly spin up a single-node {es} cluster, together with {kib} in Docker. +Note that if you don't need the {kib} UI, you can skip those instructions. + +[discrete] +[[local-dev-why]] +=== When would I use this setup? + +Use this setup if you want to quickly spin up {es} (and {kib}) for local development or testing. + +For example you might: + +* Want to run a quick test to see how a feature works. 
+* Follow a tutorial or guide that requires an {es} cluster, like our <>. +* Experiment with the {es} APIs using different tools, like the Dev Tools Console, cURL, or an Elastic programming language client. +* Quickly spin up an {es} cluster to test an executable https://github.com/elastic/elasticsearch-labs/tree/main/notebooks#readme[Python notebook] locally. + +[discrete] +[[local-dev-prerequisites]] +=== Prerequisites + +If you don't have Docker installed, https://www.docker.com/products/docker-desktop[download and install Docker Desktop] for your operating system. + +[discrete] +[[local-dev-env-vars]] +=== Set environment variables + +Configure the following environment variables. + +[source,sh] +---- +export ELASTIC_PASSWORD="" # password for "elastic" username +export KIBANA_PASSWORD="" # Used _internally_ by Kibana, must be at least 6 characters long +---- + +[discrete] +[[local-dev-create-docker-network]] +=== Create a Docker network + +To run both {es} and {kib}, you'll need to create a Docker network: + +[source,sh] +---- +docker network create elastic-net +---- + +[discrete] +[[local-dev-run-es]] +=== Run {es} + +Start the {es} container with the following command: + +ifeval::["{release-state}"=="unreleased"] +WARNING: Version {version} has not yet been released. +No Docker image is currently available for {es} {version}. +endif::[] + +[source,sh,subs="attributes"] +---- +docker run -p 127.0.0.1:9200:9200 -d --name elasticsearch --network elastic-net \ + -e ELASTIC_PASSWORD=$ELASTIC_PASSWORD \ + -e "discovery.type=single-node" \ + -e "xpack.security.http.ssl.enabled=false" \ + -e "xpack.license.self_generated.type=trial" \ + {docker-image} +---- + +[discrete] +[[local-dev-run-kib]] +=== Run {kib} (optional) + +To run {kib}, you must first set the `kibana_system` password in the {es} container. 
+ +[source,sh,subs="attributes"] +---- +# configure the Kibana password in the ES container +curl -u elastic:$ELASTIC_PASSWORD \ + -X POST \ + http://localhost:9200/_security/user/kibana_system/_password \ + -d '{"password":"'"$KIBANA_PASSWORD"'"}' \ + -H 'Content-Type: application/json' +---- +// NOTCONSOLE + +Start the {kib} container with the following command: + +ifeval::["{release-state}"=="unreleased"] +WARNING: Version {version} has not yet been released. +No Docker image is currently available for {es} {version}. +endif::[] + +[source,sh,subs="attributes"] +---- +docker run -p 127.0.0.1:5601:5601 -d --name kibana --network elastic-net \ + -e ELASTICSEARCH_URL=http://elasticsearch:9200 \ + -e ELASTICSEARCH_HOSTS=http://elasticsearch:9200 \ + -e ELASTICSEARCH_USERNAME=kibana_system \ + -e ELASTICSEARCH_PASSWORD=$KIBANA_PASSWORD \ + -e "xpack.security.enabled=false" \ + -e "xpack.license.self_generated.type=trial" \ + {kib-docker-image} +---- + +[NOTE] +==== +The service is started with a trial license. The trial license enables all features of Elasticsearch for a trial period of 30 days. After the trial period expires, the license is downgraded to a basic license, which is free forever. If you prefer to skip the trial and use the basic license, set the value of the `xpack.license.self_generated.type` variable to basic instead. For a detailed feature comparison between the different licenses, refer to our https://www.elastic.co/subscriptions[subscriptions page]. +==== + +[discrete] +[[local-dev-connecting-clients]] +== Connecting to {es} with language clients + +To connect to the {es} cluster from a language client, you can use basic authentication with the `elastic` username and the password you set in the environment variable. 
+ +You'll use the following connection details: + +* **{es} endpoint**: `http://localhost:9200` +* **Username**: `elastic` +* **Password**: `$ELASTIC_PASSWORD` (Value you set in the environment variable) + +For example, to connect with the Python `elasticsearch` client: + +[source,python] +---- +import os +from elasticsearch import Elasticsearch + +username = 'elastic' +password = os.getenv('ELASTIC_PASSWORD') # Value you set in the environment variable + +client = Elasticsearch( + "http://localhost:9200", + basic_auth=(username, password) +) + +print(client.info()) +---- + +Here's an example curl command using basic authentication: + +[source,sh,subs="attributes"] +---- +curl -u elastic:$ELASTIC_PASSWORD \ + -X PUT \ + http://localhost:9200/my-new-index \ + -H 'Content-Type: application/json' +---- +// NOTCONSOLE + +[discrete] +[[local-dev-next-steps]] +=== Next steps + +Use our <> to learn the basics of {es}: how to add data and query it. + +[discrete] +[[local-dev-production]] +=== Moving to production + +This setup is not suitable for production use. For production deployments, we recommend using our managed service on Elastic Cloud. https://cloud.elastic.co/registration[Sign up for a free trial] (no credit card required). + +Otherwise, refer to https://www.elastic.co/guide/en/elasticsearch/reference/current/install-elasticsearch.html[Install {es}] to learn about the various options for installing {es} in a self-managed production environment, including using Docker. \ No newline at end of file diff --git a/docs/reference/setup.asciidoc b/docs/reference/setup.asciidoc index c886fe0feeb4a..64626aafb2441 100644 --- a/docs/reference/setup.asciidoc +++ b/docs/reference/setup.asciidoc @@ -29,8 +29,6 @@ resource-heavy {ls} deployment should be on its own host. 
include::setup/install.asciidoc[] -include::setup/run-elasticsearch-locally.asciidoc[] - include::setup/configuration.asciidoc[] include::setup/important-settings.asciidoc[] diff --git a/docs/reference/setup/install.asciidoc b/docs/reference/setup/install.asciidoc index 49501c46b8ba9..89373d0ce8d44 100644 --- a/docs/reference/setup/install.asciidoc +++ b/docs/reference/setup/install.asciidoc @@ -20,7 +20,7 @@ If you want to install and manage {es} yourself, you can: * Run {es} in a <>. * Set up and manage {es}, {kib}, {agent}, and the rest of the Elastic Stack on Kubernetes with {eck-ref}[{eck}]. -TIP: To try out Elasticsearch on your own machine, we recommend using Docker and running both Elasticsearch and Kibana. For more information, see <>. +TIP: To try out Elasticsearch on your own machine, we recommend using Docker and running both Elasticsearch and Kibana. For more information, see <>. Please note that this setup is *not suitable for production use*. [discrete] [[elasticsearch-install-packages]] diff --git a/docs/reference/setup/install/docker.asciidoc b/docs/reference/setup/install/docker.asciidoc index 0c518d520bdd5..370fc5c4ccf7e 100644 --- a/docs/reference/setup/install/docker.asciidoc +++ b/docs/reference/setup/install/docker.asciidoc @@ -8,6 +8,12 @@ https://github.com/elastic/elasticsearch/blob/{branch}/distribution/docker[GitHu include::license.asciidoc[] +[TIP] +==== +If you just want to test {es} in local development, refer to <>. +Please note that this setup is not suitable for production environments. 
+==== + [[docker-cli-run-dev-mode]] ==== Run {es} in Docker diff --git a/docs/reference/setup/run-elasticsearch-locally.asciidoc b/docs/reference/setup/run-elasticsearch-locally.asciidoc deleted file mode 100644 index a6e6d5c8963a2..0000000000000 --- a/docs/reference/setup/run-elasticsearch-locally.asciidoc +++ /dev/null @@ -1,183 +0,0 @@ -[[run-elasticsearch-locally]] -== Run Elasticsearch locally - -//// -IMPORTANT: This content is replicated in the Elasticsearch repo -README.ascidoc file. If you make changes, you must also update the -Elasticsearch README. -+ -GitHub renders the tagged region directives when you view the README, -so it's not possible to just include the content from the README. Darn. -+ -Also note that there are similar instructions in the Kibana guide: -https://www.elastic.co/guide/en/kibana/current/docker.html -//// - -To try out Elasticsearch on your own machine, we recommend using Docker -and running both Elasticsearch and Kibana. -Docker images are available from the https://www.docker.elastic.co[Elastic Docker registry]. - -NOTE: Starting in Elasticsearch 8.0, security is enabled by default. -The first time you start Elasticsearch, TLS encryption is configured automatically, -a password is generated for the `elastic` user, -and a Kibana enrollment token is created so you can connect Kibana to your secured cluster. - -For other installation options, see the -https://www.elastic.co/guide/en/elasticsearch/reference/current/install-elasticsearch.html[Elasticsearch installation documentation]. - -[discrete] -=== Start Elasticsearch - -. Install and start https://www.docker.com/products/docker-desktop[Docker -Desktop]. Go to **Preferences > Resources > Advanced** and set Memory to at least 4GB. - -. Start an Elasticsearch container: -ifeval::["{release-state}"=="unreleased"] -+ -WARNING: Version {version} of {es} has not yet been released, so no -Docker image is currently available for this version. 
-endif::[] -+ -[source,sh,subs="attributes"] ----- -docker network create elastic -docker pull docker.elastic.co/elasticsearch/elasticsearch:{version} -docker run --name elasticsearch --net elastic -p 9200:9200 -p 9300:9300 -e "discovery.type=single-node" -t docker.elastic.co/elasticsearch/elasticsearch:{version} ----- -+ -When you start Elasticsearch for the first time, the generated `elastic` user password and -Kibana enrollment token are output to the terminal. -+ -NOTE: You might need to scroll back a bit in the terminal to view the password -and enrollment token. - -. Copy the generated password and enrollment token and save them in a secure -location. These values are shown only when you start Elasticsearch for the first time. -You'll use these to enroll Kibana with your Elasticsearch cluster and log in. - -[discrete] -=== Start Kibana - -Kibana enables you to easily send requests to Elasticsearch and analyze, visualize, and manage data interactively. - -. In a new terminal session, start Kibana and connect it to your Elasticsearch container: -ifeval::["{release-state}"=="unreleased"] -+ -WARNING: Version {version} of {kib} has not yet been released, so no -Docker image is currently available for this version. -endif::[] -+ -[source,sh,subs="attributes"] ----- -docker pull docker.elastic.co/kibana/kibana:{version} -docker run --name kibana --net elastic -p 5601:5601 docker.elastic.co/kibana/kibana:{version} ----- -+ -When you start Kibana, a unique URL is output to your terminal. - -. To access Kibana, open the generated URL in your browser. - - .. Paste the enrollment token that you copied when starting - Elasticsearch and click the button to connect your Kibana instance with Elasticsearch. - - .. Log in to Kibana as the `elastic` user with the password that was generated - when you started Elasticsearch. - -[discrete] -=== Send requests to Elasticsearch - -You send data and other requests to Elasticsearch through REST APIs. 
-You can interact with Elasticsearch using any client that sends HTTP requests, -such as the https://www.elastic.co/guide/en/elasticsearch/client/index.html[Elasticsearch -language clients] and https://curl.se[curl]. -Kibana's developer console provides an easy way to experiment and test requests. -To access the console, go to **Management > Dev Tools**. - -[discrete] -=== Add data - -You index data into Elasticsearch by sending JSON objects (documents) through the REST APIs. -Whether you have structured or unstructured text, numerical data, or geospatial data, -Elasticsearch efficiently stores and indexes it in a way that supports fast searches. - -For timestamped data such as logs and metrics, you typically add documents to a -data stream made up of multiple auto-generated backing indices. - -To add a single document to an index, submit an HTTP post request that targets the index. - -[source,console] ----- -POST /customer/_doc/1 -{ - "firstname": "Jennifer", - "lastname": "Walters" -} ----- - -This request automatically creates the `customer` index if it doesn't exist, -adds a new document that has an ID of 1, and -stores and indexes the `firstname` and `lastname` fields. - -The new document is available immediately from any node in the cluster. -You can retrieve it with a GET request that specifies its document ID: - -[source,console] ----- -GET /customer/_doc/1 ----- -// TEST[continued] - -To add multiple documents in one request, use the `_bulk` API. -Bulk data must be newline-delimited JSON (NDJSON). -Each line must end in a newline character (`\n`), including the last line. 
- -[source,console] ----- -PUT customer/_bulk -{ "create": { } } -{ "firstname": "Monica","lastname":"Rambeau"} -{ "create": { } } -{ "firstname": "Carol","lastname":"Danvers"} -{ "create": { } } -{ "firstname": "Wanda","lastname":"Maximoff"} -{ "create": { } } -{ "firstname": "Jennifer","lastname":"Takeda"} ----- -// TEST[continued] - -[discrete] -=== Search - -Indexed documents are available for search in near real-time. -The following search matches all customers with a first name of _Jennifer_ -in the `customer` index. - -[source,console] ----- -GET customer/_search -{ - "query" : { - "match" : { "firstname": "Jennifer" } - } -} ----- -// TEST[continued] - -[discrete] -=== Explore - -You can use Discover in Kibana to interactively search and filter your data. -From there, you can start creating visualizations and building and sharing dashboards. - -To get started, create a _data view_ that connects to one or more Elasticsearch indices, -data streams, or index aliases. - -. Go to **Management > Stack Management > Kibana > Data Views**. -. Select **Create data view**. -. Enter a name for the data view and a pattern that matches one or more indices, -such as _customer_. -. Select **Save data view to Kibana**. - -To start exploring, go to **Analytics > Discover**. - - diff --git a/docs/reference/tab-widgets/api-call-widget.asciidoc b/docs/reference/tab-widgets/api-call-widget.asciidoc index adc2aa86f1c0e..4ad3c45366434 100644 --- a/docs/reference/tab-widgets/api-call-widget.asciidoc +++ b/docs/reference/tab-widgets/api-call-widget.asciidoc @@ -12,7 +12,7 @@ aria-controls="self-managed-tab-api-call" id="self-managed-api-call" tabindex="-1"> - Self-managed + Local Dev (Docker)

> for advanced Docker documentation. - -. Run the following Docker commands: -+ -[source,sh,subs="attributes"] ----- -docker network create elastic -docker pull {docker-image} -docker run --name es01 --net elastic -p 9200:9200 -p 9300:9300 -e "discovery.type=single-node" -t {docker-image} ----- - -. Copy the generated `elastic` password and enrollment token, which are output to your terminal. -You'll use these to enroll {kib} with your {es} cluster and log in. -These credentials are only shown when you start {es} for the first time. -+ -We recommend storing the `elastic` password as an environment variable in your shell. Example: -+ -[source,sh] ----- -export ELASTIC_PASSWORD="your_password" ----- -+ -. Copy the `http_ca.crt` SSL certificate from the container to your local machine. -+ -[source,sh] ----- -docker cp es01:/usr/share/elasticsearch/config/certs/http_ca.crt . ----- -+ -. Make a REST API call to {es} to ensure the {es} container is running. -+ -[source,sh] ----- -curl --cacert http_ca.crt -u elastic:$ELASTIC_PASSWORD https://localhost:9200 ----- -// NOTCONSOLE - -*Run {kib}* - -{kib} is the user interface for Elastic. -It's great for getting started with {es} and exploring your data. -We'll be using the Dev Tools *Console* in {kib} to make REST API calls to {es}. - -In a new terminal session, start {kib} and connect it to your {es} container: - -[source,sh,subs="attributes"] ----- -docker pull {kib-docker-image} -docker run --name kibana --net elastic -p 5601:5601 {kib-docker-image} ----- - -When you start {kib}, a unique URL is output to your terminal. -To access {kib}: - -. Open the generated URL in your browser. -. Paste the enrollment token that you copied earlier, to connect your {kib} instance with {es}. -. Log in to {kib} as the `elastic` user with the password that was generated when you started {es}. +Refer to our <> to quickly spin up a local development environment in Docker. 
If you don't need {kib}, you'll only need one `docker run` command to start {es}. Please note that this setup is *not suitable for production use*. // end::self-managed[] \ No newline at end of file From b2ebaeee7b5349d368bfc436f6f62c0b12aa6fd0 Mon Sep 17 00:00:00 2001 From: Liam Thompson <32779855+leemthompo@users.noreply.github.com> Date: Tue, 7 May 2024 18:20:49 +0200 Subject: [PATCH 038/117] [DOCS] Add retrievers overview (#107959) --- .../retrievers-overview.asciidoc | 207 ++++++++++++++++++ .../search-your-data.asciidoc | 3 +- 2 files changed, 209 insertions(+), 1 deletion(-) create mode 100644 docs/reference/search/search-your-data/retrievers-overview.asciidoc diff --git a/docs/reference/search/search-your-data/retrievers-overview.asciidoc b/docs/reference/search/search-your-data/retrievers-overview.asciidoc new file mode 100644 index 0000000000000..fdd984819558b --- /dev/null +++ b/docs/reference/search/search-your-data/retrievers-overview.asciidoc @@ -0,0 +1,207 @@ +[[retrievers-overview]] +== Retrievers + +// Will move to a top level "Retrievers and reranking" section once reranking is live + +preview::[] + +A retriever is an abstraction that was added to the Search API in *8.14.0*. +This abstraction enables the configuration of multi-stage retrieval +pipelines within a single `_search` call. This simplifies your search +application logic, because you no longer need to configure complex searches via +multiple {es} calls or implement additional client-side logic to +combine results from different queries. + +This document provides a general overview of the retriever abstraction. +For implementation details, including notable restrictions, check out the +<> in the `_search` API docs. + +[discrete] +[[retrievers-overview-types]] +=== Retriever types + +Retrievers come in various types, each tailored for different search operations. +The following retrievers are currently available: + +* <>. 
Returns top documents from a +traditional https://www.elastic.co/guide/en/elasticsearch/reference/master/query-dsl.html[query]. +Mimics a traditional query but in the context of a retriever framework. This +ensures backward compatibility as existing `_search` requests remain supported. +That way you can transition to the new abstraction at your own pace without +mixing syntaxes. +* <>. Returns top documents from a <>, +in the context of a retriever framework. +* <>. Combines and ranks multiple first-stage retrievers using +the reciprocal rank fusion (RRF) algorithm. Allows you to combine multiple result sets +with different relevance indicators into a single result set. +An RRF retriever is a *compound retriever*, where its `filter` element is +propagated to its sub retrievers. ++ +Sub retrievers may not use elements that +are restricted by having a compound retriever as part of the retriever tree. +See the <> for detailed +examples and information on how to use the RRF retriever. + +[NOTE] +==== +Stay tuned for more retriever types in future releases! +==== + +[discrete] +=== What makes retrievers useful? + +Here's an overview of what makes retrievers useful and how they differ from +regular queries. + +. *Simplified user experience*. Retrievers simplify the user experience by +allowing entire retrieval pipelines to be configured in a single API call. This +maintains backward compatibility with traditional query elements by +automatically translating them to the appropriate retriever. +. *Structured retrieval*. Retrievers provide a more structured way to define search +operations. They allow searches to be described using a "retriever tree", a +hierarchical structure that clarifies the sequence and logic of operations, +making complex searches more understandable and manageable. +. *Composability and flexibility*. 
Retrievers enable flexible composability,
+allowing you to build pipelines and seamlessly integrate different retrieval
+strategies into these pipelines. Retrievers make it easy to test out different
+retrieval strategy combinations.
+. *Compound operations*. A retriever can have sub retrievers. This
+allows complex nested searches where the results of one retriever feed into
+another, supporting sophisticated querying strategies that might involve
+multiple stages or criteria.
+. *Retrieval as a first-class concept*. Unlike
+traditional queries, where the query is a part of a larger search API call,
+retrievers are designed as standalone entities that can be combined or used in
+isolation. This enables a more modular and flexible approach to constructing
+searches.
+. *Enhanced control over document scoring and ranking*. Retrievers
+allow for more explicit control over how documents are scored and filtered. For
+instance, you can specify minimum score thresholds, apply complex filters
+without affecting scoring, and use parameters like `terminate_after` for
+performance optimizations.
+. *Integration with existing {es} functionalities*. Even though
+retrievers can be used instead of existing `_search` API syntax (like the
+`query` and `knn`), they are designed to integrate seamlessly with things like
+pagination (`search_after`) and sorting. They also maintain compatibility with
+aggregation operations by treating the combination of all leaf retrievers as
+`should` clauses in a boolean query.
+. *Cleaner separation of concerns*. When using compound retrievers, only the
+query element is allowed, which enforces a cleaner separation of concerns
+and prevents the complexity that might arise from overly nested or
+interdependent configurations.
+
+[discrete]
+[[retrievers-overview-example]]
+=== Example
+
+The following example demonstrates how using retrievers
+simplifies the composability of queries for RRF ranking.
+ +[source,js] +---- +GET example-index/_search +{ + "retriever": { + "rrf": { + "retrievers": [ + { + "standard": { + "query": { + "text_expansion": { + "vector.tokens": { + "model_id": ".elser_model_2", + "model_text": "What blue shoes are on sale?" + } + } + } + } + }, + { + "standard": { + "query": { + "match": { + "text": "blue shoes sale" + } + } + } + } + ] + } + } +} +---- +//NOTCONSOLE + +This example demonstrates how you can combine different +retrieval strategies into a single `retriever` pipeline. + +Compare to `RRF` with `sub_searches` approach: + +.*Expand* for example +[%collapsible] +============== + +[source,js] +---- +GET example-index/_search +{ + "sub_searches":[ + { + "query":{ + "match":{ + "text":"blue shoes sale" + } + } + }, + { + "query":{ + "text_expansion":{ + "vector.tokens":{ + "model_id":".elser_model_2", + "model_text":"What blue shoes are on sale?" + } + } + } + } + ], + "rank":{ + "rrf":{ + "window_size":50, + "rank_constant":20 + } + } +} +---- +//NOTCONSOLE +============== + +[discrete] +[[retrievers-overview-glossary]] +=== Glossary + +Here are some important terms: + +* *Retrieval Pipeline*. Defines the entire retrieval and ranking logic to +produce top hits. +* *Retriever Tree*. A hierarchical structure that defines how retrievers interact. +* *First-stage Retriever*. Returns an initial set of candidate documents. +* *Compound Retriever*. Builds on one or more retrievers, +enhancing document retrieval and ranking logic. +* *Combiners*. Compound retrievers that merge top hits +from multiple sub-retrievers. +//* NOT YET *Rerankers*. Special compound retrievers that reorder hits and may adjust the number of hits, with distinctions between first-stage and second-stage rerankers. + +[discrete] +[[retrievers-overview-play-in-search]] +=== Retrievers in action + +The Search Playground builds Elasticsearch queries using the retriever abstraction. 
+It automatically detects the fields and types in your index and builds a retriever tree based on your selections. + +You can use the Playground to experiment with different retriever configurations and see how they affect search results. + +Refer to the {kibana-ref}/playground.html[Playground documentation] for more information. +// Content coming in https://github.com/elastic/kibana/pull/182692 + + + diff --git a/docs/reference/search/search-your-data/search-your-data.asciidoc b/docs/reference/search/search-your-data/search-your-data.asciidoc index bed204985296c..e1c1618410f2f 100644 --- a/docs/reference/search/search-your-data/search-your-data.asciidoc +++ b/docs/reference/search/search-your-data/search-your-data.asciidoc @@ -43,10 +43,11 @@ DSL, with a simplified user experience. Create search applications based on your results directly in the Kibana Search UI. include::search-api.asciidoc[] -include::search-application-overview.asciidoc[] include::knn-search.asciidoc[] include::semantic-search.asciidoc[] +include::retrievers-overview.asciidoc[] include::learning-to-rank.asciidoc[] include::search-across-clusters.asciidoc[] include::search-with-synonyms.asciidoc[] +include::search-application-overview.asciidoc[] include::behavioral-analytics/behavioral-analytics-overview.asciidoc[] From e043bce1af83bb47e6cfab488bdc401b744d89ea Mon Sep 17 00:00:00 2001 From: Martijn van Groningen Date: Tue, 7 May 2024 19:02:54 +0200 Subject: [PATCH 039/117] Add more rollup usage stats (#108245) This change adds `number_of_rollup_jobs` and `number_of_rollup_indices` to `rollup` usage. The former indicates the number of active rollup jobs running and the latter indicated the number of rollup indices (which could be the result of previous rollup jobs). 
--- docs/reference/rest-api/usage.asciidoc | 4 ++- .../org/elasticsearch/TransportVersions.java | 1 + .../core/rollup/RollupFeatureSetUsage.java | 28 ++++++++++++++++++- .../rollup/RollupUsageTransportAction.java | 11 ++++++-- .../RollupInfoTransportActionTests.java | 11 +++++--- .../rest-api-spec/test/rollup/put_job.yml | 5 ++++ 6 files changed, 52 insertions(+), 8 deletions(-) diff --git a/docs/reference/rest-api/usage.asciidoc b/docs/reference/rest-api/usage.asciidoc index 6bdfaab17a4d0..0d21f648ab58b 100644 --- a/docs/reference/rest-api/usage.asciidoc +++ b/docs/reference/rest-api/usage.asciidoc @@ -308,7 +308,8 @@ GET /_xpack/usage }, "rollup" : { "available" : true, - "enabled" : true + "enabled" : true, + ... }, "ilm" : { "policy_count" : 3, @@ -496,6 +497,7 @@ GET /_xpack/usage } ------------------------------------------------------------ // TESTRESPONSE[s/"security" : \{[^\}]*\},/"security" : $body.$_path,/] +// TESTRESPONSE[s/"rollup" : \{[^\}]*\},/"rollup" : $body.$_path,/] // TESTRESPONSE[s/"detectors" : \{[^\}]*\},/"detectors" : $body.$_path,/] // TESTRESPONSE[s/"model_size" : \{[^\}]*\},/"model_size" : $body.$_path,/] // TESTRESPONSE[s/"eql" : \{[^\}]*\},/"eql" : $body.$_path,/] diff --git a/server/src/main/java/org/elasticsearch/TransportVersions.java b/server/src/main/java/org/elasticsearch/TransportVersions.java index 3f66147181593..78fe55a1df9b5 100644 --- a/server/src/main/java/org/elasticsearch/TransportVersions.java +++ b/server/src/main/java/org/elasticsearch/TransportVersions.java @@ -193,6 +193,7 @@ static TransportVersion def(int id) { public static final TransportVersion NO_GLOBAL_RETENTION_FOR_SYSTEM_DATA_STREAMS = def(8_650_00_0); public static final TransportVersion SHUTDOWN_REQUEST_TIMEOUTS_FIX = def(8_651_00_0); public static final TransportVersion INDEXING_PRESSURE_REQUEST_REJECTIONS_COUNT = def(8_652_00_0); + public static final TransportVersion ROLLUP_USAGE = def(8_653_00_0); /* * STOP! READ THIS FIRST! 
No, really, diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/RollupFeatureSetUsage.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/RollupFeatureSetUsage.java index fd5cf1c41b466..a198c0570cd91 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/RollupFeatureSetUsage.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/RollupFeatureSetUsage.java @@ -9,19 +9,45 @@ import org.elasticsearch.TransportVersion; import org.elasticsearch.TransportVersions; import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xpack.core.XPackFeatureSet; import org.elasticsearch.xpack.core.XPackField; import java.io.IOException; +import static org.elasticsearch.TransportVersions.ROLLUP_USAGE; + public class RollupFeatureSetUsage extends XPackFeatureSet.Usage { + private final int numberOfRollupJobs; + public RollupFeatureSetUsage(StreamInput input) throws IOException { super(input); + this.numberOfRollupJobs = input.getTransportVersion().onOrAfter(ROLLUP_USAGE) ? 
input.readVInt() : 0; } - public RollupFeatureSetUsage() { + public RollupFeatureSetUsage(int numberOfRollupJobs) { super(XPackField.ROLLUP, true, true); + this.numberOfRollupJobs = numberOfRollupJobs; + } + + public int getNumberOfRollupJobs() { + return numberOfRollupJobs; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + if (out.getTransportVersion().onOrAfter(ROLLUP_USAGE)) { + out.writeVInt(numberOfRollupJobs); + } + } + + @Override + protected void innerXContent(XContentBuilder builder, Params params) throws IOException { + super.innerXContent(builder, params); + builder.field("number_of_rollup_jobs", numberOfRollupJobs); } @Override diff --git a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/RollupUsageTransportAction.java b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/RollupUsageTransportAction.java index a15dc19bb4abf..c3b568fc32b71 100644 --- a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/RollupUsageTransportAction.java +++ b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/RollupUsageTransportAction.java @@ -12,6 +12,8 @@ import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.core.Predicates; +import org.elasticsearch.persistent.PersistentTasksCustomMetadata; import org.elasticsearch.protocol.xpack.XPackUsageRequest; import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; @@ -20,6 +22,7 @@ import org.elasticsearch.xpack.core.action.XPackUsageFeatureResponse; import org.elasticsearch.xpack.core.action.XPackUsageFeatureTransportAction; import org.elasticsearch.xpack.core.rollup.RollupFeatureSetUsage; +import org.elasticsearch.xpack.core.rollup.job.RollupJob; public class RollupUsageTransportAction extends XPackUsageFeatureTransportAction { @@ -48,8 +51,12 
@@ protected void masterOperation( ClusterState state, ActionListener listener ) { - // TODO expose the currently running rollup tasks on this node? Unclear the best way to do that - RollupFeatureSetUsage usage = new RollupFeatureSetUsage(); + int numberOfRollupJobs = 0; + PersistentTasksCustomMetadata persistentTasks = state.metadata().custom(PersistentTasksCustomMetadata.TYPE); + if (persistentTasks != null) { + numberOfRollupJobs = persistentTasks.findTasks(RollupJob.NAME, Predicates.always()).size(); + } + RollupFeatureSetUsage usage = new RollupFeatureSetUsage(numberOfRollupJobs); listener.onResponse(new XPackUsageFeatureResponse(usage)); } } diff --git a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/RollupInfoTransportActionTests.java b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/RollupInfoTransportActionTests.java index b0881eb350d5a..243b478db6dbf 100644 --- a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/RollupInfoTransportActionTests.java +++ b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/RollupInfoTransportActionTests.java @@ -8,18 +8,19 @@ import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.PlainActionFuture; +import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.MockUtils; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; -import org.elasticsearch.xpack.core.XPackFeatureSet; import org.elasticsearch.xpack.core.action.XPackUsageFeatureResponse; import org.elasticsearch.xpack.core.rollup.RollupFeatureSetUsage; import java.io.IOException; import java.util.concurrent.ExecutionException; +import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.core.Is.is; import static org.mockito.Mockito.mock; @@ -42,13 +43,15 @@ public void testUsage() throws 
ExecutionException, InterruptedException, IOExcep TransportService transportService = MockUtils.setupTransportServiceWithThreadpoolExecutor(threadPool); var usageAction = new RollupUsageTransportAction(transportService, null, threadPool, mock(ActionFilters.class), null); PlainActionFuture future = new PlainActionFuture<>(); - usageAction.masterOperation(null, null, null, future); - XPackFeatureSet.Usage rollupUsage = future.get().getUsage(); + usageAction.masterOperation(null, null, ClusterState.EMPTY_STATE, future); + RollupFeatureSetUsage rollupUsage = (RollupFeatureSetUsage) future.get().getUsage(); BytesStreamOutput out = new BytesStreamOutput(); rollupUsage.writeTo(out); - XPackFeatureSet.Usage serializedUsage = new RollupFeatureSetUsage(out.bytes().streamInput()); + var serializedUsage = new RollupFeatureSetUsage(out.bytes().streamInput()); assertThat(rollupUsage.name(), is(serializedUsage.name())); assertThat(rollupUsage.enabled(), is(serializedUsage.enabled())); + assertThat(rollupUsage.enabled(), is(serializedUsage.enabled())); + assertThat(rollupUsage.getNumberOfRollupJobs(), equalTo(serializedUsage.getNumberOfRollupJobs())); } } diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/rollup/put_job.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/rollup/put_job.yml index 6560c6f470533..dd301c0a29f4f 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/rollup/put_job.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/rollup/put_job.yml @@ -94,6 +94,11 @@ setup: status: job_state: "stopped" + - do: {xpack.usage: {}} + - match: { rollup.available: true } + - match: { rollup.enabled: true } + - match: { rollup.number_of_rollup_jobs: 1 } + --- "Test put_job with existing name": From ef12e39cd95dad2a8f16716e8104f1d494444a55 Mon Sep 17 00:00:00 2001 From: Nik Everett Date: Tue, 7 May 2024 13:39:19 -0400 Subject: [PATCH 040/117] ESQL: Serialize Blocks in plan (#108334) This adds support for 
serializing `Block`s as part of the plan. Unlike serializing blocks in the normal stream, we attempt to prevent sending duplicate blocks. This'll make it easier to serialize blocks in the plan without worrying about duplicates. --- .../test/AbstractWireTestCase.java | 15 ++- .../esql/enrich/EnrichLookupService.java | 2 +- .../xpack/esql/io/stream/PlanStreamInput.java | 88 +++++++++++++ .../esql/io/stream/PlanStreamOutput.java | 124 +++++++++++++++++- .../logical/local/ImmediateLocalSupplier.java | 55 ++++++++ .../plan/logical/local/LocalSupplier.java | 48 +++++-- .../esql/plugin/ClusterComputeRequest.java | 2 +- .../xpack/esql/plugin/DataNodeRequest.java | 2 +- .../xpack/esql/SerializationTestUtils.java | 2 +- .../esql/io/stream/PlanNamedTypesTests.java | 46 +++---- .../esql/io/stream/PlanStreamOutputTests.java | 111 +++++++++++++++- .../logical/local/LocalSupplierTests.java | 87 ++++++++++++ .../esql/plugin/DataNodeRequestTests.java | 2 +- .../EsqlConfigurationSerializationTests.java | 8 +- 14 files changed, 541 insertions(+), 51 deletions(-) create mode 100644 x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/local/ImmediateLocalSupplier.java create mode 100644 x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plan/logical/local/LocalSupplierTests.java diff --git a/test/framework/src/main/java/org/elasticsearch/test/AbstractWireTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/AbstractWireTestCase.java index 8d4085623d156..eccbf602f2c71 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/AbstractWireTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/AbstractWireTestCase.java @@ -234,11 +234,24 @@ protected final void assertSerialization(T testInstance, TransportVersion versio * how equality is checked. 
*/ protected void assertEqualInstances(T expectedInstance, T newInstance) { - assertNotSame(newInstance, expectedInstance); + if (shouldBeSame(newInstance)) { + assertSame(newInstance, expectedInstance); + } else { + assertNotSame(newInstance, expectedInstance); + } assertThat(newInstance, equalTo(expectedInstance)); assertThat(newInstance.hashCode(), equalTo(expectedInstance.hashCode())); } + /** + * Should this copy be the same instance as what we're copying? Defaults to + * {@code false} but implementers might override if the serialization returns + * a reuse constant. + */ + protected boolean shouldBeSame(T newInstance) { + return false; + } + protected final T copyInstance(T instance) throws IOException { return copyInstance(instance, TransportVersion.current()); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichLookupService.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichLookupService.java index e9a2fb88e1991..84993a96f040d 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichLookupService.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichLookupService.java @@ -474,7 +474,7 @@ public void writeTo(StreamOutput out) throws IOException { out.writeString(matchType); out.writeString(matchField); out.writeWriteable(inputPage); - PlanStreamOutput planOut = new PlanStreamOutput(out, PlanNameRegistry.INSTANCE); + PlanStreamOutput planOut = new PlanStreamOutput(out, PlanNameRegistry.INSTANCE, null); planOut.writeCollection(extractFields, writerFromPlanWriter(PlanStreamOutput::writeNamedExpression)); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanStreamInput.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanStreamInput.java index 046e46d216bdc..93bd2518ae380 100644 --- 
a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanStreamInput.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanStreamInput.java @@ -7,10 +7,23 @@ package org.elasticsearch.xpack.esql.io.stream; +import org.elasticsearch.common.breaker.CircuitBreaker; +import org.elasticsearch.common.breaker.NoopCircuitBreaker; import org.elasticsearch.common.io.stream.NamedWriteableAwareStreamInput; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.common.util.BigArrays; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; +import org.elasticsearch.compute.data.BlockStreamInput; +import org.elasticsearch.compute.data.BlockUtils; +import org.elasticsearch.compute.data.BooleanBigArrayBlock; +import org.elasticsearch.compute.data.DoubleBigArrayBlock; +import org.elasticsearch.compute.data.IntBigArrayBlock; +import org.elasticsearch.compute.data.LongBigArrayBlock; +import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.Column; import org.elasticsearch.xpack.esql.io.stream.PlanNameRegistry.PlanNamedReader; import org.elasticsearch.xpack.esql.io.stream.PlanNameRegistry.PlanReader; import org.elasticsearch.xpack.esql.plan.physical.EsQueryExec; @@ -60,6 +73,8 @@ public NameId apply(long streamNameId) { private static final Supplier> DEFAULT_NAME_ID_FUNC = NameIdMapper::new; + private final Map cachedBlocks = new HashMap<>(); + private final PlanNameRegistry registry; // hook for nameId, where can cache and map, for now just return a NameId of the same long value. @@ -180,6 +195,79 @@ public EsqlConfiguration configuration() throws IOException { return configuration; } + /** + * Read a {@link Block} as part of the plan. + *

+ * These {@link Block}s are not tracked by {@link BlockFactory} and closing them + * does nothing so they should be small. We do make sure not to send duplicates, + * reusing blocks sent as part of the {@link EsqlConfiguration#tables()} if + * possible, otherwise sending a {@linkplain Block} inline. + *

+ */ + public Block readCachedBlock() throws IOException { + byte key = readByte(); + Block block = switch (key) { + case PlanStreamOutput.NEW_BLOCK_KEY -> { + int id = readVInt(); + // TODO track blocks read over the wire.... Or slice them from BigArrays? Something. + Block b = new BlockStreamInput( + this, + new BlockFactory(new NoopCircuitBreaker(CircuitBreaker.REQUEST), BigArrays.NON_RECYCLING_INSTANCE) + ).readNamedWriteable(Block.class); + cachedBlocks.put(id, b); + yield b; + } + case PlanStreamOutput.FROM_PREVIOUS_KEY -> cachedBlocks.get(readVInt()); + case PlanStreamOutput.FROM_CONFIG_KEY -> { + String tableName = readString(); + Map table = configuration.tables().get(tableName); + if (table == null) { + throw new IOException("can't find table [" + tableName + "]"); + } + String columnName = readString(); + Column column = table.get(columnName); + if (column == null) { + throw new IOException("can't find column[" + columnName + "]"); + } + yield column.values(); + } + default -> throw new IOException("invalid encoding for Block"); + }; + assert block instanceof LongBigArrayBlock == false : "BigArrays not supported because we don't close"; + assert block instanceof IntBigArrayBlock == false : "BigArrays not supported because we don't close"; + assert block instanceof DoubleBigArrayBlock == false : "BigArrays not supported because we don't close"; + assert block instanceof BooleanBigArrayBlock == false : "BigArrays not supported because we don't close"; + return block; + } + + /** + * Read an array of {@link Block}s as part of the plan. + *

+ * These {@link Block}s are not tracked by {@link BlockFactory} and closing them + * does nothing so they should be small. We do make sure not to send duplicates, + * reusing blocks sent as part of the {@link EsqlConfiguration#tables()} if + * possible, otherwise sending a {@linkplain Block} inline. + *

+ */ + public Block[] readCachedBlockArray() throws IOException { + int len = readArraySize(); + if (len == 0) { + return BlockUtils.NO_BLOCKS; + } + Block[] blocks = new Block[len]; + try { + for (int i = 0; i < blocks.length; i++) { + blocks[i] = readCachedBlock(); + } + return blocks; + } finally { + if (blocks[blocks.length - 1] == null) { + // Wasn't successful reading all blocks + Releasables.closeExpectNoException(blocks); + } + } + } + static void throwOnNullOptionalRead(Class type) throws IOException { final IOException e = new IOException("read optional named returned null which is not allowed, type:" + type); assert false : e; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanStreamOutput.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanStreamOutput.java index 5ee292b6add9e..d78e004aade31 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanStreamOutput.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanStreamOutput.java @@ -8,9 +8,20 @@ package org.elasticsearch.xpack.esql.io.stream; import org.elasticsearch.TransportVersion; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; +import org.elasticsearch.compute.data.BooleanBigArrayBlock; +import org.elasticsearch.compute.data.DoubleBigArrayBlock; +import org.elasticsearch.compute.data.IntBigArrayBlock; +import org.elasticsearch.compute.data.LongBigArrayBlock; +import org.elasticsearch.core.Nullable; +import org.elasticsearch.xpack.esql.Column; import org.elasticsearch.xpack.esql.io.stream.PlanNameRegistry.PlanWriter; import org.elasticsearch.xpack.esql.plan.physical.PhysicalPlan; +import org.elasticsearch.xpack.esql.session.EsqlConfiguration; 
import org.elasticsearch.xpack.ql.expression.Attribute; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.expression.NamedExpression; @@ -18,6 +29,8 @@ import org.elasticsearch.xpack.ql.tree.Source; import java.io.IOException; +import java.util.IdentityHashMap; +import java.util.Map; import java.util.function.Function; import static org.elasticsearch.xpack.ql.util.SourceUtils.writeSourceNoText; @@ -28,19 +41,42 @@ */ public final class PlanStreamOutput extends StreamOutput { + /** + * Cache of written blocks. We use an {@link IdentityHashMap} for this + * because calculating the {@link Object#hashCode} of a {@link Block} + * is slow. And so is {@link Object#equals}. So, instead we just use + * object identity. + */ + private final Map cachedBlocks = new IdentityHashMap<>(); + private final StreamOutput delegate; private final PlanNameRegistry registry; private final Function, String> nameSupplier; - public PlanStreamOutput(StreamOutput delegate, PlanNameRegistry registry) { - this(delegate, registry, PlanNamedTypes::name); + private int nextCachedBlock = 0; + + public PlanStreamOutput(StreamOutput delegate, PlanNameRegistry registry, @Nullable EsqlConfiguration configuration) + throws IOException { + this(delegate, registry, configuration, PlanNamedTypes::name); } - public PlanStreamOutput(StreamOutput delegate, PlanNameRegistry registry, Function, String> nameSupplier) { + public PlanStreamOutput( + StreamOutput delegate, + PlanNameRegistry registry, + @Nullable EsqlConfiguration configuration, + Function, String> nameSupplier + ) throws IOException { this.delegate = delegate; this.registry = registry; this.nameSupplier = nameSupplier; + if (configuration != null) { + for (Map.Entry> table : configuration.tables().entrySet()) { + for (Map.Entry column : table.getValue().entrySet()) { + cachedBlocks.put(column.getValue().values(), fromConfigKey(table.getKey(), column.getKey())); + } + } + } } public void 
writeLogicalPlanNode(LogicalPlan logicalPlan) throws IOException { @@ -130,4 +166,86 @@ public void setTransportVersion(TransportVersion version) { delegate.setTransportVersion(version); super.setTransportVersion(version); } + + /** + * Write a {@link Block} as part of the plan. + *

+ * These {@link Block}s are not tracked by {@link BlockFactory} and closing them + * does nothing so they should be small. We do make sure not to send duplicates, + * reusing blocks sent as part of the {@link EsqlConfiguration#tables()} if + * possible, otherwise sending a {@linkplain Block} inline. + *

+ */ + public void writeCachedBlock(Block block) throws IOException { + assert block instanceof LongBigArrayBlock == false : "BigArrays not supported because we don't close"; + assert block instanceof IntBigArrayBlock == false : "BigArrays not supported because we don't close"; + assert block instanceof DoubleBigArrayBlock == false : "BigArrays not supported because we don't close"; + assert block instanceof BooleanBigArrayBlock == false : "BigArrays not supported because we don't close"; + BytesReference key = cachedBlocks.get(block); + if (key != null) { + key.writeTo(this); + return; + } + writeByte(NEW_BLOCK_KEY); + writeVInt(nextCachedBlock); + cachedBlocks.put(block, fromPreviousKey(nextCachedBlock)); + writeNamedWriteable(block); + nextCachedBlock++; + } + + /** + * The byte representing a {@link Block} sent for the first time. The byte + * will be followed by a {@link StreamOutput#writeVInt} encoded identifier + * and then the contents of the {@linkplain Block} will immediately follow + * this byte. + */ + static final byte NEW_BLOCK_KEY = 0; + + /** + * The byte representing a {@link Block} that has previously been sent. + * This byte will be followed by a {@link StreamOutput#writeVInt} encoded + * identifier pointing to the block to read. + */ + static final byte FROM_PREVIOUS_KEY = 1; + + /** + * The byte representing a {@link Block} that was part of the + * {@link EsqlConfiguration#tables()} map. It is followed by a string for + * the table name and then a string for the column name. + */ + static final byte FROM_CONFIG_KEY = 2; + + /** + * Build the key for reading a {@link Block} from the cache of previously + * received {@linkplain Block}s. + */ + static BytesReference fromPreviousKey(int id) throws IOException { + try (BytesStreamOutput key = new BytesStreamOutput()) { + key.writeByte(FROM_PREVIOUS_KEY); + key.writeVInt(id); + return key.bytes(); + } + } + + /** + * Build the key for reading a {@link Block} from the {@link EsqlConfiguration}.
+ * This is important because some operations like {@code LOOKUP} frequently read + * {@linkplain Block}s directly from the configuration. + *

+ * It'd be possible to implement this by adding all of the Blocks as "previous" + * keys in the constructor and never use this construct at all, but that'd + * require there be a consistent ordering of Blocks there. We could make one, + * but I'm afraid that'd be brittle as we evolve the code. It'd make wire + * compatibility difficult. This signal is much simpler to deal with even though + * it is more bytes over the wire. + *

+ */ + static BytesReference fromConfigKey(String table, String column) throws IOException { + try (BytesStreamOutput key = new BytesStreamOutput()) { + key.writeByte(FROM_CONFIG_KEY); + key.writeString(table); + key.writeString(column); + return key.bytes(); + } + } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/local/ImmediateLocalSupplier.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/local/ImmediateLocalSupplier.java new file mode 100644 index 0000000000000..8bcf5c472b2d0 --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/local/ImmediateLocalSupplier.java @@ -0,0 +1,55 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.plan.logical.local; + +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.xpack.esql.io.stream.PlanStreamOutput; + +import java.io.IOException; +import java.util.Arrays; + +/** + * A {@link LocalSupplier} that contains already filled {@link Block}s. 
+ */ +class ImmediateLocalSupplier implements LocalSupplier { + private final Block[] blocks; + + ImmediateLocalSupplier(Block[] blocks) { + this.blocks = blocks; + } + + @Override + public Block[] get() { + return blocks; + } + + @Override + public String toString() { + return Arrays.toString(blocks); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeArray((o, v) -> ((PlanStreamOutput) o).writeCachedBlock(v), blocks); + } + + @Override + public boolean equals(Object obj) { + if (obj == null || obj.getClass() != getClass()) { + return false; + } + ImmediateLocalSupplier other = (ImmediateLocalSupplier) obj; + return Arrays.equals(blocks, other.blocks); + } + + @Override + public int hashCode() { + return Arrays.hashCode(blocks); + } +} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/local/LocalSupplier.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/local/LocalSupplier.java index 7fa82359ffc45..3b81da06d7077 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/local/LocalSupplier.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/local/LocalSupplier.java @@ -7,13 +7,25 @@ package org.elasticsearch.xpack.esql.plan.logical.local; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BlockUtils; +import org.elasticsearch.xpack.esql.io.stream.PlanStreamInput; -import java.util.Arrays; +import java.io.IOException; import java.util.function.Supplier; -public interface LocalSupplier extends Supplier { +/** + * Supplies fixed {@link Block}s for things calculated at plan time. + *

+ * This is {@link Writeable} so we can model {@code LOOKUP} and + * hash joins which have to go over the wire. But many implementers + * don't have to go over the wire and they should feel free to throw + * {@link UnsupportedOperationException}. + *

+ */ +public interface LocalSupplier extends Supplier, Writeable { LocalSupplier EMPTY = new LocalSupplier() { @Override @@ -25,19 +37,29 @@ public Block[] get() { public String toString() { return "EMPTY"; } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeVInt(0); + } + + @Override + public boolean equals(Object obj) { + return obj == EMPTY; + } + + @Override + public int hashCode() { + return 0; + } }; static LocalSupplier of(Block[] blocks) { - return new LocalSupplier() { - @Override - public Block[] get() { - return blocks; - } - - @Override - public String toString() { - return Arrays.toString(blocks); - } - }; + return new ImmediateLocalSupplier(blocks); + } + + static LocalSupplier readFrom(PlanStreamInput in) throws IOException { + Block[] blocks = in.readCachedBlockArray(); + return blocks.length == 0 ? EMPTY : of(blocks); } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ClusterComputeRequest.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ClusterComputeRequest.java index 2f5920a4e32c9..bedbd517f1184 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ClusterComputeRequest.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ClusterComputeRequest.java @@ -93,7 +93,7 @@ public void writeTo(StreamOutput out) throws IOException { out.writeString(clusterAlias); out.writeString(sessionId); configuration.writeTo(out); - new PlanStreamOutput(out, planNameRegistry).writePhysicalPlanNode(plan); + new PlanStreamOutput(out, planNameRegistry, configuration).writePhysicalPlanNode(plan); out.writeStringArray(indices); out.writeStringArray(originalIndices); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/DataNodeRequest.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/DataNodeRequest.java index 6c87b226aa590..b72feadd20c61 100644 --- 
a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/DataNodeRequest.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/DataNodeRequest.java @@ -90,7 +90,7 @@ public void writeTo(StreamOutput out) throws IOException { } out.writeCollection(shardIds); out.writeMap(aliasFilters); - new PlanStreamOutput(out, planNameRegistry).writePhysicalPlanNode(plan); + new PlanStreamOutput(out, planNameRegistry, configuration).writePhysicalPlanNode(plan); } @Override diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/SerializationTestUtils.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/SerializationTestUtils.java index 312250d2f58d0..185fb14503cab 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/SerializationTestUtils.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/SerializationTestUtils.java @@ -79,7 +79,7 @@ public static T serializeDeserialize(T orig, Serializer serializer, Deser public static T serializeDeserialize(T orig, Serializer serializer, Deserializer deserializer, EsqlConfiguration config) { try (BytesStreamOutput out = new BytesStreamOutput()) { - PlanStreamOutput planStreamOutput = new PlanStreamOutput(out, planNameRegistry); + PlanStreamOutput planStreamOutput = new PlanStreamOutput(out, planNameRegistry, config); serializer.write(planStreamOutput, orig); StreamInput in = new NamedWriteableAwareStreamInput( ByteBufferStreamInput.wrap(BytesReference.toBytes(out.bytes())), diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/io/stream/PlanNamedTypesTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/io/stream/PlanNamedTypesTests.java index 23f9e93ac72a6..cfa3b4a8ea6ae 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/io/stream/PlanNamedTypesTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/io/stream/PlanNamedTypesTests.java @@ 
-207,7 +207,7 @@ public void testWrappedStreamSimple() throws IOException { // write BytesStreamOutput bso = new BytesStreamOutput(); bso.writeString("hello"); - PlanStreamOutput out = new PlanStreamOutput(bso, planNameRegistry); + PlanStreamOutput out = new PlanStreamOutput(bso, planNameRegistry, null); var plan = new RowExec(Source.EMPTY, List.of(new Alias(Source.EMPTY, "foo", field("field", DataTypes.LONG)))); out.writePhysicalPlanNode(plan); bso.writeVInt(11_345); @@ -230,7 +230,7 @@ public void testUnsupportedAttributeSimple() throws IOException { new NameId() ); BytesStreamOutput bso = new BytesStreamOutput(); - PlanStreamOutput out = new PlanStreamOutput(bso, planNameRegistry); + PlanStreamOutput out = new PlanStreamOutput(bso, planNameRegistry, null); PlanNamedTypes.writeUnsupportedAttr(out, orig); var in = planStreamInput(bso); var deser = PlanNamedTypes.readUnsupportedAttr(in); @@ -255,7 +255,7 @@ public void testFieldAttributeSimple() throws IOException { true // synthetic ); BytesStreamOutput bso = new BytesStreamOutput(); - PlanStreamOutput out = new PlanStreamOutput(bso, planNameRegistry); + PlanStreamOutput out = new PlanStreamOutput(bso, planNameRegistry, null); PlanNamedTypes.writeFieldAttribute(out, orig); var in = planStreamInput(bso); var deser = PlanNamedTypes.readFieldAttribute(in); @@ -277,7 +277,7 @@ public void testKeywordEsFieldSimple() throws IOException { true // alias ); BytesStreamOutput bso = new BytesStreamOutput(); - PlanStreamOutput out = new PlanStreamOutput(bso, planNameRegistry); + PlanStreamOutput out = new PlanStreamOutput(bso, planNameRegistry, null); PlanNamedTypes.writeKeywordEsField(out, orig); var deser = PlanNamedTypes.readKeywordEsField(planStreamInput(bso)); EqualsHashCodeTestUtils.checkEqualsAndHashCode(orig, unused -> deser); @@ -295,7 +295,7 @@ public void testTextdEsFieldSimple() throws IOException { true // alias ); BytesStreamOutput bso = new BytesStreamOutput(); - PlanStreamOutput out = new PlanStreamOutput(bso, 
planNameRegistry); + PlanStreamOutput out = new PlanStreamOutput(bso, planNameRegistry, null); PlanNamedTypes.writeTextEsField(out, orig); var deser = PlanNamedTypes.readTextEsField(planStreamInput(bso)); EqualsHashCodeTestUtils.checkEqualsAndHashCode(orig, unused -> deser); @@ -308,7 +308,7 @@ public void testTextEsField() { public void testInvalidMappedFieldSimple() throws IOException { var orig = new InvalidMappedField("foo", "bar"); BytesStreamOutput bso = new BytesStreamOutput(); - PlanStreamOutput out = new PlanStreamOutput(bso, planNameRegistry); + PlanStreamOutput out = new PlanStreamOutput(bso, planNameRegistry, null); PlanNamedTypes.writeInvalidMappedField(out, orig); var deser = PlanNamedTypes.readInvalidMappedField(planStreamInput(bso)); EqualsHashCodeTestUtils.checkEqualsAndHashCode(orig, unused -> deser); @@ -321,7 +321,7 @@ public void testInvalidMappedField() { public void testEsDateFieldSimple() throws IOException { var orig = DateEsField.dateEsField("birth_date", Map.of(), false); BytesStreamOutput bso = new BytesStreamOutput(); - PlanStreamOutput out = new PlanStreamOutput(bso, planNameRegistry); + PlanStreamOutput out = new PlanStreamOutput(bso, planNameRegistry, null); PlanNamedTypes.writeDateEsField(out, orig); var deser = PlanNamedTypes.readDateEsField(planStreamInput(bso)); EqualsHashCodeTestUtils.checkEqualsAndHashCode(orig, unused -> deser); @@ -330,7 +330,7 @@ public void testEsDateFieldSimple() throws IOException { public void testBinComparisonSimple() throws IOException { var orig = new Equals(Source.EMPTY, field("foo", DataTypes.DOUBLE), field("bar", DataTypes.DOUBLE)); BytesStreamOutput bso = new BytesStreamOutput(); - PlanStreamOutput out = new PlanStreamOutput(bso, planNameRegistry); + PlanStreamOutput out = new PlanStreamOutput(bso, planNameRegistry, null); out.writeNamed(EsqlBinaryComparison.class, orig); var deser = (Equals) planStreamInput(bso).readNamed(EsqlBinaryComparison.class); 
EqualsHashCodeTestUtils.checkEqualsAndHashCode(orig, unused -> deser); @@ -345,7 +345,7 @@ public void testBinComparison() { public void testAggFunctionSimple() throws IOException { var orig = new Avg(Source.EMPTY, field("foo_val", DataTypes.DOUBLE)); BytesStreamOutput bso = new BytesStreamOutput(); - PlanStreamOutput out = new PlanStreamOutput(bso, planNameRegistry); + PlanStreamOutput out = new PlanStreamOutput(bso, planNameRegistry, null); out.writeNamed(AggregateFunction.class, orig); var deser = (Avg) planStreamInput(bso).readNamed(AggregateFunction.class); EqualsHashCodeTestUtils.checkEqualsAndHashCode(orig, unused -> deser); @@ -358,7 +358,7 @@ public void testAggFunction() { public void testArithmeticOperationSimple() throws IOException { var orig = new Add(Source.EMPTY, field("foo", DataTypes.LONG), field("bar", DataTypes.LONG)); BytesStreamOutput bso = new BytesStreamOutput(); - PlanStreamOutput out = new PlanStreamOutput(bso, planNameRegistry); + PlanStreamOutput out = new PlanStreamOutput(bso, planNameRegistry, null); out.writeNamed(ArithmeticOperation.class, orig); var deser = (Add) planStreamInput(bso).readNamed(ArithmeticOperation.class); EqualsHashCodeTestUtils.checkEqualsAndHashCode(orig, unused -> deser); @@ -373,7 +373,7 @@ public void testArithmeticOperation() { public void testSubStringSimple() throws IOException { var orig = new Substring(Source.EMPTY, field("foo", DataTypes.KEYWORD), new Literal(Source.EMPTY, 1, DataTypes.INTEGER), null); BytesStreamOutput bso = new BytesStreamOutput(); - PlanStreamOutput out = new PlanStreamOutput(bso, planNameRegistry); + PlanStreamOutput out = new PlanStreamOutput(bso, planNameRegistry, null); PlanNamedTypes.writeSubstring(out, orig); var deser = PlanNamedTypes.readSubstring(planStreamInput(bso)); EqualsHashCodeTestUtils.checkEqualsAndHashCode(orig, unused -> deser); @@ -382,7 +382,7 @@ public void testSubStringSimple() throws IOException { public void testStartsWithSimple() throws IOException { var orig = 
new StartsWith(Source.EMPTY, field("foo", DataTypes.KEYWORD), new Literal(Source.EMPTY, "fo", DataTypes.KEYWORD)); BytesStreamOutput bso = new BytesStreamOutput(); - PlanStreamOutput out = new PlanStreamOutput(bso, planNameRegistry); + PlanStreamOutput out = new PlanStreamOutput(bso, planNameRegistry, null); PlanNamedTypes.writeStartsWith(out, orig); var deser = PlanNamedTypes.readStartsWith(planStreamInput(bso)); EqualsHashCodeTestUtils.checkEqualsAndHashCode(orig, unused -> deser); @@ -391,7 +391,7 @@ public void testStartsWithSimple() throws IOException { public void testRoundSimple() throws IOException { var orig = new Round(Source.EMPTY, field("value", DataTypes.DOUBLE), new Literal(Source.EMPTY, 1, DataTypes.INTEGER)); BytesStreamOutput bso = new BytesStreamOutput(); - PlanStreamOutput out = new PlanStreamOutput(bso, planNameRegistry); + PlanStreamOutput out = new PlanStreamOutput(bso, planNameRegistry, null); PlanNamedTypes.writeRound(out, orig); var deser = PlanNamedTypes.readRound(planStreamInput(bso)); EqualsHashCodeTestUtils.checkEqualsAndHashCode(orig, unused -> deser); @@ -400,7 +400,7 @@ public void testRoundSimple() throws IOException { public void testPowSimple() throws IOException { var orig = new Pow(Source.EMPTY, field("value", DataTypes.DOUBLE), new Literal(Source.EMPTY, 1, DataTypes.INTEGER)); BytesStreamOutput bso = new BytesStreamOutput(); - PlanStreamOutput out = new PlanStreamOutput(bso, planNameRegistry); + PlanStreamOutput out = new PlanStreamOutput(bso, planNameRegistry, null); PlanNamedTypes.writePow(out, orig); var deser = PlanNamedTypes.readPow(planStreamInput(bso)); EqualsHashCodeTestUtils.checkEqualsAndHashCode(orig, unused -> deser); @@ -409,7 +409,7 @@ public void testPowSimple() throws IOException { public void testAliasSimple() throws IOException { var orig = new Alias(Source.EMPTY, "alias_name", field("a", DataTypes.LONG)); BytesStreamOutput bso = new BytesStreamOutput(); - PlanStreamOutput out = new PlanStreamOutput(bso, 
planNameRegistry); + PlanStreamOutput out = new PlanStreamOutput(bso, planNameRegistry, null); PlanNamedTypes.writeAlias(out, orig); var in = planStreamInput(bso); var deser = PlanNamedTypes.readAlias(in); @@ -420,7 +420,7 @@ public void testAliasSimple() throws IOException { public void testLiteralSimple() throws IOException { var orig = new Literal(Source.EMPTY, 1, DataTypes.INTEGER); BytesStreamOutput bso = new BytesStreamOutput(); - PlanStreamOutput out = new PlanStreamOutput(bso, planNameRegistry); + PlanStreamOutput out = new PlanStreamOutput(bso, planNameRegistry, null); PlanNamedTypes.writeLiteral(out, orig); var deser = PlanNamedTypes.readLiteral(planStreamInput(bso)); EqualsHashCodeTestUtils.checkEqualsAndHashCode(orig, unused -> deser); @@ -429,7 +429,7 @@ public void testLiteralSimple() throws IOException { public void testOrderSimple() throws IOException { var orig = new Order(Source.EMPTY, field("val", DataTypes.INTEGER), Order.OrderDirection.ASC, Order.NullsPosition.FIRST); BytesStreamOutput bso = new BytesStreamOutput(); - PlanStreamOutput out = new PlanStreamOutput(bso, planNameRegistry); + PlanStreamOutput out = new PlanStreamOutput(bso, planNameRegistry, null); PlanNamedTypes.writeOrder(out, orig); var deser = (Order) PlanNamedTypes.readOrder(planStreamInput(bso)); EqualsHashCodeTestUtils.checkEqualsAndHashCode(orig, unused -> deser); @@ -438,7 +438,7 @@ public void testOrderSimple() throws IOException { public void testFieldSortSimple() throws IOException { var orig = new EsQueryExec.FieldSort(field("val", DataTypes.LONG), Order.OrderDirection.ASC, Order.NullsPosition.FIRST); BytesStreamOutput bso = new BytesStreamOutput(); - PlanStreamOutput out = new PlanStreamOutput(bso, planNameRegistry); + PlanStreamOutput out = new PlanStreamOutput(bso, planNameRegistry, null); PlanNamedTypes.writeFieldSort(out, orig); var deser = PlanNamedTypes.readFieldSort(planStreamInput(bso)); EqualsHashCodeTestUtils.checkEqualsAndHashCode(orig, unused -> deser); @@ 
-447,7 +447,7 @@ public void testFieldSortSimple() throws IOException { public void testEsIndexSimple() throws IOException { var orig = new EsIndex("test*", Map.of("first_name", new KeywordEsField("first_name")), Set.of("test1", "test2")); BytesStreamOutput bso = new BytesStreamOutput(); - PlanStreamOutput out = new PlanStreamOutput(bso, planNameRegistry); + PlanStreamOutput out = new PlanStreamOutput(bso, planNameRegistry, null); PlanNamedTypes.writeEsIndex(out, orig); var deser = PlanNamedTypes.readEsIndex(planStreamInput(bso)); EqualsHashCodeTestUtils.checkEqualsAndHashCode(orig, unused -> deser); @@ -457,7 +457,7 @@ public void testDissectParserSimple() throws IOException { String pattern = "%{b} %{c}"; var orig = new Dissect.Parser(pattern, ",", new DissectParser(pattern, ",")); BytesStreamOutput bso = new BytesStreamOutput(); - PlanStreamOutput out = new PlanStreamOutput(bso, planNameRegistry); + PlanStreamOutput out = new PlanStreamOutput(bso, planNameRegistry, null); PlanNamedTypes.writeDissectParser(out, orig); var deser = PlanNamedTypes.readDissectParser(planStreamInput(bso)); EqualsHashCodeTestUtils.checkEqualsAndHashCode(orig, unused -> deser); @@ -466,7 +466,7 @@ public void testDissectParserSimple() throws IOException { public void testEsRelation() throws IOException { var orig = new EsRelation(Source.EMPTY, randomEsIndex(), List.of(randomFieldAttribute()), randomEsSourceOptions(), randomBoolean()); BytesStreamOutput bso = new BytesStreamOutput(); - PlanStreamOutput out = new PlanStreamOutput(bso, planNameRegistry); + PlanStreamOutput out = new PlanStreamOutput(bso, planNameRegistry, null); PlanNamedTypes.writeEsRelation(out, orig); var deser = PlanNamedTypes.readEsRelation(planStreamInput(bso)); EqualsHashCodeTestUtils.checkEqualsAndHashCode(orig, unused -> deser); @@ -479,7 +479,7 @@ public void testEsqlProject() throws IOException { List.of(randomFieldAttribute()) ); BytesStreamOutput bso = new BytesStreamOutput(); - PlanStreamOutput out = new 
PlanStreamOutput(bso, planNameRegistry); + PlanStreamOutput out = new PlanStreamOutput(bso, planNameRegistry, null); PlanNamedTypes.writeEsqlProject(out, orig); var deser = PlanNamedTypes.readEsqlProject(planStreamInput(bso)); EqualsHashCodeTestUtils.checkEqualsAndHashCode(orig, unused -> deser); @@ -495,7 +495,7 @@ public void testMvExpand() throws IOException { ); var orig = new MvExpand(Source.EMPTY, esRelation, randomFieldAttribute(), randomFieldAttribute()); BytesStreamOutput bso = new BytesStreamOutput(); - PlanStreamOutput out = new PlanStreamOutput(bso, planNameRegistry); + PlanStreamOutput out = new PlanStreamOutput(bso, planNameRegistry, null); PlanNamedTypes.writeMvExpand(out, orig); var deser = PlanNamedTypes.readMvExpand(planStreamInput(bso)); EqualsHashCodeTestUtils.checkEqualsAndHashCode(orig, unused -> deser); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/io/stream/PlanStreamOutputTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/io/stream/PlanStreamOutputTests.java index 7f683e8f8003b..bc69b4454df81 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/io/stream/PlanStreamOutputTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/io/stream/PlanStreamOutputTests.java @@ -8,23 +8,130 @@ package org.elasticsearch.xpack.esql.io.stream; import org.elasticsearch.TransportVersion; +import org.elasticsearch.common.breaker.NoopCircuitBreaker; import org.elasticsearch.common.io.stream.BytesStreamOutput; +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; +import org.elasticsearch.common.util.BigArrays; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; +import org.elasticsearch.compute.data.IntBlock; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.TransportVersionUtils; +import org.elasticsearch.xpack.esql.Column; +import 
org.elasticsearch.xpack.esql.session.EsqlConfiguration; +import org.elasticsearch.xpack.esql.session.EsqlConfigurationSerializationTests; +import org.elasticsearch.xpack.ql.type.DataTypes; + +import java.io.IOException; +import java.util.Map; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.greaterThan; +import static org.hamcrest.Matchers.lessThan; +import static org.hamcrest.Matchers.not; +import static org.hamcrest.Matchers.sameInstance; public class PlanStreamOutputTests extends ESTestCase { - public void testTransportVersion() { + public void testTransportVersion() throws IOException { BytesStreamOutput out = new BytesStreamOutput(); TransportVersion v1 = TransportVersionUtils.randomCompatibleVersion(random()); out.setTransportVersion(v1); - PlanStreamOutput planOut = new PlanStreamOutput(out, PlanNameRegistry.INSTANCE); + PlanStreamOutput planOut = new PlanStreamOutput( + out, + PlanNameRegistry.INSTANCE, + randomBoolean() ? null : EsqlConfigurationSerializationTests.randomConfiguration() + ); assertThat(planOut.getTransportVersion(), equalTo(v1)); TransportVersion v2 = TransportVersionUtils.randomCompatibleVersion(random()); planOut.setTransportVersion(v2); assertThat(planOut.getTransportVersion(), equalTo(v2)); assertThat(out.getTransportVersion(), equalTo(v2)); } + + public void testWriteBlockFromConfig() throws IOException { + String tableName = randomAlphaOfLength(5); + String columnName = randomAlphaOfLength(10); + try (Column c = randomColumn()) { + EsqlConfiguration configuration = randomConfiguration(Map.of(tableName, Map.of(columnName, c))); + try ( + BytesStreamOutput out = new BytesStreamOutput(); + PlanStreamOutput planStream = new PlanStreamOutput(out, PlanNameRegistry.INSTANCE, configuration) + ) { + planStream.writeCachedBlock(c.values()); + assertThat(out.bytes().length(), equalTo(3 + tableName.length() + columnName.length())); + try ( + PlanStreamInput in = new PlanStreamInput(out.bytes().streamInput(), 
PlanNameRegistry.INSTANCE, REGISTRY, configuration) + ) { + assertThat(in.readCachedBlock(), sameInstance(c.values())); + } + } + } + } + + public void testWriteBlockOnce() throws IOException { + try (Block b = randomColumn().values()) { + EsqlConfiguration configuration = EsqlConfigurationSerializationTests.randomConfiguration(); + try ( + BytesStreamOutput out = new BytesStreamOutput(); + PlanStreamOutput planStream = new PlanStreamOutput(out, PlanNameRegistry.INSTANCE, configuration) + ) { + planStream.writeCachedBlock(b); + assertThat(out.bytes().length(), greaterThan(4 * LEN)); + assertThat(out.bytes().length(), lessThan(8 * LEN)); + try ( + PlanStreamInput in = new PlanStreamInput(out.bytes().streamInput(), PlanNameRegistry.INSTANCE, REGISTRY, configuration) + ) { + Block read = in.readCachedBlock(); + assertThat(read, not(sameInstance(b))); + assertThat(read, equalTo(b)); + } + } + } + } + + public void testWriteBlockTwice() throws IOException { + try (Block b = randomColumn().values()) { + EsqlConfiguration configuration = EsqlConfigurationSerializationTests.randomConfiguration(); + try ( + BytesStreamOutput out = new BytesStreamOutput(); + PlanStreamOutput planStream = new PlanStreamOutput(out, PlanNameRegistry.INSTANCE, configuration) + ) { + planStream.writeCachedBlock(b); + planStream.writeCachedBlock(b); + assertThat(out.bytes().length(), greaterThan(4 * LEN)); + assertThat(out.bytes().length(), lessThan(8 * LEN)); + try ( + PlanStreamInput in = new PlanStreamInput(out.bytes().streamInput(), PlanNameRegistry.INSTANCE, REGISTRY, configuration) + ) { + Block read = in.readCachedBlock(); + assertThat(read, not(sameInstance(b))); + assertThat(read, equalTo(b)); + assertThat(in.readCachedBlock(), sameInstance(read)); + } + } + } + } + + private EsqlConfiguration randomConfiguration(Map> tables) { + return EsqlConfigurationSerializationTests.randomConfiguration("query_" + randomAlphaOfLength(1), tables); + } + + private static final int LEN = 10000; + + 
private Column randomColumn() { + try (IntBlock.Builder ints = BLOCK_FACTORY.newIntBlockBuilder(LEN)) { + for (int i = 0; i < LEN; i++) { + ints.appendInt(randomInt()); + } + return new Column(DataTypes.INTEGER, ints.build()); + } + } + + private static final BlockFactory BLOCK_FACTORY = BlockFactory.getInstance( + new NoopCircuitBreaker("noop-esql-breaker"), + BigArrays.NON_RECYCLING_INSTANCE + ); + + private static final NamedWriteableRegistry REGISTRY = new NamedWriteableRegistry(Block.getNamedWriteables()); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plan/logical/local/LocalSupplierTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plan/logical/local/LocalSupplierTests.java new file mode 100644 index 0000000000000..4206adf1492fd --- /dev/null +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plan/logical/local/LocalSupplierTests.java @@ -0,0 +1,87 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.esql.plan.logical.local; + +import org.elasticsearch.TransportVersion; +import org.elasticsearch.common.breaker.NoopCircuitBreaker; +import org.elasticsearch.common.io.stream.BytesStreamOutput; +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.util.BigArrays; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; +import org.elasticsearch.compute.data.IntBlock; +import org.elasticsearch.test.AbstractWireTestCase; +import org.elasticsearch.xpack.esql.io.stream.PlanNameRegistry; +import org.elasticsearch.xpack.esql.io.stream.PlanStreamInput; +import org.elasticsearch.xpack.esql.io.stream.PlanStreamOutput; + +import java.io.IOException; +import java.util.Arrays; + +public class LocalSupplierTests extends AbstractWireTestCase { + private static final BlockFactory BLOCK_FACTORY = BlockFactory.getInstance( + new NoopCircuitBreaker("noop-esql-breaker"), + BigArrays.NON_RECYCLING_INSTANCE + ); + + @Override + protected LocalSupplier copyInstance(LocalSupplier instance, TransportVersion version) throws IOException { + try (BytesStreamOutput output = new BytesStreamOutput()) { + output.setTransportVersion(version); + instance.writeTo(new PlanStreamOutput(output, PlanNameRegistry.INSTANCE, null)); + try (StreamInput in = output.bytes().streamInput()) { + in.setTransportVersion(version); + return LocalSupplier.readFrom(new PlanStreamInput(in, PlanNameRegistry.INSTANCE, getNamedWriteableRegistry(), null)); + } + } + } + + @Override + protected LocalSupplier createTestInstance() { + return randomBoolean() ? 
LocalSupplier.EMPTY : randomNonEmpty(); + } + + private LocalSupplier randomNonEmpty() { + return LocalSupplier.of(randomList(1, 10, LocalSupplierTests::randomBlock).toArray(Block[]::new)); + } + + @Override + protected LocalSupplier mutateInstance(LocalSupplier instance) throws IOException { + Block[] blocks = instance.get(); + if (blocks.length > 0 && randomBoolean()) { + if (randomBoolean()) { + return LocalSupplier.EMPTY; + } + return LocalSupplier.of(Arrays.copyOf(blocks, blocks.length - 1, Block[].class)); + } + blocks = Arrays.copyOf(blocks, blocks.length + 1, Block[].class); + blocks[blocks.length - 1] = randomBlock(); + return LocalSupplier.of(blocks); + } + + private static Block randomBlock() { + int len = between(1, 1000); + try (IntBlock.Builder ints = BLOCK_FACTORY.newIntBlockBuilder(len)) { + for (int i = 0; i < len; i++) { + ints.appendInt(randomInt()); + } + return ints.build(); + } + } + + @Override + protected boolean shouldBeSame(LocalSupplier newInstance) { + return newInstance.get().length == 0; + } + + @Override + protected NamedWriteableRegistry getNamedWriteableRegistry() { + return new NamedWriteableRegistry(Block.getNamedWriteables()); + } +} diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plugin/DataNodeRequestTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plugin/DataNodeRequestTests.java index 45d57b2fa411e..c9c5091db2894 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plugin/DataNodeRequestTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plugin/DataNodeRequestTests.java @@ -86,7 +86,7 @@ protected DataNodeRequest createTestInstance() { ); DataNodeRequest request = new DataNodeRequest( sessionId, - EsqlConfigurationSerializationTests.randomConfiguration(query), + EsqlConfigurationSerializationTests.randomConfiguration(query, EsqlConfigurationSerializationTests.randomTables()), randomAlphaOfLength(10), shardIds, aliasFilters, 
diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/session/EsqlConfigurationSerializationTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/session/EsqlConfigurationSerializationTests.java index 3e91321651928..41c39e88b943e 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/session/EsqlConfigurationSerializationTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/session/EsqlConfigurationSerializationTests.java @@ -53,10 +53,10 @@ private static QueryPragmas randomQueryPragmas() { public static EsqlConfiguration randomConfiguration() { int len = randomIntBetween(1, 300) + (frequently() ? 0 : QUERY_COMPRESS_THRESHOLD_CHARS); - return randomConfiguration(randomRealisticUnicodeOfLength(len)); + return randomConfiguration(randomRealisticUnicodeOfLength(len), randomTables()); } - public static EsqlConfiguration randomConfiguration(String query) { + public static EsqlConfiguration randomConfiguration(String query, Map> tables) { var zoneId = randomZone(); var locale = randomLocale(random()); var username = randomAlphaOfLengthBetween(1, 10); @@ -75,11 +75,11 @@ public static EsqlConfiguration randomConfiguration(String query) { defaultTruncation, query, profile, - randomTables() + tables ); } - static Map> randomTables() { + public static Map> randomTables() { if (randomBoolean()) { return Map.of(); } From 76dc0eb969b877907083564a27d7ecce1308537c Mon Sep 17 00:00:00 2001 From: Nicolas Chaulet Date: Tue, 7 May 2024 14:37:36 -0400 Subject: [PATCH 041/117] [Fleet] Add namespaces to fleet managed indices (#108363) --- .../src/main/resources/fleet-actions-results.json | 3 +++ .../template-resources/src/main/resources/fleet-actions.json | 3 +++ .../template-resources/src/main/resources/fleet-agents.json | 3 +++ .../src/main/resources/fleet-enrollment-api-keys.json | 3 +++ .../src/main/resources/fleet-file-fromhost-data.json | 3 +++ 
.../src/main/resources/fleet-file-fromhost-meta.json | 3 +++ .../src/main/resources/fleet-file-tohost-data.json | 3 +++ .../src/main/resources/fleet-file-tohost-meta.json | 3 +++ .../template-resources/src/main/resources/fleet-policies.json | 3 +++ 9 files changed, 27 insertions(+) diff --git a/x-pack/plugin/core/template-resources/src/main/resources/fleet-actions-results.json b/x-pack/plugin/core/template-resources/src/main/resources/fleet-actions-results.json index 88c51a9aef284..85a744200162c 100644 --- a/x-pack/plugin/core/template-resources/src/main/resources/fleet-actions-results.json +++ b/x-pack/plugin/core/template-resources/src/main/resources/fleet-actions-results.json @@ -53,6 +53,9 @@ }, "completed_at": { "type": "date" + }, + "namespaces": { + "type": "keyword" } } } diff --git a/x-pack/plugin/core/template-resources/src/main/resources/fleet-actions.json b/x-pack/plugin/core/template-resources/src/main/resources/fleet-actions.json index 2b3ecbac92352..8702a098da826 100644 --- a/x-pack/plugin/core/template-resources/src/main/resources/fleet-actions.json +++ b/x-pack/plugin/core/template-resources/src/main/resources/fleet-actions.json @@ -56,6 +56,9 @@ "type": "binary" } } + }, + "namespaces": { + "type": "keyword" } } } diff --git a/x-pack/plugin/core/template-resources/src/main/resources/fleet-agents.json b/x-pack/plugin/core/template-resources/src/main/resources/fleet-agents.json index 6a89d7874c073..ad66ad8796862 100644 --- a/x-pack/plugin/core/template-resources/src/main/resources/fleet-agents.json +++ b/x-pack/plugin/core/template-resources/src/main/resources/fleet-agents.json @@ -316,6 +316,9 @@ }, "tags": { "type": "keyword" + }, + "namespaces": { + "type": "keyword" } } } diff --git a/x-pack/plugin/core/template-resources/src/main/resources/fleet-enrollment-api-keys.json b/x-pack/plugin/core/template-resources/src/main/resources/fleet-enrollment-api-keys.json index 6be455e02825a..b2a116c0c592e 100644 --- 
a/x-pack/plugin/core/template-resources/src/main/resources/fleet-enrollment-api-keys.json +++ b/x-pack/plugin/core/template-resources/src/main/resources/fleet-enrollment-api-keys.json @@ -33,6 +33,9 @@ }, "updated_at": { "type": "date" + }, + "namespaces": { + "type": "keyword" } } } diff --git a/x-pack/plugin/core/template-resources/src/main/resources/fleet-file-fromhost-data.json b/x-pack/plugin/core/template-resources/src/main/resources/fleet-file-fromhost-data.json index 698e4359e73c1..20e9ccf8daff3 100644 --- a/x-pack/plugin/core/template-resources/src/main/resources/fleet-file-fromhost-data.json +++ b/x-pack/plugin/core/template-resources/src/main/resources/fleet-file-fromhost-data.json @@ -38,6 +38,9 @@ "last": { "type": "boolean", "index": false + }, + "namespaces": { + "type": "keyword" } } } diff --git a/x-pack/plugin/core/template-resources/src/main/resources/fleet-file-fromhost-meta.json b/x-pack/plugin/core/template-resources/src/main/resources/fleet-file-fromhost-meta.json index 268e53a9470a8..9bf0c8b23f5ad 100644 --- a/x-pack/plugin/core/template-resources/src/main/resources/fleet-file-fromhost-meta.json +++ b/x-pack/plugin/core/template-resources/src/main/resources/fleet-file-fromhost-meta.json @@ -25,6 +25,9 @@ "@timestamp": { "type": "date" }, + "namespaces": { + "type": "keyword" + }, "upload_start": { "type": "date" }, diff --git a/x-pack/plugin/core/template-resources/src/main/resources/fleet-file-tohost-data.json b/x-pack/plugin/core/template-resources/src/main/resources/fleet-file-tohost-data.json index 7247920e5e293..7c990600749d3 100644 --- a/x-pack/plugin/core/template-resources/src/main/resources/fleet-file-tohost-data.json +++ b/x-pack/plugin/core/template-resources/src/main/resources/fleet-file-tohost-data.json @@ -25,6 +25,9 @@ "@timestamp": { "type": "date" }, + "namespaces": { + "type": "keyword" + }, "data": { "type": "binary", "store": true diff --git 
a/x-pack/plugin/core/template-resources/src/main/resources/fleet-file-tohost-meta.json b/x-pack/plugin/core/template-resources/src/main/resources/fleet-file-tohost-meta.json index bdf7e4d00d869..84a3fe05777a9 100644 --- a/x-pack/plugin/core/template-resources/src/main/resources/fleet-file-tohost-meta.json +++ b/x-pack/plugin/core/template-resources/src/main/resources/fleet-file-tohost-meta.json @@ -25,6 +25,9 @@ "@timestamp": { "type": "date" }, + "namespaces": { + "type": "keyword" + }, "agent_id": { "type": "keyword" }, diff --git a/x-pack/plugin/core/template-resources/src/main/resources/fleet-policies.json b/x-pack/plugin/core/template-resources/src/main/resources/fleet-policies.json index 44e2e67dd06c3..79b4ed0109f32 100644 --- a/x-pack/plugin/core/template-resources/src/main/resources/fleet-policies.json +++ b/x-pack/plugin/core/template-resources/src/main/resources/fleet-policies.json @@ -13,6 +13,9 @@ "coordinator_idx": { "type": "integer" }, + "namespaces": { + "type": "keyword" + }, "data": { "enabled": false, "type": "object" From 568d6daf905a92e3a48dbe23cd7573a031e7e930 Mon Sep 17 00:00:00 2001 From: Jonathan Buttner <56361221+jonathan-buttner@users.noreply.github.com> Date: Tue, 7 May 2024 15:06:52 -0400 Subject: [PATCH 042/117] Revert "Adding manage_ml (#108262)" (#108371) Reverts elastic/elasticsearch#108262 Accidentally merged the above PR without security's approval. They asked us to revert for now and we'll continue discussing on a new PR. 
--- .../security/authz/store/KibanaOwnedReservedRoleDescriptors.java | 1 - 1 file changed, 1 deletion(-) diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/KibanaOwnedReservedRoleDescriptors.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/KibanaOwnedReservedRoleDescriptors.java index 787463355f594..8e4f9108c3b9c 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/KibanaOwnedReservedRoleDescriptors.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/KibanaOwnedReservedRoleDescriptors.java @@ -69,7 +69,6 @@ static RoleDescriptor kibanaSystem(String name) { // For Fleet package upgrade "manage_pipeline", "manage_ilm", - "manage_inference", // For the endpoint package that ships a transform "manage_transform", InvalidateApiKeyAction.NAME, From 4ef4b9d4204ba750c0cdad3e180c8a43d4778456 Mon Sep 17 00:00:00 2001 From: Lee Hinman Date: Tue, 7 May 2024 13:49:12 -0600 Subject: [PATCH 043/117] Create a new NodeRequest for every NodesDataTiersUsageTransport use (#108379) * Create a new NodeRequest for every NodesDataTiersUsageTransport use This was previously re-using a single instance for the NodeRequest, which can cause: ``` java.lang.AssertionError: Request [ org.elasticsearch.xpack.core.datatiers.NodesDataTiersUsageTransportAction$NodeRequest/WaEyD6eiTRSJSFBG-RCLrg:2055] didn't preserve it parentTaskId at org.elasticsearch.server@8.14.0/org.elasticsearch.tasks.TaskManager.register(TaskManager.java:145) at org.elasticsearch.server@8.14.0/org.elasticsearch.tasks.TaskManager.register(TaskManager.java:117) at org.elasticsearch.server@8.14.0/org.elasticsearch.transport.RequestHandlerRegistry.processMessageReceived(RequestHandlerRegistry.java:66) at org.elasticsearch.server@8.14.0/org.elasticsearch.transport.TransportService$6.doRun(TransportService.java:1069) at 
org.elasticsearch.server@8.14.0/org.elasticsearch.common.util.concurrent.ThreadContext$ContextPreservingAbstractRunnable.doRun(ThreadContext.java:984) at org.elasticsearch.server@8.14.0/org.elasticsearch.common.util.concurrent.AbstractRunnable.run(AbstractRunnable.java:26) at java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1144) at java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:642) at java.base/java.lang.Thread.run(Thread.java:1570) ``` * Update docs/changelog/108379.yaml --- docs/changelog/108379.yaml | 5 +++++ .../core/datatiers/NodesDataTiersUsageTransportAction.java | 4 +--- 2 files changed, 6 insertions(+), 3 deletions(-) create mode 100644 docs/changelog/108379.yaml diff --git a/docs/changelog/108379.yaml b/docs/changelog/108379.yaml new file mode 100644 index 0000000000000..312856a5db33d --- /dev/null +++ b/docs/changelog/108379.yaml @@ -0,0 +1,5 @@ +pr: 108379 +summary: Create a new `NodeRequest` for every `NodesDataTiersUsageTransport` use +area: Indices APIs +type: bug +issues: [] diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/datatiers/NodesDataTiersUsageTransportAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/datatiers/NodesDataTiersUsageTransportAction.java index 08a2d5ae4f5b4..ee721d9d55714 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/datatiers/NodesDataTiersUsageTransportAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/datatiers/NodesDataTiersUsageTransportAction.java @@ -90,7 +90,7 @@ protected NodesResponse newResponse(NodesRequest request, List Date: Tue, 7 May 2024 15:54:01 -0400 Subject: [PATCH 044/117] ESQL: Enable testing in more cases (#107920) This allows us to build `ExpressionEvaluator`s in tests when some of the inputs aren't `representable` but when they *are* literals. 
This enables many more cases in `AddTests`, for example, where we have support for adding time spans to dates. --- .../function/AbstractFunctionTestCase.java | 53 ++++++----- .../expression/function/TestCaseSupplier.java | 91 ++++++++++--------- .../function/scalar/math/BucketTests.java | 13 ++- .../function/scalar/math/RoundTests.java | 2 +- .../scalar/multivalue/MvSliceTests.java | 2 +- .../SpatialRelatesFunctionTestCase.java | 10 +- .../operator/arithmetic/AddTests.java | 31 +++---- .../operator/arithmetic/DivTests.java | 25 +++-- .../operator/arithmetic/ModTests.java | 25 +++-- .../operator/arithmetic/NegTests.java | 2 +- 10 files changed, 143 insertions(+), 111 deletions(-) diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java index c19d48f3cd50e..1a410c518e9b1 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java @@ -226,7 +226,7 @@ public static ExpressionEvaluator.Factory evaluator(Expression e) { } protected final Page row(List values) { - return new Page(BlockUtils.fromListRow(TestBlockFactory.getNonBreakingInstance(), values)); + return new Page(1, BlockUtils.fromListRow(TestBlockFactory.getNonBreakingInstance(), values)); } /** @@ -249,7 +249,8 @@ protected final void assertResolveTypeValid(Expression expression, DataType expe } public final void testEvaluate() { - assumeTrue("All test data types must be representable in order to build fields", testCase.allTypesAreRepresentable()); + assumeTrue("Can't build evaluator", testCase.canBuildEvaluator()); + assumeTrue("Expected type must be representable to build an evaluator", EsqlDataTypes.isRepresentable(testCase.expectedType())); 
logger.info( "Test Values: " + testCase.getData().stream().map(TestCaseSupplier.TypedData::toString).collect(Collectors.joining(",")) ); @@ -355,13 +356,14 @@ protected Matcher allNullsMatcher() { } private void testEvaluateBlock(BlockFactory inputBlockFactory, DriverContext context, boolean insertNulls) { - assumeTrue("can only run on representable types", testCase.allTypesAreRepresentable()); - assumeTrue("must build evaluator to test sending it blocks", testCase.getExpectedTypeError() == null); + assumeTrue("Can't build evaluator", testCase.canBuildEvaluator()); + assumeTrue("Expected type must be representable to build an evaluator", EsqlDataTypes.isRepresentable(testCase.expectedType())); + assumeTrue("Must build evaluator to test sending it blocks", testCase.getExpectedTypeError() == null); boolean readFloating = randomBoolean(); int positions = between(1, 1024); List data = testCase.getData(); Page onePositionPage = row(testCase.getDataValues()); - Block[] manyPositionsBlocks = new Block[data.size()]; + Block[] manyPositionsBlocks = new Block[Math.toIntExact(data.stream().filter(d -> d.isForceLiteral() == false).count())]; Set nullPositions = insertNulls ? 
IntStream.range(0, positions).filter(i -> randomBoolean()).mapToObj(Integer::valueOf).collect(Collectors.toSet()) : Set.of(); @@ -369,8 +371,12 @@ private void testEvaluateBlock(BlockFactory inputBlockFactory, DriverContext con nullPositions = Set.of(); } try { - for (int b = 0; b < data.size(); b++) { - ElementType elementType = PlannerUtils.toElementType(data.get(b).type()); + int b = 0; + for (TestCaseSupplier.TypedData d : data) { + if (d.isForceLiteral()) { + continue; + } + ElementType elementType = PlannerUtils.toElementType(d.type()); try (Block.Builder builder = elementType.newBlockBuilder(positions, inputBlockFactory)) { for (int p = 0; p < positions; p++) { if (nullPositions.contains(p)) { @@ -381,9 +387,13 @@ private void testEvaluateBlock(BlockFactory inputBlockFactory, DriverContext con } manyPositionsBlocks[b] = builder.build(); } + b++; } Expression expression = readFloating ? buildDeepCopyOfFieldExpression(testCase) : buildFieldExpression(testCase); - try (ExpressionEvaluator eval = evaluator(expression).get(context); Block block = eval.eval(new Page(manyPositionsBlocks))) { + try ( + ExpressionEvaluator eval = evaluator(expression).get(context); + Block block = eval.eval(new Page(positions, manyPositionsBlocks)) + ) { for (int p = 0; p < positions; p++) { if (nullPositions.contains(p)) { assertThat(toJavaObject(block, p), allNullsMatcher()); @@ -408,8 +418,8 @@ private void testEvaluateBlock(BlockFactory inputBlockFactory, DriverContext con // TODO cranky time public void testSimpleWithNulls() { // TODO replace this with nulls inserted into the test case like anyNullIsNull - assumeTrue("nothing to do if a type error", testCase.getExpectedTypeError() == null); - assumeTrue("All test data types must be representable in order to build fields", testCase.allTypesAreRepresentable()); + assumeTrue("Can't build evaluator", testCase.canBuildEvaluator()); + assumeTrue("Nothing to do if a type error", testCase.getExpectedTypeError() == null); List simpleData 
= testCase.getDataValues(); try (EvalOperator.ExpressionEvaluator eval = evaluator(buildFieldExpression(testCase)).get(driverContext())) { BlockFactory blockFactory = TestBlockFactory.getNonBreakingInstance(); @@ -450,8 +460,9 @@ protected void assertSimpleWithNulls(List data, Block value, int nullBlo } public final void testEvaluateInManyThreads() throws ExecutionException, InterruptedException { - assumeTrue("nothing to do if a type error", testCase.getExpectedTypeError() == null); - assumeTrue("All test data types must be representable in order to build fields", testCase.allTypesAreRepresentable()); + assumeTrue("Can't build evaluator", testCase.canBuildEvaluator()); + assumeTrue("Expected type must be representable to build an evaluator", EsqlDataTypes.isRepresentable(testCase.expectedType())); + assumeTrue("Nothing to do if a type error", testCase.getExpectedTypeError() == null); int count = 10_000; int threads = 5; var evalSupplier = evaluator(buildFieldExpression(testCase)); @@ -481,8 +492,8 @@ public final void testEvaluateInManyThreads() throws ExecutionException, Interru } public final void testEvaluatorToString() { - assumeTrue("nothing to do if a type error", testCase.getExpectedTypeError() == null); - assumeTrue("All test data types must be representable in order to build fields", testCase.allTypesAreRepresentable()); + assumeTrue("Can't build evaluator", testCase.canBuildEvaluator()); + assumeTrue("Nothing to do if a type error", testCase.getExpectedTypeError() == null); var factory = evaluator(buildFieldExpression(testCase)); try (ExpressionEvaluator ev = factory.get(driverContext())) { assertThat(ev.toString(), testCase.evaluatorToString()); @@ -490,8 +501,8 @@ public final void testEvaluatorToString() { } public final void testFactoryToString() { - assumeTrue("nothing to do if a type error", testCase.getExpectedTypeError() == null); - assumeTrue("All test data types must be representable in order to build fields", 
testCase.allTypesAreRepresentable()); + assumeTrue("Can't build evaluator", testCase.canBuildEvaluator()); + assumeTrue("Nothing to do if a type error", testCase.getExpectedTypeError() == null); var factory = evaluator(buildFieldExpression(testCase)); assertThat(factory.toString(), testCase.evaluatorToString()); } @@ -524,7 +535,6 @@ public final void testFold() { } public void testSerializationOfSimple() { - assumeTrue("All test data types must be representable in order to build fields", testCase.allTypesAreRepresentable()); assertSerialization(buildFieldExpression(testCase)); } @@ -594,7 +604,7 @@ protected static List anyNullIsNull(boolean entirelyNullPreser (nullPosition, nullValueDataType, original) -> entirelyNullPreservesType == false && nullValueDataType == DataTypes.NULL && original.getData().size() == 1 ? DataTypes.NULL : original.expectedType(), - (nullPosition, original) -> original + (nullPosition, nullData, original) -> original ); } @@ -603,7 +613,7 @@ public interface ExpectedType { } public interface ExpectedEvaluatorToString { - Matcher evaluatorToString(int nullPosition, Matcher original); + Matcher evaluatorToString(int nullPosition, TestCaseSupplier.TypedData nullData, Matcher original); } protected static List anyNullIsNull( @@ -635,10 +645,11 @@ protected static List anyNullIsNull( TestCaseSupplier.TypedData od = oc.getData().get(i); return i == finalNullPosition ? 
od.forceValueToNull() : od; }).toList(); + TestCaseSupplier.TypedData nulledData = oc.getData().get(finalNullPosition); return new TestCaseSupplier.TestCase( data, - evaluatorToString.evaluatorToString(finalNullPosition, oc.evaluatorToString()), - expectedType.expectedType(finalNullPosition, oc.getData().get(finalNullPosition).type(), oc), + evaluatorToString.evaluatorToString(finalNullPosition, nulledData, oc.evaluatorToString()), + expectedType.expectedType(finalNullPosition, nulledData.type(), oc), nullValue(), null, oc.getExpectedTypeError(), diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/TestCaseSupplier.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/TestCaseSupplier.java index 7cfe950bb3144..d9261a1658969 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/TestCaseSupplier.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/TestCaseSupplier.java @@ -92,7 +92,7 @@ public static List stringCases( expected, lhsSuppliers, rhsSuppliers, - evaluatorToString, + (lhs, rhs) -> equalTo(evaluatorToString.apply(lhs, rhs)), (lhs, rhs) -> warnings, suppliers, expectedType, @@ -202,16 +202,18 @@ public static List forBinaryCastingToDouble( (l, r) -> expected.apply(((Number) l).doubleValue(), ((Number) r).doubleValue()), lhsSuppliers, rhsSuppliers, - (lhsType, rhsType) -> name - + "[" - + lhsName - + "=" - + castToDoubleEvaluator("Attribute[channel=0]", lhsType) - + ", " - + rhsName - + "=" - + castToDoubleEvaluator("Attribute[channel=1]", rhsType) - + "]", + (lhsType, rhsType) -> equalTo( + name + + "[" + + lhsName + + "=" + + castToDoubleEvaluator("Attribute[channel=0]", lhsType) + + ", " + + rhsName + + "=" + + castToDoubleEvaluator("Attribute[channel=1]", rhsType) + + "]" + ), (lhs, rhs) -> warnings, suppliers, DataTypes.DOUBLE, @@ -224,7 +226,7 @@ public static void casesCrossProduct( BinaryOperator 
expected, List lhsSuppliers, List rhsSuppliers, - BiFunction evaluatorToString, + BiFunction> evaluatorToString, BiFunction> warnings, List suppliers, DataType expectedType, @@ -243,7 +245,7 @@ public static void casesCrossProduct( public static TestCaseSupplier testCaseSupplier( TypedDataSupplier lhsSupplier, TypedDataSupplier rhsSupplier, - BiFunction evaluatorToString, + BiFunction> evaluatorToString, DataType expectedType, BinaryOperator expectedValue ) { @@ -253,7 +255,7 @@ public static TestCaseSupplier testCaseSupplier( private static TestCaseSupplier testCaseSupplier( TypedDataSupplier lhsSupplier, TypedDataSupplier rhsSupplier, - BiFunction evaluatorToString, + BiFunction> evaluatorToString, DataType expectedType, BinaryOperator expectedValue, BiFunction> warnings @@ -366,7 +368,7 @@ public static List forBinaryComparisonWithWidening( (l, r) -> expectedTypeStuff.expected().apply((Number) l, (Number) r), getSuppliersForNumericType(lhsType, expectedTypeStuff.min(), expectedTypeStuff.max(), allowRhsZero), getSuppliersForNumericType(rhsType, expectedTypeStuff.min(), expectedTypeStuff.max(), allowRhsZero), - evaluatorToString, + (lhs, rhs) -> equalTo(evaluatorToString.apply(lhs, rhs)), warnings, suppliers, DataTypes.BOOLEAN, @@ -391,16 +393,18 @@ public static List forBinaryWithWidening( for (DataType rhsType : numericTypes) { DataType expected = widen(lhsType, rhsType); NumericTypeTestConfig expectedTypeStuff = typeStuff.get(expected); - BiFunction evaluatorToString = (lhs, rhs) -> expectedTypeStuff.evaluatorName() - + "[" - + lhsName - + "=" - + getCastEvaluator("Attribute[channel=0]", lhs, expected) - + ", " - + rhsName - + "=" - + getCastEvaluator("Attribute[channel=1]", rhs, expected) - + "]"; + BiFunction> evaluatorToString = (lhs, rhs) -> equalTo( + expectedTypeStuff.evaluatorName() + + "[" + + lhsName + + "=" + + getCastEvaluator("Attribute[channel=0]", lhs, expected) + + ", " + + rhsName + + "=" + + getCastEvaluator("Attribute[channel=1]", rhs, 
expected) + + "]" + ); casesCrossProduct( (l, r) -> expectedTypeStuff.expected().apply((Number) l, (Number) r), getSuppliersForNumericType(lhsType, expectedTypeStuff.min(), expectedTypeStuff.max(), true), @@ -429,26 +433,22 @@ public static List forBinaryNotCasting( boolean symmetric ) { return forBinaryNotCasting( - name, - lhsName, - rhsName, expected, expectedType, lhsSuppliers, rhsSuppliers, + equalTo(name + "[" + lhsName + "=Attribute[channel=0], " + rhsName + "=Attribute[channel=1]]"), (lhs, rhs) -> warnings, symmetric ); } public static List forBinaryNotCasting( - String name, - String lhsName, - String rhsName, BinaryOperator expected, DataType expectedType, List lhsSuppliers, List rhsSuppliers, + Matcher evaluatorToString, BiFunction> warnings, boolean symmetric ) { @@ -457,7 +457,7 @@ public static List forBinaryNotCasting( expected, lhsSuppliers, rhsSuppliers, - (lhsType, rhsType) -> name + "[" + lhsName + "=Attribute[channel=0], " + rhsName + "=Attribute[channel=1]]", + (lhsType, rhsType) -> evaluatorToString, warnings, suppliers, expectedType, @@ -1006,7 +1006,7 @@ public static List dateCases() { public static List datePeriodCases() { return List.of( - new TypedDataSupplier("", () -> Period.ZERO, EsqlDataTypes.DATE_PERIOD), + new TypedDataSupplier("", () -> Period.ZERO, EsqlDataTypes.DATE_PERIOD, true), new TypedDataSupplier( "", () -> Period.of( @@ -1014,18 +1014,20 @@ public static List datePeriodCases() { ESTestCase.randomIntBetween(-13, 13), ESTestCase.randomIntBetween(-32, 32) ), - EsqlDataTypes.DATE_PERIOD + EsqlDataTypes.DATE_PERIOD, + true ) ); } public static List timeDurationCases() { return List.of( - new TypedDataSupplier("", () -> Duration.ZERO, EsqlDataTypes.TIME_DURATION), + new TypedDataSupplier("", () -> Duration.ZERO, EsqlDataTypes.TIME_DURATION, true), new TypedDataSupplier( "", () -> Duration.ofMillis(ESTestCase.randomLongBetween(-604800000L, 604800000L)), // plus/minus 7 days - EsqlDataTypes.TIME_DURATION + 
EsqlDataTypes.TIME_DURATION, + true ) ); } @@ -1237,7 +1239,7 @@ public static class TestCase { private final String[] expectedWarnings; private final String expectedTypeError; - private final boolean allTypesAreRepresentable; + private final boolean canBuildEvaluator; private final Class foldingExceptionClass; private final String foldingExceptionMessage; @@ -1271,7 +1273,7 @@ public static TestCase typeError(List data, String expectedTypeError) this.matcher = matcher; this.expectedWarnings = expectedWarnings; this.expectedTypeError = expectedTypeError; - this.allTypesAreRepresentable = data.stream().allMatch(d -> EsqlDataTypes.isRepresentable(d.type)); + this.canBuildEvaluator = data.stream().allMatch(d -> d.forceLiteral || EsqlDataTypes.isRepresentable(d.type)); this.foldingExceptionClass = foldingExceptionClass; this.foldingExceptionMessage = foldingExceptionMessage; } @@ -1297,11 +1299,11 @@ public List getDataAsLiterals() { } public List getDataValues() { - return data.stream().map(t -> t.data()).collect(Collectors.toList()); + return data.stream().filter(d -> d.forceLiteral == false).map(TypedData::data).collect(Collectors.toList()); } - public boolean allTypesAreRepresentable() { - return allTypesAreRepresentable; + public boolean canBuildEvaluator() { + return canBuildEvaluator; } public Matcher getMatcher() { @@ -1428,6 +1430,13 @@ public TypedData forceLiteral() { return new TypedData(data, type, name, true); } + /** + * Has this been forced to a {@link Literal}. + */ + public boolean isForceLiteral() { + return forceLiteral; + } + /** * Return a {@link TypedData} that always returns {@code null} for it's * value without modifying anything else in the supplier. 
diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/BucketTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/BucketTests.java index cc2714dc31dca..a73b4a0dfa557 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/BucketTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/BucketTests.java @@ -46,14 +46,16 @@ public static Iterable parameters() { "fixed date with period", () -> DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parseMillis("2023-01-01T00:00:00.00Z"), EsqlDataTypes.DATE_PERIOD, - Period.ofYears(1) + Period.ofYears(1), + "[YEAR_OF_CENTURY in Z][fixed to midnight]" ); dateCasesWithSpan( suppliers, "fixed date with duration", () -> DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parseMillis("2023-02-17T09:00:00.00Z"), EsqlDataTypes.TIME_DURATION, - Duration.ofDays(1L) + Duration.ofDays(1L), + "[86400000 in Z][fixed]" ); numberCases(suppliers, "fixed long", DataTypes.LONG, () -> 100L); numberCasesWithSpan(suppliers, "fixed long with span", DataTypes.LONG, () -> 100L); @@ -68,7 +70,7 @@ public static Iterable parameters() { (nullPosition, nullValueDataType, original) -> nullPosition == 0 && nullValueDataType == DataTypes.NULL ? DataTypes.NULL : original.expectedType(), - (nullPosition, original) -> nullPosition == 0 ? original : equalTo("LiteralsEvaluator[lit=null]") + (nullPosition, nullData, original) -> nullPosition == 0 ? 
original : equalTo("LiteralsEvaluator[lit=null]") ) ); } @@ -112,7 +114,8 @@ private static void dateCasesWithSpan( String name, LongSupplier date, DataType spanType, - Object span + Object span, + String spanStr ) { suppliers.add(new TestCaseSupplier(name, List.of(DataTypes.DATETIME, spanType), () -> { List args = new ArrayList<>(); @@ -120,7 +123,7 @@ private static void dateCasesWithSpan( args.add(new TestCaseSupplier.TypedData(span, spanType, "buckets").forceLiteral()); return new TestCaseSupplier.TestCase( args, - "DateTruncEvaluator[fieldVal=Attribute[channel=0], rounding=Rounding[DAY_OF_MONTH in Z][fixed to midnight]]", + "DateTruncEvaluator[fieldVal=Attribute[channel=0], rounding=Rounding" + spanStr + "]", DataTypes.DATETIME, dateResultsMatcher(args) ); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundTests.java index 30460828aaa91..097f3c1038cfb 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundTests.java @@ -64,7 +64,7 @@ public static Iterable parameters() { suppliers = anyNullIsNull( suppliers, (nullPosition, nullValueDataType, original) -> nullPosition == 0 ? 
nullValueDataType : original.expectedType(), - (nullPosition, original) -> original + (nullPosition, nullData, original) -> original ); suppliers.add(new TestCaseSupplier("two doubles", List.of(DataTypes.DOUBLE, DataTypes.INTEGER), () -> { diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSliceTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSliceTests.java index 260813bacb8f2..bf16344847bde 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSliceTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSliceTests.java @@ -48,7 +48,7 @@ public static Iterable parameters() { (nullPosition, nullValueDataType, original) -> nullPosition == 0 && nullValueDataType == DataTypes.NULL ? DataTypes.NULL : original.expectedType(), - (nullPosition, original) -> original + (nullPosition, nullData, original) -> original ) ); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialRelatesFunctionTestCase.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialRelatesFunctionTestCase.java index e905f85141f31..64e03dec6b064 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialRelatesFunctionTestCase.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialRelatesFunctionTestCase.java @@ -16,6 +16,7 @@ import org.elasticsearch.xpack.ql.type.DataType; import org.elasticsearch.xpack.ql.type.DataTypes; import org.elasticsearch.xpack.ql.util.SpatialCoordinateTypes; +import org.hamcrest.Matcher; import java.io.IOException; import java.lang.reflect.Field; @@ -28,6 +29,7 @@ import static 
org.elasticsearch.xpack.esql.type.EsqlDataTypes.isSpatial; import static org.elasticsearch.xpack.esql.type.EsqlDataTypes.isSpatialGeo; import static org.elasticsearch.xpack.esql.type.EsqlDataTypes.isString; +import static org.hamcrest.Matchers.equalTo; public abstract class SpatialRelatesFunctionTestCase extends AbstractFunctionTestCase { @@ -188,11 +190,11 @@ private static DataType pickSpatialType(DataType leftType, DataType rightType) { } } - private static String spatialEvaluatorString(DataType leftType, DataType rightType) { + private static Matcher spatialEvaluatorString(DataType leftType, DataType rightType) { String crsType = isSpatialGeo(pickSpatialType(leftType, rightType)) ? "Geo" : "Cartesian"; - return getFunctionClassName() - + crsType - + "SourceAndSourceEvaluator[leftValue=Attribute[channel=0], rightValue=Attribute[channel=1]]"; + return equalTo( + getFunctionClassName() + crsType + "SourceAndSourceEvaluator[leftValue=Attribute[channel=0], rightValue=Attribute[channel=1]]" + ); } private static int countGeo(DataType... 
types) { diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddTests.java index 2daf2688d6631..25ccd91f43d07 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddTests.java @@ -33,8 +33,10 @@ import static org.elasticsearch.xpack.ql.type.DateUtils.asDateTime; import static org.elasticsearch.xpack.ql.type.DateUtils.asMillis; import static org.elasticsearch.xpack.ql.util.NumericUtils.asLongUnsigned; +import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.nullValue; +import static org.hamcrest.Matchers.startsWith; public class AddTests extends AbstractFunctionTestCase { public AddTests(@Name("TestCase") Supplier testCaseSupplier) { @@ -94,27 +96,23 @@ public static Iterable parameters() { suppliers.addAll( TestCaseSupplier.forBinaryNotCasting( - "No evaluator, the tests only trigger the folding code since Period is not representable", - "lhs", - "rhs", (lhs, rhs) -> ((Period) lhs).plus((Period) rhs), EsqlDataTypes.DATE_PERIOD, TestCaseSupplier.datePeriodCases(), TestCaseSupplier.datePeriodCases(), - List.of(), + startsWith("LiteralsEvaluator[lit="), // lhs and rhs have to be literals, so we fold into a literal + (lhs, rhs) -> List.of(), true ) ); suppliers.addAll( TestCaseSupplier.forBinaryNotCasting( - "No evaluator, the tests only trigger the folding code since Duration is not representable", - "lhs", - "rhs", (lhs, rhs) -> ((Duration) lhs).plus((Duration) rhs), EsqlDataTypes.TIME_DURATION, TestCaseSupplier.timeDurationCases(), TestCaseSupplier.timeDurationCases(), - List.of(), + startsWith("LiteralsEvaluator[lit="), // lhs and rhs 
have to be literals, so we fold into a literal + (lhs, rhs) -> List.of(), true ) ); @@ -139,28 +137,22 @@ public static Iterable parameters() { }; suppliers.addAll( TestCaseSupplier.forBinaryNotCasting( - // TODO: There is an evaluator for Datetime + Period, so it should be tested. Similarly below. - "No evaluator, the tests only trigger the folding code since Period is not representable", - "lhs", - "rhs", result, DataTypes.DATETIME, TestCaseSupplier.dateCases(), TestCaseSupplier.datePeriodCases(), + startsWith("AddDatetimesEvaluator[datetime=Attribute[channel=0], temporalAmount="), warnings, true ) ); suppliers.addAll( TestCaseSupplier.forBinaryNotCasting( - // TODO: There is an evaluator for Datetime + Duration, so it should be tested. Similarly above. - "No evaluator, the tests only trigger the folding code since Duration is not representable", - "lhs", - "rhs", result, DataTypes.DATETIME, TestCaseSupplier.dateCases(), TestCaseSupplier.timeDurationCases(), + startsWith("AddDatetimesEvaluator[datetime=Attribute[channel=0], temporalAmount="), warnings, true ) @@ -192,7 +184,12 @@ public static Iterable parameters() { // Datetime tests are split in two, depending on their permissiveness of null-injection, which cannot happen "automatically" for // Datetime + Period/Duration, since the expression will take the non-null arg's type. - suppliers = errorsForCasesWithoutExamples(anyNullIsNull(true, suppliers), AddTests::addErrorMessageString); + suppliers = anyNullIsNull( + suppliers, + (nullPosition, nullType, original) -> original.expectedType(), + (nullPosition, nullData, original) -> nullData.isForceLiteral() ? 
equalTo("LiteralsEvaluator[lit=null]") : original + ); + suppliers = errorsForCasesWithoutExamples(suppliers, AddTests::addErrorMessageString); // Cases that should generate warnings suppliers.addAll(List.of(new TestCaseSupplier("MV", () -> { diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/DivTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/DivTests.java index f3348ab2dcba5..eb29a7b5ce06e 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/DivTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/DivTests.java @@ -16,6 +16,7 @@ import org.elasticsearch.xpack.ql.tree.Source; import org.elasticsearch.xpack.ql.type.DataType; import org.elasticsearch.xpack.ql.type.DataTypes; +import org.hamcrest.Matcher; import java.math.BigInteger; import java.util.ArrayList; @@ -24,6 +25,8 @@ import java.util.function.BiFunction; import java.util.function.Supplier; +import static org.hamcrest.Matchers.equalTo; + public class DivTests extends AbstractFunctionTestCase { public DivTests(@Name("TestCase") Supplier testCaseSupplier) { this.testCase = testCaseSupplier.get(); @@ -116,16 +119,18 @@ public static Iterable parameters() { for (DataType rhsType : numericTypes) { DataType expected = TestCaseSupplier.widen(lhsType, rhsType); TestCaseSupplier.NumericTypeTestConfig expectedTypeStuff = typeStuff.get(expected); - BiFunction evaluatorToString = (lhs, rhs) -> expectedTypeStuff.evaluatorName() - + "[" - + "lhs" - + "=" - + TestCaseSupplier.getCastEvaluator("Attribute[channel=0]", lhs, expected) - + ", " - + "rhs" - + "=" - + TestCaseSupplier.getCastEvaluator("Attribute[channel=1]", rhs, expected) - + "]"; + BiFunction> evaluatorToString = (lhs, rhs) -> equalTo( + expectedTypeStuff.evaluatorName() + + "[" + + "lhs" + + "=" + + 
TestCaseSupplier.getCastEvaluator("Attribute[channel=0]", lhs, expected) + + ", " + + "rhs" + + "=" + + TestCaseSupplier.getCastEvaluator("Attribute[channel=1]", rhs, expected) + + "]" + ); TestCaseSupplier.casesCrossProduct( (l1, r1) -> expectedTypeStuff.expected().apply((Number) l1, (Number) r1), TestCaseSupplier.getSuppliersForNumericType(lhsType, expectedTypeStuff.min(), expectedTypeStuff.max(), true), diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/ModTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/ModTests.java index a70f2c7885257..bc6d6dd97c3ef 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/ModTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/ModTests.java @@ -16,6 +16,7 @@ import org.elasticsearch.xpack.ql.tree.Source; import org.elasticsearch.xpack.ql.type.DataType; import org.elasticsearch.xpack.ql.type.DataTypes; +import org.hamcrest.Matcher; import java.math.BigInteger; import java.util.ArrayList; @@ -24,6 +25,8 @@ import java.util.function.BiFunction; import java.util.function.Supplier; +import static org.hamcrest.Matchers.equalTo; + public class ModTests extends AbstractFunctionTestCase { public ModTests(@Name("TestCase") Supplier testCaseSupplier) { this.testCase = testCaseSupplier.get(); @@ -103,16 +106,18 @@ public static Iterable parameters() { for (DataType rhsType : numericTypes) { DataType expected = TestCaseSupplier.widen(lhsType, rhsType); TestCaseSupplier.NumericTypeTestConfig expectedTypeStuff = typeStuff.get(expected); - BiFunction evaluatorToString = (lhs, rhs) -> expectedTypeStuff.evaluatorName() - + "[" - + "lhs" - + "=" - + TestCaseSupplier.getCastEvaluator("Attribute[channel=0]", lhs, expected) - + ", " - + "rhs" - + "=" - + 
TestCaseSupplier.getCastEvaluator("Attribute[channel=1]", rhs, expected) - + "]"; + BiFunction> evaluatorToString = (lhs, rhs) -> equalTo( + expectedTypeStuff.evaluatorName() + + "[" + + "lhs" + + "=" + + TestCaseSupplier.getCastEvaluator("Attribute[channel=0]", lhs, expected) + + ", " + + "rhs" + + "=" + + TestCaseSupplier.getCastEvaluator("Attribute[channel=1]", rhs, expected) + + "]" + ); TestCaseSupplier.casesCrossProduct( (l1, r1) -> expectedTypeStuff.expected().apply((Number) l1, (Number) r1), TestCaseSupplier.getSuppliersForNumericType(lhsType, expectedTypeStuff.min(), expectedTypeStuff.max(), true), diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/NegTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/NegTests.java index f5e5e9f406f22..c65f4eed2de70 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/NegTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/NegTests.java @@ -155,7 +155,7 @@ public void testEdgeCases() { } private Object process(Object val) { - if (testCase.allTypesAreRepresentable()) { + if (testCase.canBuildEvaluator()) { Neg neg = new Neg(Source.EMPTY, field("val", typeOf(val))); try (Block block = evaluator(neg).get(driverContext()).eval(row(List.of(val)))) { return toJavaObject(block, 0); From fb84a22a754eca18d23d1375888d1469dfc1d984 Mon Sep 17 00:00:00 2001 From: Max Hniebergall <137079448+maxhniebergall@users.noreply.github.com> Date: Tue, 7 May 2024 16:15:52 -0400 Subject: [PATCH 045/117] Fix for missed renaming in upgrade tests (#108389) Some muted tests were missed [in a recent PR](https://github.com/elastic/elasticsearch/pull/107691), this change updates the ones in the muted upgrade tests --- .../AzureOpenAiServiceUpgradeIT.java | 8 +++---- 
.../application/CohereServiceUpgradeIT.java | 22 +++++++++---------- .../HuggingFaceServiceUpgradeIT.java | 16 +++++++------- .../application/OpenAiServiceUpgradeIT.java | 16 +++++++------- 4 files changed, 31 insertions(+), 31 deletions(-) diff --git a/x-pack/plugin/inference/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/xpack/application/AzureOpenAiServiceUpgradeIT.java b/x-pack/plugin/inference/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/xpack/application/AzureOpenAiServiceUpgradeIT.java index db5e62a367ab3..d475fd099d4ac 100644 --- a/x-pack/plugin/inference/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/xpack/application/AzureOpenAiServiceUpgradeIT.java +++ b/x-pack/plugin/inference/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/xpack/application/AzureOpenAiServiceUpgradeIT.java @@ -59,16 +59,16 @@ public void testOpenAiEmbeddings() throws IOException { openAiEmbeddingsServer.enqueue(new MockResponse().setResponseCode(200).setBody(OpenAiServiceUpgradeIT.embeddingResponse())); put(oldClusterId, embeddingConfig(getUrl(openAiEmbeddingsServer)), TaskType.TEXT_EMBEDDING); - var configs = (List>) get(TaskType.TEXT_EMBEDDING, oldClusterId).get("models"); + var configs = (List>) get(TaskType.TEXT_EMBEDDING, oldClusterId).get("endpoints"); assertThat(configs, hasSize(1)); } else if (isMixedCluster()) { - var configs = (List>) get(TaskType.TEXT_EMBEDDING, oldClusterId).get("models"); + var configs = (List>) get(TaskType.TEXT_EMBEDDING, oldClusterId).get("endpoints"); assertEquals("azureopenai", configs.get(0).get("service")); assertEmbeddingInference(oldClusterId); } else if (isUpgradedCluster()) { // check old cluster model - var configs = (List>) get(TaskType.TEXT_EMBEDDING, oldClusterId).get("models"); + var configs = (List>) get(TaskType.TEXT_EMBEDDING, oldClusterId).get("endpoints"); var serviceSettings = (Map) configs.get(0).get("service_settings"); // Inference on old cluster model @@ -77,7 +77,7 @@ public void 
testOpenAiEmbeddings() throws IOException { openAiEmbeddingsServer.enqueue(new MockResponse().setResponseCode(200).setBody(OpenAiServiceUpgradeIT.embeddingResponse())); put(upgradedClusterId, embeddingConfig(getUrl(openAiEmbeddingsServer)), TaskType.TEXT_EMBEDDING); - configs = (List>) get(TaskType.TEXT_EMBEDDING, upgradedClusterId).get("models"); + configs = (List>) get(TaskType.TEXT_EMBEDDING, upgradedClusterId).get("endpoints"); assertThat(configs, hasSize(1)); // Inference on the new config diff --git a/x-pack/plugin/inference/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/xpack/application/CohereServiceUpgradeIT.java b/x-pack/plugin/inference/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/xpack/application/CohereServiceUpgradeIT.java index c73827dba2cbb..c889d8f9b312a 100644 --- a/x-pack/plugin/inference/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/xpack/application/CohereServiceUpgradeIT.java +++ b/x-pack/plugin/inference/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/xpack/application/CohereServiceUpgradeIT.java @@ -71,7 +71,7 @@ public void testCohereEmbeddings() throws IOException { cohereEmbeddingsServer.enqueue(new MockResponse().setResponseCode(200).setBody(embeddingResponseFloat())); put(oldClusterIdFloat, embeddingConfigFloat(getUrl(cohereEmbeddingsServer)), TaskType.TEXT_EMBEDDING); - var configs = (List>) get(TaskType.TEXT_EMBEDDING, oldClusterIdInt8).get("models"); + var configs = (List>) get(TaskType.TEXT_EMBEDDING, oldClusterIdInt8).get("endpoints"); assertThat(configs, hasSize(1)); assertEquals("cohere", configs.get(0).get("service")); var serviceSettings = (Map) configs.get(0).get("service_settings"); @@ -83,7 +83,7 @@ public void testCohereEmbeddings() throws IOException { assertEmbeddingInference(oldClusterIdInt8, CohereEmbeddingType.BYTE); assertEmbeddingInference(oldClusterIdFloat, CohereEmbeddingType.FLOAT); } else if (isMixedCluster()) { - var configs = (List>) get(TaskType.TEXT_EMBEDDING, 
oldClusterIdInt8).get("models"); + var configs = (List>) get(TaskType.TEXT_EMBEDDING, oldClusterIdInt8).get("endpoints"); assertEquals("cohere", configs.get(0).get("service")); var serviceSettings = (Map) configs.get(0).get("service_settings"); assertThat(serviceSettings, hasEntry("model_id", "embed-english-light-v3.0")); @@ -91,7 +91,7 @@ public void testCohereEmbeddings() throws IOException { // An upgraded node will report the embedding type as byte, an old node int8 assertThat(embeddingType, Matchers.is(oneOf("int8", "byte"))); - configs = (List>) get(TaskType.TEXT_EMBEDDING, oldClusterIdFloat).get("models"); + configs = (List>) get(TaskType.TEXT_EMBEDDING, oldClusterIdFloat).get("endpoints"); serviceSettings = (Map) configs.get(0).get("service_settings"); assertThat(serviceSettings, hasEntry("embedding_type", "float")); @@ -99,7 +99,7 @@ public void testCohereEmbeddings() throws IOException { assertEmbeddingInference(oldClusterIdFloat, CohereEmbeddingType.FLOAT); } else if (isUpgradedCluster()) { // check old cluster model - var configs = (List>) get(TaskType.TEXT_EMBEDDING, oldClusterIdInt8).get("models"); + var configs = (List>) get(TaskType.TEXT_EMBEDDING, oldClusterIdInt8).get("endpoints"); var serviceSettings = (Map) configs.get(0).get("service_settings"); assertThat(serviceSettings, hasEntry("model_id", "embed-english-light-v3.0")); assertThat(serviceSettings, hasEntry("embedding_type", "byte")); @@ -116,7 +116,7 @@ public void testCohereEmbeddings() throws IOException { cohereEmbeddingsServer.enqueue(new MockResponse().setResponseCode(200).setBody(embeddingResponseByte())); put(upgradedClusterIdByte, embeddingConfigByte(getUrl(cohereEmbeddingsServer)), TaskType.TEXT_EMBEDDING); - configs = (List>) get(TaskType.TEXT_EMBEDDING, upgradedClusterIdByte).get("models"); + configs = (List>) get(TaskType.TEXT_EMBEDDING, upgradedClusterIdByte).get("endpoints"); serviceSettings = (Map) configs.get(0).get("service_settings"); assertThat(serviceSettings, 
hasEntry("embedding_type", "byte")); @@ -129,7 +129,7 @@ public void testCohereEmbeddings() throws IOException { cohereEmbeddingsServer.enqueue(new MockResponse().setResponseCode(200).setBody(embeddingResponseByte())); put(upgradedClusterIdInt8, embeddingConfigInt8(getUrl(cohereEmbeddingsServer)), TaskType.TEXT_EMBEDDING); - configs = (List>) get(TaskType.TEXT_EMBEDDING, upgradedClusterIdInt8).get("models"); + configs = (List>) get(TaskType.TEXT_EMBEDDING, upgradedClusterIdInt8).get("endpoints"); serviceSettings = (Map) configs.get(0).get("service_settings"); assertThat(serviceSettings, hasEntry("embedding_type", "byte")); // int8 rewritten to byte @@ -141,7 +141,7 @@ public void testCohereEmbeddings() throws IOException { cohereEmbeddingsServer.enqueue(new MockResponse().setResponseCode(200).setBody(embeddingResponseFloat())); put(upgradedClusterIdFloat, embeddingConfigFloat(getUrl(cohereEmbeddingsServer)), TaskType.TEXT_EMBEDDING); - configs = (List>) get(TaskType.TEXT_EMBEDDING, upgradedClusterIdFloat).get("models"); + configs = (List>) get(TaskType.TEXT_EMBEDDING, upgradedClusterIdFloat).get("endpoints"); serviceSettings = (Map) configs.get(0).get("service_settings"); assertThat(serviceSettings, hasEntry("embedding_type", "float")); @@ -179,12 +179,12 @@ public void testRerank() throws IOException { if (isOldCluster()) { put(oldClusterId, rerankConfig(getUrl(cohereRerankServer)), TaskType.RERANK); - var configs = (List>) get(TaskType.RERANK, oldClusterId).get("models"); + var configs = (List>) get(TaskType.RERANK, oldClusterId).get("endpoints"); assertThat(configs, hasSize(1)); assertRerank(oldClusterId); } else if (isMixedCluster()) { - var configs = (List>) get(TaskType.RERANK, oldClusterId).get("models"); + var configs = (List>) get(TaskType.RERANK, oldClusterId).get("endpoints"); assertEquals("cohere", configs.get(0).get("service")); var serviceSettings = (Map) configs.get(0).get("service_settings"); assertThat(serviceSettings, hasEntry("model_id", 
"rerank-english-v3.0")); @@ -195,7 +195,7 @@ public void testRerank() throws IOException { } else if (isUpgradedCluster()) { // check old cluster model - var configs = (List>) get(TaskType.RERANK, oldClusterId).get("models"); + var configs = (List>) get(TaskType.RERANK, oldClusterId).get("endpoints"); assertEquals("cohere", configs.get(0).get("service")); var serviceSettings = (Map) configs.get(0).get("service_settings"); assertThat(serviceSettings, hasEntry("model_id", "rerank-english-v3.0")); @@ -206,7 +206,7 @@ public void testRerank() throws IOException { // New endpoint put(upgradedClusterId, rerankConfig(getUrl(cohereRerankServer)), TaskType.RERANK); - configs = (List>) get(upgradedClusterId).get("models"); + configs = (List>) get(upgradedClusterId).get("endpoints"); assertThat(configs, hasSize(1)); assertRerank(upgradedClusterId); diff --git a/x-pack/plugin/inference/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/xpack/application/HuggingFaceServiceUpgradeIT.java b/x-pack/plugin/inference/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/xpack/application/HuggingFaceServiceUpgradeIT.java index 718678f97f37f..899a02776195d 100644 --- a/x-pack/plugin/inference/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/xpack/application/HuggingFaceServiceUpgradeIT.java +++ b/x-pack/plugin/inference/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/xpack/application/HuggingFaceServiceUpgradeIT.java @@ -63,18 +63,18 @@ public void testHFEmbeddings() throws IOException { embeddingsServer.enqueue(new MockResponse().setResponseCode(200).setBody(embeddingResponse())); put(oldClusterId, embeddingConfig(getUrl(embeddingsServer)), TaskType.TEXT_EMBEDDING); - var configs = (List>) get(TaskType.TEXT_EMBEDDING, oldClusterId).get("models"); + var configs = (List>) get(TaskType.TEXT_EMBEDDING, oldClusterId).get("endpoints"); assertThat(configs, hasSize(1)); assertEmbeddingInference(oldClusterId); } else if (isMixedCluster()) { - var configs = 
(List>) get(TaskType.TEXT_EMBEDDING, oldClusterId).get("models"); + var configs = (List>) get(TaskType.TEXT_EMBEDDING, oldClusterId).get("endpoints"); assertEquals("hugging_face", configs.get(0).get("service")); assertEmbeddingInference(oldClusterId); } else if (isUpgradedCluster()) { // check old cluster model - var configs = (List>) get(TaskType.TEXT_EMBEDDING, oldClusterId).get("models"); + var configs = (List>) get(TaskType.TEXT_EMBEDDING, oldClusterId).get("endpoints"); assertEquals("hugging_face", configs.get(0).get("service")); // Inference on old cluster model @@ -83,7 +83,7 @@ public void testHFEmbeddings() throws IOException { embeddingsServer.enqueue(new MockResponse().setResponseCode(200).setBody(embeddingResponse())); put(upgradedClusterId, embeddingConfig(getUrl(embeddingsServer)), TaskType.TEXT_EMBEDDING); - configs = (List>) get(TaskType.TEXT_EMBEDDING, upgradedClusterId).get("models"); + configs = (List>) get(TaskType.TEXT_EMBEDDING, upgradedClusterId).get("endpoints"); assertThat(configs, hasSize(1)); assertEmbeddingInference(upgradedClusterId); @@ -110,17 +110,17 @@ public void testElser() throws IOException { if (isOldCluster()) { put(oldClusterId, elserConfig(getUrl(elserServer)), TaskType.SPARSE_EMBEDDING); - var configs = (List>) get(TaskType.SPARSE_EMBEDDING, oldClusterId).get("models"); + var configs = (List>) get(TaskType.SPARSE_EMBEDDING, oldClusterId).get("endpoints"); assertThat(configs, hasSize(1)); assertElser(oldClusterId); } else if (isMixedCluster()) { - var configs = (List>) get(TaskType.SPARSE_EMBEDDING, oldClusterId).get("models"); + var configs = (List>) get(TaskType.SPARSE_EMBEDDING, oldClusterId).get("endpoints"); assertEquals("hugging_face", configs.get(0).get("service")); assertElser(oldClusterId); } else if (isUpgradedCluster()) { // check old cluster model - var configs = (List>) get(TaskType.SPARSE_EMBEDDING, oldClusterId).get("models"); + var configs = (List>) get(TaskType.SPARSE_EMBEDDING, 
oldClusterId).get("endpoints"); assertEquals("hugging_face", configs.get(0).get("service")); var taskSettings = (Map) configs.get(0).get("task_settings"); assertThat(taskSettings.keySet(), empty()); @@ -129,7 +129,7 @@ public void testElser() throws IOException { // New endpoint put(upgradedClusterId, elserConfig(getUrl(elserServer)), TaskType.SPARSE_EMBEDDING); - configs = (List>) get(upgradedClusterId).get("models"); + configs = (List>) get(upgradedClusterId).get("endpoints"); assertThat(configs, hasSize(1)); assertElser(upgradedClusterId); diff --git a/x-pack/plugin/inference/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/xpack/application/OpenAiServiceUpgradeIT.java b/x-pack/plugin/inference/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/xpack/application/OpenAiServiceUpgradeIT.java index 4e8e1c845b070..bfdcb0e0d5ed4 100644 --- a/x-pack/plugin/inference/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/xpack/application/OpenAiServiceUpgradeIT.java +++ b/x-pack/plugin/inference/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/xpack/application/OpenAiServiceUpgradeIT.java @@ -65,12 +65,12 @@ public void testOpenAiEmbeddings() throws IOException { openAiEmbeddingsServer.enqueue(new MockResponse().setResponseCode(200).setBody(embeddingResponse())); put(oldClusterId, inferenceConfig, TaskType.TEXT_EMBEDDING); - var configs = (List>) get(TaskType.TEXT_EMBEDDING, oldClusterId).get("models"); + var configs = (List>) get(TaskType.TEXT_EMBEDDING, oldClusterId).get("endpoints"); assertThat(configs, hasSize(1)); assertEmbeddingInference(oldClusterId); } else if (isMixedCluster()) { - var configs = (List>) get(TaskType.TEXT_EMBEDDING, oldClusterId).get("models"); + var configs = (List>) get(TaskType.TEXT_EMBEDDING, oldClusterId).get("endpoints"); assertEquals("openai", configs.get(0).get("service")); var serviceSettings = (Map) configs.get(0).get("service_settings"); var taskSettings = (Map) configs.get(0).get("task_settings"); @@ 
-80,7 +80,7 @@ public void testOpenAiEmbeddings() throws IOException { assertEmbeddingInference(oldClusterId); } else if (isUpgradedCluster()) { // check old cluster model - var configs = (List>) get(TaskType.TEXT_EMBEDDING, oldClusterId).get("models"); + var configs = (List>) get(TaskType.TEXT_EMBEDDING, oldClusterId).get("endpoints"); var serviceSettings = (Map) configs.get(0).get("service_settings"); // model id is moved to service settings assertThat(serviceSettings, hasEntry("model_id", "text-embedding-ada-002")); @@ -94,7 +94,7 @@ public void testOpenAiEmbeddings() throws IOException { openAiEmbeddingsServer.enqueue(new MockResponse().setResponseCode(200).setBody(embeddingResponse())); put(upgradedClusterId, inferenceConfig, TaskType.TEXT_EMBEDDING); - configs = (List>) get(TaskType.TEXT_EMBEDDING, upgradedClusterId).get("models"); + configs = (List>) get(TaskType.TEXT_EMBEDDING, upgradedClusterId).get("endpoints"); assertThat(configs, hasSize(1)); assertEmbeddingInference(upgradedClusterId); @@ -122,12 +122,12 @@ public void testOpenAiCompletions() throws IOException { if (isOldCluster()) { put(oldClusterId, chatCompletionsConfig(getUrl(openAiChatCompletionsServer)), TaskType.COMPLETION); - var configs = (List>) get(TaskType.COMPLETION, oldClusterId).get("models"); + var configs = (List>) get(TaskType.COMPLETION, oldClusterId).get("endpoints"); assertThat(configs, hasSize(1)); assertCompletionInference(oldClusterId); } else if (isMixedCluster()) { - var configs = (List>) get(TaskType.COMPLETION, oldClusterId).get("models"); + var configs = (List>) get(TaskType.COMPLETION, oldClusterId).get("endpoints"); assertEquals("openai", configs.get(0).get("service")); var serviceSettings = (Map) configs.get(0).get("service_settings"); assertThat(serviceSettings, hasEntry("model_id", "gpt-4")); @@ -137,7 +137,7 @@ public void testOpenAiCompletions() throws IOException { assertCompletionInference(oldClusterId); } else if (isUpgradedCluster()) { // check old cluster model 
- var configs = (List>) get(TaskType.COMPLETION, oldClusterId).get("models"); + var configs = (List>) get(TaskType.COMPLETION, oldClusterId).get("endpoints"); var serviceSettings = (Map) configs.get(0).get("service_settings"); assertThat(serviceSettings, hasEntry("model_id", "gpt-4")); var taskSettings = (Map) configs.get(0).get("task_settings"); @@ -146,7 +146,7 @@ public void testOpenAiCompletions() throws IOException { assertCompletionInference(oldClusterId); put(upgradedClusterId, chatCompletionsConfig(getUrl(openAiChatCompletionsServer)), TaskType.COMPLETION); - configs = (List>) get(TaskType.COMPLETION, upgradedClusterId).get("models"); + configs = (List>) get(TaskType.COMPLETION, upgradedClusterId).get("endpoints"); assertThat(configs, hasSize(1)); // Inference on the new config From bc49a1fbf312333c4a48018651669adff4be52cf Mon Sep 17 00:00:00 2001 From: Moritz Mack Date: Wed, 8 May 2024 07:01:17 +0200 Subject: [PATCH 046/117] Update YAML REST docs: Synthetic version features are available up to 8.15.0 (#108375) --- .../yamlRestTest/resources/rest-api-spec/test/README.asciidoc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/README.asciidoc b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/README.asciidoc index 0fcedece97f01..baec8169b4f76 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/README.asciidoc +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/README.asciidoc @@ -143,7 +143,7 @@ The `cluster_features` field can either be a string or an array of strings. [[synthetic_cluster_features]] Note: In order to smoothen the transition from version checks to cluster feature checks, a REST-test specific -synthetic cluster feature named `gte_v{VERSION}` is available for all release versions until including 8.14.0. +synthetic cluster feature named `gte_v{VERSION}` is available for all release versions up to 8.15.0. 
For instance, `gte_v8.12.2` would be available for all release versions greater than or equal to 8.12.2. [[skip_known_issues]] From 1db38407d97cd51476d8a5d5022c360ad1d850fc Mon Sep 17 00:00:00 2001 From: Andrew Wilkins Date: Wed, 8 May 2024 14:48:31 +0800 Subject: [PATCH 047/117] ecs@mappings: reduce scope for ecs_geo_point (#108349) * ecs@mappings: reduce scope for ecs_geo_point Match on the path `*.geo.location`, rather than `location` or `*.location`. All ECS fields match `*.geo.location`, and this reduces the chances of matching non-ECS fields that happen to end in "location". --- docs/changelog/108349.yaml | 6 ++++++ .../datastreams/EcsLogsDataStreamIT.java | 17 ++++++++++------- .../datastreams/LogsDataStreamIT.java | 18 +++++------------- .../src/main/resources/ecs@mappings.json | 3 +-- .../stack/LegacyStackTemplateRegistry.java | 2 +- .../xpack/stack/StackTemplateRegistry.java | 2 +- 6 files changed, 24 insertions(+), 24 deletions(-) create mode 100644 docs/changelog/108349.yaml diff --git a/docs/changelog/108349.yaml b/docs/changelog/108349.yaml new file mode 100644 index 0000000000000..6d9ea3d658dca --- /dev/null +++ b/docs/changelog/108349.yaml @@ -0,0 +1,6 @@ +pr: 108349 +summary: "Ecs@mappings: reduce scope for `ecs_geo_point`" +area: Data streams +type: bug +issues: + - 108338 diff --git a/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/EcsLogsDataStreamIT.java b/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/EcsLogsDataStreamIT.java index 3802d572e04dd..5fe72c38078ee 100644 --- a/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/EcsLogsDataStreamIT.java +++ b/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/EcsLogsDataStreamIT.java @@ -201,12 +201,12 @@ public void testGeneralMockupEcsMappings() throws Exception { "host": { "cpu": { "usage": 0.68 - } - }, - "geo": { - "location": { - "lon": -73.614830, - "lat": 45.505918 + }, + "geo": { + "location": { + 
"lon": -73.614830, + "lat": 45.505918 + } } }, "data_stream": { @@ -414,7 +414,10 @@ public void testGeneralMockupEcsMappings() throws Exception { getValueFromPath(properties, List.of("host", "properties", "cpu", "properties", "usage", "scaling_factor")), is(1000.0) ); - assertThat(getValueFromPath(properties, List.of("geo", "properties", "location", "type")), is("geo_point")); + assertThat( + getValueFromPath(properties, List.of("host", "properties", "geo", "properties", "location", "type")), + is("geo_point") + ); assertThat(getValueFromPath(properties, List.of("data_stream", "properties", "dataset", "type")), is("constant_keyword")); assertThat(getValueFromPath(properties, List.of("data_stream", "properties", "namespace", "type")), is("constant_keyword")); assertThat(getValueFromPath(properties, List.of("data_stream", "properties", "type", "type")), is("constant_keyword")); diff --git a/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/LogsDataStreamIT.java b/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/LogsDataStreamIT.java index 2370cca08b23e..79d33a95c4709 100644 --- a/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/LogsDataStreamIT.java +++ b/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/LogsDataStreamIT.java @@ -463,7 +463,6 @@ public void testNoSubobjects() throws Exception { { "@timestamp": "2023-06-12", "start_timestamp": "2023-06-08", - "location" : "POINT (-71.34 41.12)", "test": "flattened", "test.start_timestamp": "not a date", "test.start-timestamp": "not a date", @@ -497,7 +496,7 @@ public void testNoSubobjects() throws Exception { "vulnerability.score.version": "2.0", "vulnerability.textual_score": "bad", "host.cpu.usage": 0.68, - "geo.location": [-73.614830, 45.505918], + "host.geo.location": [-73.614830, 45.505918], "data_stream.dataset": "nginx.access", "data_stream.namespace": "production", "data_stream.custom": "whatever", @@ -521,8 +520,7 
@@ public void testNoSubobjects() throws Exception { }, "fields": [ "data_stream.type", - "location", - "geo.location", + "host.geo.location", "test.start-timestamp", "test.start_timestamp", "vulnerability.textual_score" @@ -537,14 +535,9 @@ public void testNoSubobjects() throws Exception { // verify that data_stream.type has the correct constant_keyword value assertThat(fields.get("data_stream.type"), is(List.of("logs"))); // verify geo_point subfields evaluation - assertThat(((List>) fields.get("location")).get(0).get("type"), is("Point")); - List coordinates = ((List>>) fields.get("location")).get(0).get("coordinates"); - assertThat(coordinates.size(), is(2)); - assertThat(coordinates.get(0), equalTo(-71.34)); - assertThat(coordinates.get(1), equalTo(41.12)); - List geoLocation = (List) fields.get("geo.location"); + List geoLocation = (List) fields.get("host.geo.location"); assertThat(((Map) geoLocation.get(0)).get("type"), is("Point")); - coordinates = ((Map>) geoLocation.get(0)).get("coordinates"); + List coordinates = ((Map>) geoLocation.get(0)).get("coordinates"); assertThat(coordinates.size(), is(2)); assertThat(coordinates.get(0), equalTo(-73.614830)); assertThat(coordinates.get(1), equalTo(45.505918)); @@ -612,8 +605,7 @@ public void testNoSubobjects() throws Exception { assertThat(getValueFromPath(properties, List.of("vulnerability.textual_score", "type")), is("float")); assertThat(getValueFromPath(properties, List.of("host.cpu.usage", "type")), is("scaled_float")); assertThat(getValueFromPath(properties, List.of("host.cpu.usage", "scaling_factor")), is(1000.0)); - assertThat(getValueFromPath(properties, List.of("location", "type")), is("geo_point")); - assertThat(getValueFromPath(properties, List.of("geo.location", "type")), is("geo_point")); + assertThat(getValueFromPath(properties, List.of("host.geo.location", "type")), is("geo_point")); assertThat(getValueFromPath(properties, List.of("data_stream.dataset", "type")), is("constant_keyword")); 
assertThat(getValueFromPath(properties, List.of("data_stream.namespace", "type")), is("constant_keyword")); assertThat(getValueFromPath(properties, List.of("data_stream.type", "type")), is("constant_keyword")); diff --git a/x-pack/plugin/core/template-resources/src/main/resources/ecs@mappings.json b/x-pack/plugin/core/template-resources/src/main/resources/ecs@mappings.json index 3eae6c1fa4f5a..1951431859ffe 100644 --- a/x-pack/plugin/core/template-resources/src/main/resources/ecs@mappings.json +++ b/x-pack/plugin/core/template-resources/src/main/resources/ecs@mappings.json @@ -189,8 +189,7 @@ "type": "geo_point" }, "path_match": [ - "location", - "*.location" + "*.geo.location" ] } }, diff --git a/x-pack/plugin/stack/src/main/java/org/elasticsearch/xpack/stack/LegacyStackTemplateRegistry.java b/x-pack/plugin/stack/src/main/java/org/elasticsearch/xpack/stack/LegacyStackTemplateRegistry.java index 0612648078edc..b2dc04c1178e4 100644 --- a/x-pack/plugin/stack/src/main/java/org/elasticsearch/xpack/stack/LegacyStackTemplateRegistry.java +++ b/x-pack/plugin/stack/src/main/java/org/elasticsearch/xpack/stack/LegacyStackTemplateRegistry.java @@ -43,7 +43,7 @@ public class LegacyStackTemplateRegistry extends IndexTemplateRegistry { // The stack template registry version. This number must be incremented when we make changes // to built-in templates. 
- public static final int REGISTRY_VERSION = 4; + public static final int REGISTRY_VERSION = 5; public static final String TEMPLATE_VERSION_VARIABLE = "xpack.stack.template.version"; diff --git a/x-pack/plugin/stack/src/main/java/org/elasticsearch/xpack/stack/StackTemplateRegistry.java b/x-pack/plugin/stack/src/main/java/org/elasticsearch/xpack/stack/StackTemplateRegistry.java index 3930cfe6cd941..4fdb2d05c5326 100644 --- a/x-pack/plugin/stack/src/main/java/org/elasticsearch/xpack/stack/StackTemplateRegistry.java +++ b/x-pack/plugin/stack/src/main/java/org/elasticsearch/xpack/stack/StackTemplateRegistry.java @@ -47,7 +47,7 @@ public class StackTemplateRegistry extends IndexTemplateRegistry { // The stack template registry version. This number must be incremented when we make changes // to built-in templates. - public static final int REGISTRY_VERSION = 9; + public static final int REGISTRY_VERSION = 10; public static final String TEMPLATE_VERSION_VARIABLE = "xpack.stack.template.version"; public static final Setting STACK_TEMPLATES_ENABLED = Setting.boolSetting( From 4d9c2df47e4eb35e11dc93661d073b706f832127 Mon Sep 17 00:00:00 2001 From: Jedr Blaszyk Date: Wed, 8 May 2024 09:23:26 +0200 Subject: [PATCH 048/117] [Bugfix] Connector API - fix status serialisation issue in termquery (#108365) --- docs/changelog/108365.yaml | 5 +++++ .../application/connector/ConnectorIndexService.java | 4 ++-- .../syncjob/ConnectorSyncJobIndexService.java | 11 +++++++---- 3 files changed, 14 insertions(+), 6 deletions(-) create mode 100644 docs/changelog/108365.yaml diff --git a/docs/changelog/108365.yaml b/docs/changelog/108365.yaml new file mode 100644 index 0000000000000..d94486e2f3ea7 --- /dev/null +++ b/docs/changelog/108365.yaml @@ -0,0 +1,5 @@ +pr: 108365 +summary: "[Bugfix] Connector API - fix status serialisation issue in termquery" +area: Application +type: bug +issues: [] diff --git 
a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorIndexService.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorIndexService.java index 99240d6b6d49d..333ef30f078e6 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorIndexService.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorIndexService.java @@ -823,7 +823,7 @@ public void updateConnectorNative(UpdateConnectorNativeAction.Request request, A Connector.IS_NATIVE_FIELD.getPreferredName(), request.isNative(), Connector.STATUS_FIELD.getPreferredName(), - ConnectorStatus.CONFIGURED + ConnectorStatus.CONFIGURED.toString() ) ) @@ -969,7 +969,7 @@ public void updateConnectorServiceType(UpdateConnectorServiceTypeAction.Request Connector.SERVICE_TYPE_FIELD.getPreferredName(), request.getServiceType(), Connector.STATUS_FIELD.getPreferredName(), - newStatus + newStatus.toString() ) ) diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobIndexService.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobIndexService.java index f5ab8309e27e7..4316b4bccd9bc 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobIndexService.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobIndexService.java @@ -266,7 +266,7 @@ public void cancelConnectorSyncJob(String connectorSyncJobId, ActionListener Date: Wed, 8 May 2024 10:25:23 +0300 Subject: [PATCH 049/117] Fix Netty4HttpServerTransportTests testChannelAcceptorCannotTamperThreadContext (#108374) Fix Netty4HttpServerTransportTests testChannelAcceptorCannotTamperThreadContext --- 
.../http/netty4/Netty4HttpServerTransportTests.java | 12 ++++-------- 1 file changed, 4 insertions(+), 8 deletions(-) diff --git a/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4HttpServerTransportTests.java b/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4HttpServerTransportTests.java index e202d99218144..d2be4212cf41e 100644 --- a/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4HttpServerTransportTests.java +++ b/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4HttpServerTransportTests.java @@ -568,11 +568,9 @@ public void dispatchBadRequest(final RestChannel channel, final ThreadContext th } }; // there's only one netty worker thread that's reused across client requests - Settings settings = Settings.builder() - .put(Netty4Plugin.WORKER_COUNT.getKey(), 1) + Settings settings = createBuilderWithPort().put(Netty4Plugin.WORKER_COUNT.getKey(), 1) .put(Netty4Plugin.SETTING_HTTP_WORKER_COUNT.getKey(), 0) .build(); - NioEventLoopGroup group = new NioEventLoopGroup(); AtomicBoolean acceptChannel = new AtomicBoolean(); try ( Netty4HttpServerTransport transport = new Netty4HttpServerTransport( @@ -601,9 +599,9 @@ public boolean test(String profile, InetSocketAddress peerAddress) { ) { transport.start(); int nRetries = randomIntBetween(7, 9); - for (int i = 0; i < nRetries; i++) { - acceptChannel.set(randomBoolean()); - try (Netty4HttpClient client = new Netty4HttpClient()) { + try (Netty4HttpClient client = new Netty4HttpClient()) { + for (int i = 0; i < nRetries; i++) { + acceptChannel.set(randomBoolean()); var responses = client.get(randomFrom(transport.boundAddress().boundAddresses()).address(), "/test/url"); try { if (acceptChannel.get()) { @@ -619,8 +617,6 @@ public boolean test(String profile, InetSocketAddress peerAddress) { } } } - } finally { - group.shutdownGracefully().await(); } } From 678886a2b80b33ec78bc1dca20562b21037df59c Mon Sep 17 00:00:00 2001 
From: Andrew Wilkins Date: Wed, 8 May 2024 15:34:13 +0800 Subject: [PATCH 050/117] apm-data: improve default pipeline performance (#108396) * apm-data: ingest pipeline per data stream --- docs/changelog/108396.yaml | 6 + .../logs-apm.app@template.yaml | 2 +- .../logs-apm.error@template.yaml | 2 +- .../metrics-apm.app@template.yaml | 2 +- .../metrics-apm.internal@template.yaml | 2 +- ...-apm.service_destination.10m@template.yaml | 2 +- ...s-apm.service_destination.1m@template.yaml | 2 +- ...-apm.service_destination.60m@template.yaml | 2 +- ...rics-apm.service_summary.10m@template.yaml | 2 +- ...trics-apm.service_summary.1m@template.yaml | 2 +- ...rics-apm.service_summary.60m@template.yaml | 2 +- ...-apm.service_transaction.10m@template.yaml | 2 +- ...s-apm.service_transaction.1m@template.yaml | 2 +- ...-apm.service_transaction.60m@template.yaml | 2 +- .../metrics-apm.transaction.10m@template.yaml | 2 +- .../metrics-apm.transaction.1m@template.yaml | 2 +- .../metrics-apm.transaction.60m@template.yaml | 2 +- .../traces-apm.rum@template.yaml | 2 +- .../index-templates/traces-apm@template.yaml | 2 +- .../apm@default-pipeline.yaml | 56 ----- .../logs-apm.app@default-pipeline.yaml | 22 ++ .../logs-apm.error@default-pipeline.yaml | 22 ++ .../metrics-apm.app@default-pipeline.yaml | 22 ++ ...metrics-apm.internal@default-pipeline.yaml | 38 ++++ ....service_destination@default-pipeline.yaml | 23 ++ ...-apm.service_summary@default-pipeline.yaml | 23 ++ ....service_transaction@default-pipeline.yaml | 23 ++ ...rics-apm.transaction@default-pipeline.yaml | 23 ++ .../traces-apm.rum@default-pipeline.yaml | 22 ++ .../traces-apm@default-pipeline.yaml | 22 ++ .../src/main/resources/resources.yaml | 24 +- .../APMIndexTemplateRegistryTests.java | 30 +++ .../test/30_custom_pipelines.yml | 205 ++++++++++++++++-- 33 files changed, 507 insertions(+), 90 deletions(-) create mode 100644 docs/changelog/108396.yaml delete mode 100644 
x-pack/plugin/apm-data/src/main/resources/ingest-pipelines/apm@default-pipeline.yaml create mode 100644 x-pack/plugin/apm-data/src/main/resources/ingest-pipelines/logs-apm.app@default-pipeline.yaml create mode 100644 x-pack/plugin/apm-data/src/main/resources/ingest-pipelines/logs-apm.error@default-pipeline.yaml create mode 100644 x-pack/plugin/apm-data/src/main/resources/ingest-pipelines/metrics-apm.app@default-pipeline.yaml create mode 100644 x-pack/plugin/apm-data/src/main/resources/ingest-pipelines/metrics-apm.internal@default-pipeline.yaml create mode 100644 x-pack/plugin/apm-data/src/main/resources/ingest-pipelines/metrics-apm.service_destination@default-pipeline.yaml create mode 100644 x-pack/plugin/apm-data/src/main/resources/ingest-pipelines/metrics-apm.service_summary@default-pipeline.yaml create mode 100644 x-pack/plugin/apm-data/src/main/resources/ingest-pipelines/metrics-apm.service_transaction@default-pipeline.yaml create mode 100644 x-pack/plugin/apm-data/src/main/resources/ingest-pipelines/metrics-apm.transaction@default-pipeline.yaml create mode 100644 x-pack/plugin/apm-data/src/main/resources/ingest-pipelines/traces-apm.rum@default-pipeline.yaml create mode 100644 x-pack/plugin/apm-data/src/main/resources/ingest-pipelines/traces-apm@default-pipeline.yaml diff --git a/docs/changelog/108396.yaml b/docs/changelog/108396.yaml new file mode 100644 index 0000000000000..63937646b755c --- /dev/null +++ b/docs/changelog/108396.yaml @@ -0,0 +1,6 @@ +pr: 108396 +summary: "Apm-data: improve default pipeline performance" +area: Data streams +type: enhancement +issues: + - 108290 diff --git a/x-pack/plugin/apm-data/src/main/resources/index-templates/logs-apm.app@template.yaml b/x-pack/plugin/apm-data/src/main/resources/index-templates/logs-apm.app@template.yaml index 3d9c1490e5a86..6c5d991621315 100644 --- a/x-pack/plugin/apm-data/src/main/resources/index-templates/logs-apm.app@template.yaml +++ 
b/x-pack/plugin/apm-data/src/main/resources/index-templates/logs-apm.app@template.yaml @@ -20,5 +20,5 @@ ignore_missing_component_templates: template: settings: index: - default_pipeline: apm@default-pipeline + default_pipeline: logs-apm.app@default-pipeline final_pipeline: apm@pipeline diff --git a/x-pack/plugin/apm-data/src/main/resources/index-templates/logs-apm.error@template.yaml b/x-pack/plugin/apm-data/src/main/resources/index-templates/logs-apm.error@template.yaml index 4adcf125b2df9..6373363774602 100644 --- a/x-pack/plugin/apm-data/src/main/resources/index-templates/logs-apm.error@template.yaml +++ b/x-pack/plugin/apm-data/src/main/resources/index-templates/logs-apm.error@template.yaml @@ -27,5 +27,5 @@ template: value: error settings: index: - default_pipeline: apm@default-pipeline + default_pipeline: logs-apm.error@default-pipeline final_pipeline: apm@pipeline diff --git a/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.app@template.yaml b/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.app@template.yaml index c2233469110f8..a8f3e8a4c99e3 100644 --- a/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.app@template.yaml +++ b/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.app@template.yaml @@ -22,5 +22,5 @@ ignore_missing_component_templates: template: settings: index: - default_pipeline: apm@default-pipeline + default_pipeline: metrics-apm.app@default-pipeline final_pipeline: metrics-apm@pipeline diff --git a/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.internal@template.yaml b/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.internal@template.yaml index 3d6d05c58e780..1aa06a361b722 100644 --- a/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.internal@template.yaml +++ b/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.internal@template.yaml @@ -23,7 +23,7 @@ 
ignore_missing_component_templates: template: settings: index: - default_pipeline: apm@default-pipeline + default_pipeline: metrics-apm.internal@default-pipeline final_pipeline: metrics-apm@pipeline mappings: properties: diff --git a/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.service_destination.10m@template.yaml b/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.service_destination.10m@template.yaml index f234b60b1a6ec..729110457f53e 100644 --- a/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.service_destination.10m@template.yaml +++ b/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.service_destination.10m@template.yaml @@ -25,5 +25,5 @@ ignore_missing_component_templates: template: settings: index: - default_pipeline: apm@default-pipeline + default_pipeline: metrics-apm.service_destination@default-pipeline final_pipeline: metrics-apm@pipeline diff --git a/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.service_destination.1m@template.yaml b/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.service_destination.1m@template.yaml index aa4f212532e56..0e18d1cd179ef 100644 --- a/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.service_destination.1m@template.yaml +++ b/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.service_destination.1m@template.yaml @@ -24,5 +24,5 @@ ignore_missing_component_templates: template: settings: index: - default_pipeline: apm@default-pipeline + default_pipeline: metrics-apm.service_destination@default-pipeline final_pipeline: metrics-apm@pipeline diff --git a/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.service_destination.60m@template.yaml b/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.service_destination.60m@template.yaml index 9b1a26486f482..d349c62e2255c 100644 --- 
a/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.service_destination.60m@template.yaml +++ b/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.service_destination.60m@template.yaml @@ -25,5 +25,5 @@ ignore_missing_component_templates: template: settings: index: - default_pipeline: apm@default-pipeline + default_pipeline: metrics-apm.service_destination@default-pipeline final_pipeline: metrics-apm@pipeline diff --git a/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.service_summary.10m@template.yaml b/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.service_summary.10m@template.yaml index c37ec93651d9d..f71a4c70abde7 100644 --- a/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.service_summary.10m@template.yaml +++ b/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.service_summary.10m@template.yaml @@ -25,5 +25,5 @@ ignore_missing_component_templates: template: settings: index: - default_pipeline: apm@default-pipeline + default_pipeline: metrics-apm.service_summary@default-pipeline final_pipeline: metrics-apm@pipeline diff --git a/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.service_summary.1m@template.yaml b/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.service_summary.1m@template.yaml index 3a99bc8472c66..218fbb2eaac87 100644 --- a/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.service_summary.1m@template.yaml +++ b/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.service_summary.1m@template.yaml @@ -24,5 +24,5 @@ ignore_missing_component_templates: template: settings: index: - default_pipeline: apm@default-pipeline + default_pipeline: metrics-apm.service_summary@default-pipeline final_pipeline: metrics-apm@pipeline diff --git a/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.service_summary.60m@template.yaml 
b/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.service_summary.60m@template.yaml index d829967f7eddf..9421b8e2f1fce 100644 --- a/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.service_summary.60m@template.yaml +++ b/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.service_summary.60m@template.yaml @@ -25,5 +25,5 @@ ignore_missing_component_templates: template: settings: index: - default_pipeline: apm@default-pipeline + default_pipeline: metrics-apm.service_summary@default-pipeline final_pipeline: metrics-apm@pipeline diff --git a/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.service_transaction.10m@template.yaml b/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.service_transaction.10m@template.yaml index bc21b35d4777f..5e8b7e94673f4 100644 --- a/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.service_transaction.10m@template.yaml +++ b/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.service_transaction.10m@template.yaml @@ -25,5 +25,5 @@ ignore_missing_component_templates: template: settings: index: - default_pipeline: apm@default-pipeline + default_pipeline: metrics-apm.service_transaction@default-pipeline final_pipeline: metrics-apm@pipeline diff --git a/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.service_transaction.1m@template.yaml b/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.service_transaction.1m@template.yaml index 87a1e254baea7..c51bd79c6513d 100644 --- a/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.service_transaction.1m@template.yaml +++ b/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.service_transaction.1m@template.yaml @@ -24,5 +24,5 @@ ignore_missing_component_templates: template: settings: index: - default_pipeline: apm@default-pipeline + default_pipeline: 
metrics-apm.service_transaction@default-pipeline final_pipeline: metrics-apm@pipeline diff --git a/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.service_transaction.60m@template.yaml b/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.service_transaction.60m@template.yaml index b45ce0ec0fad7..22e56fd7cabca 100644 --- a/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.service_transaction.60m@template.yaml +++ b/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.service_transaction.60m@template.yaml @@ -25,5 +25,5 @@ ignore_missing_component_templates: template: settings: index: - default_pipeline: apm@default-pipeline + default_pipeline: metrics-apm.service_transaction@default-pipeline final_pipeline: metrics-apm@pipeline diff --git a/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.transaction.10m@template.yaml b/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.transaction.10m@template.yaml index 51d3c90cb4af8..6b4102bb673b8 100644 --- a/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.transaction.10m@template.yaml +++ b/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.transaction.10m@template.yaml @@ -25,5 +25,5 @@ ignore_missing_component_templates: template: settings: index: - default_pipeline: apm@default-pipeline + default_pipeline: metrics-apm.transaction@default-pipeline final_pipeline: metrics-apm@pipeline diff --git a/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.transaction.1m@template.yaml b/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.transaction.1m@template.yaml index 8825a93db28dc..7b10125fbce99 100644 --- a/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.transaction.1m@template.yaml +++ b/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.transaction.1m@template.yaml @@ -24,5 +24,5 @@ 
ignore_missing_component_templates: template: settings: index: - default_pipeline: apm@default-pipeline + default_pipeline: metrics-apm.transaction@default-pipeline final_pipeline: metrics-apm@pipeline diff --git a/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.transaction.60m@template.yaml b/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.transaction.60m@template.yaml index e6657fbfe5d28..62359a8729f08 100644 --- a/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.transaction.60m@template.yaml +++ b/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.transaction.60m@template.yaml @@ -25,5 +25,5 @@ ignore_missing_component_templates: template: settings: index: - default_pipeline: apm@default-pipeline + default_pipeline: metrics-apm.transaction@default-pipeline final_pipeline: metrics-apm@pipeline diff --git a/x-pack/plugin/apm-data/src/main/resources/index-templates/traces-apm.rum@template.yaml b/x-pack/plugin/apm-data/src/main/resources/index-templates/traces-apm.rum@template.yaml index 174aec8c5515a..4f4d9a6a7e7d8 100644 --- a/x-pack/plugin/apm-data/src/main/resources/index-templates/traces-apm.rum@template.yaml +++ b/x-pack/plugin/apm-data/src/main/resources/index-templates/traces-apm.rum@template.yaml @@ -23,7 +23,7 @@ ignore_missing_component_templates: template: settings: index: - default_pipeline: apm@default-pipeline + default_pipeline: traces-apm.rum@default-pipeline final_pipeline: traces-apm@pipeline mappings: properties: diff --git a/x-pack/plugin/apm-data/src/main/resources/index-templates/traces-apm@template.yaml b/x-pack/plugin/apm-data/src/main/resources/index-templates/traces-apm@template.yaml index de9c47dfd3f1b..e5c2ef8d57471 100644 --- a/x-pack/plugin/apm-data/src/main/resources/index-templates/traces-apm@template.yaml +++ b/x-pack/plugin/apm-data/src/main/resources/index-templates/traces-apm@template.yaml @@ -22,7 +22,7 @@ ignore_missing_component_templates: 
template: settings: index: - default_pipeline: apm@default-pipeline + default_pipeline: traces-apm@default-pipeline final_pipeline: traces-apm@pipeline mappings: properties: diff --git a/x-pack/plugin/apm-data/src/main/resources/ingest-pipelines/apm@default-pipeline.yaml b/x-pack/plugin/apm-data/src/main/resources/ingest-pipelines/apm@default-pipeline.yaml deleted file mode 100644 index 65d8840e8f713..0000000000000 --- a/x-pack/plugin/apm-data/src/main/resources/ingest-pipelines/apm@default-pipeline.yaml +++ /dev/null @@ -1,56 +0,0 @@ ---- -version: ${xpack.apmdata.template.version} -_meta: - managed: true -description: | - Built-in default ingest pipeline for all APM data streams. - - This pipeline exists purely for routing, which cannot be - performed in a final pipeline, and for invoking user-defined - custom pipelines. All built-in processing occurs in the final - pipelines. -processors: - # Older versions of apm-server write various metrics to the - # metrics-apm.internal data stream, which newer versions break - # into separate datasets. We reroute these metrics coming from - # older versions of apm-server based on 'metricset.name'. 
-- set: - if: | - (ctx.data_stream?.dataset == 'apm.internal' || ctx['data_stream.dataset'] == 'apm.internal') && - (ctx.metricset?.name == 'transaction' || ctx.metricset?.name == 'service_destination') - field: metricset.interval - value: 1m - override: false -- reroute: - if: | - (ctx.data_stream?.dataset == 'apm.internal' || ctx['data_stream.dataset'] == 'apm.internal') && - (ctx.metricset?.name == 'transaction') - dataset: apm.transaction.1m -- reroute: - if: | - (ctx.data_stream?.dataset == 'apm.internal' || ctx['data_stream.dataset'] == 'apm.internal') && - (ctx.metricset?.name == 'service_destination') - dataset: apm.service_destination.1m - -# Invoke user-defined custom pipelines, in ascending order of specificity: -- pipeline: - name: global@custom - ignore_missing_pipeline: true -- pipeline: - name: "{{{data_stream.type}}}@custom" - ignore_missing_pipeline: true -- pipeline: - if: "ctx?.data_stream?.dataset != 'apm'" - name: "{{{data_stream.type}}}-apm@custom" - ignore_missing_pipeline: true -- pipeline: - # (logs|metrics)-apm.app.-* should invoke (logs|metrics)-apm.app@custom, - # i.e. excluding service.name from the dataset. - if: "ctx.data_stream?.dataset != null && ctx.data_stream?.dataset.startsWith('apm.app.')" - name: "{{{data_stream.type}}}-apm.app@custom" - ignore_missing_pipeline: true -- pipeline: - # other data streams should include the whole dataset. 
- if: "ctx.data_stream?.dataset != null && !ctx.data_stream?.dataset.startsWith('apm.app.')" - name: "{{{data_stream.type}}}-{{{data_stream.dataset}}}@custom" - ignore_missing_pipeline: true diff --git a/x-pack/plugin/apm-data/src/main/resources/ingest-pipelines/logs-apm.app@default-pipeline.yaml b/x-pack/plugin/apm-data/src/main/resources/ingest-pipelines/logs-apm.app@default-pipeline.yaml new file mode 100644 index 0000000000000..a1f9565676fd4 --- /dev/null +++ b/x-pack/plugin/apm-data/src/main/resources/ingest-pipelines/logs-apm.app@default-pipeline.yaml @@ -0,0 +1,22 @@ +--- +version: ${xpack.apmdata.template.version} +_meta: + managed: true +description: | + Built-in default ingest pipeline for logs-apm.app.*-* data streams. + + This pipeline exists purely for routing, which cannot be + performed in a final pipeline, and for invoking user-defined + custom pipelines. All built-in processing occurs in the final + pipelines. +processors: +# Invoke user-defined custom pipelines, in ascending order of specificity: +- pipeline: + name: global@custom + ignore_missing_pipeline: true +- pipeline: + name: logs@custom + ignore_missing_pipeline: true +- pipeline: + name: logs-apm.app@custom + ignore_missing_pipeline: true diff --git a/x-pack/plugin/apm-data/src/main/resources/ingest-pipelines/logs-apm.error@default-pipeline.yaml b/x-pack/plugin/apm-data/src/main/resources/ingest-pipelines/logs-apm.error@default-pipeline.yaml new file mode 100644 index 0000000000000..c46a1c1b44f96 --- /dev/null +++ b/x-pack/plugin/apm-data/src/main/resources/ingest-pipelines/logs-apm.error@default-pipeline.yaml @@ -0,0 +1,22 @@ +--- +version: ${xpack.apmdata.template.version} +_meta: + managed: true +description: | + Built-in default ingest pipeline for logs-apm.error-* data streams. + + This pipeline exists purely for routing, which cannot be + performed in a final pipeline, and for invoking user-defined + custom pipelines. All built-in processing occurs in the final + pipelines. 
+processors: +# Invoke user-defined custom pipelines, in ascending order of specificity: +- pipeline: + name: global@custom + ignore_missing_pipeline: true +- pipeline: + name: logs@custom + ignore_missing_pipeline: true +- pipeline: + name: logs-apm.error@custom + ignore_missing_pipeline: true diff --git a/x-pack/plugin/apm-data/src/main/resources/ingest-pipelines/metrics-apm.app@default-pipeline.yaml b/x-pack/plugin/apm-data/src/main/resources/ingest-pipelines/metrics-apm.app@default-pipeline.yaml new file mode 100644 index 0000000000000..bc07840727cca --- /dev/null +++ b/x-pack/plugin/apm-data/src/main/resources/ingest-pipelines/metrics-apm.app@default-pipeline.yaml @@ -0,0 +1,22 @@ +--- +version: ${xpack.apmdata.template.version} +_meta: + managed: true +description: | + Built-in default ingest pipeline for metrics-apm.app.*-* data streams. + + This pipeline exists purely for routing, which cannot be + performed in a final pipeline, and for invoking user-defined + custom pipelines. All built-in processing occurs in the final + pipelines. +processors: +# Invoke user-defined custom pipelines, in ascending order of specificity: +- pipeline: + name: global@custom + ignore_missing_pipeline: true +- pipeline: + name: metrics@custom + ignore_missing_pipeline: true +- pipeline: + name: metrics-apm.app@custom + ignore_missing_pipeline: true diff --git a/x-pack/plugin/apm-data/src/main/resources/ingest-pipelines/metrics-apm.internal@default-pipeline.yaml b/x-pack/plugin/apm-data/src/main/resources/ingest-pipelines/metrics-apm.internal@default-pipeline.yaml new file mode 100644 index 0000000000000..247ee4cae67f0 --- /dev/null +++ b/x-pack/plugin/apm-data/src/main/resources/ingest-pipelines/metrics-apm.internal@default-pipeline.yaml @@ -0,0 +1,38 @@ +--- +version: ${xpack.apmdata.template.version} +_meta: + managed: true +description: | + Built-in default ingest pipeline for metrics-apm.internal-* data streams. 
+ + This pipeline exists purely for routing, which cannot be + performed in a final pipeline, and for invoking user-defined + custom pipelines. All built-in processing occurs in the final + pipelines. +processors: + # Older versions of apm-server write various metrics to the + # metrics-apm.internal data stream, which newer versions break + # into separate datasets. We reroute these metrics coming from + # older versions of apm-server based on 'metricset.name'. +- set: + if: "ctx.metricset?.name == 'transaction' || ctx.metricset?.name == 'service_destination'" + field: metricset.interval + value: 1m + override: false +- reroute: + if: "ctx.metricset?.name == 'transaction'" + dataset: apm.transaction.1m +- reroute: + if: "ctx.metricset?.name == 'service_destination'" + dataset: apm.service_destination.1m + +# Invoke user-defined custom pipelines, in ascending order of specificity: +- pipeline: + name: global@custom + ignore_missing_pipeline: true +- pipeline: + name: metrics@custom + ignore_missing_pipeline: true +- pipeline: + name: metrics-apm.internal@custom + ignore_missing_pipeline: true diff --git a/x-pack/plugin/apm-data/src/main/resources/ingest-pipelines/metrics-apm.service_destination@default-pipeline.yaml b/x-pack/plugin/apm-data/src/main/resources/ingest-pipelines/metrics-apm.service_destination@default-pipeline.yaml new file mode 100644 index 0000000000000..d8912fc2dd220 --- /dev/null +++ b/x-pack/plugin/apm-data/src/main/resources/ingest-pipelines/metrics-apm.service_destination@default-pipeline.yaml @@ -0,0 +1,23 @@ +--- +version: ${xpack.apmdata.template.version} +_meta: + managed: true +description: | + Built-in default ingest pipeline for metrics-apm.service_destination.*-* + data streams. + + This pipeline exists purely for routing, which cannot be + performed in a final pipeline, and for invoking user-defined + custom pipelines. All built-in processing occurs in the final + pipelines. 
+processors: +# Invoke user-defined custom pipelines, in ascending order of specificity: +- pipeline: + name: global@custom + ignore_missing_pipeline: true +- pipeline: + name: metrics@custom + ignore_missing_pipeline: true +- pipeline: + name: metrics-apm.service_destination@custom + ignore_missing_pipeline: true diff --git a/x-pack/plugin/apm-data/src/main/resources/ingest-pipelines/metrics-apm.service_summary@default-pipeline.yaml b/x-pack/plugin/apm-data/src/main/resources/ingest-pipelines/metrics-apm.service_summary@default-pipeline.yaml new file mode 100644 index 0000000000000..4cf5652e46bf4 --- /dev/null +++ b/x-pack/plugin/apm-data/src/main/resources/ingest-pipelines/metrics-apm.service_summary@default-pipeline.yaml @@ -0,0 +1,23 @@ +--- +version: ${xpack.apmdata.template.version} +_meta: + managed: true +description: | + Built-in default ingest pipeline for metrics-apm.service_summary.*-* + data streams. + + This pipeline exists purely for routing, which cannot be + performed in a final pipeline, and for invoking user-defined + custom pipelines. All built-in processing occurs in the final + pipelines. 
+processors: +# Invoke user-defined custom pipelines, in ascending order of specificity: +- pipeline: + name: global@custom + ignore_missing_pipeline: true +- pipeline: + name: metrics@custom + ignore_missing_pipeline: true +- pipeline: + name: metrics-apm.service_summary@custom + ignore_missing_pipeline: true diff --git a/x-pack/plugin/apm-data/src/main/resources/ingest-pipelines/metrics-apm.service_transaction@default-pipeline.yaml b/x-pack/plugin/apm-data/src/main/resources/ingest-pipelines/metrics-apm.service_transaction@default-pipeline.yaml new file mode 100644 index 0000000000000..44ab85998cee7 --- /dev/null +++ b/x-pack/plugin/apm-data/src/main/resources/ingest-pipelines/metrics-apm.service_transaction@default-pipeline.yaml @@ -0,0 +1,23 @@ +--- +version: ${xpack.apmdata.template.version} +_meta: + managed: true +description: | + Built-in default ingest pipeline for metrics-apm.service_transaction.*-* + data streams. + + This pipeline exists purely for routing, which cannot be + performed in a final pipeline, and for invoking user-defined + custom pipelines. All built-in processing occurs in the final + pipelines. 
+processors: +# Invoke user-defined custom pipelines, in ascending order of specificity: +- pipeline: + name: "global@custom" + ignore_missing_pipeline: true +- pipeline: + name: "metrics@custom" + ignore_missing_pipeline: true +- pipeline: + name: "metrics-apm.service_transaction@custom" + ignore_missing_pipeline: true diff --git a/x-pack/plugin/apm-data/src/main/resources/ingest-pipelines/metrics-apm.transaction@default-pipeline.yaml b/x-pack/plugin/apm-data/src/main/resources/ingest-pipelines/metrics-apm.transaction@default-pipeline.yaml new file mode 100644 index 0000000000000..12e58e6747b5a --- /dev/null +++ b/x-pack/plugin/apm-data/src/main/resources/ingest-pipelines/metrics-apm.transaction@default-pipeline.yaml @@ -0,0 +1,23 @@ +--- +version: ${xpack.apmdata.template.version} +_meta: + managed: true +description: | + Built-in default ingest pipeline for metrics-apm.transaction.*-* + data streams. + + This pipeline exists purely for routing, which cannot be + performed in a final pipeline, and for invoking user-defined + custom pipelines. All built-in processing occurs in the final + pipelines. +processors: +# Invoke user-defined custom pipelines, in ascending order of specificity: +- pipeline: + name: global@custom + ignore_missing_pipeline: true +- pipeline: + name: metrics@custom + ignore_missing_pipeline: true +- pipeline: + name: metrics-apm.transaction@custom + ignore_missing_pipeline: true diff --git a/x-pack/plugin/apm-data/src/main/resources/ingest-pipelines/traces-apm.rum@default-pipeline.yaml b/x-pack/plugin/apm-data/src/main/resources/ingest-pipelines/traces-apm.rum@default-pipeline.yaml new file mode 100644 index 0000000000000..b1ce73308c5bc --- /dev/null +++ b/x-pack/plugin/apm-data/src/main/resources/ingest-pipelines/traces-apm.rum@default-pipeline.yaml @@ -0,0 +1,22 @@ +--- +version: ${xpack.apmdata.template.version} +_meta: + managed: true +description: | + Built-in default ingest pipeline for traces-apm.rum-* data streams. 
+ + This pipeline exists purely for routing, which cannot be + performed in a final pipeline, and for invoking user-defined + custom pipelines. All built-in processing occurs in the final + pipeline. +processors: +# Invoke user-defined custom pipelines, in ascending order of specificity: +- pipeline: + name: global@custom + ignore_missing_pipeline: true +- pipeline: + name: traces@custom + ignore_missing_pipeline: true +- pipeline: + name: traces-apm.rum@custom + ignore_missing_pipeline: true diff --git a/x-pack/plugin/apm-data/src/main/resources/ingest-pipelines/traces-apm@default-pipeline.yaml b/x-pack/plugin/apm-data/src/main/resources/ingest-pipelines/traces-apm@default-pipeline.yaml new file mode 100644 index 0000000000000..039b6dccf7d57 --- /dev/null +++ b/x-pack/plugin/apm-data/src/main/resources/ingest-pipelines/traces-apm@default-pipeline.yaml @@ -0,0 +1,22 @@ +--- +version: ${xpack.apmdata.template.version} +_meta: + managed: true +description: | + Built-in default ingest pipeline for traces-apm-* data streams. + + This pipeline exists purely for routing, which cannot be + performed in a final pipeline, and for invoking user-defined + custom pipelines. All built-in processing occurs in the final + pipeline. +processors: +# Invoke user-defined custom pipelines, in ascending order of specificity: +- pipeline: + name: global@custom + ignore_missing_pipeline: true +- pipeline: + name: traces@custom + ignore_missing_pipeline: true +- pipeline: + name: traces-apm@custom + ignore_missing_pipeline: true diff --git a/x-pack/plugin/apm-data/src/main/resources/resources.yaml b/x-pack/plugin/apm-data/src/main/resources/resources.yaml index 2f2025c37f70f..0e27e454f867d 100644 --- a/x-pack/plugin/apm-data/src/main/resources/resources.yaml +++ b/x-pack/plugin/apm-data/src/main/resources/resources.yaml @@ -1,7 +1,7 @@ # "version" holds the version of the templates and ingest pipelines installed # by xpack-plugin apm-data. 
This must be increased whenever an existing template or # pipeline is changed, in order for it to be updated on Elasticsearch upgrade. -version: 2 +version: 3 component-templates: # Data lifecycle. @@ -49,7 +49,27 @@ index-templates: # Ingest pipeline configuration requires to manually specify pipeline dependencies ingest-pipelines: - - apm@default-pipeline: {} + # Default pipelines. + # + # Each data stream index template gets its own default pipeline, + # with the exception of the interval data streams which share one + # for all intervals of the same metric, and the sampled traces + # data stream which does not have (or need) one. + - logs-apm.app@default-pipeline: {} + - logs-apm.error@default-pipeline: {} + - metrics-apm.app@default-pipeline: {} + - metrics-apm.internal@default-pipeline: + dependencies: + - metrics-apm.service_destination@default-pipeline + - metrics-apm.transaction@default-pipeline + - metrics-apm.service_destination@default-pipeline: {} + - metrics-apm.service_summary@default-pipeline: {} + - metrics-apm.service_transaction@default-pipeline: {} + - metrics-apm.transaction@default-pipeline: {} + - traces-apm@default-pipeline: {} + - traces-apm.rum@default-pipeline: {} + + # Final pipelines. 
- apm@pipeline: {} - traces-apm@pipeline: dependencies: diff --git a/x-pack/plugin/apm-data/src/test/java/org/elasticsearch/xpack/apmdata/APMIndexTemplateRegistryTests.java b/x-pack/plugin/apm-data/src/test/java/org/elasticsearch/xpack/apmdata/APMIndexTemplateRegistryTests.java index 4f6a5b58ff38d..8228d7011c9c1 100644 --- a/x-pack/plugin/apm-data/src/test/java/org/elasticsearch/xpack/apmdata/APMIndexTemplateRegistryTests.java +++ b/x-pack/plugin/apm-data/src/test/java/org/elasticsearch/xpack/apmdata/APMIndexTemplateRegistryTests.java @@ -322,6 +322,36 @@ public void testIndexTemplateConventions() throws Exception { .filter(t -> t.endsWith("@custom")) .toList(); assertThat(requiredCustomComponentTemplates, empty()); + + final Settings settings = template.template().settings(); + if (namePrefix.equals("traces-apm.sampled")) { + // traces-apm.sampled does not have any ingest pipelines. + assertThat(settings, equalTo(null)); + } else { + final boolean isIntervalDataStream = dataStreamType.equals("metrics") && namePrefix.matches(".*\\.[0-9]+m"); + final String defaultPipeline = settings.get("index.default_pipeline"); + if (isIntervalDataStream) { + // e.g. metrics-apm.service_transaction.10m should call + // metrics-apm.service_transaction@default-pipeline + final String withoutInterval = namePrefix.substring(0, namePrefix.lastIndexOf('.')); + assertThat(defaultPipeline, equalTo(withoutInterval + "@default-pipeline")); + } else { + // All other data streams should call a default pipeline + // specific to the data stream. 
+ assertThat(defaultPipeline, equalTo(namePrefix + "@default-pipeline")); + break; + } + + final String finalPipeline = settings.get("index.final_pipeline"); + switch (dataStreamType) { + case "metrics", "traces": + assertThat(finalPipeline, equalTo(dataStreamType + "-apm@pipeline")); + break; + default: + assertThat(finalPipeline, equalTo("apm@pipeline")); + break; + } + } } } diff --git a/x-pack/plugin/apm-data/src/yamlRestTest/resources/rest-api-spec/test/30_custom_pipelines.yml b/x-pack/plugin/apm-data/src/yamlRestTest/resources/rest-api-spec/test/30_custom_pipelines.yml index 8a039e7b4eb1d..339b3b56462ac 100644 --- a/x-pack/plugin/apm-data/src/yamlRestTest/resources/rest-api-spec/test/30_custom_pipelines.yml +++ b/x-pack/plugin/apm-data/src/yamlRestTest/resources/rest-api-spec/test/30_custom_pipelines.yml @@ -22,6 +22,51 @@ setup: ] } + - do: + ingest.put_pipeline: + id: "logs@custom" + body: > + { + "processors": [ + { + "set" : { + "field": "custom_pipelines", + "value": "{{{custom_pipelines}}},{{{_ingest.pipeline}}}" + } + } + ] + } + + - do: + ingest.put_pipeline: + id: "logs-apm.app@custom" + body: > + { + "processors": [ + { + "set" : { + "field": "custom_pipelines", + "value": "{{{custom_pipelines}}},{{{_ingest.pipeline}}}" + } + } + ] + } + + - do: + ingest.put_pipeline: + id: "logs-apm.error@custom" + body: > + { + "processors": [ + { + "set" : { + "field": "custom_pipelines", + "value": "{{{custom_pipelines}}},{{{_ingest.pipeline}}}" + } + } + ] + } + - do: ingest.put_pipeline: id: "metrics@custom" @@ -39,7 +84,7 @@ setup: - do: ingest.put_pipeline: - id: "metrics-apm@custom" + id: "metrics-apm.internal@custom" body: > { "processors": [ @@ -67,6 +112,66 @@ setup: ] } + - do: + ingest.put_pipeline: + id: "metrics-apm.service_destination@custom" + body: > + { + "processors": [ + { + "set" : { + "field": "custom_pipelines", + "value": "{{{custom_pipelines}}},{{{_ingest.pipeline}}}" + } + } + ] + } + + - do: + ingest.put_pipeline: + id: 
"metrics-apm.service_summary@custom" + body: > + { + "processors": [ + { + "set" : { + "field": "custom_pipelines", + "value": "{{{custom_pipelines}}},{{{_ingest.pipeline}}}" + } + } + ] + } + + - do: + ingest.put_pipeline: + id: "metrics-apm.service_transaction@custom" + body: > + { + "processors": [ + { + "set" : { + "field": "custom_pipelines", + "value": "{{{custom_pipelines}}},{{{_ingest.pipeline}}}" + } + } + ] + } + + - do: + ingest.put_pipeline: + id: "metrics-apm.transaction@custom" + body: > + { + "processors": [ + { + "set" : { + "field": "custom_pipelines", + "value": "{{{custom_pipelines}}},{{{_ingest.pipeline}}}" + } + } + ] + } + - do: ingest.put_pipeline: id: "traces@custom" @@ -97,42 +202,114 @@ setup: ] } + - do: + ingest.put_pipeline: + id: "traces-apm.rum@custom" + body: > + { + "processors": [ + { + "set" : { + "field": "custom_pipelines", + "value": "{{{custom_pipelines}}},{{{_ingest.pipeline}}}" + } + } + ] + } + --- -"Test metrics @custom ingest pipelines": +"Test logs @custom ingest pipelines": - do: bulk: - index: metrics-apm.app.svc1-testing refresh: true body: - - create: {} - - '{"@timestamp": "2017-06-22", "data_stream": {"type": "metrics", "dataset": "apm.app.svc1", "namespace": "testing"}}' + - create: {"_index": "logs-apm.app.svc1-testing"} + - '{"@timestamp": "2017-06-22", "data_stream": {"type": "logs", "dataset": "apm.app.svc1", "namespace": "testing"}}' + - create: {"_index": "logs-apm.error-testing"} + - '{"@timestamp": "2017-06-22", "data_stream": {"type": "logs", "dataset": "apm.error", "namespace": "testing"}}' - is_false: errors + - do: { search: { index: logs-apm.app.svc1-testing } } + - length: { hits.hits: 1 } + - match: + hits.hits.0._source.custom_pipelines: ",global@custom,logs@custom,logs-apm.app@custom" + + - do: { search: { index: logs-apm.error-testing } } + - length: { hits.hits: 1 } + - match: + hits.hits.0._source.custom_pipelines: ",global@custom,logs@custom,logs-apm.error@custom" + +--- +"Test metrics @custom 
ingest pipelines": - do: - search: - index: metrics-apm.app.svc1-testing + bulk: + refresh: true body: - fields: ["custom_pipelines"] + - create: {"_index": "metrics-apm.app.svc1-testing"} + - '{"@timestamp": "2017-06-22", "data_stream": {"type": "metrics", "dataset": "apm.app.svc1", "namespace": "testing"}}' + - create: {"_index": "metrics-apm.internal-testing"} + - '{"@timestamp": "2017-06-22", "data_stream": {"type": "metrics", "dataset": "apm.internal", "namespace": "testing"}}' + - create: {"_index": "metrics-apm.service_destination.1m-testing"} + - '{"@timestamp": "2017-06-22", "data_stream": {"type": "metrics", "dataset": "apm.service_destination.1m", "namespace": "testing"}}' + - create: {"_index": "metrics-apm.service_summary.1m-testing"} + - '{"@timestamp": "2017-06-22", "data_stream": {"type": "metrics", "dataset": "apm.service_summary.1m", "namespace": "testing"}}' + - create: {"_index": "metrics-apm.service_transaction.1m-testing"} + - '{"@timestamp": "2017-06-22", "data_stream": {"type": "metrics", "dataset": "apm.service_transaction.1m", "namespace": "testing"}}' + - create: {"_index": "metrics-apm.transaction.1m-testing"} + - '{"@timestamp": "2017-06-22", "data_stream": {"type": "metrics", "dataset": "apm.transaction.1m", "namespace": "testing"}}' + + - is_false: errors + + - do: { search: { index: metrics-apm.app.svc1-testing } } + - length: { hits.hits: 1 } + - match: + hits.hits.0._source.custom_pipelines: ",global@custom,metrics@custom,metrics-apm.app@custom" + + - do: { search: { index: metrics-apm.internal-testing } } + - length: { hits.hits: 1 } + - match: + hits.hits.0._source.custom_pipelines: ",global@custom,metrics@custom,metrics-apm.internal@custom" + + - do: { search: { index: metrics-apm.service_destination.1m-testing } } + - length: { hits.hits: 1 } + - match: + hits.hits.0._source.custom_pipelines: ",global@custom,metrics@custom,metrics-apm.service_destination@custom" + + - do: { search: { index: 
metrics-apm.service_summary.1m-testing } } + - length: { hits.hits: 1 } + - match: + hits.hits.0._source.custom_pipelines: ",global@custom,metrics@custom,metrics-apm.service_summary@custom" + + - do: { search: { index: metrics-apm.service_transaction.1m-testing } } + - length: { hits.hits: 1 } + - match: + hits.hits.0._source.custom_pipelines: ",global@custom,metrics@custom,metrics-apm.service_transaction@custom" + + - do: { search: { index: metrics-apm.transaction.1m-testing } } - length: { hits.hits: 1 } - match: - hits.hits.0._source.custom_pipelines: ",global@custom,metrics@custom,metrics-apm@custom,metrics-apm.app@custom" + hits.hits.0._source.custom_pipelines: ",global@custom,metrics@custom,metrics-apm.transaction@custom" --- "Test traces @custom ingest pipelines": - do: bulk: - index: traces-apm-testing refresh: true body: - - create: {} + - create: {"_index": "traces-apm-testing"} - '{"@timestamp": "2017-06-22", "data_stream": {"type": "traces", "dataset": "apm", "namespace": "testing"}}' + - create: {"_index": "traces-apm.rum-testing"} + - '{"@timestamp": "2017-06-22", "data_stream": {"type": "traces", "dataset": "apm.rum", "namespace": "testing"}}' - is_false: errors - - do: - search: - index: traces-apm-testing + - do: { search: { index: traces-apm-testing } } - length: { hits.hits: 1 } - match: hits.hits.0._source.custom_pipelines: ",global@custom,traces@custom,traces-apm@custom" + + - do: { search: { index: traces-apm.rum-testing } } + - length: { hits.hits: 1 } + - match: + hits.hits.0._source.custom_pipelines: ",global@custom,traces@custom,traces-apm.rum@custom" From e0d1b049480ff946208338761b08e20aa9bd5a3d Mon Sep 17 00:00:00 2001 From: Tim Grein Date: Wed, 8 May 2024 10:13:44 +0200 Subject: [PATCH 051/117] [Inference API] Fix AzureOpenAiEmbeddingsRequestTaskSettingsTests semantics (#108353) --- ...AzureOpenAiEmbeddingsRequestTaskSettingsTests.java | 11 +++++------ 1 file changed, 5 insertions(+), 6 deletions(-) diff --git 
a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureopenai/embeddings/AzureOpenAiEmbeddingsRequestTaskSettingsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureopenai/embeddings/AzureOpenAiEmbeddingsRequestTaskSettingsTests.java index 3ff73e0f23656..40ea42e923680 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureopenai/embeddings/AzureOpenAiEmbeddingsRequestTaskSettingsTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureopenai/embeddings/AzureOpenAiEmbeddingsRequestTaskSettingsTests.java @@ -11,7 +11,6 @@ import org.elasticsearch.core.Nullable; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.inference.services.openai.OpenAiServiceFields; -import org.elasticsearch.xpack.inference.services.openai.embeddings.OpenAiEmbeddingsRequestTaskSettings; import java.util.HashMap; import java.util.Map; @@ -21,24 +20,24 @@ public class AzureOpenAiEmbeddingsRequestTaskSettingsTests extends ESTestCase { public void testFromMap_ReturnsEmptySettings_WhenTheMapIsEmpty() { - var settings = OpenAiEmbeddingsRequestTaskSettings.fromMap(new HashMap<>(Map.of())); - assertThat(settings, is(OpenAiEmbeddingsRequestTaskSettings.EMPTY_SETTINGS)); + var settings = AzureOpenAiEmbeddingsRequestTaskSettings.fromMap(new HashMap<>(Map.of())); + assertThat(settings, is(AzureOpenAiEmbeddingsRequestTaskSettings.EMPTY_SETTINGS)); } public void testFromMap_ReturnsEmptySettings_WhenTheMapDoesNotContainTheFields() { - var settings = OpenAiEmbeddingsRequestTaskSettings.fromMap(new HashMap<>(Map.of("key", "model"))); + var settings = AzureOpenAiEmbeddingsRequestTaskSettings.fromMap(new HashMap<>(Map.of("key", "model"))); assertNull(settings.user()); } public void testFromMap_ReturnsUser() { - var settings = OpenAiEmbeddingsRequestTaskSettings.fromMap(new HashMap<>(Map.of(OpenAiServiceFields.USER, 
"user"))); + var settings = AzureOpenAiEmbeddingsRequestTaskSettings.fromMap(new HashMap<>(Map.of(OpenAiServiceFields.USER, "user"))); assertThat(settings.user(), is("user")); } public void testFromMap_WhenUserIsEmpty_ThrowsValidationException() { var exception = expectThrows( ValidationException.class, - () -> OpenAiEmbeddingsRequestTaskSettings.fromMap(new HashMap<>(Map.of(OpenAiServiceFields.USER, ""))) + () -> AzureOpenAiEmbeddingsRequestTaskSettings.fromMap(new HashMap<>(Map.of(OpenAiServiceFields.USER, ""))) ); assertThat(exception.getMessage(), containsString("[user] must be a non-empty string")); From 6f17735e1a7ee98f0a8e5cfb2cb35b8226a84cdd Mon Sep 17 00:00:00 2001 From: Tim Grein Date: Wed, 8 May 2024 10:14:12 +0200 Subject: [PATCH 052/117] [Inference API] Remove unused loggers in request task settings --- .../embeddings/AzureOpenAiEmbeddingsRequestTaskSettings.java | 3 --- .../openai/embeddings/OpenAiEmbeddingsRequestTaskSettings.java | 3 --- 2 files changed, 6 deletions(-) diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/embeddings/AzureOpenAiEmbeddingsRequestTaskSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/embeddings/AzureOpenAiEmbeddingsRequestTaskSettings.java index dc7012203a9c8..ffb8c844ac89f 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/embeddings/AzureOpenAiEmbeddingsRequestTaskSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/embeddings/AzureOpenAiEmbeddingsRequestTaskSettings.java @@ -7,8 +7,6 @@ package org.elasticsearch.xpack.inference.services.azureopenai.embeddings; -import org.apache.logging.log4j.LogManager; -import org.apache.logging.log4j.Logger; import org.elasticsearch.common.ValidationException; import org.elasticsearch.core.Nullable; import org.elasticsearch.inference.ModelConfigurations; @@ 
-25,7 +23,6 @@ * @param user a unique identifier representing your end-user, which can help OpenAI to monitor and detect abuse */ public record AzureOpenAiEmbeddingsRequestTaskSettings(@Nullable String user) { - private static final Logger logger = LogManager.getLogger(AzureOpenAiEmbeddingsRequestTaskSettings.class); public static final AzureOpenAiEmbeddingsRequestTaskSettings EMPTY_SETTINGS = new AzureOpenAiEmbeddingsRequestTaskSettings(null); diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsRequestTaskSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsRequestTaskSettings.java index 373704af37fcd..b3b94f7584563 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsRequestTaskSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsRequestTaskSettings.java @@ -7,8 +7,6 @@ package org.elasticsearch.xpack.inference.services.openai.embeddings; -import org.apache.logging.log4j.LogManager; -import org.apache.logging.log4j.Logger; import org.elasticsearch.common.ValidationException; import org.elasticsearch.core.Nullable; import org.elasticsearch.inference.ModelConfigurations; @@ -25,7 +23,6 @@ * @param user a unique identifier representing your end-user, which can help OpenAI to monitor and detect abuse */ public record OpenAiEmbeddingsRequestTaskSettings(@Nullable String user) { - private static final Logger logger = LogManager.getLogger(OpenAiEmbeddingsRequestTaskSettings.class); public static final OpenAiEmbeddingsRequestTaskSettings EMPTY_SETTINGS = new OpenAiEmbeddingsRequestTaskSettings(null); From 15be94a281e3273f829df6e976e8c8bd4ca69479 Mon Sep 17 00:00:00 2001 From: Tim Grein Date: Wed, 8 May 2024 10:14:35 +0200 Subject: [PATCH 053/117] [Inference API] 
Rename getRequestTaskSettingsMap to createRequestTaskSettingsMap (#108354) --- .../AzureOpenAiActionCreatorTests.java | 14 +++++++------- .../action/openai/OpenAiActionCreatorTests.java | 16 ++++++++-------- ...OpenAiEmbeddingsRequestTaskSettingsTests.java | 2 +- .../embeddings/OpenAiEmbeddingsModelTests.java | 4 ++-- ...OpenAiEmbeddingsRequestTaskSettingsTests.java | 2 +- 5 files changed, 19 insertions(+), 19 deletions(-) diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/azureopenai/AzureOpenAiActionCreatorTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/azureopenai/AzureOpenAiActionCreatorTests.java index 4bdba67beec17..129b39a2f7b33 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/azureopenai/AzureOpenAiActionCreatorTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/azureopenai/AzureOpenAiActionCreatorTests.java @@ -46,7 +46,7 @@ import static org.elasticsearch.xpack.inference.results.TextEmbeddingResultsTests.buildExpectation; import static org.elasticsearch.xpack.inference.services.ServiceComponentsTests.createWithEmptySettings; import static org.elasticsearch.xpack.inference.services.azureopenai.embeddings.AzureOpenAiEmbeddingsModelTests.createModel; -import static org.elasticsearch.xpack.inference.services.azureopenai.embeddings.AzureOpenAiEmbeddingsRequestTaskSettingsTests.getRequestTaskSettingsMap; +import static org.elasticsearch.xpack.inference.services.azureopenai.embeddings.AzureOpenAiEmbeddingsRequestTaskSettingsTests.createRequestTaskSettingsMap; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.is; @@ -103,7 +103,7 @@ public void testCreate_AzureOpenAiEmbeddingsModel() throws IOException { var model = createModel("resource", "deployment", "apiversion", "orig_user", 
"apikey", null, "id"); model.setUri(new URI(getUrl(webServer))); var actionCreator = new AzureOpenAiActionCreator(sender, createWithEmptySettings(threadPool)); - var overriddenTaskSettings = getRequestTaskSettingsMap("overridden_user"); + var overriddenTaskSettings = createRequestTaskSettingsMap("overridden_user"); var action = (AzureOpenAiEmbeddingsAction) actionCreator.create(model, overriddenTaskSettings); PlainActionFuture listener = new PlainActionFuture<>(); @@ -153,7 +153,7 @@ public void testCreate_AzureOpenAiEmbeddingsModel_WithoutUser() throws IOExcepti var model = createModel("resource", "deployment", "apiversion", null, "apikey", null, "id"); model.setUri(new URI(getUrl(webServer))); var actionCreator = new AzureOpenAiActionCreator(sender, createWithEmptySettings(threadPool)); - var overriddenTaskSettings = getRequestTaskSettingsMap(null); + var overriddenTaskSettings = createRequestTaskSettingsMap(null); var action = (AzureOpenAiEmbeddingsAction) actionCreator.create(model, overriddenTaskSettings); PlainActionFuture listener = new PlainActionFuture<>(); @@ -209,7 +209,7 @@ public void testCreate_AzureOpenAiEmbeddingsModel_FailsFromInvalidResponseFormat var model = createModel("resource", "deployment", "apiversion", null, "apikey", null, "id"); model.setUri(new URI(getUrl(webServer))); var actionCreator = new AzureOpenAiActionCreator(sender, createWithEmptySettings(threadPool)); - var overriddenTaskSettings = getRequestTaskSettingsMap("overridden_user"); + var overriddenTaskSettings = createRequestTaskSettingsMap("overridden_user"); var action = (AzureOpenAiEmbeddingsAction) actionCreator.create(model, overriddenTaskSettings); PlainActionFuture listener = new PlainActionFuture<>(); @@ -281,7 +281,7 @@ public void testExecute_ReturnsSuccessfulResponse_AfterTruncating_From413StatusC var model = createModel("resource", "deployment", "apiversion", null, "apikey", null, "id"); model.setUri(new URI(getUrl(webServer))); var actionCreator = new 
AzureOpenAiActionCreator(sender, createWithEmptySettings(threadPool)); - var overriddenTaskSettings = getRequestTaskSettingsMap("overridden_user"); + var overriddenTaskSettings = createRequestTaskSettingsMap("overridden_user"); var action = (AzureOpenAiEmbeddingsAction) actionCreator.create(model, overriddenTaskSettings); PlainActionFuture listener = new PlainActionFuture<>(); @@ -357,7 +357,7 @@ public void testExecute_ReturnsSuccessfulResponse_AfterTruncating_From400StatusC var model = createModel("resource", "deployment", "apiversion", null, "apikey", null, "id"); model.setUri(new URI(getUrl(webServer))); var actionCreator = new AzureOpenAiActionCreator(sender, createWithEmptySettings(threadPool)); - var overriddenTaskSettings = getRequestTaskSettingsMap("overridden_user"); + var overriddenTaskSettings = createRequestTaskSettingsMap("overridden_user"); var action = (AzureOpenAiEmbeddingsAction) actionCreator.create(model, overriddenTaskSettings); PlainActionFuture listener = new PlainActionFuture<>(); @@ -416,7 +416,7 @@ public void testExecute_TruncatesInputBeforeSending() throws IOException { var model = createModel("resource", "deployment", "apiversion", null, false, 1, null, null, "apikey", null, "id"); model.setUri(new URI(getUrl(webServer))); var actionCreator = new AzureOpenAiActionCreator(sender, createWithEmptySettings(threadPool)); - var overriddenTaskSettings = getRequestTaskSettingsMap("overridden_user"); + var overriddenTaskSettings = createRequestTaskSettingsMap("overridden_user"); var action = (AzureOpenAiEmbeddingsAction) actionCreator.create(model, overriddenTaskSettings); PlainActionFuture listener = new PlainActionFuture<>(); diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/openai/OpenAiActionCreatorTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/openai/OpenAiActionCreatorTests.java index 98eff32f72983..ff2448803d7ce 100644 --- 
a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/openai/OpenAiActionCreatorTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/openai/OpenAiActionCreatorTests.java @@ -45,7 +45,7 @@ import static org.elasticsearch.xpack.inference.services.openai.completion.OpenAiChatCompletionModelTests.createChatCompletionModel; import static org.elasticsearch.xpack.inference.services.openai.completion.OpenAiChatCompletionRequestTaskSettingsTests.getChatCompletionRequestTaskSettingsMap; import static org.elasticsearch.xpack.inference.services.openai.embeddings.OpenAiEmbeddingsModelTests.createModel; -import static org.elasticsearch.xpack.inference.services.openai.embeddings.OpenAiEmbeddingsRequestTaskSettingsTests.getRequestTaskSettingsMap; +import static org.elasticsearch.xpack.inference.services.openai.embeddings.OpenAiEmbeddingsRequestTaskSettingsTests.createRequestTaskSettingsMap; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.is; @@ -101,7 +101,7 @@ public void testCreate_OpenAiEmbeddingsModel() throws IOException { var model = createModel(getUrl(webServer), "org", "secret", "model", "user"); var actionCreator = new OpenAiActionCreator(sender, createWithEmptySettings(threadPool)); - var overriddenTaskSettings = getRequestTaskSettingsMap("overridden_user"); + var overriddenTaskSettings = createRequestTaskSettingsMap("overridden_user"); var action = actionCreator.create(model, overriddenTaskSettings); PlainActionFuture listener = new PlainActionFuture<>(); @@ -154,7 +154,7 @@ public void testCreate_OpenAiEmbeddingsModel_WithoutUser() throws IOException { var model = createModel(getUrl(webServer), "org", "secret", "model", null); var actionCreator = new OpenAiActionCreator(sender, createWithEmptySettings(threadPool)); - var overriddenTaskSettings = getRequestTaskSettingsMap(null); + var overriddenTaskSettings 
= createRequestTaskSettingsMap(null); var action = actionCreator.create(model, overriddenTaskSettings); PlainActionFuture listener = new PlainActionFuture<>(); @@ -206,7 +206,7 @@ public void testCreate_OpenAiEmbeddingsModel_WithoutOrganization() throws IOExce var model = createModel(getUrl(webServer), null, "secret", "model", null); var actionCreator = new OpenAiActionCreator(sender, createWithEmptySettings(threadPool)); - var overriddenTaskSettings = getRequestTaskSettingsMap("overridden_user"); + var overriddenTaskSettings = createRequestTaskSettingsMap("overridden_user"); var action = actionCreator.create(model, overriddenTaskSettings); PlainActionFuture listener = new PlainActionFuture<>(); @@ -265,7 +265,7 @@ public void testCreate_OpenAiEmbeddingsModel_FailsFromInvalidResponseFormat() th var model = createModel(getUrl(webServer), null, "secret", "model", null); var actionCreator = new OpenAiActionCreator(sender, createWithEmptySettings(threadPool)); - var overriddenTaskSettings = getRequestTaskSettingsMap("overridden_user"); + var overriddenTaskSettings = createRequestTaskSettingsMap("overridden_user"); var action = actionCreator.create(model, overriddenTaskSettings); PlainActionFuture listener = new PlainActionFuture<>(); @@ -595,7 +595,7 @@ public void testExecute_ReturnsSuccessfulResponse_AfterTruncating_From413StatusC var model = createModel(getUrl(webServer), "org", "secret", "model", "user"); var actionCreator = new OpenAiActionCreator(sender, createWithEmptySettings(threadPool)); - var overriddenTaskSettings = getRequestTaskSettingsMap("overridden_user"); + var overriddenTaskSettings = createRequestTaskSettingsMap("overridden_user"); var action = actionCreator.create(model, overriddenTaskSettings); PlainActionFuture listener = new PlainActionFuture<>(); @@ -678,7 +678,7 @@ public void testExecute_ReturnsSuccessfulResponse_AfterTruncating_From400StatusC var model = createModel(getUrl(webServer), "org", "secret", "model", "user"); var actionCreator = 
new OpenAiActionCreator(sender, createWithEmptySettings(threadPool)); - var overriddenTaskSettings = getRequestTaskSettingsMap("overridden_user"); + var overriddenTaskSettings = createRequestTaskSettingsMap("overridden_user"); var action = actionCreator.create(model, overriddenTaskSettings); PlainActionFuture listener = new PlainActionFuture<>(); @@ -746,7 +746,7 @@ public void testExecute_TruncatesInputBeforeSending() throws IOException { // truncated to 1 token = 3 characters var model = createModel(getUrl(webServer), "org", "secret", "model", "user", 1); var actionCreator = new OpenAiActionCreator(sender, createWithEmptySettings(threadPool)); - var overriddenTaskSettings = getRequestTaskSettingsMap("overridden_user"); + var overriddenTaskSettings = createRequestTaskSettingsMap("overridden_user"); var action = actionCreator.create(model, overriddenTaskSettings); PlainActionFuture listener = new PlainActionFuture<>(); diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureopenai/embeddings/AzureOpenAiEmbeddingsRequestTaskSettingsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureopenai/embeddings/AzureOpenAiEmbeddingsRequestTaskSettingsTests.java index 40ea42e923680..0aef2a97ee0a1 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureopenai/embeddings/AzureOpenAiEmbeddingsRequestTaskSettingsTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureopenai/embeddings/AzureOpenAiEmbeddingsRequestTaskSettingsTests.java @@ -43,7 +43,7 @@ public void testFromMap_WhenUserIsEmpty_ThrowsValidationException() { assertThat(exception.getMessage(), containsString("[user] must be a non-empty string")); } - public static Map getRequestTaskSettingsMap(@Nullable String user) { + public static Map createRequestTaskSettingsMap(@Nullable String user) { var map = new HashMap(); if (user != null) { diff 
--git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsModelTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsModelTests.java index b978e2563ece7..86b7f4421954d 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsModelTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsModelTests.java @@ -16,7 +16,7 @@ import java.util.Map; -import static org.elasticsearch.xpack.inference.services.openai.embeddings.OpenAiEmbeddingsRequestTaskSettingsTests.getRequestTaskSettingsMap; +import static org.elasticsearch.xpack.inference.services.openai.embeddings.OpenAiEmbeddingsRequestTaskSettingsTests.createRequestTaskSettingsMap; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.sameInstance; @@ -24,7 +24,7 @@ public class OpenAiEmbeddingsModelTests extends ESTestCase { public void testOverrideWith_OverridesUser() { var model = createModel("url", "org", "api_key", "model_name", null); - var requestTaskSettingsMap = getRequestTaskSettingsMap("user_override"); + var requestTaskSettingsMap = createRequestTaskSettingsMap("user_override"); var overriddenModel = OpenAiEmbeddingsModel.of(model, requestTaskSettingsMap); diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsRequestTaskSettingsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsRequestTaskSettingsTests.java index c95853e2d0128..6892e92d936e5 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsRequestTaskSettingsTests.java +++ 
b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsRequestTaskSettingsTests.java @@ -32,7 +32,7 @@ public void testFromMap_ReturnsUser() { assertThat(settings.user(), is("user")); } - public static Map getRequestTaskSettingsMap(@Nullable String user) { + public static Map createRequestTaskSettingsMap(@Nullable String user) { var map = new HashMap(); if (user != null) { From 7b62ea9d673240297fb2cad5e328daf9c1b28b26 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Slobodan=20Adamovi=C4=87?= Date: Wed, 8 May 2024 11:35:50 +0200 Subject: [PATCH 054/117] Introduce role description field (#107088) This commit introduces new `description` field to roles definitions. The description is optional and can have max 1000 characters. Role API: ```json POST /_security/role/viewer { "description": "Grants permission to view all indices.", "indices": [ { "names": [ "*" ], "privileges": [ "read" , "view_index_metadata"] } ] } ``` File-based role: ```yml viewer: description: 'Grants permission to view all indices.' 
indices: - names: [ '*' ] privileges: [ 'read', 'view_index_metadata' ] ``` --- docs/changelog/107088.yaml | 5 + .../org/elasticsearch/TransportVersions.java | 1 + .../security/action/role/PutRoleRequest.java | 12 +- .../action/role/PutRoleRequestBuilder.java | 8 +- .../role/RoleDescriptorRequestValidator.java | 7 + .../authc/CrossClusterAccessSubjectInfo.java | 5 +- .../core/security/authz/RoleDescriptor.java | 60 ++- .../authz/RoleDescriptorsIntersection.java | 5 +- .../KibanaOwnedReservedRoleDescriptors.java | 1 + .../authz/store/ReservedRolesStore.java | 2 + .../core/security/support/Validation.java | 10 + .../xpack/core/security/user/SystemUser.java | 1 + .../security/action/apikey/ApiKeyTests.java | 4 +- .../apikey/BulkUpdateApiKeyRequestTests.java | 3 +- .../apikey/CreateApiKeyRequestTests.java | 3 +- .../apikey/UpdateApiKeyRequestTests.java | 3 +- .../authc/AuthenticationTestHelper.java | 1 + .../CrossClusterAccessSubjectInfoTests.java | 2 +- .../authz/RoleDescriptorTestHelper.java | 314 ++++++++++++++ .../security/authz/RoleDescriptorTests.java | 408 +++++++----------- .../RoleDescriptorsIntersectionTests.java | 2 +- .../authz/permission/SimpleRoleTests.java | 5 +- .../authz/store/RoleReferenceTests.java | 4 +- .../RemoteClusterSecurityApiKeyRestIT.java | 1 + .../RemoteClusterSecurityBwcRestIT.java | 1 + .../RemoteClusterSecurityRestIT.java | 2 + .../SecurityOnTrialLicenseRestTestCase.java | 12 +- .../xpack/security/apikey/ApiKeyRestIT.java | 165 ++++++- ...CrossClusterAccessHeadersForCcsRestIT.java | 16 +- .../role/RoleWithDescriptionRestIT.java | 146 +++++++ ...RoleWithRemoteIndicesPrivilegesRestIT.java | 2 + .../security/authc/ApiKeyIntegTests.java | 9 +- .../authc/esnative/NativeRealmIntegTests.java | 3 + .../xpack/security/authc/ApiKeyService.java | 31 +- .../security/authz/store/FileRolesStore.java | 15 +- .../authz/store/NativeRolesStore.java | 24 +- .../support/SecuritySystemIndices.java | 107 +++-- .../test/TestSecurityClient.java | 2 +- 
.../security/authc/ApiKeyServiceTests.java | 68 +-- ...usterAccessAuthenticationServiceTests.java | 2 +- .../authc/CrossClusterAccessHeadersTests.java | 2 +- .../authz/AuthorizationServiceIntegTests.java | 13 +- .../xpack/security/authz/RBACEngineTests.java | 10 +- .../authz/store/CompositeRolesStoreTests.java | 63 ++- .../authz/store/FileRolesStoreTests.java | 32 +- .../authz/store/NativeRolesStoreTests.java | 61 +-- .../security/profile/ProfileServiceTests.java | 1 + .../apikey/RestGetApiKeyActionTests.java | 4 +- .../CacheInvalidatorRegistryTests.java | 4 +- .../support/SecurityIndexManagerTests.java | 23 +- .../SecurityMainIndexMappingVersionTests.java | 35 ++ ...curityServerTransportInterceptorTests.java | 2 +- .../security/authz/store/invalid_roles.yml | 3 + .../xpack/security/authz/store/roles.yml | 6 + .../rest-api-spec/test/roles/10_basic.yml | 23 +- .../ApiKeyBackwardsCompatibilityIT.java | 19 +- .../RolesBackwardsCompatibilityIT.java | 268 ++++++++++++ 57 files changed, 1611 insertions(+), 430 deletions(-) create mode 100644 docs/changelog/107088.yaml create mode 100644 x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/RoleDescriptorTestHelper.java create mode 100644 x-pack/plugin/security/qa/security-trial/src/javaRestTest/java/org/elasticsearch/xpack/security/role/RoleWithDescriptionRestIT.java create mode 100644 x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/support/SecurityMainIndexMappingVersionTests.java create mode 100644 x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/RolesBackwardsCompatibilityIT.java diff --git a/docs/changelog/107088.yaml b/docs/changelog/107088.yaml new file mode 100644 index 0000000000000..01a926f185eea --- /dev/null +++ b/docs/changelog/107088.yaml @@ -0,0 +1,5 @@ +pr: 107088 +summary: Introduce role description field +area: Authorization +type: enhancement +issues: [] diff --git a/server/src/main/java/org/elasticsearch/TransportVersions.java 
b/server/src/main/java/org/elasticsearch/TransportVersions.java index 78fe55a1df9b5..1cc7e47cddda3 100644 --- a/server/src/main/java/org/elasticsearch/TransportVersions.java +++ b/server/src/main/java/org/elasticsearch/TransportVersions.java @@ -194,6 +194,7 @@ static TransportVersion def(int id) { public static final TransportVersion SHUTDOWN_REQUEST_TIMEOUTS_FIX = def(8_651_00_0); public static final TransportVersion INDEXING_PRESSURE_REQUEST_REJECTIONS_COUNT = def(8_652_00_0); public static final TransportVersion ROLLUP_USAGE = def(8_653_00_0); + public static final TransportVersion SECURITY_ROLE_DESCRIPTION = def(8_654_00_0); /* * STOP! READ THIS FIRST! No, really, diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/role/PutRoleRequest.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/role/PutRoleRequest.java index 9c53c1483c9df..27f7c42d74018 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/role/PutRoleRequest.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/role/PutRoleRequest.java @@ -46,6 +46,7 @@ public class PutRoleRequest extends ActionRequest { private List remoteIndicesPrivileges = new ArrayList<>(); private RemoteClusterPermissions remoteClusterPermissions = RemoteClusterPermissions.NONE; private boolean restrictRequest = false; + private String description; public PutRoleRequest() {} @@ -63,6 +64,10 @@ public void name(String name) { this.name = name; } + public void description(String description) { + this.description = description; + } + public void cluster(String... 
clusterPrivilegesArray) { this.clusterPrivileges = clusterPrivilegesArray; } @@ -164,6 +169,10 @@ public String name() { return name; } + public String description() { + return description; + } + public String[] cluster() { return clusterPrivileges; } @@ -213,7 +222,8 @@ public RoleDescriptor roleDescriptor() { Collections.emptyMap(), remoteIndicesPrivileges.toArray(new RoleDescriptor.RemoteIndicesPrivileges[0]), remoteClusterPermissions, - null + null, + description ); } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/role/PutRoleRequestBuilder.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/role/PutRoleRequestBuilder.java index daf485814c799..486a347775264 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/role/PutRoleRequestBuilder.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/role/PutRoleRequestBuilder.java @@ -21,7 +21,7 @@ */ public class PutRoleRequestBuilder extends ActionRequestBuilder { - private static final RoleDescriptor.Parser ROLE_DESCRIPTOR_PARSER = RoleDescriptor.parserBuilder().build(); + private static final RoleDescriptor.Parser ROLE_DESCRIPTOR_PARSER = RoleDescriptor.parserBuilder().allowDescription(true).build(); public PutRoleRequestBuilder(ElasticsearchClient client) { super(client, PutRoleAction.INSTANCE, new PutRoleRequest()); @@ -43,6 +43,7 @@ public PutRoleRequestBuilder source(String name, BytesReference source, XContent request.addApplicationPrivileges(descriptor.getApplicationPrivileges()); request.runAs(descriptor.getRunAs()); request.metadata(descriptor.getMetadata()); + request.description(descriptor.getDescription()); return this; } @@ -51,6 +52,11 @@ public PutRoleRequestBuilder name(String name) { return this; } + public PutRoleRequestBuilder description(String description) { + request.description(description); + return this; + } + public PutRoleRequestBuilder 
cluster(String... cluster) { request.cluster(cluster); return this; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/role/RoleDescriptorRequestValidator.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/role/RoleDescriptorRequestValidator.java index 472faee97a707..ec8fcd1c421ef 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/role/RoleDescriptorRequestValidator.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/role/RoleDescriptorRequestValidator.java @@ -14,6 +14,7 @@ import org.elasticsearch.xpack.core.security.authz.privilege.IndexPrivilege; import org.elasticsearch.xpack.core.security.authz.restriction.WorkflowResolver; import org.elasticsearch.xpack.core.security.support.MetadataUtils; +import org.elasticsearch.xpack.core.security.support.Validation; import java.util.Arrays; import java.util.Set; @@ -102,6 +103,12 @@ public static ActionRequestValidationException validate( } } } + if (roleDescriptor.hasDescription()) { + Validation.Error error = Validation.Roles.validateRoleDescription(roleDescriptor.getDescription()); + if (error != null) { + validationException = addValidationError(error.toString(), validationException); + } + } return validationException; } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/CrossClusterAccessSubjectInfo.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/CrossClusterAccessSubjectInfo.java index f91df320bb92d..82bfc4b4a0dd4 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/CrossClusterAccessSubjectInfo.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/CrossClusterAccessSubjectInfo.java @@ -224,7 +224,10 @@ public static final class RoleDescriptorsBytes implements Writeable { public static final RoleDescriptorsBytes EMPTY = 
new RoleDescriptorsBytes(new BytesArray("{}")); - private static final RoleDescriptor.Parser ROLE_DESCRIPTOR_PARSER = RoleDescriptor.parserBuilder().build(); + private static final RoleDescriptor.Parser ROLE_DESCRIPTOR_PARSER = RoleDescriptor.parserBuilder() + .allowRestriction(true) + .allowDescription(true) + .build(); private final BytesReference rawBytes; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/RoleDescriptor.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/RoleDescriptor.java index caa5567364cd3..1dc293f929121 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/RoleDescriptor.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/RoleDescriptor.java @@ -49,6 +49,8 @@ import java.util.Map; import java.util.Objects; +import static org.elasticsearch.common.xcontent.XContentHelper.createParserNotCompressed; + /** * A holder for a Role that contains user-readable information about the Role * without containing the actual Role object. 
@@ -70,6 +72,7 @@ public class RoleDescriptor implements ToXContentObject, Writeable { private final Restriction restriction; private final Map metadata; private final Map transientMetadata; + private final String description; /** * Needed as a stop-gap measure because {@link FieldPermissionsCache} has state (settings) but we need to use one @@ -93,7 +96,7 @@ public RoleDescriptor( /** * @deprecated Use {@link #RoleDescriptor(String, String[], IndicesPrivileges[], ApplicationResourcePrivileges[], - * ConfigurableClusterPrivilege[], String[], Map, Map, RemoteIndicesPrivileges[], RemoteClusterPermissions, Restriction)} + * ConfigurableClusterPrivilege[], String[], Map, Map, RemoteIndicesPrivileges[], RemoteClusterPermissions, Restriction, String)} */ @Deprecated public RoleDescriptor( @@ -108,7 +111,7 @@ public RoleDescriptor( /** * @deprecated Use {@link #RoleDescriptor(String, String[], IndicesPrivileges[], ApplicationResourcePrivileges[], - * ConfigurableClusterPrivilege[], String[], Map, Map, RemoteIndicesPrivileges[], RemoteClusterPermissions, Restriction)} + * ConfigurableClusterPrivilege[], String[], Map, Map, RemoteIndicesPrivileges[], RemoteClusterPermissions, Restriction, String)} */ @Deprecated public RoleDescriptor( @@ -130,7 +133,8 @@ public RoleDescriptor( transientMetadata, RemoteIndicesPrivileges.NONE, RemoteClusterPermissions.NONE, - Restriction.NONE + Restriction.NONE, + null ); } @@ -155,7 +159,8 @@ public RoleDescriptor( transientMetadata, RemoteIndicesPrivileges.NONE, RemoteClusterPermissions.NONE, - Restriction.NONE + Restriction.NONE, + null ); } @@ -170,7 +175,8 @@ public RoleDescriptor( @Nullable Map transientMetadata, @Nullable RemoteIndicesPrivileges[] remoteIndicesPrivileges, @Nullable RemoteClusterPermissions remoteClusterPermissions, - @Nullable Restriction restriction + @Nullable Restriction restriction, + @Nullable String description ) { this.name = name; this.clusterPrivileges = clusterPrivileges != null ? 
clusterPrivileges : Strings.EMPTY_ARRAY; @@ -187,6 +193,7 @@ public RoleDescriptor( ? remoteClusterPermissions : RemoteClusterPermissions.NONE; this.restriction = restriction != null ? restriction : Restriction.NONE; + this.description = description != null ? description : ""; } public RoleDescriptor(StreamInput in) throws IOException { @@ -218,12 +225,21 @@ public RoleDescriptor(StreamInput in) throws IOException { } else { this.remoteClusterPermissions = RemoteClusterPermissions.NONE; } + if (in.getTransportVersion().onOrAfter(TransportVersions.SECURITY_ROLE_DESCRIPTION)) { + this.description = in.readOptionalString(); + } else { + this.description = ""; + } } public String getName() { return this.name; } + public String getDescription() { + return description; + } + public String[] getClusterPrivileges() { return this.clusterPrivileges; } @@ -272,6 +288,10 @@ public boolean hasRunAs() { return runAs.length != 0; } + public boolean hasDescription() { + return description.length() != 0; + } + public boolean hasUnsupportedPrivilegesInsideAPIKeyConnectedRemoteCluster() { return hasConfigurableClusterPrivileges() || hasApplicationPrivileges() @@ -338,6 +358,7 @@ public String toString() { sb.append(group.toString()).append(","); } sb.append("], restriction=").append(restriction); + sb.append(", description=").append(description); sb.append("]"); return sb.toString(); } @@ -358,7 +379,8 @@ public boolean equals(Object o) { if (Arrays.equals(runAs, that.runAs) == false) return false; if (Arrays.equals(remoteIndicesPrivileges, that.remoteIndicesPrivileges) == false) return false; if (remoteClusterPermissions.equals(that.remoteClusterPermissions) == false) return false; - return restriction.equals(that.restriction); + if (restriction.equals(that.restriction) == false) return false; + return Objects.equals(description, that.description); } @Override @@ -373,6 +395,7 @@ public int hashCode() { result = 31 * result + Arrays.hashCode(remoteIndicesPrivileges); result = 31 * 
result + remoteClusterPermissions.hashCode(); result = 31 * result + restriction.hashCode(); + result = 31 * result + Objects.hashCode(description); return result; } @@ -431,6 +454,9 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params, boolea if (hasRestriction()) { builder.field(Fields.RESTRICTION.getPreferredName(), restriction); } + if (hasDescription()) { + builder.field(Fields.DESCRIPTION.getPreferredName(), description); + } return builder.endObject(); } @@ -456,17 +482,22 @@ public void writeTo(StreamOutput out) throws IOException { if (out.getTransportVersion().onOrAfter(TransportVersions.ROLE_REMOTE_CLUSTER_PRIVS)) { remoteClusterPermissions.writeTo(out); } + if (out.getTransportVersion().onOrAfter(TransportVersions.SECURITY_ROLE_DESCRIPTION)) { + out.writeOptionalString(description); + } } public static Parser.Builder parserBuilder() { return new Parser.Builder(); } - public record Parser(boolean allow2xFormat, boolean allowRestriction) { + public record Parser(boolean allow2xFormat, boolean allowRestriction, boolean allowDescription) { public static final class Builder { + private boolean allow2xFormat = false; private boolean allowRestriction = false; + private boolean allowDescription = false; private Builder() {} @@ -480,8 +511,13 @@ public Builder allowRestriction(boolean allowRestriction) { return this; } + public Builder allowDescription(boolean allowDescription) { + this.allowDescription = allowDescription; + return this; + } + public Parser build() { - return new Parser(allow2xFormat, allowRestriction); + return new Parser(allow2xFormat, allowRestriction, allowDescription); } } @@ -565,6 +601,8 @@ public RoleDescriptor parse(String name, XContentParser parser) throws IOExcepti remoteClusterPermissions = parseRemoteCluster(name, parser); } else if (allowRestriction && Fields.RESTRICTION.match(currentFieldName, parser.getDeprecationHandler())) { restriction = Restriction.parse(name, parser); + } else if (allowDescription && 
Fields.DESCRIPTION.match(currentFieldName, parser.getDeprecationHandler())) { + description = parser.text(); } else if (Fields.TYPE.match(currentFieldName, parser.getDeprecationHandler())) { // don't need it } else { @@ -586,7 +624,8 @@ public RoleDescriptor parse(String name, XContentParser parser) throws IOExcepti null, remoteIndicesPrivileges, remoteClusterPermissions, - restriction + restriction, + description ); } @@ -686,7 +725,7 @@ public static PrivilegesToCheck parsePrivilegesToCheck( } private static XContentParser createParser(BytesReference source, XContentType xContentType) throws IOException { - return XContentHelper.createParserNotCompressed(LoggingDeprecationHandler.XCONTENT_PARSER_CONFIG, source, xContentType); + return createParserNotCompressed(LoggingDeprecationHandler.XCONTENT_PARSER_CONFIG, source, xContentType); } public static RoleDescriptor.IndicesPrivileges[] parseIndices(String roleName, XContentParser parser, boolean allow2xFormat) @@ -1821,5 +1860,6 @@ public interface Fields { ParseField TYPE = new ParseField("type"); ParseField RESTRICTION = new ParseField("restriction"); ParseField WORKFLOWS = new ParseField("workflows"); + ParseField DESCRIPTION = new ParseField("description"); } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/RoleDescriptorsIntersection.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/RoleDescriptorsIntersection.java index 446209b1d7ac3..38aa1bc106e99 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/RoleDescriptorsIntersection.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/RoleDescriptorsIntersection.java @@ -26,7 +26,10 @@ public record RoleDescriptorsIntersection(Collection> roleDe public static RoleDescriptorsIntersection EMPTY = new RoleDescriptorsIntersection(Collections.emptyList()); - private static final RoleDescriptor.Parser ROLE_DESCRIPTOR_PARSER = 
RoleDescriptor.parserBuilder().allowRestriction(true).build(); + private static final RoleDescriptor.Parser ROLE_DESCRIPTOR_PARSER = RoleDescriptor.parserBuilder() + .allowRestriction(true) + .allowDescription(true) + .build(); public RoleDescriptorsIntersection(RoleDescriptor roleDescriptor) { this(List.of(Set.of(roleDescriptor))); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/KibanaOwnedReservedRoleDescriptors.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/KibanaOwnedReservedRoleDescriptors.java index 8e4f9108c3b9c..49be4c5d466b2 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/KibanaOwnedReservedRoleDescriptors.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/KibanaOwnedReservedRoleDescriptors.java @@ -407,6 +407,7 @@ static RoleDescriptor kibanaSystem(String name) { getRemoteIndicesReadPrivileges("traces-apm.*"), getRemoteIndicesReadPrivileges("traces-apm-*") }, null, + null, null ); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStore.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStore.java index 0793578004a4e..dd8f34a60fa1f 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStore.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStore.java @@ -102,6 +102,7 @@ public class ReservedRolesStore implements BiConsumer, ActionListene new String[] { "*" } ) ), + null, null ); @@ -201,6 +202,7 @@ private static Map initializeReservedRoles() { getRemoteIndicesReadPrivileges("/metrics-(beats|elasticsearch|enterprisesearch|kibana|logstash).*/"), getRemoteIndicesReadPrivileges("metricbeat-*") }, null, + null, null ) ), diff --git 
a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/support/Validation.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/support/Validation.java index 3c482b82075fc..eaf59e001d098 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/support/Validation.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/support/Validation.java @@ -6,6 +6,7 @@ */ package org.elasticsearch.xpack.core.security.support; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.SecureString; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.xpack.core.security.authc.esnative.ClientReservedRealm; @@ -204,10 +205,19 @@ public static Error validatePassword(SecureString password) { public static final class Roles { + public static final int MAX_DESCRIPTION_LENGTH = 1000; + public static Error validateRoleName(String roleName, boolean allowReserved) { return validateRoleName(roleName, allowReserved, MAX_NAME_LENGTH); } + public static Error validateRoleDescription(String description) { + if (description != null && description.length() > MAX_DESCRIPTION_LENGTH) { + return new Error(Strings.format("Role description must be less than %s characters.", MAX_DESCRIPTION_LENGTH)); + } + return null; + } + static Error validateRoleName(String roleName, boolean allowReserved, int maxLength) { if (roleName == null) { return new Error("role name is missing"); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/user/SystemUser.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/user/SystemUser.java index 1413d7f87eaa1..a1b141d0aa0e8 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/user/SystemUser.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/user/SystemUser.java @@ -46,6 +46,7 @@ public class SystemUser extends 
InternalUser { null, null, null, + null, null ); diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/apikey/ApiKeyTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/apikey/ApiKeyTests.java index 710c4c5adaf67..1bad9bdfbfc77 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/apikey/ApiKeyTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/apikey/ApiKeyTests.java @@ -30,8 +30,8 @@ import java.util.Set; import java.util.concurrent.TimeUnit; -import static org.elasticsearch.xpack.core.security.authz.RoleDescriptorTests.randomCrossClusterAccessRoleDescriptor; -import static org.elasticsearch.xpack.core.security.authz.RoleDescriptorTests.randomUniquelyNamedRoleDescriptors; +import static org.elasticsearch.xpack.core.security.authz.RoleDescriptorTestHelper.randomCrossClusterAccessRoleDescriptor; +import static org.elasticsearch.xpack.core.security.authz.RoleDescriptorTestHelper.randomUniquelyNamedRoleDescriptors; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasKey; diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/apikey/BulkUpdateApiKeyRequestTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/apikey/BulkUpdateApiKeyRequestTests.java index 525c805f37929..78cf2020f26cc 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/apikey/BulkUpdateApiKeyRequestTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/apikey/BulkUpdateApiKeyRequestTests.java @@ -71,7 +71,8 @@ public void testRoleDescriptorValidation() { null, null, null, - new RoleDescriptor.Restriction(unknownWorkflows) + new RoleDescriptor.Restriction(unknownWorkflows), + null ) ), null, diff --git 
a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/apikey/CreateApiKeyRequestTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/apikey/CreateApiKeyRequestTests.java index 17298c04709a4..bb7778b821457 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/apikey/CreateApiKeyRequestTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/apikey/CreateApiKeyRequestTests.java @@ -106,7 +106,8 @@ public void testRoleDescriptorValidation() { null, null, null, - new RoleDescriptor.Restriction(unknownWorkflows) + new RoleDescriptor.Restriction(unknownWorkflows), + null ) ), null diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/apikey/UpdateApiKeyRequestTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/apikey/UpdateApiKeyRequestTests.java index 161e9419f9561..03706d928caad 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/apikey/UpdateApiKeyRequestTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/apikey/UpdateApiKeyRequestTests.java @@ -63,7 +63,8 @@ public void testRoleDescriptorValidation() { null, null, null, - new RoleDescriptor.Restriction(workflows.toArray(String[]::new)) + new RoleDescriptor.Restriction(workflows.toArray(String[]::new)), + null ) ), null, diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authc/AuthenticationTestHelper.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authc/AuthenticationTestHelper.java index b7495004e58e7..483b2426e6ad2 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authc/AuthenticationTestHelper.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authc/AuthenticationTestHelper.java @@ -314,6 +314,7 
@@ public static CrossClusterAccessSubjectInfo randomCrossClusterAccessSubjectInfo( null, null, null, + null, null ) ) diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authc/CrossClusterAccessSubjectInfoTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authc/CrossClusterAccessSubjectInfoTests.java index f22bf886357c4..ec20e6e5fa2ff 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authc/CrossClusterAccessSubjectInfoTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authc/CrossClusterAccessSubjectInfoTests.java @@ -31,7 +31,7 @@ import java.util.stream.Collectors; import static org.elasticsearch.xpack.core.security.authc.CrossClusterAccessSubjectInfo.CROSS_CLUSTER_ACCESS_SUBJECT_INFO_HEADER_KEY; -import static org.elasticsearch.xpack.core.security.authz.RoleDescriptorTests.randomUniquelyNamedRoleDescriptors; +import static org.elasticsearch.xpack.core.security.authz.RoleDescriptorTestHelper.randomUniquelyNamedRoleDescriptors; import static org.hamcrest.Matchers.anEmptyMap; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasKey; diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/RoleDescriptorTestHelper.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/RoleDescriptorTestHelper.java new file mode 100644 index 0000000000000..e6b9097a023cc --- /dev/null +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/RoleDescriptorTestHelper.java @@ -0,0 +1,314 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.core.security.authz; + +import org.elasticsearch.common.util.set.Sets; +import org.elasticsearch.core.Strings; +import org.elasticsearch.xpack.core.security.authz.permission.RemoteClusterPermissionGroup; +import org.elasticsearch.xpack.core.security.authz.permission.RemoteClusterPermissions; +import org.elasticsearch.xpack.core.security.authz.privilege.ClusterPrivilegeResolver; +import org.elasticsearch.xpack.core.security.authz.privilege.ConfigurableClusterPrivilege; +import org.elasticsearch.xpack.core.security.authz.privilege.ConfigurableClusterPrivileges; +import org.elasticsearch.xpack.core.security.authz.privilege.IndexPrivilege; +import org.elasticsearch.xpack.core.security.support.MetadataUtils; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Set; + +import static org.elasticsearch.test.ESTestCase.generateRandomStringArray; +import static org.elasticsearch.test.ESTestCase.randomAlphaOfLengthBetween; +import static org.elasticsearch.test.ESTestCase.randomBoolean; +import static org.elasticsearch.test.ESTestCase.randomInt; +import static org.elasticsearch.test.ESTestCase.randomIntBetween; +import static org.elasticsearch.test.ESTestCase.randomList; +import static org.elasticsearch.test.ESTestCase.randomNonEmptySubsetOf; +import static org.elasticsearch.test.ESTestCase.randomSubsetOf; +import static org.elasticsearch.test.ESTestCase.randomValueOtherThanMany; +import static org.elasticsearch.xpack.core.security.action.apikey.CrossClusterApiKeyRoleDescriptorBuilder.CCR_CLUSTER_PRIVILEGE_NAMES; +import static org.elasticsearch.xpack.core.security.action.apikey.CrossClusterApiKeyRoleDescriptorBuilder.CCR_INDICES_PRIVILEGE_NAMES; +import static org.elasticsearch.xpack.core.security.action.apikey.CrossClusterApiKeyRoleDescriptorBuilder.CCS_AND_CCR_CLUSTER_PRIVILEGE_NAMES; +import static 
org.elasticsearch.xpack.core.security.action.apikey.CrossClusterApiKeyRoleDescriptorBuilder.CCS_CLUSTER_PRIVILEGE_NAMES; +import static org.elasticsearch.xpack.core.security.action.apikey.CrossClusterApiKeyRoleDescriptorBuilder.CCS_INDICES_PRIVILEGE_NAMES; +import static org.elasticsearch.xpack.core.security.action.apikey.CrossClusterApiKeyRoleDescriptorBuilder.ROLE_DESCRIPTOR_NAME; + +public final class RoleDescriptorTestHelper { + + public static Builder builder() { + return new Builder(); + } + + public static RoleDescriptor randomRoleDescriptor() { + return builder().allowReservedMetadata(randomBoolean()) + .allowRemoteIndices(randomBoolean()) + .allowRestriction(randomBoolean()) + .allowDescription(randomBoolean()) + .allowRemoteClusters(randomBoolean()) + .build(); + } + + public static Map randomRoleDescriptorMetadata(boolean allowReservedMetadata) { + final Map metadata = new HashMap<>(); + while (randomBoolean()) { + String key = randomAlphaOfLengthBetween(4, 12); + if (allowReservedMetadata && randomBoolean()) { + key = MetadataUtils.RESERVED_PREFIX + key; + } + final Object value = randomBoolean() ? 
randomInt() : randomAlphaOfLengthBetween(3, 50); + metadata.put(key, value); + } + return metadata; + } + + public static ConfigurableClusterPrivilege[] randomClusterPrivileges() { + final ConfigurableClusterPrivilege[] configurableClusterPrivileges = switch (randomIntBetween(0, 4)) { + case 0 -> new ConfigurableClusterPrivilege[0]; + case 1 -> new ConfigurableClusterPrivilege[] { + new ConfigurableClusterPrivileges.ManageApplicationPrivileges( + Sets.newHashSet(generateRandomStringArray(3, randomIntBetween(4, 12), false, false)) + ) }; + case 2 -> new ConfigurableClusterPrivilege[] { + new ConfigurableClusterPrivileges.WriteProfileDataPrivileges( + Sets.newHashSet(generateRandomStringArray(3, randomIntBetween(4, 12), false, false)) + ) }; + case 3 -> new ConfigurableClusterPrivilege[] { + new ConfigurableClusterPrivileges.WriteProfileDataPrivileges( + Sets.newHashSet(generateRandomStringArray(3, randomIntBetween(4, 12), false, false)) + ), + new ConfigurableClusterPrivileges.ManageApplicationPrivileges( + Sets.newHashSet(generateRandomStringArray(3, randomIntBetween(4, 12), false, false)) + ) }; + case 4 -> new ConfigurableClusterPrivilege[] { + new ConfigurableClusterPrivileges.ManageApplicationPrivileges( + Sets.newHashSet(generateRandomStringArray(3, randomIntBetween(4, 12), false, false)) + ), + new ConfigurableClusterPrivileges.WriteProfileDataPrivileges( + Sets.newHashSet(generateRandomStringArray(3, randomIntBetween(4, 12), false, false)) + ) }; + default -> throw new IllegalStateException("Unexpected value"); + }; + return configurableClusterPrivileges; + } + + public static RoleDescriptor.ApplicationResourcePrivileges[] randomApplicationPrivileges() { + final RoleDescriptor.ApplicationResourcePrivileges[] applicationPrivileges = + new RoleDescriptor.ApplicationResourcePrivileges[randomIntBetween(0, 2)]; + for (int i = 0; i < applicationPrivileges.length; i++) { + final RoleDescriptor.ApplicationResourcePrivileges.Builder builder = 
RoleDescriptor.ApplicationResourcePrivileges.builder(); + builder.application("app" + randomAlphaOfLengthBetween(5, 12) + (randomBoolean() ? "*" : "")); + if (randomBoolean()) { + builder.privileges("*"); + } else { + builder.privileges(generateRandomStringArray(6, randomIntBetween(4, 8), false, false)); + } + if (randomBoolean()) { + builder.resources("*"); + } else { + builder.resources(generateRandomStringArray(6, randomIntBetween(4, 8), false, false)); + } + applicationPrivileges[i] = builder.build(); + } + return applicationPrivileges; + } + + public static RoleDescriptor.RemoteIndicesPrivileges[] randomRemoteIndicesPrivileges(int min, int max) { + return randomRemoteIndicesPrivileges(min, max, Set.of()); + } + + public static RoleDescriptor.RemoteIndicesPrivileges[] randomRemoteIndicesPrivileges(int min, int max, Set excludedPrivileges) { + final RoleDescriptor.IndicesPrivileges[] innerIndexPrivileges = randomIndicesPrivileges(min, max, excludedPrivileges); + final RoleDescriptor.RemoteIndicesPrivileges[] remoteIndexPrivileges = + new RoleDescriptor.RemoteIndicesPrivileges[innerIndexPrivileges.length]; + for (int i = 0; i < remoteIndexPrivileges.length; i++) { + remoteIndexPrivileges[i] = new RoleDescriptor.RemoteIndicesPrivileges( + innerIndexPrivileges[i], + generateRandomStringArray(5, randomIntBetween(3, 9), false, false) + ); + } + return remoteIndexPrivileges; + } + + public static RoleDescriptor.IndicesPrivileges[] randomIndicesPrivileges(int min, int max) { + return randomIndicesPrivileges(min, max, Set.of()); + } + + public static RoleDescriptor.IndicesPrivileges[] randomIndicesPrivileges(int min, int max, Set excludedPrivileges) { + final RoleDescriptor.IndicesPrivileges[] indexPrivileges = new RoleDescriptor.IndicesPrivileges[randomIntBetween(min, max)]; + for (int i = 0; i < indexPrivileges.length; i++) { + indexPrivileges[i] = randomIndicesPrivilegesBuilder(excludedPrivileges).build(); + } + return indexPrivileges; + } + + public static 
RoleDescriptor.IndicesPrivileges.Builder randomIndicesPrivilegesBuilder() { + return randomIndicesPrivilegesBuilder(Set.of()); + } + + private static RoleDescriptor.IndicesPrivileges.Builder randomIndicesPrivilegesBuilder(Set excludedPrivileges) { + final Set candidatePrivilegesNames = Sets.difference(IndexPrivilege.names(), excludedPrivileges); + assert false == candidatePrivilegesNames.isEmpty() : "no candidate privilege names to random from"; + final RoleDescriptor.IndicesPrivileges.Builder builder = RoleDescriptor.IndicesPrivileges.builder() + .privileges(randomSubsetOf(randomIntBetween(1, 4), candidatePrivilegesNames)) + .indices(generateRandomStringArray(5, randomIntBetween(3, 9), false, false)) + .allowRestrictedIndices(randomBoolean()); + randomDlsFls(builder); + return builder; + } + + private static void randomDlsFls(RoleDescriptor.IndicesPrivileges.Builder builder) { + if (randomBoolean()) { + builder.query(randomBoolean() ? Strings.format(""" + { "term": { "%s" : "%s" } } + """, randomAlphaOfLengthBetween(3, 24), randomAlphaOfLengthBetween(3, 24)) : """ + { "match_all": {} } + """); + } + if (randomBoolean()) { + if (randomBoolean()) { + builder.grantedFields("*"); + builder.deniedFields(generateRandomStringArray(4, randomIntBetween(4, 9), false, false)); + } else { + builder.grantedFields(generateRandomStringArray(4, randomIntBetween(4, 9), false, false)); + } + } + } + + public static RoleDescriptor randomCrossClusterAccessRoleDescriptor() { + final int searchSize = randomIntBetween(0, 3); + final int replicationSize = randomIntBetween(searchSize == 0 ? 
1 : 0, 3); + assert searchSize + replicationSize > 0; + + final String[] clusterPrivileges; + if (searchSize > 0 && replicationSize > 0) { + clusterPrivileges = CCS_AND_CCR_CLUSTER_PRIVILEGE_NAMES; + } else if (searchSize > 0) { + clusterPrivileges = CCS_CLUSTER_PRIVILEGE_NAMES; + } else { + clusterPrivileges = CCR_CLUSTER_PRIVILEGE_NAMES; + } + + final List indexPrivileges = new ArrayList<>(); + for (int i = 0; i < searchSize; i++) { + final RoleDescriptor.IndicesPrivileges.Builder builder = RoleDescriptor.IndicesPrivileges.builder() + .privileges(CCS_INDICES_PRIVILEGE_NAMES) + .indices(generateRandomStringArray(5, randomIntBetween(3, 9), false, false)) + .allowRestrictedIndices(randomBoolean()); + randomDlsFls(builder); + indexPrivileges.add(builder.build()); + } + for (int i = 0; i < replicationSize; i++) { + final RoleDescriptor.IndicesPrivileges.Builder builder = RoleDescriptor.IndicesPrivileges.builder() + .privileges(CCR_INDICES_PRIVILEGE_NAMES) + .indices(generateRandomStringArray(5, randomIntBetween(3, 9), false, false)) + .allowRestrictedIndices(randomBoolean()); + indexPrivileges.add(builder.build()); + } + + return new RoleDescriptor( + ROLE_DESCRIPTOR_NAME, + clusterPrivileges, + indexPrivileges.toArray(RoleDescriptor.IndicesPrivileges[]::new), + null + ); + } + + public static List randomUniquelyNamedRoleDescriptors(int minSize, int maxSize) { + return randomValueOtherThanMany( + roleDescriptors -> roleDescriptors.stream().map(RoleDescriptor::getName).distinct().count() != roleDescriptors.size(), + () -> randomList(minSize, maxSize, () -> builder().build()) + ); + } + + public static RemoteClusterPermissions randomRemoteClusterPermissions(int maxGroups) { + final RemoteClusterPermissions remoteClusterPermissions = new RemoteClusterPermissions(); + final String[] supportedPermissions = RemoteClusterPermissions.getSupportedRemoteClusterPermissions().toArray(new String[0]); + for (int i = 0; i < maxGroups; i++) { + remoteClusterPermissions.addGroup( + 
new RemoteClusterPermissionGroup( + randomNonEmptySubsetOf(Arrays.asList(supportedPermissions)).toArray(new String[0]), + generateRandomStringArray(5, randomIntBetween(3, 9), false, false) + ) + ); + } + return remoteClusterPermissions; + } + + public static class Builder { + + private boolean allowReservedMetadata = false; + private boolean allowRemoteIndices = false; + private boolean alwaysIncludeRemoteIndices = false; + private boolean allowRestriction = false; + private boolean allowDescription = false; + private boolean allowRemoteClusters = false; + + public Builder() {} + + public Builder allowReservedMetadata(boolean allowReservedMetadata) { + this.allowReservedMetadata = allowReservedMetadata; + return this; + } + + public Builder alwaysIncludeRemoteIndices() { + this.alwaysIncludeRemoteIndices = true; + return this; + } + + public Builder allowRemoteIndices(boolean allowRemoteIndices) { + this.allowRemoteIndices = allowRemoteIndices; + return this; + } + + public Builder allowRestriction(boolean allowRestriction) { + this.allowRestriction = allowRestriction; + return this; + } + + public Builder allowDescription(boolean allowDescription) { + this.allowDescription = allowDescription; + return this; + } + + public Builder allowRemoteClusters(boolean allowRemoteClusters) { + this.allowRemoteClusters = allowRemoteClusters; + return this; + } + + public RoleDescriptor build() { + final RoleDescriptor.RemoteIndicesPrivileges[] remoteIndexPrivileges; + if (alwaysIncludeRemoteIndices || (allowRemoteIndices && randomBoolean())) { + remoteIndexPrivileges = randomRemoteIndicesPrivileges(0, 3); + } else { + remoteIndexPrivileges = null; + } + + RemoteClusterPermissions remoteClusters = RemoteClusterPermissions.NONE; + if (allowRemoteClusters && randomBoolean()) { + remoteClusters = randomRemoteClusterPermissions(randomIntBetween(1, 5)); + } + + return new RoleDescriptor( + randomAlphaOfLengthBetween(3, 90), + 
randomSubsetOf(ClusterPrivilegeResolver.names()).toArray(String[]::new), + randomIndicesPrivileges(0, 3), + randomApplicationPrivileges(), + randomClusterPrivileges(), + generateRandomStringArray(5, randomIntBetween(2, 8), false, true), + randomRoleDescriptorMetadata(allowReservedMetadata), + Map.of(), + remoteIndexPrivileges, + remoteClusters, + allowRestriction ? RoleRestrictionTests.randomWorkflowsRestriction(1, 3) : null, + allowDescription ? randomAlphaOfLengthBetween(0, 20) : null + ); + } + } +} diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/RoleDescriptorTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/RoleDescriptorTests.java index a3a590dc5a4d4..d7b9f9ddd5b58 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/RoleDescriptorTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/RoleDescriptorTests.java @@ -31,33 +31,24 @@ import org.elasticsearch.xpack.core.XPackClientPlugin; import org.elasticsearch.xpack.core.security.authz.RoleDescriptor.ApplicationResourcePrivileges; import org.elasticsearch.xpack.core.security.authz.permission.FieldPermissionsCache; -import org.elasticsearch.xpack.core.security.authz.permission.RemoteClusterPermissionGroup; import org.elasticsearch.xpack.core.security.authz.permission.RemoteClusterPermissions; -import org.elasticsearch.xpack.core.security.authz.privilege.ClusterPrivilegeResolver; import org.elasticsearch.xpack.core.security.authz.privilege.ConfigurableClusterPrivilege; import org.elasticsearch.xpack.core.security.authz.privilege.ConfigurableClusterPrivileges; -import org.elasticsearch.xpack.core.security.authz.privilege.IndexPrivilege; -import org.elasticsearch.xpack.core.security.support.MetadataUtils; import org.hamcrest.Matchers; import java.io.IOException; -import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import 
java.util.HashMap; import java.util.LinkedHashSet; import java.util.List; import java.util.Map; -import java.util.Set; import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; -import static org.elasticsearch.xpack.core.security.action.apikey.CrossClusterApiKeyRoleDescriptorBuilder.CCR_CLUSTER_PRIVILEGE_NAMES; -import static org.elasticsearch.xpack.core.security.action.apikey.CrossClusterApiKeyRoleDescriptorBuilder.CCR_INDICES_PRIVILEGE_NAMES; -import static org.elasticsearch.xpack.core.security.action.apikey.CrossClusterApiKeyRoleDescriptorBuilder.CCS_AND_CCR_CLUSTER_PRIVILEGE_NAMES; -import static org.elasticsearch.xpack.core.security.action.apikey.CrossClusterApiKeyRoleDescriptorBuilder.CCS_CLUSTER_PRIVILEGE_NAMES; -import static org.elasticsearch.xpack.core.security.action.apikey.CrossClusterApiKeyRoleDescriptorBuilder.CCS_INDICES_PRIVILEGE_NAMES; -import static org.elasticsearch.xpack.core.security.action.apikey.CrossClusterApiKeyRoleDescriptorBuilder.ROLE_DESCRIPTOR_NAME; import static org.elasticsearch.xpack.core.security.authz.RoleDescriptor.WORKFLOWS_RESTRICTION_VERSION; +import static org.elasticsearch.xpack.core.security.authz.RoleDescriptorTestHelper.randomIndicesPrivileges; +import static org.elasticsearch.xpack.core.security.authz.RoleDescriptorTestHelper.randomIndicesPrivilegesBuilder; +import static org.elasticsearch.xpack.core.security.authz.RoleDescriptorTestHelper.randomRemoteClusterPermissions; import static org.hamcrest.Matchers.arrayContaining; import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.containsString; @@ -156,17 +147,18 @@ public void testToString() { + ", field_security=[grant=[body,title], except=null], query={\"match_all\": {}}],]" + ", applicationPrivileges=[ApplicationResourcePrivileges[application=my_app, privileges=[read,write], resources=[*]],]" + ", runAs=[sudo], metadata=[{}], remoteIndicesPrivileges=[], remoteClusterPrivileges=[]" - + ", 
restriction=Restriction[workflows=[]]]" + + ", restriction=Restriction[workflows=[]], description=]" ) ); } public void testToXContentRoundtrip() throws Exception { - final RoleDescriptor descriptor = randomRoleDescriptor(true, true, true, true); + final RoleDescriptor descriptor = RoleDescriptorTestHelper.randomRoleDescriptor(); final XContentType xContentType = randomFrom(XContentType.values()); final BytesReference xContentValue = toShuffledXContent(descriptor, xContentType, ToXContent.EMPTY_PARAMS, false); final RoleDescriptor parsed = RoleDescriptor.parserBuilder() .allowRestriction(true) + .allowDescription(true) .build() .parse(descriptor.getName(), xContentValue, xContentType); assertThat(parsed, equalTo(descriptor)); @@ -268,9 +260,14 @@ public void testParse() throws Exception { ], "restriction":{ "workflows": ["search_application_query"] - } + }, + "description": "Lorem ipsum dolor sit amet, consectetur adipiscing elit." }"""; - rd = RoleDescriptor.parserBuilder().allowRestriction(true).build().parse("test", new BytesArray(q), XContentType.JSON); + rd = RoleDescriptor.parserBuilder() + .allowRestriction(true) + .allowDescription(true) + .build() + .parse("test", new BytesArray(q), XContentType.JSON); assertEquals("test", rd.getName()); assertArrayEquals(new String[] { "a", "b" }, rd.getClusterPrivileges()); assertEquals(3, rd.getIndicesPrivileges().length); @@ -594,16 +591,18 @@ public void testSerializationForCurrentVersion() throws Exception { final boolean canIncludeRemoteIndices = version.onOrAfter(TransportVersions.V_8_8_0); final boolean canIncludeRemoteClusters = version.onOrAfter(TransportVersions.ROLE_REMOTE_CLUSTER_PRIVS); final boolean canIncludeWorkflows = version.onOrAfter(WORKFLOWS_RESTRICTION_VERSION); + final boolean canIncludeDescription = version.onOrAfter(TransportVersions.SECURITY_ROLE_DESCRIPTION); logger.info("Testing serialization with version {}", version); BytesStreamOutput output = new BytesStreamOutput(); 
output.setTransportVersion(version); - final RoleDescriptor descriptor = randomRoleDescriptor( - true, - canIncludeRemoteIndices, - canIncludeWorkflows, - canIncludeRemoteClusters - ); + final RoleDescriptor descriptor = RoleDescriptorTestHelper.builder() + .allowReservedMetadata(true) + .allowRemoteIndices(canIncludeRemoteIndices) + .allowRestriction(canIncludeWorkflows) + .allowDescription(canIncludeDescription) + .allowRemoteClusters(canIncludeRemoteClusters) + .build(); descriptor.writeTo(output); final NamedWriteableRegistry registry = new NamedWriteableRegistry(new XPackClientPlugin().getNamedWriteables()); StreamInput streamInput = new NamedWriteableAwareStreamInput( @@ -626,7 +625,14 @@ public void testSerializationWithRemoteIndicesWithElderVersion() throws IOExcept final BytesStreamOutput output = new BytesStreamOutput(); output.setTransportVersion(version); - final RoleDescriptor descriptor = randomRoleDescriptor(true, true, false, false); + final RoleDescriptor descriptor = RoleDescriptorTestHelper.builder() + .allowReservedMetadata(true) + .allowRemoteIndices(true) + .allowRestriction(false) + .allowDescription(false) + .allowRemoteClusters(false) + .build(); + descriptor.writeTo(output); final NamedWriteableRegistry registry = new NamedWriteableRegistry(new XPackClientPlugin().getNamedWriteables()); StreamInput streamInput = new NamedWriteableAwareStreamInput( @@ -650,7 +656,8 @@ public void testSerializationWithRemoteIndicesWithElderVersion() throws IOExcept descriptor.getTransientMetadata(), null, null, - descriptor.getRestriction() + descriptor.getRestriction(), + descriptor.getDescription() ) ) ); @@ -671,7 +678,13 @@ public void testSerializationWithRemoteClusterWithElderVersion() throws IOExcept final BytesStreamOutput output = new BytesStreamOutput(); output.setTransportVersion(version); - final RoleDescriptor descriptor = randomRoleDescriptor(true, false, false, true); + final RoleDescriptor descriptor = RoleDescriptorTestHelper.builder() + 
.allowReservedMetadata(true) + .allowRemoteIndices(false) + .allowRestriction(false) + .allowDescription(false) + .allowRemoteClusters(true) + .build(); descriptor.writeTo(output); final NamedWriteableRegistry registry = new NamedWriteableRegistry(new XPackClientPlugin().getNamedWriteables()); StreamInput streamInput = new NamedWriteableAwareStreamInput( @@ -693,9 +706,10 @@ public void testSerializationWithRemoteClusterWithElderVersion() throws IOExcept descriptor.getRunAs(), descriptor.getMetadata(), descriptor.getTransientMetadata(), + descriptor.getRemoteIndicesPrivileges(), null, - descriptor.getRemoteClusterPermissions(), - descriptor.getRestriction() + descriptor.getRestriction(), + descriptor.getDescription() ) ) ); @@ -715,7 +729,13 @@ public void testSerializationWithWorkflowsRestrictionAndUnsupportedVersions() th final BytesStreamOutput output = new BytesStreamOutput(); output.setTransportVersion(version); - final RoleDescriptor descriptor = randomRoleDescriptor(true, false, true, false); + final RoleDescriptor descriptor = RoleDescriptorTestHelper.builder() + .allowReservedMetadata(true) + .allowRemoteIndices(false) + .allowRestriction(true) + .allowDescription(false) + .allowRemoteClusters(false) + .build(); descriptor.writeTo(output); final NamedWriteableRegistry registry = new NamedWriteableRegistry(new XPackClientPlugin().getNamedWriteables()); StreamInput streamInput = new NamedWriteableAwareStreamInput( @@ -739,7 +759,8 @@ public void testSerializationWithWorkflowsRestrictionAndUnsupportedVersions() th descriptor.getTransientMetadata(), descriptor.getRemoteIndicesPrivileges(), descriptor.getRemoteClusterPermissions(), - null + null, + descriptor.getDescription() ) ) ); @@ -793,6 +814,96 @@ public void testParseRoleWithRestrictionWhenAllowRestrictionIsTrue() throws IOEx assertThat(role.getRestriction().getWorkflows(), arrayContaining("search_application")); } + public void testSerializationWithDescriptionAndUnsupportedVersions() throws IOException 
{ + final TransportVersion versionBeforeRoleDescription = TransportVersionUtils.getPreviousVersion( + TransportVersions.SECURITY_ROLE_DESCRIPTION + ); + final TransportVersion version = TransportVersionUtils.randomVersionBetween( + random(), + TransportVersions.V_7_17_0, + versionBeforeRoleDescription + ); + final BytesStreamOutput output = new BytesStreamOutput(); + output.setTransportVersion(version); + + final RoleDescriptor descriptor = RoleDescriptorTestHelper.builder().allowDescription(true).build(); + descriptor.writeTo(output); + final NamedWriteableRegistry registry = new NamedWriteableRegistry(new XPackClientPlugin().getNamedWriteables()); + StreamInput streamInput = new NamedWriteableAwareStreamInput( + ByteBufferStreamInput.wrap(BytesReference.toBytes(output.bytes())), + registry + ); + streamInput.setTransportVersion(version); + final RoleDescriptor serialized = new RoleDescriptor(streamInput); + if (descriptor.hasDescription()) { + assertThat( + serialized, + equalTo( + new RoleDescriptor( + descriptor.getName(), + descriptor.getClusterPrivileges(), + descriptor.getIndicesPrivileges(), + descriptor.getApplicationPrivileges(), + descriptor.getConditionalClusterPrivileges(), + descriptor.getRunAs(), + descriptor.getMetadata(), + descriptor.getTransientMetadata(), + descriptor.getRemoteIndicesPrivileges(), + descriptor.getRemoteClusterPermissions(), + descriptor.getRestriction(), + null + ) + ) + ); + } else { + assertThat(descriptor, equalTo(serialized)); + } + } + + public void testParseRoleWithDescriptionFailsWhenAllowDescriptionIsFalse() { + final String json = """ + { + "description": "Lorem ipsum", + "cluster": ["manage_security"] + }"""; + final ElasticsearchParseException e = expectThrows( + ElasticsearchParseException.class, + () -> RoleDescriptor.parserBuilder() + .allowRestriction(randomBoolean()) + .allowDescription(false) + .build() + .parse( + "test_role_with_description", + XContentHelper.createParser(XContentParserConfiguration.EMPTY, new 
BytesArray(json), XContentType.JSON) + ) + ); + assertThat( + e, + TestMatchers.throwableWithMessage( + containsString("failed to parse role [test_role_with_description]. unexpected field [description]") + ) + ); + } + + public void testParseRoleWithDescriptionWhenAllowDescriptionIsTrue() throws IOException { + final String json = """ + { + "description": "Lorem ipsum", + "cluster": ["manage_security"] + }"""; + RoleDescriptor role = RoleDescriptor.parserBuilder() + .allowRestriction(randomBoolean()) + .allowDescription(true) + .build() + .parse( + "test_role_with_description", + XContentHelper.createParser(XContentParserConfiguration.EMPTY, new BytesArray(json), XContentType.JSON) + ); + assertThat(role.getName(), equalTo("test_role_with_description")); + assertThat(role.getDescription(), equalTo("Lorem ipsum")); + assertThat(role.getClusterPrivileges(), arrayContaining("manage_security")); + } + public void testParseEmptyQuery() throws Exception { String json = """ { @@ -1148,6 +1259,7 @@ public void testIsEmpty() { new HashMap<>(), new RoleDescriptor.RemoteIndicesPrivileges[0], RemoteClusterPermissions.NONE, + null, null ).isEmpty() ); @@ -1189,7 +1301,8 @@ public void testIsEmpty() { : new RoleDescriptor.RemoteIndicesPrivileges[] { RoleDescriptor.RemoteIndicesPrivileges.builder("rmt").indices("idx").privileges("foo").build() }, booleans.get(7) ? null : randomRemoteClusterPermissions(5), - booleans.get(8) ? null : RoleRestrictionTests.randomWorkflowsRestriction(1, 2) + booleans.get(8) ? 
null : RoleRestrictionTests.randomWorkflowsRestriction(1, 2), + randomAlphaOfLengthBetween(0, 20) ); if (booleans.stream().anyMatch(e -> e.equals(false))) { @@ -1212,11 +1325,18 @@ public void testHasPrivilegesOtherThanIndex() { null, null, null, + null, null ).hasUnsupportedPrivilegesInsideAPIKeyConnectedRemoteCluster(), is(false) ); - final RoleDescriptor roleDescriptor = randomRoleDescriptor(); + final RoleDescriptor roleDescriptor = RoleDescriptorTestHelper.builder() + .allowReservedMetadata(true) + .allowRemoteIndices(true) + .allowRestriction(true) + .allowDescription(true) + .allowRemoteClusters(true) + .build(); final boolean expected = roleDescriptor.hasClusterPrivileges() || roleDescriptor.hasConfigurableClusterPrivileges() || roleDescriptor.hasApplicationPrivileges() @@ -1225,234 +1345,8 @@ public void testHasPrivilegesOtherThanIndex() { assertThat(roleDescriptor.hasUnsupportedPrivilegesInsideAPIKeyConnectedRemoteCluster(), equalTo(expected)); } - public static List randomUniquelyNamedRoleDescriptors(int minSize, int maxSize) { - return randomValueOtherThanMany( - roleDescriptors -> roleDescriptors.stream().map(RoleDescriptor::getName).distinct().count() != roleDescriptors.size(), - () -> randomList(minSize, maxSize, () -> randomRoleDescriptor(false)) - ); - } - - public static RoleDescriptor randomRoleDescriptor() { - return randomRoleDescriptor(true); - } - - public static RoleDescriptor randomRoleDescriptor(boolean allowReservedMetadata) { - return randomRoleDescriptor(allowReservedMetadata, false, false, false); - } - - public static RoleDescriptor randomRoleDescriptor( - boolean allowReservedMetadata, - boolean allowRemoteIndices, - boolean allowWorkflows, - boolean allowRemoteClusters - ) { - final RoleDescriptor.RemoteIndicesPrivileges[] remoteIndexPrivileges; - if (false == allowRemoteIndices || randomBoolean()) { - remoteIndexPrivileges = null; - } else { - remoteIndexPrivileges = randomRemoteIndicesPrivileges(0, 3); - } - - 
RemoteClusterPermissions remoteClusters = RemoteClusterPermissions.NONE; - if (allowRemoteClusters && randomBoolean()) { - randomRemoteClusterPermissions(randomIntBetween(1, 5)); - } - - return new RoleDescriptor( - randomAlphaOfLengthBetween(3, 90), - randomSubsetOf(ClusterPrivilegeResolver.names()).toArray(String[]::new), - randomIndicesPrivileges(0, 3), - randomApplicationPrivileges(), - randomClusterPrivileges(), - generateRandomStringArray(5, randomIntBetween(2, 8), false, true), - randomRoleDescriptorMetadata(allowReservedMetadata), - Map.of(), - remoteIndexPrivileges, - remoteClusters, - allowWorkflows ? RoleRestrictionTests.randomWorkflowsRestriction(1, 3) : null - ); - } - - public static Map randomRoleDescriptorMetadata(boolean allowReservedMetadata) { - final Map metadata = new HashMap<>(); - while (randomBoolean()) { - String key = randomAlphaOfLengthBetween(4, 12); - if (allowReservedMetadata && randomBoolean()) { - key = MetadataUtils.RESERVED_PREFIX + key; - } - final Object value = randomBoolean() ? 
randomInt() : randomAlphaOfLengthBetween(3, 50); - metadata.put(key, value); - } - return metadata; - } - - public static ConfigurableClusterPrivilege[] randomClusterPrivileges() { - final ConfigurableClusterPrivilege[] configurableClusterPrivileges = switch (randomIntBetween(0, 4)) { - case 0 -> new ConfigurableClusterPrivilege[0]; - case 1 -> new ConfigurableClusterPrivilege[] { - new ConfigurableClusterPrivileges.ManageApplicationPrivileges( - Sets.newHashSet(generateRandomStringArray(3, randomIntBetween(4, 12), false, false)) - ) }; - case 2 -> new ConfigurableClusterPrivilege[] { - new ConfigurableClusterPrivileges.WriteProfileDataPrivileges( - Sets.newHashSet(generateRandomStringArray(3, randomIntBetween(4, 12), false, false)) - ) }; - case 3 -> new ConfigurableClusterPrivilege[] { - new ConfigurableClusterPrivileges.WriteProfileDataPrivileges( - Sets.newHashSet(generateRandomStringArray(3, randomIntBetween(4, 12), false, false)) - ), - new ConfigurableClusterPrivileges.ManageApplicationPrivileges( - Sets.newHashSet(generateRandomStringArray(3, randomIntBetween(4, 12), false, false)) - ) }; - case 4 -> new ConfigurableClusterPrivilege[] { - new ConfigurableClusterPrivileges.ManageApplicationPrivileges( - Sets.newHashSet(generateRandomStringArray(3, randomIntBetween(4, 12), false, false)) - ), - new ConfigurableClusterPrivileges.WriteProfileDataPrivileges( - Sets.newHashSet(generateRandomStringArray(3, randomIntBetween(4, 12), false, false)) - ) }; - default -> throw new IllegalStateException("Unexpected value"); - }; - return configurableClusterPrivileges; - } - - public static ApplicationResourcePrivileges[] randomApplicationPrivileges() { - final ApplicationResourcePrivileges[] applicationPrivileges = new ApplicationResourcePrivileges[randomIntBetween(0, 2)]; - for (int i = 0; i < applicationPrivileges.length; i++) { - final ApplicationResourcePrivileges.Builder builder = ApplicationResourcePrivileges.builder(); - builder.application("app" + 
randomAlphaOfLengthBetween(5, 12) + (randomBoolean() ? "*" : "")); - if (randomBoolean()) { - builder.privileges("*"); - } else { - builder.privileges(generateRandomStringArray(6, randomIntBetween(4, 8), false, false)); - } - if (randomBoolean()) { - builder.resources("*"); - } else { - builder.resources(generateRandomStringArray(6, randomIntBetween(4, 8), false, false)); - } - applicationPrivileges[i] = builder.build(); - } - return applicationPrivileges; - } - - public static RemoteClusterPermissions randomRemoteClusterPermissions(int maxGroups) { - final RemoteClusterPermissions remoteClusterPermissions = new RemoteClusterPermissions(); - final String[] supportedPermissions = RemoteClusterPermissions.getSupportedRemoteClusterPermissions().toArray(new String[0]); - for (int i = 0; i < maxGroups; i++) { - remoteClusterPermissions.addGroup( - new RemoteClusterPermissionGroup( - randomNonEmptySubsetOf(Arrays.asList(supportedPermissions)).toArray(new String[0]), - generateRandomStringArray(5, randomIntBetween(3, 9), false, false) - ) - ); - } - return remoteClusterPermissions; - } - - public static RoleDescriptor.RemoteIndicesPrivileges[] randomRemoteIndicesPrivileges(int min, int max) { - return randomRemoteIndicesPrivileges(min, max, Set.of()); - } - - public static RoleDescriptor.RemoteIndicesPrivileges[] randomRemoteIndicesPrivileges(int min, int max, Set excludedPrivileges) { - final RoleDescriptor.IndicesPrivileges[] innerIndexPrivileges = randomIndicesPrivileges(min, max, excludedPrivileges); - final RoleDescriptor.RemoteIndicesPrivileges[] remoteIndexPrivileges = - new RoleDescriptor.RemoteIndicesPrivileges[innerIndexPrivileges.length]; - for (int i = 0; i < remoteIndexPrivileges.length; i++) { - remoteIndexPrivileges[i] = new RoleDescriptor.RemoteIndicesPrivileges( - innerIndexPrivileges[i], - generateRandomStringArray(5, randomIntBetween(3, 9), false, false) - ); - } - return remoteIndexPrivileges; - } - - public static RoleDescriptor.IndicesPrivileges[] 
randomIndicesPrivileges(int min, int max) { - return randomIndicesPrivileges(min, max, Set.of()); - } - - public static RoleDescriptor.IndicesPrivileges[] randomIndicesPrivileges(int min, int max, Set excludedPrivileges) { - final RoleDescriptor.IndicesPrivileges[] indexPrivileges = new RoleDescriptor.IndicesPrivileges[randomIntBetween(min, max)]; - for (int i = 0; i < indexPrivileges.length; i++) { - indexPrivileges[i] = randomIndicesPrivilegesBuilder(excludedPrivileges).build(); - } - return indexPrivileges; - } - - private static RoleDescriptor.IndicesPrivileges.Builder randomIndicesPrivilegesBuilder() { - return randomIndicesPrivilegesBuilder(Set.of()); - } - - private static RoleDescriptor.IndicesPrivileges.Builder randomIndicesPrivilegesBuilder(Set excludedPrivileges) { - final Set candidatePrivilegesNames = Sets.difference(IndexPrivilege.names(), excludedPrivileges); - assert false == candidatePrivilegesNames.isEmpty() : "no candidate privilege names to random from"; - final RoleDescriptor.IndicesPrivileges.Builder builder = RoleDescriptor.IndicesPrivileges.builder() - .privileges(randomSubsetOf(randomIntBetween(1, 4), candidatePrivilegesNames)) - .indices(generateRandomStringArray(5, randomIntBetween(3, 9), false, false)) - .allowRestrictedIndices(randomBoolean()); - randomDlsFls(builder); - return builder; - } - - private static void randomDlsFls(RoleDescriptor.IndicesPrivileges.Builder builder) { - if (randomBoolean()) { - builder.query( - randomBoolean() - ? 
"{ \"term\": { \"" + randomAlphaOfLengthBetween(3, 24) + "\" : \"" + randomAlphaOfLengthBetween(3, 24) + "\" }" - : "{ \"match_all\": {} }" - ); - } - if (randomBoolean()) { - if (randomBoolean()) { - builder.grantedFields("*"); - builder.deniedFields(generateRandomStringArray(4, randomIntBetween(4, 9), false, false)); - } else { - builder.grantedFields(generateRandomStringArray(4, randomIntBetween(4, 9), false, false)); - } - } - } - private static void resetFieldPermssionsCache() { RoleDescriptor.setFieldPermissionsCache(new FieldPermissionsCache(Settings.EMPTY)); } - public static RoleDescriptor randomCrossClusterAccessRoleDescriptor() { - final int searchSize = randomIntBetween(0, 3); - final int replicationSize = randomIntBetween(searchSize == 0 ? 1 : 0, 3); - assert searchSize + replicationSize > 0; - - final String[] clusterPrivileges; - if (searchSize > 0 && replicationSize > 0) { - clusterPrivileges = CCS_AND_CCR_CLUSTER_PRIVILEGE_NAMES; - } else if (searchSize > 0) { - clusterPrivileges = CCS_CLUSTER_PRIVILEGE_NAMES; - } else { - clusterPrivileges = CCR_CLUSTER_PRIVILEGE_NAMES; - } - - final List indexPrivileges = new ArrayList<>(); - for (int i = 0; i < searchSize; i++) { - final RoleDescriptor.IndicesPrivileges.Builder builder = RoleDescriptor.IndicesPrivileges.builder() - .privileges(CCS_INDICES_PRIVILEGE_NAMES) - .indices(generateRandomStringArray(5, randomIntBetween(3, 9), false, false)) - .allowRestrictedIndices(randomBoolean()); - randomDlsFls(builder); - indexPrivileges.add(builder.build()); - } - for (int i = 0; i < replicationSize; i++) { - final RoleDescriptor.IndicesPrivileges.Builder builder = RoleDescriptor.IndicesPrivileges.builder() - .privileges(CCR_INDICES_PRIVILEGE_NAMES) - .indices(generateRandomStringArray(5, randomIntBetween(3, 9), false, false)) - .allowRestrictedIndices(randomBoolean()); - indexPrivileges.add(builder.build()); - } - - return new RoleDescriptor( - ROLE_DESCRIPTOR_NAME, - clusterPrivileges, - 
indexPrivileges.toArray(RoleDescriptor.IndicesPrivileges[]::new), - null - ); - } - } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/RoleDescriptorsIntersectionTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/RoleDescriptorsIntersectionTests.java index 6f8691fbb317a..a892e8b864e6e 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/RoleDescriptorsIntersectionTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/RoleDescriptorsIntersectionTests.java @@ -27,7 +27,7 @@ import java.util.List; import java.util.Set; -import static org.elasticsearch.xpack.core.security.authz.RoleDescriptorTests.randomUniquelyNamedRoleDescriptors; +import static org.elasticsearch.xpack.core.security.authz.RoleDescriptorTestHelper.randomUniquelyNamedRoleDescriptors; import static org.hamcrest.Matchers.equalTo; public class RoleDescriptorsIntersectionTests extends ESTestCase { diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/permission/SimpleRoleTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/permission/SimpleRoleTests.java index 0c15256d1951e..5401be220fe8b 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/permission/SimpleRoleTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/permission/SimpleRoleTests.java @@ -276,7 +276,8 @@ public void testForWorkflowWithRestriction() { null, null, null, - new RoleDescriptor.Restriction(new String[] { WorkflowResolver.SEARCH_APPLICATION_QUERY_WORKFLOW.name() }) + new RoleDescriptor.Restriction(new String[] { WorkflowResolver.SEARCH_APPLICATION_QUERY_WORKFLOW.name() }), + null ), new FieldPermissionsCache(Settings.EMPTY), RESTRICTED_INDICES, @@ -290,7 +291,7 @@ public void testForWorkflowWithRestriction() { public void 
testForWorkflowWithoutRestriction() { final SimpleRole role = Role.buildFromRoleDescriptor( - new RoleDescriptor("r1", null, null, null, null, null, null, null, null, null, null), + new RoleDescriptor("r1", null, null, null, null, null, null, null, null, null, null, null), new FieldPermissionsCache(Settings.EMPTY), RESTRICTED_INDICES, List.of() diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/store/RoleReferenceTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/store/RoleReferenceTests.java index 554c82dfa44fb..74c8e6addf243 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/store/RoleReferenceTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/store/RoleReferenceTests.java @@ -13,7 +13,7 @@ import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.core.security.authc.CrossClusterAccessSubjectInfo; import org.elasticsearch.xpack.core.security.authz.RoleDescriptor; -import org.elasticsearch.xpack.core.security.authz.RoleDescriptorTests; +import org.elasticsearch.xpack.core.security.authz.RoleDescriptorTestHelper; import java.util.Set; import java.util.concurrent.ExecutionException; @@ -82,7 +82,7 @@ public void testCrossClusterAccessRoleReference() { } public void testFixedRoleReference() throws ExecutionException, InterruptedException { - final RoleDescriptor roleDescriptor = RoleDescriptorTests.randomRoleDescriptor(); + final RoleDescriptor roleDescriptor = RoleDescriptorTestHelper.randomRoleDescriptor(); final String source = "source"; final var fixedRoleReference = new RoleReference.FixedRoleReference(roleDescriptor, source); diff --git a/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityApiKeyRestIT.java 
b/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityApiKeyRestIT.java index 2f3ece56b3281..3154a5ac0cd7d 100644 --- a/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityApiKeyRestIT.java +++ b/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityApiKeyRestIT.java @@ -108,6 +108,7 @@ public void testCrossClusterSearchWithApiKey() throws Exception { final var putRoleRequest = new Request("PUT", "/_security/role/" + REMOTE_SEARCH_ROLE); putRoleRequest.setJsonEntity(""" { + "description": "role with privileges for remote and local indices", "cluster": ["manage_own_api_key"], "indices": [ { diff --git a/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityBwcRestIT.java b/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityBwcRestIT.java index ccf9d66a5bc21..cbf735c66462c 100644 --- a/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityBwcRestIT.java +++ b/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityBwcRestIT.java @@ -99,6 +99,7 @@ public void testBwcWithLegacyCrossClusterSearch() throws Exception { final var putRoleRequest = new Request("PUT", "/_security/role/" + REMOTE_SEARCH_ROLE); putRoleRequest.setJsonEntity(""" { + "description": "This description should not be sent to remote clusters.", "cluster": ["manage_own_api_key"], "indices": [ { diff --git a/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityRestIT.java 
b/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityRestIT.java index c6bb6e10f0537..6eb49ec1ab8ae 100644 --- a/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityRestIT.java +++ b/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityRestIT.java @@ -187,6 +187,7 @@ public void testCrossClusterSearch() throws Exception { final var putRoleRequest = new Request("PUT", "/_security/role/" + REMOTE_SEARCH_ROLE); putRoleRequest.setJsonEntity(""" { + "description": "Role with privileges for remote and local indices.", "indices": [ { "names": ["local_index"], @@ -293,6 +294,7 @@ public void testCrossClusterSearch() throws Exception { final var putLocalSearchRoleRequest = new Request("PUT", "/_security/role/local_search"); putLocalSearchRoleRequest.setJsonEntity(Strings.format(""" { + "description": "Role with privileges for searching local only indices.", "indices": [ { "names": ["local_index"], diff --git a/x-pack/plugin/security/qa/security-trial/src/javaRestTest/java/org/elasticsearch/xpack/security/SecurityOnTrialLicenseRestTestCase.java b/x-pack/plugin/security/qa/security-trial/src/javaRestTest/java/org/elasticsearch/xpack/security/SecurityOnTrialLicenseRestTestCase.java index 3ad250c4e6037..bdbd5c659c479 100644 --- a/x-pack/plugin/security/qa/security-trial/src/javaRestTest/java/org/elasticsearch/xpack/security/SecurityOnTrialLicenseRestTestCase.java +++ b/x-pack/plugin/security/qa/security-trial/src/javaRestTest/java/org/elasticsearch/xpack/security/SecurityOnTrialLicenseRestTestCase.java @@ -87,8 +87,16 @@ protected void createRole(String name, Collection clusterPrivileges) thr final RoleDescriptor role = new RoleDescriptor( name, clusterPrivileges.toArray(String[]::new), - new RoleDescriptor.IndicesPrivileges[0], - new String[0] + null, + null, + null, + 
null, + null, + null, + null, + null, + null, + null ); getSecurityClient().putRole(role); } diff --git a/x-pack/plugin/security/qa/security-trial/src/javaRestTest/java/org/elasticsearch/xpack/security/apikey/ApiKeyRestIT.java b/x-pack/plugin/security/qa/security-trial/src/javaRestTest/java/org/elasticsearch/xpack/security/apikey/ApiKeyRestIT.java index fc522b0213eeb..1b0d3397daa90 100644 --- a/x-pack/plugin/security/qa/security-trial/src/javaRestTest/java/org/elasticsearch/xpack/security/apikey/ApiKeyRestIT.java +++ b/x-pack/plugin/security/qa/security-trial/src/javaRestTest/java/org/elasticsearch/xpack/security/apikey/ApiKeyRestIT.java @@ -97,7 +97,7 @@ public void createUsers() throws IOException { createUser(MANAGE_API_KEY_USER, END_USER_PASSWORD, List.of("manage_api_key_role")); createRole("manage_api_key_role", Set.of("manage_api_key")); createUser(MANAGE_SECURITY_USER, END_USER_PASSWORD, List.of("manage_security_role")); - createRole("manage_security_role", Set.of("manage_security")); + createRoleWithDescription("manage_security_role", Set.of("manage_security"), "Allows all security-related operations!"); } @After @@ -1681,6 +1681,134 @@ public void testCrossClusterApiKeyAccessInResponseCanBeUsedAsInputForUpdate() th assertThat(updateResponse4.evaluate("updated"), is(false)); } + public void testUserRoleDescriptionsGetsRemoved() throws IOException { + // Creating API key whose owner's role (limited-by) has description should succeed, + // and limited-by role descriptor should be filtered to remove description. 
+ { + final Request createRestApiKeyRequest = new Request("POST", "_security/api_key"); + setUserForRequest(createRestApiKeyRequest, MANAGE_SECURITY_USER, END_USER_PASSWORD); + createRestApiKeyRequest.setJsonEntity(""" + { + "name": "my-api-key" + } + """); + final ObjectPath createRestApiKeyResponse = assertOKAndCreateObjectPath(client().performRequest(createRestApiKeyRequest)); + String apiKeyId = createRestApiKeyResponse.evaluate("id"); + + ObjectPath fetchResponse = assertOKAndCreateObjectPath(fetchApiKeyWithUser(MANAGE_SECURITY_USER, apiKeyId, true)); + assertThat(fetchResponse.evaluate("api_keys.0.id"), equalTo(apiKeyId)); + assertThat(fetchResponse.evaluate("api_keys.0.role_descriptors"), equalTo(Map.of())); + assertThat(fetchResponse.evaluate("api_keys.0.limited_by.0.manage_security_role.description"), is(nullValue())); + + // Updating should behave the same as create. No limited-by role description should be persisted. + final Request updateRequest = new Request("PUT", "_security/api_key/" + apiKeyId); + setUserForRequest(updateRequest, MANAGE_SECURITY_USER, END_USER_PASSWORD); + updateRequest.setJsonEntity(""" + { + "role_descriptors":{ + "my-role": { + "cluster": ["all"] + } + } + } + """); + assertThat(responseAsMap(client().performRequest(updateRequest)).get("updated"), equalTo(true)); + fetchResponse = assertOKAndCreateObjectPath(fetchApiKeyWithUser(MANAGE_SECURITY_USER, apiKeyId, true)); + assertThat(fetchResponse.evaluate("api_keys.0.id"), equalTo(apiKeyId)); + assertThat(fetchResponse.evaluate("api_keys.0.limited_by.0.manage_security_role.description"), is(nullValue())); + assertThat(fetchResponse.evaluate("api_keys.0.role_descriptors.my-role.cluster"), equalTo(List.of("all"))); + } + { + final Request grantApiKeyRequest = new Request("POST", "_security/api_key/grant"); + grantApiKeyRequest.setJsonEntity(Strings.format(""" + { + "grant_type":"password", + "username":"%s", + "password":"%s", + "api_key":{ + "name":"my-granted-api-key", + 
"role_descriptors":{ + "my-role":{ + "cluster":["all"] + } + } + } + }""", MANAGE_SECURITY_USER, END_USER_PASSWORD)); + String grantedApiKeyId = assertOKAndCreateObjectPath(adminClient().performRequest(grantApiKeyRequest)).evaluate("id"); + var fetchResponse = assertOKAndCreateObjectPath(fetchApiKeyWithUser(MANAGE_SECURITY_USER, grantedApiKeyId, true)); + assertThat(fetchResponse.evaluate("api_keys.0.id"), equalTo(grantedApiKeyId)); + assertThat(fetchResponse.evaluate("api_keys.0.name"), equalTo("my-granted-api-key")); + assertThat(fetchResponse.evaluate("api_keys.0.limited_by.0.manage_security_role.description"), is(nullValue())); + assertThat(fetchResponse.evaluate("api_keys.0.role_descriptors.my-role.cluster"), equalTo(List.of("all"))); + } + } + + public void testCreatingApiKeyWithRoleDescriptionFails() throws IOException { + final Request createRequest = new Request("POST", "_security/api_key"); + setUserForRequest(createRequest, MANAGE_SECURITY_USER, END_USER_PASSWORD); + createRequest.setJsonEntity(""" + { + "name": "my-api-key" + } + """); + final ObjectPath createResponse = assertOKAndCreateObjectPath(client().performRequest(createRequest)); + String apiKeyId = createResponse.evaluate("id"); + + final Request updateRequest = new Request("PUT", "_security/api_key/" + apiKeyId); + setUserForRequest(updateRequest, MANAGE_SECURITY_USER, END_USER_PASSWORD); + updateRequest.setJsonEntity(""" + { + "role_descriptors":{ + "my-role": { + "description": "This description should not be allowed!" + } + } + } + """); + + var e = expectThrows(ResponseException.class, () -> client().performRequest(updateRequest)); + assertThat(e.getMessage(), containsString("failed to parse role [my-role]. 
unexpected field [description]")); + } + + public void testUpdatingApiKeyWithRoleDescriptionFails() throws IOException { + final Request createRestApiKeyRequest = new Request("POST", "_security/api_key"); + setUserForRequest(createRestApiKeyRequest, MANAGE_SECURITY_USER, END_USER_PASSWORD); + createRestApiKeyRequest.setJsonEntity(""" + { + "name": "my-api-key", + "role_descriptors":{ + "my-role": { + "description": "This description should not be allowed!" + } + } + } + """); + + var e = expectThrows(ResponseException.class, () -> client().performRequest(createRestApiKeyRequest)); + assertThat(e.getMessage(), containsString("failed to parse role [my-role]. unexpected field [description]")); + } + + public void testGrantApiKeyWithRoleDescriptionFails() throws Exception { + final Request grantApiKeyRequest = new Request("POST", "_security/api_key/grant"); + setUserForRequest(grantApiKeyRequest, MANAGE_SECURITY_USER, END_USER_PASSWORD); + grantApiKeyRequest.setJsonEntity(Strings.format(""" + { + "grant_type":"password", + "username":"%s", + "password":"%s", + "api_key":{ + "name":"my-granted-api-key", + "role_descriptors":{ + "my-role":{ + "description": "This role does not grant any permissions!" + } + } + } + }""", MANAGE_SECURITY_USER, END_USER_PASSWORD.toString())); + var e = expectThrows(ResponseException.class, () -> client().performRequest(grantApiKeyRequest)); + assertThat(e.getMessage(), containsString("failed to parse role [my-role]. 
unexpected field [description]")); + } + public void testWorkflowsRestrictionSupportForApiKeys() throws IOException { final Request createApiKeyRequest = new Request("POST", "_security/api_key"); createApiKeyRequest.setJsonEntity(""" @@ -1916,6 +2044,22 @@ private Response fetchApiKey(String apiKeyId) throws IOException { return getApiKeyResponse; } + private Response fetchApiKeyWithUser(String username, String apiKeyId, boolean withLimitedBy) throws IOException { + final Request fetchRequest; + if (randomBoolean()) { + fetchRequest = new Request("GET", "/_security/api_key"); + fetchRequest.addParameter("id", apiKeyId); + fetchRequest.addParameter("with_limited_by", String.valueOf(withLimitedBy)); + } else { + fetchRequest = new Request("GET", "/_security/_query/api_key"); + fetchRequest.addParameter("with_limited_by", String.valueOf(withLimitedBy)); + fetchRequest.setJsonEntity(Strings.format(""" + { "query": { "ids": { "values": ["%s"] } } }""", apiKeyId)); + } + setUserForRequest(fetchRequest, username, END_USER_PASSWORD); + return client().performRequest(fetchRequest); + } + private void assertBadCreateCrossClusterApiKeyRequest(String body, String expectedErrorMessage) throws IOException { final Request createRequest = new Request("POST", "/_security/cross_cluster/api_key"); createRequest.setJsonEntity(body); @@ -2178,8 +2322,27 @@ private void createRole(String name, Collection localClusterPrivileges, remoteIndicesClusterAliases ) ), + null, null ); getSecurityClient().putRole(role); } + + protected void createRoleWithDescription(String name, Collection clusterPrivileges, String description) throws IOException { + final RoleDescriptor role = new RoleDescriptor( + name, + clusterPrivileges.toArray(String[]::new), + null, + null, + null, + null, + null, + null, + null, + null, + null, + description + ); + getSecurityClient().putRole(role); + } } diff --git 
a/x-pack/plugin/security/qa/security-trial/src/javaRestTest/java/org/elasticsearch/xpack/security/crossclusteraccess/CrossClusterAccessHeadersForCcsRestIT.java b/x-pack/plugin/security/qa/security-trial/src/javaRestTest/java/org/elasticsearch/xpack/security/crossclusteraccess/CrossClusterAccessHeadersForCcsRestIT.java index 9402d627063c4..500b796e62660 100644 --- a/x-pack/plugin/security/qa/security-trial/src/javaRestTest/java/org/elasticsearch/xpack/security/crossclusteraccess/CrossClusterAccessHeadersForCcsRestIT.java +++ b/x-pack/plugin/security/qa/security-trial/src/javaRestTest/java/org/elasticsearch/xpack/security/crossclusteraccess/CrossClusterAccessHeadersForCcsRestIT.java @@ -102,6 +102,7 @@ public void setup() throws IOException { final var putRoleRequest = new Request("PUT", "/_security/role/" + REMOTE_SEARCH_ROLE); putRoleRequest.setJsonEntity(""" { + "description": "Grants permission for searching local and remote clusters.", "cluster": ["manage_api_key"], "indices": [ { @@ -204,7 +205,8 @@ public void testCrossClusterAccessHeadersSentSingleRemote() throws Exception { null, null, null, - null + null, + null // description is never sent across clusters ) ) ); @@ -273,6 +275,7 @@ public void testCrossClusterAccessHeadersSentMultipleRemotes() throws Exception null, null, null, + null, null ) ) @@ -305,6 +308,7 @@ public void testCrossClusterAccessHeadersSentMultipleRemotes() throws Exception null, null, null, + null, null ) ) @@ -418,6 +422,7 @@ public void testApiKeyCrossClusterAccessHeadersSentMultipleRemotes() throws Exce null, null, null, + null, null ) ), @@ -438,6 +443,7 @@ public void testApiKeyCrossClusterAccessHeadersSentMultipleRemotes() throws Exce null, null, null, + null, null ) ) @@ -466,6 +472,7 @@ public void testApiKeyCrossClusterAccessHeadersSentMultipleRemotes() throws Exce null, null, null, + null, null ) ), @@ -489,6 +496,7 @@ public void testApiKeyCrossClusterAccessHeadersSentMultipleRemotes() throws Exce null, null, null, + null, 
null ) ) @@ -581,6 +589,7 @@ public void testApiKeyCrossClusterAccessHeadersSentSingleRemote() throws Excepti null, null, null, + null, null ) ), @@ -601,6 +610,7 @@ public void testApiKeyCrossClusterAccessHeadersSentSingleRemote() throws Excepti null, null, null, + null, null ) ) @@ -625,6 +635,7 @@ public void testApiKeyCrossClusterAccessHeadersSentSingleRemote() throws Excepti null, null, null, + null, null ) ) @@ -713,6 +724,7 @@ public void testApiKeyCrossClusterAccessHeadersSentSingleRemote() throws Excepti null, null, null, + null, null ) ), @@ -733,6 +745,7 @@ public void testApiKeyCrossClusterAccessHeadersSentSingleRemote() throws Excepti null, null, null, + null, null ) ) @@ -757,6 +770,7 @@ public void testApiKeyCrossClusterAccessHeadersSentSingleRemote() throws Excepti null, null, null, + null, null ) ) diff --git a/x-pack/plugin/security/qa/security-trial/src/javaRestTest/java/org/elasticsearch/xpack/security/role/RoleWithDescriptionRestIT.java b/x-pack/plugin/security/qa/security-trial/src/javaRestTest/java/org/elasticsearch/xpack/security/role/RoleWithDescriptionRestIT.java new file mode 100644 index 0000000000000..95a650737d452 --- /dev/null +++ b/x-pack/plugin/security/qa/security-trial/src/javaRestTest/java/org/elasticsearch/xpack/security/role/RoleWithDescriptionRestIT.java @@ -0,0 +1,146 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.security.role; + +import org.apache.http.client.methods.HttpPost; +import org.apache.http.client.methods.HttpPut; +import org.elasticsearch.client.Request; +import org.elasticsearch.client.Response; +import org.elasticsearch.client.ResponseException; +import org.elasticsearch.core.Strings; +import org.elasticsearch.xpack.core.security.authz.RoleDescriptor; +import org.elasticsearch.xpack.core.security.support.Validation; +import org.elasticsearch.xpack.security.SecurityOnTrialLicenseRestTestCase; + +import java.io.IOException; +import java.util.HashMap; +import java.util.Map; + +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.equalTo; + +public class RoleWithDescriptionRestIT extends SecurityOnTrialLicenseRestTestCase { + + public void testCreateOrUpdateRoleWithDescription() throws Exception { + final String roleName = "role_with_description"; + final String initialRoleDescription = randomAlphaOfLengthBetween(0, 10); + { + Request createRoleRequest = new Request(HttpPut.METHOD_NAME, "/_security/role/" + roleName); + createRoleRequest.setJsonEntity(Strings.format(""" + { + "description": "%s", + "cluster": ["all"], + "indices": [{"names": ["*"], "privileges": ["all"]}] + }""", initialRoleDescription)); + Response createResponse = adminClient().performRequest(createRoleRequest); + assertOK(createResponse); + fetchRoleAndAssertEqualsExpected( + roleName, + new RoleDescriptor( + roleName, + new String[] { "all" }, + new RoleDescriptor.IndicesPrivileges[] { + RoleDescriptor.IndicesPrivileges.builder().indices("*").privileges("all").build() }, + null, + null, + null, + null, + null, + null, + null, + null, + initialRoleDescription + ) + ); + } + { + final String newRoleDescription = randomValueOtherThan(initialRoleDescription, () -> randomAlphaOfLengthBetween(0, 10)); + Request updateRoleRequest = new Request(HttpPost.METHOD_NAME, "/_security/role/" + roleName); + 
updateRoleRequest.setJsonEntity(Strings.format(""" + { + "description": "%s", + "cluster": ["all"], + "indices": [{"names": ["index-*"], "privileges": ["all"]}] + }""", newRoleDescription)); + Response updateResponse = adminClient().performRequest(updateRoleRequest); + assertOK(updateResponse); + + fetchRoleAndAssertEqualsExpected( + roleName, + new RoleDescriptor( + roleName, + new String[] { "all" }, + new RoleDescriptor.IndicesPrivileges[] { + RoleDescriptor.IndicesPrivileges.builder().indices("index-*").privileges("all").build() }, + null, + null, + null, + null, + null, + null, + null, + null, + newRoleDescription + ) + ); + } + } + + public void testCreateRoleWithInvalidDescriptionFails() { + Request createRoleRequest = new Request(HttpPut.METHOD_NAME, "/_security/role/role_with_large_description"); + createRoleRequest.setJsonEntity(Strings.format(""" + { + "description": "%s", + "cluster": ["all"], + "indices": [{"names": ["*"], "privileges": ["all"]}] + }""", randomAlphaOfLength(Validation.Roles.MAX_DESCRIPTION_LENGTH + randomIntBetween(1, 5)))); + + ResponseException e = expectThrows(ResponseException.class, () -> adminClient().performRequest(createRoleRequest)); + assertEquals(400, e.getResponse().getStatusLine().getStatusCode()); + assertThat( + e.getMessage(), + containsString("Role description must be less than " + Validation.Roles.MAX_DESCRIPTION_LENGTH + " characters.") + ); + } + + public void testUpdateRoleWithInvalidDescriptionFails() throws IOException { + Request createRoleRequest = new Request(HttpPut.METHOD_NAME, "/_security/role/my_role"); + createRoleRequest.setJsonEntity(""" + { + "cluster": ["all"], + "indices": [{"names": ["*"], "privileges": ["all"]}] + }"""); + Response createRoleResponse = adminClient().performRequest(createRoleRequest); + assertOK(createRoleResponse); + + Request updateRoleRequest = new Request(HttpPost.METHOD_NAME, "/_security/role/my_role"); + updateRoleRequest.setJsonEntity(Strings.format(""" + { + "description": 
"%s", + "cluster": ["all"], + "indices": [{"names": ["index-*"], "privileges": ["all"]}] + }""", randomAlphaOfLength(Validation.Roles.MAX_DESCRIPTION_LENGTH + randomIntBetween(1, 5)))); + + ResponseException e = expectThrows(ResponseException.class, () -> adminClient().performRequest(updateRoleRequest)); + assertEquals(400, e.getResponse().getStatusLine().getStatusCode()); + assertThat( + e.getMessage(), + containsString("Role description must be less than " + Validation.Roles.MAX_DESCRIPTION_LENGTH + " characters.") + ); + } + + private void fetchRoleAndAssertEqualsExpected(final String roleName, final RoleDescriptor expectedRoleDescriptor) throws IOException { + final Response getRoleResponse = adminClient().performRequest(new Request("GET", "/_security/role/" + roleName)); + assertOK(getRoleResponse); + final Map actual = responseAsParser(getRoleResponse).map( + HashMap::new, + p -> RoleDescriptor.parserBuilder().allowDescription(true).build().parse(expectedRoleDescriptor.getName(), p) + ); + assertThat(actual, equalTo(Map.of(expectedRoleDescriptor.getName(), expectedRoleDescriptor))); + } +} diff --git a/x-pack/plugin/security/qa/security-trial/src/javaRestTest/java/org/elasticsearch/xpack/security/role/RoleWithRemoteIndicesPrivilegesRestIT.java b/x-pack/plugin/security/qa/security-trial/src/javaRestTest/java/org/elasticsearch/xpack/security/role/RoleWithRemoteIndicesPrivilegesRestIT.java index 28da12b226a66..aa5967ea7277a 100644 --- a/x-pack/plugin/security/qa/security-trial/src/javaRestTest/java/org/elasticsearch/xpack/security/role/RoleWithRemoteIndicesPrivilegesRestIT.java +++ b/x-pack/plugin/security/qa/security-trial/src/javaRestTest/java/org/elasticsearch/xpack/security/role/RoleWithRemoteIndicesPrivilegesRestIT.java @@ -89,6 +89,7 @@ public void testRemoteIndexPrivileges() throws IOException { .grantedFields("field") .build() }, null, + null, null ) ); @@ -163,6 +164,7 @@ public void testRemoteIndexPrivileges() throws IOException { 
.grantedFields("field") .build() }, null, + null, null ) ); diff --git a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authc/ApiKeyIntegTests.java b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authc/ApiKeyIntegTests.java index 351cf05b2096d..58d6657b99e32 100644 --- a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authc/ApiKeyIntegTests.java +++ b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authc/ApiKeyIntegTests.java @@ -85,7 +85,7 @@ import org.elasticsearch.xpack.core.security.authc.RealmDomain; import org.elasticsearch.xpack.core.security.authc.file.FileRealmSettings; import org.elasticsearch.xpack.core.security.authz.RoleDescriptor; -import org.elasticsearch.xpack.core.security.authz.RoleDescriptorTests; +import org.elasticsearch.xpack.core.security.authz.RoleDescriptorTestHelper; import org.elasticsearch.xpack.core.security.authz.RoleDescriptorsIntersection; import org.elasticsearch.xpack.core.security.authz.privilege.ClusterPrivilegeResolver; import org.elasticsearch.xpack.core.security.authz.store.ReservedRolesStore; @@ -2551,11 +2551,11 @@ public void testUpdateApiKeysNoopScenarios() throws Exception { final List newRoleDescriptors = List.of( randomValueOtherThanMany( rd -> RoleDescriptorRequestValidator.validate(rd) != null || initialRequest.getRoleDescriptors().contains(rd), - () -> RoleDescriptorTests.randomRoleDescriptor(false) + () -> RoleDescriptorTestHelper.builder().build() ), randomValueOtherThanMany( rd -> RoleDescriptorRequestValidator.validate(rd) != null || initialRequest.getRoleDescriptors().contains(rd), - () -> RoleDescriptorTests.randomRoleDescriptor(false) + () -> RoleDescriptorTestHelper.builder().build() ) ); response = updateSingleApiKeyMaybeUsingBulkAction( @@ -2769,7 +2769,7 @@ private List randomRoleDescriptors() { new RoleDescriptor(randomAlphaOfLength(10), new String[] { 
"all" }, null, null), randomValueOtherThanMany( rd -> RoleDescriptorRequestValidator.validate(rd) != null, - () -> RoleDescriptorTests.randomRoleDescriptor(false, true, false, true) + () -> RoleDescriptorTestHelper.builder().allowRemoteIndices(true).allowRemoteClusters(true).build() ) ); case 2 -> null; @@ -2887,6 +2887,7 @@ private void expectRoleDescriptorsForApiKey( final var descriptor = (Map) rawRoleDescriptor.get(expectedRoleDescriptor.getName()); final var roleDescriptor = RoleDescriptor.parserBuilder() .allowRestriction(true) + .allowDescription(true) .build() .parse( expectedRoleDescriptor.getName(), diff --git a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authc/esnative/NativeRealmIntegTests.java b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authc/esnative/NativeRealmIntegTests.java index 9d56528a060c3..ce4c8719f0642 100644 --- a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authc/esnative/NativeRealmIntegTests.java +++ b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authc/esnative/NativeRealmIntegTests.java @@ -223,6 +223,7 @@ private void testAddAndGetRole(String roleName) { new BytesArray("{\"match_all\": {}}"), randomBoolean() ) + .description(randomAlphaOfLengthBetween(5, 20)) .metadata(metadata) .get(); logger.error("--> waiting for .security index"); @@ -245,6 +246,7 @@ private void testAddAndGetRole(String roleName) { new BytesArray("{\"match_all\": {}}"), randomBoolean() ) + .description(randomAlphaOfLengthBetween(5, 20)) .get(); preparePutRole("test_role3").cluster("all", "none") .runAs("root", "nobody") @@ -256,6 +258,7 @@ private void testAddAndGetRole(String roleName) { new BytesArray("{\"match_all\": {}}"), randomBoolean() ) + .description(randomAlphaOfLengthBetween(5, 20)) .get(); logger.info("--> retrieving all roles"); diff --git 
a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/ApiKeyService.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/ApiKeyService.java index 6d76fac71e900..55a89e184f84f 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/ApiKeyService.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/ApiKeyService.java @@ -360,8 +360,9 @@ && hasRemoteIndices(request.getRoleDescriptors())) { return; } + final Set userRolesWithoutDescription = removeUserRoleDescriptorDescriptions(userRoleDescriptors); final Set filteredUserRoleDescriptors = maybeRemoveRemotePrivileges( - userRoleDescriptors, + userRolesWithoutDescription, transportVersion, request.getId() ); @@ -370,6 +371,28 @@ && hasRemoteIndices(request.getRoleDescriptors())) { } } + private Set removeUserRoleDescriptorDescriptions(Set userRoleDescriptors) { + return userRoleDescriptors.stream().map(roleDescriptor -> { + if (roleDescriptor.hasDescription()) { + return new RoleDescriptor( + roleDescriptor.getName(), + roleDescriptor.getClusterPrivileges(), + roleDescriptor.getIndicesPrivileges(), + roleDescriptor.getApplicationPrivileges(), + roleDescriptor.getConditionalClusterPrivileges(), + roleDescriptor.getRunAs(), + roleDescriptor.getMetadata(), + roleDescriptor.getTransientMetadata(), + roleDescriptor.getRemoteIndicesPrivileges(), + roleDescriptor.getRemoteClusterPermissions(), + roleDescriptor.getRestriction(), + null + ); + } + return roleDescriptor; + }).collect(Collectors.toSet()); + } + private TransportVersion getMinTransportVersion() { return clusterService.state().getMinTransportVersion(); } @@ -534,8 +557,9 @@ public void updateApiKeys( } final String[] apiKeyIds = request.getIds().toArray(String[]::new); + final Set userRolesWithoutDescription = removeUserRoleDescriptorDescriptions(userRoleDescriptors); final Set filteredUserRoleDescriptors = maybeRemoveRemotePrivileges( - 
userRoleDescriptors, + userRolesWithoutDescription, transportVersion, apiKeyIds ); @@ -673,7 +697,8 @@ static Set maybeRemoveRemotePrivileges( roleDescriptor.hasRemoteClusterPermissions() && transportVersion.before(ROLE_REMOTE_CLUSTER_PRIVS) ? null : roleDescriptor.getRemoteClusterPermissions(), - roleDescriptor.getRestriction() + roleDescriptor.getRestriction(), + roleDescriptor.getDescription() ); } return roleDescriptor; diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/FileRolesStore.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/FileRolesStore.java index 71a78c1627946..7618135c8662f 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/FileRolesStore.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/FileRolesStore.java @@ -67,7 +67,10 @@ public class FileRolesStore implements BiConsumer, ActionListener, ActionListener< private static final Logger logger = LogManager.getLogger(NativeRolesStore.class); - private static final RoleDescriptor.Parser ROLE_DESCRIPTOR_PARSER = RoleDescriptor.parserBuilder().allow2xFormat(true).build(); + private static final RoleDescriptor.Parser ROLE_DESCRIPTOR_PARSER = RoleDescriptor.parserBuilder() + .allow2xFormat(true) + .allowDescription(true) + .build(); private final Settings settings; private final Client client; @@ -272,9 +276,18 @@ public void putRole(final PutRoleRequest request, final RoleDescriptor role, fin "all nodes must have version [" + ROLE_REMOTE_CLUSTER_PRIVS + "] or higher to support remote cluster privileges" ) ); - } else { - innerPutRole(request, role, listener); - } + } else if (role.hasDescription() + && clusterService.state().getMinTransportVersion().before(TransportVersions.SECURITY_ROLE_DESCRIPTION)) { + listener.onFailure( + new IllegalStateException( + "all nodes must have version [" + + 
TransportVersions.SECURITY_ROLE_DESCRIPTION.toReleaseVersion() + + "] or higher to support specifying role description" + ) + ); + } else { + innerPutRole(request, role, listener); + } } // pkg-private for testing @@ -535,7 +548,8 @@ static RoleDescriptor transformRole(String id, BytesReference sourceBytes, Logge transientMap, roleDescriptor.getRemoteIndicesPrivileges(), roleDescriptor.getRemoteClusterPermissions(), - roleDescriptor.getRestriction() + roleDescriptor.getRestriction(), + roleDescriptor.getDescription() ); } else { return roleDescriptor; diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/support/SecuritySystemIndices.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/support/SecuritySystemIndices.java index ed198834d24f1..9e20cb05a3cdc 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/support/SecuritySystemIndices.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/support/SecuritySystemIndices.java @@ -14,6 +14,7 @@ import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.VersionId; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.features.NodeFeature; import org.elasticsearch.index.IndexSettings; @@ -23,9 +24,12 @@ import java.io.IOException; import java.io.UncheckedIOException; +import java.util.Arrays; import java.util.Collection; +import java.util.Comparator; import java.util.List; import java.util.concurrent.atomic.AtomicBoolean; +import java.util.function.Function; import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; import static org.elasticsearch.xpack.core.ClientHelper.SECURITY_ORIGIN; @@ -38,7 +42,6 @@ public class SecuritySystemIndices { public static final int INTERNAL_MAIN_INDEX_FORMAT = 6; - public static final int 
INTERNAL_MAIN_INDEX_MAPPINGS_FORMAT = 1; private static final int INTERNAL_TOKENS_INDEX_FORMAT = 7; private static final int INTERNAL_TOKENS_INDEX_MAPPINGS_FORMAT = 1; private static final int INTERNAL_PROFILE_INDEX_FORMAT = 8; @@ -119,18 +122,22 @@ private void checkInitialized() { } private SystemIndexDescriptor getSecurityMainIndexDescriptor() { - return SystemIndexDescriptor.builder() - // This can't just be `.security-*` because that would overlap with the tokens index pattern - .setIndexPattern(".security-[0-9]+*") - .setPrimaryIndex(MAIN_INDEX_CONCRETE_NAME) - .setDescription("Contains Security configuration") - .setMappings(getMainIndexMappings()) - .setSettings(getMainIndexSettings()) - .setAliasName(SECURITY_MAIN_ALIAS) - .setIndexFormat(INTERNAL_MAIN_INDEX_FORMAT) - .setVersionMetaKey(SECURITY_VERSION_STRING) - .setOrigin(SECURITY_ORIGIN) - .setThreadPools(ExecutorNames.CRITICAL_SYSTEM_INDEX_THREAD_POOLS) + final Function securityIndexDescriptorBuilder = + mappingVersion -> SystemIndexDescriptor.builder() + // This can't just be `.security-*` because that would overlap with the tokens index pattern + .setIndexPattern(".security-[0-9]+*") + .setPrimaryIndex(MAIN_INDEX_CONCRETE_NAME) + .setDescription("Contains Security configuration") + .setMappings(getMainIndexMappings(mappingVersion)) + .setSettings(getMainIndexSettings()) + .setAliasName(SECURITY_MAIN_ALIAS) + .setIndexFormat(INTERNAL_MAIN_INDEX_FORMAT) + .setVersionMetaKey(SECURITY_VERSION_STRING) + .setOrigin(SECURITY_ORIGIN) + .setThreadPools(ExecutorNames.CRITICAL_SYSTEM_INDEX_THREAD_POOLS); + + return securityIndexDescriptorBuilder.apply(SecurityMainIndexMappingVersion.latest()) + .setPriorSystemIndexDescriptors(List.of(securityIndexDescriptorBuilder.apply(SecurityMainIndexMappingVersion.INITIAL).build())) .build(); } @@ -149,14 +156,14 @@ private static Settings getMainIndexSettings() { .build(); } - private XContentBuilder getMainIndexMappings() { + private XContentBuilder 
getMainIndexMappings(SecurityMainIndexMappingVersion mappingVersion) { try { final XContentBuilder builder = jsonBuilder(); builder.startObject(); { builder.startObject("_meta"); builder.field(SECURITY_VERSION_STRING, BWC_MAPPINGS_VERSION); // Only needed for BWC with pre-8.15.0 nodes - builder.field(SystemIndexDescriptor.VERSION_META_KEY, INTERNAL_MAIN_INDEX_MAPPINGS_FORMAT); + builder.field(SystemIndexDescriptor.VERSION_META_KEY, mappingVersion.id); builder.endObject(); builder.field("dynamic", "strict"); @@ -304,22 +311,24 @@ private XContentBuilder getMainIndexMappings() { } builder.endObject(); - builder.startObject("remote_cluster"); - { - builder.field("type", "object"); - builder.startObject("properties"); + if (mappingVersion.onOrAfter(SecurityMainIndexMappingVersion.ADD_REMOTE_CLUSTER_AND_DESCRIPTION_FIELDS)) { + builder.startObject("remote_cluster"); { - builder.startObject("clusters"); - builder.field("type", "keyword"); - builder.endObject(); + builder.field("type", "object"); + builder.startObject("properties"); + { + builder.startObject("clusters"); + builder.field("type", "keyword"); + builder.endObject(); - builder.startObject("privileges"); - builder.field("type", "keyword"); + builder.startObject("privileges"); + builder.field("type", "keyword"); + builder.endObject(); + } builder.endObject(); } builder.endObject(); } - builder.endObject(); builder.startObject("applications"); { @@ -402,6 +411,12 @@ private XContentBuilder getMainIndexMappings() { builder.field("type", "keyword"); builder.endObject(); + if (mappingVersion.onOrAfter(SecurityMainIndexMappingVersion.ADD_REMOTE_CLUSTER_AND_DESCRIPTION_FIELDS)) { + builder.startObject("description"); + builder.field("type", "text"); + builder.endObject(); + } + builder.startObject("run_as"); builder.field("type", "keyword"); builder.endObject(); @@ -1010,4 +1025,46 @@ private static void defineRealmDomain(XContentBuilder builder, String fieldName) builder.endObject(); } + /** + * Every change to the 
mapping of .security index must be versioned. When adding a new mapping version: + *
    + *
  • pick the next largest version ID - this will automatically become the new {@link #latest()} version
  • + *
  • add your mapping change in {@link #getMainIndexMappings(SecurityMainIndexMappingVersion)} conditionally to a new version
  • + *
  • make sure to set old latest version to "prior system index descriptors" in {@link #getSecurityMainIndexDescriptor()}
  • + *
+ */ + public enum SecurityMainIndexMappingVersion implements VersionId { + + /** + * Initial .security index mapping version. + */ + INITIAL(1), + + /** + * The mapping was changed to add new text description and remote_cluster fields. + */ + ADD_REMOTE_CLUSTER_AND_DESCRIPTION_FIELDS(2), + + ; + + private static final SecurityMainIndexMappingVersion LATEST = Arrays.stream(values()) + .max(Comparator.comparingInt(v -> v.id)) + .orElseThrow(); + + private final int id; + + SecurityMainIndexMappingVersion(int id) { + assert id > 0; + this.id = id; + } + + @Override + public int id() { + return id; + } + + public static SecurityMainIndexMappingVersion latest() { + return LATEST; + } + } } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/test/TestSecurityClient.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/test/TestSecurityClient.java index e8eb50e3a6529..a7014ece93ae5 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/test/TestSecurityClient.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/test/TestSecurityClient.java @@ -212,7 +212,7 @@ private Map getRoleDescriptors(String roleParameter) thr XContentParserUtils.ensureExpectedToken(XContentParser.Token.FIELD_NAME, parser.currentToken(), parser); final String roleName = parser.currentName(); XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.nextToken(), parser); - final RoleDescriptor role = RoleDescriptor.parserBuilder().build().parse(roleName, parser); + final RoleDescriptor role = RoleDescriptor.parserBuilder().allowDescription(true).build().parse(roleName, parser); roles.put(roleName, role); } } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ApiKeyServiceTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ApiKeyServiceTests.java index 107f7c0632ea7..7752b85c6345c 100644 --- 
a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ApiKeyServiceTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ApiKeyServiceTests.java @@ -103,7 +103,7 @@ import org.elasticsearch.xpack.core.security.authc.support.AuthenticationContextSerializer; import org.elasticsearch.xpack.core.security.authc.support.Hasher; import org.elasticsearch.xpack.core.security.authz.RoleDescriptor; -import org.elasticsearch.xpack.core.security.authz.RoleDescriptorTests; +import org.elasticsearch.xpack.core.security.authz.RoleDescriptorTestHelper; import org.elasticsearch.xpack.core.security.authz.RoleRestrictionTests; import org.elasticsearch.xpack.core.security.authz.permission.RemoteClusterPermissionGroup; import org.elasticsearch.xpack.core.security.authz.permission.RemoteClusterPermissions; @@ -1857,6 +1857,7 @@ public void testApiKeyDocCache() throws IOException, ExecutionException, Interru ApiKeyCredentials apiKeyCredentials3 = getApiKeyCredentials(docId3, apiKey3, type); final List keyRoles = List.of( RoleDescriptor.parserBuilder() + .allowRestriction(true) .allow2xFormat(true) .build() .parse("key-role", new BytesArray("{\"cluster\":[\"monitor\"]}"), XContentType.JSON) @@ -2348,12 +2349,12 @@ public void testMaybeBuildUpdatedDocument() throws IOException { final ApiKey.Type type = randomFrom(ApiKey.Type.values()); final Set oldUserRoles = type == ApiKey.Type.CROSS_CLUSTER ? 
Set.of() - : randomSet(0, 3, RoleDescriptorTests::randomRoleDescriptor); + : randomSet(0, 3, () -> RoleDescriptorTestHelper.builder().allowReservedMetadata(true).build()); final List oldKeyRoles; if (type == ApiKey.Type.CROSS_CLUSTER) { oldKeyRoles = List.of(CrossClusterApiKeyRoleDescriptorBuilder.parse(randomCrossClusterApiKeyAccessField()).build()); } else { - oldKeyRoles = randomList(3, RoleDescriptorTests::randomRoleDescriptor); + oldKeyRoles = randomList(3, () -> RoleDescriptorTestHelper.builder().allowReservedMetadata(true).build()); } final long now = randomMillisUpToYear9999(); when(clock.instant()).thenReturn(Instant.ofEpochMilli(now)); @@ -2388,7 +2389,10 @@ public void testMaybeBuildUpdatedDocument() throws IOException { final boolean changeExpiration = randomBoolean(); final Set newUserRoles = changeUserRoles - ? randomValueOtherThan(oldUserRoles, () -> randomSet(0, 3, RoleDescriptorTests::randomRoleDescriptor)) + ? randomValueOtherThan( + oldUserRoles, + () -> randomSet(0, 3, () -> RoleDescriptorTestHelper.builder().allowReservedMetadata(true).build()) + ) : oldUserRoles; final List newKeyRoles; if (changeKeyRoles) { @@ -2401,7 +2405,10 @@ public void testMaybeBuildUpdatedDocument() throws IOException { } }); } else { - newKeyRoles = randomValueOtherThan(oldKeyRoles, () -> randomList(0, 3, RoleDescriptorTests::randomRoleDescriptor)); + newKeyRoles = randomValueOtherThan( + oldKeyRoles, + () -> randomList(0, 3, () -> RoleDescriptorTestHelper.builder().allowReservedMetadata(true).build()) + ); } } else { newKeyRoles = randomBoolean() ? 
oldKeyRoles : null; @@ -2582,7 +2589,16 @@ public void testGetApiKeyMetadata() throws IOException { public void testMaybeRemoveRemoteIndicesPrivilegesWithUnsupportedVersion() { final String apiKeyId = randomAlphaOfLengthBetween(5, 8); final Set userRoleDescriptors = Set.copyOf( - randomList(2, 5, () -> RoleDescriptorTests.randomRoleDescriptor(randomBoolean(), randomBoolean(), randomBoolean(), false)) + randomList( + 2, + 5, + () -> RoleDescriptorTestHelper.builder() + .allowReservedMetadata(randomBoolean()) + .allowRemoteIndices(randomBoolean()) + .allowRestriction(randomBoolean()) + .allowRemoteClusters(false) + .build() + ) ); // Selecting random unsupported version. @@ -2615,11 +2631,7 @@ public void testMaybeRemoveRemoteIndicesPrivilegesWithUnsupportedVersion() { public void testMaybeRemoveRemoteClusterPrivilegesWithUnsupportedVersion() { final String apiKeyId = randomAlphaOfLengthBetween(5, 8); final Set userRoleDescriptors = Set.copyOf( - randomList( - 2, - 5, - () -> RoleDescriptorTests.randomRoleDescriptor(randomBoolean(), randomBoolean(), randomBoolean(), randomBoolean()) - ) + randomList(2, 5, () -> RoleDescriptorTestHelper.builder().allowRemoteClusters(true).build()) ); // Selecting random unsupported version. 
@@ -2931,7 +2943,12 @@ public void testValidateOwnerUserRoleDescriptorsWithWorkflowsRestriction() { final List requestRoleDescriptors = randomList( 0, 1, - () -> RoleDescriptorTests.randomRoleDescriptor(randomBoolean(), false, randomBoolean(), false) + () -> RoleDescriptorTestHelper.builder() + .allowReservedMetadata(randomBoolean()) + .allowRemoteIndices(false) + .allowRestriction(randomBoolean()) + .allowRemoteClusters(false) + .build() ); final AbstractCreateApiKeyRequest createRequest = mock(AbstractCreateApiKeyRequest.class); @@ -2959,34 +2976,23 @@ private static RoleDescriptor randomRoleDescriptorWithRemotePrivileges() { return new RoleDescriptor( randomAlphaOfLengthBetween(3, 90), randomSubsetOf(ClusterPrivilegeResolver.names()).toArray(String[]::new), - RoleDescriptorTests.randomIndicesPrivileges(0, 3), - RoleDescriptorTests.randomApplicationPrivileges(), - RoleDescriptorTests.randomClusterPrivileges(), + RoleDescriptorTestHelper.randomIndicesPrivileges(0, 3), + RoleDescriptorTestHelper.randomApplicationPrivileges(), + RoleDescriptorTestHelper.randomClusterPrivileges(), generateRandomStringArray(5, randomIntBetween(2, 8), false, true), - RoleDescriptorTests.randomRoleDescriptorMetadata(randomBoolean()), + RoleDescriptorTestHelper.randomRoleDescriptorMetadata(randomBoolean()), Map.of(), - RoleDescriptorTests.randomRemoteIndicesPrivileges(1, 3), + RoleDescriptorTestHelper.randomRemoteIndicesPrivileges(1, 3), new RemoteClusterPermissions().addGroup( new RemoteClusterPermissionGroup(new String[] { "monitor_enrich" }, new String[] { "*" }) ), - RoleRestrictionTests.randomWorkflowsRestriction(1, 3) + RoleRestrictionTests.randomWorkflowsRestriction(1, 3), + randomAlphaOfLengthBetween(0, 10) ); } private static RoleDescriptor randomRoleDescriptorWithWorkflowsRestriction() { - return new RoleDescriptor( - randomAlphaOfLengthBetween(3, 90), - randomSubsetOf(ClusterPrivilegeResolver.names()).toArray(String[]::new), - RoleDescriptorTests.randomIndicesPrivileges(0, 3), 
- RoleDescriptorTests.randomApplicationPrivileges(), - RoleDescriptorTests.randomClusterPrivileges(), - generateRandomStringArray(5, randomIntBetween(2, 8), false, true), - RoleDescriptorTests.randomRoleDescriptorMetadata(randomBoolean()), - Map.of(), - null, - null, - RoleRestrictionTests.randomWorkflowsRestriction(1, 3) - ); + return RoleDescriptorTestHelper.builder().allowReservedMetadata(true).allowRestriction(true).allowRemoteIndices(false).build(); } public static String randomCrossClusterApiKeyAccessField() { diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/CrossClusterAccessAuthenticationServiceTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/CrossClusterAccessAuthenticationServiceTests.java index 20555ced32bd7..7219561dcf9df 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/CrossClusterAccessAuthenticationServiceTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/CrossClusterAccessAuthenticationServiceTests.java @@ -164,7 +164,7 @@ public void testExceptionProcessingRequestOnInvalidCrossClusterAccessSubjectInfo // Invalid internal user AuthenticationTestHelper.builder().internal(InternalUsers.XPACK_USER).build(), new RoleDescriptorsIntersection( - new RoleDescriptor("invalid_role", new String[] { "all" }, null, null, null, null, null, null, null, null, null) + new RoleDescriptor("invalid_role", new String[] { "all" }, null, null, null, null, null, null, null, null, null, null) ) ) ); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/CrossClusterAccessHeadersTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/CrossClusterAccessHeadersTests.java index 664eec036832a..f567057d5b410 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/CrossClusterAccessHeadersTests.java +++ 
b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/CrossClusterAccessHeadersTests.java @@ -19,7 +19,7 @@ import java.util.Base64; import java.util.Set; -import static org.elasticsearch.xpack.core.security.authz.RoleDescriptorTests.randomUniquelyNamedRoleDescriptors; +import static org.elasticsearch.xpack.core.security.authz.RoleDescriptorTestHelper.randomUniquelyNamedRoleDescriptors; import static org.elasticsearch.xpack.security.authc.CrossClusterAccessHeaders.CROSS_CLUSTER_ACCESS_CREDENTIALS_HEADER_KEY; import static org.hamcrest.Matchers.equalTo; diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/AuthorizationServiceIntegTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/AuthorizationServiceIntegTests.java index 08628c1a5f5af..501c0bee36264 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/AuthorizationServiceIntegTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/AuthorizationServiceIntegTests.java @@ -20,7 +20,7 @@ import org.elasticsearch.xpack.core.security.authc.AuthenticationTestHelper; import org.elasticsearch.xpack.core.security.authc.CrossClusterAccessSubjectInfo; import org.elasticsearch.xpack.core.security.authz.RoleDescriptor; -import org.elasticsearch.xpack.core.security.authz.RoleDescriptorTests; +import org.elasticsearch.xpack.core.security.authz.RoleDescriptorTestHelper; import org.elasticsearch.xpack.core.security.authz.RoleDescriptorsIntersection; import org.elasticsearch.xpack.core.security.authz.privilege.ClusterPrivilegeResolver; import org.elasticsearch.xpack.core.security.authz.privilege.IndexPrivilege; @@ -74,7 +74,8 @@ public void testGetRoleDescriptorsIntersectionForRemoteCluster() throws IOExcept .build(), randomNonEmptySubsetOf(List.of(concreteClusterAlias, "*")).toArray(new String[0]) ) }, - null, // TODO: add tests here + null, + null, null ) ); @@ 
-133,7 +134,13 @@ public void testCrossClusterAccessWithInvalidRoleDescriptors() { new RoleDescriptorsIntersection( randomValueOtherThanMany( rd -> false == rd.hasUnsupportedPrivilegesInsideAPIKeyConnectedRemoteCluster(), - () -> RoleDescriptorTests.randomRoleDescriptor() + () -> RoleDescriptorTestHelper.builder() + .allowReservedMetadata(randomBoolean()) + .allowRemoteIndices(randomBoolean()) + .allowRestriction(randomBoolean()) + .allowDescription(randomBoolean()) + .allowRemoteClusters(randomBoolean()) + .build() ) ) ); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/RBACEngineTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/RBACEngineTests.java index 1923d4d86dc71..d71c2b0d19074 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/RBACEngineTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/RBACEngineTests.java @@ -74,7 +74,7 @@ import org.elasticsearch.xpack.core.security.authz.RoleDescriptor; import org.elasticsearch.xpack.core.security.authz.RoleDescriptor.ApplicationResourcePrivileges; import org.elasticsearch.xpack.core.security.authz.RoleDescriptor.IndicesPrivileges; -import org.elasticsearch.xpack.core.security.authz.RoleDescriptorTests; +import org.elasticsearch.xpack.core.security.authz.RoleDescriptorTestHelper; import org.elasticsearch.xpack.core.security.authz.RoleDescriptorsIntersection; import org.elasticsearch.xpack.core.security.authz.permission.ApplicationPermission; import org.elasticsearch.xpack.core.security.authz.permission.ClusterPermission; @@ -199,7 +199,13 @@ public void testResolveAuthorizationInfoForEmptyRestrictedRolesWithAuthenticatio @SuppressWarnings("unchecked") final var listener = (ActionListener>) invocation.getArgument(1); final Supplier randomRoleSupplier = () -> Role.buildFromRoleDescriptor( - RoleDescriptorTests.randomRoleDescriptor(randomBoolean(), false, 
randomBoolean(), false), + RoleDescriptorTestHelper.builder() + .allowReservedMetadata(randomBoolean()) + .allowRemoteIndices(false) + .allowRestriction(randomBoolean()) + .allowDescription(randomBoolean()) + .allowRemoteClusters(false) + .build(), new FieldPermissionsCache(Settings.EMPTY), RESTRICTED_INDICES, List.of() diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/CompositeRolesStoreTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/CompositeRolesStoreTests.java index e039f0c66eaeb..fd32bde0f3c53 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/CompositeRolesStoreTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/CompositeRolesStoreTests.java @@ -64,7 +64,7 @@ import org.elasticsearch.xpack.core.security.authc.Subject; import org.elasticsearch.xpack.core.security.authz.RoleDescriptor; import org.elasticsearch.xpack.core.security.authz.RoleDescriptor.IndicesPrivileges; -import org.elasticsearch.xpack.core.security.authz.RoleDescriptorTests; +import org.elasticsearch.xpack.core.security.authz.RoleDescriptorTestHelper; import org.elasticsearch.xpack.core.security.authz.RoleDescriptorsIntersection; import org.elasticsearch.xpack.core.security.authz.accesscontrol.DocumentSubsetBitsetCache; import org.elasticsearch.xpack.core.security.authz.accesscontrol.IndicesAccessControl; @@ -959,7 +959,8 @@ public ClusterPermission.Builder buildPermission(ClusterPermission.Builder build RoleDescriptor.RemoteIndicesPrivileges.builder("remote-*", "remote").indices("abc-*", "xyz-*").privileges("read").build(), RoleDescriptor.RemoteIndicesPrivileges.builder("remote-*").indices("remote-idx-1-*").privileges("read").build(), }, getValidRemoteClusterPermissions(new String[] { "remote-*" }), - null + null, + randomAlphaOfLengthBetween(0, 20) ); ConfigurableClusterPrivilege ccp2 = new 
MockConfigurableClusterPrivilege() { @@ -988,7 +989,8 @@ public ClusterPermission.Builder buildPermission(ClusterPermission.Builder build RoleDescriptor.RemoteIndicesPrivileges.builder("*").indices("remote-idx-2-*").privileges("read").build(), RoleDescriptor.RemoteIndicesPrivileges.builder("remote-*").indices("remote-idx-3-*").privileges("read").build() }, null, - null + null, + randomAlphaOfLengthBetween(0, 20) ); FieldPermissionsCache cache = new FieldPermissionsCache(Settings.EMPTY); @@ -1100,7 +1102,15 @@ public void testBuildRoleWithSingleRemoteClusterDefinition() { } public void testBuildRoleFromDescriptorsWithSingleRestriction() { - Role role = buildRole(RoleDescriptorTests.randomRoleDescriptor(randomBoolean(), randomBoolean(), true, randomBoolean())); + Role role = buildRole( + RoleDescriptorTestHelper.builder() + .allowReservedMetadata(randomBoolean()) + .allowRemoteIndices(randomBoolean()) + .allowRestriction(true) + .allowDescription(randomBoolean()) + .allowRemoteClusters(randomBoolean()) + .build() + ); assertThat(role.hasWorkflowsRestriction(), equalTo(true)); } @@ -1108,8 +1118,20 @@ public void testBuildRoleFromDescriptorsWithViolationOfRestrictionValidation() { var e = expectThrows( IllegalArgumentException.class, () -> buildRole( - RoleDescriptorTests.randomRoleDescriptor(randomBoolean(), randomBoolean(), true, randomBoolean()), - RoleDescriptorTests.randomRoleDescriptor(randomBoolean(), randomBoolean(), true, randomBoolean()) + RoleDescriptorTestHelper.builder() + .allowReservedMetadata(randomBoolean()) + .allowRemoteIndices(randomBoolean()) + .allowRestriction(true) + .allowDescription(randomBoolean()) + .allowRemoteClusters(randomBoolean()) + .build(), + RoleDescriptorTestHelper.builder() + .allowReservedMetadata(randomBoolean()) + .allowRemoteIndices(randomBoolean()) + .allowRestriction(true) + .allowDescription(randomBoolean()) + .allowRemoteClusters(randomBoolean()) + .build() ) ); assertThat(e.getMessage(), containsString("more than one 
role descriptor with restriction is not allowed")); @@ -1117,9 +1139,27 @@ public void testBuildRoleFromDescriptorsWithViolationOfRestrictionValidation() { e = expectThrows( IllegalArgumentException.class, () -> buildRole( - RoleDescriptorTests.randomRoleDescriptor(randomBoolean(), randomBoolean(), true, randomBoolean()), - RoleDescriptorTests.randomRoleDescriptor(randomBoolean(), randomBoolean(), false, randomBoolean()), - RoleDescriptorTests.randomRoleDescriptor(randomBoolean(), randomBoolean(), false, randomBoolean()) + RoleDescriptorTestHelper.builder() + .allowReservedMetadata(randomBoolean()) + .allowRemoteIndices(randomBoolean()) + .allowRestriction(true) + .allowDescription(randomBoolean()) + .allowRemoteClusters(randomBoolean()) + .build(), + RoleDescriptorTestHelper.builder() + .allowReservedMetadata(randomBoolean()) + .allowRemoteIndices(randomBoolean()) + .allowRestriction(false) + .allowDescription(randomBoolean()) + .allowRemoteClusters(randomBoolean()) + .build(), + RoleDescriptorTestHelper.builder() + .allowReservedMetadata(randomBoolean()) + .allowRemoteIndices(randomBoolean()) + .allowRestriction(false) + .allowDescription(randomBoolean()) + .allowRemoteClusters(randomBoolean()) + .build() ) ); assertThat(e.getMessage(), containsString("combining role descriptors with and without restriction is not allowed")); @@ -2145,6 +2185,7 @@ public void testGetRoleForCrossClusterAccessAuthentication() throws Exception { null, null, null, + null, null ) ) @@ -3089,11 +3130,11 @@ private RoleDescriptor roleDescriptorWithIndicesPrivileges( final RoleDescriptor.RemoteIndicesPrivileges[] rips, final IndicesPrivileges[] ips ) { - return new RoleDescriptor(name, null, ips, null, null, null, null, null, rips, null, null); + return new RoleDescriptor(name, null, ips, null, null, null, null, null, rips, null, null, null); } private RoleDescriptor roleDescriptorWithRemoteClusterPrivileges(final String name, RemoteClusterPermissions remoteClusterPermissions) { - return 
new RoleDescriptor(name, null, null, null, null, null, null, null, null, remoteClusterPermissions, null); + return new RoleDescriptor(name, null, null, null, null, null, null, null, null, remoteClusterPermissions, null, null); } private RemoteClusterPermissions getValidRemoteClusterPermissions(String[] aliases) { diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/FileRolesStoreTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/FileRolesStoreTests.java index 3d30a3534d422..0a2c40d2a257a 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/FileRolesStoreTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/FileRolesStoreTests.java @@ -110,7 +110,7 @@ public void testParseFile() throws Exception { new FileRoleValidator.Default() ); assertThat(roles, notNullValue()); - assertThat(roles.size(), is(10)); + assertThat(roles.size(), is(11)); RoleDescriptor descriptor = roles.get("role1"); assertNotNull(descriptor); @@ -286,6 +286,18 @@ public void testParseFile() throws Exception { assertThat(group.getQuery(), notNullValue()); assertThat(roles.get("role_query_invalid"), nullValue()); + + descriptor = roles.get("role_with_description"); + assertNotNull(descriptor); + assertThat(descriptor.getDescription(), is(equalTo("Allows all security-related operations!"))); + role = Role.buildFromRoleDescriptor(descriptor, new FieldPermissionsCache(Settings.EMPTY), restrictedIndices); + assertThat(role, notNullValue()); + assertThat(role.names(), equalTo(new String[] { "role_with_description" })); + assertThat(role.cluster(), notNullValue()); + assertThat(role.cluster().privileges(), equalTo(Set.of(ClusterPrivilegeResolver.MANAGE_SECURITY))); + assertThat(role.indices(), is(IndicesPermission.NONE)); + assertThat(role.runAs(), is(RunAsPermission.NONE)); + } public void testParseFileWithRemoteIndicesAndCluster() 
throws IllegalAccessException, IOException { @@ -395,7 +407,7 @@ public void testParseFileWithFLSAndDLSDisabled() throws Exception { new FileRoleValidator.Default() ); assertThat(roles, notNullValue()); - assertThat(roles.size(), is(7)); + assertThat(roles.size(), is(8)); assertThat(roles.get("role_fields"), nullValue()); assertThat(roles.get("role_query"), nullValue()); assertThat(roles.get("role_query_fields"), nullValue()); @@ -452,7 +464,7 @@ public void testParseFileWithFLSAndDLSUnlicensed() throws Exception { new FileRoleValidator.Default() ); assertThat(roles, notNullValue()); - assertThat(roles.size(), is(10)); + assertThat(roles.size(), is(11)); assertNotNull(roles.get("role_fields")); assertNotNull(roles.get("role_query")); assertNotNull(roles.get("role_query_fields")); @@ -664,7 +676,7 @@ public void testThatInvalidRoleDefinitions() throws Exception { assertThat(role, notNullValue()); assertThat(role.names(), equalTo(new String[] { "valid_role" })); - assertThat(entries, hasSize(7)); + assertThat(entries, hasSize(8)); assertThat( entries.get(0), startsWith("invalid role definition [fóóbár] in roles file [" + path.toAbsolutePath() + "]. invalid role name") @@ -675,6 +687,10 @@ public void testThatInvalidRoleDefinitions() throws Exception { assertThat(entries.get(4), startsWith("failed to parse role [role4]")); assertThat(entries.get(5), startsWith("failed to parse indices privileges for role [role5]")); assertThat(entries.get(6), startsWith("failed to parse role [role6]. unexpected field [restriction]")); + assertThat( + entries.get(7), + startsWith("invalid role definition [role7] in roles file [" + path.toAbsolutePath() + "]. 
invalid description") + ); } public void testThatRoleNamesDoesNotResolvePermissions() throws Exception { @@ -683,8 +699,8 @@ public void testThatRoleNamesDoesNotResolvePermissions() throws Exception { List events = CapturingLogger.output(logger.getName(), Level.ERROR); events.clear(); Set roleNames = FileRolesStore.parseFileForRoleNames(path, logger); - assertThat(roleNames.size(), is(7)); - assertThat(roleNames, containsInAnyOrder("valid_role", "role1", "role2", "role3", "role4", "role5", "role6")); + assertThat(roleNames.size(), is(8)); + assertThat(roleNames, containsInAnyOrder("valid_role", "role1", "role2", "role3", "role4", "role5", "role6", "role7")); assertThat(events, hasSize(1)); assertThat( @@ -746,7 +762,7 @@ public void testUsageStats() throws Exception { Map usageStats = store.usageStats(); - assertThat(usageStats.get("size"), is(flsDlsEnabled ? 10 : 7)); + assertThat(usageStats.get("size"), is(flsDlsEnabled ? 11 : 8)); assertThat(usageStats.get("remote_indices"), is(1L)); assertThat(usageStats.get("remote_cluster"), is(1L)); assertThat(usageStats.get("fls"), is(flsDlsEnabled)); @@ -781,7 +797,7 @@ public void testExists() throws Exception { new FileRoleValidator.Default() ); assertThat(roles, notNullValue()); - assertThat(roles.size(), is(10)); + assertThat(roles.size(), is(11)); for (var role : roles.keySet()) { assertThat(store.exists(role), is(true)); } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/NativeRolesStoreTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/NativeRolesStoreTests.java index 35591f99727f2..9d83d5f5c60ed 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/NativeRolesStoreTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/NativeRolesStoreTests.java @@ -50,7 +50,6 @@ import org.elasticsearch.xpack.core.security.action.role.PutRoleRequest; import 
org.elasticsearch.xpack.core.security.authz.RoleDescriptor; import org.elasticsearch.xpack.core.security.authz.RoleDescriptor.IndicesPrivileges; -import org.elasticsearch.xpack.core.security.authz.RoleDescriptorTests; import org.elasticsearch.xpack.core.security.authz.RoleRestrictionTests; import org.elasticsearch.xpack.core.security.authz.permission.RemoteClusterPermissionGroup; import org.elasticsearch.xpack.core.security.authz.permission.RemoteClusterPermissions; @@ -76,6 +75,10 @@ import static org.elasticsearch.transport.RemoteClusterPortSettings.TRANSPORT_VERSION_ADVANCED_REMOTE_CLUSTER_SECURITY; import static org.elasticsearch.xpack.core.security.SecurityField.DOCUMENT_LEVEL_SECURITY_FEATURE; +import static org.elasticsearch.xpack.core.security.authz.RoleDescriptorTestHelper.randomApplicationPrivileges; +import static org.elasticsearch.xpack.core.security.authz.RoleDescriptorTestHelper.randomClusterPrivileges; +import static org.elasticsearch.xpack.core.security.authz.RoleDescriptorTestHelper.randomRemoteIndicesPrivileges; +import static org.elasticsearch.xpack.core.security.authz.RoleDescriptorTestHelper.randomRoleDescriptorMetadata; import static org.elasticsearch.xpack.security.support.SecuritySystemIndices.SECURITY_MAIN_ALIAS; import static org.hamcrest.Matchers.arrayContaining; import static org.hamcrest.Matchers.contains; @@ -130,14 +133,15 @@ public void testRoleDescriptorWithFlsDlsLicensing() throws IOException { randomSubsetOf(ClusterPrivilegeResolver.names()).toArray(String[]::new), new IndicesPrivileges[] { IndicesPrivileges.builder().privileges("READ").indices("*").grantedFields("*").deniedFields("foo").build() }, - RoleDescriptorTests.randomApplicationPrivileges(), - RoleDescriptorTests.randomClusterPrivileges(), + randomApplicationPrivileges(), + randomClusterPrivileges(), generateRandomStringArray(5, randomIntBetween(2, 8), true, true), - RoleDescriptorTests.randomRoleDescriptorMetadata(ESTestCase.randomBoolean()), + 
randomRoleDescriptorMetadata(ESTestCase.randomBoolean()), null, - RoleDescriptorTests.randomRemoteIndicesPrivileges(1, 2), + randomRemoteIndicesPrivileges(1, 2), null, - null + null, + randomAlphaOfLengthBetween(0, 20) ); assertFalse(flsRole.getTransientMetadata().containsKey("unlicensed_features")); @@ -147,14 +151,15 @@ public void testRoleDescriptorWithFlsDlsLicensing() throws IOException { "dls", randomSubsetOf(ClusterPrivilegeResolver.names()).toArray(String[]::new), new IndicesPrivileges[] { IndicesPrivileges.builder().indices("*").privileges("READ").query(matchAllBytes).build() }, - RoleDescriptorTests.randomApplicationPrivileges(), - RoleDescriptorTests.randomClusterPrivileges(), + randomApplicationPrivileges(), + randomClusterPrivileges(), generateRandomStringArray(5, randomIntBetween(2, 8), true, true), - RoleDescriptorTests.randomRoleDescriptorMetadata(ESTestCase.randomBoolean()), + randomRoleDescriptorMetadata(ESTestCase.randomBoolean()), null, - RoleDescriptorTests.randomRemoteIndicesPrivileges(1, 2), + randomRemoteIndicesPrivileges(1, 2), null, - null + null, + randomAlphaOfLengthBetween(0, 20) ); assertFalse(dlsRole.getTransientMetadata().containsKey("unlicensed_features")); @@ -169,14 +174,15 @@ public void testRoleDescriptorWithFlsDlsLicensing() throws IOException { .deniedFields("foo") .query(matchAllBytes) .build() }, - RoleDescriptorTests.randomApplicationPrivileges(), - RoleDescriptorTests.randomClusterPrivileges(), + randomApplicationPrivileges(), + randomClusterPrivileges(), generateRandomStringArray(5, randomIntBetween(2, 8), true, true), - RoleDescriptorTests.randomRoleDescriptorMetadata(ESTestCase.randomBoolean()), + randomRoleDescriptorMetadata(ESTestCase.randomBoolean()), null, - RoleDescriptorTests.randomRemoteIndicesPrivileges(1, 2), + randomRemoteIndicesPrivileges(1, 2), null, - null + null, + randomAlphaOfLengthBetween(0, 20) ); assertFalse(flsDlsRole.getTransientMetadata().containsKey("unlicensed_features")); @@ -184,14 +190,15 @@ 
public void testRoleDescriptorWithFlsDlsLicensing() throws IOException { "no_fls_dls", randomSubsetOf(ClusterPrivilegeResolver.names()).toArray(String[]::new), new IndicesPrivileges[] { IndicesPrivileges.builder().indices("*").privileges("READ").build() }, - RoleDescriptorTests.randomApplicationPrivileges(), - RoleDescriptorTests.randomClusterPrivileges(), + randomApplicationPrivileges(), + randomClusterPrivileges(), generateRandomStringArray(5, randomIntBetween(2, 8), false, true), - RoleDescriptorTests.randomRoleDescriptorMetadata(ESTestCase.randomBoolean()), + randomRoleDescriptorMetadata(ESTestCase.randomBoolean()), null, - RoleDescriptorTests.randomRemoteIndicesPrivileges(1, 2), + randomRemoteIndicesPrivileges(1, 2), null, - null + null, + randomAlphaOfLengthBetween(0, 20) ); assertFalse(noFlsDlsRole.getTransientMetadata().containsKey("unlicensed_features")); @@ -281,14 +288,15 @@ public void testTransformingRoleWithRestrictionFails() throws IOException { : "{ \"match_all\": {} }" ) .build() }, - RoleDescriptorTests.randomApplicationPrivileges(), - RoleDescriptorTests.randomClusterPrivileges(), + randomApplicationPrivileges(), + randomClusterPrivileges(), generateRandomStringArray(5, randomIntBetween(2, 8), true, true), - RoleDescriptorTests.randomRoleDescriptorMetadata(ESTestCase.randomBoolean()), + randomRoleDescriptorMetadata(ESTestCase.randomBoolean()), null, - RoleDescriptorTests.randomRemoteIndicesPrivileges(1, 2), + randomRemoteIndicesPrivileges(1, 2), null, - RoleRestrictionTests.randomWorkflowsRestriction(1, 2) + RoleRestrictionTests.randomWorkflowsRestriction(1, 2), + randomAlphaOfLengthBetween(0, 20) ); XContentBuilder builder = roleWithRestriction.toXContent( @@ -463,6 +471,7 @@ void innerPutRole(final PutRoleRequest request, final RoleDescriptor role, final null, remoteIndicesPrivileges, remoteClusterPermissions, + null, null ); PlainActionFuture future = new PlainActionFuture<>(); diff --git 
a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/profile/ProfileServiceTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/profile/ProfileServiceTests.java index ca974e4e1e723..f076dc24e5d5b 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/profile/ProfileServiceTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/profile/ProfileServiceTests.java @@ -1483,6 +1483,7 @@ private static ApiKey createApiKeyForOwner(String apiKeyId, String username, Str null, null, null, + null, null ) ), diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/apikey/RestGetApiKeyActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/apikey/RestGetApiKeyActionTests.java index 810ef4056fd99..577a8eb9f698e 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/apikey/RestGetApiKeyActionTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/apikey/RestGetApiKeyActionTests.java @@ -42,8 +42,8 @@ import java.util.List; import java.util.Map; -import static org.elasticsearch.xpack.core.security.authz.RoleDescriptorTests.randomCrossClusterAccessRoleDescriptor; -import static org.elasticsearch.xpack.core.security.authz.RoleDescriptorTests.randomUniquelyNamedRoleDescriptors; +import static org.elasticsearch.xpack.core.security.authz.RoleDescriptorTestHelper.randomCrossClusterAccessRoleDescriptor; +import static org.elasticsearch.xpack.core.security.authz.RoleDescriptorTestHelper.randomUniquelyNamedRoleDescriptors; import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.emptyIterable; import static org.hamcrest.Matchers.is; diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/support/CacheInvalidatorRegistryTests.java 
b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/support/CacheInvalidatorRegistryTests.java index 8849edca70d68..6b60336276c35 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/support/CacheInvalidatorRegistryTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/support/CacheInvalidatorRegistryTests.java @@ -12,13 +12,13 @@ import org.elasticsearch.indices.SystemIndexDescriptor; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.security.support.CacheInvalidatorRegistry.CacheInvalidator; +import org.elasticsearch.xpack.security.support.SecuritySystemIndices.SecurityMainIndexMappingVersion; import org.junit.Before; import java.time.Instant; import java.util.List; import java.util.Set; -import static org.elasticsearch.xpack.security.support.SecuritySystemIndices.INTERNAL_MAIN_INDEX_MAPPINGS_FORMAT; import static org.hamcrest.Matchers.containsString; import static org.mockito.ArgumentMatchers.any; import static org.mockito.Mockito.mock; @@ -61,7 +61,7 @@ public void testSecurityIndexStateChangeWillInvalidateAllRegisteredInvalidators( true, true, true, - new SystemIndexDescriptor.MappingsVersion(INTERNAL_MAIN_INDEX_MAPPINGS_FORMAT, 0), + new SystemIndexDescriptor.MappingsVersion(SecurityMainIndexMappingVersion.latest().id(), 0), ".security", ClusterHealthStatus.GREEN, IndexMetadata.State.OPEN, diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/support/SecurityIndexManagerTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/support/SecurityIndexManagerTests.java index 2abeeb3fa040b..a7c5c616cf5bf 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/support/SecurityIndexManagerTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/support/SecurityIndexManagerTests.java @@ -50,6 +50,7 @@ import org.elasticsearch.threadpool.ThreadPool; import 
org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xpack.core.security.test.TestRestrictedIndices; +import org.elasticsearch.xpack.security.support.SecuritySystemIndices.SecurityMainIndexMappingVersion; import org.elasticsearch.xpack.security.test.SecurityTestUtils; import org.hamcrest.Matchers; import org.junit.Before; @@ -63,7 +64,6 @@ import java.util.function.BiConsumer; import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; -import static org.elasticsearch.xpack.security.support.SecuritySystemIndices.INTERNAL_MAIN_INDEX_MAPPINGS_FORMAT; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.is; @@ -389,7 +389,10 @@ public void testCanUpdateIndexMappings() { // Ensure that the mappings for the index are out-of-date, so that the security index manager will // attempt to update them. - int previousVersion = INTERNAL_MAIN_INDEX_MAPPINGS_FORMAT - 1; + int previousVersion = randomValueOtherThanMany( + v -> v.onOrAfter(SecurityMainIndexMappingVersion.latest()), + () -> randomFrom(SecurityMainIndexMappingVersion.values()) + ).id(); // State recovered with index, with mappings with a prior version ClusterState.Builder clusterStateBuilder = createClusterState( @@ -419,11 +422,15 @@ public void testCannotUpdateIndexMappingsWhenMinMappingVersionTooLow() { // Hard-code a failure here. doReturn("Nope").when(descriptorSpy).getMinimumMappingsVersionMessage(anyString()); - doReturn(null).when(descriptorSpy).getDescriptorCompatibleWith(eq(new SystemIndexDescriptor.MappingsVersion(1, 0))); + doReturn(null).when(descriptorSpy) + .getDescriptorCompatibleWith(eq(new SystemIndexDescriptor.MappingsVersion(SecurityMainIndexMappingVersion.latest().id(), 0))); // Ensure that the mappings for the index are out-of-date, so that the security index manager will // attempt to update them. 
- int previousVersion = INTERNAL_MAIN_INDEX_MAPPINGS_FORMAT - 1; + int previousVersion = randomValueOtherThanMany( + v -> v.onOrAfter(SecurityMainIndexMappingVersion.latest()), + () -> randomFrom(SecurityMainIndexMappingVersion.values()) + ).id(); ClusterState.Builder clusterStateBuilder = createClusterState( TestRestrictedIndices.INTERNAL_SECURITY_MAIN_INDEX_7, @@ -457,7 +464,7 @@ public void testNoUpdateWhenIndexMappingsVersionNotBumped() { SecuritySystemIndices.SECURITY_MAIN_ALIAS, SecuritySystemIndices.INTERNAL_MAIN_INDEX_FORMAT, IndexMetadata.State.OPEN, - getMappings(INTERNAL_MAIN_INDEX_MAPPINGS_FORMAT) + getMappings(SecurityMainIndexMappingVersion.latest().id()) ); manager.clusterChanged(event(markShardsAvailable(clusterStateBuilder))); manager.prepareIndexIfNeededThenExecute(prepareException::set, () -> prepareRunnableCalled.set(true)); @@ -480,7 +487,7 @@ public void testNoUpdateWhenNoIndexMappingsVersionInClusterState() { SecuritySystemIndices.SECURITY_MAIN_ALIAS, SecuritySystemIndices.INTERNAL_MAIN_INDEX_FORMAT, IndexMetadata.State.OPEN, - getMappings(INTERNAL_MAIN_INDEX_MAPPINGS_FORMAT), + getMappings(SecurityMainIndexMappingVersion.latest().id()), Map.of() ); manager.clusterChanged(event(markShardsAvailable(clusterStateBuilder))); @@ -628,7 +635,7 @@ private static ClusterState.Builder createClusterState( format, state, mappings, - Map.of(indexName, new SystemIndexDescriptor.MappingsVersion(1, 0)) + Map.of(indexName, new SystemIndexDescriptor.MappingsVersion(SecurityMainIndexMappingVersion.latest().id(), 0)) ); } @@ -689,7 +696,7 @@ private static IndexMetadata.Builder getIndexMetadata( } private static String getMappings() { - return getMappings(INTERNAL_MAIN_INDEX_MAPPINGS_FORMAT); + return getMappings(SecurityMainIndexMappingVersion.latest().id()); } private static String getMappings(Integer version) { diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/support/SecurityMainIndexMappingVersionTests.java 
b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/support/SecurityMainIndexMappingVersionTests.java new file mode 100644 index 0000000000000..7550b96fdf4f9 --- /dev/null +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/support/SecurityMainIndexMappingVersionTests.java @@ -0,0 +1,35 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.security.support; + +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.security.support.SecuritySystemIndices.SecurityMainIndexMappingVersion; + +import java.util.HashMap; +import java.util.Map; + +public class SecurityMainIndexMappingVersionTests extends ESTestCase { + + public void testVersionIdUniqueness() { + Map ids = new HashMap<>(); + for (var version : SecurityMainIndexMappingVersion.values()) { + var existing = ids.put(version.id(), version); + if (existing != null) { + fail( + "duplicate ID [" + + version.id() + + "] definition found in SecurityMainIndexMappingVersion for [" + + version + + "] and [" + + existing + + "]" + ); + } + } + } +} diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/SecurityServerTransportInterceptorTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/SecurityServerTransportInterceptorTests.java index 473cf5ee387b8..00f170a4cf8d8 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/SecurityServerTransportInterceptorTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/SecurityServerTransportInterceptorTests.java @@ -88,7 +88,7 @@ import static org.elasticsearch.xpack.core.ClientHelper.SECURITY_PROFILE_ORIGIN; import static 
org.elasticsearch.xpack.core.ClientHelper.TRANSFORM_ORIGIN; import static org.elasticsearch.xpack.core.security.authc.CrossClusterAccessSubjectInfo.CROSS_CLUSTER_ACCESS_SUBJECT_INFO_HEADER_KEY; -import static org.elasticsearch.xpack.core.security.authz.RoleDescriptorTests.randomUniquelyNamedRoleDescriptors; +import static org.elasticsearch.xpack.core.security.authz.RoleDescriptorTestHelper.randomUniquelyNamedRoleDescriptors; import static org.elasticsearch.xpack.security.authc.CrossClusterAccessHeaders.CROSS_CLUSTER_ACCESS_CREDENTIALS_HEADER_KEY; import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.containsInAnyOrder; diff --git a/x-pack/plugin/security/src/test/resources/org/elasticsearch/xpack/security/authz/store/invalid_roles.yml b/x-pack/plugin/security/src/test/resources/org/elasticsearch/xpack/security/authz/store/invalid_roles.yml index 21e9d87189cf0..fa0addce53035 100644 --- a/x-pack/plugin/security/src/test/resources/org/elasticsearch/xpack/security/authz/store/invalid_roles.yml +++ b/x-pack/plugin/security/src/test/resources/org/elasticsearch/xpack/security/authz/store/invalid_roles.yml @@ -58,3 +58,6 @@ role6: workflows: - workflow1 - workflow2 +role7: + description: + 
"tJywjBJUSwXDiRtpoJxEotFupzVVUIfwnoFMFiTwRoFiURksYxmQOaoykJIYwFvNpiGnfFePFUrCPTEbDXPkXQudrpBikHSQmdqvNjxXvktEghvvIQuzZitqwKjmnQvqlDfqYXSccRiqEslDdkjdcXPmSSggJMqrXmkdNtwBItbjLpHdNPuSgVYLwcBCblGHysaXJFcZHLFbqhirxNGTkENBMpzTXjsMXwSEnqKUZtDSckxGUyFfKXCvumgJkjLrrBvSxjnanuHpmXzUlFGEHqqxJjAstxSGKnPPzzsuZAlsrLTAzAdpBOnLDMdOBDyAweiCLzIvyfwuTWcOMGRWItPUdEdqcLjlYRhOgpTuWsDQcrCYnlIuiEpBodlGwaCDYnppZWmBDMyQCSPSTCwjilXtqmTuwuxwfyCNLbqNWjzKOPhEPsKjuvNpexRhleNgMqrDpmhWOZzRZMDnLYIjNJZKdsgErOoVuyUlJAKnJlpevIZUjXDIyybxXaaFGztppkpMAOVLFHjbiJuGVDdpyBHwxlyvPJOgVeViYZNiKEOWmaIypbuWenBnYRvSdYiHHaSLwuNILDIrAqoNBiFBdMhuLvTKOkepMYFcbXpYqLWYmtPYIVXGfHPUgmYhhsfIatqwhhnefxfTeqqUlVLmLcNAjiBFiiCRfiQvtvWOWJyfATrUeCVNfquIXHzHQWPWtbpeTiYTUvEPQWeeTjKpHrycLmKpsWjCLteqlutXgaeLSAvDvbvrlJZyAWflVnuzdcNxtzfcEocKsoJGOfjKXyQlxapPvOyDZYbvHYoYljYHTrEVPbMOQuwMxKPYkbyEDJuMqOtfgqVHZpsaimFmQjTlAdNOwtDTJdJhZVzgpVTWZCJRBopvQZgbIzPEJOoCVlYRhLDRARxmlrxrAMApKaZxfiMDyhMVZKXCankStqBfYSYOmtYMvkARtngxNINwAehRhDNMZoZuGTylxteKhLqFVKudMuSCpRfCxjNsanWHVvghUJYpcxildbvAhgpU" diff --git a/x-pack/plugin/security/src/test/resources/org/elasticsearch/xpack/security/authz/store/roles.yml b/x-pack/plugin/security/src/test/resources/org/elasticsearch/xpack/security/authz/store/roles.yml index cb956ff970800..ec0d325566127 100644 --- a/x-pack/plugin/security/src/test/resources/org/elasticsearch/xpack/security/authz/store/roles.yml +++ b/x-pack/plugin/security/src/test/resources/org/elasticsearch/xpack/security/authz/store/roles.yml @@ -92,3 +92,9 @@ role_remote: - 'remote-*' privileges: - "monitor_enrich" + +role_with_description: + description: + "Allows all security-related operations!" 
+ cluster: + - manage_security diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/roles/10_basic.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/roles/10_basic.yml index edc79a8ebfc9e..db4ea4e8b205d 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/roles/10_basic.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/roles/10_basic.yml @@ -29,7 +29,10 @@ teardown: security.delete_role: name: "backwards_role" ignore: 404 - + - do: + security.delete_role: + name: "role_with_description" + ignore: 404 --- "Test put role api": - do: @@ -83,3 +86,21 @@ teardown: - match: { admin_role.metadata.key2: "val2" } - match: { admin_role.indices.0.names.0: "*" } - match: { admin_role.indices.0.privileges.0: "all" } + + - do: + security.put_role: + name: "role_with_description" + body: > + { + "description": "Allows all security-related operations such as CRUD operations on users and roles and cache clearing.", + "cluster": ["manage_security"] + } + - match: { role: { created: true } } + + - do: + headers: + Authorization: "Basic am9lOnMza3JpdC1wYXNzd29yZA==" + security.get_role: + name: "role_with_description" + - match: { role_with_description.cluster.0: "manage_security" } + - match: { role_with_description.description: "Allows all security-related operations such as CRUD operations on users and roles and cache clearing." 
} diff --git a/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/ApiKeyBackwardsCompatibilityIT.java b/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/ApiKeyBackwardsCompatibilityIT.java index 84c8b0bd95b4f..8a775c7f7d3d8 100644 --- a/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/ApiKeyBackwardsCompatibilityIT.java +++ b/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/ApiKeyBackwardsCompatibilityIT.java @@ -26,7 +26,6 @@ import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xpack.core.security.authc.Authentication; import org.elasticsearch.xpack.core.security.authz.RoleDescriptor; -import org.elasticsearch.xpack.core.security.authz.RoleDescriptorTests; import org.elasticsearch.xpack.core.security.authz.permission.RemoteClusterPermissions; import org.elasticsearch.xpack.core.security.user.User; import org.elasticsearch.xpack.test.SecuritySettingsSourceField; @@ -44,6 +43,11 @@ import java.util.function.Consumer; import static org.elasticsearch.transport.RemoteClusterPortSettings.TRANSPORT_VERSION_ADVANCED_REMOTE_CLUSTER_SECURITY; +import static org.elasticsearch.xpack.core.security.authz.RoleDescriptorTestHelper.randomApplicationPrivileges; +import static org.elasticsearch.xpack.core.security.authz.RoleDescriptorTestHelper.randomIndicesPrivileges; +import static org.elasticsearch.xpack.core.security.authz.RoleDescriptorTestHelper.randomRemoteClusterPermissions; +import static org.elasticsearch.xpack.core.security.authz.RoleDescriptorTestHelper.randomRemoteIndicesPrivileges; +import static org.elasticsearch.xpack.core.security.authz.RoleDescriptorTestHelper.randomRoleDescriptorMetadata; import static org.hamcrest.Matchers.anyOf; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; @@ -420,16 +424,15 @@ private static RoleDescriptor randomRoleDescriptor(boolean includeRemoteDescript return new RoleDescriptor( 
randomAlphaOfLengthBetween(3, 90), randomSubsetOf(Set.of("all", "monitor", "none")).toArray(String[]::new), - RoleDescriptorTests.randomIndicesPrivileges(0, 3, excludedPrivileges), - RoleDescriptorTests.randomApplicationPrivileges(), + randomIndicesPrivileges(0, 3, excludedPrivileges), + randomApplicationPrivileges(), null, generateRandomStringArray(5, randomIntBetween(2, 8), false, true), - RoleDescriptorTests.randomRoleDescriptorMetadata(false), + randomRoleDescriptorMetadata(false), Map.of(), - includeRemoteDescriptors ? RoleDescriptorTests.randomRemoteIndicesPrivileges(1, 3, excludedPrivileges) : null, - includeRemoteDescriptors - ? RoleDescriptorTests.randomRemoteClusterPermissions(randomIntBetween(1, 3)) - : RemoteClusterPermissions.NONE, + includeRemoteDescriptors ? randomRemoteIndicesPrivileges(1, 3, excludedPrivileges) : null, + includeRemoteDescriptors ? randomRemoteClusterPermissions(randomIntBetween(1, 3)) : RemoteClusterPermissions.NONE, + null, null ); } diff --git a/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/RolesBackwardsCompatibilityIT.java b/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/RolesBackwardsCompatibilityIT.java new file mode 100644 index 0000000000000..4f4ff1d5743ee --- /dev/null +++ b/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/RolesBackwardsCompatibilityIT.java @@ -0,0 +1,268 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.upgrades; + +import org.apache.http.HttpHost; +import org.elasticsearch.Build; +import org.elasticsearch.TransportVersion; +import org.elasticsearch.TransportVersions; +import org.elasticsearch.client.Request; +import org.elasticsearch.client.Response; +import org.elasticsearch.client.RestClient; +import org.elasticsearch.test.XContentTestUtils; +import org.elasticsearch.test.rest.ObjectPath; +import org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.xpack.core.security.authz.RoleDescriptor; + +import java.io.IOException; +import java.io.UncheckedIOException; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Set; + +import static org.elasticsearch.xpack.core.security.authz.RoleDescriptorTestHelper.randomApplicationPrivileges; +import static org.elasticsearch.xpack.core.security.authz.RoleDescriptorTestHelper.randomIndicesPrivileges; +import static org.elasticsearch.xpack.core.security.authz.RoleDescriptorTestHelper.randomRoleDescriptorMetadata; +import static org.hamcrest.Matchers.allOf; +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.notNullValue; + +public class RolesBackwardsCompatibilityIT extends AbstractUpgradeTestCase { + + private RestClient oldVersionClient = null; + private RestClient newVersionClient = null; + + public void testCreatingAndUpdatingRoles() throws Exception { + assumeTrue( + "The role description is supported after transport version: " + TransportVersions.SECURITY_ROLE_DESCRIPTION, + minimumTransportVersion().before(TransportVersions.SECURITY_ROLE_DESCRIPTION) + ); + switch (CLUSTER_TYPE) { + case OLD -> { + // Creating role in "old" cluster should succeed when description is not provided + final String initialRole = randomRoleDescriptorSerialized(false); + createRole(client(), "my-old-role", initialRole); + updateRole("my-old-role", 
randomValueOtherThan(initialRole, () -> randomRoleDescriptorSerialized(false))); + + // and fail if we include description + var createException = expectThrows( + Exception.class, + () -> createRole(client(), "my-invalid-old-role", randomRoleDescriptorSerialized(true)) + ); + assertThat( + createException.getMessage(), + containsString("failed to parse role [my-invalid-old-role]. unexpected field [description]") + ); + + RestClient client = client(); + var updateException = expectThrows( + Exception.class, + () -> updateRole(client, "my-old-role", randomRoleDescriptorSerialized(true)) + ); + assertThat( + updateException.getMessage(), + containsString("failed to parse role [my-old-role]. unexpected field [description]") + ); + } + case MIXED -> { + try { + this.createClientsByVersion(); + // succeed when role description is not provided + final String initialRole = randomRoleDescriptorSerialized(false); + createRole(client(), "my-valid-mixed-role", initialRole); + updateRole("my-valid-mixed-role", randomValueOtherThan(initialRole, () -> randomRoleDescriptorSerialized(false))); + + // against old node, fail when description is provided either in update or create request + { + Exception e = expectThrows( + Exception.class, + () -> updateRole(oldVersionClient, "my-valid-mixed-role", randomRoleDescriptorSerialized(true)) + ); + assertThat( + e.getMessage(), + allOf(containsString("failed to parse role"), containsString("unexpected field [description]")) + ); + } + { + Exception e = expectThrows( + Exception.class, + () -> createRole(oldVersionClient, "my-invalid-mixed-role", randomRoleDescriptorSerialized(true)) + ); + assertThat( + e.getMessage(), + containsString("failed to parse role [my-invalid-mixed-role]. 
unexpected field [description]") + ); + } + + // and against new node in a mixed cluster we should fail + { + Exception e = expectThrows( + Exception.class, + () -> createRole(newVersionClient, "my-invalid-mixed-role", randomRoleDescriptorSerialized(true)) + ); + assertThat( + e.getMessage(), + containsString( + "all nodes must have version [" + + TransportVersions.SECURITY_ROLE_DESCRIPTION.toReleaseVersion() + + "] or higher to support specifying role description" + ) + ); + } + { + Exception e = expectThrows( + Exception.class, + () -> updateRole(newVersionClient, "my-valid-mixed-role", randomRoleDescriptorSerialized(true)) + ); + assertThat( + e.getMessage(), + containsString( + "all nodes must have version [" + + TransportVersions.SECURITY_ROLE_DESCRIPTION.toReleaseVersion() + + "] or higher to support specifying role description" + ) + ); + } + } finally { + this.closeClientsByVersion(); + } + } + case UPGRADED -> { + // on upgraded cluster which supports new description field + // create/update requests should succeed either way (with or without description) + final String initialRole = randomRoleDescriptorSerialized(randomBoolean()); + createRole(client(), "my-valid-upgraded-role", initialRole); + updateRole( + "my-valid-upgraded-role", + randomValueOtherThan(initialRole, () -> randomRoleDescriptorSerialized(randomBoolean())) + ); + } + } + } + + private void createRole(RestClient client, String roleName, String role) throws IOException { + final Request createRoleRequest = new Request("POST", "_security/role/" + roleName); + createRoleRequest.setJsonEntity(role); + var createRoleResponse = client.performRequest(createRoleRequest); + assertOK(createRoleResponse); + } + + private void updateRole(String roleName, String payload) throws IOException { + updateRole(client(), roleName, payload); + } + + private void updateRole(RestClient client, String roleName, String payload) throws IOException { + final Request updateRequest = new Request("PUT", 
"_security/role/" + roleName); + updateRequest.setJsonEntity(payload); + boolean created = assertOKAndCreateObjectPath(client.performRequest(updateRequest)).evaluate("role.created"); + assertThat(created, equalTo(false)); + } + + private static String randomRoleDescriptorSerialized(boolean includeDescription) { + try { + return XContentTestUtils.convertToXContent( + XContentTestUtils.convertToMap(randomRoleDescriptor(includeDescription)), + XContentType.JSON + ).utf8ToString(); + } catch (IOException e) { + throw new UncheckedIOException(e); + } + } + + private boolean nodeSupportRoleDescription(Map nodeDetails) { + String nodeVersionString = (String) nodeDetails.get("version"); + TransportVersion transportVersion = getTransportVersionWithFallback( + nodeVersionString, + nodeDetails.get("transport_version"), + () -> TransportVersions.ZERO + ); + + if (transportVersion.equals(TransportVersions.ZERO)) { + // In cases where we were not able to find a TransportVersion, a pre-8.8.0 node answered about a newer (upgraded) node. + // In that case, the node will be current (upgraded), and remote indices are supported for sure. 
+ var nodeIsCurrent = nodeVersionString.equals(Build.current().version()); + assertTrue(nodeIsCurrent); + return true; + } + return transportVersion.onOrAfter(TransportVersions.SECURITY_ROLE_DESCRIPTION); + } + + private void createClientsByVersion() throws IOException { + var clientsByCapability = getRestClientByCapability(); + if (clientsByCapability.size() == 2) { + for (Map.Entry client : clientsByCapability.entrySet()) { + if (client.getKey() == false) { + oldVersionClient = client.getValue(); + } else { + newVersionClient = client.getValue(); + } + } + assertThat(oldVersionClient, notNullValue()); + assertThat(newVersionClient, notNullValue()); + } else { + fail("expected 2 versions during rolling upgrade but got: " + clientsByCapability.size()); + } + } + + private void closeClientsByVersion() throws IOException { + if (oldVersionClient != null) { + oldVersionClient.close(); + oldVersionClient = null; + } + if (newVersionClient != null) { + newVersionClient.close(); + newVersionClient = null; + } + } + + @SuppressWarnings("unchecked") + private Map getRestClientByCapability() throws IOException { + Response response = client().performRequest(new Request("GET", "_nodes")); + assertOK(response); + ObjectPath objectPath = ObjectPath.createFromResponse(response); + Map nodesAsMap = objectPath.evaluate("nodes"); + Map> hostsByCapability = new HashMap<>(); + for (Map.Entry entry : nodesAsMap.entrySet()) { + Map nodeDetails = (Map) entry.getValue(); + var capabilitySupported = nodeSupportRoleDescription(nodeDetails); + Map httpInfo = (Map) nodeDetails.get("http"); + hostsByCapability.computeIfAbsent(capabilitySupported, k -> new ArrayList<>()) + .add(HttpHost.create((String) httpInfo.get("publish_address"))); + } + Map clientsByCapability = new HashMap<>(); + for (var entry : hostsByCapability.entrySet()) { + clientsByCapability.put(entry.getKey(), buildClient(restClientSettings(), entry.getValue().toArray(new HttpHost[0]))); + } + return clientsByCapability; + } + 
+ private static RoleDescriptor randomRoleDescriptor(boolean includeDescription) { + final Set excludedPrivileges = Set.of( + "cross_cluster_replication", + "cross_cluster_replication_internal", + "manage_data_stream_lifecycle" + ); + return new RoleDescriptor( + randomAlphaOfLengthBetween(3, 90), + randomSubsetOf(Set.of("all", "monitor", "none")).toArray(String[]::new), + randomIndicesPrivileges(0, 3, excludedPrivileges), + randomApplicationPrivileges(), + null, + generateRandomStringArray(5, randomIntBetween(2, 8), false, true), + randomRoleDescriptorMetadata(false), + Map.of(), + null, + null, + null, + includeDescription ? randomAlphaOfLength(20) : null + ); + } +} From 3c56eae070e0985f23c6ac0dda48104d000d6124 Mon Sep 17 00:00:00 2001 From: David Turner Date: Wed, 8 May 2024 10:39:55 +0100 Subject: [PATCH 055/117] Avoid closing shard under ICSS mutex (#108244) When a shard fails locally we remove it from the `IndicesService` and close it on a `GENERIC` thread, but do so while synchronized on the `IndicesClusterStateService`. With this commit we dispatch the work to close the shard into the background in order to release the mutex sooner. 
Relates #108145 --- .../cluster/IndicesClusterStateService.java | 45 ++++++++++--------- 1 file changed, 24 insertions(+), 21 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/indices/cluster/IndicesClusterStateService.java b/server/src/main/java/org/elasticsearch/indices/cluster/IndicesClusterStateService.java index fa2475921aa93..c682c44b47bab 100644 --- a/server/src/main/java/org/elasticsearch/indices/cluster/IndicesClusterStateService.java +++ b/server/src/main/java/org/elasticsearch/indices/cluster/IndicesClusterStateService.java @@ -1067,27 +1067,30 @@ public void accept(final IndexShard.ShardFailure shardFailure) { final ShardRouting shardRouting = shardFailure.routing(); threadPool.generic().execute(() -> { synchronized (IndicesClusterStateService.this) { - try { - CloseUtils.executeDirectly( - l -> failAndRemoveShard( - shardRouting, - true, - "shard failure, reason [" + shardFailure.reason() + "]", - shardFailure.cause(), - clusterService.state(), - EsExecutors.DIRECT_EXECUTOR_SERVICE /* NB holding mutex while closing shard, ES-8334 TODO revisit this? 
*/, - l - ) - ); - } catch (Exception e) { - // should not be possible - final var wrappedException = new IllegalStateException( - "unexpected failure in FailedShardHandler on " + shardRouting, - e - ); - logger.error(wrappedException.getMessage(), e); - assert false : e; - } + ActionListener.run(ActionListener.assertOnce(new ActionListener() { + @Override + public void onResponse(Void unused) {} + + @Override + public void onFailure(Exception e) { + final var wrappedException = new IllegalStateException( + "unexpected failure in FailedShardHandler on " + shardRouting, + e + ); + logger.error(wrappedException.getMessage(), e); + assert false : e; + } + }), + l -> failAndRemoveShard( + shardRouting, + true, + "shard failure, reason [" + shardFailure.reason() + "]", + shardFailure.cause(), + clusterService.state(), + shardCloseExecutor, + l + ) + ); } }); } From d53d056aaaa8b66abd7286483406c17f00dee6ee Mon Sep 17 00:00:00 2001 From: David Kyle Date: Wed, 8 May 2024 11:03:57 +0100 Subject: [PATCH 056/117] [ML] Fix startOffset must be non-negative error in XLMRoBERTa tokenizer (#107891) Fixes an error is when the normalisation step produces text that is longer than the input text and an offset needs to be added to map back to the original. 
--- docs/changelog/107891.yaml | 6 + .../PrecompiledCharMapNormalizer.java | 15 +- .../PrecompiledCharMapNormalizerTests.java | 37 + .../nlp/tokenizers/XLMRobertaTestVocab.java | 2179 +++++++++++++++++ .../tokenizers/XLMRobertaTokenizerTests.java | 24 + .../xlm_roberta_test_vocabulary.json | 2172 ++++++++++++++++ 6 files changed, 4431 insertions(+), 2 deletions(-) create mode 100644 docs/changelog/107891.yaml create mode 100644 x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/XLMRobertaTestVocab.java create mode 100644 x-pack/plugin/ml/src/test/resources/org/elasticsearch/xpack/ml/inference/tokenizers/xlm_roberta_test_vocabulary.json diff --git a/docs/changelog/107891.yaml b/docs/changelog/107891.yaml new file mode 100644 index 0000000000000..deb3fbd2258ff --- /dev/null +++ b/docs/changelog/107891.yaml @@ -0,0 +1,6 @@ +pr: 107891 +summary: Fix `startOffset` must be non-negative error in XLMRoBERTa tokenizer +area: Machine Learning +type: bug +issues: + - 104626 diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/PrecompiledCharMapNormalizer.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/PrecompiledCharMapNormalizer.java index 93dc8077196d7..bbe5bea691c35 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/PrecompiledCharMapNormalizer.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/PrecompiledCharMapNormalizer.java @@ -190,9 +190,20 @@ Reader normalize(CharSequence str) { BytesRef subStr = maybeSubStr.get(); int numChars = UnicodeUtil.UTF8toUTF16(subStr.bytes, subStr.offset, subStr.length, reusableCharDecodeBuffer); normalizedCharPos += numChars; - if (numChars != end - startIter) { - addOffCorrectMap(normalizedCharPos, getLastCumulativeDiff() + end - startIter - numChars); + int charDelta = numChars - (end - startIter); // output length - input length + if 
(charDelta < 0) { + // normalised form is shorter + int lastDiff = getLastCumulativeDiff(); + addOffCorrectMap(normalizedCharPos, lastDiff + charDelta); + } else if (charDelta > 0) { + // inserted chars, add the offset in the output stream + int lastDiff = getLastCumulativeDiff(); + int startOffset = normalizedCharPos - charDelta; + for (int i = 1; i <= charDelta; i++) { + addOffCorrectMap(startOffset + i, lastDiff - i); + } } + strBuilder.append(reusableCharDecodeBuffer, 0, numChars); bytePos += byteLen; continue; diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/PrecompiledCharMapNormalizerTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/PrecompiledCharMapNormalizerTests.java index eef9902d35e59..20b68b2b6e750 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/PrecompiledCharMapNormalizerTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/PrecompiledCharMapNormalizerTests.java @@ -62,6 +62,28 @@ public void testCharThatNormalizesToLongText() throws IOException { assertNormalization("ﷺ", parsed, "صلى الله عليه وسلم"); } + public void testOutOfBounds() throws IOException { + @SuppressWarnings("checkstyle:linelength") + String[] inputs = new String[] { + "ﷺ", + "Građevne strukture Mesa Verde dokaz su akumuliranog znanja i vještina koje su se stoljećima prenosile generacijama civilizacije Anasazi. Vrhunce svojih dosega ostvarili su u 12. i 13. stoljeću, kada su sagrađene danas najpoznatije građevine na liticama. Zidali su obrađenim pješčenjakom, tvrđim kamenom oblikovanim do veličine štruce kruha. Kao žbuku između ciglā stavljali su glinu razmočenu vodom. Tim su materijalom gradili prostorije veličine do 6 četvornih metara. 
U potkrovljima su skladištili žitarice i druge plodine, dok su kive - ceremonijalne prostorije - gradili ispred soba, ali ukopane u zemlju, nešto poput današnjih podruma. Kiva je bila vrhunski dizajnirana prostorija okruglog oblika s prostorom za vatru zimi te s dovodom hladnog zraka za klimatizaciju ljeti. U zidane konstrukcije stavljali su i lokalno posječena stabla, što današnjim arheolozima pomaže u preciznom datiranju nastanka pojedine građevine metodom dendrokronologije. Ta stabla pridonose i teoriji o mogućem konačnom slomu ondašnjeg društva. Nakon što su, tijekom nekoliko stoljeća, šume do kraja srušene, a njihova obnova zbog sušne klime traje i po 200 godina, nije proteklo puno vremena do konačnog urušavanja civilizacije, koja se, na svojem vrhuncu osjećala nepobjedivom. 90 % sagrađenih naseobina ispod stijena ima do deset prostorija. ⅓ od ukupnog broja sagrađenih kuća ima jednu ili dvije kamene prostorije", + "Histoarysk wie in acre in stik lân dat 40 roeden (oftewol 1 furlong of ⅛ myl of 660 foet) lang wie, en 4 roeden (of 66 foet) breed. Men is fan tinken dat dat likernôch de grûnmjitte wie dy't men mei in jok oksen yn ien dei beploegje koe.", + "創業当初の「太平洋化学工業社」から1959年太平洋化学工業株式会社へ、1987年には太平洋化学㈱に社名を変更。 1990年以降、海外拠点を増やし本格的な国際進出を始動。 創業者がつくりあげた化粧品会社を世界企業へと成長させるべく2002年3月英文社名AMOREPACIFICに改めた。", + "امام محمد بن جرير رح جن جي ولادت باسعادت 224 هجري طبرستان جي شهر آمل ۾ ٿي ، هي اهو دور هو جڏهن سلطنت عباسيه جو عروج هو ۽ سندس سڄي جمار عهد خلافت عباسيه ۾ گذري ، طبرستان هن وقت پڻ سياست ۽ مذهبي حلقن جنهن ۾ معتزلي ، خوارج ، باطني جو گهوارو هو ۽ ابن جرير جي ٻيهر طبرستان ورڻ وقت روافض جو عروج ٿي ويو هو ابن جرير رح جو نالو ، محمد بن جرير بن يزيد بن ڪثير بن غالب الطبري الآملي هو سندس کوڙ سار لقب آهن جنهن ۾ الامام ، المجتهد ، المفسر ، المورخ، المحدث ، الحافظ ، العلامه ، اللغوي ، المقريءَ ۽ سندس اهي سڀئي القاب سندس بزرگيت تي دلالت ڪن ٿيون . سندس ڪنيت (ابن جرير) هئي ۽ طبرستان ۽ آمل ڏينهن نسبت هجڻ ڪري پاڻ الطبري ۽ الآملي سڏرائيندا هئا. 
ابن جرير رح هڪ آسودي گهراني ۾ اک کولي ، سندس پيءُ هڪ ڏينهن خواب ڏٺائين ته ابن جرير رح نبي ڪريم ﷺ جي ٻنهي هٿن جي وچ ۾ آهن ۽ نبي ڪريمﷺ جي هٿن مبارڪن ۾ پٿريون آهن جنهن کي ابن جرير رح کڻي اڇلائي رهيا آهن ، عالمن کان جڏهن هن جي تعبير پڇا ڪيائين ته انهن چيو ته اوهان جو پٽ وڏو ٿي ڪري دين جي خدمت سرانجام ڏيندو ۽ اهو خواب ابن جرير جو علم حاصل ڪرڻ جو سبب بڻيو. ابن جرير رح ستن سالن ۾ قرآن مجيد حفظ ڪيائين اٺن سالم ۾ امامت جهڙو فريضو انجام ڏنائين نون سالن ۾ حديث لکڻ شروع ڪيائين ۽ جڏهن سورهن سالن جا ٿيا ته اماماحمد بن حنبل رح جي زيارت جو شوق ۾ بغداد ڏانهن سفر ڪرڻ شروع ڪيائين ، سندس سڄو خرچ ۽ بار پيءُ کڻدو هو جڏهن سندس والد جو انتقال ٿيو ته ورثي ۾ زمين جو ٽڪڙو مليس جنهن جي آمدني مان ابن جرير رح پنهنجو گذر سفر فرمائيندا هئا .", + "۱۔ ھن شق جي مطابق قادياني گروھ يا لاھوري گروھ جي ڪنھن رڪن کي جيڪو پاڻ کي 'احمدي' يا ڪنھن ٻي نالي سان پڪاري جي لاءِ ممنوع قرار ڏنو ويو آھي تہ ھو (الف) ڳالھائي، لکي يا ڪنھن ٻي طريقي سان ڪنھن خليفي يا آنحضور ﷺ جي ڪنھن صحابي کان علاوہڍه ڪنھن کي امير المومنين يا خليفہ المومنين يا خليفہ المسلمين يا صحابی يا رضي الله عنه چئي۔ (ب) آنحضور ﷺ جي گھروارين کان علاوه ڪنھن کي ام المومنين چئي۔ (ج) آنحضور ﷺ جي خاندان جي اھل بيت کان علاوہڍه ڪنھن کي اھل بيت چئي۔ (د) پنھنجي عبادت گاھ کي مسجد چئي۔", + "سعد بن فضالہ جو شام کے جہاد میں سہیل کے ساتھ تھے بیان کرتے ہیں کہ ایک مرتبہ سہیل نے کہا کہ میں نے رسول اللہ ﷺ سے سنا ہے کہ خدا کی راہ میں ایک گھڑی صرف کرنا گھر کے تمام عمر کے اعمال سے بہتر ہے، اس لیے اب میں شام کا جہاد چھوڑ کر گھر نہ جاؤں گا اور یہیں جان دونگا، اس عہد پر اس سختی سے قائم رہے کہ طاعون عمواس میں بھی نہ ہٹے اور 18ھ میں اسی وبا میں شام کے غربت کدہ میں جان دی۔", + "دعوت اسلام کے آغاز یعنی آنحضرتﷺ کے ارقم کے گھر میں تشریف لانے سے پہلے مشرف باسلام ہوئے،پھر ہجرت کے زمانہ میں مکہ سے مدینہ گئے آنحضرتﷺ نے غربت کی اجنبیت دورکرنے کے لیے ان میں اورابوعبیدہ بن تیہاں میں مواخاۃ کرادی۔", + "ضرار اپنے قبیلہ کے اصحاب ثروت میں تھے، عرب میں سب سے بڑی دولت اونٹ کے گلے تھے، ضرار کے پاس ہزار اونٹوں کا گلہ تھا، اسلام کے جذب وولولے میں تمام مال ودولت چھوڑ کر خالی ہاتھ آستانِ نبوی پر 
پہنچے قبول اسلام کے بعد آنحضرتﷺ نے بنی صید اوربنی ہذیل کی طرف بھیجا۔", + "(2) اگر زلیخا کو ملامت کرنے والی عورتیں آپ ﷺ کی جبین انور دیکھ پاتیں تو ہاتھوں کے بجائے اپنے دل کاٹنے کو ترجیح دیتیں۔صحیح بخاری میں ہے، حضرت عطاء بن یسار ؓہُنے حضرت عبداللہ بن عمرو ؓسے سیّدِ عالمﷺ کے وہ اوصاف دریافت کئے جو توریت میں مذکور ہیں تو انہوں نے فرمایا : ’’خدا کی قسم! حضور سیدُ المرسلینﷺ کے جو اوصاف قرآنِ کریم میں آئے ہیں انہیں میں سے بعض اوصاف توریت میں مذکور ہیں۔ اس کے بعد انہوں نے پڑھنا شروع کیا: اے نبی! ہم نے تمہیں شاہد و مُبَشِّر اور نذیر اور اُمِّیُّوں کا نگہبان بنا کر بھیجا، تم میرے بندے اور میرے رسول ہو، میں نے تمہارا نام متوکل رکھا،نہ بدخلق ہو نہ سخت مزاج، نہ بازاروں میں آواز بلند کرنے والے ہو نہ برائی سے برائی کو دفع کرنے والے بلکہ خطا کاروں کو معاف کرتے ہو اور ان پر احسان فرماتے ہو، اللہ تعالیٰ تمہیں نہ اٹھائے گا جب تک کہ تمہاری برکت سے غیر مستقیم ملت کو اس طرح راست نہ فرمادے کہ لوگ صدق و یقین کے ساتھ ’’ لَآاِلٰہَ اِلَّا اللہُ مُحَمَّدٌ رَّسُوْلُ اللہِ‘‘ پکارنے لگیں اور تمہاری بدولت اندھی آنکھیں بینا اور بہرے کان شنوا (سننے والے) اور پردوں میں لپٹے ہوئے دل کشادہ ہوجائیں۔ اور کعب احبارؓسے سرکارِ رسالت ﷺکی صفات میں توریت شریف کا یہ مضمون بھی منقول ہے کہ’’ اللہ تعالیٰ نے آپ ﷺکی صفت میں فرمایا کہ’’ میں اُنہیں ہر خوبی کے قابل کروں گا، اور ہر خُلقِ کریم عطا فرماؤں گا، اطمینانِ قلب اور وقار کو اُن کا لباس بناؤں گا اور طاعات وا حسان کو ان کا شعار کروں گا۔ تقویٰ کو ان کا ضمیر، حکمت کو ان کا راز، صدق و وفا کو اُن کی طبیعت ،عفوو کرم کو اُن کی عادت ، عدل کو ان کی سیرت، اظہارِ حق کو اُن کی شریعت، ہدایت کو اُن کا امام اور اسلام کو اُن کی ملت بناؤں گا۔ احمد اُن کا نام ہے، مخلوق کو اُن کے صدقے میں گمراہی کے بعد ہدایت اور جہالت کے بعد علم و معرفت اور گمنامی کے بعد رفعت و منزلت عطا کروں گا۔ اُنہیں کی برکت سے قلت کے بعد کثرت اور فقر کے بعد دولت اور تَفَرُّقے کے بعد محبت عنایت کروں گا، اُنہیں کی بدولت مختلف قبائل، غیر مجتمع خواہشوں اور اختلاف رکھنے والے دلوں میں اُلفت پیدا کروں گا اور اُن کی اُمت کو تمام اُمتوں سے بہتر کروں گا۔ ایک اور حدیث میں توریت سے حضور سید المرسلینﷺسے یہ اوصاف منقول ہیں 
’’میرے بندے احمد مختار، ان کی جائے ولادت مکہ مکرمہ اور جائے ہجرت مدینہ طیبہ ہے،اُن کی اُمت ہر حال میں اللہ تعالٰی کی کثیر حمد کرنے والی ہے۔ مُنَزَّہٌ عَنْ شَرِیْکٍ فِیْ مَحَاسِنِہٖ", + "بالآخر آنحضرتﷺ کے اس عفو وکرم نے یہ معجزہ دکھایا کہ سہیل حنین کی واپسی کے وقت آپ کے ساتھ ہوگئے اورمقام جعرانہ پہنچ کر خلعتِ اسلام سے سرفراز ہوئے آنحضرت ﷺ نے ازراہ مرحمت حنین کے مالِ غنیمت میں سے سو اونٹ عطا فرمائے، گو فتح مکہ کے بعد کے مسلمانوں کا شمار مؤلفۃ القلوب میں ہے، لیکن سہیل اس زمرہ میں اس حیثیت سے ممتاز ہیں کہ اسلام کے بعد ان سے کوئی بات اسلام کے خلاف ظہور پزیر نہیں ہوئی ،حافظ ابن حجرعسقلانی لکھتے ہیں، کان محمودالا سلام من حین اسلم۔", }; + + PrecompiledCharMapNormalizer.Config parsed = loadTestCharMap(); + + for (var s : inputs) { + normalise(s, parsed); + } + } + private void assertNormalization(String input, PrecompiledCharMapNormalizer.Config config, String expected) throws IOException { PrecompiledCharMapNormalizer normalizer = new PrecompiledCharMapNormalizer( config.offsets(), @@ -77,6 +99,21 @@ private void assertNormalization(String input, PrecompiledCharMapNormalizer.Conf } } + private void normalise(String input, PrecompiledCharMapNormalizer.Config config) throws IOException { + PrecompiledCharMapNormalizer normalizer = new PrecompiledCharMapNormalizer( + config.offsets(), + config.utf8str(), + new StringReader(input) + ); + char[] output = new char[64]; + int offset = 0; + int size = 64; + int read = normalizer.read(output, offset, size); + while (read > 0) { + read = normalizer.read(output, offset, size); + } + } + static PrecompiledCharMapNormalizer.Config loadTestCharMap() throws IOException { return PrecompiledCharMapNormalizer.fromBase64EncodedResource( "/org/elasticsearch/xpack/ml/inference.nlp.tokenizers/spm_precompiled_normalizer.txt" diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/XLMRobertaTestVocab.java 
b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/XLMRobertaTestVocab.java new file mode 100644 index 0000000000000..b0cce14c59114 --- /dev/null +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/XLMRobertaTestVocab.java @@ -0,0 +1,2179 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.ml.inference.nlp.tokenizers; + +import org.elasticsearch.xcontent.XContentParserConfiguration; +import org.elasticsearch.xcontent.json.JsonXContent; +import org.elasticsearch.xpack.ml.inference.nlp.Vocabulary; + +import java.io.IOException; + +/** + * {@link #loadMultiLingualTestVocab()} loads a vocabulary file containing + * a subset of the XLM RoBERTa vocabulary and scores sufficient to tokenize + * the strings in {@link #MULTILINUGAL_TEXTS}. + * + * {@link #EXPECTED_TOKENS} is the tokenization of {@link #MULTILINUGAL_TEXTS} + * using the vocabulary and scores in the test vocabulary returned by + * {@link #loadMultiLingualTestVocab()}. The expected tokens were produced by + * tokenizing {@link #MULTILINUGAL_TEXTS} with the HuggingFace transformers + * XLMRoBERTa tokenizer and mapping those tokens to the position of the same + * tokens in the test vocab. 
+ */ +public class XLMRobertaTestVocab { + + public static Vocabulary loadMultiLingualTestVocab() throws IOException { + try ( + var parser = JsonXContent.jsonXContent.createParser( + XContentParserConfiguration.EMPTY, + XLMRobertaTokenizer.class.getResourceAsStream( + "/org/elasticsearch/xpack/ml/inference/tokenizers/xlm_roberta_test_vocabulary.json" + ) + ) + ) { + return Vocabulary.PARSER.apply(parser, null); + } + } + + @SuppressWarnings("checkstyle:linelength") + public static String[] MULTILINUGAL_TEXTS = new String[] { + "Građevne strukture Mesa Verde dokaz su akumuliranog znanja i vještina koje su se stoljećima prenosile generacijama civilizacije Anasazi. Vrhunce svojih dosega ostvarili su u 12. i 13. stoljeću, kada su sagrađene danas najpoznatije građevine na liticama. Zidali su obrađenim pješčenjakom, tvrđim kamenom oblikovanim do veličine štruce kruha. Kao žbuku između ciglā stavljali su glinu razmočenu vodom. Tim su materijalom gradili prostorije veličine do 6 četvornih metara. U potkrovljima su skladištili žitarice i druge plodine, dok su kive - ceremonijalne prostorije - gradili ispred soba, ali ukopane u zemlju, nešto poput današnjih podruma. Kiva je bila vrhunski dizajnirana prostorija okruglog oblika s prostorom za vatru zimi te s dovodom hladnog zraka za klimatizaciju ljeti. U zidane konstrukcije stavljali su i lokalno posječena stabla, što današnjim arheolozima pomaže u preciznom datiranju nastanka pojedine građevine metodom dendrokronologije. Ta stabla pridonose i teoriji o mogućem konačnom slomu ondašnjeg društva. Nakon što su, tijekom nekoliko stoljeća, šume do kraja srušene, a njihova obnova zbog sušne klime traje i po 200 godina, nije proteklo puno vremena do konačnog urušavanja civilizacije, koja se, na svojem vrhuncu osjećala nepobjedivom. 90 % sagrađenih naseobina ispod stijena ima do deset prostorija. 
⅓ od ukupnog broja sagrađenih kuća ima jednu ili dvije kamene prostorije", + "Histoarysk wie in acre in stik lân dat 40 roeden (oftewol 1 furlong of ⅛ myl of 660 foet) lang wie, en 4 roeden (of 66 foet) breed. Men is fan tinken dat dat likernôch de grûnmjitte wie dy't men mei in jok oksen yn ien dei beploegje koe.", + "創業当初の「太平洋化学工業社」から1959年太平洋化学工業株式会社へ、1987年には太平洋化学㈱に社名を変更。 1990年以降、海外拠点を増やし本格的な国際進出を始動。 創業者がつくりあげた化粧品会社を世界企業へと成長させるべく2002年3月英文社名AMOREPACIFICに改めた。", + "امام محمد بن جرير رح جن جي ولادت باسعادت 224 هجري طبرستان جي شهر آمل ۾ ٿي ، هي اهو دور هو جڏهن سلطنت عباسيه جو عروج هو ۽ سندس سڄي جمار عهد خلافت عباسيه ۾ گذري ، طبرستان هن وقت پڻ سياست ۽ مذهبي حلقن جنهن ۾ معتزلي ، خوارج ، باطني جو گهوارو هو ۽ ابن جرير جي ٻيهر طبرستان ورڻ وقت روافض جو عروج ٿي ويو هو ابن جرير رح جو نالو ، محمد بن جرير بن يزيد بن ڪثير بن غالب الطبري الآملي هو سندس کوڙ سار لقب آهن جنهن ۾ الامام ، المجتهد ، المفسر ، المورخ، المحدث ، الحافظ ، العلامه ، اللغوي ، المقريءَ ۽ سندس اهي سڀئي القاب سندس بزرگيت تي دلالت ڪن ٿيون . سندس ڪنيت (ابن جرير) هئي ۽ طبرستان ۽ آمل ڏينهن نسبت هجڻ ڪري پاڻ الطبري ۽ الآملي سڏرائيندا هئا. ابن جرير رح هڪ آسودي گهراني ۾ اک کولي ، سندس پيءُ هڪ ڏينهن خواب ڏٺائين ته ابن جرير رح نبي ڪريم ﷺ جي ٻنهي هٿن جي وچ ۾ آهن ۽ نبي ڪريمﷺ جي هٿن مبارڪن ۾ پٿريون آهن جنهن کي ابن جرير رح کڻي اڇلائي رهيا آهن ، عالمن کان جڏهن هن جي تعبير پڇا ڪيائين ته انهن چيو ته اوهان جو پٽ وڏو ٿي ڪري دين جي خدمت سرانجام ڏيندو ۽ اهو خواب ابن جرير جو علم حاصل ڪرڻ جو سبب بڻيو. 
ابن جرير رح ستن سالن ۾ قرآن مجيد حفظ ڪيائين اٺن سالم ۾ امامت جهڙو فريضو انجام ڏنائين نون سالن ۾ حديث لکڻ شروع ڪيائين ۽ جڏهن سورهن سالن جا ٿيا ته اماماحمد بن حنبل رح جي زيارت جو شوق ۾ بغداد ڏانهن سفر ڪرڻ شروع ڪيائين ، سندس سڄو خرچ ۽ بار پيءُ کڻدو هو جڏهن سندس والد جو انتقال ٿيو ته ورثي ۾ زمين جو ٽڪڙو مليس جنهن جي آمدني مان ابن جرير رح پنهنجو گذر سفر فرمائيندا هئا .", + "۱۔ ھن شق جي مطابق قادياني گروھ يا لاھوري گروھ جي ڪنھن رڪن کي جيڪو پاڻ کي 'احمدي' يا ڪنھن ٻي نالي سان پڪاري جي لاءِ ممنوع قرار ڏنو ويو آھي تہ ھو (الف) ڳالھائي، لکي يا ڪنھن ٻي طريقي سان ڪنھن خليفي يا آنحضور ﷺ جي ڪنھن صحابي کان علاوہڍه ڪنھن کي امير المومنين يا خليفہ المومنين يا خليفہ المسلمين يا صحابی يا رضي الله عنه چئي۔ (ب) آنحضور ﷺ جي گھروارين کان علاوه ڪنھن کي ام المومنين چئي۔ (ج) آنحضور ﷺ جي خاندان جي اھل بيت کان علاوہڍه ڪنھن کي اھل بيت چئي۔ (د) پنھنجي عبادت گاھ کي مسجد چئي۔", + "سعد بن فضالہ جو شام کے جہاد میں سہیل کے ساتھ تھے بیان کرتے ہیں کہ ایک مرتبہ سہیل نے کہا کہ میں نے رسول اللہ ﷺ سے سنا ہے کہ خدا کی راہ میں ایک گھڑی صرف کرنا گھر کے تمام عمر کے اعمال سے بہتر ہے، اس لیے اب میں شام کا جہاد چھوڑ کر گھر نہ جاؤں گا اور یہیں جان دونگا، اس عہد پر اس سختی سے قائم رہے کہ طاعون عمواس میں بھی نہ ہٹے اور 18ھ میں اسی وبا میں شام کے غربت کدہ میں جان دی۔", + "دعوت اسلام کے آغاز یعنی آنحضرتﷺ کے ارقم کے گھر میں تشریف لانے سے پہلے مشرف باسلام ہوئے،پھر ہجرت کے زمانہ میں مکہ سے مدینہ گئے آنحضرتﷺ نے غربت کی اجنبیت دورکرنے کے لیے ان میں اورابوعبیدہ بن تیہاں میں مواخاۃ کرادی۔", + "ضرار اپنے قبیلہ کے اصحاب ثروت میں تھے، عرب میں سب سے بڑی دولت اونٹ کے گلے تھے، ضرار کے پاس ہزار اونٹوں کا گلہ تھا، اسلام کے جذب وولولے میں تمام مال ودولت چھوڑ کر خالی ہاتھ آستانِ نبوی پر پہنچے قبول اسلام کے بعد آنحضرتﷺ نے بنی صید اوربنی ہذیل کی طرف بھیجا۔", + "(2) اگر زلیخا کو ملامت کرنے والی عورتیں آپ ﷺ کی جبین انور دیکھ پاتیں تو ہاتھوں کے بجائے اپنے دل کاٹنے کو ترجیح دیتیں۔صحیح بخاری میں ہے، حضرت عطاء بن یسار ؓہُنے حضرت عبداللہ بن عمرو ؓسے سیّدِ عالمﷺ کے وہ اوصاف دریافت کئے جو توریت میں مذکور ہیں تو انہوں نے فرمایا : ’’خدا کی قسم! 
حضور سیدُ المرسلینﷺ کے جو اوصاف قرآنِ کریم میں آئے ہیں انہیں میں سے بعض اوصاف توریت میں مذکور ہیں۔ اس کے بعد انہوں نے پڑھنا شروع کیا: اے نبی! ہم نے تمہیں شاہد و مُبَشِّر اور نذیر اور اُمِّیُّوں کا نگہبان بنا کر بھیجا، تم میرے بندے اور میرے رسول ہو، میں نے تمہارا نام متوکل رکھا،نہ بدخلق ہو نہ سخت مزاج، نہ بازاروں میں آواز بلند کرنے والے ہو نہ برائی سے برائی کو دفع کرنے والے بلکہ خطا کاروں کو معاف کرتے ہو اور ان پر احسان فرماتے ہو، اللہ تعالیٰ تمہیں نہ اٹھائے گا جب تک کہ تمہاری برکت سے غیر مستقیم ملت کو اس طرح راست نہ فرمادے کہ لوگ صدق و یقین کے ساتھ ’’ لَآاِلٰہَ اِلَّا اللہُ مُحَمَّدٌ رَّسُوْلُ اللہِ‘‘ پکارنے لگیں اور تمہاری بدولت اندھی آنکھیں بینا اور بہرے کان شنوا (سننے والے) اور پردوں میں لپٹے ہوئے دل کشادہ ہوجائیں۔ اور کعب احبارؓسے سرکارِ رسالت ﷺکی صفات میں توریت شریف کا یہ مضمون بھی منقول ہے کہ’’ اللہ تعالیٰ نے آپ ﷺکی صفت میں فرمایا کہ’’ میں اُنہیں ہر خوبی کے قابل کروں گا، اور ہر خُلقِ کریم عطا فرماؤں گا، اطمینانِ قلب اور وقار کو اُن کا لباس بناؤں گا اور طاعات وا حسان کو ان کا شعار کروں گا۔ تقویٰ کو ان کا ضمیر، حکمت کو ان کا راز، صدق و وفا کو اُن کی طبیعت ،عفوو کرم کو اُن کی عادت ، عدل کو ان کی سیرت، اظہارِ حق کو اُن کی شریعت، ہدایت کو اُن کا امام اور اسلام کو اُن کی ملت بناؤں گا۔ احمد اُن کا نام ہے، مخلوق کو اُن کے صدقے میں گمراہی کے بعد ہدایت اور جہالت کے بعد علم و معرفت اور گمنامی کے بعد رفعت و منزلت عطا کروں گا۔ اُنہیں کی برکت سے قلت کے بعد کثرت اور فقر کے بعد دولت اور تَفَرُّقے کے بعد محبت عنایت کروں گا، اُنہیں کی بدولت مختلف قبائل، غیر مجتمع خواہشوں اور اختلاف رکھنے والے دلوں میں اُلفت پیدا کروں گا اور اُن کی اُمت کو تمام اُمتوں سے بہتر کروں گا۔ ایک اور حدیث میں توریت سے حضور سید المرسلینﷺسے یہ اوصاف منقول ہیں ’’میرے بندے احمد مختار، ان کی جائے ولادت مکہ مکرمہ اور جائے ہجرت مدینہ طیبہ ہے،اُن کی اُمت ہر حال میں اللہ تعالٰی کی کثیر حمد کرنے والی ہے۔ مُنَزَّہٌ عَنْ شَرِیْکٍ فِیْ مَحَاسِنِہٖ", + "بالآخر آنحضرتﷺ کے اس عفو وکرم نے یہ معجزہ دکھایا کہ سہیل حنین کی واپسی کے وقت آپ کے ساتھ ہوگئے اورمقام جعرانہ پہنچ کر خلعتِ اسلام سے سرفراز ہوئے آنحضرت ﷺ نے ازراہ 
مرحمت حنین کے مالِ غنیمت میں سے سو اونٹ عطا فرمائے، گو فتح مکہ کے بعد کے مسلمانوں کا شمار مؤلفۃ القلوب میں ہے، لیکن سہیل اس زمرہ میں اس حیثیت سے ممتاز ہیں کہ اسلام کے بعد ان سے کوئی بات اسلام کے خلاف ظہور پزیر نہیں ہوئی ،حافظ ابن حجرعسقلانی لکھتے ہیں، کان محمودالا سلام من حین اسلم۔", }; + + public static int[][] EXPECTED_TOKENS = new int[][] { + { + 0, + 910, + 256, + 116, + 897, + 65, + 1039, + 830, + 287, + 993, + 660, + 770, + 67, + 619, + 455, + 802, + 73, + 785, + 993, + 990, + 565, + 666, + 194, + 1049, + 110, + 710, + 397, + 283, + 1073, + 666, + 276, + 79, + 486, + 30, + 959, + 912, + 577, + 571, + 658, + 1080, + 327, + 713, + 993, + 457, + 531, + 455, + 553, + 565, + 666, + 46, + 29, + 302, + 993, + 976, + 415, + 155, + 1050, + 956, + 65, + 441, + 65, + 888, + 84, + 511, + 30, + 547, + 908, + 993, + 174, + 350, + 74, + 454, + 500, + 139, + 1026, + 29, + 716, + 337, + 259, + 74, + 874, + 767, + 716, + 961, + 654, + 668, + 460, + 627, + 845, + 577, + 502, + 59, + 30, + 728, + 546, + 140, + 804, + 659, + 67, + 792, + 716, + 358, + 713, + 993, + 783, + 755, + 330, + 278, + 755, + 925, + 74, + 30, + 871, + 993, + 416, + 767, + 1040, + 713, + 331, + 1016, + 460, + 668, + 419, + 568, + 148, + 326, + 306, + 30, + 440, + 36, + 742, + 398, + 727, + 993, + 389, + 795, + 373, + 1009, + 681, + 577, + 455, + 410, + 246, + 1062, + 29, + 641, + 993, + 788, + 921, + 413, + 483, + 329, + 737, + 331, + 1016, + 413, + 1040, + 713, + 482, + 23, + 29, + 253, + 365, + 489, + 457, + 642, + 29, + 544, + 778, + 1077, + 68, + 27, + 379, + 59, + 30, + 639, + 965, + 48, + 52, + 851, + 773, + 331, + 1012, + 1076, + 481, + 661, + 461, + 331, + 767, + 166, + 1010, + 285, + 716, + 662, + 999, + 461, + 668, + 132, + 767, + 936, + 67, + 533, + 166, + 929, + 1046, + 677, + 456, + 124, + 30, + 440, + 183, + 954, + 730, + 65, + 716, + 358, + 713, + 993, + 455, + 637, + 748, + 40, + 472, + 149, + 527, + 709, + 29, + 490, + 1077, + 74, + 777, + 629, + 823, + 665, + 367, + 457, + 560, + 417, + 
497, + 478, + 888, + 889, + 684, + 821, + 65, + 441, + 65, + 605, + 74, + 679, + 840, + 736, + 150, + 666, + 30, + 479, + 527, + 709, + 94, + 510, + 864, + 455, + 1074, + 667, + 453, + 308, + 74, + 390, + 74, + 647, + 733, + 469, + 265, + 67, + 764, + 30, + 15, + 490, + 993, + 29, + 447, + 971, + 123, + 29, + 501, + 65, + 668, + 559, + 461, + 591, + 737, + 29, + 449, + 233, + 1034, + 16, + 121, + 993, + 428, + 528, + 65, + 474, + 455, + 1056, + 275, + 324, + 29, + 718, + 991, + 717, + 473, + 980, + 668, + 390, + 67, + 716, + 711, + 464, + 224, + 1073, + 666, + 29, + 811, + 990, + 29, + 888, + 616, + 191, + 184, + 768, + 709, + 846, + 62, + 994, + 144, + 30, + 142, + 409, + 976, + 415, + 65, + 326, + 888, + 575, + 543, + 384, + 537, + 17, + 1029, + 668, + 343, + 331, + 1012, + 30, + 422, + 44, + 33, + 1036, + 279, + 67, + 1053, + 976, + 415, + 65, + 326, + 101, + 1029, + 54, + 1027, + 272, + 874, + 65, + 331, + 1016, + 2 }, + { + 0, + 433, + 204, + 360, + 870, + 514, + 962, + 449, + 295, + 962, + 624, + 208, + 497, + 995, + 1071, + 65, + 538, + 412, + 760, + 883, + 592, + 422, + 707, + 858, + 1032, + 422, + 44, + 34, + 875, + 72, + 1032, + 716, + 254, + 896, + 600, + 24, + 873, + 514, + 29, + 695, + 425, + 1071, + 65, + 538, + 412, + 760, + 98, + 896, + 600, + 24, + 273, + 30, + 729, + 960, + 188, + 1001, + 596, + 497, + 497, + 485, + 76, + 178, + 579, + 679, + 914, + 950, + 74, + 459, + 883, + 514, + 686, + 21, + 80, + 741, + 745, + 962, + 781, + 70, + 716, + 1003, + 151, + 455, + 596, + 522, + 638, + 310, + 65, + 1066, + 1020, + 30, + 2 }, + { + 0, + 716, + 725, + 652, + 77, + 9, + 444, + 463, + 20, + 232, + 10, + 270, + 427, + 886, + 444, + 463, + 20, + 588, + 85, + 4, + 470, + 886, + 692, + 444, + 463, + 22, + 28, + 24, + 71, + 232, + 539, + 100, + 975, + 6, + 146, + 886, + 534, + 4, + 362, + 432, + 122, + 100, + 104, + 90, + 51, + 992, + 39, + 359, + 997, + 32, + 317, + 100, + 292, + 424, + 6, + 716, + 725, + 171, + 582, + 96, + 49, + 58, + 516, + 705, + 100, + 
320, + 377, + 968, + 701, + 333, + 86, + 47, + 610, + 886, + 33, + 979, + 115, + 232, + 539, + 731, + 586, + 581, + 1063, + 71, + 664, + 1075, + 6, + 2 }, + { + 0, + 548, + 1013, + 948, + 854, + 215, + 716, + 799, + 867, + 865, + 532, + 953, + 499, + 298, + 758, + 853, + 107, + 819, + 498, + 865, + 314, + 657, + 847, + 274, + 60, + 117, + 395, + 190, + 985, + 402, + 578, + 267, + 352, + 231, + 861, + 154, + 943, + 402, + 271, + 525, + 743, + 135, + 774, + 374, + 590, + 352, + 231, + 274, + 1078, + 117, + 107, + 819, + 498, + 400, + 361, + 282, + 738, + 271, + 439, + 1021, + 849, + 1038, + 274, + 243, + 673, + 93, + 117, + 484, + 797, + 117, + 716, + 200, + 127, + 861, + 825, + 219, + 852, + 402, + 271, + 669, + 854, + 215, + 865, + 923, + 107, + 819, + 498, + 394, + 931, + 361, + 716, + 941, + 11, + 861, + 154, + 943, + 60, + 670, + 402, + 669, + 854, + 215, + 716, + 799, + 861, + 385, + 117, + 1013, + 948, + 854, + 215, + 948, + 838, + 948, + 238, + 91, + 948, + 831, + 963, + 832, + 894, + 108, + 853, + 402, + 525, + 899, + 913, + 12, + 703, + 562, + 1038, + 274, + 900, + 798, + 117, + 554, + 688, + 815, + 117, + 958, + 45, + 117, + 535, + 800, + 782, + 958, + 613, + 117, + 926, + 761, + 117, + 926, + 1008, + 117, + 957, + 1004, + 853, + 117, + 958, + 31, + 207, + 859, + 271, + 525, + 198, + 1014, + 618, + 926, + 406, + 525, + 675, + 211, + 809, + 1048, + 152, + 905, + 689, + 716, + 30, + 525, + 905, + 211, + 412, + 615, + 849, + 854, + 215, + 24, + 706, + 271, + 107, + 819, + 498, + 271, + 657, + 847, + 506, + 5, + 569, + 63, + 363, + 963, + 832, + 271, + 894, + 108, + 853, + 1022, + 1030, + 378, + 635, + 30, + 669, + 854, + 215, + 716, + 799, + 325, + 651, + 355, + 1052, + 229, + 274, + 813, + 899, + 93, + 117, + 525, + 1059, + 860, + 325, + 506, + 353, + 220, + 891, + 119, + 789, + 669, + 854, + 215, + 716, + 799, + 301, + 63, + 848, + 714, + 550, + 749, + 614, + 865, + 754, + 423, + 849, + 865, + 443, + 274, + 562, + 271, + 301, + 63, + 848, + 693, + 550, + 
749, + 614, + 865, + 423, + 849, + 159, + 192, + 612, + 274, + 566, + 608, + 562, + 1038, + 904, + 669, + 854, + 215, + 716, + 799, + 982, + 125, + 898, + 847, + 687, + 744, + 562, + 117, + 368, + 849, + 690, + 578, + 400, + 865, + 720, + 262, + 806, + 933, + 789, + 587, + 536, + 789, + 202, + 861, + 266, + 769, + 60, + 63, + 1043, + 865, + 576, + 977, + 601, + 271, + 190, + 353, + 669, + 854, + 215, + 861, + 369, + 280, + 102, + 861, + 82, + 126, + 964, + 852, + 30, + 669, + 854, + 215, + 716, + 799, + 986, + 849, + 747, + 274, + 407, + 234, + 213, + 607, + 933, + 125, + 891, + 849, + 746, + 274, + 548, + 808, + 294, + 839, + 828, + 852, + 187, + 1018, + 771, + 716, + 766, + 747, + 274, + 19, + 78, + 347, + 933, + 271, + 578, + 451, + 849, + 747, + 820, + 509, + 789, + 548, + 203, + 948, + 716, + 739, + 648, + 716, + 799, + 865, + 772, + 861, + 25, + 274, + 227, + 380, + 672, + 102, + 347, + 933, + 117, + 525, + 735, + 214, + 271, + 952, + 1059, + 860, + 972, + 775, + 402, + 578, + 525, + 315, + 861, + 462, + 529, + 789, + 394, + 715, + 274, + 120, + 861, + 716, + 1058, + 418, + 241, + 824, + 1038, + 865, + 318, + 853, + 756, + 669, + 854, + 215, + 716, + 799, + 189, + 436, + 672, + 816, + 687, + 378, + 635, + 716, + 30, + 2 }, + { + 0, + 268, + 951, + 7, + 716, + 903, + 865, + 584, + 168, + 887, + 229, + 653, + 932, + 421, + 217, + 932, + 386, + 653, + 932, + 865, + 716, + 835, + 143, + 612, + 904, + 593, + 363, + 904, + 411, + 203, + 853, + 21, + 421, + 716, + 835, + 185, + 387, + 81, + 209, + 597, + 865, + 296, + 862, + 901, + 223, + 1005, + 670, + 437, + 1033, + 8, + 412, + 176, + 24, + 704, + 687, + 782, + 289, + 421, + 716, + 835, + 185, + 776, + 853, + 81, + 716, + 835, + 716, + 466, + 26, + 421, + 656, + 404, + 714, + 550, + 749, + 614, + 865, + 716, + 835, + 716, + 1031, + 853, + 690, + 519, + 902, + 850, + 716, + 835, + 904, + 128, + 958, + 625, + 230, + 421, + 1037, + 225, + 934, + 958, + 625, + 230, + 421, + 1037, + 225, + 934, + 345, + 421, + 716, + 
1031, + 942, + 421, + 371, + 550, + 408, + 307, + 951, + 412, + 807, + 24, + 656, + 404, + 714, + 550, + 749, + 614, + 865, + 555, + 219, + 230, + 690, + 491, + 716, + 835, + 904, + 924, + 958, + 625, + 230, + 307, + 951, + 412, + 810, + 24, + 656, + 404, + 714, + 550, + 749, + 614, + 865, + 396, + 865, + 125, + 1002, + 636, + 690, + 519, + 902, + 850, + 716, + 835, + 904, + 125, + 1002, + 636, + 307, + 951, + 412, + 815, + 24, + 206, + 312, + 791, + 932, + 904, + 617, + 307, + 951, + 2 }, + { + 0, + 349, + 948, + 927, + 186, + 861, + 556, + 779, + 763, + 83, + 112, + 180, + 779, + 1000, + 496, + 750, + 784, + 521, + 967, + 263, + 435, + 112, + 180, + 322, + 1069, + 967, + 83, + 322, + 179, + 611, + 714, + 550, + 749, + 614, + 133, + 762, + 321, + 967, + 841, + 780, + 493, + 83, + 263, + 1051, + 356, + 465, + 515, + 555, + 779, + 1081, + 344, + 779, + 1061, + 133, + 939, + 321, + 782, + 915, + 589, + 922, + 83, + 556, + 682, + 763, + 420, + 694, + 555, + 442, + 820, + 644, + 791, + 197, + 342, + 247, + 814, + 1017, + 685, + 782, + 915, + 480, + 35, + 915, + 645, + 133, + 1041, + 552, + 967, + 106, + 623, + 357, + 622, + 83, + 526, + 442, + 245, + 1024, + 197, + 1067, + 932, + 83, + 988, + 162, + 632, + 83, + 556, + 779, + 545, + 698, + 934, + 83, + 814, + 1072, + 2 }, + { + 0, + 335, + 316, + 779, + 467, + 572, + 656, + 1015, + 693, + 550, + 749, + 614, + 779, + 917, + 43, + 779, + 555, + 83, + 239, + 372, + 133, + 430, + 1023, + 944, + 291, + 1079, + 782, + 893, + 996, + 245, + 719, + 808, + 779, + 628, + 934, + 83, + 557, + 133, + 309, + 332, + 656, + 1015, + 693, + 550, + 749, + 614, + 322, + 545, + 780, + 125, + 517, + 157, + 985, + 595, + 236, + 779, + 589, + 945, + 83, + 197, + 615, + 175, + 732, + 884, + 948, + 134, + 257, + 708, + 83, + 716, + 319, + 740, + 937, + 694, + 966, + 951, + 2 }, + { + 0, + 716, + 328, + 391, + 1070, + 934, + 779, + 338, + 399, + 83, + 496, + 782, + 293, + 83, + 989, + 133, + 564, + 348, + 947, + 177, + 779, + 836, + 949, + 496, + 
782, + 716, + 328, + 779, + 523, + 383, + 947, + 177, + 255, + 682, + 836, + 934, + 103, + 782, + 316, + 779, + 216, + 162, + 609, + 199, + 83, + 1081, + 753, + 393, + 602, + 420, + 694, + 434, + 494, + 129, + 498, + 862, + 248, + 261, + 35, + 13, + 1057, + 316, + 779, + 475, + 656, + 1015, + 693, + 550, + 749, + 614, + 322, + 697, + 136, + 163, + 197, + 650, + 942, + 245, + 817, + 180, + 780, + 113, + 906, + 723, + 2 }, + { + 0, + 339, + 722, + 145, + 196, + 740, + 899, + 244, + 92, + 492, + 55, + 299, + 247, + 680, + 714, + 550, + 749, + 614, + 780, + 818, + 182, + 567, + 796, + 520, + 247, + 787, + 205, + 779, + 583, + 391, + 1048, + 682, + 890, + 236, + 899, + 281, + 674, + 1006, + 911, + 160, + 879, + 83, + 321, + 782, + 303, + 716, + 909, + 948, + 250, + 346, + 716, + 790, + 934, + 860, + 236, + 303, + 173, + 948, + 540, + 716, + 790, + 974, + 118, + 863, + 815, + 862, + 368, + 693, + 550, + 749, + 614, + 779, + 340, + 947, + 313, + 724, + 726, + 861, + 351, + 157, + 83, + 928, + 521, + 787, + 594, + 322, + 699, + 429, + 626, + 99, + 780, + 341, + 18, + 827, + 541, + 860, + 958, + 585, + 182, + 693, + 550, + 749, + 614, + 779, + 861, + 947, + 313, + 407, + 862, + 477, + 83, + 130, + 521, + 603, + 83, + 133, + 448, + 947, + 313, + 351, + 157, + 83, + 928, + 880, + 915, + 779, + 475, + 594, + 322, + 201, + 111, + 347, + 193, + 37, + 842, + 75, + 18, + 69, + 322, + 907, + 221, + 162, + 159, + 860, + 807, + 859, + 826, + 323, + 819, + 197, + 161, + 817, + 169, + 197, + 125, + 860, + 848, + 323, + 942, + 300, + 255, + 682, + 716, + 218, + 934, + 222, + 381, + 694, + 526, + 712, + 782, + 786, + 573, + 382, + 949, + 197, + 573, + 179, + 66, + 782, + 83, + 322, + 786, + 934, + 877, + 264, + 580, + 604, + 1042, + 782, + 228, + 938, + 476, + 66, + 442, + 468, + 41, + 782, + 442, + 235, + 255, + 83, + 1045, + 114, + 492, + 56, + 66, + 442, + 940, + 765, + 133, + 940, + 765, + 899, + 561, + 492, + 56, + 471, + 260, + 643, + 255, + 899, + 973, + 784, + 66, + 197, + 945, + 
35, + 1064, + 366, + 66, + 782, + 611, + 304, + 882, + 907, + 442, + 1047, + 702, + 791, + 818, + 1028, + 967, + 503, + 452, + 133, + 872, + 195, + 249, + 899, + 915, + 50, + 95, + 442, + 446, + 895, + 967, + 599, + 164, + 162, + 431, + 779, + 1000, + 626, + 170, + 859, + 803, + 806, + 862, + 847, + 882, + 934, + 859, + 125, + 862, + 847, + 290, + 806, + 611, + 860, + 159, + 860, + 812, + 859, + 848, + 290, + 815, + 855, + 143, + 290, + 824, + 860, + 852, + 866, + 847, + 860, + 611, + 862, + 450, + 105, + 236, + 392, + 247, + 197, + 503, + 938, + 602, + 945, + 805, + 288, + 655, + 806, + 197, + 869, + 935, + 690, + 1007, + 158, + 412, + 885, + 236, + 56, + 24, + 197, + 35, + 815, + 255, + 83, + 258, + 1024, + 1079, + 1048, + 691, + 620, + 934, + 876, + 951, + 197, + 237, + 981, + 920, + 192, + 790, + 974, + 984, + 676, + 862, + 458, + 714, + 550, + 749, + 614, + 671, + 969, + 83, + 351, + 157, + 354, + 682, + 342, + 563, + 526, + 251, + 918, + 321, + 967, + 14, + 14, + 611, + 304, + 882, + 322, + 680, + 714, + 550, + 749, + 614, + 671, + 445, + 83, + 699, + 967, + 14, + 14, + 83, + 125, + 860, + 649, + 57, + 388, + 779, + 881, + 694, + 255, + 791, + 782, + 197, + 57, + 131, + 860, + 88, + 862, + 477, + 97, + 446, + 644, + 791, + 782, + 678, + 862, + 549, + 197, + 162, + 405, + 899, + 125, + 860, + 849, + 682, + 334, + 948, + 700, + 791, + 197, + 138, + 1044, + 401, + 878, + 633, + 899, + 945, + 682, + 53, + 694, + 255, + 370, + 930, + 882, + 899, + 945, + 682, + 137, + 311, + 782, + 1055, + 899, + 945, + 682, + 109, + 782, + 164, + 162, + 716, + 570, + 899, + 125, + 860, + 849, + 780, + 61, + 117, + 558, + 852, + 574, + 899, + 125, + 860, + 849, + 780, + 634, + 117, + 305, + 899, + 945, + 780, + 542, + 808, + 782, + 438, + 862, + 892, + 899, + 125, + 860, + 849, + 780, + 507, + 782, + 504, + 899, + 125, + 860, + 849, + 682, + 548, + 197, + 316, + 899, + 125, + 860, + 849, + 780, + 249, + 948, + 700, + 370, + 505, + 125, + 860, + 849, + 682, + 2 }, + { + 0, + 212, + 
495, + 656, + 1015, + 693, + 550, + 749, + 614, + 779, + 915, + 716, + 558, + 162, + 595, + 848, + 322, + 342, + 243, + 721, + 934, + 1019, + 153, + 967, + 112, + 180, + 716, + 739, + 182, + 780, + 640, + 779, + 361, + 680, + 779, + 1000, + 518, + 197, + 848, + 426, + 135, + 987, + 284, + 414, + 694, + 1037, + 983, + 862, + 316, + 133, + 752, + 1079, + 656, + 1015, + 714, + 550, + 749, + 614, + 322, + 916, + 794, + 934, + 159, + 512, + 808, + 716, + 739, + 182, + 779, + 753, + 862, + 970, + 92, + 83, + 133, + 998, + 947, + 177, + 97, + 446, + 702, + 782, + 829, + 978, + 557, + 779, + 475, + 779, + 277, + 682, + 487, + 240, + 87, + 937, + 955, + 837, + 83, + 321, + 782, + 513, + 112, + 180, + 915, + 336, + 83, + 915, + 252, + 133, + 734, + 521, + 967, + 316, + 779, + 475, + 945, + 133, + 181, + 946, + 316, + 779, + 226, + 141, + 934, + 172, + 209, + 822, + 169, + 1035, + 1068, + 117, + 761, + 669, + 364, + 833, + 824, + 42, + 286, + 508, + 521, + 782, + 690, + 663, + 156, + 488, + 251, + 1065, + 915, + 89, + 951, + 2 }, }; + +} diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/XLMRobertaTokenizerTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/XLMRobertaTokenizerTests.java index bff2c6a94d789..3fd51601e0138 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/XLMRobertaTokenizerTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/XLMRobertaTokenizerTests.java @@ -124,6 +124,30 @@ public void testMultiByteEmoji() throws IOException { } } + public void testMultilingual() throws IOException { + var vocab = XLMRobertaTestVocab.loadMultiLingualTestVocab(); + + try ( + XLMRobertaTokenizer tokenizer = XLMRobertaTokenizer.builder( + vocab.get(), + vocab.scores(), + new XLMRobertaTokenization(false, null, Tokenization.Truncate.NONE, -1) + ).setWithSpecialTokens(true).build() + ) { + for (int i = 
0; i < XLMRobertaTestVocab.MULTILINUGAL_TEXTS.length; i++) { + logger.info(i); + TokenizationResult.Tokens tokenization = tokenizer.tokenize( + XLMRobertaTestVocab.MULTILINUGAL_TEXTS[i], + Tokenization.Truncate.FIRST, + -1, + 0, + null + ).get(0); + assertArrayEquals(XLMRobertaTestVocab.EXPECTED_TOKENS[i], tokenization.tokenIds()); + } + } + } + public void testTokenizeWithNeverSplit() throws IOException { try ( XLMRobertaTokenizer tokenizer = XLMRobertaTokenizer.builder( diff --git a/x-pack/plugin/ml/src/test/resources/org/elasticsearch/xpack/ml/inference/tokenizers/xlm_roberta_test_vocabulary.json b/x-pack/plugin/ml/src/test/resources/org/elasticsearch/xpack/ml/inference/tokenizers/xlm_roberta_test_vocabulary.json new file mode 100644 index 0000000000000..99f7d2ae34a04 --- /dev/null +++ b/x-pack/plugin/ml/src/test/resources/org/elasticsearch/xpack/ml/inference/tokenizers/xlm_roberta_test_vocabulary.json @@ -0,0 +1,2172 @@ +{ + "model_id": "anything_to_satisfy_the_parser", + "vocabulary": [ + "", + "", + "", + "", + "、", + "▁نسبت", + "。", + "▁ھن", + "▁ھو", + "「", + "」", + "فض", + "▁سار", + "▁پہنچے", + "’", + "▁Nakon", + "nova", + "jena", + "!", + "▁حديث", + "工業", + "'", + "(", + "▁soba", + ")", + "▁شوق", + "في", + "▁pod", + "株", + ",", + ".", + "قر", + "進", + "3", + "8", + "▁پر", + "▁pot", + ":", + "▁مختار", + "格", + "▁pos", + "▁مزاج", + "قل", + "قم", + "⁄", + "فسر", + "ću", + "く", + "▁bila", + "あげ", + "▁طرح", + "し", + "▁vrhunski", + "▁شعار", + "▁jednu", + "▁والی", + "▁والے", + "▁ہر", + "た", + "a", + "▁ٿي", + "▁طبیعت", + "b", + "▁ڪري", + "▁حدیث", + "e", + "▁ہو", + "g", + "h", + "▁ہم", + "k", + "に", + "l", + "ština", + "m", + "▁نبی", + "n", + "の", + "▁لکڻ", + "s", + "t", + "▁سان", + "▁سبب", + "▁میں", + "▁lit", + "へ", + "べ", + "لف", + "لق", + "لم", + "や", + "ثير", + "مت", + "لي", + "▁pri", + "▁راست", + "り", + "▁عطا", + "▁66", + "خدا", + "を", + "▁kuća", + "▁ڪرڻ", + "▁تھا", + "増", + "▁پکار", + "▁طا", + "▁طب", + "مل", + "▁راز", + "▁prenosi", + "نا", + "▁سہ", + "▁طرف", 
+ "▁بلند", + "英文", + "vne", + "▁،", + "▁سی", + "ائين", + "▁زمين", + "▁zbog", + "点", + "▁stoljeća", + "jeti", + "▁ا", + "▁ب", + "ني", + "▁امير", + "▁آ", + "▁آئے", + "▁خ", + "vod", + "▁سے", + "▁ت", + "▁ج", + "▁ص", + "▁ض", + "▁ط", + "čenja", + "buku", + "▁ظ", + "▁90", + "▁ر", + "vom", + "▁ز", + "▁1990", + "▁ش", + "vor", + "ena", + "ologi", + "▁yn", + "الت", + "یا", + "▁ع", + "ene", + "الا", + "یت", + "وا", + "▁م", + "یح", + "▁ن", + "▁و", + "ید", + "▁صدق", + "▁عن", + "▁za", + "▁ف", + "▁ق", + "یر", + "▁ل", + "者が", + "ور", + "▁عبداللہ", + "▁obra", + "وع", + "الف", + "نٹ", + "ô", + "▁رسول", + "یل", + "▁کوئی", + "ین", + "▁zi", + "uncu", + "▁ٻي", + "لہ", + "▁انجام", + "▁fan", + "▁پنهنجو", + "▁اهو", + "▁vrh", + "بار", + "▁کیا", + "ć", + "▁مستقیم", + "لی", + "▁اور", + "▁اهي", + "لے", + "باط", + "▁پڑھ", + "▁اوهان", + "احمد", + "isto", + "▁ہاتھوں", + "▁پنھنجي", + "يء", + "▁lân", + "▁پ", + "مہ", + "يت", + "▁بال", + "يد", + "▁خرچ", + "ير", + "▁جذب", + "▁لا", + "نگ", + "وار", + "▁ڏ", + "▁شاہد", + "بان", + "▁قرار", + "vanja", + "يف", + "▁خلاف", + "▁بغداد", + "نہ", + "اني", + "ين", + "يه", + "社", + "▁njihova", + "▁مج", + "▁بازار", + "نے", + "▁ک", + "▁ڪ", + "▁تشریف", + "▁مؤ", + "▁ملي", + "ایت", + "▁مع", + "▁ملا", + "▁ہ", + "▁plod", + "یں", + "▁نب", + "▁ملت", + "▁ی", + "▁من", + "▁حیثیت", + "▁ali", + "660", + "وں", + "đe", + "یہ", + "▁لپ", + "đi", + "▁خطا", + "وی", + "▁پڇ", + "▁ایک", + "▁نام", + "šnje", + "▁پٽ", + "▁سلطنت", + "▁۱", + "▁کثیر", + "から", + "▁۽", + "▁dvije", + "▁breed", + "▁۾", + "▁200", + "▁Ana", + "▁مسلمانوں", + "oče", + "▁ukupno", + "▁حاصل", + "▁ترجیح", + "▁پڻ", + "ama", + "انہ", + "tru", + "انی", + "▁dokaz", + "▁آنکھیں", + "▁لکي", + "َّ", + "سلام", + "始", + "▁عرب", + "▁جهڙو", + "cre", + "▁لاء", + "نام", + "▁224", + "▁عورت", + "ُّ", + "▁نبي", + "▁kada", + "▁حضرت", + "▁تعالی", + "▁عدل", + "▁metara", + "▁چئي", + "▁moguće", + "▁مدینہ", + "plo", + "میر", + "▁عبادت", + "صاف", + "▁شهر", + "▁والد", + "▁اسلام", + "出", + "▁آمدن", + "موا", + "世界", + "▁ہے", + "▁نے", + "ِّ", + 
"▁godina", + "▁هڪ", + "nih", + "▁ostvari", + "ضرار", + "jal", + "▁razm", + "▁prostor", + "▁گئے", + "させる", + "▁لباس", + "▁دعوت", + "▁زمرہ", + "tvr", + "▁اصحاب", + "▁(2)", + "▁وہ", + "▁قسم", + "▁یہ", + "▁deset", + "▁عمر", + "▁المسلمين", + "سار", + "▁شروع", + "▁دولت", + "▁سعد", + "đeni", + "▁تور", + "▁عباس", + "▁خواب", + "▁شریف", + "ودي", + "ڑی", + "▁عمو", + "stavlja", + "的な", + "ary", + "▁وقت", + "海外", + "▁پاڻ", + "▁حجر", + "▁uko", + "▁فرماتے", + "▁pomaže", + "▁عالم", + "▁علم", + "▁گا۔", + "▁رضي", + "▁لانے", + "tili", + "▁عهد", + "▁مخلوق", + "▁ہے۔", + "企業", + "ندا", + "rum", + "▁ڏانهن", + "▁بنا", + "▁بند", + "▁ہزار", + "▁ispod", + "▁نالو", + "وري", + "▁نالي", + "▁خوبی", + "▁sklad", + "▁konačno", + "▁اپنے", + "▁لگ", + "▁ود", + "▁ور", + "▁هي", + "▁خاندان", + "▁generacij", + "lji", + "▁ثروت", + "▁هن", + "▁وا", + "▁هو", + "▁معرفت", + "حضور", + "قار", + "قاب", + "▁قرآن", + "▁عنه", + "▁%", + "▁druge", + "▁'", + "▁(", + "▁-", + "▁پہنچ", + "građ", + "▁materijal", + "nom", + "ڙو", + "▁6", + "▁چھوڑ", + "▁يا", + "▁1", + "▁هٿ", + "動", + "▁4", + "قام", + "1959", + "šne", + "▁:", + "▁پہلے", + "▁یقین", + "拠", + "▁H", + "▁خالی", + "▁مرتبہ", + "▁گذر", + "▁آھي", + "▁اظہار", + "▁مذهبي", + "▁U", + "▁građevin", + "▁نہ", + "▁وچ", + "太平洋", + "▁صفت", + "▁فرما", + "▁tijekom", + "▁بعض", + "▁a", + "‘‘", + "▁سوره", + "▁برکت", + "▁o", + "▁p", + "▁i", + "▁l", + "▁u", + "▁رسالت", + "jit", + "▁veličine", + "▁s", + "▁انتقال", + "化学", + "ša", + "▁صرف", + "خلي", + "▁آغاز", + "▁سخت", + "▁onda", + "1987", + "▁بلکہ", + "ječ", + "▁puno", + "▁traje", + "▁بعد", + "خلق", + "▁کریم", + "iranju", + "▁Ta", + "▁عہد", + "log", + "▁ispred", + "▁ceremoni", + "▁خوا", + "▁liker", + "azi", + "▁شمار", + "▁سلام", + "pane", + "▁što", + "▁علاوه", + "▁کرنے", + "▁راہ", + "▁ہاتھ", + "آخر", + "▁تھے", + "▁dat", + "ستان", + "عادت", + "ješ", + "▁šum", + "▁kruh", + "▁تمہاری", + "▁ہدایت", + "▁احمد", + "▁ڏينهن", + "▁شریعت", + "▁لکھتے", + "▁ٿيا", + "dono", + "icama", + "رحم", + "▁لیکن", + "▁wie", + "▁کرنا", + "化粧品", + "جنب", + 
"▁ہوگئے", + "▁علاوہ", + "▁پات", + "▁ہیں", + "▁dei", + "▁پاس", + "▁حال", + "▁سندس", + "▁بھی", + "▁stab", + "▁klim", + "▁ٿيو", + "▁محبت", + "▁12.", + "▁ولادت", + "▁zraka", + "以降", + "▁المو", + "▁چيو", + "▁sti", + "den", + "名", + "▁عمرو", + "▁سید", + "▁سیر", + "bina", + "▁nešto", + "▁غربت", + "▁ž", + "▁Zi", + "▁امام", + "▁قلب", + "▁الله", + "▁قلت", + "▁رہے", + "▁13.", + "▁المج", + "▁گھر", + "▁شام", + "▁مکہ", + "عفو", + "▁kraja", + "▁preciz", + "▁دفع", + "▁آهن", + "▁مضمون", + "▁بڑی", + "▁stol", + "▁پٿر", + "▁انور", + "▁čet", + "▁هجڻ", + "وفا", + "▁svojih", + "▁یعنی", + "▁میرے", + "▁کرم", + "seo", + "▁خدمت", + "ce", + "▁جڏهن", + "ch", + "▁متو", + "ACI", + "つく", + "▁بجائے", + "▁مطابق", + "رسل", + "REP", + "▁انهن", + "株式会社", + "▁لیے", + "▁خلافت", + "ruše", + "wol", + "▁جيڪو", + "▁انہوں", + "کر", + "en", + "ڪاري", + "▁مختلف", + "▁لوگ", + "et", + "▁ڏيندو", + "ولت", + "▁انہیں", + "کل", + "▁metodo", + "▁منزل", + "▁حفظ", + "يون", + "ولو", + "2002", + "▁اللہ", + "ڪن", + "حدث", + "▁وسلم", + "اب", + "▁svojem", + "▁مسجد", + "ئي", + "▁znanja", + "اد", + "▁پیدا", + "اس", + "عون", + "▁stik", + "ومن", + "▁’’", + "▁št", + "▁زمان", + "he", + "▁قبائل", + "ال", + "با", + "ان", + "▁عادت", + "▁هئا", + "▁بيت", + "▁lokal", + "▁be", + "▁Kiva", + "▁واپسی", + "▁dok", + "▁zemlju", + "▁کار", + "ؤں", + "▁سختی", + "▁طیب", + "▁slo", + "بل", + "نہیں", + "بن", + "▁آس", + "当初", + "▁گرو", + "im", + "▁بین", + "▁آن", + "▁آم", + "▁dos", + "▁ci", + "▁akumul", + "▁oblika", + "zimi", + "▁محمود", + "改", + "zima", + "je", + "ji", + "▁do", + "▁ابن", + "▁ويو", + "کی", + "▁سفر", + "تز", + "▁دیتی", + "▁بزرگ", + "کار", + "ju", + "▁اطمینان", + "▁de", + "▁آپ", + "tari", + "▁کا", + "لفت", + "ka", + "گا", + "▁dy", + "ائي", + "ته", + "▁ٿيون", + "▁کان", + "▁کش", + "には", + "صلى", + "▁کر", + "▁en", + "ثر", + "▁بنی", + "▁کد", + "▁فرمایا", + "اؤں", + "成長", + "ئے", + "▁لقب", + "▁ڳالھ", + "会社", + "▁هئي", + "▁fur", + "اں", + "la", + "le", + "uru", + "جا", + "li", + "▁صلى", + "ثي", + "▁", + "lo", + "▁nije", + "جر", + "▁تعبير", + 
"جز", + "▁اگر", + "ا۔", + "▁دریافت", + "創業", + "▁کئے", + "ma", + "▁Kao", + "▁Men", + "▁konstrukcij", + "AMO", + "بی", + "mu", + "▁ممتاز", + "▁سڄو", + "kron", + "ne", + "▁سياست", + "حن", + "خا", + "▁men", + "krov", + "▁سڄي", + "▁رهيا", + "▁mei", + "▁سالم", + "▁سالن", + "no", + "▁عليه", + "▁بیان", + "▁گمراہ", + "▁سرفراز", + "▁مال", + "▁ٻنهي", + "nu", + "▁مان", + "▁رفع", + "▁هجر", + "▁اختلاف", + "of", + "حافظ", + "▁سنا", + "▁جہاد", + "▁društva", + "ائی", + "نون", + "om", + "▁osjeća", + "▁وڏو", + "irano", + "ئين", + "▁زيارت", + "irana", + "مار", + "دو", + "▁طريق", + "▁ar", + "▁poput", + "▁کے", + "▁کی", + "▁jo", + "،", + "▁gli", + "▁کرتے", + "▁koje", + "▁تم", + "▁تو", + "▁ki", + "▁ته", + "ؓ", + "▁گا", + "lā", + "جہ", + "را", + "iš", + "▁دیکھ", + "رج", + "مام", + "رح", + "رخ", + "▁تع", + "▁vje", + "آ", + "▁između", + "دھی", + "ا", + "ب", + "ت", + "▁تي", + "ج", + "▁koja", + "ح", + "▁اک", + "▁جان", + "د", + "▁فرم", + "ذ", + "▁جب", + "ر", + "▁جا", + "▁pojedin", + "ز", + "olo", + "س", + "▁گه", + "ش", + "▁حضور", + "ض", + "▁گو", + "▁Verde", + "▁غالب", + "ري", + "ع", + "▁گم", + "ڪنھن", + "▁گل", + "لوب", + "▁يزيد", + "▁فري", + "ndro", + "▁خدا", + "▁اے", + "ف", + "ق", + "ru", + "▁nepo", + "ل", + "م", + "ن", + "ه", + "▁dizajn", + "و", + "ي", + "▁جر", + "ٌ", + "▁جائے", + "ٍ", + "long", + "َ", + "ُ", + "▁جو", + "ِ", + "ّ", + "se", + "▁جي", + "ْ", + "▁جن", + "ٖ", + "▁بہ", + "sk", + "▁Tim", + "▁غیر", + "▁lang", + "▁kamen", + "▁my", + "▁ہوجائیں", + "ارا", + "▁حس", + "▁بخاری", + "▁ہیں۔", + "▁قابل", + "ٰ", + "te", + "دہ", + "سن", + "年", + "ادي", + "▁na", + "stan", + "ٹ", + "ٺ", + "▁حق", + "پ", + "▁الآ", + "دے", + "▁fo", + "▁struktur", + "ڇ", + "▁کو", + "▁الا", + "▁ممنوع", + "ڍ", + "شق", + "▁کي", + "▁ڪن", + "▁بھیج", + "▁تمہیں", + "dali", + "عطاء", + "▁Gra", + "صح", + "un", + "ڙ", + "▁gr", + "▁اس", + "▁از", + "▁ار", + "قول", + "ک", + "▁اح", + "ve", + "▁اب", + "▁ٻيهر", + "▁ام", + "▁vodo", + "▁ال", + "▁فضا", + "▁مذکور", + "▁klimat", + "▁تقوی", + "ڻ", + "ھ", + "▁ڪيائين", + "ہ", + "رے", + 
"▁hladno", + "ۃ", + "▁بد", + "▁بہتر", + "▁بر", + "روا", + "ی", + "روج", + "▁با", + "▁ان", + "▁بات", + "▁او", + "▁بن", + "ے", + "ûn", + "۔", + "▁بار", + "▁باس", + "dane", + "▁الق", + "▁najpoznatij", + "▁الل", + "▁الم", + "▁Vrh", + "▁is", + "oblikovan", + "▁in", + "▁الطب", + "ڻي", + "▁je", + "ادی", + "▁کہ", + "へと", + "▁صفات", + "▁غنی", + "▁nekoliko", + "▁کڻ", + "▁معاف", + "سے", + "変更", + "▁sa", + "▁سرانجام", + "▁فتح", + "月", + "▁vremena", + "عب", + "▁کڻي", + "عت", + "▁سر", + "▁دور", + "▁ست", + "عر", + "▁اسی", + "▁سب", + "▁se", + "▁protek", + "本", + "▁su", + "jedi", + "▁40", + "ھر", + "国際", + "▁سو", + "▁te", + "▁ساتھ", + "▁tink", + "ھل", + "oksen", + "غو", + "▁ڏنو", + "ں۔", + "▁شن", + "علامه", + "▁ži", + "▁va", + "حمد", + "ija", + "▁محمد", + "▁سڀ", + "حضرت", + "ije", + "▁دون", + "▁ڏنا", + "▁دکھا", + "▁koe", + "▁حلق", + "▁سڏ", + "▁مشرف", + "ٹے", + "▁خواہش", + "kom", + "▁ili", + "▁تک", + "▁ima", + "رائي", + "صحاب", + "▁of", + "▁تہ", + "▁ob", + "▁نہیں", + "▁od", + "▁خل", + "▁جنهن", + "▁Mesa", + "▁gradi", + "▁قائم", + "▁رکھا", + "▁دين", + "اعات", + "▁آواز", + "izaci", + "▁اٹھا", + "▁دل", + "ima", + "▁danas", + "▁گھ", + "▁گهر", + "▁broja", + "▁رکھنے", + "▁حکمت", + "▁po", + "▁قبول", + "ٽڪ", + "▁پيء", + "▁مجتمع", + "▁اعمال", + "ine", + "FIC", + "▁احسان", + "▁حین", + "gje", + "▁18", + "▁ہوئی", + "▁کہا", + "▁قبیل", + "▁ro", + "▁دی۔", + "▁civilizaci", + "▁teori", + "めた", + "▁okrug", + "▁današnji", + "▁گذري", + "▁ہوئے", + "ega", + "▁تمام" + ], + "scores": [ + 0.0, + 0.0, + 0.0, + 0.0, + -6.610896110534668, + -11.903949737548828, + -6.411019802093506, + -13.111821174621582, + -12.475632667541504, + -8.94989013671875, + -8.913808822631836, + -12.612136840820312, + -13.197681427001953, + -14.200822830200195, + -6.379403591156006, + -12.10725212097168, + -11.451247215270996, + -12.569819450378418, + -6.61658239364624, + -12.716913223266602, + -12.647109031677246, + -6.345553398132324, + -7.722129821777344, + -13.328119277954102, + -5.9974517822265625, + -13.542387008666992, + 
-11.525911331176758, + -9.303495407104492, + -13.15868091583252, + -3.4635426998138428, + -3.625642776489258, + -12.082132339477539, + -11.200728416442871, + -8.533885955810547, + -9.478791236877441, + -8.830430030822754, + -9.85542106628418, + -5.629745960235596, + -10000.0, + -11.07493782043457, + -10.675272941589355, + -14.053406715393066, + -12.350106239318848, + -13.201828002929688, + -15.362364768981934, + -14.316963195800781, + -11.72597599029541, + -10.451481819152832, + -10.200002670288086, + -12.970951080322266, + -10.799960136413574, + -9.750066757202148, + -14.09317684173584, + -13.132211685180664, + -12.164549827575684, + -11.790772438049316, + -11.185127258300781, + -11.52169418334961, + -10.283937454223633, + -5.5477118492126465, + -10.650121688842773, + -13.610538482666016, + -8.289443016052246, + -10.517338752746582, + -10000.0, + -5.701941967010498, + -10.288777351379395, + -7.932966709136963, + -7.701241970062256, + -10.9612398147583, + -7.4715776443481445, + -8.438796997070312, + -7.762022495269775, + -13.653663635253906, + -6.647110939025879, + -13.096002578735352, + -6.093497276306152, + -7.835560321807861, + -13.766554832458496, + -5.072621822357178, + -6.071900844573975, + -10.290907859802246, + -11.627830505371094, + -8.404854774475098, + -11.820650100708008, + -11.6625394821167, + -12.946660041809082, + -11.946572303771973, + -13.20298957824707, + -12.11659049987793, + -9.643321990966797, + -12.949349403381348, + -11.78995132446289, + -10.989119529724121, + -8.787092208862305, + -12.648849487304688, + -10.379737854003906, + -13.063958168029785, + -11.535991668701172, + -13.839150428771973, + -8.22523021697998, + -13.274272918701172, + -11.137674331665039, + -10.805622100830078, + -12.990604400634766, + -14.285995483398438, + -13.078483581542969, + -12.852004051208496, + -11.508638381958008, + -12.764389991760254, + -13.693453788757324, + -9.525500297546387, + -13.369109153747559, + -10.901957511901855, + -12.365242004394531, + 
-12.302881240844727, + -12.062744140625, + -9.150372505187988, + -10.726777076721191, + -12.626052856445312, + -12.744816780090332, + -11.537252426147461, + -10.271102905273438, + -13.577858924865723, + -12.193032264709473, + -9.33310604095459, + -9.089756965637207, + -10.834887504577637, + -13.551883697509766, + -10.807448387145996, + -12.546935081481934, + -10.950186729431152, + -11.474028587341309, + -8.803434371948242, + -9.171648025512695, + -10.806365966796875, + -10.984315872192383, + -12.26717758178711, + -11.871655464172363, + -13.023716926574707, + -13.473764419555664, + -13.253439903259277, + -10.311766624450684, + -10.744394302368164, + -12.47635269165039, + -11.38111400604248, + -11.568384170532227, + -10000.0, + -10.497817039489746, + -10.765369415283203, + -11.620940208435059, + -8.651301383972168, + -12.216012001037598, + -11.396681785583496, + -10.353537559509277, + -9.594635963439941, + -12.703508377075195, + -10.891910552978516, + -9.981459617614746, + -9.07016372680664, + -13.325227737426758, + -9.9458646774292, + -7.174049377441406, + -10.452103614807129, + -13.543808937072754, + -10000.0, + -7.37307596206665, + -10000.0, + -10.899341583251953, + -11.181215286254883, + -9.23928451538086, + -13.12946605682373, + -10.758359909057617, + -14.190896987915039, + -10.881155967712402, + -11.799145698547363, + -13.552739143371582, + -12.35738754272461, + -10.790441513061523, + -11.642875671386719, + -11.203944206237793, + -11.164298057556152, + -10.391376495361328, + -10.602131843566895, + -13.296408653259277, + -12.801287651062012, + -13.29976749420166, + -11.127630233764648, + -9.635873794555664, + -13.025283813476562, + -11.522773742675781, + -12.629497528076172, + -11.675955772399902, + -10.100441932678223, + -9.490818977355957, + -12.654541015625, + -11.304871559143066, + -8.778549194335938, + -12.341999053955078, + -12.693137168884277, + -12.534963607788086, + -12.560155868530273, + -12.807035446166992, + -14.408479690551758, + -10.91116714477539, 
+ -13.606574058532715, + -13.693682670593262, + -11.006491661071777, + -13.511610984802246, + -10.69263744354248, + -10000.0, + -10.88202953338623, + -9.945484161376953, + -11.082690238952637, + -13.169434547424316, + -10.761433601379395, + -12.539514541625977, + -9.714284896850586, + -10.531815528869629, + -11.411252975463867, + -12.159621238708496, + -13.439103126525879, + -12.159096717834473, + -10.569905281066895, + -11.485320091247559, + -11.685418128967285, + -11.131010055541992, + -13.32590389251709, + -12.843395233154297, + -11.425615310668945, + -9.176533699035645, + -10.86955738067627, + -11.128808975219727, + -13.251603126525879, + -12.20699405670166, + -11.551314353942871, + -10.626527786254883, + -11.38455581665039, + -11.614538192749023, + -14.187246322631836, + -12.982544898986816, + -11.797250747680664, + -10000.0, + -9.858101844787598, + -12.285886764526367, + -12.553010940551758, + -13.370101928710938, + -10.696676254272461, + -12.74817180633545, + -12.134454727172852, + -11.036406517028809, + -8.165318489074707, + -13.548136711120605, + -9.375162124633789, + -13.292466163635254, + -9.353793144226074, + -11.82857894897461, + -11.406195640563965, + -13.611187934875488, + -12.325207710266113, + -13.719786643981934, + -11.11467170715332, + -13.631454467773438, + -9.855673789978027, + -10.353020668029785, + -13.05349349975586, + -13.028356552124023, + -13.965872764587402, + -12.046480178833008, + -10000.0, + -9.703826904296875, + -9.335156440734863, + -12.247420310974121, + -13.311925888061523, + -9.245621681213379, + -9.983458518981934, + -10.990195274353027, + -12.795949935913086, + -13.135777473449707, + -13.378888130187988, + -11.403210639953613, + -13.6084566116333, + -12.680025100708008, + -10.440314292907715, + -12.222440719604492, + -11.42122745513916, + -11.383726119995117, + -12.9137601852417, + -14.476696968078613, + -13.467201232910156, + -12.135478973388672, + -12.71440601348877, + -12.663864135742188, + -11.900956153869629, + 
-13.902737617492676, + -11.216065406799316, + -10.742656707763672, + -10000.0, + -13.595476150512695, + -12.485321044921875, + -13.868348121643066, + -13.327157020568848, + -11.195040702819824, + -11.418901443481445, + -12.40562915802002, + -13.329121589660645, + -13.79185962677002, + -13.747245788574219, + -12.834506034851074, + -14.360300064086914, + -11.659099578857422, + -12.826003074645996, + -13.300061225891113, + -13.438057899475098, + -10.775205612182617, + -11.984052658081055, + -11.038277626037598, + -9.496801376342773, + -13.454340934753418, + -12.906173706054688, + -10.528743743896484, + -9.034796714782715, + -9.208064079284668, + -13.150303840637207, + -11.386055946350098, + -10.825066566467285, + -9.702229499816895, + -13.07643985748291, + -13.973017692565918, + -10.952235221862793, + -13.427935600280762, + -11.593998908996582, + -11.631296157836914, + -12.894272804260254, + -12.372183799743652, + -12.643047332763672, + -13.63158893585205, + -12.459877967834473, + -14.055868148803711, + -9.961138725280762, + -10.343199729919434, + -11.893840789794922, + -10.064606666564941, + -12.285531997680664, + -11.363245010375977, + -13.014981269836426, + -12.707101821899414, + -11.332316398620605, + -11.302778244018555, + -13.055937767028809, + -13.674442291259766, + -11.654837608337402, + -12.616765975952148, + -12.207862854003906, + -11.831457138061523, + -13.68111801147461, + -12.976818084716797, + -14.430807113647461, + -12.421124458312988, + -11.50658893585205, + -11.110703468322754, + -10.588051795959473, + -11.868654251098633, + -12.110957145690918, + -13.921645164489746, + -13.405242919921875, + -13.995997428894043, + -14.098799705505371, + -12.004497528076172, + -11.506338119506836, + -12.163915634155273, + -13.178263664245605, + -14.001510620117188, + -12.172317504882812, + -13.293499946594238, + -10000.0, + -10000.0, + -11.053098678588867, + -11.700507164001465, + -9.89022159576416, + -13.137804985046387, + -11.584470748901367, + -11.47780704498291, + 
-12.676255226135254, + -13.47961711883545, + -13.04322338104248, + -12.198275566101074, + -13.25001335144043, + -12.475261688232422, + -12.113178253173828, + -13.952543258666992, + -10.791595458984375, + -12.600780487060547, + -12.942652702331543, + -11.553879737854004, + -10.698843002319336, + -12.703505516052246, + -13.684463500976562, + -12.352499961853027, + -13.843332290649414, + -10.838998794555664, + -10.505772590637207, + -9.910398483276367, + -10000.0, + -13.492959976196289, + -12.739365577697754, + -12.663825035095215, + -11.77187728881836, + -12.291872024536133, + -9.96491813659668, + -11.795421600341797, + -8.20328426361084, + -5.926211357116699, + -6.1764984130859375, + -12.557028770446777, + -13.187960624694824, + -13.275311470031738, + -9.935545921325684, + -12.842405319213867, + -8.376090049743652, + -12.797646522521973, + -10.724736213684082, + -7.551031589508057, + -12.609972953796387, + -10.906079292297363, + -8.062232971191406, + -12.41627025604248, + -14.217342376708984, + -13.32412338256836, + -7.829031944274902, + -11.670573234558105, + -13.152725219726562, + -13.748356819152832, + -8.923832893371582, + -13.297384262084961, + -13.406578063964844, + -12.794132232666016, + -12.182308197021484, + -12.669015884399414, + -13.825421333312988, + -8.497254371643066, + -13.710942268371582, + -10.821403503417969, + -12.96695327758789, + -13.50233268737793, + -14.252979278564453, + -12.690325736999512, + -12.644253730773926, + -11.217681884765625, + -5.530364990234375, + -11.728997230529785, + -14.01970100402832, + -14.183956146240234, + -6.582267761230469, + -9.365259170532227, + -6.0461626052856445, + -7.482025623321533, + -7.319528579711914, + -13.754130363464355, + -12.329744338989258, + -14.684525489807129, + -7.452380657196045, + -12.367213249206543, + -13.013487815856934, + -10.484649658203125, + -11.331403732299805, + -13.430648803710938, + -11.846324920654297, + -11.766498565673828, + -11.94919490814209, + -13.64667797088623, + 
-12.330714225769043, + -12.643916130065918, + -12.118013381958008, + -12.920206069946289, + -9.852779388427734, + -13.62667465209961, + -12.794713020324707, + -13.23983097076416, + -9.644025802612305, + -13.41153335571289, + -10.862459182739258, + -13.595255851745605, + -12.811727523803711, + -12.114456176757812, + -12.617325782775879, + -12.534378051757812, + -12.435154914855957, + -11.79420280456543, + -13.141073226928711, + -10.228925704956055, + -12.648173332214355, + -10.5259370803833, + -13.075540542602539, + -12.833207130432129, + -12.930810928344727, + -11.625775337219238, + -8.988334655761719, + -11.492377281188965, + -14.40893268585205, + -12.106353759765625, + -13.25969409942627, + -13.190732955932617, + -14.228679656982422, + -13.389674186706543, + -11.702837944030762, + -12.21057415008545, + -13.743621826171875, + -14.52221393585205, + -13.440570831298828, + -12.3108491897583, + -13.243945121765137, + -13.412277221679688, + -11.340847969055176, + -9.651451110839844, + -11.63448429107666, + -14.10894775390625, + -13.651695251464844, + -13.614228248596191, + -12.83806037902832, + -13.661396026611328, + -9.87491512298584, + -9.8951416015625, + -11.809096336364746, + -10000.0, + -11.665786743164062, + -9.869390487670898, + -13.02005672454834, + -13.772218704223633, + -12.269754409790039, + -10000.0, + -10.893101692199707, + -14.356070518493652, + -13.704068183898926, + -13.16357707977295, + -13.131183624267578, + -11.631052017211914, + -11.685710906982422, + -8.907776832580566, + -9.99026870727539, + -14.045997619628906, + -12.187337875366211, + -13.198140144348145, + -11.964822769165039, + -12.250166893005371, + -14.266410827636719, + -11.802629470825195, + -11.381916046142578, + -11.162945747375488, + -12.44157886505127, + -9.786359786987305, + -10000.0, + -11.233309745788574, + -11.009201049804688, + -12.883721351623535, + -11.877254486083984, + -12.20182991027832, + -14.277528762817383, + -13.7249755859375, + -12.742781639099121, + -12.661029815673828, 
+ -12.547115325927734, + -10.75460147857666, + -12.907571792602539, + -12.363632202148438, + -11.848713874816895, + -14.31727123260498, + -14.046844482421875, + -12.718457221984863, + -13.913930892944336, + -13.322484016418457, + -12.36288833618164, + -11.979717254638672, + -12.366744041442871, + -13.646872520446777, + -13.255087852478027, + -12.110769271850586, + -8.450657844543457, + -12.003588676452637, + -8.075675010681152, + -13.522724151611328, + -13.485895156860352, + -13.05333423614502, + -13.399734497070312, + -11.368695259094238, + -13.210000038146973, + -13.97340202331543, + -11.725092887878418, + -13.334667205810547, + -10.739959716796875, + -13.98511791229248, + -13.574196815490723, + -12.201776504516602, + -12.244017601013184, + -11.784577369689941, + -11.283102035522461, + -6.465692520141602, + -13.550567626953125, + -10000.0, + -11.969606399536133, + -7.697822093963623, + -14.338610649108887, + -13.137377738952637, + -12.166990280151367, + -11.836442947387695, + -13.03665542602539, + -10000.0, + -12.438132286071777, + -11.183541297912598, + -12.041357040405273, + -12.614006996154785, + -11.056611061096191, + -12.743069648742676, + -12.888111114501953, + -11.329586029052734, + -10.89394760131836, + -13.152234077453613, + -12.330127716064453, + -11.13021469116211, + -12.93645191192627, + -11.057968139648438, + -10000.0, + -10.721052169799805, + -13.35482406616211, + -12.192888259887695, + -13.680505752563477, + -11.670418739318848, + -11.871618270874023, + -11.242685317993164, + -9.557723999023438, + -10000.0, + -10000.0, + -11.125853538513184, + -9.22323226928711, + -13.24915599822998, + -11.91252613067627, + -12.721294403076172, + -11.174097061157227, + -7.990510940551758, + -13.991429328918457, + -13.859502792358398, + -11.520356178283691, + -13.971871376037598, + -10.134190559387207, + -13.07124137878418, + -13.591009140014648, + -10000.0, + -12.195642471313477, + -11.237064361572266, + -14.172703742980957, + -12.285969734191895, + 
-12.486908912658691, + -13.344427108764648, + -14.147533416748047, + -8.739598274230957, + -10.91167163848877, + -9.65263843536377, + -12.717270851135254, + -9.05600643157959, + -9.518941879272461, + -13.623188972473145, + -13.740755081176758, + -12.843908309936523, + -12.340595245361328, + -11.486748695373535, + -12.710258483886719, + -8.179301261901855, + -9.062299728393555, + -6.97949743270874, + -11.858112335205078, + -11.286624908447266, + -11.264288902282715, + -11.513409614562988, + -12.92937183380127, + -13.888775825500488, + -11.602699279785156, + -11.45335578918457, + -8.737150192260742, + -13.517394065856934, + -5.306643009185791, + -10.628350257873535, + -11.167513847351074, + -8.796040534973145, + -10000.0, + -7.871254920959473, + -12.365166664123535, + -10.174576759338379, + -11.040783882141113, + -10.433000564575195, + -13.301560401916504, + -10.412057876586914, + -12.430901527404785, + -10.438301086425781, + -12.994340896606445, + -9.674080848693848, + -6.31805419921875, + -10000.0, + -13.583410263061523, + -12.609077453613281, + -12.849180221557617, + -13.826027870178223, + -12.031959533691406, + -11.845420837402344, + -13.824118614196777, + -14.176135063171387, + -11.988388061523438, + -11.812614440917969, + -11.836989402770996, + -11.457304000854492, + -7.770702362060547, + -7.727717399597168, + -11.643401145935059, + -11.470467567443848, + -7.818939208984375, + -11.843179702758789, + -13.107500076293945, + -3.9299705028533936, + -8.545232772827148, + -10.604230880737305, + -11.725444793701172, + -14.248205184936523, + -12.327702522277832, + -10.582293510437012, + -13.244439125061035, + -12.059713363647461, + -12.921284675598145, + -12.883295059204102, + -8.039029121398926, + -12.784309387207031, + -9.565606117248535, + -13.330506324768066, + -13.591753005981445, + -11.633308410644531, + -8.91528034210205, + -13.258749961853027, + -14.396084785461426, + -12.985039710998535, + -7.417489528656006, + -13.149593353271484, + -12.515260696411133, + 
-12.587913513183594, + -8.725703239440918, + -12.955209732055664, + -13.303566932678223, + -12.77481460571289, + -10.94013786315918, + -12.95765495300293, + -12.781828880310059, + -7.8542914390563965, + -10.790460586547852, + -11.324527740478516, + -10000.0, + -14.37807559967041, + -11.92667007446289, + -13.613455772399902, + -8.873832702636719, + -10.859443664550781, + -10000.0, + -13.676170349121094, + -10000.0, + -10.775018692016602, + -12.95517349243164, + -13.232227325439453, + -14.387094497680664, + -12.78573989868164, + -11.10414981842041, + -12.770772933959961, + -8.37712574005127, + -13.284975051879883, + -12.99605655670166, + -12.795123100280762, + -12.59602165222168, + -14.046062469482422, + -12.717855453491211, + -11.992505073547363, + -11.261098861694336, + -11.927038192749023, + -7.993113040924072, + -12.678472518920898, + -8.111339569091797, + -8.321968078613281, + -9.167539596557617, + -6.938859939575195, + -10.086030006408691, + -11.12663459777832, + -10.549439430236816, + -10.726285934448242, + -9.660466194152832, + -8.06203842163086, + -9.214642524719238, + -13.663848876953125, + -11.201567649841309, + -11.692606925964355, + -10000.0, + -11.061088562011719, + -11.801623344421387, + -12.245379447937012, + -12.138701438903809, + -12.882545471191406, + -12.515228271484375, + -13.013315200805664, + -10000.0, + -12.440690994262695, + -11.182541847229004, + -12.293285369873047, + -13.289390563964844, + -8.25012493133545, + -9.714176177978516, + -8.54870319366455, + -10.123932838439941, + -10.123574256896973, + -10.787864685058594, + -10.535843849182129, + -12.084434509277344, + -11.367830276489258, + -9.272168159484863, + -12.730428695678711, + -10.77696418762207, + -11.299437522888184, + -9.2794771194458, + -10.121893882751465, + -12.715132713317871, + -9.817319869995117, + -11.207874298095703, + -9.38442325592041, + -12.960518836975098, + -9.32898998260498, + -11.51174259185791, + -10.884286880493164, + -11.72599983215332, + -13.018866539001465, + 
-13.688644409179688, + -11.104406356811523, + -9.628833770751953, + -10000.0, + -14.26093864440918, + -11.645624160766602, + -13.037396430969238, + -13.70888900756836, + -14.17890739440918, + -12.572925567626953, + -11.417001724243164, + -11.92243480682373, + -10000.0, + -10000.0, + -9.334487915039062, + -11.304604530334473, + -8.963071823120117, + -8.566967010498047, + -8.42164134979248, + -7.929381370544434, + -12.258378028869629, + -8.675025939941406, + -7.946563243865967, + -12.063398361206055, + -11.749732971191406, + -10000.0, + -10000.0, + -11.486349105834961, + -8.294340133666992, + -8.934319496154785, + -9.14141845703125, + -8.774731636047363, + -9.606856346130371, + -7.985258102416992, + -8.77881145477295, + -9.519185066223145, + -11.011818885803223, + -10000.0, + -12.663806915283203, + -9.498204231262207, + -11.668294906616211, + -11.114726066589355, + -10.064451217651367, + -12.4163236618042, + -9.18289566040039, + -14.760283470153809, + -12.412688255310059, + -12.485615730285645, + -13.954992294311523, + -10.590344429016113, + -11.5427827835083, + -10.981088638305664, + -7.230983257293701, + -12.179085731506348, + -11.862399101257324, + -8.748784065246582, + -12.161140441894531, + -6.327768802642822, + -10.773768424987793, + -11.07607364654541, + -12.150201797485352, + -11.026751518249512, + -10.101223945617676, + -12.312088966369629, + -13.726814270019531, + -10.998577117919922, + -11.658036231994629, + -12.724591255187988, + -8.90641975402832, + -10.587677001953125, + -13.232854843139648, + -12.673201560974121, + -12.671024322509766, + -9.579854965209961, + -12.255494117736816, + -13.195446968078613, + -13.792229652404785, + -12.136938095092773, + -14.033784866333008, + -11.097009658813477, + -12.227850914001465, + -8.381165504455566, + -11.749275207519531, + -10.788141250610352, + -9.176112174987793, + -8.276718139648438, + -12.19137954711914, + -12.501619338989258, + -10000.0, + -12.824344635009766, + -8.281817436218262, + -11.095662117004395, + 
-14.165250778198242, + -11.348094940185547, + -12.567456245422363, + -8.52457332611084, + -12.733758926391602, + -14.122416496276855, + -12.041360855102539, + -14.223989486694336, + -10.91444206237793, + -11.301746368408203, + -13.864544868469238, + -9.358236312866211, + -12.629053115844727, + -13.954301834106445, + -12.757906913757324, + -11.3294677734375, + -12.579774856567383, + -9.734641075134277, + -12.667106628417969, + -7.761864185333252, + -13.73751163482666, + -8.641522407531738, + -9.142242431640625, + -11.219501495361328, + -8.902569770812988, + -10.32934284210205, + -9.886985778808594, + -12.262642860412598, + -10.265408515930176, + -10.638322830200195, + -13.340587615966797, + -13.356618881225586, + -11.643696784973145, + -14.654583930969238, + -11.425148963928223, + -9.355377197265625, + -13.895018577575684, + -7.402246475219727, + -14.136250495910645, + -6.3201680183410645, + -13.139766693115234, + -12.429423332214355, + -6.986958980560303, + -12.735187530517578, + -9.379314422607422, + -13.72009563446045, + -14.089472770690918, + -13.748627662658691, + -11.978914260864258, + -13.91929817199707, + -13.818605422973633, + -12.800165176391602, + -12.482606887817383, + -7.0481696128845215, + -13.999007225036621, + -12.334272384643555, + -9.02311897277832, + -12.519035339355469, + -12.279437065124512, + -13.015851020812988, + -11.549497604370117, + -10.443224906921387, + -11.093907356262207, + -11.322037696838379, + -12.739632606506348, + -11.954368591308594, + -11.006121635437012, + -6.716159820556641, + -13.072667121887207, + -9.844562530517578, + -7.880402088165283, + -12.887127876281738, + -9.566628456115723, + -12.454107284545898, + -13.231634140014648, + -10.958243370056152, + -7.653661727905273, + -10.862187385559082, + -12.770365715026855, + -13.171697616577148, + -12.640562057495117, + -12.947507858276367, + -12.908676147460938, + -13.518932342529297, + -12.32068157196045, + -13.127158164978027, + -11.459029197692871, + -8.377157211303711, + 
-10000.0, + -10.775200843811035, + -10.465039253234863, + -12.271913528442383, + -14.012896537780762, + -11.834538459777832, + -11.865020751953125, + -13.912064552307129, + -13.423148155212402, + -12.969127655029297, + -13.150151252746582, + -13.349272727966309, + -13.358369827270508, + -13.805335998535156, + -10000.0, + -9.97810173034668, + -9.83073902130127, + -10.826997756958008, + -10.815749168395996, + -13.47970962524414, + -13.573468208312988, + -7.573562145233154, + -13.315520286560059, + -9.457472801208496, + -10.05649185180664, + -8.165127754211426, + -12.03433609008789, + -11.478641510009766, + -13.337766647338867, + -12.477649688720703, + -12.112797737121582, + -13.071025848388672, + -13.196009635925293, + -13.268590927124023, + -12.347124099731445, + -13.836188316345215, + -12.877021789550781, + -11.046356201171875, + -9.727120399475098, + -11.82292366027832, + -12.20386028289795, + -11.933242797851562, + -13.743108749389648, + -10000.0, + -13.26471996307373, + -7.864959716796875, + -12.188377380371094, + -13.456745147705078, + -13.93160629272461, + -10000.0, + -12.802282333374023, + -9.642523765563965, + -13.602763175964355, + -13.777410507202148, + -14.326563835144043, + -12.257325172424316, + -8.880353927612305, + -11.984357833862305, + -10.867341995239258, + -13.713247299194336, + -10.132527351379395, + -13.895491600036621, + -14.169499397277832, + -11.725127220153809, + -13.49624252319336, + -13.843846321105957, + -13.832304000854492, + -13.618553161621094, + -11.058784484863281, + -10.402983665466309, + -10.856165885925293 + ] +} + From 22d730815d0d7d3a485dc2e38d498bce353ee99f Mon Sep 17 00:00:00 2001 From: Tim Grein Date: Wed, 8 May 2024 13:21:56 +0200 Subject: [PATCH 057/117] [Inference API] Remove unused class AzureOpenAiAccount (#108356) --- .../azureopenai/AzureOpenAiAccount.java | 40 ------------------- .../AzureOpenAiEmbeddingsRequest.java | 3 -- 2 files changed, 43 deletions(-) delete mode 100644 
x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/azureopenai/AzureOpenAiAccount.java diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/azureopenai/AzureOpenAiAccount.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/azureopenai/AzureOpenAiAccount.java deleted file mode 100644 index db1f91cc751ee..0000000000000 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/azureopenai/AzureOpenAiAccount.java +++ /dev/null @@ -1,40 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ - -package org.elasticsearch.xpack.inference.external.azureopenai; - -import org.elasticsearch.common.settings.SecureString; -import org.elasticsearch.core.Nullable; -import org.elasticsearch.xpack.inference.services.azureopenai.embeddings.AzureOpenAiEmbeddingsModel; - -import java.util.Objects; - -public record AzureOpenAiAccount( - String resourceName, - String deploymentId, - String apiVersion, - @Nullable SecureString apiKey, - @Nullable SecureString entraId -) { - - public AzureOpenAiAccount { - Objects.requireNonNull(resourceName); - Objects.requireNonNull(deploymentId); - Objects.requireNonNull(apiVersion); - Objects.requireNonNullElse(apiKey, entraId); - } - - public static AzureOpenAiAccount fromModel(AzureOpenAiEmbeddingsModel model) { - return new AzureOpenAiAccount( - model.getServiceSettings().resourceName(), - model.getServiceSettings().deploymentId(), - model.getServiceSettings().apiVersion(), - model.getSecretSettings().apiKey(), - model.getSecretSettings().entraId() - ); - } -} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/azureopenai/AzureOpenAiEmbeddingsRequest.java 
b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/azureopenai/AzureOpenAiEmbeddingsRequest.java index f60d0130a01b6..f20398fec0e57 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/azureopenai/AzureOpenAiEmbeddingsRequest.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/azureopenai/AzureOpenAiEmbeddingsRequest.java @@ -15,7 +15,6 @@ import org.elasticsearch.common.ValidationException; import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xpack.inference.common.Truncator; -import org.elasticsearch.xpack.inference.external.azureopenai.AzureOpenAiAccount; import org.elasticsearch.xpack.inference.external.request.HttpRequest; import org.elasticsearch.xpack.inference.external.request.Request; import org.elasticsearch.xpack.inference.services.azureopenai.embeddings.AzureOpenAiEmbeddingsModel; @@ -34,14 +33,12 @@ public class AzureOpenAiEmbeddingsRequest implements AzureOpenAiRequest { "The request does not have any authentication methods set. One of [%s] or [%s] is required."; private final Truncator truncator; - private final AzureOpenAiAccount account; private final Truncator.TruncationResult truncationResult; private final URI uri; private final AzureOpenAiEmbeddingsModel model; public AzureOpenAiEmbeddingsRequest(Truncator truncator, Truncator.TruncationResult input, AzureOpenAiEmbeddingsModel model) { this.truncator = Objects.requireNonNull(truncator); - this.account = AzureOpenAiAccount.fromModel(model); this.truncationResult = Objects.requireNonNull(input); this.model = Objects.requireNonNull(model); this.uri = model.getUri(); From 8864058f83bc485ce6c89a196e9d64b04e79b1cf Mon Sep 17 00:00:00 2001 From: Bogdan Pintea Date: Wed, 8 May 2024 14:51:02 +0200 Subject: [PATCH 058/117] ESQL: Add more time span units (#108300) This adds `nanosecond`, `microsecond` and `quarter` to the set of supported time spans. 
It also adds a few standard and common abbreviations to some existing ones. --- docs/changelog/108300.yaml | 5 +++ docs/reference/esql/esql-syntax.asciidoc | 17 +++++----- .../src/main/resources/date.csv-spec | 34 +++++++++++++++++++ .../xpack/esql/plugin/EsqlFeatures.java | 8 ++++- .../esql/type/EsqlDataTypeConverter.java | 20 ++++++----- .../xpack/esql/parser/ExpressionTests.java | 15 ++++++++ 6 files changed, 81 insertions(+), 18 deletions(-) create mode 100644 docs/changelog/108300.yaml diff --git a/docs/changelog/108300.yaml b/docs/changelog/108300.yaml new file mode 100644 index 0000000000000..c4d6e468113a4 --- /dev/null +++ b/docs/changelog/108300.yaml @@ -0,0 +1,5 @@ +pr: 108300 +summary: "ESQL: Add more time span units" +area: ES|QL +type: enhancement +issues: [] diff --git a/docs/reference/esql/esql-syntax.asciidoc b/docs/reference/esql/esql-syntax.asciidoc index c5d56ef15fdfd..c7f741d064310 100644 --- a/docs/reference/esql/esql-syntax.asciidoc +++ b/docs/reference/esql/esql-syntax.asciidoc @@ -160,14 +160,15 @@ Datetime intervals and timespans can be expressed using timespan literals. Timespan literals are a combination of a number and a qualifier. These qualifiers are supported: -* `millisecond`/`milliseconds` -* `second`/`seconds` -* `minute`/`minutes` -* `hour`/`hours` -* `day`/`days` -* `week`/`weeks` -* `month`/`months` -* `year`/`years` +* `millisecond`/`milliseconds`/`ms` +* `second`/`seconds`/`sec`/`s` +* `minute`/`minutes`/`min` +* `hour`/`hours`/`h` +* `day`/`days`/`d` +* `week`/`weeks`/`w` +* `month`/`months`/`mo` +* `quarter`/`quarters`/`q` +* `year`/`years`/`yr`/`y` Timespan literals are not whitespace sensitive. 
These expressions are all valid: diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/date.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/date.csv-spec index 721cff076aeaa..8d54288de552d 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/date.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/date.csv-spec @@ -621,6 +621,40 @@ dt:datetime |plus_post:datetime |plus_pre:datetime 2100-01-01T01:01:01.001Z |null |null ; +datePlusQuarter +# "quarter" introduced in 8.15 +required_feature: esql.timespan_abbreviations +row dt = to_dt("2100-01-01T01:01:01.000Z") +| eval plusQuarter = dt + 2 quarters +; + +dt:datetime | plusQuarter:datetime +2100-01-01T01:01:01.000Z | 2100-07-01T01:01:01.000Z +; + +datePlusAbbreviatedDurations +# abbreviations introduced in 8.15 +required_feature: esql.timespan_abbreviations +row dt = to_dt("2100-01-01T00:00:00.000Z") +| eval plusDurations = dt + 1 h + 2 min + 2 sec + 1 s + 4 ms +; + +dt:datetime | plusDurations:datetime +2100-01-01T00:00:00.000Z | 2100-01-01T01:02:03.004Z +; + +datePlusAbbreviatedPeriods +# abbreviations introduced in 8.15 +required_feature: esql.timespan_abbreviations +row dt = to_dt("2100-01-01T00:00:00.000Z") +| eval plusDurations = dt + 0 yr + 1y + 2 q + 3 mo + 4 w + 3 d +; + +dt:datetime | plusDurations:datetime +2100-01-01T00:00:00.000Z | 2101-11-01T00:00:00.000Z +; + + dateMinusDuration row dt = to_dt("2100-01-01T01:01:01.001Z") | eval minus = dt - 1 hour - 1 minute - 1 second - 1 milliseconds; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/EsqlFeatures.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/EsqlFeatures.java index f6b534f7316df..059eec771efe8 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/EsqlFeatures.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/EsqlFeatures.java @@ -136,6 +136,11 @@ public class EsqlFeatures 
implements FeatureSpecification { */ public static final NodeFeature METADATA_FIELDS = new NodeFeature("esql.metadata_fields"); + /** + * Support for timespan units abbreviations + */ + public static final NodeFeature TIMESPAN_ABBREVIATIONS = new NodeFeature("esql.timespan_abbreviations"); + @Override public Set getFeatures() { return Set.of( @@ -157,7 +162,8 @@ public Set getFeatures() { MV_ORDERING_SORTED_ASCENDING, METRICS_COUNTER_FIELDS, STRING_LITERAL_AUTO_CASTING_EXTENDED, - METADATA_FIELDS + METADATA_FIELDS, + TIMESPAN_ABBREVIATIONS ); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/type/EsqlDataTypeConverter.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/type/EsqlDataTypeConverter.java index e4c7983d9a83a..e1360c67976ca 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/type/EsqlDataTypeConverter.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/type/EsqlDataTypeConverter.java @@ -234,18 +234,20 @@ public static DataType commonType(DataType left, DataType right) { return DataTypeConverter.commonType(left, right); } + // generally supporting abbreviations from https://en.wikipedia.org/wiki/Unit_of_time public static TemporalAmount parseTemporalAmout(Number value, String qualifier, Source source) throws InvalidArgumentException, ArithmeticException, ParsingException { return switch (qualifier) { - case "millisecond", "milliseconds" -> Duration.ofMillis(safeToLong(value)); - case "second", "seconds" -> Duration.ofSeconds(safeToLong(value)); - case "minute", "minutes" -> Duration.ofMinutes(safeToLong(value)); - case "hour", "hours" -> Duration.ofHours(safeToLong(value)); - - case "day", "days" -> Period.ofDays(safeToInt(safeToLong(value))); - case "week", "weeks" -> Period.ofWeeks(safeToInt(safeToLong(value))); - case "month", "months" -> Period.ofMonths(safeToInt(safeToLong(value))); - case "year", "years" -> Period.ofYears(safeToInt(safeToLong(value))); + case 
"millisecond", "milliseconds", "ms" -> Duration.ofMillis(safeToLong(value)); + case "second", "seconds", "sec", "s" -> Duration.ofSeconds(safeToLong(value)); + case "minute", "minutes", "min" -> Duration.ofMinutes(safeToLong(value)); + case "hour", "hours", "h" -> Duration.ofHours(safeToLong(value)); + + case "day", "days", "d" -> Period.ofDays(safeToInt(safeToLong(value))); + case "week", "weeks", "w" -> Period.ofWeeks(safeToInt(safeToLong(value))); + case "month", "months", "mo" -> Period.ofMonths(safeToInt(safeToLong(value))); + case "quarter", "quarters", "q" -> Period.ofMonths(safeToInt(Math.multiplyExact(3L, safeToLong(value)))); + case "year", "years", "yr", "y" -> Period.ofYears(safeToInt(safeToLong(value))); default -> throw new ParsingException(source, "Unexpected time interval qualifier: '{}'", qualifier); }; diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/ExpressionTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/ExpressionTests.java index a0f226946cc36..9157f186ade92 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/ExpressionTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/ExpressionTests.java @@ -380,14 +380,18 @@ public void testDurationLiterals() { assertEquals(l(Duration.ZERO, TIME_DURATION), whereExpression("0 second")); assertEquals(l(Duration.ofSeconds(value), TIME_DURATION), whereExpression(value + "second")); assertEquals(l(Duration.ofSeconds(value), TIME_DURATION), whereExpression(value + " seconds")); + assertEquals(l(Duration.ofSeconds(value), TIME_DURATION), whereExpression(value + " sec")); + assertEquals(l(Duration.ofSeconds(value), TIME_DURATION), whereExpression(value + " s")); assertEquals(l(Duration.ZERO, TIME_DURATION), whereExpression("0 minute")); assertEquals(l(Duration.ofMinutes(value), TIME_DURATION), whereExpression(value + "minute")); assertEquals(l(Duration.ofMinutes(value), TIME_DURATION), 
whereExpression(value + " minutes")); + assertEquals(l(Duration.ofMinutes(value), TIME_DURATION), whereExpression(value + " min")); assertEquals(l(Duration.ZERO, TIME_DURATION), whereExpression("0 hour")); assertEquals(l(Duration.ofHours(value), TIME_DURATION), whereExpression(value + "hour")); assertEquals(l(Duration.ofHours(value), TIME_DURATION), whereExpression(value + " hours")); + assertEquals(l(Duration.ofHours(value), TIME_DURATION), whereExpression(value + " h")); assertEquals(l(Duration.ofHours(-value), TIME_DURATION), whereExpression("-" + value + " hours")); } @@ -395,22 +399,33 @@ public void testDurationLiterals() { public void testDatePeriodLiterals() { int value = randomInt(Integer.MAX_VALUE); int weeksValue = randomInt(Integer.MAX_VALUE / 7); + int quartersValue = randomInt(Integer.MAX_VALUE / 3); assertEquals(l(Period.ZERO, DATE_PERIOD), whereExpression("0 day")); assertEquals(l(Period.ofDays(value), DATE_PERIOD), whereExpression(value + "day")); assertEquals(l(Period.ofDays(value), DATE_PERIOD), whereExpression(value + " days")); + assertEquals(l(Period.ofDays(value), DATE_PERIOD), whereExpression(value + " d")); assertEquals(l(Period.ZERO, DATE_PERIOD), whereExpression("0week")); assertEquals(l(Period.ofDays(weeksValue * 7), DATE_PERIOD), whereExpression(weeksValue + "week")); assertEquals(l(Period.ofDays(weeksValue * 7), DATE_PERIOD), whereExpression(weeksValue + " weeks")); + assertEquals(l(Period.ofDays(weeksValue * 7), DATE_PERIOD), whereExpression(weeksValue + " w")); assertEquals(l(Period.ZERO, DATE_PERIOD), whereExpression("0 month")); assertEquals(l(Period.ofMonths(value), DATE_PERIOD), whereExpression(value + "month")); assertEquals(l(Period.ofMonths(value), DATE_PERIOD), whereExpression(value + " months")); + assertEquals(l(Period.ofMonths(value), DATE_PERIOD), whereExpression(value + " mo")); + + assertEquals(l(Period.ZERO, DATE_PERIOD), whereExpression("0 quarter")); + assertEquals(l(Period.ofMonths(Math.multiplyExact(quartersValue, 
3)), DATE_PERIOD), whereExpression(quartersValue + " quarter")); + assertEquals(l(Period.ofMonths(Math.multiplyExact(quartersValue, 3)), DATE_PERIOD), whereExpression(quartersValue + " quarters")); + assertEquals(l(Period.ofMonths(Math.multiplyExact(quartersValue, 3)), DATE_PERIOD), whereExpression(quartersValue + " q")); assertEquals(l(Period.ZERO, DATE_PERIOD), whereExpression("0year")); assertEquals(l(Period.ofYears(value), DATE_PERIOD), whereExpression(value + "year")); assertEquals(l(Period.ofYears(value), DATE_PERIOD), whereExpression(value + " years")); + assertEquals(l(Period.ofYears(value), DATE_PERIOD), whereExpression(value + " yr")); + assertEquals(l(Period.ofYears(value), DATE_PERIOD), whereExpression(value + " y")); assertEquals(l(Period.ofYears(-value), DATE_PERIOD), whereExpression("-" + value + " years")); } From e7350dce291921bded600cf306e4dca6138fdb25 Mon Sep 17 00:00:00 2001 From: Simon Cooper Date: Wed, 8 May 2024 14:44:26 +0100 Subject: [PATCH 059/117] Add a capabilities API to check node and cluster capabilities (#106820) This adds a /_capabilities rest endpoint for checking the capabilities of a cluster - what endpoints, parameters, and endpoint capabilities the cluster supports --- docs/changelog/106820.yaml | 5 + .../elasticsearch/core/RestApiVersion.java | 11 ++ .../SimpleNodesCapabilitiesIT.java | 55 +++++++ server/src/main/java/module-info.java | 1 + .../elasticsearch/action/ActionModule.java | 5 + .../node/capabilities/NodeCapability.java | 43 ++++++ .../NodesCapabilitiesRequest.java | 75 ++++++++++ .../NodesCapabilitiesResponse.java | 46 ++++++ .../TransportNodesCapabilitiesAction.java | 140 ++++++++++++++++++ .../client/internal/ClusterAdminClient.java | 11 ++ .../elasticsearch/rest/BaseRestHandler.java | 8 + .../elasticsearch/rest/RestController.java | 26 ++++ .../org/elasticsearch/rest/RestHandler.java | 17 +++ .../cluster/RestNodesCapabilitiesAction.java | 60 ++++++++ .../xpack/security/operator/Constants.java | 1 + 15 files 
changed, 504 insertions(+) create mode 100644 docs/changelog/106820.yaml create mode 100644 server/src/internalClusterTest/java/org/elasticsearch/nodescapabilities/SimpleNodesCapabilitiesIT.java create mode 100644 server/src/main/java/org/elasticsearch/action/admin/cluster/node/capabilities/NodeCapability.java create mode 100644 server/src/main/java/org/elasticsearch/action/admin/cluster/node/capabilities/NodesCapabilitiesRequest.java create mode 100644 server/src/main/java/org/elasticsearch/action/admin/cluster/node/capabilities/NodesCapabilitiesResponse.java create mode 100644 server/src/main/java/org/elasticsearch/action/admin/cluster/node/capabilities/TransportNodesCapabilitiesAction.java create mode 100644 server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestNodesCapabilitiesAction.java diff --git a/docs/changelog/106820.yaml b/docs/changelog/106820.yaml new file mode 100644 index 0000000000000..d854e3984c13d --- /dev/null +++ b/docs/changelog/106820.yaml @@ -0,0 +1,5 @@ +pr: 106820 +summary: Add a capabilities API to check node and cluster capabilities +area: Infra/REST API +type: feature +issues: [] diff --git a/libs/core/src/main/java/org/elasticsearch/core/RestApiVersion.java b/libs/core/src/main/java/org/elasticsearch/core/RestApiVersion.java index 5153ba688d6a9..74acb00925e5a 100644 --- a/libs/core/src/main/java/org/elasticsearch/core/RestApiVersion.java +++ b/libs/core/src/main/java/org/elasticsearch/core/RestApiVersion.java @@ -61,4 +61,15 @@ public static Predicate onOrAfter(RestApiVersion restApiVersion) }; } + public static RestApiVersion forMajor(int major) { + switch (major) { + case 7 -> { + return V_7; + } + case 8 -> { + return V_8; + } + default -> throw new IllegalArgumentException("Unknown REST API version " + major); + } + } } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/nodescapabilities/SimpleNodesCapabilitiesIT.java 
b/server/src/internalClusterTest/java/org/elasticsearch/nodescapabilities/SimpleNodesCapabilitiesIT.java new file mode 100644 index 0000000000000..7e4ae040caeca --- /dev/null +++ b/server/src/internalClusterTest/java/org/elasticsearch/nodescapabilities/SimpleNodesCapabilitiesIT.java @@ -0,0 +1,55 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.nodescapabilities; + +import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse; +import org.elasticsearch.action.admin.cluster.node.capabilities.NodesCapabilitiesRequest; +import org.elasticsearch.action.admin.cluster.node.capabilities.NodesCapabilitiesResponse; +import org.elasticsearch.test.ESIntegTestCase; + +import java.io.IOException; + +import static org.hamcrest.Matchers.hasSize; +import static org.hamcrest.Matchers.is; + +@ESIntegTestCase.ClusterScope(scope = ESIntegTestCase.Scope.TEST, numDataNodes = 0) +public class SimpleNodesCapabilitiesIT extends ESIntegTestCase { + + public void testNodesCapabilities() throws IOException { + internalCluster().startNodes(2); + + ClusterHealthResponse clusterHealth = clusterAdmin().prepareHealth().setWaitForGreenStatus().setWaitForNodes("2").get(); + logger.info("--> done cluster_health, status {}", clusterHealth.getStatus()); + + // check we support the capabilities API itself. Which we do. 
+ NodesCapabilitiesResponse response = clusterAdmin().nodesCapabilities(new NodesCapabilitiesRequest().path("_capabilities")) + .actionGet(); + assertThat(response.getNodes(), hasSize(2)); + assertThat(response.isSupported(), is(true)); + + // check we support some parameters of the capabilities API + response = clusterAdmin().nodesCapabilities(new NodesCapabilitiesRequest().path("_capabilities").parameters("method", "path")) + .actionGet(); + assertThat(response.getNodes(), hasSize(2)); + assertThat(response.isSupported(), is(true)); + + // check we don't support some other parameters of the capabilities API + response = clusterAdmin().nodesCapabilities(new NodesCapabilitiesRequest().path("_capabilities").parameters("method", "invalid")) + .actionGet(); + assertThat(response.getNodes(), hasSize(2)); + assertThat(response.isSupported(), is(false)); + + // check we don't support a random invalid api + // TODO this is not working yet - see https://github.com/elastic/elasticsearch/issues/107425 + /*response = clusterAdmin().nodesCapabilities(new NodesCapabilitiesRequest().path("_invalid")) + .actionGet(); + assertThat(response.getNodes(), hasSize(2)); + assertThat(response.isSupported(), is(false));*/ + } +} diff --git a/server/src/main/java/module-info.java b/server/src/main/java/module-info.java index 475158c7a8709..e6b944262094d 100644 --- a/server/src/main/java/module-info.java +++ b/server/src/main/java/module-info.java @@ -65,6 +65,7 @@ exports org.elasticsearch.action.admin.cluster.desirednodes; exports org.elasticsearch.action.admin.cluster.health; exports org.elasticsearch.action.admin.cluster.migration; + exports org.elasticsearch.action.admin.cluster.node.capabilities; exports org.elasticsearch.action.admin.cluster.node.hotthreads; exports org.elasticsearch.action.admin.cluster.node.info; exports org.elasticsearch.action.admin.cluster.node.reload; diff --git a/server/src/main/java/org/elasticsearch/action/ActionModule.java 
b/server/src/main/java/org/elasticsearch/action/ActionModule.java index ef73d0470b43e..ab93f98c5648b 100644 --- a/server/src/main/java/org/elasticsearch/action/ActionModule.java +++ b/server/src/main/java/org/elasticsearch/action/ActionModule.java @@ -29,6 +29,7 @@ import org.elasticsearch.action.admin.cluster.migration.PostFeatureUpgradeAction; import org.elasticsearch.action.admin.cluster.migration.TransportGetFeatureUpgradeStatusAction; import org.elasticsearch.action.admin.cluster.migration.TransportPostFeatureUpgradeAction; +import org.elasticsearch.action.admin.cluster.node.capabilities.TransportNodesCapabilitiesAction; import org.elasticsearch.action.admin.cluster.node.hotthreads.TransportNodesHotThreadsAction; import org.elasticsearch.action.admin.cluster.node.info.TransportNodesInfoAction; import org.elasticsearch.action.admin.cluster.node.reload.TransportNodesReloadSecureSettingsAction; @@ -284,6 +285,7 @@ import org.elasticsearch.rest.action.admin.cluster.RestGetStoredScriptAction; import org.elasticsearch.rest.action.admin.cluster.RestGetTaskAction; import org.elasticsearch.rest.action.admin.cluster.RestListTasksAction; +import org.elasticsearch.rest.action.admin.cluster.RestNodesCapabilitiesAction; import org.elasticsearch.rest.action.admin.cluster.RestNodesHotThreadsAction; import org.elasticsearch.rest.action.admin.cluster.RestNodesInfoAction; import org.elasticsearch.rest.action.admin.cluster.RestNodesStatsAction; @@ -616,6 +618,7 @@ public void reg actions.register(TransportNodesInfoAction.TYPE, TransportNodesInfoAction.class); actions.register(TransportRemoteInfoAction.TYPE, TransportRemoteInfoAction.class); + actions.register(TransportNodesCapabilitiesAction.TYPE, TransportNodesCapabilitiesAction.class); actions.register(RemoteClusterNodesAction.TYPE, RemoteClusterNodesAction.TransportAction.class); actions.register(TransportNodesStatsAction.TYPE, TransportNodesStatsAction.class); actions.register(TransportNodesUsageAction.TYPE, 
TransportNodesUsageAction.class); @@ -833,6 +836,7 @@ public void initRestHandlers(Supplier nodesInCluster, Predicate< registerHandler.accept(new RestClearVotingConfigExclusionsAction()); registerHandler.accept(new RestNodesInfoAction(settingsFilter)); registerHandler.accept(new RestRemoteClusterInfoAction()); + registerHandler.accept(new RestNodesCapabilitiesAction()); registerHandler.accept(new RestNodesStatsAction()); registerHandler.accept(new RestNodesUsageAction()); registerHandler.accept(new RestNodesHotThreadsAction()); @@ -1029,6 +1033,7 @@ public void initRestHandlers(Supplier nodesInCluster, Predicate< @Override protected void configure() { + bind(RestController.class).toInstance(restController); bind(ActionFilters.class).toInstance(actionFilters); bind(DestructiveOperations.class).toInstance(destructiveOperations); bind(new TypeLiteral>() { diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/capabilities/NodeCapability.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/capabilities/NodeCapability.java new file mode 100644 index 0000000000000..c26aa673d13fd --- /dev/null +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/capabilities/NodeCapability.java @@ -0,0 +1,43 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. 
+ */ + +package org.elasticsearch.action.admin.cluster.node.capabilities; + +import org.elasticsearch.action.support.nodes.BaseNodeResponse; +import org.elasticsearch.cluster.node.DiscoveryNode; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; + +import java.io.IOException; + +public class NodeCapability extends BaseNodeResponse { + + private final boolean supported; + + public NodeCapability(StreamInput in) throws IOException { + super(in); + + supported = in.readBoolean(); + } + + public NodeCapability(boolean supported, DiscoveryNode node) { + super(node); + this.supported = supported; + } + + public boolean isSupported() { + return supported; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + + out.writeBoolean(supported); + } +} diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/capabilities/NodesCapabilitiesRequest.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/capabilities/NodesCapabilitiesRequest.java new file mode 100644 index 0000000000000..c69d273727238 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/capabilities/NodesCapabilitiesRequest.java @@ -0,0 +1,75 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. 
+ */ + +package org.elasticsearch.action.admin.cluster.node.capabilities; + +import org.elasticsearch.action.support.nodes.BaseNodesRequest; +import org.elasticsearch.common.Strings; +import org.elasticsearch.core.RestApiVersion; +import org.elasticsearch.rest.RestRequest; + +import java.util.Set; + +public class NodesCapabilitiesRequest extends BaseNodesRequest { + + private RestRequest.Method method = RestRequest.Method.GET; + private String path = "/"; + private Set parameters = Set.of(); + private Set capabilities = Set.of(); + private RestApiVersion restApiVersion = RestApiVersion.current(); + + public NodesCapabilitiesRequest() { + // always send to all nodes + super(Strings.EMPTY_ARRAY); + } + + public NodesCapabilitiesRequest path(String path) { + this.path = path; + return this; + } + + public String path() { + return path; + } + + public NodesCapabilitiesRequest method(RestRequest.Method method) { + this.method = method; + return this; + } + + public RestRequest.Method method() { + return method; + } + + public NodesCapabilitiesRequest parameters(String... parameters) { + this.parameters = Set.of(parameters); + return this; + } + + public Set parameters() { + return parameters; + } + + public NodesCapabilitiesRequest capabilities(String... 
capabilities) { + this.capabilities = Set.of(capabilities); + return this; + } + + public Set capabilities() { + return capabilities; + } + + public NodesCapabilitiesRequest restApiVersion(RestApiVersion restApiVersion) { + this.restApiVersion = restApiVersion; + return this; + } + + public RestApiVersion restApiVersion() { + return restApiVersion; + } +} diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/capabilities/NodesCapabilitiesResponse.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/capabilities/NodesCapabilitiesResponse.java new file mode 100644 index 0000000000000..63fdb9f7da08a --- /dev/null +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/capabilities/NodesCapabilitiesResponse.java @@ -0,0 +1,46 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. 
+ */ + +package org.elasticsearch.action.admin.cluster.node.capabilities; + +import org.elasticsearch.action.FailedNodeException; +import org.elasticsearch.action.support.TransportAction; +import org.elasticsearch.action.support.nodes.BaseNodesResponse; +import org.elasticsearch.cluster.ClusterName; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.xcontent.ToXContentFragment; +import org.elasticsearch.xcontent.XContentBuilder; + +import java.io.IOException; +import java.util.List; + +public class NodesCapabilitiesResponse extends BaseNodesResponse implements ToXContentFragment { + protected NodesCapabilitiesResponse(ClusterName clusterName, List nodes, List failures) { + super(clusterName, nodes, failures); + } + + @Override + protected List readNodesFrom(StreamInput in) throws IOException { + return TransportAction.localOnly(); + } + + @Override + protected void writeNodesTo(StreamOutput out, List nodes) throws IOException { + TransportAction.localOnly(); + } + + public boolean isSupported() { + return getNodes().isEmpty() == false && getNodes().stream().allMatch(NodeCapability::isSupported); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + return builder.field("supported", isSupported()); + } +} diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/capabilities/TransportNodesCapabilitiesAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/capabilities/TransportNodesCapabilitiesAction.java new file mode 100644 index 0000000000000..7e392775bf42e --- /dev/null +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/capabilities/TransportNodesCapabilitiesAction.java @@ -0,0 +1,140 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.action.admin.cluster.node.capabilities; + +import org.elasticsearch.action.ActionType; +import org.elasticsearch.action.FailedNodeException; +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.nodes.TransportNodesAction; +import org.elasticsearch.cluster.node.DiscoveryNode; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.core.RestApiVersion; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.TransportRequest; +import org.elasticsearch.transport.TransportService; + +import java.io.IOException; +import java.util.List; +import java.util.Set; + +public class TransportNodesCapabilitiesAction extends TransportNodesAction< + NodesCapabilitiesRequest, + NodesCapabilitiesResponse, + TransportNodesCapabilitiesAction.NodeCapabilitiesRequest, + NodeCapability> { + + public static final ActionType TYPE = new ActionType<>("cluster:monitor/nodes/capabilities"); + + private final RestController restController; + + @Inject + public TransportNodesCapabilitiesAction( + ThreadPool threadPool, + ClusterService clusterService, + TransportService transportService, + ActionFilters actionFilters, + RestController restController + ) { + super( + TYPE.name(), + clusterService, + transportService, + actionFilters, + NodeCapabilitiesRequest::new, + threadPool.executor(ThreadPool.Names.MANAGEMENT) + ); + this.restController = restController; 
+ } + + @Override + protected NodesCapabilitiesResponse newResponse( + NodesCapabilitiesRequest request, + List responses, + List failures + ) { + return new NodesCapabilitiesResponse(clusterService.getClusterName(), responses, failures); + } + + @Override + protected NodeCapabilitiesRequest newNodeRequest(NodesCapabilitiesRequest request) { + return new NodeCapabilitiesRequest( + request.method(), + request.path(), + request.parameters(), + request.capabilities(), + request.restApiVersion() + ); + } + + @Override + protected NodeCapability newNodeResponse(StreamInput in, DiscoveryNode node) throws IOException { + return new NodeCapability(in); + } + + @Override + protected NodeCapability nodeOperation(NodeCapabilitiesRequest request, Task task) { + boolean supported = restController.checkSupported( + request.method, + request.path, + request.parameters, + request.capabilities, + request.restApiVersion + ); + return new NodeCapability(supported, transportService.getLocalNode()); + } + + public static class NodeCapabilitiesRequest extends TransportRequest { + private final RestRequest.Method method; + private final String path; + private final Set parameters; + private final Set capabilities; + private final RestApiVersion restApiVersion; + + public NodeCapabilitiesRequest(StreamInput in) throws IOException { + super(in); + + method = in.readEnum(RestRequest.Method.class); + path = in.readString(); + parameters = in.readCollectionAsImmutableSet(StreamInput::readString); + capabilities = in.readCollectionAsImmutableSet(StreamInput::readString); + restApiVersion = RestApiVersion.forMajor(in.readVInt()); + } + + public NodeCapabilitiesRequest( + RestRequest.Method method, + String path, + Set parameters, + Set capabilities, + RestApiVersion restApiVersion + ) { + this.method = method; + this.path = path; + this.parameters = Set.copyOf(parameters); + this.capabilities = Set.copyOf(capabilities); + this.restApiVersion = restApiVersion; + } + + @Override + public void 
writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + + out.writeEnum(method); + out.writeString(path); + out.writeCollection(parameters, StreamOutput::writeString); + out.writeCollection(capabilities, StreamOutput::writeString); + out.writeVInt(restApiVersion.major); + } + } +} diff --git a/server/src/main/java/org/elasticsearch/client/internal/ClusterAdminClient.java b/server/src/main/java/org/elasticsearch/client/internal/ClusterAdminClient.java index f2b9c5ef9631e..daae078ed9a68 100644 --- a/server/src/main/java/org/elasticsearch/client/internal/ClusterAdminClient.java +++ b/server/src/main/java/org/elasticsearch/client/internal/ClusterAdminClient.java @@ -21,6 +21,9 @@ import org.elasticsearch.action.admin.cluster.health.ClusterHealthRequestBuilder; import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse; import org.elasticsearch.action.admin.cluster.health.TransportClusterHealthAction; +import org.elasticsearch.action.admin.cluster.node.capabilities.NodesCapabilitiesRequest; +import org.elasticsearch.action.admin.cluster.node.capabilities.NodesCapabilitiesResponse; +import org.elasticsearch.action.admin.cluster.node.capabilities.TransportNodesCapabilitiesAction; import org.elasticsearch.action.admin.cluster.node.info.NodesInfoRequest; import org.elasticsearch.action.admin.cluster.node.info.NodesInfoRequestBuilder; import org.elasticsearch.action.admin.cluster.node.info.NodesInfoResponse; @@ -248,6 +251,14 @@ public NodesStatsRequestBuilder prepareNodesStats(String... 
nodesIds) { return new NodesStatsRequestBuilder(this).setNodesIds(nodesIds); } + public ActionFuture nodesCapabilities(final NodesCapabilitiesRequest request) { + return execute(TransportNodesCapabilitiesAction.TYPE, request); + } + + public void nodesCapabilities(final NodesCapabilitiesRequest request, final ActionListener listener) { + execute(TransportNodesCapabilitiesAction.TYPE, request, listener); + } + public void nodesUsage(final NodesUsageRequest request, final ActionListener listener) { execute(TransportNodesUsageAction.TYPE, request, listener); } diff --git a/server/src/main/java/org/elasticsearch/rest/BaseRestHandler.java b/server/src/main/java/org/elasticsearch/rest/BaseRestHandler.java index d075983464f76..70801cdef560b 100644 --- a/server/src/main/java/org/elasticsearch/rest/BaseRestHandler.java +++ b/server/src/main/java/org/elasticsearch/rest/BaseRestHandler.java @@ -12,6 +12,7 @@ import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Setting.Property; +import org.elasticsearch.common.util.set.Sets; import org.elasticsearch.core.CheckedConsumer; import org.elasticsearch.core.RefCounted; import org.elasticsearch.core.Releasable; @@ -77,6 +78,13 @@ public final long getUsageCount() { @Override public final void handleRequest(RestRequest request, RestChannel channel, NodeClient client) throws Exception { + // check if the query has any parameters that are not in the supported set (if declared) + Set supported = supportedQueryParameters(); + if (supported != null && supported.containsAll(request.params().keySet()) == false) { + Set unsupported = Sets.difference(request.params().keySet(), supported); + throw new IllegalArgumentException(unrecognized(request, unsupported, supported, "parameter")); + } + // prepare the request for execution; has the side effect of touching the request parameters try (var action = prepareRequest(request, client)) { diff --git 
a/server/src/main/java/org/elasticsearch/rest/RestController.java b/server/src/main/java/org/elasticsearch/rest/RestController.java index 8ce9b08eba205..16813f1141e12 100644 --- a/server/src/main/java/org/elasticsearch/rest/RestController.java +++ b/server/src/main/java/org/elasticsearch/rest/RestController.java @@ -365,6 +365,32 @@ public void dispatchBadRequest(final RestChannel channel, final ThreadContext th } } + public boolean checkSupported( + RestRequest.Method method, + String path, + Set parameters, + Set capabilities, + RestApiVersion restApiVersion + ) { + Iterator allHandlers = getAllHandlers(null, path); + while (allHandlers.hasNext()) { + RestHandler handler; + MethodHandlers handlers = allHandlers.next(); + if (handlers == null) { + handler = null; + } else { + handler = handlers.getHandler(method, restApiVersion); + } + + if (handler != null) { + var supportedParams = handler.supportedQueryParameters(); + return (supportedParams == null || supportedParams.containsAll(parameters)) + && handler.supportedCapabilities().containsAll(capabilities); + } + } + return false; + } + @Override public Map getStats() { final Iterator methodHandlersIterator = handlers.allNodeValues(); diff --git a/server/src/main/java/org/elasticsearch/rest/RestHandler.java b/server/src/main/java/org/elasticsearch/rest/RestHandler.java index c66fd72279670..4ab89618643f5 100644 --- a/server/src/main/java/org/elasticsearch/rest/RestHandler.java +++ b/server/src/main/java/org/elasticsearch/rest/RestHandler.java @@ -18,6 +18,7 @@ import java.util.Collections; import java.util.List; import java.util.Objects; +import java.util.Set; /** * Handler for REST requests @@ -85,6 +86,22 @@ default List routes() { return Collections.emptyList(); } + /** + * The set of query parameters accepted by this rest handler, + * {@code null} if query parameters should not be checked nor validated. 
+ * TODO - make this not nullable when all handlers have been updated + */ + default @Nullable Set supportedQueryParameters() { + return null; + } + + /** + * The set of capabilities this rest handler supports. + */ + default Set supportedCapabilities() { + return Set.of(); + } + /** * Controls whether requests handled by this class are allowed to to access system indices by default. * @return {@code true} if requests handled by this class should be allowed to access system indices. diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestNodesCapabilitiesAction.java b/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestNodesCapabilitiesAction.java new file mode 100644 index 0000000000000..9b89a6a932dd3 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestNodesCapabilitiesAction.java @@ -0,0 +1,60 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. 
+ */ + +package org.elasticsearch.rest.action.admin.cluster; + +import org.elasticsearch.action.admin.cluster.node.capabilities.NodesCapabilitiesRequest; +import org.elasticsearch.client.internal.node.NodeClient; +import org.elasticsearch.common.Strings; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.Scope; +import org.elasticsearch.rest.ServerlessScope; +import org.elasticsearch.rest.action.RestActions.NodesResponseRestListener; + +import java.io.IOException; +import java.net.URLDecoder; +import java.nio.charset.StandardCharsets; +import java.util.List; +import java.util.Set; + +@ServerlessScope(Scope.INTERNAL) +public class RestNodesCapabilitiesAction extends BaseRestHandler { + + @Override + public List routes() { + return List.of(new Route(RestRequest.Method.GET, "/_capabilities")); + } + + @Override + public Set supportedQueryParameters() { + return Set.of("timeout", "method", "path", "parameters", "capabilities"); + } + + @Override + public String getName() { + return "nodes_capabilities_action"; + } + + @Override + protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient client) throws IOException { + NodesCapabilitiesRequest r = new NodesCapabilitiesRequest().timeout(request.paramAsTime("timeout", null)) + .method(RestRequest.Method.valueOf(request.param("method", "GET"))) + .path(URLDecoder.decode(request.param("path"), StandardCharsets.UTF_8)) + .parameters(request.paramAsStringArray("parameters", Strings.EMPTY_ARRAY)) + .capabilities(request.paramAsStringArray("capabilities", Strings.EMPTY_ARRAY)) + .restApiVersion(request.getRestApiVersion()); + + return channel -> client.admin().cluster().nodesCapabilities(r, new NodesResponseRestListener<>(channel)); + } + + @Override + public boolean canTripCircuitBreaker() { + return false; + } +} diff --git 
a/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java b/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java index 65651b4a7eb65..2fc894c69aa4c 100644 --- a/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java +++ b/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java @@ -341,6 +341,7 @@ public class Constants { "cluster:monitor/update/health/info", "cluster:monitor/ingest/geoip/stats", "cluster:monitor/main", + "cluster:monitor/nodes/capabilities", "cluster:monitor/nodes/data_tier_usage", "cluster:monitor/nodes/hot_threads", "cluster:monitor/nodes/info", From 2d8faa875c25c3173caac70596103af94a050dc6 Mon Sep 17 00:00:00 2001 From: Fang Xing <155562079+fang-xing-esql@users.noreply.github.com> Date: Wed, 8 May 2024 10:31:57 -0400 Subject: [PATCH 060/117] [ES|QL] Create MockBigArrays with CircuitBreaker in AbstractFunctionTestCases (#108195) * create mockbigarrays with circuit breaker --- .../compute/data/BytesRefBlockBuilder.java | 4 --- .../compute/data/X-BlockBuilder.java.st | 4 --- .../function/AbstractFunctionTestCase.java | 25 ++++++++++++++----- 3 files changed, 19 insertions(+), 14 deletions(-) diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefBlockBuilder.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefBlockBuilder.java index 49075789ed4a4..6232cbdd2717c 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefBlockBuilder.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefBlockBuilder.java @@ -21,10 +21,6 @@ final class BytesRefBlockBuilder extends 
AbstractBlockBuilder implements BytesRe private BytesRefArray values; - BytesRefBlockBuilder(int estimatedSize, BlockFactory blockFactory) { - this(estimatedSize, BigArrays.NON_RECYCLING_INSTANCE, blockFactory); - } - BytesRefBlockBuilder(int estimatedSize, BigArrays bigArrays, BlockFactory blockFactory) { super(blockFactory); values = new BytesRefArray(Math.max(estimatedSize, 2), bigArrays); diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-BlockBuilder.java.st b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-BlockBuilder.java.st index 0d3d2293a1bb1..8397a0f5274f1 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-BlockBuilder.java.st +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-BlockBuilder.java.st @@ -31,10 +31,6 @@ final class $Type$BlockBuilder extends AbstractBlockBuilder implements $Type$Blo $if(BytesRef)$ private BytesRefArray values; - BytesRefBlockBuilder(int estimatedSize, BlockFactory blockFactory) { - this(estimatedSize, BigArrays.NON_RECYCLING_INSTANCE, blockFactory); - } - BytesRefBlockBuilder(int estimatedSize, BigArrays bigArrays, BlockFactory blockFactory) { super(blockFactory); values = new BytesRefArray(Math.max(estimatedSize, 2), bigArrays); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java index 1a410c518e9b1..1fd7cfe368068 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java @@ -307,7 +307,13 @@ private Object toJavaObjectUnsignedLongAware(Block block, int position) { *

*/ public final void testEvaluateBlockWithoutNulls() { - testEvaluateBlock(driverContext().blockFactory(), driverContext(), false); + assumeTrue("no warning is expected", testCase.getExpectedWarnings() == null); + try { + testEvaluateBlock(driverContext().blockFactory(), driverContext(), false); + } catch (CircuitBreakingException ex) { + assertThat(ex.getMessage(), equalTo(MockBigArrays.ERROR_MESSAGE)); + assertFalse("Test data is too large to fit in the memory", true); + } } /** @@ -315,7 +321,13 @@ public final void testEvaluateBlockWithoutNulls() { * some null values inserted between. */ public final void testEvaluateBlockWithNulls() { - testEvaluateBlock(driverContext().blockFactory(), driverContext(), true); + assumeTrue("no warning is expected", testCase.getExpectedWarnings() == null); + try { + testEvaluateBlock(driverContext().blockFactory(), driverContext(), true); + } catch (CircuitBreakingException ex) { + assertThat(ex.getMessage(), equalTo(MockBigArrays.ERROR_MESSAGE)); + assertFalse("Test data is too large to fit in the memory", true); + } } /** @@ -1543,17 +1555,18 @@ private static void writeToTempDir(String subdir, String str, String extension) private final List breakers = Collections.synchronizedList(new ArrayList<>()); protected final DriverContext driverContext() { - MockBigArrays bigArrays = new MockBigArrays(PageCacheRecycler.NON_RECYCLING_INSTANCE, ByteSizeValue.ofGb(1)); + BigArrays bigArrays = new MockBigArrays(PageCacheRecycler.NON_RECYCLING_INSTANCE, ByteSizeValue.ofMb(256)).withCircuitBreaking(); CircuitBreaker breaker = bigArrays.breakerService().getBreaker(CircuitBreaker.REQUEST); breakers.add(breaker); - return new DriverContext(bigArrays.withCircuitBreaking(), new BlockFactory(breaker, bigArrays)); + return new DriverContext(bigArrays, new BlockFactory(breaker, bigArrays)); } protected final DriverContext crankyContext() { - BigArrays bigArrays = new MockBigArrays(PageCacheRecycler.NON_RECYCLING_INSTANCE, new 
CrankyCircuitBreakerService()); + BigArrays bigArrays = new MockBigArrays(PageCacheRecycler.NON_RECYCLING_INSTANCE, new CrankyCircuitBreakerService()) + .withCircuitBreaking(); CircuitBreaker breaker = bigArrays.breakerService().getBreaker(CircuitBreaker.REQUEST); breakers.add(breaker); - return new DriverContext(bigArrays.withCircuitBreaking(), new BlockFactory(breaker, bigArrays)); + return new DriverContext(bigArrays, new BlockFactory(breaker, bigArrays)); } @After From e2c19f2ac8127f12eba7d045064cb1d580d8e64e Mon Sep 17 00:00:00 2001 From: Carlos Delgado <6339205+carlosdelest@users.noreply.github.com> Date: Wed, 8 May 2024 17:24:25 +0200 Subject: [PATCH 061/117] Fix semantic text for non snapshot tests (#108372) --- x-pack/plugin/inference/build.gradle | 8 ++++++++ x-pack/plugin/ml/build.gradle | 7 ------- 2 files changed, 8 insertions(+), 7 deletions(-) diff --git a/x-pack/plugin/inference/build.gradle b/x-pack/plugin/inference/build.gradle index 0aef8601ffcc6..3e2171d0654d5 100644 --- a/x-pack/plugin/inference/build.gradle +++ b/x-pack/plugin/inference/build.gradle @@ -4,6 +4,8 @@ * 2.0; you may not use this file except in compliance with the Elastic License * 2.0. 
*/ +import org.elasticsearch.gradle.internal.info.BuildParams + apply plugin: 'elasticsearch.internal-es-plugin' apply plugin: 'elasticsearch.internal-cluster-test' apply plugin: 'elasticsearch.internal-yaml-rest-test' @@ -36,6 +38,12 @@ dependencies { api "com.ibm.icu:icu4j:${versions.icu4j}" } +if (BuildParams.isSnapshotBuild() == false) { + tasks.named("test").configure { + systemProperty 'es.semantic_text_feature_flag_enabled', 'true' + } +} + tasks.named('yamlRestTest') { usesDefaultDistribution() } diff --git a/x-pack/plugin/ml/build.gradle b/x-pack/plugin/ml/build.gradle index 26f5ea053771c..f42dcc6179d04 100644 --- a/x-pack/plugin/ml/build.gradle +++ b/x-pack/plugin/ml/build.gradle @@ -1,6 +1,5 @@ import org.elasticsearch.gradle.VersionProperties import org.elasticsearch.gradle.internal.dra.DraResolvePlugin -import org.elasticsearch.gradle.internal.info.BuildParams apply plugin: 'elasticsearch.internal-es-plugin' apply plugin: 'elasticsearch.internal-cluster-test' @@ -115,12 +114,6 @@ artifacts { archives tasks.named("jar") } -if (BuildParams.isSnapshotBuild() == false) { - tasks.named("test").configure { - systemProperty 'es.semantic_text_feature_flag_enabled', 'true' - } -} - tasks.register("extractNativeLicenses", Copy) { dependsOn configurations.nativeBundle into "${buildDir}/extractedNativeLicenses" From 2f94aeea0c8dfe1f448381171f80fa6ffbbcfaa5 Mon Sep 17 00:00:00 2001 From: Mark Vieira Date: Wed, 8 May 2024 08:25:44 -0700 Subject: [PATCH 062/117] Refactor rolling upgrade tests to make it easier to customize (#108393) --- .../AbstractRollingUpgradeTestCase.java | 54 +++++++++++++++++++ .../upgrades/ClusterFeatureMigrationIT.java | 2 +- .../upgrades/DesiredNodesUpgradeIT.java | 2 +- .../elasticsearch/upgrades/DownsampleIT.java | 2 +- .../upgrades/FeatureUpgradeIT.java | 2 +- .../elasticsearch/upgrades/FieldCapsIT.java | 2 +- .../upgrades/HealthNodeUpgradeIT.java | 2 +- .../IgnoredMetaFieldRollingUpgradeIT.java | 2 +- 
.../elasticsearch/upgrades/IndexingIT.java | 2 +- .../ParameterizedRollingUpgradeTestCase.java | 49 ++++------------- .../upgrades/SnapshotBasedRecoveryIT.java | 2 +- .../upgrades/SystemIndicesUpgradeIT.java | 2 +- .../org/elasticsearch/upgrades/TsdbIT.java | 2 +- .../UpgradeWithOldIndexSettingsIT.java | 2 +- .../upgrades/VectorSearchIT.java | 2 +- .../org/elasticsearch/upgrades/XPackIT.java | 2 +- .../application/InferenceUpgradeTestCase.java | 4 +- 17 files changed, 80 insertions(+), 55 deletions(-) create mode 100644 qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/AbstractRollingUpgradeTestCase.java diff --git a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/AbstractRollingUpgradeTestCase.java b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/AbstractRollingUpgradeTestCase.java new file mode 100644 index 0000000000000..4837afbf6ccd2 --- /dev/null +++ b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/AbstractRollingUpgradeTestCase.java @@ -0,0 +1,54 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. 
+ */ + +package org.elasticsearch.upgrades; + +import com.carrotsearch.randomizedtesting.annotations.Name; + +import org.elasticsearch.core.SuppressForbidden; +import org.elasticsearch.test.cluster.ElasticsearchCluster; +import org.elasticsearch.test.cluster.FeatureFlag; +import org.elasticsearch.test.cluster.local.distribution.DistributionType; +import org.junit.ClassRule; +import org.junit.rules.RuleChain; +import org.junit.rules.TemporaryFolder; +import org.junit.rules.TestRule; + +import java.util.function.Supplier; + +public abstract class AbstractRollingUpgradeTestCase extends ParameterizedRollingUpgradeTestCase { + + private static final TemporaryFolder repoDirectory = new TemporaryFolder(); + + private static final ElasticsearchCluster cluster = ElasticsearchCluster.local() + .distribution(DistributionType.DEFAULT) + .version(getOldClusterTestVersion()) + .nodes(NODE_NUM) + .setting("path.repo", new Supplier<>() { + @Override + @SuppressForbidden(reason = "TemporaryFolder only has io.File methods, not nio.File") + public String get() { + return repoDirectory.getRoot().getPath(); + } + }) + .setting("xpack.security.enabled", "false") + .feature(FeatureFlag.TIME_SERIES_MODE) + .build(); + + @ClassRule + public static TestRule ruleChain = RuleChain.outerRule(repoDirectory).around(cluster); + + protected AbstractRollingUpgradeTestCase(@Name("upgradedNodes") int upgradedNodes) { + super(upgradedNodes); + } + + @Override + protected ElasticsearchCluster getUpgradeCluster() { + return cluster; + } +} diff --git a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/ClusterFeatureMigrationIT.java b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/ClusterFeatureMigrationIT.java index 0487b282179a9..73abb634dfd76 100644 --- a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/ClusterFeatureMigrationIT.java +++ b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/ClusterFeatureMigrationIT.java @@ 
-24,7 +24,7 @@ import static org.hamcrest.Matchers.empty; import static org.hamcrest.Matchers.hasSize; -public class ClusterFeatureMigrationIT extends ParameterizedRollingUpgradeTestCase { +public class ClusterFeatureMigrationIT extends AbstractRollingUpgradeTestCase { @Before public void checkMigrationVersion() { diff --git a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/DesiredNodesUpgradeIT.java b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/DesiredNodesUpgradeIT.java index 73d91ac41fcb7..c7f99b3525f74 100644 --- a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/DesiredNodesUpgradeIT.java +++ b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/DesiredNodesUpgradeIT.java @@ -33,7 +33,7 @@ import static org.hamcrest.Matchers.greaterThan; import static org.hamcrest.Matchers.is; -public class DesiredNodesUpgradeIT extends ParameterizedRollingUpgradeTestCase { +public class DesiredNodesUpgradeIT extends AbstractRollingUpgradeTestCase { private final int desiredNodesVersion; diff --git a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/DownsampleIT.java b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/DownsampleIT.java index 757f793ac4c46..488cd966ed65e 100644 --- a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/DownsampleIT.java +++ b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/DownsampleIT.java @@ -25,7 +25,7 @@ import static org.hamcrest.Matchers.equalTo; -public class DownsampleIT extends ParameterizedRollingUpgradeTestCase { +public class DownsampleIT extends AbstractRollingUpgradeTestCase { private static final String FIXED_INTERVAL = "1h"; private String index; diff --git a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/FeatureUpgradeIT.java b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/FeatureUpgradeIT.java index 4fe45c05b157b..fc77eef0ae8bb 100644 
--- a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/FeatureUpgradeIT.java +++ b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/FeatureUpgradeIT.java @@ -23,7 +23,7 @@ import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; -public class FeatureUpgradeIT extends ParameterizedRollingUpgradeTestCase { +public class FeatureUpgradeIT extends AbstractRollingUpgradeTestCase { public FeatureUpgradeIT(@Name("upgradedNodes") int upgradedNodes) { super(upgradedNodes); diff --git a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/FieldCapsIT.java b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/FieldCapsIT.java index 860cd2c0e8617..306447d8cc2cd 100644 --- a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/FieldCapsIT.java +++ b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/FieldCapsIT.java @@ -40,7 +40,7 @@ * the co-ordinating node if older nodes were included in the system */ @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/103473") -public class FieldCapsIT extends ParameterizedRollingUpgradeTestCase { +public class FieldCapsIT extends AbstractRollingUpgradeTestCase { public FieldCapsIT(@Name("upgradedNodes") int upgradedNodes) { super(upgradedNodes); diff --git a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/HealthNodeUpgradeIT.java b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/HealthNodeUpgradeIT.java index 0f210ee4b2450..6647cb413c9f5 100644 --- a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/HealthNodeUpgradeIT.java +++ b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/HealthNodeUpgradeIT.java @@ -20,7 +20,7 @@ import static org.hamcrest.CoreMatchers.equalTo; -public class HealthNodeUpgradeIT extends ParameterizedRollingUpgradeTestCase { +public class HealthNodeUpgradeIT extends AbstractRollingUpgradeTestCase { 
public HealthNodeUpgradeIT(@Name("upgradedNodes") int upgradedNodes) { super(upgradedNodes); diff --git a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/IgnoredMetaFieldRollingUpgradeIT.java b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/IgnoredMetaFieldRollingUpgradeIT.java index 874fac615b9b1..1477e2b63cf03 100644 --- a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/IgnoredMetaFieldRollingUpgradeIT.java +++ b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/IgnoredMetaFieldRollingUpgradeIT.java @@ -26,7 +26,7 @@ import java.util.Locale; import java.util.Map; -public class IgnoredMetaFieldRollingUpgradeIT extends ParameterizedRollingUpgradeTestCase { +public class IgnoredMetaFieldRollingUpgradeIT extends AbstractRollingUpgradeTestCase { private static final String TERMS_AGG_QUERY = Strings.format(""" { diff --git a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/IndexingIT.java b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/IndexingIT.java index 82485130f05ce..157e2293b69ae 100644 --- a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/IndexingIT.java +++ b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/IndexingIT.java @@ -51,7 +51,7 @@ * xpack rolling restart tests. We should work on a way to remove this * duplication but for now we have no real way to share code. 
*/ -public class IndexingIT extends ParameterizedRollingUpgradeTestCase { +public class IndexingIT extends AbstractRollingUpgradeTestCase { public IndexingIT(@Name("upgradedNodes") int upgradedNodes) { super(upgradedNodes); diff --git a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/ParameterizedRollingUpgradeTestCase.java b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/ParameterizedRollingUpgradeTestCase.java index 63ed54d05adf2..d5f645c387d61 100644 --- a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/ParameterizedRollingUpgradeTestCase.java +++ b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/ParameterizedRollingUpgradeTestCase.java @@ -14,74 +14,45 @@ import org.elasticsearch.client.Request; import org.elasticsearch.client.Response; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.core.SuppressForbidden; import org.elasticsearch.features.NodeFeature; import org.elasticsearch.index.IndexVersion; import org.elasticsearch.index.IndexVersions; import org.elasticsearch.test.cluster.ElasticsearchCluster; -import org.elasticsearch.test.cluster.FeatureFlag; -import org.elasticsearch.test.cluster.local.distribution.DistributionType; import org.elasticsearch.test.cluster.util.Version; import org.elasticsearch.test.rest.ESRestTestCase; import org.elasticsearch.test.rest.ObjectPath; import org.elasticsearch.test.rest.TestFeatureService; import org.junit.AfterClass; import org.junit.Before; -import org.junit.ClassRule; -import org.junit.rules.RuleChain; -import org.junit.rules.TemporaryFolder; -import org.junit.rules.TestRule; import java.util.HashSet; import java.util.Map; import java.util.Set; -import java.util.function.Supplier; import java.util.stream.IntStream; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.notNullValue; public abstract class ParameterizedRollingUpgradeTestCase extends ESRestTestCase { + protected static 
final int NODE_NUM = 3; private static final String OLD_CLUSTER_VERSION = System.getProperty("tests.old_cluster_version"); - - private static final TemporaryFolder repoDirectory = new TemporaryFolder(); - - private static final int NODE_NUM = 3; - - private static final ElasticsearchCluster cluster = ElasticsearchCluster.local() - .distribution(DistributionType.DEFAULT) - .version(getOldClusterTestVersion()) - .nodes(NODE_NUM) - .setting("path.repo", new Supplier<>() { - @Override - @SuppressForbidden(reason = "TemporaryFolder only has io.File methods, not nio.File") - public String get() { - return repoDirectory.getRoot().getPath(); - } - }) - .setting("xpack.security.enabled", "false") - .feature(FeatureFlag.TIME_SERIES_MODE) - .build(); - - @ClassRule - public static TestRule ruleChain = RuleChain.outerRule(repoDirectory).around(cluster); - - @ParametersFactory(shuffle = false) - public static Iterable parameters() { - return IntStream.rangeClosed(0, NODE_NUM).boxed().map(n -> new Object[] { n }).toList(); - } - private static final Set upgradedNodes = new HashSet<>(); private static TestFeatureService oldClusterTestFeatureService = null; private static boolean upgradeFailed = false; private static IndexVersion oldIndexVersion; - private final int requestedUpgradedNodes; protected ParameterizedRollingUpgradeTestCase(@Name("upgradedNodes") int upgradedNodes) { this.requestedUpgradedNodes = upgradedNodes; } + @ParametersFactory(shuffle = false) + public static Iterable parameters() { + return IntStream.rangeClosed(0, NODE_NUM).boxed().map(n -> new Object[] { n }).toList(); + } + + protected abstract ElasticsearchCluster getUpgradeCluster(); + @Before public void extractOldClusterFeatures() { if (isOldCluster() && oldClusterTestFeatureService == null) { @@ -135,7 +106,7 @@ public void upgradeNode() throws Exception { if (upgradedNodes.add(n)) { try { logger.info("Upgrading node {} to version {}", n, Version.CURRENT); - cluster.upgradeNodeToVersion(n, 
Version.CURRENT); + getUpgradeCluster().upgradeNodeToVersion(n, Version.CURRENT); } catch (Exception e) { upgradeFailed = true; throw e; @@ -199,7 +170,7 @@ protected static boolean isUpgradedCluster() { @Override protected String getTestRestCluster() { - return cluster.getHttpAddresses(); + return getUpgradeCluster().getHttpAddresses(); } @Override diff --git a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/SnapshotBasedRecoveryIT.java b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/SnapshotBasedRecoveryIT.java index ef80643c82c0d..593630546845d 100644 --- a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/SnapshotBasedRecoveryIT.java +++ b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/SnapshotBasedRecoveryIT.java @@ -42,7 +42,7 @@ import static org.hamcrest.Matchers.lessThan; import static org.hamcrest.Matchers.notNullValue; -public class SnapshotBasedRecoveryIT extends ParameterizedRollingUpgradeTestCase { +public class SnapshotBasedRecoveryIT extends AbstractRollingUpgradeTestCase { public SnapshotBasedRecoveryIT(@Name("upgradedNodes") int upgradedNodes) { super(upgradedNodes); diff --git a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/SystemIndicesUpgradeIT.java b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/SystemIndicesUpgradeIT.java index fbd6ee8aa3759..a2e3b03c9036f 100644 --- a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/SystemIndicesUpgradeIT.java +++ b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/SystemIndicesUpgradeIT.java @@ -23,7 +23,7 @@ import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.notNullValue; -public class SystemIndicesUpgradeIT extends ParameterizedRollingUpgradeTestCase { +public class SystemIndicesUpgradeIT extends AbstractRollingUpgradeTestCase { public SystemIndicesUpgradeIT(@Name("upgradedNodes") int upgradedNodes) { 
super(upgradedNodes); diff --git a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/TsdbIT.java b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/TsdbIT.java index 3ce0fc79087c2..2889885f83984 100644 --- a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/TsdbIT.java +++ b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/TsdbIT.java @@ -26,7 +26,7 @@ import static org.hamcrest.Matchers.notNullValue; import static org.hamcrest.Matchers.nullValue; -public class TsdbIT extends ParameterizedRollingUpgradeTestCase { +public class TsdbIT extends AbstractRollingUpgradeTestCase { public TsdbIT(@Name("upgradedNodes") int upgradedNodes) { super(upgradedNodes); diff --git a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/UpgradeWithOldIndexSettingsIT.java b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/UpgradeWithOldIndexSettingsIT.java index 3af344051030b..8dc3b43abf3e1 100644 --- a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/UpgradeWithOldIndexSettingsIT.java +++ b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/UpgradeWithOldIndexSettingsIT.java @@ -24,7 +24,7 @@ import static org.elasticsearch.rest.action.search.RestSearchAction.TOTAL_HITS_AS_INT_PARAM; import static org.hamcrest.Matchers.is; -public class UpgradeWithOldIndexSettingsIT extends ParameterizedRollingUpgradeTestCase { +public class UpgradeWithOldIndexSettingsIT extends AbstractRollingUpgradeTestCase { public UpgradeWithOldIndexSettingsIT(@Name("upgradedNodes") int upgradedNodes) { super(upgradedNodes); diff --git a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/VectorSearchIT.java b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/VectorSearchIT.java index e78e0978b1d80..21dbad9487d4e 100644 --- a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/VectorSearchIT.java +++ 
b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/VectorSearchIT.java @@ -22,7 +22,7 @@ import static org.hamcrest.Matchers.closeTo; import static org.hamcrest.Matchers.equalTo; -public class VectorSearchIT extends ParameterizedRollingUpgradeTestCase { +public class VectorSearchIT extends AbstractRollingUpgradeTestCase { public VectorSearchIT(@Name("upgradedNodes") int upgradedNodes) { super(upgradedNodes); } diff --git a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/XPackIT.java b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/XPackIT.java index dade5b53addae..6379a8875dfb4 100644 --- a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/XPackIT.java +++ b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/XPackIT.java @@ -22,7 +22,7 @@ * Basic tests for simple xpack functionality that are only run if the * cluster is the on the default distribution. */ -public class XPackIT extends ParameterizedRollingUpgradeTestCase { +public class XPackIT extends AbstractRollingUpgradeTestCase { public XPackIT(@Name("upgradedNodes") int upgradedNodes) { super(upgradedNodes); diff --git a/x-pack/plugin/inference/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/xpack/application/InferenceUpgradeTestCase.java b/x-pack/plugin/inference/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/xpack/application/InferenceUpgradeTestCase.java index fe08db9b94b89..ecfec2304c8a1 100644 --- a/x-pack/plugin/inference/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/xpack/application/InferenceUpgradeTestCase.java +++ b/x-pack/plugin/inference/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/xpack/application/InferenceUpgradeTestCase.java @@ -13,7 +13,7 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.inference.TaskType; import org.elasticsearch.test.http.MockWebServer; -import org.elasticsearch.upgrades.ParameterizedRollingUpgradeTestCase; 
+import org.elasticsearch.upgrades.AbstractRollingUpgradeTestCase; import java.io.IOException; import java.util.List; @@ -21,7 +21,7 @@ import static org.elasticsearch.core.Strings.format; -public class InferenceUpgradeTestCase extends ParameterizedRollingUpgradeTestCase { +public class InferenceUpgradeTestCase extends AbstractRollingUpgradeTestCase { public InferenceUpgradeTestCase(@Name("upgradedNodes") int upgradedNodes) { super(upgradedNodes); From 616e71963e195ed3306fb2721c139f6477b33e8f Mon Sep 17 00:00:00 2001 From: Tim Grein Date: Wed, 8 May 2024 18:02:02 +0200 Subject: [PATCH 063/117] [Inference API] Add Azure OpenAI completion support (#108352) --- .../org/elasticsearch/TransportVersions.java | 1 + .../org/elasticsearch/test/ESTestCase.java | 6 + .../azureopenai/AzureOpenAiActionCreator.java | 7 + .../azureopenai/AzureOpenAiActionVisitor.java | 3 + .../AzureOpenAiCompletionAction.java | 67 ++++++ .../AzureOpenAiCompletionRequestManager.java | 58 +++++ .../AzureOpenAiCompletionRequest.java | 70 ++++++ .../AzureOpenAiCompletionRequestEntity.java | 64 +++++ .../AzureOpenAiEmbeddingsRequest.java | 27 +-- .../azureopenai/AzureOpenAiRequest.java | 36 ++- .../request/azureopenai/AzureOpenAiUtils.java | 2 + .../external/response/XContentUtils.java | 2 +- .../AzureOpenAiCompletionResponseEntity.java | 114 +++++++++ .../azureopenai/AzureOpenAiModel.java | 41 ++++ .../AzureOpenAiSecretSettings.java | 31 ++- .../azureopenai/AzureOpenAiService.java | 29 ++- .../AzureOpenAiCompletionModel.java | 121 ++++++++++ ...reOpenAiCompletionRequestTaskSettings.java | 38 +++ .../AzureOpenAiCompletionServiceSettings.java | 183 ++++++++++++++ .../AzureOpenAiCompletionTaskSettings.java | 105 +++++++++ .../AzureOpenAiEmbeddingsModel.java | 37 +-- .../AzureOpenAiEmbeddingsServiceSettings.java | 2 +- .../AzureOpenAiActionCreatorTests.java | 223 ++++++++++++++++-- .../AzureOpenAiCompletionActionTests.java | 200 ++++++++++++++++ .../azureopenai/AzureOpenAiRequestTests.java | 62 +++++ 
...ureOpenAiCompletionRequestEntityTests.java | 45 ++++ .../AzureOpenAiCompletionRequestTests.java | 100 ++++++++ ...ureOpenAiEmbeddingsRequestEntityTests.java | 3 +- .../AzureOpenAiEmbeddingsRequestTests.java | 53 +++-- .../external/response/XContentUtilsTests.java | 18 ++ ...reOpenAiCompletionResponseEntityTests.java | 220 +++++++++++++++++ ...enAiChatCompletionResponseEntityTests.java | 6 +- .../AzureOpenAiCompletionModelTests.java | 142 +++++++++++ ...nAiCompletionRequestTaskSettingsTests.java | 45 ++++ ...eOpenAiCompletionServiceSettingsTests.java | 92 ++++++++ ...zureOpenAiCompletionTaskSettingsTests.java | 99 ++++++++ .../AzureOpenAiEmbeddingsModelTests.java | 30 +++ 37 files changed, 2283 insertions(+), 99 deletions(-) create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/azureopenai/AzureOpenAiCompletionAction.java create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/AzureOpenAiCompletionRequestManager.java create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/azureopenai/AzureOpenAiCompletionRequest.java create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/azureopenai/AzureOpenAiCompletionRequestEntity.java create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/azureopenai/AzureOpenAiCompletionResponseEntity.java create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/completion/AzureOpenAiCompletionModel.java create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/completion/AzureOpenAiCompletionRequestTaskSettings.java create mode 100644 
x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/completion/AzureOpenAiCompletionServiceSettings.java create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/completion/AzureOpenAiCompletionTaskSettings.java create mode 100644 x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/azureopenai/AzureOpenAiCompletionActionTests.java create mode 100644 x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/azureopenai/AzureOpenAiRequestTests.java create mode 100644 x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/azureopenai/completion/AzureOpenAiCompletionRequestEntityTests.java create mode 100644 x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/azureopenai/completion/AzureOpenAiCompletionRequestTests.java rename x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/azureopenai/{ => embeddings}/AzureOpenAiEmbeddingsRequestEntityTests.java (96%) rename x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/azureopenai/{ => embeddings}/AzureOpenAiEmbeddingsRequestTests.java (73%) create mode 100644 x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/azureopenai/AzureOpenAiCompletionResponseEntityTests.java create mode 100644 x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureopenai/completion/AzureOpenAiCompletionModelTests.java create mode 100644 x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureopenai/completion/AzureOpenAiCompletionRequestTaskSettingsTests.java create mode 100644 x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureopenai/completion/AzureOpenAiCompletionServiceSettingsTests.java create mode 100644 
x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureopenai/completion/AzureOpenAiCompletionTaskSettingsTests.java diff --git a/server/src/main/java/org/elasticsearch/TransportVersions.java b/server/src/main/java/org/elasticsearch/TransportVersions.java index 1cc7e47cddda3..db43a12cf9014 100644 --- a/server/src/main/java/org/elasticsearch/TransportVersions.java +++ b/server/src/main/java/org/elasticsearch/TransportVersions.java @@ -195,6 +195,7 @@ static TransportVersion def(int id) { public static final TransportVersion INDEXING_PRESSURE_REQUEST_REJECTIONS_COUNT = def(8_652_00_0); public static final TransportVersion ROLLUP_USAGE = def(8_653_00_0); public static final TransportVersion SECURITY_ROLE_DESCRIPTION = def(8_654_00_0); + public static final TransportVersion ML_INFERENCE_AZURE_OPENAI_COMPLETIONS = def(8_655_00_0); /* * STOP! READ THIS FIRST! No, really, diff --git a/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java index bea222a9d8341..804dbfbb2dc47 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java @@ -64,6 +64,7 @@ import org.elasticsearch.common.logging.LogConfigurator; import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.lucene.Lucene; +import org.elasticsearch.common.settings.SecureString; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.time.DateUtils; @@ -1058,6 +1059,11 @@ public static String randomAlphaOfLength(int codeUnits) { return RandomizedTest.randomAsciiOfLength(codeUnits); } + public static SecureString randomSecureStringOfLength(int codeUnits) { + var randomAlpha = randomAlphaOfLength(codeUnits); + return new SecureString(randomAlpha.toCharArray()); + } + public static String randomNullOrAlphaOfLength(int 
codeUnits) { return randomBoolean() ? null : randomAlphaOfLength(codeUnits); } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/azureopenai/AzureOpenAiActionCreator.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/azureopenai/AzureOpenAiActionCreator.java index 39eaaceae08bc..73ba286c9031a 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/azureopenai/AzureOpenAiActionCreator.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/azureopenai/AzureOpenAiActionCreator.java @@ -10,6 +10,7 @@ import org.elasticsearch.xpack.inference.external.action.ExecutableAction; import org.elasticsearch.xpack.inference.external.http.sender.Sender; import org.elasticsearch.xpack.inference.services.ServiceComponents; +import org.elasticsearch.xpack.inference.services.azureopenai.completion.AzureOpenAiCompletionModel; import org.elasticsearch.xpack.inference.services.azureopenai.embeddings.AzureOpenAiEmbeddingsModel; import java.util.Map; @@ -32,4 +33,10 @@ public ExecutableAction create(AzureOpenAiEmbeddingsModel model, Map taskSettings) { + var overriddenModel = AzureOpenAiCompletionModel.of(model, taskSettings); + return new AzureOpenAiCompletionAction(sender, overriddenModel, serviceComponents); + } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/azureopenai/AzureOpenAiActionVisitor.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/azureopenai/AzureOpenAiActionVisitor.java index 49d1ce61b12dd..f45c1d797085e 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/azureopenai/AzureOpenAiActionVisitor.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/azureopenai/AzureOpenAiActionVisitor.java @@ -8,10 
+8,13 @@ package org.elasticsearch.xpack.inference.external.action.azureopenai; import org.elasticsearch.xpack.inference.external.action.ExecutableAction; +import org.elasticsearch.xpack.inference.services.azureopenai.completion.AzureOpenAiCompletionModel; import org.elasticsearch.xpack.inference.services.azureopenai.embeddings.AzureOpenAiEmbeddingsModel; import java.util.Map; public interface AzureOpenAiActionVisitor { ExecutableAction create(AzureOpenAiEmbeddingsModel model, Map taskSettings); + + ExecutableAction create(AzureOpenAiCompletionModel model, Map taskSettings); } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/azureopenai/AzureOpenAiCompletionAction.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/azureopenai/AzureOpenAiCompletionAction.java new file mode 100644 index 0000000000000..d38d02ef9620f --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/azureopenai/AzureOpenAiCompletionAction.java @@ -0,0 +1,67 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.external.action.azureopenai; + +import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.ElasticsearchStatusException; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.core.TimeValue; +import org.elasticsearch.inference.InferenceServiceResults; +import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.xpack.inference.external.action.ExecutableAction; +import org.elasticsearch.xpack.inference.external.http.sender.AzureOpenAiCompletionRequestManager; +import org.elasticsearch.xpack.inference.external.http.sender.DocumentsOnlyInput; +import org.elasticsearch.xpack.inference.external.http.sender.InferenceInputs; +import org.elasticsearch.xpack.inference.external.http.sender.Sender; +import org.elasticsearch.xpack.inference.services.ServiceComponents; +import org.elasticsearch.xpack.inference.services.azureopenai.completion.AzureOpenAiCompletionModel; + +import java.util.Objects; + +import static org.elasticsearch.xpack.inference.external.action.ActionUtils.constructFailedToSendRequestMessage; +import static org.elasticsearch.xpack.inference.external.action.ActionUtils.createInternalServerError; +import static org.elasticsearch.xpack.inference.external.action.ActionUtils.wrapFailuresInElasticsearchException; + +public class AzureOpenAiCompletionAction implements ExecutableAction { + + private final String errorMessage; + private final AzureOpenAiCompletionRequestManager requestCreator; + private final Sender sender; + + public AzureOpenAiCompletionAction(Sender sender, AzureOpenAiCompletionModel model, ServiceComponents serviceComponents) { + Objects.requireNonNull(serviceComponents); + Objects.requireNonNull(model); + this.sender = Objects.requireNonNull(sender); + this.requestCreator = new AzureOpenAiCompletionRequestManager(model, serviceComponents.threadPool()); + this.errorMessage = constructFailedToSendRequestMessage(model.getUri(), "Azure OpenAI completion"); 
+ } + + @Override + public void execute(InferenceInputs inferenceInputs, TimeValue timeout, ActionListener listener) { + if (inferenceInputs instanceof DocumentsOnlyInput == false) { + listener.onFailure(new ElasticsearchStatusException("Invalid inference input type", RestStatus.INTERNAL_SERVER_ERROR)); + return; + } + + var docsOnlyInput = (DocumentsOnlyInput) inferenceInputs; + if (docsOnlyInput.getInputs().size() > 1) { + listener.onFailure(new ElasticsearchStatusException("Azure OpenAI completion only accepts 1 input", RestStatus.BAD_REQUEST)); + return; + } + + try { + ActionListener wrappedListener = wrapFailuresInElasticsearchException(errorMessage, listener); + + sender.send(requestCreator, inferenceInputs, timeout, wrappedListener); + } catch (ElasticsearchException e) { + listener.onFailure(e); + } catch (Exception e) { + listener.onFailure(createInternalServerError(e, errorMessage)); + } + } +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/AzureOpenAiCompletionRequestManager.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/AzureOpenAiCompletionRequestManager.java new file mode 100644 index 0000000000000..2811155f6f357 --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/AzureOpenAiCompletionRequestManager.java @@ -0,0 +1,58 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.external.http.sender; + +import org.apache.http.client.protocol.HttpClientContext; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.core.Nullable; +import org.elasticsearch.inference.InferenceServiceResults; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.xpack.inference.external.azureopenai.AzureOpenAiResponseHandler; +import org.elasticsearch.xpack.inference.external.http.retry.RequestSender; +import org.elasticsearch.xpack.inference.external.http.retry.ResponseHandler; +import org.elasticsearch.xpack.inference.external.request.azureopenai.AzureOpenAiCompletionRequest; +import org.elasticsearch.xpack.inference.external.response.azureopenai.AzureOpenAiCompletionResponseEntity; +import org.elasticsearch.xpack.inference.services.azureopenai.completion.AzureOpenAiCompletionModel; + +import java.util.List; +import java.util.Objects; +import java.util.function.Supplier; + +public class AzureOpenAiCompletionRequestManager extends AzureOpenAiRequestManager { + + private static final Logger logger = LogManager.getLogger(AzureOpenAiCompletionRequestManager.class); + + private static final ResponseHandler HANDLER = createCompletionHandler(); + + private final AzureOpenAiCompletionModel model; + + private static ResponseHandler createCompletionHandler() { + return new AzureOpenAiResponseHandler("azure openai completion", AzureOpenAiCompletionResponseEntity::fromResponse); + } + + public AzureOpenAiCompletionRequestManager(AzureOpenAiCompletionModel model, ThreadPool threadPool) { + super(threadPool, model); + this.model = Objects.requireNonNull(model); + } + + @Override + public Runnable create( + @Nullable String query, + List input, + RequestSender requestSender, + Supplier hasRequestCompletedFunction, + HttpClientContext context, + ActionListener listener + ) { + 
AzureOpenAiCompletionRequest request = new AzureOpenAiCompletionRequest(input, model); + return new ExecutableInferenceRequest(requestSender, logger, request, context, HANDLER, hasRequestCompletedFunction, listener); + } + +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/azureopenai/AzureOpenAiCompletionRequest.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/azureopenai/AzureOpenAiCompletionRequest.java new file mode 100644 index 0000000000000..8854dc7950365 --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/azureopenai/AzureOpenAiCompletionRequest.java @@ -0,0 +1,70 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.external.request.azureopenai; + +import org.apache.http.client.methods.HttpPost; +import org.apache.http.entity.ByteArrayEntity; +import org.elasticsearch.common.Strings; +import org.elasticsearch.xpack.inference.external.request.HttpRequest; +import org.elasticsearch.xpack.inference.external.request.Request; +import org.elasticsearch.xpack.inference.services.azureopenai.completion.AzureOpenAiCompletionModel; + +import java.net.URI; +import java.nio.charset.StandardCharsets; +import java.util.List; +import java.util.Objects; + +public class AzureOpenAiCompletionRequest implements AzureOpenAiRequest { + + private final List input; + + private final URI uri; + + private final AzureOpenAiCompletionModel model; + + public AzureOpenAiCompletionRequest(List input, AzureOpenAiCompletionModel model) { + this.input = input; + this.model = Objects.requireNonNull(model); + this.uri = model.getUri(); + } + + @Override + public HttpRequest 
createHttpRequest() { + var httpPost = new HttpPost(uri); + var requestEntity = Strings.toString(new AzureOpenAiCompletionRequestEntity(input, model.getTaskSettings().user())); + + ByteArrayEntity byteEntity = new ByteArrayEntity(requestEntity.getBytes(StandardCharsets.UTF_8)); + httpPost.setEntity(byteEntity); + + AzureOpenAiRequest.decorateWithAuthHeader(httpPost, model.getSecretSettings()); + + return new HttpRequest(httpPost, getInferenceEntityId()); + } + + @Override + public URI getURI() { + return this.uri; + } + + @Override + public String getInferenceEntityId() { + return model.getInferenceEntityId(); + } + + @Override + public Request truncate() { + // No truncation for Azure OpenAI completion + return this; + } + + @Override + public boolean[] getTruncationInfo() { + // No truncation for Azure OpenAI completion + return null; + } +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/azureopenai/AzureOpenAiCompletionRequestEntity.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/azureopenai/AzureOpenAiCompletionRequestEntity.java new file mode 100644 index 0000000000000..86614ef32855f --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/azureopenai/AzureOpenAiCompletionRequestEntity.java @@ -0,0 +1,64 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.external.request.azureopenai; + +import org.elasticsearch.common.Strings; +import org.elasticsearch.core.Nullable; +import org.elasticsearch.xcontent.ToXContentObject; +import org.elasticsearch.xcontent.XContentBuilder; + +import java.io.IOException; +import java.util.List; +import java.util.Objects; + +public record AzureOpenAiCompletionRequestEntity(List messages, @Nullable String user) implements ToXContentObject { + + private static final String NUMBER_OF_RETURNED_CHOICES_FIELD = "n"; + + private static final String MESSAGES_FIELD = "messages"; + + private static final String ROLE_FIELD = "role"; + + private static final String CONTENT_FIELD = "content"; + + private static final String USER_FIELD = "user"; + + public AzureOpenAiCompletionRequestEntity { + Objects.requireNonNull(messages); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + builder.startArray(MESSAGES_FIELD); + + { + for (String message : messages) { + builder.startObject(); + + { + builder.field(ROLE_FIELD, USER_FIELD); + builder.field(CONTENT_FIELD, message); + } + + builder.endObject(); + } + } + + builder.endArray(); + + builder.field(NUMBER_OF_RETURNED_CHOICES_FIELD, 1); + + if (Strings.isNullOrEmpty(user) == false) { + builder.field(USER_FIELD, user); + } + + builder.endObject(); + return builder; + } +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/azureopenai/AzureOpenAiEmbeddingsRequest.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/azureopenai/AzureOpenAiEmbeddingsRequest.java index f20398fec0e57..00af244fca913 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/azureopenai/AzureOpenAiEmbeddingsRequest.java +++ 
b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/azureopenai/AzureOpenAiEmbeddingsRequest.java @@ -7,13 +7,9 @@ package org.elasticsearch.xpack.inference.external.request.azureopenai; -import org.apache.http.HttpHeaders; import org.apache.http.client.methods.HttpPost; import org.apache.http.entity.ByteArrayEntity; -import org.apache.http.message.BasicHeader; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.ValidationException; -import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xpack.inference.common.Truncator; import org.elasticsearch.xpack.inference.external.request.HttpRequest; import org.elasticsearch.xpack.inference.external.request.Request; @@ -23,14 +19,7 @@ import java.nio.charset.StandardCharsets; import java.util.Objects; -import static org.elasticsearch.xpack.inference.external.request.RequestUtils.createAuthBearerHeader; -import static org.elasticsearch.xpack.inference.external.request.azureopenai.AzureOpenAiUtils.API_KEY_HEADER; -import static org.elasticsearch.xpack.inference.services.azureopenai.AzureOpenAiSecretSettings.API_KEY; -import static org.elasticsearch.xpack.inference.services.azureopenai.AzureOpenAiSecretSettings.ENTRA_ID; - public class AzureOpenAiEmbeddingsRequest implements AzureOpenAiRequest { - private static final String MISSING_AUTHENTICATION_ERROR_MESSAGE = - "The request does not have any authentication methods set. 
One of [%s] or [%s] is required."; private final Truncator truncator; private final Truncator.TruncationResult truncationResult; @@ -59,21 +48,7 @@ public HttpRequest createHttpRequest() { ByteArrayEntity byteEntity = new ByteArrayEntity(requestEntity.getBytes(StandardCharsets.UTF_8)); httpPost.setEntity(byteEntity); - httpPost.setHeader(new BasicHeader(HttpHeaders.CONTENT_TYPE, XContentType.JSON.mediaType())); - - var entraId = model.getSecretSettings().entraId(); - var apiKey = model.getSecretSettings().apiKey(); - - if (entraId != null && entraId.isEmpty() == false) { - httpPost.setHeader(createAuthBearerHeader(entraId)); - } else if (apiKey != null && apiKey.isEmpty() == false) { - httpPost.setHeader(new BasicHeader(API_KEY_HEADER, apiKey.toString())); - } else { - // should never happen due to the checks on the secret settings, but just in case - ValidationException validationException = new ValidationException(); - validationException.addValidationError(Strings.format(MISSING_AUTHENTICATION_ERROR_MESSAGE, API_KEY, ENTRA_ID)); - throw validationException; - } + AzureOpenAiRequest.decorateWithAuthHeader(httpPost, model.getSecretSettings()); return new HttpRequest(httpPost, getInferenceEntityId()); } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/azureopenai/AzureOpenAiRequest.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/azureopenai/AzureOpenAiRequest.java index edb7c70b3903e..79a0e4a4eba33 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/azureopenai/AzureOpenAiRequest.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/azureopenai/AzureOpenAiRequest.java @@ -7,6 +7,40 @@ package org.elasticsearch.xpack.inference.external.request.azureopenai; +import org.apache.http.HttpHeaders; +import org.apache.http.client.methods.HttpPost; +import 
org.apache.http.message.BasicHeader; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.ValidationException; +import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xpack.inference.external.request.Request; +import org.elasticsearch.xpack.inference.services.azureopenai.AzureOpenAiSecretSettings; -public interface AzureOpenAiRequest extends Request {} +import static org.elasticsearch.xpack.inference.external.request.RequestUtils.createAuthBearerHeader; +import static org.elasticsearch.xpack.inference.external.request.azureopenai.AzureOpenAiUtils.API_KEY_HEADER; +import static org.elasticsearch.xpack.inference.services.azureopenai.AzureOpenAiSecretSettings.API_KEY; +import static org.elasticsearch.xpack.inference.services.azureopenai.AzureOpenAiSecretSettings.ENTRA_ID; + +public interface AzureOpenAiRequest extends Request { + + String MISSING_AUTHENTICATION_ERROR_MESSAGE = + "The request does not have any authentication methods set. One of [%s] or [%s] is required."; + + static void decorateWithAuthHeader(HttpPost httpPost, AzureOpenAiSecretSettings secretSettings) { + httpPost.setHeader(new BasicHeader(HttpHeaders.CONTENT_TYPE, XContentType.JSON.mediaType())); + + var entraId = secretSettings.entraId(); + var apiKey = secretSettings.apiKey(); + + if (entraId != null && entraId.isEmpty() == false) { + httpPost.setHeader(createAuthBearerHeader(entraId)); + } else if (apiKey != null && apiKey.isEmpty() == false) { + httpPost.setHeader(new BasicHeader(API_KEY_HEADER, apiKey.toString())); + } else { + // should never happen due to the checks on the secret settings, but just in case + ValidationException validationException = new ValidationException(); + validationException.addValidationError(Strings.format(MISSING_AUTHENTICATION_ERROR_MESSAGE, API_KEY, ENTRA_ID)); + throw validationException; + } + } +} diff --git 
a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/azureopenai/AzureOpenAiUtils.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/azureopenai/AzureOpenAiUtils.java index 16a02a4c06c1c..6e657640e27ec 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/azureopenai/AzureOpenAiUtils.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/azureopenai/AzureOpenAiUtils.java @@ -13,6 +13,8 @@ public class AzureOpenAiUtils { public static final String OPENAI_PATH = "openai"; public static final String DEPLOYMENTS_PATH = "deployments"; public static final String EMBEDDINGS_PATH = "embeddings"; + public static final String CHAT_PATH = "chat"; + public static final String COMPLETIONS_PATH = "completions"; public static final String API_VERSION_PARAMETER = "api-version"; public static final String API_KEY_HEADER = "api-key"; diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/XContentUtils.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/XContentUtils.java index 42fd0ddc812ec..55a7f35710cf6 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/XContentUtils.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/XContentUtils.java @@ -39,7 +39,7 @@ public static void moveToFirstToken(XContentParser parser) throws IOException { public static void positionParserAtTokenAfterField(XContentParser parser, String field, String errorMsgTemplate) throws IOException { XContentParser.Token token = parser.nextToken(); - while (token != null && token != XContentParser.Token.END_OBJECT) { + while (token != null) { if (token == XContentParser.Token.FIELD_NAME && parser.currentName().equals(field)) { parser.nextToken(); return; 
diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/azureopenai/AzureOpenAiCompletionResponseEntity.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/azureopenai/AzureOpenAiCompletionResponseEntity.java new file mode 100644 index 0000000000000..ca1df7027cb40 --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/azureopenai/AzureOpenAiCompletionResponseEntity.java @@ -0,0 +1,114 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.external.response.azureopenai; + +import org.elasticsearch.common.xcontent.LoggingDeprecationHandler; +import org.elasticsearch.xcontent.XContentFactory; +import org.elasticsearch.xcontent.XContentParser; +import org.elasticsearch.xcontent.XContentParserConfiguration; +import org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.xpack.core.inference.results.ChatCompletionResults; +import org.elasticsearch.xpack.inference.external.http.HttpResult; +import org.elasticsearch.xpack.inference.external.request.Request; + +import java.io.IOException; +import java.util.List; + +import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken; +import static org.elasticsearch.xpack.inference.external.response.XContentUtils.moveToFirstToken; +import static org.elasticsearch.xpack.inference.external.response.XContentUtils.positionParserAtTokenAfterField; + +public class AzureOpenAiCompletionResponseEntity { + + private static final String FAILED_TO_FIND_FIELD_TEMPLATE = "Failed to find required field [%s] in Azure OpenAI completions response"; + + /** + * Parses the Azure OpenAI completion response. 
+ * For a request like: + * + *
+     *     
+     *         {
+     *             "inputs": "Please summarize this text: some text"
+     *         }
+     *     
+     * 
+ * + * The response would look like: + * + *
+     *     
+     *         {
+     *     "choices": [
+     *         {
+     *             "content_filter_results": {
+     *                 "hate": { ... },
+     *                 "self_harm": { ... },
+     *                 "sexual": { ... },
+     *                 "violence": { ... }
+     *             },
+     *             "finish_reason": "stop",
+     *             "index": 0,
+     *             "logprobs": null,
+     *             "message": {
+     *                 "content": "response",
+     *                 "role": "assistant"
+     *             }
+     *         }
+     *     ],
+     *     "created": 1714982782,
+     *     "id": "...",
+     *     "model": "gpt-4",
+     *     "object": "chat.completion",
+     *     "prompt_filter_results": [
+     *         {
+     *             "prompt_index": 0,
+     *             "content_filter_results": {
+     *                 "hate": { ... },
+     *                 "self_harm": { ... },
+     *                 "sexual": { ... },
+     *                 "violence": { ... }
+     *             }
+     *         }
+     *     ],
+     *     "system_fingerprint": null,
+     *     "usage": { ... }
+     * }
+     *     
+     * 
+ */ + public static ChatCompletionResults fromResponse(Request request, HttpResult response) throws IOException { + var parserConfig = XContentParserConfiguration.EMPTY.withDeprecationHandler(LoggingDeprecationHandler.INSTANCE); + try (XContentParser jsonParser = XContentFactory.xContent(XContentType.JSON).createParser(parserConfig, response.body())) { + moveToFirstToken(jsonParser); + + XContentParser.Token token = jsonParser.currentToken(); + ensureExpectedToken(XContentParser.Token.START_OBJECT, token, jsonParser); + + positionParserAtTokenAfterField(jsonParser, "choices", FAILED_TO_FIND_FIELD_TEMPLATE); + + jsonParser.nextToken(); + ensureExpectedToken(XContentParser.Token.START_OBJECT, jsonParser.currentToken(), jsonParser); + + positionParserAtTokenAfterField(jsonParser, "message", FAILED_TO_FIND_FIELD_TEMPLATE); + + token = jsonParser.currentToken(); + + ensureExpectedToken(XContentParser.Token.START_OBJECT, token, jsonParser); + + positionParserAtTokenAfterField(jsonParser, "content", FAILED_TO_FIND_FIELD_TEMPLATE); + + XContentParser.Token contentToken = jsonParser.currentToken(); + ensureExpectedToken(XContentParser.Token.VALUE_STRING, contentToken, jsonParser); + String content = jsonParser.text(); + + return new ChatCompletionResults(List.of(new ChatCompletionResults.Result(content))); + } + } + +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/AzureOpenAiModel.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/AzureOpenAiModel.java index 5e50229e25643..708088af54cc2 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/AzureOpenAiModel.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/AzureOpenAiModel.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.inference.services.azureopenai; +import org.apache.http.client.utils.URIBuilder; import 
org.elasticsearch.inference.Model; import org.elasticsearch.inference.ModelConfigurations; import org.elasticsearch.inference.ModelSecrets; @@ -14,11 +15,18 @@ import org.elasticsearch.inference.TaskSettings; import org.elasticsearch.xpack.inference.external.action.ExecutableAction; import org.elasticsearch.xpack.inference.external.action.azureopenai.AzureOpenAiActionVisitor; +import org.elasticsearch.xpack.inference.external.request.azureopenai.AzureOpenAiUtils; import java.net.URI; +import java.net.URISyntaxException; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; import java.util.Map; import java.util.Objects; +import static org.elasticsearch.core.Strings.format; + public abstract class AzureOpenAiModel extends Model { protected URI uri; @@ -50,6 +58,30 @@ protected AzureOpenAiModel(AzureOpenAiModel model, ServiceSettings serviceSettin public abstract ExecutableAction accept(AzureOpenAiActionVisitor creator, Map taskSettings); + public final URI buildUriString() throws URISyntaxException { + return AzureOpenAiModel.buildUri(resourceName(), deploymentId(), apiVersion(), operationPathSegments()); + } + + // use only for testing directly + public static URI buildUri(String resourceName, String deploymentId, String apiVersion, String... 
pathSegments) + throws URISyntaxException { + String hostname = format("%s.%s", resourceName, AzureOpenAiUtils.HOST_SUFFIX); + + return new URIBuilder().setScheme("https") + .setHost(hostname) + .setPathSegments(createPathSegmentsList(deploymentId, pathSegments)) + .addParameter(AzureOpenAiUtils.API_VERSION_PARAMETER, apiVersion) + .build(); + } + + private static List createPathSegmentsList(String deploymentId, String[] pathSegments) { + List pathSegmentsList = new ArrayList<>( + List.of(AzureOpenAiUtils.OPENAI_PATH, AzureOpenAiUtils.DEPLOYMENTS_PATH, deploymentId) + ); + pathSegmentsList.addAll(Arrays.asList(pathSegments)); + return pathSegmentsList; + } + public URI getUri() { return uri; } @@ -62,4 +94,13 @@ public void setUri(URI newUri) { public AzureOpenAiRateLimitServiceSettings rateLimitServiceSettings() { return rateLimitServiceSettings; } + + // TODO: can be inferred directly from modelConfigurations.getServiceSettings(); will be addressed with separate refactoring + public abstract String resourceName(); + + public abstract String deploymentId(); + + public abstract String apiVersion(); + + public abstract String[] operationPathSegments(); } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/AzureOpenAiSecretSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/AzureOpenAiSecretSettings.java index f871fe6c080a1..48e45f368bfe2 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/AzureOpenAiSecretSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/AzureOpenAiSecretSettings.java @@ -25,12 +25,16 @@ import static org.elasticsearch.core.Strings.format; import static org.elasticsearch.xpack.inference.services.ServiceUtils.extractOptionalSecureString; -public record AzureOpenAiSecretSettings(@Nullable SecureString apiKey, @Nullable 
SecureString entraId) implements SecretSettings { +public class AzureOpenAiSecretSettings implements SecretSettings { public static final String NAME = "azure_openai_secret_settings"; public static final String API_KEY = "api_key"; public static final String ENTRA_ID = "entra_id"; + private final SecureString entraId; + + private final SecureString apiKey; + public static AzureOpenAiSecretSettings fromMap(@Nullable Map map) { if (map == null) { return null; @@ -59,14 +63,24 @@ public static AzureOpenAiSecretSettings fromMap(@Nullable Map ma return new AzureOpenAiSecretSettings(secureApiToken, secureEntraId); } - public AzureOpenAiSecretSettings { + public AzureOpenAiSecretSettings(@Nullable SecureString apiKey, @Nullable SecureString entraId) { Objects.requireNonNullElse(apiKey, entraId); + this.apiKey = apiKey; + this.entraId = entraId; } public AzureOpenAiSecretSettings(StreamInput in) throws IOException { this(in.readOptionalSecureString(), in.readOptionalSecureString()); } + public SecureString apiKey() { + return apiKey; + } + + public SecureString entraId() { + return entraId; + } + @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); @@ -98,4 +112,17 @@ public void writeTo(StreamOutput out) throws IOException { out.writeOptionalSecureString(apiKey); out.writeOptionalSecureString(entraId); } + + @Override + public boolean equals(Object object) { + if (this == object) return true; + if (object == null || getClass() != object.getClass()) return false; + AzureOpenAiSecretSettings that = (AzureOpenAiSecretSettings) object; + return Objects.equals(entraId, that.entraId) && Objects.equals(apiKey, that.apiKey); + } + + @Override + public int hashCode() { + return Objects.hash(entraId, apiKey); + } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/AzureOpenAiService.java 
b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/AzureOpenAiService.java index c6b97e22b099d..e0e48ab20a86b 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/AzureOpenAiService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/AzureOpenAiService.java @@ -35,6 +35,7 @@ import org.elasticsearch.xpack.inference.services.SenderService; import org.elasticsearch.xpack.inference.services.ServiceComponents; import org.elasticsearch.xpack.inference.services.ServiceUtils; +import org.elasticsearch.xpack.inference.services.azureopenai.completion.AzureOpenAiCompletionModel; import org.elasticsearch.xpack.inference.services.azureopenai.embeddings.AzureOpenAiEmbeddingsModel; import org.elasticsearch.xpack.inference.services.azureopenai.embeddings.AzureOpenAiEmbeddingsServiceSettings; @@ -121,19 +122,23 @@ private static AzureOpenAiModel createModel( String failureMessage, ConfigurationParseContext context ) { - if (taskType == TaskType.TEXT_EMBEDDING) { - return new AzureOpenAiEmbeddingsModel( - inferenceEntityId, - taskType, - NAME, - serviceSettings, - taskSettings, - secretSettings, - context - ); + switch (taskType) { + case TEXT_EMBEDDING -> { + return new AzureOpenAiEmbeddingsModel( + inferenceEntityId, + taskType, + NAME, + serviceSettings, + taskSettings, + secretSettings, + context + ); + } + case COMPLETION -> { + return new AzureOpenAiCompletionModel(inferenceEntityId, taskType, NAME, serviceSettings, taskSettings, secretSettings); + } + default -> throw new ElasticsearchStatusException(failureMessage, RestStatus.BAD_REQUEST); } - - throw new ElasticsearchStatusException(failureMessage, RestStatus.BAD_REQUEST); } @Override diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/completion/AzureOpenAiCompletionModel.java 
b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/completion/AzureOpenAiCompletionModel.java new file mode 100644 index 0000000000000..05cb663453542 --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/completion/AzureOpenAiCompletionModel.java @@ -0,0 +1,121 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.services.azureopenai.completion; + +import org.elasticsearch.core.Nullable; +import org.elasticsearch.inference.ModelConfigurations; +import org.elasticsearch.inference.ModelSecrets; +import org.elasticsearch.inference.TaskType; +import org.elasticsearch.xpack.inference.external.action.ExecutableAction; +import org.elasticsearch.xpack.inference.external.action.azureopenai.AzureOpenAiActionVisitor; +import org.elasticsearch.xpack.inference.external.request.azureopenai.AzureOpenAiUtils; +import org.elasticsearch.xpack.inference.services.azureopenai.AzureOpenAiModel; +import org.elasticsearch.xpack.inference.services.azureopenai.AzureOpenAiSecretSettings; + +import java.net.URISyntaxException; +import java.util.Map; + +public class AzureOpenAiCompletionModel extends AzureOpenAiModel { + + public static AzureOpenAiCompletionModel of(AzureOpenAiCompletionModel model, Map taskSettings) { + if (taskSettings == null || taskSettings.isEmpty()) { + return model; + } + + var requestTaskSettings = AzureOpenAiCompletionRequestTaskSettings.fromMap(taskSettings); + return new AzureOpenAiCompletionModel(model, AzureOpenAiCompletionTaskSettings.of(model.getTaskSettings(), requestTaskSettings)); + } + + public AzureOpenAiCompletionModel( + String inferenceEntityId, + TaskType taskType, + String service, + Map 
serviceSettings, + Map taskSettings, + @Nullable Map secrets + ) { + this( + inferenceEntityId, + taskType, + service, + AzureOpenAiCompletionServiceSettings.fromMap(serviceSettings), + AzureOpenAiCompletionTaskSettings.fromMap(taskSettings), + AzureOpenAiSecretSettings.fromMap(secrets) + ); + } + + // Should only be used directly for testing + AzureOpenAiCompletionModel( + String inferenceEntityId, + TaskType taskType, + String service, + AzureOpenAiCompletionServiceSettings serviceSettings, + AzureOpenAiCompletionTaskSettings taskSettings, + @Nullable AzureOpenAiSecretSettings secrets + ) { + super( + new ModelConfigurations(inferenceEntityId, taskType, service, serviceSettings, taskSettings), + new ModelSecrets(secrets), + serviceSettings + ); + try { + this.uri = buildUriString(); + } catch (URISyntaxException e) { + throw new RuntimeException(e); + } + } + + public AzureOpenAiCompletionModel(AzureOpenAiCompletionModel originalModel, AzureOpenAiCompletionServiceSettings serviceSettings) { + super(originalModel, serviceSettings); + } + + private AzureOpenAiCompletionModel(AzureOpenAiCompletionModel originalModel, AzureOpenAiCompletionTaskSettings taskSettings) { + super(originalModel, taskSettings); + } + + @Override + public AzureOpenAiCompletionServiceSettings getServiceSettings() { + return (AzureOpenAiCompletionServiceSettings) super.getServiceSettings(); + } + + @Override + public AzureOpenAiCompletionTaskSettings getTaskSettings() { + return (AzureOpenAiCompletionTaskSettings) super.getTaskSettings(); + } + + @Override + public AzureOpenAiSecretSettings getSecretSettings() { + return (AzureOpenAiSecretSettings) super.getSecretSettings(); + } + + @Override + public ExecutableAction accept(AzureOpenAiActionVisitor creator, Map taskSettings) { + return creator.create(this, taskSettings); + } + + @Override + public String resourceName() { + return getServiceSettings().resourceName(); + } + + @Override + public String deploymentId() { + return 
getServiceSettings().deploymentId(); + } + + @Override + public String apiVersion() { + return getServiceSettings().apiVersion(); + } + + @Override + public String[] operationPathSegments() { + return new String[] { AzureOpenAiUtils.CHAT_PATH, AzureOpenAiUtils.COMPLETIONS_PATH }; + } + +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/completion/AzureOpenAiCompletionRequestTaskSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/completion/AzureOpenAiCompletionRequestTaskSettings.java new file mode 100644 index 0000000000000..5dd42bb1b911f --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/completion/AzureOpenAiCompletionRequestTaskSettings.java @@ -0,0 +1,38 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.services.azureopenai.completion; + +import org.elasticsearch.common.ValidationException; +import org.elasticsearch.core.Nullable; +import org.elasticsearch.inference.ModelConfigurations; + +import java.util.Map; + +import static org.elasticsearch.xpack.inference.services.ServiceUtils.extractOptionalString; +import static org.elasticsearch.xpack.inference.services.azureopenai.AzureOpenAiServiceFields.USER; + +public record AzureOpenAiCompletionRequestTaskSettings(@Nullable String user) { + + public static final AzureOpenAiCompletionRequestTaskSettings EMPTY_SETTINGS = new AzureOpenAiCompletionRequestTaskSettings(null); + + public static AzureOpenAiCompletionRequestTaskSettings fromMap(Map map) { + if (map.isEmpty()) { + return AzureOpenAiCompletionRequestTaskSettings.EMPTY_SETTINGS; + } + + ValidationException validationException = new ValidationException(); + + String user = extractOptionalString(map, USER, ModelConfigurations.TASK_SETTINGS, validationException); + + if (validationException.validationErrors().isEmpty() == false) { + throw validationException; + } + + return new AzureOpenAiCompletionRequestTaskSettings(user); + } +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/completion/AzureOpenAiCompletionServiceSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/completion/AzureOpenAiCompletionServiceSettings.java new file mode 100644 index 0000000000000..4100ce7358a3f --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/completion/AzureOpenAiCompletionServiceSettings.java @@ -0,0 +1,183 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.services.azureopenai.completion; + +import org.elasticsearch.TransportVersion; +import org.elasticsearch.TransportVersions; +import org.elasticsearch.common.ValidationException; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.core.Nullable; +import org.elasticsearch.inference.ModelConfigurations; +import org.elasticsearch.inference.ServiceSettings; +import org.elasticsearch.xcontent.ToXContent; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xpack.inference.services.azureopenai.AzureOpenAiRateLimitServiceSettings; +import org.elasticsearch.xpack.inference.services.settings.FilteredXContentObject; +import org.elasticsearch.xpack.inference.services.settings.RateLimitSettings; + +import java.io.IOException; +import java.util.Map; +import java.util.Objects; + +import static org.elasticsearch.xpack.inference.services.ServiceUtils.extractRequiredString; +import static org.elasticsearch.xpack.inference.services.azureopenai.AzureOpenAiServiceFields.API_VERSION; +import static org.elasticsearch.xpack.inference.services.azureopenai.AzureOpenAiServiceFields.DEPLOYMENT_ID; +import static org.elasticsearch.xpack.inference.services.azureopenai.AzureOpenAiServiceFields.RESOURCE_NAME; + +public class AzureOpenAiCompletionServiceSettings extends FilteredXContentObject + implements + ServiceSettings, + AzureOpenAiRateLimitServiceSettings { + + public static final String NAME = "azure_openai_completions_service_settings"; + + /** + * Rate limit documentation can be found here: + * + * Limits per region per model id + * https://learn.microsoft.com/en-us/azure/ai-services/openai/quotas-limits + * + * How to change the limits + * 
https://learn.microsoft.com/en-us/azure/ai-services/openai/how-to/quota?tabs=rest + * + * Blog giving some examples + * https://techcommunity.microsoft.com/t5/fasttrack-for-azure/optimizing-azure-openai-a-guide-to-limits-quotas-and-best/ba-p/4076268 + * + * According to the docs 1000 tokens per minute (TPM) = 6 requests per minute (RPM). The limits change depending on the region + * and model. The lowest chat completions limit is 20k TPM, so we'll default to that. + * Calculation: 20K TPM = 20 * 6 = 120 requests per minute (used `francecentral` and `gpt-4` as basis for the calculation). + */ + private static final RateLimitSettings DEFAULT_RATE_LIMIT_SETTINGS = new RateLimitSettings(120); + + public static AzureOpenAiCompletionServiceSettings fromMap(Map map) { + ValidationException validationException = new ValidationException(); + + var settings = fromMap(map, validationException); + + if (validationException.validationErrors().isEmpty() == false) { + throw validationException; + } + + return new AzureOpenAiCompletionServiceSettings(settings); + } + + private static AzureOpenAiCompletionServiceSettings.CommonFields fromMap( + Map map, + ValidationException validationException + ) { + String resourceName = extractRequiredString(map, RESOURCE_NAME, ModelConfigurations.SERVICE_SETTINGS, validationException); + String deploymentId = extractRequiredString(map, DEPLOYMENT_ID, ModelConfigurations.SERVICE_SETTINGS, validationException); + String apiVersion = extractRequiredString(map, API_VERSION, ModelConfigurations.SERVICE_SETTINGS, validationException); + RateLimitSettings rateLimitSettings = RateLimitSettings.of(map, DEFAULT_RATE_LIMIT_SETTINGS, validationException); + + return new AzureOpenAiCompletionServiceSettings.CommonFields(resourceName, deploymentId, apiVersion, rateLimitSettings); + } + + private record CommonFields(String resourceName, String deploymentId, String apiVersion, RateLimitSettings rateLimitSettings) {} + + private final String resourceName; + 
private final String deploymentId; + private final String apiVersion; + + private final RateLimitSettings rateLimitSettings; + + public AzureOpenAiCompletionServiceSettings( + String resourceName, + String deploymentId, + String apiVersion, + @Nullable RateLimitSettings rateLimitSettings + ) { + this.resourceName = resourceName; + this.deploymentId = deploymentId; + this.apiVersion = apiVersion; + this.rateLimitSettings = Objects.requireNonNullElse(rateLimitSettings, DEFAULT_RATE_LIMIT_SETTINGS); + } + + public AzureOpenAiCompletionServiceSettings(StreamInput in) throws IOException { + resourceName = in.readString(); + deploymentId = in.readString(); + apiVersion = in.readString(); + rateLimitSettings = new RateLimitSettings(in); + } + + private AzureOpenAiCompletionServiceSettings(AzureOpenAiCompletionServiceSettings.CommonFields fields) { + this(fields.resourceName, fields.deploymentId, fields.apiVersion, fields.rateLimitSettings); + } + + public String resourceName() { + return resourceName; + } + + public String deploymentId() { + return deploymentId; + } + + @Override + public RateLimitSettings rateLimitSettings() { + return rateLimitSettings; + } + + public String apiVersion() { + return apiVersion; + } + + @Override + public String getWriteableName() { + return NAME; + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException { + builder.startObject(); + + toXContentFragmentOfExposedFields(builder, params); + rateLimitSettings.toXContent(builder, params); + + builder.endObject(); + return builder; + } + + @Override + protected XContentBuilder toXContentFragmentOfExposedFields(XContentBuilder builder, ToXContent.Params params) throws IOException { + builder.field(RESOURCE_NAME, resourceName); + builder.field(DEPLOYMENT_ID, deploymentId); + builder.field(API_VERSION, apiVersion); + + return builder; + } + + @Override + public TransportVersion getMinimalSupportedVersion() { + return
TransportVersions.ML_INFERENCE_AZURE_OPENAI_COMPLETIONS; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeString(resourceName); + out.writeString(deploymentId); + out.writeString(apiVersion); + rateLimitSettings.writeTo(out); + } + + @Override + public boolean equals(Object object) { + if (this == object) return true; + if (object == null || getClass() != object.getClass()) return false; + AzureOpenAiCompletionServiceSettings that = (AzureOpenAiCompletionServiceSettings) object; + return Objects.equals(resourceName, that.resourceName) + && Objects.equals(deploymentId, that.deploymentId) + && Objects.equals(apiVersion, that.apiVersion) + && Objects.equals(rateLimitSettings, that.rateLimitSettings); + } + + @Override + public int hashCode() { + return Objects.hash(resourceName, deploymentId, apiVersion, rateLimitSettings); + } +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/completion/AzureOpenAiCompletionTaskSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/completion/AzureOpenAiCompletionTaskSettings.java new file mode 100644 index 0000000000000..6e9f77e1ade21 --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/completion/AzureOpenAiCompletionTaskSettings.java @@ -0,0 +1,105 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.services.azureopenai.completion; + +import org.elasticsearch.TransportVersion; +import org.elasticsearch.TransportVersions; +import org.elasticsearch.common.ValidationException; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.core.Nullable; +import org.elasticsearch.inference.ModelConfigurations; +import org.elasticsearch.inference.TaskSettings; +import org.elasticsearch.xcontent.XContentBuilder; + +import java.io.IOException; +import java.util.Map; +import java.util.Objects; + +import static org.elasticsearch.xpack.inference.services.ServiceUtils.extractOptionalString; + +public class AzureOpenAiCompletionTaskSettings implements TaskSettings { + + public static final String NAME = "azure_openai_completion_task_settings"; + + public static final String USER = "user"; + + public static AzureOpenAiCompletionTaskSettings fromMap(Map map) { + ValidationException validationException = new ValidationException(); + + String user = extractOptionalString(map, USER, ModelConfigurations.TASK_SETTINGS, validationException); + + if (validationException.validationErrors().isEmpty() == false) { + throw validationException; + } + + return new AzureOpenAiCompletionTaskSettings(user); + } + + private final String user; + + public static AzureOpenAiCompletionTaskSettings of( + AzureOpenAiCompletionTaskSettings originalSettings, + AzureOpenAiCompletionRequestTaskSettings requestSettings + ) { + var userToUse = requestSettings.user() == null ? 
originalSettings.user : requestSettings.user(); + return new AzureOpenAiCompletionTaskSettings(userToUse); + } + + public AzureOpenAiCompletionTaskSettings(@Nullable String user) { + this.user = user; + } + + public AzureOpenAiCompletionTaskSettings(StreamInput in) throws IOException { + this.user = in.readOptionalString(); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + { + if (user != null) { + builder.field(USER, user); + } + } + builder.endObject(); + return builder; + } + + public String user() { + return user; + } + + @Override + public String getWriteableName() { + return NAME; + } + + @Override + public TransportVersion getMinimalSupportedVersion() { + return TransportVersions.ML_INFERENCE_AZURE_OPENAI_COMPLETIONS; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeOptionalString(user); + } + + @Override + public boolean equals(Object object) { + if (this == object) return true; + if (object == null || getClass() != object.getClass()) return false; + AzureOpenAiCompletionTaskSettings that = (AzureOpenAiCompletionTaskSettings) object; + return Objects.equals(user, that.user); + } + + @Override + public int hashCode() { + return Objects.hash(user); + } +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/embeddings/AzureOpenAiEmbeddingsModel.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/embeddings/AzureOpenAiEmbeddingsModel.java index 93d1e31a3bed1..377bb33f58619 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/embeddings/AzureOpenAiEmbeddingsModel.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/embeddings/AzureOpenAiEmbeddingsModel.java @@ -7,7 +7,6 @@ package 
org.elasticsearch.xpack.inference.services.azureopenai.embeddings; -import org.apache.http.client.utils.URIBuilder; import org.elasticsearch.core.Nullable; import org.elasticsearch.inference.ModelConfigurations; import org.elasticsearch.inference.ModelSecrets; @@ -19,12 +18,9 @@ import org.elasticsearch.xpack.inference.services.azureopenai.AzureOpenAiModel; import org.elasticsearch.xpack.inference.services.azureopenai.AzureOpenAiSecretSettings; -import java.net.URI; import java.net.URISyntaxException; import java.util.Map; -import static org.elasticsearch.core.Strings.format; - public class AzureOpenAiEmbeddingsModel extends AzureOpenAiModel { public static AzureOpenAiEmbeddingsModel of(AzureOpenAiEmbeddingsModel model, Map taskSettings) { @@ -70,7 +66,7 @@ public AzureOpenAiEmbeddingsModel( serviceSettings ); try { - this.uri = getEmbeddingsUri(serviceSettings.resourceName(), serviceSettings.deploymentId(), serviceSettings.apiVersion()); + this.uri = buildUriString(); } catch (URISyntaxException e) { throw new RuntimeException(e); } @@ -104,17 +100,24 @@ public ExecutableAction accept(AzureOpenAiActionVisitor creator, Map requestMap, List input, @Nullable String user) { + public void testInfer_AzureOpenAiCompletion_WithOverriddenUser() throws IOException { + var senderFactory = HttpRequestSenderTests.createSenderFactory(threadPool, clientManager); + + try (var sender = senderFactory.createSender("test_service")) { + sender.start(); + + String responseJson = """ + { + "choices": [ + { + "finish_reason": "stop", + "index": 0, + "logprobs": null, + "message": { + "content": "response", + "role": "assistant" + } + } + ], + "model": "gpt-4", + "object": "chat.completion" + }"""; + + webServer.enqueue(new MockResponse().setResponseCode(200).setBody(responseJson)); + + var originalUser = "original_user"; + var overriddenUser = "overridden_user"; + var apiKey = "api_key"; + var completionInput = "some input"; + + var model = createCompletionModel("resource", "deployment", 
"apiversion", originalUser, apiKey, null, "id"); + model.setUri(new URI(getUrl(webServer))); + var actionCreator = new AzureOpenAiActionCreator(sender, createWithEmptySettings(threadPool)); + var taskSettingsWithUserOverride = createRequestTaskSettingsMap(overriddenUser); + var action = (AzureOpenAiCompletionAction) actionCreator.create(model, taskSettingsWithUserOverride); + + PlainActionFuture listener = new PlainActionFuture<>(); + action.execute(new DocumentsOnlyInput(List.of(completionInput)), InferenceAction.Request.DEFAULT_TIMEOUT, listener); + + var result = listener.actionGet(TIMEOUT); + + assertThat(webServer.requests(), hasSize(1)); + + var request = webServer.requests().get(0); + var requestMap = entityAsMap(request.getBody()); + + assertThat( + result.asMap(), + is(Map.of(ChatCompletionResults.COMPLETION, List.of(Map.of(ChatCompletionResults.Result.RESULT, "response")))) + ); + validateRequestWithApiKey(request, apiKey); + validateCompletionRequestMapWithUser(requestMap, List.of(completionInput), overriddenUser); + + } catch (URISyntaxException e) { + throw new RuntimeException(e); + } + } + + public void testInfer_AzureOpenAiCompletionModel_WithoutUser() throws IOException { + var senderFactory = HttpRequestSenderTests.createSenderFactory(threadPool, clientManager); + + try (var sender = senderFactory.createSender("test_service")) { + sender.start(); + + String responseJson = """ + { + "choices": [ + { + "finish_reason": "stop", + "index": 0, + "logprobs": null, + "message": { + "content": "response", + "role": "assistant" + } + } + ], + "model": "gpt-4", + "object": "chat.completion" + }"""; + + var completionInput = "some input"; + var apiKey = "api key"; + + webServer.enqueue(new MockResponse().setResponseCode(200).setBody(responseJson)); + + var model = createCompletionModel("resource", "deployment", "apiversion", null, apiKey, null, "id"); + model.setUri(new URI(getUrl(webServer))); + var actionCreator = new AzureOpenAiActionCreator(sender, 
createWithEmptySettings(threadPool)); + var requestTaskSettingsWithoutUser = createRequestTaskSettingsMap(null); + var action = (AzureOpenAiCompletionAction) actionCreator.create(model, requestTaskSettingsWithoutUser); + + PlainActionFuture listener = new PlainActionFuture<>(); + action.execute(new DocumentsOnlyInput(List.of(completionInput)), InferenceAction.Request.DEFAULT_TIMEOUT, listener); + + var result = listener.actionGet(TIMEOUT); + + assertThat(webServer.requests(), hasSize(1)); + + var request = webServer.requests().get(0); + var requestMap = entityAsMap(request.getBody()); + + assertThat( + result.asMap(), + is(Map.of(ChatCompletionResults.COMPLETION, List.of(Map.of(ChatCompletionResults.Result.RESULT, "response")))) + ); + validateRequestWithApiKey(request, apiKey); + validateCompletionRequestMapWithUser(requestMap, List.of(completionInput), null); + } catch (URISyntaxException e) { + throw new RuntimeException(e); + } + } + + public void testInfer_AzureOpenAiCompletionModel_FailsFromInvalidResponseFormat() throws IOException { + // timeout as zero for no retries + var senderFactory = HttpRequestSenderTests.createSenderFactory(threadPool, clientManager, ZERO_TIMEOUT_SETTINGS); + + try (var sender = senderFactory.createSender("test_service")) { + sender.start(); + + // "choices" missing + String responseJson = """ + { + "not_choices": [ + { + "finish_reason": "stop", + "index": 0, + "logprobs": null, + "message": { + "content": "response", + "role": "assistant" + } + } + ], + "model": "gpt-4", + "object": "chat.completion" + }"""; + + var completionInput = "some input"; + var apiKey = "api key"; + var userOverride = "overridden_user"; + + webServer.enqueue(new MockResponse().setResponseCode(200).setBody(responseJson)); + + var model = createCompletionModel("resource", "deployment", "apiversion", null, apiKey, null, "id"); + model.setUri(new URI(getUrl(webServer))); + var actionCreator = new AzureOpenAiActionCreator(sender, 
createWithEmptySettings(threadPool)); + var requestTaskSettingsWithoutUser = createRequestTaskSettingsMap(userOverride); + var action = (AzureOpenAiCompletionAction) actionCreator.create(model, requestTaskSettingsWithoutUser); + + PlainActionFuture listener = new PlainActionFuture<>(); + action.execute(new DocumentsOnlyInput(List.of(completionInput)), InferenceAction.Request.DEFAULT_TIMEOUT, listener); + + var thrownException = expectThrows(ElasticsearchStatusException.class, () -> listener.actionGet(TIMEOUT)); + assertThat( + thrownException.getMessage(), + is(format("Failed to send Azure OpenAI completion request to [%s]", getUrl(webServer))) + ); + assertThat( + thrownException.getCause().getMessage(), + is("Failed to find required field [choices] in Azure OpenAI completions response") + ); + + assertThat(webServer.requests(), hasSize(1)); + validateRequestWithApiKey(webServer.requests().get(0), apiKey); + + var requestMap = entityAsMap(webServer.requests().get(0).getBody()); + validateCompletionRequestMapWithUser(requestMap, List.of(completionInput), userOverride); + } catch (URISyntaxException e) { + throw new RuntimeException(e); + } + } + + private void validateEmbeddingsRequestMapWithUser(Map requestMap, List input, @Nullable String user) { var expectedSize = user == null ? 1 : 2; assertThat(requestMap.size(), is(expectedSize)); @@ -446,6 +621,24 @@ private void validateRequestMapWithUser(Map requestMap, List requestMap, List input, @Nullable String user) { + assertThat("input for completions can only be of size 1", input.size(), equalTo(1)); + + var expectedSize = user == null ? 
2 : 3; + + assertThat(requestMap.size(), is(expectedSize)); + assertThat(getContentOfMessageInRequestMap(requestMap), is(input.get(0))); + + if (user != null) { + assertThat(requestMap.get("user"), is(user)); + } + } + + @SuppressWarnings("unchecked") + public static String getContentOfMessageInRequestMap(Map requestMap) { + return ((Map) ((List) requestMap.get("messages")).get(0)).get("content").toString(); + } + private void validateRequestWithApiKey(MockRequest request, String apiKey) { assertNull(request.getUri().getQuery()); assertThat(request.getHeader(HttpHeaders.CONTENT_TYPE), equalTo(XContentType.JSON.mediaType())); diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/azureopenai/AzureOpenAiCompletionActionTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/azureopenai/AzureOpenAiCompletionActionTests.java new file mode 100644 index 0000000000000..96127841c17a8 --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/azureopenai/AzureOpenAiCompletionActionTests.java @@ -0,0 +1,200 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.external.action.azureopenai; + +import org.apache.http.HttpHeaders; +import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.support.PlainActionFuture; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.core.Nullable; +import org.elasticsearch.core.TimeValue; +import org.elasticsearch.inference.InferenceServiceResults; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.test.http.MockResponse; +import org.elasticsearch.test.http.MockWebServer; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.xpack.core.inference.action.InferenceAction; +import org.elasticsearch.xpack.core.inference.results.ChatCompletionResults; +import org.elasticsearch.xpack.inference.external.http.HttpClientManager; +import org.elasticsearch.xpack.inference.external.http.sender.DocumentsOnlyInput; +import org.elasticsearch.xpack.inference.external.http.sender.HttpRequestSenderTests; +import org.elasticsearch.xpack.inference.external.http.sender.Sender; +import org.elasticsearch.xpack.inference.external.request.azureopenai.AzureOpenAiUtils; +import org.elasticsearch.xpack.inference.logging.ThrottlerManager; +import org.junit.After; +import org.junit.Before; + +import java.io.IOException; +import java.net.URI; +import java.net.URISyntaxException; +import java.util.List; +import java.util.Map; +import java.util.concurrent.TimeUnit; + +import static org.elasticsearch.core.Strings.format; +import static org.elasticsearch.xpack.inference.Utils.inferenceUtilityPool; +import static org.elasticsearch.xpack.inference.Utils.mockClusterServiceEmpty; +import static org.elasticsearch.xpack.inference.external.action.azureopenai.AzureOpenAiActionCreatorTests.getContentOfMessageInRequestMap; +import static org.elasticsearch.xpack.inference.external.http.Utils.entityAsMap; 
+import static org.elasticsearch.xpack.inference.external.http.Utils.getUrl; +import static org.elasticsearch.xpack.inference.services.ServiceComponentsTests.createWithEmptySettings; +import static org.elasticsearch.xpack.inference.services.azureopenai.completion.AzureOpenAiCompletionModelTests.createCompletionModel; +import static org.hamcrest.Matchers.hasSize; +import static org.hamcrest.Matchers.is; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.Mockito.doAnswer; +import static org.mockito.Mockito.doThrow; +import static org.mockito.Mockito.mock; + +public class AzureOpenAiCompletionActionTests extends ESTestCase { + + private static final TimeValue TIMEOUT = new TimeValue(30, TimeUnit.SECONDS); + private final MockWebServer webServer = new MockWebServer(); + private ThreadPool threadPool; + private HttpClientManager clientManager; + + @Before + public void init() throws Exception { + webServer.start(); + threadPool = createThreadPool(inferenceUtilityPool()); + clientManager = HttpClientManager.create(Settings.EMPTY, threadPool, mockClusterServiceEmpty(), mock(ThrottlerManager.class)); + } + + @After + public void shutdown() throws IOException { + clientManager.close(); + terminate(threadPool); + webServer.close(); + } + + public void testExecute_ReturnsSuccessfulResponse() throws IOException { + var senderFactory = HttpRequestSenderTests.createSenderFactory(threadPool, clientManager); + + try (var sender = senderFactory.createSender("test_service")) { + sender.start(); + + String responseJson = """ + { + "choices": [ + { + "finish_reason": "stop", + "index": 0, + "logprobs": null, + "message": { + "content": "response", + "role": "assistant" + } + } + ], + "model": "gpt-4", + "object": "chat.completion" + + }"""; + + webServer.enqueue(new MockResponse().setResponseCode(200).setBody(responseJson)); + + var user = "user"; + var apiKey = "api_key"; + var completionInput = "some input"; + + var action = createAction("resource",
"deployment", "apiversion", user, apiKey, sender, "id"); + + PlainActionFuture listener = new PlainActionFuture<>(); + action.execute(new DocumentsOnlyInput(List.of(completionInput)), InferenceAction.Request.DEFAULT_TIMEOUT, listener); + + var result = listener.actionGet(TIMEOUT); + + assertThat(webServer.requests(), hasSize(1)); + + var request = webServer.requests().get(0); + assertNull(request.getUri().getQuery()); + assertThat(request.getHeader(HttpHeaders.CONTENT_TYPE), is(XContentType.JSON.mediaType())); + assertThat(request.getHeader(AzureOpenAiUtils.API_KEY_HEADER), is(apiKey)); + + assertThat( + result.asMap(), + is(Map.of(ChatCompletionResults.COMPLETION, List.of(Map.of(ChatCompletionResults.Result.RESULT, "response")))) + ); + + var requestMap = entityAsMap(request.getBody()); + assertThat(requestMap.size(), is(3)); + assertThat(getContentOfMessageInRequestMap(requestMap), is(completionInput)); + assertThat(requestMap.get("user"), is(user)); + assertThat(requestMap.get("n"), is(1)); + } + } + + public void testExecute_ThrowsElasticsearchException() { + var sender = mock(Sender.class); + doThrow(new ElasticsearchException("failed")).when(sender).send(any(), any(), any(), any()); + + var action = createAction("resource", "deployment", "apiVersion", "user", "apikey", sender, "id"); + + PlainActionFuture listener = new PlainActionFuture<>(); + action.execute(new DocumentsOnlyInput(List.of("abc")), InferenceAction.Request.DEFAULT_TIMEOUT, listener); + + var thrownException = expectThrows(ElasticsearchException.class, () -> listener.actionGet(TIMEOUT)); + + assertThat(thrownException.getMessage(), is("failed")); + } + + public void testExecute_ThrowsElasticsearchException_WhenSenderOnFailureIsCalled() { + var sender = mock(Sender.class); + + doAnswer(invocation -> { + @SuppressWarnings("unchecked") + ActionListener listener = (ActionListener) invocation.getArguments()[1]; + listener.onFailure(new IllegalStateException("failed")); + + return Void.TYPE; + 
}).when(sender).send(any(), any(), any(), any()); + + var action = createAction("resource", "deployment", "apiVersion", "user", "apikey", sender, "id"); + + PlainActionFuture listener = new PlainActionFuture<>(); + action.execute(new DocumentsOnlyInput(List.of("abc")), InferenceAction.Request.DEFAULT_TIMEOUT, listener); + + var thrownException = expectThrows(ElasticsearchException.class, () -> listener.actionGet(TIMEOUT)); + + assertThat(thrownException.getMessage(), is(format("Failed to send Azure OpenAI completion request to [%s]", getUrl(webServer)))); + } + + public void testExecute_ThrowsException() { + var sender = mock(Sender.class); + doThrow(new IllegalArgumentException("failed")).when(sender).send(any(), any(), any(), any()); + + var action = createAction("resource", "deployment", "apiVersion", "user", "apikey", sender, "id"); + + PlainActionFuture listener = new PlainActionFuture<>(); + action.execute(new DocumentsOnlyInput(List.of("abc")), InferenceAction.Request.DEFAULT_TIMEOUT, listener); + + var thrownException = expectThrows(ElasticsearchException.class, () -> listener.actionGet(TIMEOUT)); + + assertThat(thrownException.getMessage(), is(format("Failed to send Azure OpenAI completion request to [%s]", getUrl(webServer)))); + } + + private AzureOpenAiCompletionAction createAction( + String resourceName, + String deploymentId, + String apiVersion, + @Nullable String user, + String apiKey, + Sender sender, + String inferenceEntityId + ) { + try { + var model = createCompletionModel(resourceName, deploymentId, apiVersion, user, apiKey, null, inferenceEntityId); + model.setUri(new URI(getUrl(webServer))); + return new AzureOpenAiCompletionAction(sender, model, createWithEmptySettings(threadPool)); + } catch (URISyntaxException e) { + throw new RuntimeException(e); + } + } +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/azureopenai/AzureOpenAiRequestTests.java 
b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/azureopenai/AzureOpenAiRequestTests.java new file mode 100644 index 0000000000000..2d37f273e1de2 --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/azureopenai/AzureOpenAiRequestTests.java @@ -0,0 +1,62 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.external.request.azureopenai; + +import org.apache.http.HttpHeaders; +import org.apache.http.client.methods.HttpPost; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.ValidationException; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.inference.services.azureopenai.AzureOpenAiSecretSettings; + +import static org.elasticsearch.xpack.inference.external.request.azureopenai.AzureOpenAiRequest.MISSING_AUTHENTICATION_ERROR_MESSAGE; +import static org.elasticsearch.xpack.inference.external.request.azureopenai.AzureOpenAiUtils.API_KEY_HEADER; +import static org.elasticsearch.xpack.inference.services.azureopenai.AzureOpenAiSecretSettings.API_KEY; +import static org.elasticsearch.xpack.inference.services.azureopenai.AzureOpenAiSecretSettings.ENTRA_ID; +import static org.hamcrest.Matchers.equalTo; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +public class AzureOpenAiRequestTests extends ESTestCase { + + public void testDecorateWithAuthHeader_apiKeyPresent() { + var apiKey = randomSecureStringOfLength(10); + var httpPost = new HttpPost(); + var secretSettings = new AzureOpenAiSecretSettings(apiKey, null); + + AzureOpenAiRequest.decorateWithAuthHeader(httpPost, secretSettings); + var apiKeyHeader = httpPost.getFirstHeader(API_KEY_HEADER); + 
+ assertThat(apiKeyHeader.getValue(), equalTo(apiKey.toString())); + } + + public void testDecorateWithAuthHeader_entraIdPresent() { + var entraId = randomSecureStringOfLength(10); + var httpPost = new HttpPost(); + var secretSettings = new AzureOpenAiSecretSettings(null, entraId); + + AzureOpenAiRequest.decorateWithAuthHeader(httpPost, secretSettings); + var authHeader = httpPost.getFirstHeader(HttpHeaders.AUTHORIZATION); + + assertThat(authHeader.getValue(), equalTo("Bearer " + entraId)); + } + + public void testDecorateWithAuthHeader_entraIdAndApiKeyMissing_throwMissingAuthValidationException() { + var httpPost = new HttpPost(); + var secretSettingsMock = mock(AzureOpenAiSecretSettings.class); + + when(secretSettingsMock.entraId()).thenReturn(null); + when(secretSettingsMock.apiKey()).thenReturn(null); + + ValidationException exception = expectThrows( + ValidationException.class, + () -> AzureOpenAiRequest.decorateWithAuthHeader(httpPost, secretSettingsMock) + ); + assertTrue(exception.getMessage().contains(Strings.format(MISSING_AUTHENTICATION_ERROR_MESSAGE, API_KEY, ENTRA_ID))); + } +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/azureopenai/completion/AzureOpenAiCompletionRequestEntityTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/azureopenai/completion/AzureOpenAiCompletionRequestEntityTests.java new file mode 100644 index 0000000000000..7647a4983f4be --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/azureopenai/completion/AzureOpenAiCompletionRequestEntityTests.java @@ -0,0 +1,45 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.external.request.azureopenai.completion; + +import org.elasticsearch.common.Strings; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xcontent.XContentFactory; +import org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.xpack.inference.external.request.azureopenai.AzureOpenAiCompletionRequestEntity; + +import java.io.IOException; +import java.util.List; + +import static org.hamcrest.CoreMatchers.is; + +public class AzureOpenAiCompletionRequestEntityTests extends ESTestCase { + + public void testXContent_WritesSingleMessage_DoesNotWriteUserWhenItIsNull() throws IOException { + var entity = new AzureOpenAiCompletionRequestEntity(List.of("input"), null); + + XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON); + entity.toXContent(builder, null); + String xContentResult = Strings.toString(builder); + + assertThat(xContentResult, is(""" + {"messages":[{"role":"user","content":"input"}],"n":1}""")); + } + + public void testXContent_WritesSingleMessage_WriteUserWhenItIsNull() throws IOException { + var entity = new AzureOpenAiCompletionRequestEntity(List.of("input"), "user"); + + XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON); + entity.toXContent(builder, null); + String xContentResult = Strings.toString(builder); + + assertThat(xContentResult, is(""" + {"messages":[{"role":"user","content":"input"}],"n":1,"user":"user"}""")); + } +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/azureopenai/completion/AzureOpenAiCompletionRequestTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/azureopenai/completion/AzureOpenAiCompletionRequestTests.java new file mode 100644 index 0000000000000..048d4ea16d56f --- /dev/null +++ 
b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/azureopenai/completion/AzureOpenAiCompletionRequestTests.java @@ -0,0 +1,100 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.external.request.azureopenai.completion; + +import org.apache.http.HttpHeaders; +import org.apache.http.client.methods.HttpPost; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.xpack.inference.external.request.azureopenai.AzureOpenAiCompletionRequest; +import org.elasticsearch.xpack.inference.services.azureopenai.completion.AzureOpenAiCompletionModelTests; + +import java.io.IOException; +import java.util.List; + +import static org.elasticsearch.xpack.inference.external.action.azureopenai.AzureOpenAiActionCreatorTests.getContentOfMessageInRequestMap; +import static org.elasticsearch.xpack.inference.external.http.Utils.entityAsMap; +import static org.elasticsearch.xpack.inference.external.request.azureopenai.AzureOpenAiUtils.API_KEY_HEADER; +import static org.hamcrest.Matchers.instanceOf; +import static org.hamcrest.Matchers.is; + +public class AzureOpenAiCompletionRequestTests extends ESTestCase { + + public void testCreateRequest_WithApiKeyDefined() throws IOException { + var input = "input"; + var user = "user"; + var apiKey = randomAlphaOfLength(10); + + var request = createRequest("resource", "deployment", "2024", apiKey, null, input, user); + var httpRequest = request.createHttpRequest(); + + assertThat(httpRequest.httpRequestBase(), instanceOf(HttpPost.class)); + var httpPost = (HttpPost) httpRequest.httpRequestBase(); + + assertThat( + httpPost.getURI().toString(), + 
is("https://resource.openai.azure.com/openai/deployments/deployment/chat/completions?api-version=2024") + ); + + assertThat(httpPost.getLastHeader(HttpHeaders.CONTENT_TYPE).getValue(), is(XContentType.JSON.mediaType())); + assertThat(httpPost.getLastHeader(API_KEY_HEADER).getValue(), is(apiKey)); + + var requestMap = entityAsMap(httpPost.getEntity().getContent()); + assertThat(getContentOfMessageInRequestMap(requestMap), is(input)); + assertThat(requestMap.get("user"), is(user)); + assertThat(requestMap.get("n"), is(1)); + } + + public void testCreateRequest_WithEntraIdDefined() throws IOException { + var input = "input"; + var user = "user"; + var entraId = randomAlphaOfLength(10); + + var request = createRequest("resource", "deployment", "2024", null, entraId, input, user); + var httpRequest = request.createHttpRequest(); + + assertThat(httpRequest.httpRequestBase(), instanceOf(HttpPost.class)); + var httpPost = (HttpPost) httpRequest.httpRequestBase(); + + assertThat( + httpPost.getURI().toString(), + is("https://resource.openai.azure.com/openai/deployments/deployment/chat/completions?api-version=2024") + ); + + assertThat(httpPost.getLastHeader(HttpHeaders.CONTENT_TYPE).getValue(), is(XContentType.JSON.mediaType())); + assertThat(httpPost.getLastHeader(HttpHeaders.AUTHORIZATION).getValue(), is("Bearer " + entraId)); + + var requestMap = entityAsMap(httpPost.getEntity().getContent()); + assertThat(getContentOfMessageInRequestMap(requestMap), is(input)); + assertThat(requestMap.get("user"), is(user)); + assertThat(requestMap.get("n"), is(1)); + } + + protected AzureOpenAiCompletionRequest createRequest( + String resource, + String deployment, + String apiVersion, + String apiKey, + String entraId, + String input, + String user + ) { + var completionModel = AzureOpenAiCompletionModelTests.createCompletionModel( + resource, + deployment, + apiVersion, + user, + apiKey, + entraId, + "id" + ); + + return new AzureOpenAiCompletionRequest(List.of(input), 
completionModel); + } + +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/azureopenai/AzureOpenAiEmbeddingsRequestEntityTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/azureopenai/embeddings/AzureOpenAiEmbeddingsRequestEntityTests.java similarity index 96% rename from x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/azureopenai/AzureOpenAiEmbeddingsRequestEntityTests.java rename to x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/azureopenai/embeddings/AzureOpenAiEmbeddingsRequestEntityTests.java index 14283ed53eed9..f732a01c893e8 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/azureopenai/AzureOpenAiEmbeddingsRequestEntityTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/azureopenai/embeddings/AzureOpenAiEmbeddingsRequestEntityTests.java @@ -5,13 +5,14 @@ * 2.0. 
*/ -package org.elasticsearch.xpack.inference.external.request.azureopenai; +package org.elasticsearch.xpack.inference.external.request.azureopenai.embeddings; import org.elasticsearch.common.Strings; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentFactory; import org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.xpack.inference.external.request.azureopenai.AzureOpenAiEmbeddingsRequestEntity; import java.io.IOException; import java.util.List; diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/azureopenai/AzureOpenAiEmbeddingsRequestTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/azureopenai/embeddings/AzureOpenAiEmbeddingsRequestTests.java similarity index 73% rename from x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/azureopenai/AzureOpenAiEmbeddingsRequestTests.java rename to x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/azureopenai/embeddings/AzureOpenAiEmbeddingsRequestTests.java index 88e6880b72f0b..bbd8a49d65f46 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/azureopenai/AzureOpenAiEmbeddingsRequestTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/azureopenai/embeddings/AzureOpenAiEmbeddingsRequestTests.java @@ -5,7 +5,7 @@ * 2.0. 
*/ -package org.elasticsearch.xpack.inference.external.request.azureopenai; +package org.elasticsearch.xpack.inference.external.request.azureopenai.embeddings; import org.apache.http.HttpHeaders; import org.apache.http.client.methods.HttpPost; @@ -14,56 +14,69 @@ import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xpack.inference.common.Truncator; import org.elasticsearch.xpack.inference.common.TruncatorTests; -import org.elasticsearch.xpack.inference.services.azureopenai.embeddings.AzureOpenAiEmbeddingsModel; +import org.elasticsearch.xpack.inference.external.request.azureopenai.AzureOpenAiEmbeddingsRequest; import org.elasticsearch.xpack.inference.services.azureopenai.embeddings.AzureOpenAiEmbeddingsModelTests; import java.io.IOException; -import java.net.URISyntaxException; import java.util.List; import static org.elasticsearch.xpack.inference.external.http.Utils.entityAsMap; import static org.elasticsearch.xpack.inference.external.request.azureopenai.AzureOpenAiUtils.API_KEY_HEADER; import static org.hamcrest.Matchers.aMapWithSize; +import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.is; public class AzureOpenAiEmbeddingsRequestTests extends ESTestCase { - public void testCreateRequest_WithApiKeyDefined() throws IOException, URISyntaxException { - var request = createRequest("resource", "deployment", "apiVersion", "apikey", null, "abc", "user"); + + public void testCreateRequest_WithApiKeyDefined() throws IOException { + var input = "input"; + var user = "user"; + var apiKey = randomAlphaOfLength(10); + + var request = createRequest("resource", "deployment", "2024", apiKey, null, input, user); var httpRequest = request.createHttpRequest(); assertThat(httpRequest.httpRequestBase(), instanceOf(HttpPost.class)); var httpPost = (HttpPost) httpRequest.httpRequestBase(); - var expectedUri = AzureOpenAiEmbeddingsModel.getEmbeddingsUri("resource", "deployment", 
"apiVersion").toString(); - assertThat(httpPost.getURI().toString(), is(expectedUri)); + assertThat( + httpPost.getURI().toString(), + is("https://resource.openai.azure.com/openai/deployments/deployment/embeddings?api-version=2024") + ); assertThat(httpPost.getLastHeader(HttpHeaders.CONTENT_TYPE).getValue(), is(XContentType.JSON.mediaType())); - assertThat(httpPost.getLastHeader(API_KEY_HEADER).getValue(), is("apikey")); + assertThat(httpPost.getLastHeader(API_KEY_HEADER).getValue(), is(apiKey)); var requestMap = entityAsMap(httpPost.getEntity().getContent()); - assertThat(requestMap, aMapWithSize(2)); - assertThat(requestMap.get("input"), is(List.of("abc"))); - assertThat(requestMap.get("user"), is("user")); + assertThat(requestMap.size(), equalTo(2)); + assertThat(requestMap.get("input"), is(List.of(input))); + assertThat(requestMap.get("user"), is(user)); } - public void testCreateRequest_WithEntraIdDefined() throws IOException, URISyntaxException { - var request = createRequest("resource", "deployment", "apiVersion", null, "entraId", "abc", "user"); + public void testCreateRequest_WithEntraIdDefined() throws IOException { + var input = "input"; + var user = "user"; + var entraId = randomAlphaOfLength(10); + + var request = createRequest("resource", "deployment", "2024", null, entraId, input, user); var httpRequest = request.createHttpRequest(); assertThat(httpRequest.httpRequestBase(), instanceOf(HttpPost.class)); var httpPost = (HttpPost) httpRequest.httpRequestBase(); - var expectedUri = AzureOpenAiEmbeddingsModel.getEmbeddingsUri("resource", "deployment", "apiVersion").toString(); - assertThat(httpPost.getURI().toString(), is(expectedUri)); + assertThat( + httpPost.getURI().toString(), + is("https://resource.openai.azure.com/openai/deployments/deployment/embeddings?api-version=2024") + ); assertThat(httpPost.getLastHeader(HttpHeaders.CONTENT_TYPE).getValue(), is(XContentType.JSON.mediaType())); - 
assertThat(httpPost.getLastHeader(HttpHeaders.AUTHORIZATION).getValue(), is("Bearer entraId")); + assertThat(httpPost.getLastHeader(HttpHeaders.AUTHORIZATION).getValue(), is("Bearer " + entraId)); var requestMap = entityAsMap(httpPost.getEntity().getContent()); - assertThat(requestMap, aMapWithSize(2)); - assertThat(requestMap.get("input"), is(List.of("abc"))); - assertThat(requestMap.get("user"), is("user")); + assertThat(requestMap.size(), equalTo(2)); + assertThat(requestMap.get("input"), is(List.of(input))); + assertThat(requestMap.get("user"), is(user)); } public void testTruncate_ReducesInputTextSizeByHalf() throws IOException { @@ -87,7 +100,7 @@ public void testIsTruncated_ReturnsTrue() { assertTrue(truncatedRequest.getTruncationInfo()[0]); } - public static AzureOpenAiEmbeddingsRequest createRequest( + public AzureOpenAiEmbeddingsRequest createRequest( String resourceName, String deploymentId, String apiVersion, diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/XContentUtilsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/XContentUtilsTests.java index 4f7cd9ea89a14..897c648eb942f 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/XContentUtilsTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/XContentUtilsTests.java @@ -106,6 +106,24 @@ public void testPositionParserAtTokenAfterField_ThrowsWithMalformedJSON() throws } } + public void testPositionParserAtTokenAfterField_ConsumesUntilEnd() throws IOException { + var json = """ + { + "key": { + "foo": "bar" + }, + "target": "value" + } + """; + + var errorFormat = "Error: %s"; + + try (XContentParser parser = createParser(XContentType.JSON.xContent(), json)) { + XContentUtils.positionParserAtTokenAfterField(parser, "target", errorFormat); + assertEquals("value", parser.text()); + } + } + public 
void testConsumeUntilObjectEnd() throws IOException { var json = """ { diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/azureopenai/AzureOpenAiCompletionResponseEntityTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/azureopenai/AzureOpenAiCompletionResponseEntityTests.java new file mode 100644 index 0000000000000..3afe4bd439e0f --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/azureopenai/AzureOpenAiCompletionResponseEntityTests.java @@ -0,0 +1,220 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.external.response.azureopenai; + +import org.apache.http.HttpResponse; +import org.elasticsearch.common.ParsingException; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.core.inference.results.ChatCompletionResults; +import org.elasticsearch.xpack.inference.external.http.HttpResult; +import org.elasticsearch.xpack.inference.external.request.Request; + +import java.io.IOException; +import java.nio.charset.StandardCharsets; + +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.is; +import static org.mockito.Mockito.mock; + +public class AzureOpenAiCompletionResponseEntityTests extends ESTestCase { + + public void testFromResponse_CreatesResultsForASingleItem() throws IOException { + String responseJson = """ + { + "choices": [ + { + "content_filter_results": { + "hate": { + "filtered": false, + "severity": "safe" + }, + "self_harm": { + "filtered": false, + "severity": "safe" + }, + "sexual": { + "filtered": false, + "severity": "safe" + }, + "violence": { + "filtered": false, + "severity": "safe" + 
} + }, + "finish_reason": "stop", + "index": 0, + "logprobs": null, + "message": { + "content": "response", + "role": "assistant" + } + } + ], + "model": "gpt-4", + "object": "chat.completion", + "prompt_filter_results": [ + { + "prompt_index": 0, + "content_filter_results": { + "hate": { + "filtered": false, + "severity": "safe" + }, + "self_harm": { + "filtered": false, + "severity": "safe" + }, + "sexual": { + "filtered": false, + "severity": "safe" + }, + "violence": { + "filtered": false, + "severity": "safe" + } + } + } + ], + "usage": { + "completion_tokens": 138, + "prompt_tokens": 11, + "total_tokens": 149 + } + }"""; + + ChatCompletionResults chatCompletionResults = AzureOpenAiCompletionResponseEntity.fromResponse( + mock(Request.class), + new HttpResult(mock(HttpResponse.class), responseJson.getBytes(StandardCharsets.UTF_8)) + ); + + assertThat(chatCompletionResults.getResults().size(), equalTo(1)); + + ChatCompletionResults.Result result = chatCompletionResults.getResults().get(0); + assertThat(result.asMap().get(result.getResultsField()), is("response")); + } + + public void testFromResponse_FailsWhenChoicesFieldIsNotPresent() { + String responseJson = """ + { + "not_choices": [ + { + "finish_reason": "stop", + "index": 0, + "logprobs": null, + "message": { + "content": "response", + "role": "assistant" + } + } + ], + "model": "gpt-4", + "object": "chat.completion" + }"""; + + var thrownException = expectThrows( + IllegalStateException.class, + () -> AzureOpenAiCompletionResponseEntity.fromResponse( + mock(Request.class), + new HttpResult(mock(HttpResponse.class), responseJson.getBytes(StandardCharsets.UTF_8)) + ) + ); + + assertThat(thrownException.getMessage(), is("Failed to find required field [choices] in Azure OpenAI completions response")); + } + + public void testFromResponse_FailsWhenChoicesFieldIsNotAnArray() { + String responseJson = """ + { + "choices": { + "finish_reason": "stop", + "index": 0, + "logprobs": null, + "message": { + 
"content": "response", + "role": "assistant" + } + }, + "model": "gpt-4", + "object": "chat.completion" + ] + }"""; + + var thrownException = expectThrows( + ParsingException.class, + () -> AzureOpenAiCompletionResponseEntity.fromResponse( + mock(Request.class), + new HttpResult(mock(HttpResponse.class), responseJson.getBytes(StandardCharsets.UTF_8)) + ) + ); + + assertThat( + thrownException.getMessage(), + is("Failed to parse object: expecting token of type [START_OBJECT] but found [FIELD_NAME]") + ); + } + + public void testFromResponse_FailsWhenMessageDoesNotExist() { + String responseJson = """ + { + "choices": [ + { + "finish_reason": "stop", + "index": 0, + "logprobs": null, + "not_message": { + "content": "response", + "role": "assistant" + } + } + ], + "model": "gpt-4", + "object": "chat.completion" + }"""; + + var thrownException = expectThrows( + IllegalStateException.class, + () -> AzureOpenAiCompletionResponseEntity.fromResponse( + mock(Request.class), + new HttpResult(mock(HttpResponse.class), responseJson.getBytes(StandardCharsets.UTF_8)) + ) + ); + + assertThat(thrownException.getMessage(), is("Failed to find required field [message] in Azure OpenAI completions response")); + } + + public void testFromResponse_FailsWhenMessageValueIsAString() { + String responseJson = """ + { + "choices": [ + { + "finish_reason": "stop", + "index": 0, + "logprobs": null, + "message": "string" + } + ], + "model": "gpt-4", + "object": "chat.completion" + ] + }"""; + + var thrownException = expectThrows( + ParsingException.class, + () -> AzureOpenAiCompletionResponseEntity.fromResponse( + mock(Request.class), + new HttpResult(mock(HttpResponse.class), responseJson.getBytes(StandardCharsets.UTF_8)) + ) + ); + + assertThat( + thrownException.getMessage(), + is("Failed to parse object: expecting token of type [START_OBJECT] but found [VALUE_STRING]") + ); + } + +} diff --git 
a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/openai/OpenAiChatCompletionResponseEntityTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/openai/OpenAiChatCompletionResponseEntityTests.java index 18f702014e2d8..080602e8fd245 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/openai/OpenAiChatCompletionResponseEntityTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/openai/OpenAiChatCompletionResponseEntityTests.java @@ -74,7 +74,7 @@ public void testFromResponse_FailsWhenChoicesFieldIsNotPresent() { }, "logprobs": null, "finish_reason": "stop" - }, + } ], "usage": { "prompt_tokens": 46, @@ -112,7 +112,7 @@ public void testFromResponse_FailsWhenChoicesFieldNotAnArray() { }, "logprobs": null, "finish_reason": "stop" - }, + } }, "usage": { "prompt_tokens": 46, @@ -153,7 +153,7 @@ public void testFromResponse_FailsWhenMessageDoesNotExist() { }, "logprobs": null, "finish_reason": "stop" - }, + } ], "usage": { "prompt_tokens": 46, diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureopenai/completion/AzureOpenAiCompletionModelTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureopenai/completion/AzureOpenAiCompletionModelTests.java new file mode 100644 index 0000000000000..93d948a5bdcf3 --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureopenai/completion/AzureOpenAiCompletionModelTests.java @@ -0,0 +1,142 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.services.azureopenai.completion; + +import org.elasticsearch.common.settings.SecureString; +import org.elasticsearch.core.Nullable; +import org.elasticsearch.inference.TaskType; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.inference.services.azureopenai.AzureOpenAiSecretSettings; +import org.elasticsearch.xpack.inference.services.azureopenai.AzureOpenAiServiceFields; + +import java.net.URISyntaxException; +import java.util.HashMap; +import java.util.Map; + +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.sameInstance; + +public class AzureOpenAiCompletionModelTests extends ESTestCase { + + public void testOverrideWith_UpdatedTaskSettings_OverridesUser() { + var resource = "resource"; + var deploymentId = "deployment"; + var apiVersion = "api version"; + var apiKey = "api key"; + var entraId = "entra id"; + var inferenceEntityId = "inference entity id"; + + var user = "user"; + var userOverride = "user override"; + + var model = createCompletionModel(resource, deploymentId, apiVersion, user, apiKey, entraId, inferenceEntityId); + var requestTaskSettingsMap = taskSettingsMap(userOverride); + var overriddenModel = AzureOpenAiCompletionModel.of(model, requestTaskSettingsMap); + + assertThat( + overriddenModel, + equalTo(createCompletionModel(resource, deploymentId, apiVersion, userOverride, apiKey, entraId, inferenceEntityId)) + ); + } + + public void testOverrideWith_EmptyMap_OverridesNothing() { + var model = createCompletionModel("resource", "deployment", "api version", "user", "api key", "entra id", "inference entity id"); + var requestTaskSettingsMap = Map.of(); + var overriddenModel = AzureOpenAiCompletionModel.of(model, requestTaskSettingsMap); + + assertThat(overriddenModel, sameInstance(model)); + } + + public void testOverrideWith_NullMap_OverridesNothing() { + var model = createCompletionModel("resource", 
"deployment", "api version", "user", "api key", "entra id", "inference entity id"); + var overriddenModel = AzureOpenAiCompletionModel.of(model, null); + + assertThat(overriddenModel, sameInstance(model)); + } + + public void testOverrideWith_UpdatedServiceSettings_OverridesApiVersion() { + var resource = "resource"; + var deploymentId = "deployment"; + var apiKey = "api key"; + var user = "user"; + var entraId = "entra id"; + var inferenceEntityId = "inference entity id"; + + var apiVersion = "api version"; + var updatedApiVersion = "updated api version"; + + var updatedServiceSettings = new AzureOpenAiCompletionServiceSettings(resource, deploymentId, updatedApiVersion, null); + + var model = createCompletionModel(resource, deploymentId, apiVersion, user, apiKey, entraId, inferenceEntityId); + var overriddenModel = new AzureOpenAiCompletionModel(model, updatedServiceSettings); + + assertThat( + overriddenModel, + is(createCompletionModel(resource, deploymentId, updatedApiVersion, user, apiKey, entraId, inferenceEntityId)) + ); + } + + public void testBuildUriString() throws URISyntaxException { + var resource = "resource"; + var deploymentId = "deployment"; + var apiKey = "api key"; + var user = "user"; + var entraId = "entra id"; + var inferenceEntityId = "inference entity id"; + var apiVersion = "2024"; + + var model = createCompletionModel(resource, deploymentId, apiVersion, user, apiKey, entraId, inferenceEntityId); + + assertThat( + model.buildUriString().toString(), + is("https://resource.openai.azure.com/openai/deployments/deployment/chat/completions?api-version=2024") + ); + } + + public static AzureOpenAiCompletionModel createModelWithRandomValues() { + return createCompletionModel( + randomAlphaOfLength(10), + randomAlphaOfLength(10), + randomAlphaOfLength(10), + randomAlphaOfLength(10), + randomAlphaOfLength(10), + randomAlphaOfLength(10), + randomAlphaOfLength(10) + ); + } + + public static AzureOpenAiCompletionModel createCompletionModel( + String 
resourceName, + String deploymentId, + String apiVersion, + String user, + @Nullable String apiKey, + @Nullable String entraId, + String inferenceEntityId + ) { + var secureApiKey = apiKey != null ? new SecureString(apiKey.toCharArray()) : null; + var secureEntraId = entraId != null ? new SecureString(entraId.toCharArray()) : null; + + return new AzureOpenAiCompletionModel( + inferenceEntityId, + TaskType.COMPLETION, + "service", + new AzureOpenAiCompletionServiceSettings(resourceName, deploymentId, apiVersion, null), + new AzureOpenAiCompletionTaskSettings(user), + new AzureOpenAiSecretSettings(secureApiKey, secureEntraId) + ); + } + + private Map taskSettingsMap(String user) { + Map taskSettingsMap = new HashMap<>(); + taskSettingsMap.put(AzureOpenAiServiceFields.USER, user); + return taskSettingsMap; + } + +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureopenai/completion/AzureOpenAiCompletionRequestTaskSettingsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureopenai/completion/AzureOpenAiCompletionRequestTaskSettingsTests.java new file mode 100644 index 0000000000000..51963c275a08a --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureopenai/completion/AzureOpenAiCompletionRequestTaskSettingsTests.java @@ -0,0 +1,45 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.services.azureopenai.completion; + +import org.elasticsearch.common.ValidationException; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.inference.services.azureopenai.AzureOpenAiServiceFields; + +import java.util.HashMap; +import java.util.Map; + +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.is; + +public class AzureOpenAiCompletionRequestTaskSettingsTests extends ESTestCase { + + public void testFromMap_ReturnsEmptySettings_WhenMapIsEmpty() { + var settings = AzureOpenAiCompletionRequestTaskSettings.fromMap(new HashMap<>(Map.of())); + assertThat(settings, is(AzureOpenAiCompletionRequestTaskSettings.EMPTY_SETTINGS)); + } + + public void testFromMap_ReturnsEmptySettings_WhenMapDoesNotContainKnownFields() { + var settings = AzureOpenAiCompletionRequestTaskSettings.fromMap(new HashMap<>(Map.of("key", "model"))); + assertThat(settings, is(AzureOpenAiCompletionRequestTaskSettings.EMPTY_SETTINGS)); + } + + public void testFromMap_ReturnsUser() { + var settings = AzureOpenAiCompletionRequestTaskSettings.fromMap(new HashMap<>(Map.of(AzureOpenAiServiceFields.USER, "user"))); + assertThat(settings.user(), is("user")); + } + + public void testFromMap_WhenUserIsEmpty_ThrowsValidationException() { + var exception = expectThrows( + ValidationException.class, + () -> AzureOpenAiCompletionRequestTaskSettings.fromMap(new HashMap<>(Map.of(AzureOpenAiServiceFields.USER, ""))) + ); + + assertThat(exception.getMessage(), containsString("[user] must be a non-empty string")); + } +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureopenai/completion/AzureOpenAiCompletionServiceSettingsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureopenai/completion/AzureOpenAiCompletionServiceSettingsTests.java new file mode 100644 index 0000000000000..cbaa41c37958d --- /dev/null +++ 
b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureopenai/completion/AzureOpenAiCompletionServiceSettingsTests.java @@ -0,0 +1,92 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.services.azureopenai.completion; + +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.test.AbstractWireSerializingTestCase; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xcontent.XContentFactory; +import org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.xpack.inference.services.azureopenai.AzureOpenAiServiceFields; + +import java.io.IOException; +import java.util.HashMap; +import java.util.Map; + +import static org.hamcrest.Matchers.is; + +public class AzureOpenAiCompletionServiceSettingsTests extends AbstractWireSerializingTestCase { + + private static AzureOpenAiCompletionServiceSettings createRandom() { + var resourceName = randomAlphaOfLength(8); + var deploymentId = randomAlphaOfLength(8); + var apiVersion = randomAlphaOfLength(8); + + return new AzureOpenAiCompletionServiceSettings(resourceName, deploymentId, apiVersion, null); + } + + public void testFromMap_Request_CreatesSettingsCorrectly() { + var resourceName = "this-resource"; + var deploymentId = "this-deployment"; + var apiVersion = "2024-01-01"; + + var serviceSettings = AzureOpenAiCompletionServiceSettings.fromMap( + new HashMap<>( + Map.of( + AzureOpenAiServiceFields.RESOURCE_NAME, + resourceName, + AzureOpenAiServiceFields.DEPLOYMENT_ID, + deploymentId, + AzureOpenAiServiceFields.API_VERSION, + apiVersion + ) + ) + ); + + assertThat(serviceSettings, is(new AzureOpenAiCompletionServiceSettings(resourceName, 
deploymentId, apiVersion, null))); + } + + public void testToXContent_WritesAllValues() throws IOException { + var entity = new AzureOpenAiCompletionServiceSettings("resource", "deployment", "2024", null); + + XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON); + entity.toXContent(builder, null); + String xContentResult = Strings.toString(builder); + + assertThat(xContentResult, is(""" + {"resource_name":"resource","deployment_id":"deployment","api_version":"2024","rate_limit":{"requests_per_minute":120}}""")); + } + + public void testToFilteredXContent_WritesAllValues_Except_RateLimit() throws IOException { + var entity = new AzureOpenAiCompletionServiceSettings("resource", "deployment", "2024", null); + + XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON); + var filteredXContent = entity.getFilteredXContentObject(); + filteredXContent.toXContent(builder, null); + String xContentResult = Strings.toString(builder); + + assertThat(xContentResult, is(""" + {"resource_name":"resource","deployment_id":"deployment","api_version":"2024"}""")); + } + + @Override + protected Writeable.Reader instanceReader() { + return AzureOpenAiCompletionServiceSettings::new; + } + + @Override + protected AzureOpenAiCompletionServiceSettings createTestInstance() { + return createRandom(); + } + + @Override + protected AzureOpenAiCompletionServiceSettings mutateInstance(AzureOpenAiCompletionServiceSettings instance) throws IOException { + return createRandom(); + } +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureopenai/completion/AzureOpenAiCompletionTaskSettingsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureopenai/completion/AzureOpenAiCompletionTaskSettingsTests.java new file mode 100644 index 0000000000000..7f0e730b8835c --- /dev/null +++ 
b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureopenai/completion/AzureOpenAiCompletionTaskSettingsTests.java @@ -0,0 +1,99 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.services.azureopenai.completion; + +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.ValidationException; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.test.AbstractWireSerializingTestCase; +import org.elasticsearch.xpack.inference.services.azureopenai.AzureOpenAiServiceFields; +import org.elasticsearch.xpack.inference.services.azureopenai.embeddings.AzureOpenAiEmbeddingsTaskSettings; +import org.hamcrest.MatcherAssert; + +import java.io.IOException; +import java.util.HashMap; +import java.util.Map; + +import static org.hamcrest.Matchers.is; + +public class AzureOpenAiCompletionTaskSettingsTests extends AbstractWireSerializingTestCase { + + public static AzureOpenAiCompletionTaskSettings createRandomWithUser() { + return new AzureOpenAiCompletionTaskSettings(randomAlphaOfLength(15)); + } + + public static AzureOpenAiCompletionTaskSettings createRandom() { + var user = randomBoolean() ? 
randomAlphaOfLength(15) : null; + return new AzureOpenAiCompletionTaskSettings(user); + } + + public void testFromMap_WithUser() { + var user = "user"; + + assertThat( + new AzureOpenAiCompletionTaskSettings(user), + is(AzureOpenAiCompletionTaskSettings.fromMap(new HashMap<>(Map.of(AzureOpenAiServiceFields.USER, user)))) + ); + } + + public void testFromMap_UserIsEmptyString() { + var thrownException = expectThrows( + ValidationException.class, + () -> AzureOpenAiEmbeddingsTaskSettings.fromMap(new HashMap<>(Map.of(AzureOpenAiServiceFields.USER, ""))) + ); + + MatcherAssert.assertThat( + thrownException.getMessage(), + is(Strings.format("Validation Failed: 1: [task_settings] Invalid value empty string. [user] must be a non-empty string;")) + ); + } + + public void testFromMap_MissingUser_DoesNotThrowException() { + var taskSettings = AzureOpenAiCompletionTaskSettings.fromMap(new HashMap<>(Map.of())); + assertNull(taskSettings.user()); + } + + public void testOverrideWith_KeepsOriginalValuesWithOverridesAreNull() { + var taskSettings = AzureOpenAiCompletionTaskSettings.fromMap(new HashMap<>(Map.of(AzureOpenAiServiceFields.USER, "user"))); + + var overriddenTaskSettings = AzureOpenAiCompletionTaskSettings.of( + taskSettings, + AzureOpenAiCompletionRequestTaskSettings.EMPTY_SETTINGS + ); + assertThat(overriddenTaskSettings, is(taskSettings)); + } + + public void testOverrideWith_UsesOverriddenSettings() { + var user = "user"; + var userOverride = "user override"; + + var taskSettings = AzureOpenAiCompletionTaskSettings.fromMap(new HashMap<>(Map.of(AzureOpenAiServiceFields.USER, user))); + + var requestTaskSettings = AzureOpenAiCompletionRequestTaskSettings.fromMap( + new HashMap<>(Map.of(AzureOpenAiServiceFields.USER, userOverride)) + ); + + var overriddenTaskSettings = AzureOpenAiCompletionTaskSettings.of(taskSettings, requestTaskSettings); + assertThat(overriddenTaskSettings, is(new AzureOpenAiCompletionTaskSettings(userOverride))); + } + + @Override + protected 
Writeable.Reader instanceReader() { + return AzureOpenAiCompletionTaskSettings::new; + } + + @Override + protected AzureOpenAiCompletionTaskSettings createTestInstance() { + return createRandomWithUser(); + } + + @Override + protected AzureOpenAiCompletionTaskSettings mutateInstance(AzureOpenAiCompletionTaskSettings instance) throws IOException { + return createRandomWithUser(); + } +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureopenai/embeddings/AzureOpenAiEmbeddingsModelTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureopenai/embeddings/AzureOpenAiEmbeddingsModelTests.java index aebc2240983f7..1747155623a98 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureopenai/embeddings/AzureOpenAiEmbeddingsModelTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureopenai/embeddings/AzureOpenAiEmbeddingsModelTests.java @@ -14,6 +14,7 @@ import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.inference.services.azureopenai.AzureOpenAiSecretSettings; +import java.net.URISyntaxException; import java.util.Map; import static org.elasticsearch.xpack.inference.services.azureopenai.embeddings.AzureOpenAiEmbeddingsTaskSettingsTests.getAzureOpenAiRequestTaskSettingsMap; @@ -65,6 +66,35 @@ public void testCreateModel_FromUpdatedServiceSettings() { assertThat(overridenModel, is(createModel("resource", "deployment", "override_apiversion", "user", "api_key", null, "id"))); } + public void testBuildUriString() throws URISyntaxException { + var resource = "resource"; + var deploymentId = "deployment"; + var apiKey = "api key"; + var user = "user"; + var entraId = "entra id"; + var inferenceEntityId = "inference entity id"; + var apiVersion = "2024"; + + var model = createModel(resource, deploymentId, apiVersion, user, apiKey, entraId, inferenceEntityId); + + assertThat( + 
model.buildUriString().toString(), + is("https://resource.openai.azure.com/openai/deployments/deployment/embeddings?api-version=2024") + ); + } + + public static AzureOpenAiEmbeddingsModel createModelWithRandomValues() { + return createModel( + randomAlphaOfLength(10), + randomAlphaOfLength(10), + randomAlphaOfLength(10), + randomAlphaOfLength(10), + randomAlphaOfLength(10), + randomAlphaOfLength(10), + randomAlphaOfLength(10) + ); + } + public static AzureOpenAiEmbeddingsModel createModel( String resourceName, String deploymentId, From 9d9f23ca96e03a53f0447a8564bdb0f9519e2df5 Mon Sep 17 00:00:00 2001 From: shainaraskas <58563081+shainaraskas@users.noreply.github.com> Date: Wed, 8 May 2024 12:52:50 -0400 Subject: [PATCH 064/117] [DOCS] Add API example + diagrams to shard allocation awareness docs (#108390) --- .../high-availability/cluster-design.asciidoc | 18 ++++----- .../shard-allocation-awareness-one-rack.png | Bin 0 -> 25565 bytes .../shard-allocation-awareness-two-racks.png | Bin 0 -> 43058 bytes .../cluster/allocation_awareness.asciidoc | 37 ++++++++++++++---- 4 files changed, 38 insertions(+), 17 deletions(-) create mode 100644 docs/reference/images/shard-allocation/shard-allocation-awareness-one-rack.png create mode 100644 docs/reference/images/shard-allocation/shard-allocation-awareness-two-racks.png diff --git a/docs/reference/high-availability/cluster-design.asciidoc b/docs/reference/high-availability/cluster-design.asciidoc index 3f8e19b47d37a..6c17a494f36ae 100644 --- a/docs/reference/high-availability/cluster-design.asciidoc +++ b/docs/reference/high-availability/cluster-design.asciidoc @@ -7,14 +7,14 @@ nodes to take over their responsibilities, an {es} cluster can continue operating normally if some of its nodes are unavailable or disconnected. There is a limit to how small a resilient cluster can be. All {es} clusters -require: +require the following components to function: -- One <> node -- At least one node for each <>. 
-- At least one copy of every <>. +- One <> +- At least one node for each <> +- At least one copy of every <> A resilient cluster requires redundancy for every required cluster component. -This means a resilient cluster must have: +This means a resilient cluster must have the following components: - At least three master-eligible nodes - At least two nodes of each role @@ -375,11 +375,11 @@ The cluster will be resilient to the loss of any zone as long as: - There are at least two zones containing data nodes. - Every index that is not a <> has at least one replica of each shard, in addition to the primary. -- Shard allocation awareness is configured to avoid concentrating all copies of - a shard within a single zone. +- <> is configured to + avoid concentrating all copies of a shard within a single zone. - The cluster has at least three master-eligible nodes. At least two of these - nodes are not voting-only master-eligible nodes, and they are spread evenly - across at least three zones. + nodes are not <>, + and they are spread evenly across at least three zones. - Clients are configured to send their requests to nodes in more than one zone or are configured to use a load balancer that balances the requests across an appropriate set of nodes. 
The {ess-trial}[Elastic Cloud] service provides such diff --git a/docs/reference/images/shard-allocation/shard-allocation-awareness-one-rack.png b/docs/reference/images/shard-allocation/shard-allocation-awareness-one-rack.png new file mode 100644 index 0000000000000000000000000000000000000000..d5a3040cc5343ea6e169e51bf9b6d2924f4e238a GIT binary patch literal 25565 zcmeFZ2T;>pyEquIpaSCiqJos5VnakgdI?1oRH`6dLQ{~A(i0#c0xDosl-?9+(u4@1 zg`kM2^cs2<0tASZ5FjMk1AKeG|J|LryEFg$?Y%R5GlP)i{LWL))6Vnw%s@|z^AO)5 z2n51;^Tstp2xR{%1hVJaf&JhUkBT40;MYOV8|GdR$R+LVAC@dwTpsw4#mi9ZDx|1Y zU>3aD>!_})4uO=Q*|%)>K_K>zZeCNrf9wZpvYOFNCb@P3xRbtWPWAFj zAHuJv>wVi_h)S2ms_bB$k+ingXkxy_JlImmz2C^Z&)Y?qy~(}pYH2uczn9lv)$W&D zTdtO9@E@d@gcYX`y~GvO_me+)$V9^3VT!Sqi{+O%l#oqh1ZA(96~;>ZJ!Z;0eZ?q2 zZlz`3LFI^>1Iz5YzKnUgF1~GRj@;#2{)&zn_hH?E-E*crGahjWf*G5gd+JrhSJ*gG z#9!5xp4h^0`gXts*_B!)tXTgUYS@NLwC{JRRYtbRxo5}QE)oeAEH(_HBgsld+7(^b zN(s71xvXRB2-_7z#3#`VJVMlf|JPi5TEjaLYfp-^NA)R!Pmi0a-@&d3NuH(H$;_>S zB30DXMs+6{P71s?opigt=5tY2|KN*zw(z-GQ?8x8LQWlA&F)=Z$cr~8N_+LpZM}8r zoa%{pP?h4*P(W&=vsJ?_i<94`J#1>ys#fm)5&S(SoG>yalQV+IdGRefr7FM&CP>0L zF(=q~?C<52p7aQ)mGJ6<9oR7)A#+l{r&pcVD$<7S+!KBd49q9yV^GKKJ|T$1iQK90 z`g9KU^GO}#it86dRBAxP!HyR@C ztctO3Noz6poYs2zpW|j7N;AB;ZQ*cXORD&y_10L&&Xy3~$7*OeAIXvaxtDq(AGZ$w zj5!90N)Tgf0j%V(!1*FSLrn)>r=&6MRK%qX`o(BV6C*q9K*BI*vheC1PRn0DI7XCg7%~M-mNdS_y z>#+Mhqs#4)rM^~7Q~vVd=B)%9YW@%VXOXM zoW;wIIu~-F&D&JV^Yz6my9X~%bl(jg>3?f0yAmPjD@AEd{@K)1SY^h?FkVqfG zo!>nZHj*ZeB0-xAGH>AB)MO6r5DS{@irDuf3Fu$Q>^wX}8r>e%CMl@R~o}4W3G{GTZ+)D|-Cs%>q5}n-R*LU`OD~w(iViAq>m*k%CplLJG?tD|J#m@c zWscHyF5GO78Rw?~qoGc>cfq!B?I3yqIHapHn@$^^wqGoc#~n*UmHQ7@1!-eYBW^dE zB`s@3*cYZT%wiGcPaYKN?nZql5Hq@kQ%KW21v2Ip<;6W3h4-f;vij|{XpxAGDLK1y zltq`%2Vc3iU5TRLOCO$O^hx?Jr;KaMcp~OLZ4HLNIsRPQ$#y?+9?@Gg;_fhO)oEGNK zCCVcgHd80wR^fgop}^Xg+GD=H_7!(Y(7ncWsY;XMlQh!BBD+@Barjem%%zlSoJ0z7 zsB_+}-$emA>HNp+uH&J7j*AzXTjP{0IoKjg6<3LFvzw1MPL{;sqU1o*#&UR=Q!)FrL(ze2?d#U$G7{*(;SNK#^=r}uJRY{s#Dw88Q5x15oeTf(t ztxl3T8rfHHJ*7(cAH;^Lp^Gd+%`jez&NID-)|@xAB81#AZh(bs)f9ww=s;})7yK=K 
z-t~F#7q}GVSCl;Qb+199eHSqNTm9yB0fXbaJN4N~v2dRw7=1dP|IXe^ituaqwC#TR zKmY#ka3G`9fwlZc=X^=fU#fXpDS6lDt1cxQyXd~6p`ygBF{q`gaNBrz61_RF+*07# zv+}^3Yf5sFR*JM#p3>2cxdayJN_GWm*2EY_^4I5lQ}cQ3+q zpmW}Fu0OJivp>>!4Yhf4N4G-q?lfgrUiy>lvM-QbWM~k-{-q&A_sC4islpL|^QYKA z`6vE6Y;SM5dx&a+TofC9Gwcz4<&wnb$_S|N3CSNWXyHJQZ~gPk;ePM6Ylu3w<2&nP zA5i$h9WiT5R)z26{O(gHOI<&5Ev-8Ay4QX}ErrSLi&o2MY`5S!rpt4^h+=*zhAj}e zcB*`aHDl*-o!B1jJ6Z4msdz1B`KrTQ33gjTA*T+k@;;!rh9%l}(B>cWT&S2JTGshO@=|`+sN*u2lO7v!PjOn5Z@W}eOq5z`j?DK| zU3y(soUZU0wWM*L7177tP8gZr^ZU*r(sF3p$=#SNqJ1t%E`1-8y@YRGPz0>Uqz`h; zXmG*yTO`rDzdRjow3q32xo0~fQ^nkIs&zAlXk22S&&s1qZAGo-wbzoBB#r}rBB*Dy zgDE6AdsuMyum9-Zknu|`jEyhAInj1~^@YOjIJ`715g}3LyU$2uaLMH_N3>ST=JcG~ z&ORBgaSp0J5@l0b%|q{CD8!CuZ|PUf^Mi|3(`SGYkK8pU7h-_B_juYDq--*O^#4&A5I{7w{#2)8Gn&gW^m zn?_2h4(wtykdk)Y@|#v?e#WvG*v!3M+oK9V(}pP`i82_cQ#-Ii{vJ$@7-^<<4(7y< zc>gt{`*xyiCC-DOv@4-sZp;ap^VO72_Us5Xk6V{oMr8R(MX5P{(4D7sA}F|3z#1d2 z?8j;z&X@4#QR1|E@tIV%ov&kU%i`ffBMN@;aD>s-47q(4Jl|8A+LZX4u)7N+)Z=*A zsiZ=VkmuYpy5Z^F>{8o7p3B~y$0Z~!+-QRJ?wZP`To?5-9 zwzucolMhq21nb>))#Jm(!__Cq2f<>~hlM3xE*k+6gFAbL+$^JB^FCZ{?j?wun{(kbu3ceCe2F73)ZnX+-$??WJE!_Z4h{X-K2z0QfH3^HlTj`CSc3upudu zyC2`$M&eQ73gblRE-UNy{xxH4t;K}{$jJO-shQQlrpxyBJ2XQwzU@Wq{$*gt`i~6h zg5*D*hrCHp+m#vQmQKbldm(4fweM~f@;CR)F12dU{;Rih)rZ(W+*)4>a%@e>Yin-q zj*E{MZ*u7ATv#>p#RC)Pnpm)?nAmf-Wq$gntFr@EsEOe*-;W`Fd#4epxBqPplnv6S zF!!(zA*G%r)-qC+g2$tlEKn=m?n;<+d|pmlMTq@rr)YJgyTk1efq ze`r|YI8T>P_gq-opz{qgf7;+^u#v&Ve;^)Nc)mRhx>!j}tDMIR(~BB9oTV#Z$;Yk6 zjzKa~EHtEBvs?-}a~G2OQF8Y=53+xGVT9fTxf6yE_^y$`wyzN_RGg~dHEp&kk32K_ z;7_YxEe05PooHYCtG+99Dp6vKBRHC7J!77OA`emX$!oNo+>3bWMIH7VeHrww*^j^V zyo^8!47nBuegu4`#UXX~wBL2icI6NOMkc>b)gY`Dvfbfr=d-YF0D z;gD3BDnCGjW&XJug=&Stsc~*|UH`X9o-W%0F+kVbfF~?A-^&AGEZJcfd(G?Qa4V_g zZA+>Xg9N+&CKVu9E^&*R2M zQcpla+`FH(4;O`qbd$7Cb^WM48r%`j3aMMYRc7;#sf*oDJ+JUS11t);o!f}) zVyIeiRGJ5Qg*$y5_G*L4X}w!I=@s=I07W?UJjWh&5K;!rcx7`2hybFQ1tq&&20F_b z3N;dTQBJUg!xwqKIyyKwbbJDJBaY6GC-^r4^2)aW(4shj*V|xX4AEg~XpxR8gSnQC 
zfWneEANBnZn+rfqYcKY^+z4=*NjE9HDYU)6tTwFmGxv<-vDOp?ua)KH<=Qp+d>G>% z>1$q50N|Q|;X;D*k9mjMh@tqNq{hd!-IL&a$01Uj&bMbScebnaqc9r*j%{f;5gO|Q zU~;2R_PjLb^)a&b#Tk2bhHsd&*(<0ACwaU=?lT)=?i&ru16hm z-UYAi8{XCjUVe&v*R2e#tZl|<0kZ-ez&6&B5Qq(TY0?!d+CuodV8fPx&DGg@b+&D( zSgONXrT?C}ZC)7lu62rdUuZsJ4JbpGp>ASN9QHS{rMY=%tYA!Lo7nQ_0B=y~Ds4@J zS+=EfrUYfkQGsDP0)a^EH+M^n*i=YNE^9OY;<7{KS%8dYhSpo3yZOQ{1{K5ob$~o= z6U(%DUlY&vY+xd6nsOTxWm#btXBVrN`C5F#DMr z`c_h6Vu1cQ<1r=_kc#y-@mw2S0Qz;v1gPOBSofq3cPR92?B&3!ni}Q!^!NxWsATl9 znlDtEB^c|=IO&LN6FhhmNbJbWZG~;?OC$sPE4|HAS>Qm+)$)7id8c#CV)eNH-O`5f zper#DNJydae5eUpWZSygu;n)9DThLh@3f6d)&RA&ENqfcB|n=rZX4Y`7N2?mK6?N@ zomFrxG1ay|K-pZ&fZa%W+G-o@+~q%fcpHS$Hv0iKB02i$pnTE;ynCNrEF>Ebc zHBL3|k=q)OngMm>JFuFYgcxUp0XH@{IkJ^%r(Y&=TYSiL8<3B*m|IL*RVqd$J*a%w0teF&=J}p^D0cQ=fB$T{wGHTuKETKn<97P=Umt|IlzO~n->u@6{BaBv`>j15oqtJID1Kp=I# zhUh(N;RjZM${RFgBNo@b02!BSivd|1h_J#!midVi=g`x=rtE!+=P`NxQ?ih{55P=4 z+g>1vWQEvl8D@TYXXcix*ii)zhbtG*f;-V5KL(|9OlR@~-(WM&&aIFuCx^kKd)V5Mr4DC^I)~`WVvG3LCSVCg9gKLpw;c*oy zo7Z`betLk876UZZ!dfg4dpQ62+!DYAYq3pT5Xe05XuwR08(wDj57kWee}1Do>Z+?< z7*NKmSN#L-TuAe`g?UNVz)1-2ZxoJ|ffo&b@RJoi_6tGcwMDDUp%9|bi!%Iuq;tMc zKmkl}e#zo_Q_mx`5V@qNzQbA!JKxb;6O2NullXHoOBjBITEyCyOdL&wp*I@cieJbb z?Ef-@bBp#$hW+h=x86KKvm^Noy3^c{^)uAOhi7Do)4eW5jdLmrLQ2FJW=Qk^Dj(rJ z>VQ|Uu?j2iUquq^{MjV#m$B#PykEbu7>pG; zCd|fGfH|JZ<4Tqp76|K!Fe8-kGVE3;y<~}*GAqGBmv-7n+6yf*Ye4W1pQAa5c%L4* zilU8~Vv}{NPP!JYz(cmY?dl@2*UpDFPxYHQyC78WYLEWzd3}phqd-(wYqP-q@_uH_ z3)iu-nEF5t#;~(w&1Y1gSKa0^T9q$T0b1|DZYjpJaK2sW=Bx6nx9d4qh1vfgXb{bY0vKW==7%Um8(wl}8e)ewNQEP4R9B|An6T8=CYDvm3>-F+k zEMc=pMrb5Nm9bXRl`faNlbdiw-qp~2{7Q^fZ){rErDml#OAjpZnzxYRY5GV5zNw|8 z*Z&p6SD)fP^2xcQvM}X=?t<-=&Qa36`RLW^a(?knmj~YlWaHt3MzFMaC|ns2)6Jyw7}t`XC?MZ>xfF37>FDwDe)uS- zr3f<(9J53V4l4Y2{4AC!7&Bg*yi6jM1wo;YtmYxG)$Yl6Q2l8Q*hmy2@YB|s>Ztt* z%DQJKBL_;>hZRA|SQX0p-cAklMK4ZRZkj{Eao_VuWnb;ZU|iFcGBc$gH=BeOSG<-T zwFGBNhI%Ro<*rjPxXzkRnO|KN=ljq3dEfrFh}GHk$cc*CvSx?Zi& z2YCvSo?3gvrC3VL8a;jt+rPTe5xrm()k>}mhKVhYdez_c5H8FoFyw?>(qnOaC#K)8 
z(8(04&I*6wASJhzdhpJ5sqV>@chFHipd#z?d5t}jmW9XX9)@uKHKR_E-mjb{?s|D~ zdHwF9??qYt!|UcPy$gr})l5;;%al0Frz1&{X0`s+PIaYNqPtLZVNZ0=dsSVz!)CIo z*R)QwW(Y>A<5%=z>jOS|^K(#rfPi?|_Wq-*ILa(0Ht#{`)c2T}DSIF!lW7-`n zugIFYnQS(@eV>^IlrB(fe`90fDQ4$Jd1W6YyWvNqbvtE~J(O>15T$9C2A`G@OwnH) zFUxHz`(Ei~sL&7v`F%4Z`nBp0^6|uw4uE|TR0t2eP16mSN+N*LGg+U^PFcSITo3Tj{$s=OM0YNu?yQOYpqF>ht&df$+g*Kiwt^}tS%e;ePl3XIw9>v4WoFhV;Q~FK zPZwaZ9!ITXmhu_~d~OUxi1ZCMeV`X9*z`B>E?5W}yDQ%~NB8T0su`^>ra9`cl8!MR zaaaLzs9_+1&&tzcMiiPQ^BkP_A8;itWU+O1fgk)h@m~cX@c&-Pr16z zdzd#3L#gV9iY`)keGacra!yHKa_34a-F&;-n72O=jGKl>f4Y^&RSyf;ZE(ZVl-$d{ zE|dnhu~_QEY8AZRL5rUwavS{&rFMK(eeRUIT%;KscDViYHX{3e|2!2Zd|M zwy*w;Cnnv7@}-8#Qr5rcHs)W4)k=Xid5w{v*3x$}6xyzT(LwKUNwcqZz1XGbPg}my zk3L+j7^xb5+EdcYZxxq}?zx)23_`6vB@P8IqVPvZ*i3}1AbQ-jA^0_1UvXWr*MjL< z;BG0`@VoP5rKA_I#My2(lCszq_wWf>B_5Yzznt#($}tBv@PubGHRl>HwC($N9j2m8 z@%L#CM+D~NipV-#?w;f~W~a#4weLl!-M(nyTsT-SeX{#dTT7FwABOPp!L)l$l$Q9l zfDcmh(-zp|62-%?SkckCSVmk+Ujnc=i~FB!Q4V)5tf`L&7QlrZCC?d2`5{pf1%-VM zE|TyKH`&Te3Nx@k_K=(%?I@nOKX1z2V=5G}BxQdp#3>6Zl!~aa=$=%{Nvtk}3Z)?e zrMJ)Jz%fWFv}rg^&~9U^Av!s{kx#fNKi#3=nCWat&=A|bm>DHqqWzNH zrhBW62VI@VetBU0zOA`qxTf9Fn=r!D!mjAD1^n@dtx%_IsE`%R&l0B64=~I>F$Zkm zRhY^}z^?rVS3}-!B|b6bGAlpsGKrVVq@DN@)4OozKu;#KYJVln51C;5NY)~7A*v%y z=~rp8f)qla;rAKolhK_i!PhJ^h!xNIhzoqd~Rh- zcyy){%9&CIj=fT|p{D5bb4wjS-MW4ZgJc$5U$7mOu`8^&wa>PgHQVY>*>JagWuW8$S*Ib{ z{!?a&FC;^jxP(ny$UL4u*q2%8)Ezc@<)_@C?9t3#`%3SOKcdf)CJzZ&@IwUXlZD!PKG%~3C&1y?dI6$=Wq$xdPVj+%jRsI= zxmmRjR&~vdbD=kIKKP&`$>L^);}z}pR!+g0K1yQg&r5)d^Cnu5l48Cjb(q(+5aENh zLoQ0MkwKXPyv>?Kmd!v8zjEVE`XJe~kb(g#o$+bjFCSI>sj=-%{mpN?__USkQC>Du z$=iB$(HCJQhlSgV^%lV%$@^;J{$RQm7F0fIqZ+r&Yy3XLwQw>m4Rf+7(?n)4jaE>M zYboc_T*jMFas zTNNq9%|UwYP+J1}=Uc`|%n*G|)&$+^=KBsHh3#J>n&aJkGue7)9j%VJ49?$m1<@5V zO`A-}AXjR-w#i5*m6VVxZSndcKi@&EE%AszTDJx)Ef;viOart>Gzit7|KCFOw(O~1 zZ&#hB?8m=5hDveScM=X?yz8t{KuWH>4;B1WyKxF`niOaEc~5)gWl}EZ#t?!;q#$9l zRErs10LOvOkB%sr`5ldbjMl7n?%gR9UyZG<&kY`=A8bk4BDX4z5Y5e%tKq zE?#oDj2+r~*_7oZ#Wql%gdPvZPk0wJZboeIe`t)itDxd%Ui7Uqq}&_rS3oirG3dZ! 
zhs5@8DNfTa`N_*-M?JbLYKhCF-0P^u$h*EO5lCj2iUHTH`_D-o$L5x5M?Eqx=R<`S zF=d4yUuiJ}Das(wn>=61gy5W|w3xoe;UTFQNfH?$=e*Lz^A1NkIN{S)*RQ%WkaqG5 zQZO35miqnA^n0(n>l0O-$Z~GS_|&JZcvE2Q7vyH>?w_Hm0;!IV3GcBRGq|AN1O?Yj zLsj;U4Jzz-tlX9s!n%vrLz0?|>z&M;9}U2ARj4{8mB9!-qtRUToKrTWZ5-YsV$8zWG_6Zr@HVeQNJxBs!5XejFo;p0fC7QhqiVKCSqc#4$ z4WKB0cyN8a+Y6g4xfSYjTTuemR~1xRmez9AVS`desO$)PUcPybAhkGJLZ}k{PQ~`k zSAEZePHrf@(1!Ub;j&v8V%SGC-rhi0*9&ho3!pk~IfV$kbD@&}s_Tl#BB8v09NUjr z^p*Ox6wqhVHhj5@C~T#JgnfjL8d8Q9KnP?1gYaS7W7h#kzci_z-4|94C<48Fjb5SZd|Sc)3Hm(f6KeV|KytGf4tqo z!H|PXu^z*6tEJ)}t&U3(YcI*#ZS?0%Y~X46^7LXRDuof>Xh#STj~Z6>dRSAj_T7Jp zGu#Sih?)v!S2N<=Z^y zvsl|_R4ttmU7kPkfH0XSt1q*WmX4Fp|B1TX`g3k+RrO5=R>2 zt+*|^h8XG``=8*|EY%l|WcuB<`-c8YouljYcZ7q3_eH7b?guZ|VvPEkxIVcHm_i0) zX1dXO(|>EG2{lA1F{Sro?XYTG2{xAW$|PsBQ1E7$Xz2Au#K@ms{{|sCHRQ^`9DSnE z9L89+N^iYd6AO#KNkYteY=zpOJ1eI)*|aQF_IXmM@S`5VRMANACV(2e^h`cYx9vW* zI$$H8`rWdWVzm?+wa3Qr#3RgRQux7a%|k+Qr4b>hjTvU?-@TNTuIbU@O9b&|ZNto~ z7DOW~>Ws~6&T5o$5HY9p`!hcZ=gRA~+NChpes4{fPU-Dh)hcrZ^$%JJY!WdMbins( zA2CJ?w>efaoFj(CJMEnkg@QIFgX(L}2(|^2{TMl+jb#SDXw_x!z(so|uU66KAP-`I zvi#dG9j%IEysKEjph2zkwU9$o*8K?c^EwF*518qEthnccc>jg78zMG`=$|**Z)K_# zzdR*)l0Js7aT)t^8-7UP3F3}Ao-WJ@Q=0Rw9gtW`XZ>i?PSp~hRe%%_$X-4&PyOk) zQaQWs8Q@+)wVR?(?+edVVf!@`DQY_$7PP10=rrurzL3l!jP*KO>@^zGTk`igiH_C) z4WdtVr_A2xgn=8KCu;RaHyj^+DQ14DYV2edCQ;#WnN-VbDnqG3!wlwc0KfmC|FDr33PG=duOvELQJF8`FQzp(mD{6(z~X zv^Z{S2MGFi)xMy$!GaW_DE}adc-9KM@@DG#f__c#u)SZOxvwvPonGYNqx) zq0lc2JHlj`Z4HbQ_Sc>HYlek;;TX!BP!%v6GR-Du)gcq2#QZfnTe0r1$XzkW(pgLL zh+26>xN?KdB#*~>=~AP2h)?TZ@Agf7PNWN4RcBXte2^eJGJiASPVx_wRN1M9f2caW zkXP}KRYIZn%lpY_cSAI}`vRv#jO=+rc~&7-+i>yR!KwN8CzIe4m@O!lwRGJ5F^bA$>6i>LXrhQz%0F*?T1E z`Yd07>2y(S{9?}3;>AmY>tDGaPI*`jmFR_(O8q`(pfDRUbxdpVxfRhzwkA6)dBUeI z%`Gr^ctw0YW|Vwj<@M(x@3A{~9WAWqp6!jb>!W85%B-wep`W;$) zZVN}I65$M?-WFB2ZpgXjCTpDB2s7Q9AahR}wy?~o^;E5tKnANcyJ)XGhq1h zaud`WE{X`Y*L-LS)vJxPn!DK{7A+bPu;JsYbmLb2!>?yfI52O#`vweIg+1coLTM36 zJLWHB%7*rJ4h+g0RH9Wa<)avnFU^;GK)T%f+IqbI{?=5)h2&bhIN>FN57*|u|_4)5p~(dFlOLoir$fp 
zj(Ek$0UdQX-|OBZDUdK#)3yyi{OcaU^cls!A$NK*=%@F26F*G0o$^tt^|AXa_9CU5 zbe1`YKUMQHn0ZBNjrwX~4Kj@B~pmE+-X z=+Vt%$f>M7xx=imb9D}DXm=7~8PAxm@;NA#aEHHNHEpYyyik!|D}4FLgFOYjj>9a* zCH?f%sRpLXiMe9?0)WESx*||&{Xw+Q*4yyT_INH;&nt^~FS^gd?J=w3S1bPP->8A^+-KtxkF~9yI>0p4h|F_-^#*49gkdTr87t6tnFM zPM6|?30G#vW`Mo0@GG~+LXS4JDIW_K$H-sKz;a4Zk>w`BFG+YT1AJS(T>GBJj3n$H$(Px^3tSirrj#zKL4Y}#P zxDPpD1^Zg#PG~|d&Zd7qsUJ-3-t@7`2uS&vO-EI&fBRTl`87yM+Q^;|K%1x$RXxgc zHz0`c^4;K(Gs9o2YV6P-deSEc0(8>UXo^+|!Zk`I_S9C6jYXX^gQ z8v3dA#JJ#Zl&v#J?w00_Qhp5cMthNNa650pHuAZAILBoIsC<_o+S+?;D-E_%WhG8t zicEi|US)+Oe>F|#mKdKtL@i^Qf|j?02jXHJr>=M{Hkq)N^H>GW{tDGq9p-~q@AD1F z!Kh_EXm1NmEls3n&g8VsY%)vR z3W{n%0z5)UC_mo?sRyHGM%jbhT)n5vp%J;Iq+PFCHEoKpown z=Tc{SsO>@eMal8xvj-#&`tGZHbBj%R_;3WmszRAlF5&RU=WC2kJXh{VZ?VI+pe50@ z*xS%bkujf875#U6?}C}kO%3GNGdI-lmiSp6nqOoNP^&MaW~X?HlKr;W^{tKlKDP|ywzRx?$!X|u&5%D)5E9b%tg@!cJ1_yu__61t4NK6M(M#7 zjB30t50{LO%e5xe!;p4eEVq`?1L~wWed$;3l>pU^-qCO9+SMhWCVYi&-neWc?*bfg zUF2yKS402Ww5WCEWr$9_d+?$?mtQkd7P&}NT|@=(h@}E_;jCa|8fq$X!nnVb$nP9`kXoy{6%M?pIPN?i={&sEoi84aAh)F% zaj>qmw#3nT_^qq|o<3HLW0MdGM6hsAf)tnAQbh2=LiHxMs1aY)@0^xMIIpiKwQcKe z%{s+HaHIJ~-{r}X?ZhWYl>5#X6BKGSwg`)DqnLy~?|;$Oa|cQOO_Ymv&c_m@109m$ z(qSi5of!0AHB!vYzF!Yr72!Sww-SSv`7^C7>2=9)*<~-owpwWzDJ0-Ii~ME7(P z=B2osN(*uP2JEIxs^{jW%w2aQ}F8H?5oiW}WAR*7TUFLT8_q6{* z&0VBAD_ULos=J5)i z@D${i?<#X?{=XCb7aMbwrs-SB@a>9HrS)#1Cp{faOjF?oUc&G0y2sl0P`QPus8R|m zq^C@f@*QJNr!$AC_mnq=iI~~$TZONBSCaM+8(lCKftfNKu4M}!8>6ZOvFB(Y#RE~Y z0t@6$7@T42d{m(tL>&_Y5 z#Sfoh@d0?64By?2(5OiGd5DC0dFjXv^$}|*T~GNKxJPusEkX%^Q zSbb9lQJhX=00UoPV09Ss*6xRH#xfN3dv<-OksxdQI+E@E(Yc3b5X9UlxtNr4DuFOoI3m zj=ifmc9W&S12sVrZ${3HoCgnYa z3hSpREbM>cJ?68x(Hf9}(CimCjXV7MGIB6{Ddw`q z>d~yUYa#^_*G?MC%?B^#M_N}e2EYg+5H`r8h&;JCd&-llnr{B$cy>$%`BlhIxnE$z zoy3{}Kz;PrKJL$h++upNWI~ z46VAQGupT>KqnJu?*(WT_2+J^r|meH^R5I;L&&rugef|Rnc7z9jpBge4NZcLT}&0~ z14;LIu|i@5PG6(@&?+Q59$YWHUoNb*s5?sX9ZTN|9N*-Z(3Qnle{ksuoSIQB90}g^ zR$ltkB}!^?v5xv1k^rA9s%MrOqkdE*1WyoiebwzX|ori^VUK_2BhU@ z18Jc-iim>@OzsJR;w!K~zfGG*i@O#CTLo9KIjp(i+DW 
zGW$u;$DhyPlYh`B5Y~)7fV>&0SVJT6>))#2{(2eU5-PKdGxv#Q5z2GszBsB_Dj@Oj zAkJDjS;jzl>4%gkCyz(X)>jDRqeng|=IFE!DRFfWwHy&r%HPO%g&+ztzj6ai$=%Z3 z=O7YRAYZ?8lfWx~?*Vj1q6nn_GAY^K`{~=lTraCnQy&{BkTvlm$isQF+c@)bbWwIDAZ2Sddo4OYRJ_yVUR4c<<%(w z(K&w%VsMb{5f7I>VI8qGo}&cCgR%#x+_d@v7LFlTs;S?v7b~f%ED142Uo`Sr`=2h_ z65~7!F}OcjB-eRlB%MBkop{Bj=iBTN8NeebFK%P#3hHJWZpR~&bu(=$e9u61#Kl~J zu-3ccYdw`He=CASbFjnmg3N&r@N2lsR}LG0P6cDxrQgfCZEm{;B0=#~MiHVG9)lUV zVGHMj|9PgRsDYOLmXQ{q^S)(X2xU3*F6qNx1|1e-JAv%G95JTw`;zv2vM!$F7+ zI;ML(@5K$+%JcKA2Ul|W8uK7^4j>D;HRrQtU$tMoSdI0g`y2Yi*MjkazP^`n8$--B z&)yl|kM_aNPiRr-z@d%?q^@SaYG-055PSnzH2QTz3V)WdQJFF`RzKBOr*G5upMdkzDPj4@8xId$X2p_r*63 zu{p$P>G7elOClNfxK`M7SSajnVxhiU+nE!-%o|NT)lX2v+oxy(lz`?FsK6ePEP6r9 zH;8xMzWo|u-E`RQ`ExdX34oZmVc1vTtVvaA(lriz&|K4m_}0A)pu@7j7D&0`mVj63 znbHy5=7J{Z(7C1Uc;N!moQ)&<=O@~A8pi~af~U8Ma&i&Z4$O9Lbtj8k zkqe-lgU)o^oF#0#R zkYeACd~9gEk9PkbfdB3TiT|?PXeV5*_NeqY7g97Y1QKf_4>gZ9E=b5yr^dg{YI5P` zT)^NHcp1pF*f{UzEoJe$lcfLRDT;sIE+Ze``d`b9JNVeVcVebrX2s1ViwzlpMwLjEs(Ole?Yf;31TuC!aQ)A_zEFeezrIX>`Ju(<{wW`0 z!IaMkm#JaoRVlE-_h_u?aIVl2m}Bz0uB?}J6U;H?)sNk)78ErJo#p%=yM6usSVmG02|Q=Gy6hP?!WQh4_C($bOx;P-M_geX5h|M}Ze zJ4hI~R2Vk?W8vZ`9%aINtY%}pK5_xfJDpWjRCMMfc9w4Pdb8 zk|hAR;-B*Tb$jNc{WM<={hX0YdX@VjA=iJFILt{rIE;!Y<|!NVtpA3j#ty9zX(@44z9S zi>wU^x?^h4+d>*V1qClfK`6 zTiOZn{wY;J$&N2OHYh!`}mPB}Wz^8QkA6Hkp1YG|8a#^Ud|DP+v zH5a9^!5dTYJb+UB-wBdF(4*8@|NBnqvpbq3vLp|5O$s1{oSnW`Ar^E*`KHi;Hc$c6 z{83`};skrk`O(0YmROr}+XOhAg+{6zggH+!=(GUcDOEYXm_<+IrdprW6<$cl`!6@f zaO33y+S3VF5)!vn!u)CDI7ZXJDQ7_lSx+gfC)W^{9`5Oxw%OC;Fs>CjTR(Pt@d23Y za~5rgN)DqBThuR9uGqM_Ez|}nP77}fLSSj#dzDFuI)UgmtQ{k=qaNP#?9>xv$4?)Q z03A_r`+sIzYY@M5KHWYRFZ=~G0i()`NY{R3J^?I3!4wrs_e(+4T55u5YY!KdFSH7D z1$^?~rses!Nl#dpxM=H}!Jo@d(O*^<7XDIg)n+JD)WMLlTUdgp>Y>aKHoax2pxxK0 zBS33ULh&c)Ov*${p**U+3rSmBX!2h1?{4FZ`93l*FyJBt1k&@GI$cpw@k4Sf7G8{1 zn0RTLw_?+l=H7H3Py@`)df+xSa@dJ2^|tAF>)?J}Ap!BuVNoGySXRhk?=uicLG4XJ zHjyMns(@;m*xSm|6)hQJ4|H-yL}zAZ zn%p`Nl)ShImtvJC6%-e%zLu`y2Ly-A{f!onV=xg*( 
zzazXm+1hSa1*Iz<)9bWvZ3RqcfN^=BJ&=&BjSXUY^2S<}cx|8Fl36YD{__Tl4Mv4t znCqvMw)~iwnE18n3)_}hZR1%ZIpW_?YxvQ`_`vnnM2Ghj2c&MU{oyCYZBx}F3{6k} z=OVy=w~;2{_*4+JWc}6)dvj#&!}71cWAbl;WBIA-+hkN06A!PcJVxX~{2%RHdpMNa z8vjzUbvAO#)^@278@b2GV5eNFA*B#9QjsJ^n8t0K*pV<>a>@O&qY~4&-zM5FQ|^r1 zhlbq3$dJL9nX`tT=lr+NALqYw_WQ^8tZ%J%eQT|Et@mBOcYW)5G4#zYpzTF|l^I4+ zG|69n1fd2n>TzO)#4~6Eu=b_%u!UT0esi#CvMmo()8(m(f5+EF&E9ekAMn$|3$Sk1 zc;%Pqcbw>fpms&H$mbC;^wh}RSDd7!d5srp+(BTXNY^&xSC#Q-<0|oRq5RHluwPYL zfY1KQn&o`Ium!UI3wn{fqf8Fl%EUGOfL`l>MOh+z$!W4`aOe76Lo}I#RP@{_hMp!x z>_n>o(I)%wNr z>PHlds;aLzx6O+oCrjU9c0u-D6JM7dQ1@0^sLcaPS@TOT{do66Ve{z~*ad-Puyw`R z%n2k)+=B<8qe(h^oE8r%q1hJ>3wY8Wu_D%n51$!|nOmJ!>Zqz7ee~6}NuL4qLwSEB z4lRMYANMY+d3|8^DuNNXTVpqvOV5al#Ds&aWGp?S5aytnw>o5PO!&3 zT?}0{P(BbsGxcbm8Q_P)0{2Ib7rCuwjX1|(W9g_3#=0jW^w_R2VSt!iP;`ktQ>=am zau_-GsV<0+e#N8XUA4O;Kk?2+`3LFUFDBGclidH01n-xwQACnlCMg<$R8vME+ zI@{EhOFb-D-&r#8^^RV#QN3LccF}0Dc_1cK zq{PqAv z=}Uxd1x!7M8=Sg1+MC%7V+rko+D`-2dq8gKx#pHaxp+eIR<)IOah!dKG&xApu3n?^ zy5hM%OR#PdPc-jw~3_y?rl0u`aKXLt5MZrTuVA@XH2!EE`c+CDk#0J)mtgFZVvp4Y3BxPe9g#%3(uMjpnuUuwcdFw|qDr zDCL=g-YypcJ$Ye5q7QzgT%4>DAfMi{Py@`=H_%2_db@#A+0@hpVAk3ZTp{5TS_@4^ zLY)>5RasS9k4Fz3pIjRx2|{67vk|a5RWpSZdQhnZ)DUYAeAcAv>WSWyD-xT*Y34r| zXG{>9)u)jy3kL{J-DNYhmK)f(xwS*_QhOj5nV`dhvH=pP^%pnB<}U$+>T_KLta7QW zq%D1!+iggfam4cA-JDDYXE0{y?G3CUHGO5h!WwJ5aacAhNDpi`)lJl7N`O@8RV{Bl zD6D$B`2tZrM|y#FZjm_IGY%)wRqYPI13PqyHxzg3uK{LBT6QlYi-D-f;aB*1#M&VLr@ zU0Q3A8bM5z`E2^kQhU{VI?(#G&-j`%qE;wq#r!TD5|0uNdE#5#rAg^Am z+dI5M-Nu77Yj`r#4Er+6LoC60BN(IOvywWQ)#XFWNMWOmQ>b-dFmbS84C=ns^kVES zjw8aV%x#0H&pm>$&%I*K>)!Db@pBg~g>;9se)B;~B2jb3i}E!+%WTh5%YCB$S(Rz@ zemC&9>QWCXs*zOP0o$UabDXuIu$M)9fooXPBWr2!IN=ier_$J`9i%bb+zCCmkB`_Z z`T%R%3jnRy4XJcKNrQZ9=_Oi>+b&!L?F0Xi;k3G+*4R_^$tL*kNDBywo1tkvrpuAYdnyjU6_EXWwo!E45 zdI6GaGqd&w5ihv(Dqpg%#bGB6Jy-bmkp?9WA1|}-z z*kOnXCPc~Y{Dyswo`M5v#Ryu?Z2-x!m%3M*)BRyx76if<@N% zX|TOK@B59RcoLimTpsPG=G6(ipu)-!t?^JEo_u_Ob>-h6kL&^vfyv&$WvF`A9 z5SHE2>EqSmn^t}($L=4oRyo%cx9fym?mJlQ)YP1oO_yK2O+{y{W>M!V(&X|#bF~qI 
z6m(}tyq)i|maMEXB7w($AL#UQdiLCX$a1^X{+M0T2>cD!+0bJ4<7ss@XYteks}UyG z#Rc?qj4Q{QA>e_Jx`hCarhY0TLrJw(av7)iy<1`B878OD^Ix$-;SRrd7bZGiOLCqh zT-lMf^!j0`*&$kPp8Xy>T__?DP~o?q@T%r~Gh^og6Dx)4ZdXSw&OZ zZrfTbd==Zv6s%w3<+2#ay3hGX9@MqD=xw~s6DD%IPD#hlM$23Mg*ZPkRkFQT zrMPh9gcs+=+|Kxv;>l|`9NCiTTS#2_Vqt=RcyXG2NDW^Uv=cP(*AF@ef7_&HHP7GD>^(!VPM)3oWi>KfrF9466d!^yv?DJa59S- zS~GHXO4W6d-B0DD?}<5_L$c?tsVFQvm8PrC`Gb%w5`l+d?^Tl5>@ zuX-pMhbFQ&al8zjAK%I)KfTlHDiuh^Bs53xFl_SZax`LM7pT2C2?fxi%oCiEJ`bq}Yl&AwCp z!m{@$9!jWk!^;mW@H>^-C3>#O7Z0g?I3C0^uyA=Q)$oz$+F9R@%!cE&Pvz+;ZDtJu zKR(C}cd{BWE|*gqE|gi{9!Uew^QNnL37cKWq1%C}n*E~}w2AWQtb6FAO}T4T9S4HO0D5`> wJ*%7ec>G9zt;qfDoWCD`OW;39U^E}Q`%)!L_VC83?Van4snzLX6PE}70;e@1YXATM literal 0 HcmV?d00001 diff --git a/docs/reference/images/shard-allocation/shard-allocation-awareness-two-racks.png b/docs/reference/images/shard-allocation/shard-allocation-awareness-two-racks.png new file mode 100644 index 0000000000000000000000000000000000000000..ce2ce6b2a95e9542ab3d8bf1ce6e0f51a8dca4d5 GIT binary patch literal 43058 zcmeFZ2UL?=w>BDdD_cQSlqLvU5k-2DqBJWCK~Rw1yL4%xmn|Y9B`Od?XN!Q;05Nn3 zA`t1)2@p^sASECmfe=XU3heLR@7!_6x#z$CIb)o2$G7&_IP$J{wK?aSbFR6b`9?p` zzI*KGnWGR0IgBsC!A#V zM*l=#nm*f;6E4%^>6^5&);}-&@#fTfjK){U9AW;WkR!g`LA~kR)eC+pnd9RS3%=}> z;8ES-rVpG)Ui~59d@dRjuKu`#s#9NsZmU%~=;C4U$jU>NFUTa?9!l2oUL-jb#SHBT z%eT9~*`P8Fnd^%+ioLnB7O+^b86pT~`mgdY3jF6$z$%#Ysb+QMPTD0wcd?}LZA8(f z;a=A^CUeuqbd{c>OEHT&#bMPa1xH1zlJ4V{KhqpLOOh+nQt^YIJ3J7gbHt4vxri(e zSe6MHHC#g;osHh0Q=Iv}&_-(Ca|;l?Y;kDg{g$P=_NYU~tFwyb@@{W`K2$H)Y7r zE=)d1EmRXBuAJ39dn&$Lj442bO*klIVBMGypjuaX2A_2>`N-bTe6i1X8V2>AibI#u ztywMl1AXRwUI%es)l*RuxQscU-$IPoBjydd7k{G-r<6J`# zdszA89CRm_o_uhxb46VfWO=W)UqlH@D^;l z1B^yZI|I$JBcu-%ni@Hz&^~J!;M{+C!Qq5~PZXBiBa|0!o%%#94zW&lU@Fi3>HUBm?^ZmvWS88+;@$ z=g>%_o$;GD;Z9)!`+LZ$6p-&mY6Nc8WC?$kSW4cFSv0h%%^2rNJGR#`($NIjch^}T z*4cV$(J>GfQ#}x8TO?TUfum$@jG)9RX4YLRA~tSByN3>#{maSimO4_-4IEkg+99NnA} ziRZ2Kaib{I`KBm3l&~15w!O=zi(Ohf7ckag3=EMm)Wt zGMHjRe`g#@jWD`bGH|92S|BunnZflFXG{=;d*f!}dSifnpWnFP_(^6#>$eWFhb6-- 
zW3G@*$Xk~K{N??EcE{F;bK1pWKNvm*DBgEjWY-K>I*t1mhP)f(-Tmr};C(*OYL&j4 z>Wpepyd1F{O$r&yKpd`m6Io^$5B^}|u*GH3ef)vqLiRk0z*NaSe zqfclPgkHl*bY4OG=5Onm9_Ls_f54iy4F`Iry%;%K(nfY!nbVvbi@Y>_y>b)%>hXAB zMesD+UROwtTDx~&5i!PIBAEWM`>eU{XL=p!y-QQ%(Rs0&jcr{-h+Dn=?x#5JuVZVr zPHEedsUZXz`HJr&ry`g64Z-frBy|o8Dn&Qgq+Fh_{Ap+HTS+jGrWkZ+kGz8CuM*ax zYfZ>w??$Fy?~MnUea`9NGdkZ`=37t}m^*sZHDAuKC?}@5gb%i``!dr75kv$NCgt>gCajQf3(p2J!t9p~Lc@79U7mOE8F5>L(^Hk13pV_49A@I2>Ahr!U8-glagF0p0% zQ~q>#sd!kh5cP#we9F)@UmLIo6ti7A;SJnT>#FykZcYY;{V_?!hpcc34o_Clex@j9 zFEvwfON7P(atmO4=(f7%!ULd$XrPuPcaN4yYVhEA~BC8Fl)q z|H!shymS|L$E)ZHLa13*%xDKqpfj(eX#TL}?~I;*YhoSa5%3XJQfnMtPW6;Fjjww< zvE|ggR8JXWSoq)bUOwZ=wYNMVO8T?UB}5*&Tdw*#M5CDZ>j_5la1lIDvoJ=)v|4F9 z3+j=_=?F|~J*U>8BeI9bob<_K34KaRw7@rF)F%pi<`5zxk*usl2r7!)c%hKqxApyp z2J&GC(B|?sVa;MxNR`B$39r5=9!BR7^7wr7>PCyPc-Xj2%a|!}4o>l1JtC`#r(Bm* z7EmH0G!pmi&k3(+^g2nud&!JiaB5M&{qSAw=GcCQ(mtuBT+nd#G~mgZ|LyL-fom~G`?nM4vh~?O&`c&J%ZLW zQOq)ZZy)tBVt;yCIfIakBOJu$0CU;+tY^lpW~QOaLY>YTE>--08~US|9^hMhi#ne~ zWwNwsHW%N7Z7aH!_>{^$->0KiQHJ+c2{glntH;@1-h$m>NEhBSY~Sr_!m>AEtCs{X z>1(Db;!b%d_;E*<+=g;8-*=kO*7-@s%6&DkhoWfd*HT5{}L}**m%Hci#0lYkcJgLVB8a!^)1`A*l?d)TX{y1wp?b z6}MI4PI+c5H@MDHYx^7Hak#Pm5<;4vRsEv1{M=Yb^6=WC4{lG=A?O7}Q6FC1wp-C5 zCqXJL2$d;T{CKYsL@EI+vHgubOLq^ZO`1398GfgT3!%olKL=J9m64j@V$Rsx&91be zYg{qL`wpsArK{XJG^Vdslsozve!{dm$2(xrd4F=UOGdKT4;a)c>!&whESAVWY4L9R z6p{KwmWwYqB={OUFEN>0J9b2NuT41L5j16_uXzca3extO-&HIzx9#%r-USdCVU+h* z{(cuCa=&@s`3~1iklHtO+5P_`Ui&~~ifwIcYwM5WRajtQVZj&_`5IOhB4!8N@Lu8& z2ofo_Jl17X>L6d2a=+V^mb(;2+*&0X>SSEQ@N}uHDngq2PQqo6b3Uzsf$d`uN&$vq za$UMz|MN-@XR>}X6C_WM6D5ZZixaa5S{N<2J;p(%{K3x*K^~i4ZTF;w)z|xb_b@x` zWCqNpJi4rwc7J0D8hROweA$?<3L&xxtjbEDly?ge0zuzbR#okx!;>X={(9!Ku`oP5 zjCUnZxaTTU1t5^FOtE;=c3O!tbqU?QM2~E3ZS_fVkwuzJdqTn~=lC5>G?3j`^(R(! 
z9P6&v6n-oWnW;7O<~p=F(M+edhlhN|cQ>b|03R>T&gW;9o>hXmUXJ;Ms5rXW4%-ZN z;wuV7Q9o4Gq_+lk7gsdGdQ~>ffIjx;gILMGz`?Zyxp}+Sv!j-vfkITj#wnxUNt%>X z_z&(7C?tG-aJkt-D9%^zk4hH@x`Z9a2IlMMXdDw{Ho`yM_?-3OL%h7a7+wW;vv__( zweO-cn*={g;B+B*>XJ5G$v#=#!|=08TFw>cUUuu_7 zP>2V_QD3(NWF3w_BGB!STsILc7jr~_{ggIievnngf$Pn&no!FYQ&UskRtGGp$VhU+ zZz_cAASArQpNx0xwJ*1>t;OU(o3218)6>(JlYXr-ruOO*|C}%|LdGE?bB)1F5D~3( zf5rM92*gKF`}=#b!;c=IBneuA)c)NYmbDPbP?F8Z)2AHJin$k8eIo20-#ZqiK#8%O#S??>eL9QE5d$J`w<4IauLiehs?kX7@h0Hfn zmrcR>?Z?}*q81{>sx=sluPQe~Cc>?waaYB_K|-dYgQ8h9RGV&DRaRCO3ZzE)Gz22m zlq6~>UlF#kQ!CaOe4(hxlJziS`BFz$6%+Czd6K^H|Gw6@Qwy`Kas-xIQxUc_dh7D@ zE>tI?m)9p)3hX;b4m(`diQVB*GuXQPZWt5Prc+wTYxhd#=MzAPh0nFdg9Z-k(%*GC zT`btP_qrDW-CwkBn}C{Q?dXX0#mdS`2|LR&U@61Tz2laFlFfvRORH?PYfmsHBQfoH zQKLClM3{|MU216xfv_!P1}$rp+vSv9^F4$a09ST5GrlYX=>4dGBq+%Tr3<`*=uu0nN}P4=+_SY z!abyP6y^h$p; z>xrBw1VVh%iU4zRJ_ciKNhJ5GA`#HYtk!F$!^^YyH$(IRh?G0151CDITzKfo7@7_2 zAkQ~awi4oEV#-EF7T$aPAWEIPX)3QR=L!uN42rs?+@q+0l=kayiZ3y60ju$)vjg5U z886jX80n+FS`ksF0G*#8TZpkS&v?A&+TwUOcD&A*?EtxQ0me}4y3+8{kHdM~2?o&& zq55=z-_h(s7gDKp*-|DBFs)srGQ05Z9mc#B4@`RkJKLsQILGBfKLBZZ=R1FR_T&4~ zz|b2&e4E}y<9L{l$j)j8)ju-{%~-Ys>g59w2e*vtakp~T=Tq2w>){EwEE~H+SJL7L zDod=`nK6mc0=<0Qnfeq%S0|J&mUs7BniGJIJAY-u$67gM#rd{nH-;;&?0zi&=s#r1+b0WWBMG ze$2v221zael4v%^1`J5QvMm}K0|Kr18!rB==*1}KhkMMAfA)8KXG$bcobR&_LYlr6 z%y2R0*HrvU%68JntS^whyQy#xu5`U7M?1ND!H!y0Lr=O6be+ML zd>Pk#;r8+$Pgw5&hY6V&H(GRE65JCpb^t^QL-W;a|%JZvw%+^;+s6 zGL*P9JKbs)fC+EMcjY-G2LRVnkl&FL2)@-l+(C%dj&82uw^LYQ5;VqFo^E4oXzPQKPF9e>a8J3& zL!G5FdV@ezh^Am${nEB11bZcpxX@*yH7Bi9;w(KmBH93-vvuT)a*$VHAD~du5w@_) zRg&w$UBfo@S?dIufx*~5ZG)j^zr$TJt6O~qk^c2O)2kLUL(Lv8#+Vaug5rwZA4U+WBpI#htyN0*5WYZn4`GG==lWl^`v9|UP0*F4+ zRUQ~}ZJ-ZbjP+X2wo1OwRHyWWCrqYRfY%<5PnrlZpt}00k}V?Q$m_@t?l?Ek!qz9- zA&%)D9(v>kv05|XbsN5nOSUU=VG=B6F7bdIcWqAx^KJfOBom{9zxB^Lv5oJOvj>_cv1xeH9|QzU_Ux zkRN`qzI2-7SFrt$;3c}WDW<>#Gd6lw>o?4lJ zdX|j{Ao*6S{ezQ%SWU)$I>#??9Q;(ajQ8gY$IreT9?8f2;6FX`cKv8dAbd@NtK%iV zeS^{4;w0{1jIN0xdEvv3ZRxL32RSynCT1seqfjxu%PnCts)8d~1_d>y>=w6v9JHb9 
z>r;-B9-74L8mq@wzdVnqs!TqRyJxBO(-Le`*E%*D@!KUbrv|XCTr-fVa|l~_+pjyn>7?Z zL8Y?bPR+=9?0szSRIBjjc%md~WHq4F&X|u$d{6px3~nh;#`p)9?W|(QY|}lTOWsopkq72+v_H*C9t`WY<47ETe2?f&n!G~_ns7Jp8aIol zt-(#wtNBw0EGY8$td35vT#fLY-Wb$w(KFg-!&y%12AZ((C9qRzSlo5D` z5tCnP4u)^3j+?<+jx);bVQenU)xDZoBm~fZaXD|o8XL6lu~u*f;assROToFzHgGos z+u$j5k#q)NZATX0d)(Klnw`xB&6T&6Q)z^R?<0Y(XrN1#a3}UR*9y6M16geH0+-(k z{`GHibbyU9j6JYazP33X^<=d8KAWqWu2?9a45dU+~|61a5S3s$ET zO@_JjnaUnkN=w5^S&d)vT2El3rIru)-bm%=DRXONiE^qw;KO%VWiIE9$RFL@7KJTK zjal8Q;~RBuBT~4fz`Cw0NhC~4az*{dDNSoOkinQf(1`)3> ziJUsI;+~-D*UdCuihE)!)a5;+oU$?Jbs~;C%Y-$oYZmJ`M?rSedy??+O!g-PL1R^_QIXv~t!!GTotA zm1=z*?CKslS~c`stSBoXl470uXk_|laZ3+}hLb?@Q)2^o#-`U*HnPRA*E+HE*}EdP z^{i-_qypms*5~^0zIePfmLBz*T!b)H?Xayxq>3cNpE}DIM zHQ?d#)vbrlhd&iY`PavqxHlTx1g$DQd3>R3b^^pMuWfItFkF*x+8WcCLo}(=RqxQe zFVvQK(0dVIAt`1rH{Fc-T;YNyd9oP2{H1kf0XPN&!u zS&}kUvXudNt=E3!A30MW#Pr+h+=W6$F&RHo_yIADbPb-PlZ> zDLa`SMD0_Hur2} zQ}9W6GO#lOqJS$RHZLQLfz;d>0zOktA+xQZ_2JXG+1bOHl6p5#9Ls6R*p>IThpu?9 zJ3X1eAo=HymM4*eWntS)j<6Y>Y}PZ6>$7%N)s~M2HexAPu%`1SaT%)`!?yBr-s_j1 zR$TOC_%>@e*EKsT-FYe9$F1etWm?ZM-Og^Eit}bWJw`0qg+P2y!F#!Y$Ky0R+e(~? z#zlbxV&1R~#3%5w0@loix?@$+-Ru^3u5%%uC+H)z@C3ZBS@vb1dni)$Rs7SU%xx%6 zzeHu~MPI286t&@Q|Bf>Pj{nfD8B};|l&o92r0!#J(*J6kWz99Q?olQ66}FD|t^RPl zL+OMDup__e(vTrCY@5hkSNG7-sTy@HZCP?W-Q)-R;X*BC)%rkp3uRT{j@&H*-k9~! 
z`4-|Wa46P;HnR$MSRU~UY^ubJE{4_d5KxcZKFsN5KZMilkFDKyZ`4u{VLs-Lu&Ji9 zp-IAqJK;3w-f?unZ&dhA@u~B*b(-Kwr3DW69qtvCm-N_et*5&m} zahVU4M74IM_Nl8@ zwb4N9-MtUAp3WX!lLtP7J@;q9dB9gW8%ci+CPNg&S|NMP_b(ibVuhMYg2CIYxNK2R z10=SYB{8mnsxWdn=S(gMg~_k5c@tvm(?SddZhj!;LCVZ68Zhl}=?C3R=YD6X_BfSszW{B=xJR z&gH`A*k7U~@ad?}EmloP_z86wcXsFRj-BOe z;0&@=OOMHUD5(rfRa<(g7bvi9_?Iev5*`S^Wf``LCB4|NOWTQ6L0!v-sgyb3MiJ#| zPBYi7DH7%$EfM^#vUyym1}+6#ywnq~>aLNf@}o{DyruiKc?FrLti)RMdtwuVVuz6C z^Skcf5q|LUKp%dzOC&W(#la8TyL}eTTvDBf3lt{D3{y*6pK#PCN!Sl+PF78Z)`wCPGDl3@DK_mD z$u>V1t4_|8w0<2a$r@u4@OzbDPR`~3WX$`mv96X!n9Z-1w112U37dLOFi^HwuJbiiX(596R}9V% zC>dafwr;$@rMxI?&Cyq)t2vnJ|aQ(B2;>O|-9BwTe6cuB*#1|Dzo?8=i_16$+RHPxP}8c)->k67>I zvGFShkcJGNTA%uRi36;AOZ>9{eWD9vW55Wqd+%m)q0Ur;@c3CG5g25JoZwcBu>OvA z<($I!Of37>_s((g_3Rte((M;BvDB+%5ha`8mDcz6ae69k>#Ffk6@@Ia)8MAp8{A#v zBG0l?`7`Bu##glx0>Msk;#--&ayhj0EP4ix4|H$j8CVUeF3L>H%3HZx`#w-vgg@&& zbD`riMHq*KPu9H=MVdrIJmlsf+T;ImwwFMA{Z^KKc4ye)i`c=H}}er z0#QP;?ZM;)vJ<<#j+ffqz8CZTFIH2BM?y^QFKjrs`RF&b*|Vbz^?`oWLa9Su;G%#( ze_t>UD%b{9R^?mWdIBVUzpBodAheU3G z5JTad^N*WP|Lt>Y>HVNZcJseNK!6he*;iKou}INA3;x&d&6N0?x@vd*ES(PVy_O95 zS{aG7Q5E3Jb)pCFjAb?U&P52WOVZl&^S0NSKArG*lGwugCC&d}c*GYV42<%hvs#kB z)CjoGu6%8&`hNw%98y6LOvqZEzVP>TVOpS+e68L7o#ub<>%e&Crr537IaU3@@ib?@ z*5(qA=!nN5yEFb9y)@y{FpA{#_Ei1pn5qV$_z>#Z)#qc!TX-iajp0r{Mp^I*xKa-; z{NZkXZ@pRIc-WIUH_yeisN=7(w9OGBojPbZEgD8~mT%zn2p%XWc{Ngd6k0bMsp8Bc zr>soQs!zPOS@V!n8!vZX9nMv>2r_KCaxKjX5!)n}Ble-^v$EQBC+l6fg(M1|*6CN8A z7lnEnNtZ&)eNvtnUEHPom|Y25F`QlrrCa(okd(r9y*;&>V=sHo3uc{lrY3$bq<<%^ zW^RVzn%wl>E*+Po1=PP1>TG=7y}1USI!@5_{948v7iS?z--LLjMoLYXfaUIJRK?a# zZjVHEn{SCWU)=mp_yuHF=a@)MB<)Cr>!%8hkc7g=YpEVmM27QeP z{SWk6E?N}H&PZ*_nT8!7-+&^73i>lUf+^8i$C37*f>mqr+ESbM5Fd-F(_D?5*bPa# zZF<-bHP9z@W4Kk~buNK6JKt0LHk~=>e z?RdJf;g$p&HKHp0$shV4tYhKjJ_hzsU_ZDVw3 zf%OzznA4P7LI0$Dki|0^6^v@zoAm1)hK^8o0)y z#uQ(tba*|GkHFy7b<6Q8^J~#b+K+aB-JQahtD4uCYaw#W?=91sNUN1AwL+6ydBoM` zd>X87!??WO=YXhh@9$DN>d{Ix+Gd!2or@1rA*YSkslLL!z2ebw-EydD{f3f9G>WFr zX=*nfoO>eq-8*~`|5m(zj^`t$>?_#RCaoZfr1bVb8l& 
z{yTs&eS5pr3K7Tdae)`R-Wsy0Bi(W^Qt>VA&vBxm+S<*#$#LVW9hc~VRC!1?pSjEd z@43;csgoXRbRV(F;ecs1^O@8(ozK)jV$~*r-mSJ8a~7bPk~S*oGk)Zxl#U>hch*rP z2SXq=48!J3?Ib2EPct^1)bYtwp$640Y>4HEXu0LqprJ*Uj$shjZjP8VLI*(#?4@FD z=|eO4BgLpQoviP}BU;WD(1`eF|S^Mz9reV>M^4G zobp=xHesW&3cRusTF#*)-m#|Ve1Euey!Jvrr0}il#A}J>fX?}_27)dLIr{Xf;l;^& zwE3D9g8RqJtUv_iV#YnDruRFk2fSS#+@YqKzd5K7Mh~J+t{htl_Gq~j)__iDFY&S( zV+}elB7~$bZ(ggj(Tc86Q28la``B*zohX)Cvrh1c9=b%Q3NJ2(h1pKkDm2a;K_0bc zR}*K$c5;j>mU7CUEb3m@XML&g=CpU>My}+N056HW5~K%#%z&eh_aacbgLHB-`BImA zEzvw=WgRHqCCRzYT><92zUmL+usJf@$E8NsjO{d}tqu8q)61|;4EBSq z6`Cq&LDrogOssJtV+j_*4A_@iY+(eAlLoa9hxRI z+KZA$LK|IfF6AkOX0L>Bx$~y~($v7%6jcC)LqtB~k^TBd796!>sfI64S3T$v`qlA( z13qlKtMhE<{P%%5H8f8a?Yrg5xO|4snLOw_bDgm1T(EC$E>+hW_VfgSd9cS6B(2=__O>@PKTIB1~7)EM&K4YLaH@Rk zaVvOdcV)MuF~O|BH$_?<>6@&&hVLJr+)emY^*gk^LMQa?Iyyo7+3qhajRry9o%R~_ z=wDUu8$mytNN)Vu0eid540Hzs@(EmCIhdR=EQlREKRX_}!*#IGZm<^@CrQ7vLs;=c zyw-j~2;5{oSU63~s2rbglyx^KDYzM*fUkA-Fd2o zrS_2u$>7iEa+BM6~$ZcGpKBc|DqhQxSRsyOX@Olwx(?2b zK_}&(YJ|U%cp!=2xlSRSrpTwTgXQ&47ELQp87UA}OBqI_aT91_tm#d$mF;A@%~b6T z_`$pOiM_ZHUTlL00Rs#TQlRO^ceB zgv`}3Tb_RX#vZ%LjGe3W&T`(E9I$PlTPG&1KWgu{m1Ehep~O><~81kiG;bmjn^jXno;MzZ?jcZeE@* zT=enXyrkBLFI<~@*kE5JV?^z(js&rTh3XSbyT^Dv-rXL{XCWx(x0qnR zkMK@?pkJ|+R%e0)nzDw!)HJN01jCU`r(BxWcr9r$B=?1v?v%ekMuLN>iqfC@OQpi* zf0-*kOu7V8Dp+li{(?f-enmS%Nn~1o?DWDmXq_nV((p`1iR2WmQwMrc z*1KPSiWf`GjXz~D-=ZgI>POcho$Z|`!)fcizqHb?kvpf@oS-Z zHFAyy3(|}(`*6Q@#W2vPZVUK&`7!n?T&YP45)P**LFLS{RICh2F#FCo|Z+lzdL6d+BCtfxG7 zfIGvpkyWqqe6_T0iXZ#aW2ylKy$i`!J0crC3QZ-Dfy;3g?ljUpMQ?Pi3j++5!qIT_ zT`ZwW!hFM=Px3d&*X)iq-a7Gq+DI66%I(saJL&ar&_z9$4_6S*niU!drly|^g-9e% zg$P}|EYywA4)Y1pBuIyXT?}N^i(0u7tx&(-a0_!Y;?tG+-n(i}^=Kg?MY=VVq%);3 zNcIChJ+M*3E4B7NWSC~#ty|7sCaZRA9eysi?4R?4yizBwI;vd}vdRKdZ0wVbf^<2~JdF5$2KyrFBQkOXPs+-Ih7(<`-{TKeZ89)c@mH(IC|BGH5`?rXx zzmJSp00Iiiz_kL<3Mfd04J+4wf9}#l10D4cfLKKUI;tB= zi7alm0FX09zSi##WDD>9qn4e4oiaFwO|`TzGI|d{D|}CFmQq^tB=7w5$Zckq*<>IDm+n0SMT*=E5-mxdqturtd@REI7CT_6!FDO8515@KZnb 
z?>7ev^wJ`WOp;42vF_QP03x@L;+R=5m#n9iYBTU2pws}6=>|ZU0qmQfNy)GtWoD_< z^C|>l^^}46WQ$qbm_LjVs3$T5*~ z$H>T#0N9nz?nWaQ2J}(}0?B?>>=k;o^;P5$oZIN%07=(T50N5^0N`1J+^u_LWF7`! zx&i>1_d~EX)_A5~QM*~l91DOgKn}87h$?c;6neSw@9$p?x^4p8A}>G&?`#jQxRGa^ zt$+a}w~Nfpp*tvGF++?r9_EO@7#oxu05Ctj1prsCg~5Wse9|p|`b)9QYJ6gjKMJ}2 zkg7R6v{QQ;nunC(PGu~``UUQ)mI?Bq1_K~XhP=i!-(PgPzV_(#aR@}Dh`=rqhtlsw z=_IWKw6|WH26r5zQD_U159GXv6d4K;k|iM5G0~@vM~(16!rfb<`8dpFbdBd@oK@45z8E#>lt!M!#U1Sp!GE3c&@(jBuxU_w-z&?{+#&6$a15r_MC z&O)rn#|n{-tYY9tW56`LoI8S@+ZgWvIt2J*&@I*Lc(JIT$li_~U|&c?E=&y8ig zOpvB7e+FdLxOwKID=ZhsQLI4(>c{bd{l7txf4Nm#D+eI!icNMFRFRSA9oHaDKAMK5 z7TC~TyJ$^{yQ|+fAQi?e@#%oLR%9StLhKGwO|rep32jhC0_tt%z~{zMlmp1P5+NC) zsjidNp0?{W>WT+Yw5Qg84d{&Y)6?8*=j}M*xfG=eqvFh2s(wcT%ug~oUO+9hZ}j&8 zhMJYyZ6S66`U#)&Wl0Ee8nJESC*xEm;6mmbnLMbb0`IQ+!6o1bkk`8JL} ze?BiVoQq&A2%z^b zw!lC}^POevO-5;o9cXs#`6uY}KN<2xf2oy!js?f;*FiY`pPZQg9h(Tq49b5W-{!y0 z`u~Sk3R8xJM<}M;ht|v#;Xed4QRT<=c;-VlU_5>wMj8e~cZzNyyenE@X(X9W)hhE1 z#cFGdPK%eQRfjr|T4_?+ zTx$+7)SSp44vhs@x19NLY1mUZSTsh|5_+I>Ifp9(!q}aQuz(jK9Z0q~=-zMA6 z-EOMxMQI}0XEHVj`mX=owrH+Ee*a3oyRyyKo> zROx1oj%=EC>*P>+U0a_UG{+oIUoSL@-3>!bg{xBQ$!484&;F^ zUO-|k7jD`Qa^7z}`qL!n%sU>cXyFF8&gQf3(J6B+m5V_`&OF_tSEFB*3QvN&r9w@< z&}v;nc-+JwY7u9XPD4P#F9Q$`g!sBm=Ck2}Md@lN(!#l%{<8DZ+xnPt?9Ey}V&98h z@#;2x@{M_9%f2y~u(ro9UluCWxLoI1snx@KBjIR39fxwU*x3&8<7h z0Clhld!O#*K&-sX$+^P=Oq|>;K2??;P6SZj80A zt_)CB^hOl3Tr{&)egEizg16=_lKGS&@*{)tx;M$T{t0}7*+ww&!%Dm}??ybtf&ZyE z{c}0~8k^c#>Qsa*QURfukMum+Wp{i{`cvXM=mT0Dtpv+=(ci7!aTTHCcQZqIDU>v779Q z9R?!G4Xw55101FB#A=W7>7cu;9(;uMMDaSF2$j<_ZQ-wi4??U?7oB74ReGsdZ(6Yw z?Rt$1BGr1K3t}Y$t9yq3Tmt22$=95yGB(mCN{Y2UNgAy?oOtymGfSuBvBpkvBO{aG zB9w1(zKrdC$v!l+?;s(P$W$Wwcej`04>IX1PUZ|osm z3>ZJM*hkK#ktNY&$*f>Zr}pTFhem$#9ZHr)?hXTiv3JMdCf0=Nj>C{}&y!IC674b& zhfE-ZS{7#%tVX4oBu*n1%drP0?-#x=Jv}UEquhVw`A~B$=F1RB+#Jy03snU1JKz$D z#YKPVxLAmL!ihPTG%K=#I$E)wzb1^fsQZD#3j- z5mlzm0KV}9PKdItREG63L%OnwOc1{3G3B!N{6bz(-+k=RvzJvHzFGJa)zULmnB|75 z6HG(oP?4(RV7PYCEVOuK8t;CRP9cr7HFV4px;j4gI5^Or{6+;q>Sur1@ zyDl^+e5!&CrqZN^nNO7z6M5# 
zV>A+EV}@*-{0^}iUW#`rQ<`$s*`_mtRGX7gfdBtoy3U%*^wfa-cen4!I^dfWB72qL zDYH9t|Fg$u%~ke)2k0GdbCBL|`2R1=!3TQBBmsBND>LfPut0|kp)c5@-t73wG+$&F zQap4rnbT>GR)%MJ6Mp&97pd}ZkYNJb>+gJ>(SAo-%x*SL^kkkqTc8wPr?GP~{kT$) zBhhvAs>HY3%2vsvdBKXK=X1^5%l^FAQ_geGlU-x>9^#(nSc-OotV;UA!ty-$FJ-xO z>iGNl!Ua&43sdJx@+UWw{*iq<2)6ZJ5tjQ$lCI``5Ll60JB2FU(UjX~P0G-W30++Z z9PR)9T~Td@?E78)Jh;U_LrUnj{o9)>>$YjuI)*+85=a|nMM(oOL;IaO7zNEB56N{g z@|!nPM_2~_@qF~AoEm26VZd{1Mw3=`>$cs$yQ-*T8Q8G8!0zC^QnJw;yHF&`h1i!X zq&|k+o`zy(q#}F!nFU_){WbEryW@?$2OkJWfMPXUU%MuXX)ab5gb-Gj9LjoH8>m*PSQYyYEX%MW)W52^CZ*S*bYk{B!L9|c? zBbqKJS*oTy=ko=QSSeeW*OY-U04PmePuxJ>@&8H*KUnx1>yM!%OXE1Btp_c`wti;Y zDeC(@+-fhy7@~i5MY{TvVKZ(H_*dA~LnIwEIY zcx1&s&g{5|ULF11w&Ix)at*|SUKtvO%5}nU&ekx&@yKDJp1t0)>m;Hp>bK7((>|_# z{j7oPGrWcrT_v$}KQ3KDj|RfZ@>`7e^;PzF22;nh74CKq{m@Dz72cir_4#ZvKjHWI z#AiGQ$LBv4dtID#MU)q*y9#`Lv!8sQh||7bp%9OGV^?rM(>JS?7cb+8Ui<;RiUC3> zkBz5Y#m9ZMTWHMG(!k24jTha9Fn%ty7-jbPMZP^YA@AVj))sn3ZzA}{yV|%`x-L2! z9}w%dTwY%r^uSYIr1ci)%bQ>5QhYUhq3>7z<|${tfQ*jA?X^%#ok}N>KX6I77b!U1gWHzmj}n3juv~#IT)<>J#@CP zE7#s$!XWR524Q#q+#=;~S0xLlc|n<&zlfxx^1lrwh1IZ^V`MrHfj|W)+-Narg968q z-rmLL$J-$9u>$OrU%5-)`hfz_z7t$GteHs=K%3qIu`*ED-o8H8yL_Qz(!?;$6$ZE} zUviMH>+;$6Nj1U8*7*}ARYl_XXgo+>1w|`IeOa2fQkA!S^wl5#HygG0!oU9fkE8&b zo{(@9JaoaxRUg4@LY~cCEQj@~^9_RUYxxt~%Ht_IlZNnzOQn9I@ptUM^|b zHhOG#OC_^DU;0a6rhoacyvpafkZBXSX?2X-mgM&#x-N|yXq53yQGY4@F}&~Sqy12 zR4@0WDGQGHhc?znhFXVkgUvZ=42)0F^Ltyt2mb!nS5|AB2WpG%paaf9m(2*;M#pk0 zo>VOEIM@q4Pja?yXry>ON_BbdoJd+3?U4`LZvHYp(FvhY+66akgqEa!j}LFv6p4Mln{%0Wb$ia@AQ5s+R&=p;xLDUlK& z1QMk8NDU#7d=v0I_da*sb?;sGeZRHd`+e`MB@6i{^PkzXXV0EJzx|u1kc#PSmuP9W zU?uf+-Fl?AORn7kgNT^wS-Xvdv-;>$^&9BW1;eDJye+@2mWNS=?Ijrwm}8tb$QVGj zMqc=$3g6hV+gXZ0_s6I(Lxlqa0&y<|6Wg}Fy-#tfH2Znv^Z_iy`DfX(`sDGFw?i{p>R~Ii_PPOjViKv;KOlp;z=wKd2DCk}wj+lqmeMSZD57GGT4J&=)ZsmAt z%k^c6d(Z+SjC1A}YOzWvqG@CE3x7sx;@g&d6+$;|p%9P~s%qh@25sj93SedSBY2^! 
zpECVcu}3!3wzhN8UIWJ#_o#akQqdjdi>m_-%j-T)MMbTwqPDQBnoA8n<3yhQS4uNT zbj`gzE0s7`I6qJl)jU8j3|x=kG47S+{!UJ-4!4x@6utlG#bg!gYp6N*5 z7-rCP!lT~D#+$IIJMo8=vdz24n3_QCXDdNgE0NahxZu*(nm7{5cD0xN&#UHGSIxC< ztAgIZ@7X`s(n#fmm{YC=<)ijd#yH(<@>0*_V5`1POc5E#MpkM&V5=Zj??+o-O|}1G zkMxU-906FzjZm_b+aI>y{R+q6ELnbQc+6)uyk@zPx3E@ zy?KyMmb8N9n^o537XT$Js_)9>;Kv;$A$xuj zX$@&MqlopLrd^)y8=A2*c^OD7Utu84acFWTqFHrtUq^0tNaOG)D}D3Le3C6mWJh2lLDJjvGD;t_g+(T{$P8k=E1TY1CK~{2i z-jZ6>jXVT2>t)6Y(_;`2%8(dDQmaKEHBncth&tn?b}l32S_8qnyS!-GwwSpB2}8eu zFfnOWb=TRJS-2FfyM?d;>yE}NU1ipq$t-D|6wQ1YPw>6ocGbn+6W)Nn0RjZ{B7N2g z`WjxX4=v+z8xQWz!wGRA!DKD_! z-c{?Ho}gBIQ4UU1TMK|Q=q2Z`zkm*v+vWPVDO@wc6e*QIUQ~WhEPuP<$V2dTXuBMN z5ca**Y3OLaU^uf`l$!-xUUn|7?1o5+w%yUa(C<^pKO)HnyD05G6#e)>BpTcgZuG6h zW_!B#+vR5?(!LqR%?ze2t@g&*3MFohSS5aPlX4tJ&nNfYKuG1vrP_84uC}PN;Zh?t zqDuQkMeKo5uH{S8?klPxYgirOEGzZ9cINknb|8+iVx!8c(->j*WMp*v#BclI53olP z)O+S3y{D}VV)>Oy@#3@%_3o|da?(m-sbGj3 zu9JCNX1$iLv$D!v!Fn-~Dr9J!tb4F?>3G;2qnUx|`j9xDPki8ZVMnsy)$%sxjnoKC#U8k6VPX1yN2Url4- z8?mgq<-c^YdnMhZ@(>x)LW4Uawd~EGZdnL)$y|U8$wDl%|H9~y6wh}1UD;Ak*VDGJ z?1tkW$c5}8J;gQUx@9jA>Mq2i{Qi!IA*Jy)CV^;ww+ zJB2^iwCXE(x}{m*zzjsnC1+4KIwLEYHI*+@x_nP>zU&v3uu0MrT*Q{`&NX%_xfgMO zxdQ%@YCqXQ89ZiAjnXPn240z6il%9~PSFp$J`m`IXm-xO2CQ{&eiGIQG+JrX` zLVakw-CMM)`*Klpli`e8jHAEJ4A4LG;;Xxf!tK_!GL;jCx`KIuAL_C$BB-K0#R*b23X0UH@Xxy7nEwebgRd<` zk^5QB6>O{UdmmNC$o;pzRa#0R?g01)WKjMw;Im-~<^3-JBdLFKefW_7Kz}^)-`BnWVX`>ikBVDhjLg|rXiwF~` z8Xr;tcw>pPO5Va3E)*;0+v-OJv`l2>NA8DS8V@abg>QICR+cN68P#`6_(IQv`R;V! 
zpE4-V)&l&?e^w$FvtY8ooctaAGM_i5vWgBqi!wU?bv_D$88)rvC~-5xfsLE{^_{l|+(>|Z9+1AvAS%#43Q$sx0?~1Vi zPiCVf*8q+Z`ssL<*ul@@5-sq$5VJ}9F2OtGbTAK!oHvTqffpSA(>F5%{;P!chouLJ z%A@5^wCHYOOe}|;?K<$B&%Lq5mLawc4|>_3OE=b1$y}Yqt0?z-QtCE*R{DH-HyhOo zE4vGBq1zHjw+>=fAlsHBB-^C zVJQrDCLgrP53K^smJf6S%9594)Y4k%@01b?f^8Sm)wpRA1~_}yzq!MGbW@X0dODRQ zc`yL_rNUGAS|bPys6+zkZ$uim+|U*-&30E`oU!rFeQ4qx70;MQ2Qd!I3y7-Us?Hb$~=2S66-Aa zQ^sbkkUb_Q-~C9cZ+A7^CE!x=&xmESVC?-B2Su#EJKI$S)(~6{taivvk+0@3c@I1F@-Co>^!hr1P~Lioo`PPrn|MB z2o?Yc;CFC))J^a~Dz>_IDZU=O|2XK^yKPxaO(U$@ni9h1&c-JD< zB6$KnfgN{O%ve)^WPmlS^|}ZE59Js^v-SZn761T~^4G;FI!x)YPm}rL;HbJF)Xl{u z!@}5DcD6?koL(ipxdD(~hGu{#uXpnLP^>UDuxqR+QXRmoJnOal$ifSnK2w<7QJOI7mgDYJE4f+Y9=*L;@wen^brlvu< zoBOnnFHgXFlnrX-gB1YnBg;S0K5DD;PZSju6|_2`is}r}iW}eV%^qe5>+_?MXSL|G zk9CM5)y9ECWB_|}kN9WWM`Oylo(w?ys79xK3_|}+`}oV-hb0NFIhh$=MEAa<3<4$R zm~xhH zG_dL?_|ff`dvsKqkgksXj|9@~dxX-DFnm)N(wDb@20H^HP7Seh zbcj&%*?3PwAjH_hC&&?JNnEO5o~~f>Ey`hr+e$^yvP@XAb!TU?X!KRaD?f2{T*m;V z@_P)+R6p6jsM3`Vz+-O&1euosnIl^LLGzqfy%N3VpX`rsn`UNa`n&Bs{e5NSUb|Mv zJ|CC`7VU^?na#^f@ZO+PfUh)_?O8A#V#b~bkcWW$a3wp(qC7Hjw;qfV9DAY?1UJO~ z0?5OTOn3wqq2Hy-M#r+X0o?2rz{Gj1wTD+!;b0IM&Wr=?zhCyaK}=5KD4gqG2p|D3 zwdXQ<7N*qiELlMIkB*Lxr=vc9i@v5qFmaPSnqb`nSo-Fk)ZeD#X|Djt6<~Xt(Jz3v zV|MG8TJq+>mOwcP&}~-A!(aw^x|+2G-tlxSpDLKICjcQKXls!%)pc;`FRY=lu`#d; z?BT+d8V^2KcuTJo|0IM|mG7wrv32ywj@p-#OKfA^Cm9;6AD2xlSJfGZo6&p4r+n~{ zWPr;o*Ip9(JXQ#rX#$>$S|ClM-_Q-&6Lvs82$()S^R)AVK8ZD~xYFUKB4e0T1yi6z z1_ktX0g@^EbUGDdo=v+Qye1WvoEi!kprK*HTlKn7Jt0r~N&5TFBmkHz9qsAt>N>dA zpXa&ybB>E0Dl&u@U4&X|81wzp2hXL2b_{@RBwvAHc>aZ}SLu3j9PlZEiN6kx^1khY z!@z{W?2Az5Km9RltoxmMZ~y}~Fy6bWyS{#FpZ!rpM1;PyTISm>{TpgWFc4fqUHM=| z)EEX#nL6L=U&9RALjDc4@TF@xv%b{m_LEmCW6+^^niFI2>RT6b^Z%ygb_x7m#(-!$rqtgF5EJbY2EZA z_j{$))rJ~p^s<8J#*yp9s&p3M*2L?sv!I@Bf5__uugy5ZRVUy;Q(!NH0*)Vp=>P!LAQn zL;j5Rt+#H{%0MXXpr6_Qz!T>EBPqh^Z#?1Ke}M7+s~@X8*Hy)DZVE{YSwkWY-e8xu zNV5Sh6EQXMW`qpnz=`gX9HOc#e=3xR|{_qx7GC)g0|Wdp&(cUW(EZFJ>HVSDAdXGqEhGohOU0e3gKM#7Pfl@Vkn 
zHDYGw2PKEBXQ8IiS?d~0!76nqJ&{@>d}ybzfS{PB=qqu|S9l~o*&k~kO@SaS!i`Ub zWHx-8-t-^(KqC8#?7$+OdMTq4Jw#rpCgN9u)&i}k)aeqEjo{0V%?d}WV}>jye;gwGaWowbvDvoVMI3DdPUdMglXypS!QX;ENrdDhkSbYKOf}vvQ=64@8q|6n&VdTrS?#Q`NR)x-Xp%}*xWzWK*8i;%av$A{<1yf+chAp@J)id~#+>+P1f_8>rt==#@+~M{h7J_&uVi zSvEho(l7G?p?9VAdj%|mxLT`hB^z2?cp(Q8vFaHyw6cO43+dk_y`hDcQxxkcp&VMM zX@&O}*`U4?fvbDhR~xpGERpwulz6~@gMUXdLOGnEiGPZW5Y>$0SME4Vj1V8@ZyFLJ znrc6^*P8QKnicfYRzBQv`&QSnMywgJNkK^O%5|>cFhbvyl1%va&V*mMb&PC&ERoRT z#HAr2OZ@9A+wJ(i05AM|z3A^#BpQOJ^CLQyU~dp0Y!Lf-3yYbf$(5|Y&Fnt}6F9O< zWi;ovIekJ+VzrJRM#TF&(gs0}Y{AM1va4X`E-& zCC+Rq*~kR&;>d(4Ot5)XIl6MmR~RNYF6nuEKuo9ESp!T#vJZx<;WH$J@4$ z!d2U0+aaO!zL&VL5O~i@bg@S$0)pII*0a16<@nh(-@$=eVI81tzlnr8!AKp1F~6L4 z4}OnH`z(st4M+UtUV_uoQM1L~x>D4IK;L^9>+NwbL(DMw)aLLBk7APH^=?8#v;#?V z!2gw^&d#2s_0C7Rkp=Pe5`=le^G%-4CH(oy#d``#1+A78=hV+RTd`hCMXh+B zY;=%>dAezxysG@&n@j_QF+^`+*jELEXfiqU zu-DM>)ji`EMCJs#fzqTaN8zu8BdL%7lF=ou2T@_`8D^Ol-LbjjkSuf{pkIULE89zK z8$;x~EeQ5zv2(=7uDi}KThST3+)>d@fCxRsGn%_Vu6w$4YqU(}Onj@C+RKmIIcE)~;srG&O}V ze?8)Q{dUMMSodJc{l5~}SlcKp1mY(IPb(?hg!_ri>fUw~@% z?6A<>;0FZ;=_F2G#Oc*tZh6N#j^Lz{**0Qa)}z=SyW=Y{WbOF*2G1odJIrZm6IZ%8 z%uv$F*b}P=AoSmNB%!lSUN;tD)4wfFUSI-@G|k3PG@X1Q3Wiedse>^?sWi{ z;7{^6fR%#Uz+nHwyC8UllX~1fomsjz7ybm=yIxb_qWcs zcXsgF?Zb%C`q;Ts90iwtq#Py7P5Vdyn%W0zzKh(I6BK=$T-J#@J10}@FyrrXf?-8I zDVn}oiX3o}^FjTjI?9-&HIa6^L1GGq;t53EQBWRw(NGtAC;-gMnC%ec3u3&ogtWlw zUhhv)P*iE}q>dTRlxElY3`QDK#ZK;B6DvftEtd65`ZaWVKzi$rc;_?&!>_?n)*nX+ z@LEwpI9GFor?j}gspLPee7#~2bykith=rP6|Hj?rR6&G0@>ZYAT81Y)9JM#FaSOCJ zy6khZqweU8(^Al2z#{op$%R1L0@v1oE*S3LDttwxDhTiWmkD`yE>l!%SRs-6s;dVN_^>LeNyX*v|bQO4%{@ty7H(YKEVJSJytYHQRTLmg51m2HdS#;;P5i(}(8 z9D5{eMVAxg)0WXzkSoA(pu&lFV^CsVh8$&8@e5OaViV= z3GoTrJnUdo{qQZybNLULZ9y3u*R2l}?oXS8ekZj-8bR$&4V07jBQ_!YLT$25_0Spk zl7R0-+~n^~!sk9k?orzTH*8y7joeNrBPMr8IPLHpQ_*+BQ}!ZTabp!@1~!s7d9=k} z_i?CNbUB9IOk$=}dJDVjUrA65yrGPi#Y0%_lL!UhV)YdIN~WpOq& zK~E|P8SYvtp|jSaKV;5R*zyaoPegnw%Q(W-J5eJX$Cjhtz2?+9q*)Rs9NdOsH_zIX 
zz8{9sq88_3K-^qzWRJ>-?f`FJ4MWHiC?SnDHOmUp1+rz>4#s$A?na8erY>d!7e9WXxSBc;8wTSqWJY~l;bhH|_| zGS5}O1nXU2dGnK_&!YdJkgEEOiB6~N6r?gMAyqfmfjP!J5sXtV`hj=cEoG9O_w6Ed zY-&K{oQkY~48Gq|qEW`m&^taP>{#?eGFmToYfnOkm$6Pqx?#o-zBi z`aacUMmKr3j+?YOOiM58OO^T}la*XS5S_)_h}apF{iv*q#{g-WiPpP*3kKKY0KLI2 zK66r=oAYV5whHHy zSIm1va1i#7slnc1-e#R5>mIPpqSK{jj(!r)g?I~iaYybSX~8raeX%2m7q%9^%2>TV+-GlH#wc7`7fIVDor%fOg{*9Q-2`qx=RF@fZd`>NJhB%V=OwLf`$Y;F{XR{zXzAh zQx?2?qzM5b&`CL>HEiCf|3$|VJidiWao#!IR))^;o+4qh=B@;hs!dzV)Lt- zE^f#BDt#f#?&;@A-oISyrasa$K|2Rhc1aVIW)DN?Dh<%!Pj~ar4b!JYDXKQ8q9Rtg$>3)+G5=h}}=P(VfXz^;NdLM2G4R`~_-=^@|; z-6os93_m)z5-|A*L|PaEScSwPAi$#g1|-F-)$sK9UpaHCNs)=+Tbf~%kqzuw zK+f%`{~Zyc{~htB5p6Mo;NJ&_$ZD#RJ~-f8=?IMfc~EC@5Mw_$(R=v65YGA!8%uA6 zX6Vjz<$SK<6N3X8!Mj0l4V(@Hb~_#X!Lx0X=KlXywCz7^-hUjsQ$N&INZK8gN;{-7 zrLIJ-RMRL6rBak8w#UEhW}&F6?Ti~@%k7M(z%@jV&c@$!n>YW8kSl$@?#3f;t4wHCQ486N*J0wcVj{fKaDV^f5)SnTz?xu64wNMq*+ zG(2viWQ6EM4XdlAH%#&?%6{7!FU_ERv5z!XyjtxPI;*gHPiafxhzRVgt?2Gx z@`RJ5mpa20V%HmnPz4i0b11PV%<;zjWLBD2S>X^RWE-}d=r?qH@tCw=LQH#t5_)3Q zO`g@86E(%r_3TbuylZCX+4gw^ z!_u%Qk5(Ueqt(YhMoUYcI#ybYuC|H>dCKw8476x+;*ty+ulO!LP4VfsaB=haebfC% zhO#{?d!ElQ=)CB}vJ#caWnJoe=8B=ueBDY+Rqzk{sx(WDlECE%!kppkq9}Dy*nO*G zIpmzBF(A=M!sS@d+G4jSX60Bd@}5vbVHDq05-h-?-Lv8PZmu_~3Zv3At4PMgZMqIg zQV14NsNe0a7(OAWn0Wqc`smWk6RBlh@5Q7FtNEPeH?9(UN=DPXmd znc6HR$E&rMbe?JUn$gx6FO62oG*jo=`NsZQ*;aR97%cfLxT;PA4x>zye7n>meuR3j z(9kFG7LI^Yx_>OcYD$)y=>1)3^U>C4QzctRSrL+{6?VrhYe1Z$kqjNdOEK_68`hJ`;ex%lz zox_EZ1gY)7f{XcTKP8XZXGTX-GEnZ2@1l#V+L17D zAKj4|ZNZ0xzU+4y#M$4x1&|wKzLlrlituCJ!Fi>qsUE$VKLpdDlJSB0F%6S8lk7AXWuWt@Lrsgi?^kC?~~H}9C&B2g4mzm)871ZuxGvHF&w zZa!FvD}2W%3yw9BLWnQ!v~m-jF@gyKN5VREnomJZq^RrLsrLTM5f*NXqL=(TGl9KBR+SiL7%p{ygCEn#BR{)#?k}u4?;IptjR4phv3e`)kheh z!gO;DkY@l7v)dTZn2tjp@!ngFJJXU-HA)_`W_L8R*s};Nw=r|0Ma9>qCFYU%VU8u| zQ&N?8ckLx7m=$}mC5E+)FqG%!rkfXE*D=SOmBA{}V_nZ4#l_dNg)FLbcVEx3OJMlw zJm4okR+?n&H^+DNt7`)-xBPqaA-J`vJ0pYF69A6^ASpAr|9dfe1h?w~PnI$k6K-x5~chlPT z>&2nFp+~zC28Q}pVY*_Gu3K`7RIJJfO~v6Qi$IoY>tw^n 
zZTZuA$Bc~$xX`9p=PqVIZ^b2Y;-=+Fb`!Y{Eid(Btnivql2g+8v{zU&t!e$`j+?x$ z!4&OXhxohxRn)KMv^L)luoCKgmF&=_&-Y_+d(`A*|CJ&0d54$cW>e*;BVHVmJOoFN z)m(rUKT!@AHg&fxNaf-pYQ1b(vDXy|IBdGgLx2#UNWYzXT7&peIqgT+96a@N{HgS* zg+yWXqpQAqlY{}ci#MkmYI>SDAg7fMMKoZ37 zTi-_x&$2xhmyJH}Zux21}(rm2*q|Gj=8$0F_4t<`jO`&hDGc;CQoZj9ZK#{e$*Qw)#-S%axUs2R( znor;`Hmt>*AzgI6 z@#<1nEmNW`ht`gDS>vl{+oXv_)LvQKG{HC+HN&1F)7&-7Z|Cyu*LkRczLg?rC)ap)>wMc# zvLxcjOm0YOA9pE%mh2X7;%f2|9lR!F#f-XcXLukFPK-$x)4_PTjPK zUgsq{ zT0AEpHobbt`LDlS{Y>@oDMNlTG$iOzcZz|lz_ufnP5L=PTC_jh$wz~hmN_p zwasAZ#UU9>|r-h+Rn(3Wz_Y?HV^aMTorEas;7L^{>;1rIH=A+~bwE4R%;t>%m!o}4=)v|<=I!+3;& zT@*lCECs-3qRIsTpAsI{c+0p35SXiSLI5UaT+HDO0O5f0{|Ou_9V9|8@BXV$+N8r! zBa5AmD!s#V3%q-qTC$4o7~2^e{a&=cms&l>AO;!u4{PMLTQAmj{D}hIk2)M|u~ts* zH^Xv2Hj(=GH4gvPM*X|=6aT?(m5%ty2$^@nZRn7Az!-yKSy^)s6)6E6g`hOn=N+}l zQvI3X_@9ECX4JB^9j9_(fX@%aF&Zyb2W&ba#2}Z|l<5e(pL~}uMpt3zZsxu{xt7E;I0A zZU5R{`qMuE2jiRbUrJ_kg%asR8{@y8vq1|>24DTBH_xb*C9HPQSur9fSDL&O4j!@y z2Fv$2(Ahcxf^f5`Z~;?bK*tL4l_bFc?5~#R0AY3<0N-r?=E!qrWGsnT_1pp$Ae)q8^92+F5l##(@06NU)U}g3F4=a5IH8r|Kz+F50 zHy>vOSnd1)1ojxbqxIvXjsd3reT-dE5=_8fz6XTg0Nv&u-&6taI$OaMxM#Wgqv1`V zR|f0rwHwoV9RGyns~E{Cnw@n#23UPmuSwG0v}P{au88hmv}^_7wb#L-mJM*$NU_D1 zpB-%zh!!sZbee!sgUm)py#0NNbTUuC-1~rYsZVR%C)+@(Lzwu;$cP}md&KX@XGYOK zJN^w%W`Ts9QKpvxAPAUzvtrlHqt66K|0K42b8rQ=D*y;)F!>_D<<^(xT?d0^C&15$ zh=_nv`fcZpR{+(g6dj_uFkIGdR)hg>WU1IuGwein0%Rqey@KU{b@LjyBm52+@uU@} z2fi1k>_F;@t2qEsv){R(qnH8H6ra7vtWa^GVjnwu;$J=-+v5X)B#T-Mo&&Rj5fCOO zCSaS@tSXUb5AGg!1t0wr)JtkS!dz(2yt!MimhGXeBgVjRN~rafk#LPZG&t|PXI}&z zjOh+Qq9CjLZ4&^>W<8pF5(I$=$)U{60P`CA?U&p-r$BB1A?6aiX-D}2MDfBB9Sz<1 z39vPS09*De*}yK%Oc#s?oE75j>A9W)x_anw#5&Wp=fYhf8toH%{|Y>tb&JPu{HZ6l zncTEAvJUXSUO-h8wY#9_d!qNQqz`R$j_+d_-vXM;->j7Lgs)#Owq+h#UoU4}&RhB` zkPjHP=yO6j&@wT~PB^oopWwf`E*|vYD; z3+XK#=qmR763?ud-ZQcf9Y+Oh#!k0QUHBKL0`Tv9o_gNBHQiUhil_;B3*Z+l3X)*3 zle0`rOzoQqVjOg>qJTTrzJSP#c5{rj8vlTowFn-YYSeLZWk~ywBDg$0TmQzau|T?3 z3|+eWsA_;pE-<*HFg9IVQ{%r3X4pt00Ai;I85E~$M66qP^GB?&(!sM2ukg$Y~E(_b87m@yIJtN%5k&d{VdSMhAiUkP+v`)&D<+w|>%s2g6! 
z@|X`$1t8xJSG1mBA_6F6wB}cUn7_IiTZ%v=S~uzduyal4@3ZhGa;hSNPYVp?L&b?z z7P_%S*89(fUHl5h1(lis1qgLH&7Pth>%sE_hZyWEP|y4?P+T6PVZn3nBYTcU(hVWN z#IK6GgZ)vE|9a1%n-%(-1l{`<^9T(9uSaVRv%y(?K=u{%BGA-g+Y`ymwpj%VE0zxy z=m2N6uUw~rFdKKz#>b}sfaG*%n(rCt5p~}xJ7)Wr+!`~86^|mJ!XpeCxn8piv7AzI zrymLsVwZaqe>8$ruh(>Obt$fH|Mf3T#{$r~PUs2#Eul}d18`Q#=F)0n;kOWl!0tAH z^KSd61cjXTs`_b>_HsYrSVBsxHR2Z1;=R111j!@B(Q!T<$&y3IPh0Lpwa35sN8`%w zcQ-xeUpC&TX+95<1>RdK%I)VPsK_fG`^ONptpI|}Jkf|->0ln)sR_~vHUlKnhyMBp z!R9vfA5Bhy4p3KSX6!^b%{nwUNOk?^foGkK{I%S089O{LU%#?~A=4>xp{IZ5Y3vKy z)4@lsv`bkBSMO^Vk8w8S`Iuo20O0)#zsZ+Z3n@sJ&!cnPkFKr#B-`9(p$}2gVfN_3 zC={LpOwfCDygY;QC3@CKHz3NRmoBy;+QD!8|NZa&u;fj&L@KOW7rE?oQ2yrcq+?y3 zu0h?pzhauWRJHf(hI#{$Vg8RhS^19-4gUMM;D7I=Lj4fKsYydg;n|!L`{uSo7C48~ z;rB}l5QFaa5WG+k38gp{5O=zCR2h;H9#UzB092dEPV@XCmufj5MToJ*PUiL2ad1Tf zL%#|a5@pOa3b+G5I6(veIKA;TNaKdPFj(puz~85EX7YST6db9~83_RVZzUoC{u%hR z;?tnLLhE;9h#WvW&K$odai;h}27!17bs6Lg#Y+c;s+^A`t$8Gq{AGX-eSlVG5KGk& z9~n!h$Xt{=I=#Wu8Jtwqs(ENbBgUYXY3uHi>2Y>aZTnRhgAt@LY`SlKUQQM?uS?c_ zdqWLp0-(p@K!Remy04ZEUh(J)KF*YD3>dPu4{R$MF(!%muJJs_3}cHw?4_0)#eYt1 zKEwcS@&p8j61DBK3}TxA{mYP5(5Cr)`!#!f;+*-HLi^^{;YEI{8iaFM+kg`!1uhO% zVd{5mz&KYc$+D{F8pp@CTK0JiF`8DgG^^tLb2?@FX7grevWWs_mY-&3vgZL&U!!or zbLskI)wb`H8ZbL_R5^1PCX-HvHH=V|B&Hw+fx-sPM5WEN(sFb_L2pBRm@DFwlUWi@ ztEjyeqL9SvRpV|L)uYIguaH#q=WJyx#q}}pk5sr672N$q`Lp1>mDcrV8l~xz9(clG zt1_%vRT$KWE%*G&H+4#V4vQQj`(-zCToyJd05TTaw0qNi<0aegnX!1^_Q9-mN*+k0 zTffZ}vXS3wE?2@5KJ@%ZSl(|y7Ly51!e%de&oMPF7QKflOQwr<6}0*}J(BnUuYnqK zb(1-wXf`m9T+t|t`m8%w+&*4=^=Vw|cKt&rQqJz4gsdE|65#skDpv+1iFYJqA+y}f z4A473|6>Hl6#PS%#^zH}fA{C(T#5RAjtRq=X1?C}Ftw*?&xO!^&zM4qd6 zoukNX8=oo}KG(G@uGs2rkcCPRhi>c@wfk)>9B<%}>nI0l7MA8On(O3Zq~(K$3Toz+ ztHn=;UCaTy1;FL{Ld54w##t!@yJ^`ws?(_wmW?+^f)}F)aL%D@6F&gZ+iC~gwRDj_ z!zXp(dyr+<|FAz)pR8=P6US~2z;((RxdEgel+VlzjRl4 zzDPRFkQ2vu@sMTxVEOlZWd?q`qRV5CeERiy7{^g(XEf|H>%&ve-;xnoe`$0l<@}Yx zupeTtq`EpaM%en!|LXIrei)2V`n^zN;k%HOq?=duugYC3n;6!g2zwk%kWnj46KcKi zhf1x|Sl~L5+&vY3v{D4W9l|{=Vk4dMCuuC6OuZQ0eWS 
zKR}d$!SLwV6Yh{fZ}Z418;`lxkeJ!)N5YI*p@$khGR~Y>U(>*9R@NI@;zFzxh%A_w z9#Un5F6^vckcDBWQfNYNAhA6e6Ym~=hw9cRqmwE1ZUf}9>5a<<^?@4MBCUhYK7Y<= zd6XS&!IMhZk~z=jNM#qYMnkmn2+`B?rjtme$k~WZz}#D^7Vuqmx7OR5OCS?*e)3&; z^|^4wUl6SLILpMeo|~OsNFMgd+zqZ^ zvh5rp*}AX|u9I#qUK^*rE%))`xWHhUSbc`JE6X$piHIBoYm>e*_15D6yZN3^xSk}7 zlbh)4IGxIQjXaX{3@){N41ScPgl>C_UR`=t+ZyaUv2%{l-;=&0_lODy4&3SD<^p$a zd{lHiBxTwb)bpI}se>c@gYN0pH!fLK+B?Hp;E}IW5MK66Rh}KenTjm6-0*AzL)k}o>eWyKDF%j53CZ7O zgObp}+{hQ2>BL}0hD2~mB|g6*&AiM&~Z9dCHk zLL7jp9$z#4x8$V%8ohMg+wm3{yhT5PfFwbc4yO@c;W2m)+41M7N zT0oKxOw!cCp z?gXsF+k)LAC}FY-8iiLQG;f-|Y%Dy{whql$j>F0lY+3i8eyVtc3mM52peza;8GreU z-2<`I_QG$SU8OPmj~|N8KDW_0(`8Y~br_y=zYlius!6Yc6PQh)JPfW=Sr!(@WeVKn zn(RB?m1<4Y7k$}$BV@b!pz_UV7^=oDWOj(pa{Y9r<~XH*NuE5ft5ytRWJ-#FzhTmzj?UkU<4$`OX%?H|OyI;m2*nM@MY|8F|M^ z`ws9lv5*k`=wZ@^jdVS$5`N?$V)ssVt&;M1Z>B?6J$z|1oFU1Xq{m;YQndd9hI|M= zq?0xjN)NXEXGJkyrG(htY#?`_q^21@BsJB;Ggt*Dm4Mxq$ID`GHW|yRus>g$?w&@0 zFX{1{94k|-XQffwapTcvq2eZ_Pv=pm`GS5O|Qwkcnm#iUlnlB@&{FveAf99lF@IK+~Ohv z(el~ze-;w@uT6KdLd&Yhq;Rj>COw;PYTn;@ojcxipb@E8h^k4Xb7^dEVjsKAtI9_; zxG_Id`xW{Kzf$13#fXxSlcwm1QrxyA`@T~QqpQl2&RB0JKdsnjWeqd_s$$!DI$lb7 zd8%V%2usq`f0JA_B#(DUso3l&gbok;InLwV|r2vnfOteg0TsnN#y{DM}Uq2E&`&Mr> zHmsA~!iy^=DYAsvW;|N77jZDJgtd5LHkI$BT#FIZvTVcbZI4-PZHm>Z$gM8A+jC`J zt%6YB9vF> @@ -19,22 +19,27 @@ allocated in each location. If the number of nodes in each location is unbalanced and there are a lot of replicas, replica shards might be left unassigned. +TIP: Learn more about <>. + [[enabling-awareness]] ===== Enabling shard allocation awareness To enable shard allocation awareness: -. Specify the location of each node with a custom node attribute. For example, -if you want Elasticsearch to distribute shards across different racks, you might -set an awareness attribute called `rack_id` in each node's `elasticsearch.yml` -config file. +. Specify the location of each node with a custom node attribute. 
For example, +if you want Elasticsearch to distribute shards across different racks, you might +use an awareness attribute called `rack_id`. ++ +You can set custom attributes in two ways: + +- By editing the `elasticsearch.yml` config file: + [source,yaml] -------------------------------------------------------- node.attr.rack_id: rack_one -------------------------------------------------------- + -You can also set custom attributes when you start a node: +- Using the `-E` command line argument when you start a node: + [source,sh] -------------------------------------------------------- @@ -56,17 +61,33 @@ cluster.routing.allocation.awareness.attributes: rack_id <1> + You can also use the <> API to set or update -a cluster's awareness attributes. +a cluster's awareness attributes: ++ +[source,console] +-------------------------------------------------- +PUT /_cluster/settings +{ + "persistent" : { + "cluster.routing.allocation.awareness.attributes" : "rack_id" + } +} +-------------------------------------------------- With this example configuration, if you start two nodes with `node.attr.rack_id` set to `rack_one` and create an index with 5 primary shards and 1 replica of each primary, all primaries and replicas are -allocated across the two nodes. +allocated across the two node. + +.All primaries and replicas allocated across two nodes in the same rack +image::images/shard-allocation/shard-allocation-awareness-one-rack.png[All primaries and replicas are allocated across two nodes in the same rack] If you add two nodes with `node.attr.rack_id` set to `rack_two`, {es} moves shards to the new nodes, ensuring (if possible) that no two copies of the same shard are in the same rack. 
+.Primaries and replicas allocated across four nodes in two racks, with no two copies of the same shard in the same rack +image::images/shard-allocation/shard-allocation-awareness-two-racks.png[Primaries and replicas are allocated across four nodes in two racks with no two copies of the same shard in the same rack] + If `rack_two` fails and takes down both its nodes, by default {es} allocates the lost shard copies to nodes in `rack_one`. To prevent multiple copies of a particular shard from being allocated in the same location, you can From 91f9cf7290786cbb3e4634cca1a69834f61bf66a Mon Sep 17 00:00:00 2001 From: Nik Everett Date: Wed, 8 May 2024 13:03:13 -0400 Subject: [PATCH 065/117] ESQL: Move a few more test out of IT_test_only (#108377) This moves the "skip" logic from our IT_test_only suffix into a new feature - this one is historical `esql.enrich_load`. This feature is not supported by `CsvTests` but is supported across all tests. --- ...g-IT_tests_only.csv-spec => blog.csv-spec} | 3 + .../resources/enrich-IT_tests_only.csv-spec | 350 --------------- .../src/main/resources/enrich.csv-spec | 409 +++++++++++++++++- .../xpack/esql/plugin/EsqlFeatures.java | 9 +- .../elasticsearch/xpack/esql/CsvTests.java | 1 + 5 files changed, 419 insertions(+), 353 deletions(-) rename x-pack/plugin/esql/qa/testFixtures/src/main/resources/{blog-IT_tests_only.csv-spec => blog.csv-spec} (88%) delete mode 100644 x-pack/plugin/esql/qa/testFixtures/src/main/resources/enrich-IT_tests_only.csv-spec diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/blog-IT_tests_only.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/blog.csv-spec similarity index 88% rename from x-pack/plugin/esql/qa/testFixtures/src/main/resources/blog-IT_tests_only.csv-spec rename to x-pack/plugin/esql/qa/testFixtures/src/main/resources/blog.csv-spec index 6ddc9601db4ac..64c4641b2ca01 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/blog-IT_tests_only.csv-spec +++ 
b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/blog.csv-spec @@ -1,4 +1,7 @@ +# Examples that were published in a blog post + 2023-08-08.full-blown-query +required_feature: esql.enrich_load FROM employees | WHERE still_hired == true diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/enrich-IT_tests_only.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/enrich-IT_tests_only.csv-spec deleted file mode 100644 index 367fbf044deed..0000000000000 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/enrich-IT_tests_only.csv-spec +++ /dev/null @@ -1,350 +0,0 @@ -simple -row language_code = "1" -| enrich languages_policy -; - -language_code:keyword | language_name:keyword -1 | English -; - - -enrichOn -from employees | sort emp_no | limit 1 | eval x = to_string(languages) | enrich languages_policy on x | keep emp_no, language_name; - -emp_no:integer | language_name:keyword -10001 | French -; - - -enrichOn2 -from employees | eval x = to_string(languages) | enrich languages_policy on x | keep emp_no, language_name | sort emp_no | limit 1 ; - -emp_no:integer | language_name:keyword -10001 | French -; - -simpleSortLimit -from employees | eval x = to_string(languages) | enrich languages_policy on x | keep emp_no, language_name | sort emp_no | limit 1; - -emp_no:integer | language_name:keyword -10001 | French -; - - -with -from employees | eval x = to_string(languages) | keep emp_no, x | sort emp_no | limit 1 -| enrich languages_policy on x with language_name; - -emp_no:integer | x:keyword | language_name:keyword -10001 | 2 | French -; - - -withAlias -from employees | sort emp_no | limit 3 | eval x = to_string(languages) | keep emp_no, x -| enrich languages_policy on x with lang = language_name; - -emp_no:integer | x:keyword | lang:keyword -10001 | 2 | French -10002 | 5 | null -10003 | 4 | German -; - - -withAliasSort -from employees | eval x = to_string(languages) | keep emp_no, x | sort emp_no | limit 3 -| enrich languages_policy 
on x with lang = language_name; - -emp_no:integer | x:keyword | lang:keyword -10001 | 2 | French -10002 | 5 | null -10003 | 4 | German -; - - -withAliasOverwriteName#[skip:-8.13.0] -from employees | sort emp_no -| eval x = to_string(languages) | enrich languages_policy on x with emp_no = language_name -| keep emp_no | limit 1 -; - -emp_no:keyword -French -; - - -withAliasAndPlain -from employees | sort emp_no desc | limit 3 | eval x = to_string(languages) | keep emp_no, x -| enrich languages_policy on x with lang = language_name, language_name; - -emp_no:integer | x:keyword | lang:keyword | language_name:keyword -10100 | 4 | German | German -10099 | 2 | French | French -10098 | 4 | German | German -; - - -withTwoAliasesSameProp -from employees | sort emp_no | limit 1 | eval x = to_string(languages) | keep emp_no, x -| enrich languages_policy on x with lang = language_name, lang2 = language_name; - -emp_no:integer | x:keyword | lang:keyword | lang2:keyword -10001 | 2 | French | French -; - - -redundantWith -from employees | sort emp_no | limit 1 | eval x = to_string(languages) | keep emp_no, x -| enrich languages_policy on x with language_name, language_name; - -emp_no:integer | x:keyword | language_name:keyword -10001 | 2 | French -; - - -nullInput -from employees | where emp_no == 10017 | keep emp_no, gender -| enrich languages_policy on gender with language_name, language_name; - -emp_no:integer | gender:keyword | language_name:keyword -10017 | null | null -; - - -constantNullInput -from employees | where emp_no == 10020 | eval x = to_string(languages) | keep emp_no, x -| enrich languages_policy on x with language_name, language_name; - -emp_no:integer | x:keyword | language_name:keyword -10020 | null | null -; - - -multipleEnrich -row a = "1", b = "2", c = "10" -| enrich languages_policy on a with a_lang = language_name -| enrich languages_policy on b with b_lang = language_name -| enrich languages_policy on c with c_lang = language_name; - -a:keyword | 
b:keyword | c:keyword | a_lang:keyword | b_lang:keyword | c_lang:keyword -1 | 2 | 10 | English | French | null -; - - -enrichEval -from employees | eval x = to_string(languages) -| enrich languages_policy on x with lang = language_name -| eval language = concat(x, "-", lang) -| keep emp_no, x, lang, language -| sort emp_no desc | limit 3; - -emp_no:integer | x:keyword | lang:keyword | language:keyword -10100 | 4 | German | 4-German -10099 | 2 | French | 2-French -10098 | 4 | German | 4-German -; - - -multivalue -required_feature: esql.mv_sort -row a = ["1", "2"] | enrich languages_policy on a with a_lang = language_name | eval a_lang = mv_sort(a_lang); - -a:keyword | a_lang:keyword -["1", "2"] | ["English", "French"] -; - - -enrichCidr#[skip:-8.13.99, reason:enrich for cidr added in 8.14.0] -FROM sample_data -| ENRICH client_cidr_policy ON client_ip WITH env -| EVAL max_env = MV_MAX(env), count_env = MV_COUNT(env) -| KEEP client_ip, count_env, max_env -| SORT client_ip -; - -client_ip:ip | count_env:i | max_env:keyword -172.21.0.5 | 1 | Development -172.21.2.113 | 2 | QA -172.21.2.162 | 2 | QA -172.21.3.15 | 2 | Production -172.21.3.15 | 2 | Production -172.21.3.15 | 2 | Production -172.21.3.15 | 2 | Production -; - - -enrichCidr2#[skip:-8.99.99, reason:ip_range support not added yet] -FROM sample_data -| ENRICH client_cidr_policy ON client_ip WITH env, client_cidr -| KEEP client_ip, env, client_cidr -| SORT client_ip -; - -client_ip:ip | env:keyword | client_cidr:ip_range -172.21.3.15 | [Development, Production] | 172.21.3.0/24 -172.21.3.15 | [Development, Production] | 172.21.3.0/24 -172.21.3.15 | [Development, Production] | 172.21.3.0/24 -172.21.3.15 | [Development, Production] | 172.21.3.0/24 -172.21.0.5 | Development | 172.21.0.0/16 -172.21.2.113 | [Development, QA] | 172.21.2.0/24 -172.21.2.162 | [Development, QA] | 172.21.2.0/24 -; - - -enrichAgesStatsYear#[skip:-8.13.99, reason:ENRICH extended in 8.14.0] -FROM employees -| WHERE birth_date > "1960-01-01" -| 
EVAL birth_year = DATE_EXTRACT("year", birth_date) -| EVAL age = 2022 - birth_year -| ENRICH ages_policy ON age WITH age_group = description -| STATS count=count(age_group) BY age_group, birth_year -| KEEP birth_year, age_group, count -| SORT birth_year DESC -; - -birth_year:long | age_group:keyword | count:long -1965 | Middle-aged | 1 -1964 | Middle-aged | 4 -1963 | Middle-aged | 7 -1962 | Senior | 6 -1961 | Senior | 8 -1960 | Senior | 8 -; - - -enrichAgesStatsAgeGroup#[skip:-8.13.99, reason:ENRICH extended in 8.14.0] -FROM employees -| WHERE birth_date IS NOT NULL -| EVAL age = 2022 - DATE_EXTRACT("year", birth_date) -| ENRICH ages_policy ON age WITH age_group = description -| STATS count=count(age_group) BY age_group -| SORT count DESC -; - -count:long | age_group:keyword -78 | Senior -12 | Middle-aged -; - - -enrichHeightsStats#[skip:-8.13.99, reason:ENRICH extended in 8.14.0] -FROM employees -| ENRICH heights_policy ON height WITH height_group = description -| STATS count=count(height_group), min=min(height), max=max(height) BY height_group -| KEEP height_group, min, max, count -| SORT min ASC -; - -height_group:k | min:double | max:double | count:long -Very Short | 1.41 | 1.48 | 9 -Short | 1.5 | 1.59 | 20 -Medium Height | 1.61 | 1.79 | 26 -Tall | 1.8 | 1.99 | 25 -Very Tall | 2.0 | 2.1 | 20 -; - - -enrichDecadesStats#[skip:-8.13.99, reason:ENRICH extended in 8.14.0] -FROM employees -| ENRICH decades_policy ON birth_date WITH birth_decade = decade, birth_description = description -| ENRICH decades_policy ON hire_date WITH hire_decade = decade, hire_description = description -| STATS count=count(*) BY birth_decade, hire_decade, birth_description, hire_description -| KEEP birth_decade, hire_decade, birth_description, hire_description, count -| SORT birth_decade DESC, hire_decade DESC -; - -birth_decade:long | hire_decade:l | birth_description:k | hire_description:k | count:long -null | 1990 | null | Nineties Nostalgia | 6 -null | 1980 | null | Radical Eighties | 
4 -1960 | 1990 | Swinging Sixties | Nineties Nostalgia | 13 -1960 | 1980 | Swinging Sixties | Radical Eighties | 21 -1950 | 1990 | Nifty Fifties | Nineties Nostalgia | 22 -1950 | 1980 | Nifty Fifties | Radical Eighties | 34 -; - - -spatialEnrichmentKeywordMatch#[skip:-8.13.99, reason:ENRICH extended in 8.14.0] -FROM airports -| WHERE abbrev == "CPH" -| ENRICH city_names ON city WITH airport, region, city_boundary -| EVAL boundary_wkt_length = LENGTH(TO_STRING(city_boundary)) -| KEEP abbrev, city, city_location, country, location, name, airport, region, boundary_wkt_length -; - -abbrev:keyword | city:keyword | city_location:geo_point | country:keyword | location:geo_point | name:text | airport:text | region:text | boundary_wkt_length:integer -CPH | Copenhagen | POINT(12.5683 55.6761) | Denmark | POINT(12.6493508684508 55.6285017221528) | Copenhagen | Copenhagen | Københavns Kommune | 265 -; - - -spatialEnrichmentGeoMatch#[skip:-8.13.99, reason:ENRICH extended in 8.14.0] -FROM airports -| WHERE abbrev == "CPH" -| ENRICH city_boundaries ON city_location WITH airport, region, city_boundary -| EVAL boundary_wkt_length = LENGTH(TO_STRING(city_boundary)) -| KEEP abbrev, city, city_location, country, location, name, airport, region, boundary_wkt_length -; - -abbrev:keyword | city:keyword | city_location:geo_point | country:keyword | location:geo_point | name:text | airport:text | region:text | boundary_wkt_length:integer -CPH | Copenhagen | POINT(12.5683 55.6761) | Denmark | POINT(12.6493508684508 55.6285017221528) | Copenhagen | Copenhagen | Københavns Kommune | 265 -; - - -spatialEnrichmentGeoMatchStats#[skip:-8.13.99, reason:ENRICH extended in 8.14.0] -required_feature: esql.mv_warn - -FROM airports -| ENRICH city_boundaries ON city_location WITH airport, region, city_boundary -| EVAL boundary_wkt_length = LENGTH(TO_STRING(city_boundary)) -| STATS city_centroid = ST_CENTROID_AGG(city_location), count = COUNT(city_location), min_wkt = MIN(boundary_wkt_length), max_wkt = 
MAX(boundary_wkt_length) -; -warning:Line 3:30: evaluation of [LENGTH(TO_STRING(city_boundary))] failed, treating result as null. Only first 20 failures recorded. -warning:Line 3:30: java.lang.IllegalArgumentException: single-value function encountered multi-value - -city_centroid:geo_point | count:long | min_wkt:integer | max_wkt:integer -POINT(1.396561 24.127649) | 872 | 88 | 1044 -; - - -spatialEnrichmentKeywordMatchAndSpatialPredicate#[skip:-8.13.99, reason:st_intersects added in 8.14] -FROM airports -| ENRICH city_names ON city WITH airport, region, city_boundary -| MV_EXPAND city_boundary -| EVAL airport_in_city = ST_INTERSECTS(location, city_boundary) -| STATS count=COUNT(*) BY airport_in_city -| SORT count ASC -; - -count:long | airport_in_city:boolean -114 | null -396 | true -455 | false -; - - -spatialEnrichmentKeywordMatchAndSpatialAggregation#[skip:-8.13.99, reason:st_intersects added in 8.14] -FROM airports -| ENRICH city_names ON city WITH airport, region, city_boundary -| MV_EXPAND city_boundary -| EVAL airport_in_city = ST_INTERSECTS(location, city_boundary) -| STATS count=COUNT(*), centroid=ST_CENTROID_AGG(location) BY airport_in_city -| SORT count ASC -; - -count:long | centroid:geo_point | airport_in_city:boolean -114 | POINT (-24.750062 31.575549) | null -396 | POINT (-2.534797 20.667712) | true -455 | POINT (3.090752 27.676442) | false -; - - -spatialEnrichmentTextMatch#[skip:-8.13.99, reason:ENRICH extended in 8.14.0] -FROM airports -| WHERE abbrev == "IDR" -| ENRICH city_airports ON name WITH city_name = city, region, city_boundary -| EVAL boundary_wkt_length = LENGTH(TO_STRING(city_boundary)) -| KEEP abbrev, city_name, city_location, country, location, name, name, region, boundary_wkt_length -; - -abbrev:k | city_name:k | city_location:geo_point | country:k | location:geo_point | name:text | region:text | boundary_wkt_length:i -IDR | Indore | POINT(75.8472 22.7167) | India | POINT(75.8092915005895 22.727749187571) | Devi Ahilyabai Holkar 
Int'l | Indore City | 231 -; diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/enrich.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/enrich.csv-spec index f5847260bbb16..e84e79748c179 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/enrich.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/enrich.csv-spec @@ -1,10 +1,10 @@ -simple +simpleNoLoad from employees | eval x = 1, y = to_string(languages) | enrich languages_policy on y | where x > 1 | keep emp_no, language_name | limit 1; emp_no:integer | language_name:keyword ; -docsGettingStartedEnrich +docsGettingStartedEnrichNoLoad // tag::gs-enrich[] FROM sample_data | KEEP @timestamp, client_ip, event_duration @@ -30,3 +30,408 @@ FROM sample_data median_duration:double | env:keyword ; + +simple +required_feature: esql.enrich_load + +row language_code = "1" +| enrich languages_policy +; + +language_code:keyword | language_name:keyword +1 | English +; + + +enrichOn +required_feature: esql.enrich_load + +from employees | sort emp_no | limit 1 | eval x = to_string(languages) | enrich languages_policy on x | keep emp_no, language_name; + +emp_no:integer | language_name:keyword +10001 | French +; + + +enrichOn2 +required_feature: esql.enrich_load + +from employees | eval x = to_string(languages) | enrich languages_policy on x | keep emp_no, language_name | sort emp_no | limit 1 ; + +emp_no:integer | language_name:keyword +10001 | French +; + + +simpleSortLimit +required_feature: esql.enrich_load + +from employees | eval x = to_string(languages) | enrich languages_policy on x | keep emp_no, language_name | sort emp_no | limit 1; + +emp_no:integer | language_name:keyword +10001 | French +; + +with +required_feature: esql.enrich_load + +from employees | eval x = to_string(languages) | keep emp_no, x | sort emp_no | limit 1 +| enrich languages_policy on x with language_name; + +emp_no:integer | x:keyword | language_name:keyword +10001 | 2 | French +; + + 
+withAlias +required_feature: esql.enrich_load + +from employees | sort emp_no | limit 3 | eval x = to_string(languages) | keep emp_no, x +| enrich languages_policy on x with lang = language_name; + +emp_no:integer | x:keyword | lang:keyword +10001 | 2 | French +10002 | 5 | null +10003 | 4 | German +; + + +withAliasSort +required_feature: esql.enrich_load + +from employees | eval x = to_string(languages) | keep emp_no, x | sort emp_no | limit 3 +| enrich languages_policy on x with lang = language_name; + +emp_no:integer | x:keyword | lang:keyword +10001 | 2 | French +10002 | 5 | null +10003 | 4 | German +; + + +withAliasOverwriteName#[skip:-8.13.0] +required_feature: esql.enrich_load + +from employees | sort emp_no +| eval x = to_string(languages) | enrich languages_policy on x with emp_no = language_name +| keep emp_no | limit 1 +; + +emp_no:keyword +French +; + +withAliasAndPlain +required_feature: esql.enrich_load + +from employees | sort emp_no desc | limit 3 | eval x = to_string(languages) | keep emp_no, x +| enrich languages_policy on x with lang = language_name, language_name; + +emp_no:integer | x:keyword | lang:keyword | language_name:keyword +10100 | 4 | German | German +10099 | 2 | French | French +10098 | 4 | German | German +; + + +withTwoAliasesSameProp +required_feature: esql.enrich_load + +from employees | sort emp_no | limit 1 | eval x = to_string(languages) | keep emp_no, x +| enrich languages_policy on x with lang = language_name, lang2 = language_name; + +emp_no:integer | x:keyword | lang:keyword | lang2:keyword +10001 | 2 | French | French +; + + +redundantWith +required_feature: esql.enrich_load + +from employees | sort emp_no | limit 1 | eval x = to_string(languages) | keep emp_no, x +| enrich languages_policy on x with language_name, language_name; + +emp_no:integer | x:keyword | language_name:keyword +10001 | 2 | French +; + + +nullInput +required_feature: esql.enrich_load + +from employees | where emp_no == 10017 | keep emp_no, gender +| 
enrich languages_policy on gender with language_name, language_name; + +emp_no:integer | gender:keyword | language_name:keyword +10017 | null | null +; + + +constantNullInput +required_feature: esql.enrich_load + +from employees | where emp_no == 10020 | eval x = to_string(languages) | keep emp_no, x +| enrich languages_policy on x with language_name, language_name; + +emp_no:integer | x:keyword | language_name:keyword +10020 | null | null +; + + +multipleEnrich +required_feature: esql.enrich_load + +row a = "1", b = "2", c = "10" +| enrich languages_policy on a with a_lang = language_name +| enrich languages_policy on b with b_lang = language_name +| enrich languages_policy on c with c_lang = language_name; + +a:keyword | b:keyword | c:keyword | a_lang:keyword | b_lang:keyword | c_lang:keyword +1 | 2 | 10 | English | French | null +; + + +enrichEval +required_feature: esql.enrich_load + +from employees | eval x = to_string(languages) +| enrich languages_policy on x with lang = language_name +| eval language = concat(x, "-", lang) +| keep emp_no, x, lang, language +| sort emp_no desc | limit 3; + +emp_no:integer | x:keyword | lang:keyword | language:keyword +10100 | 4 | German | 4-German +10099 | 2 | French | 2-French +10098 | 4 | German | 4-German +; + + +multivalue +required_feature: esql.enrich_load +required_feature: esql.mv_sort + +row a = ["1", "2"] | enrich languages_policy on a with a_lang = language_name | eval a_lang = mv_sort(a_lang); + +a:keyword | a_lang:keyword +["1", "2"] | ["English", "French"] +; + + +enrichCidr#[skip:-8.13.99, reason:enrich for cidr added in 8.14.0] +required_feature: esql.enrich_load + +FROM sample_data +| ENRICH client_cidr_policy ON client_ip WITH env +| EVAL max_env = MV_MAX(env), count_env = MV_COUNT(env) +| KEEP client_ip, count_env, max_env +| SORT client_ip +; + +client_ip:ip | count_env:i | max_env:keyword +172.21.0.5 | 1 | Development +172.21.2.113 | 2 | QA +172.21.2.162 | 2 | QA +172.21.3.15 | 2 | Production 
+172.21.3.15 | 2 | Production +172.21.3.15 | 2 | Production +172.21.3.15 | 2 | Production +; + + +enrichCidr2#[skip:-8.99.99, reason:ip_range support not added yet] +required_feature: esql.enrich_load + +FROM sample_data +| ENRICH client_cidr_policy ON client_ip WITH env, client_cidr +| KEEP client_ip, env, client_cidr +| SORT client_ip +; + +client_ip:ip | env:keyword | client_cidr:ip_range +172.21.3.15 | [Development, Production] | 172.21.3.0/24 +172.21.3.15 | [Development, Production] | 172.21.3.0/24 +172.21.3.15 | [Development, Production] | 172.21.3.0/24 +172.21.3.15 | [Development, Production] | 172.21.3.0/24 +172.21.0.5 | Development | 172.21.0.0/16 +172.21.2.113 | [Development, QA] | 172.21.2.0/24 +172.21.2.162 | [Development, QA] | 172.21.2.0/24 +; + + +enrichAgesStatsYear#[skip:-8.13.99, reason:ENRICH extended in 8.14.0] +required_feature: esql.enrich_load + +FROM employees +| WHERE birth_date > "1960-01-01" +| EVAL birth_year = DATE_EXTRACT("year", birth_date) +| EVAL age = 2022 - birth_year +| ENRICH ages_policy ON age WITH age_group = description +| STATS count=count(age_group) BY age_group, birth_year +| KEEP birth_year, age_group, count +| SORT birth_year DESC +; + +birth_year:long | age_group:keyword | count:long +1965 | Middle-aged | 1 +1964 | Middle-aged | 4 +1963 | Middle-aged | 7 +1962 | Senior | 6 +1961 | Senior | 8 +1960 | Senior | 8 +; + + +enrichAgesStatsAgeGroup#[skip:-8.13.99, reason:ENRICH extended in 8.14.0] +required_feature: esql.enrich_load + +FROM employees +| WHERE birth_date IS NOT NULL +| EVAL age = 2022 - DATE_EXTRACT("year", birth_date) +| ENRICH ages_policy ON age WITH age_group = description +| STATS count=count(age_group) BY age_group +| SORT count DESC +; + +count:long | age_group:keyword +78 | Senior +12 | Middle-aged +; + + +enrichHeightsStats#[skip:-8.13.99, reason:ENRICH extended in 8.14.0] +required_feature: esql.enrich_load + +FROM employees +| ENRICH heights_policy ON height WITH height_group = description +| STATS 
count=count(height_group), min=min(height), max=max(height) BY height_group +| KEEP height_group, min, max, count +| SORT min ASC +; + +height_group:k | min:double | max:double | count:long +Very Short | 1.41 | 1.48 | 9 +Short | 1.5 | 1.59 | 20 +Medium Height | 1.61 | 1.79 | 26 +Tall | 1.8 | 1.99 | 25 +Very Tall | 2.0 | 2.1 | 20 +; + + +enrichDecadesStats#[skip:-8.13.99, reason:ENRICH extended in 8.14.0] +required_feature: esql.enrich_load + +FROM employees +| ENRICH decades_policy ON birth_date WITH birth_decade = decade, birth_description = description +| ENRICH decades_policy ON hire_date WITH hire_decade = decade, hire_description = description +| STATS count=count(*) BY birth_decade, hire_decade, birth_description, hire_description +| KEEP birth_decade, hire_decade, birth_description, hire_description, count +| SORT birth_decade DESC, hire_decade DESC +; + +birth_decade:long | hire_decade:l | birth_description:k | hire_description:k | count:long +null | 1990 | null | Nineties Nostalgia | 6 +null | 1980 | null | Radical Eighties | 4 +1960 | 1990 | Swinging Sixties | Nineties Nostalgia | 13 +1960 | 1980 | Swinging Sixties | Radical Eighties | 21 +1950 | 1990 | Nifty Fifties | Nineties Nostalgia | 22 +1950 | 1980 | Nifty Fifties | Radical Eighties | 34 +; + + +spatialEnrichmentKeywordMatch#[skip:-8.13.99, reason:ENRICH extended in 8.14.0] +required_feature: esql.enrich_load + +FROM airports +| WHERE abbrev == "CPH" +| ENRICH city_names ON city WITH airport, region, city_boundary +| EVAL boundary_wkt_length = LENGTH(TO_STRING(city_boundary)) +| KEEP abbrev, city, city_location, country, location, name, airport, region, boundary_wkt_length +; + +abbrev:keyword | city:keyword | city_location:geo_point | country:keyword | location:geo_point | name:text | airport:text | region:text | boundary_wkt_length:integer +CPH | Copenhagen | POINT(12.5683 55.6761) | Denmark | POINT(12.6493508684508 55.6285017221528) | Copenhagen | Copenhagen | Københavns Kommune | 265 +; + + 
+spatialEnrichmentGeoMatch#[skip:-8.13.99, reason:ENRICH extended in 8.14.0] +required_feature: esql.enrich_load + +FROM airports +| WHERE abbrev == "CPH" +| ENRICH city_boundaries ON city_location WITH airport, region, city_boundary +| EVAL boundary_wkt_length = LENGTH(TO_STRING(city_boundary)) +| KEEP abbrev, city, city_location, country, location, name, airport, region, boundary_wkt_length +; + +abbrev:keyword | city:keyword | city_location:geo_point | country:keyword | location:geo_point | name:text | airport:text | region:text | boundary_wkt_length:integer +CPH | Copenhagen | POINT(12.5683 55.6761) | Denmark | POINT(12.6493508684508 55.6285017221528) | Copenhagen | Copenhagen | Københavns Kommune | 265 +; + + +spatialEnrichmentGeoMatchStats#[skip:-8.13.99, reason:ENRICH extended in 8.14.0] +required_feature: esql.enrich_load +required_feature: esql.mv_warn + +FROM airports +| ENRICH city_boundaries ON city_location WITH airport, region, city_boundary +| EVAL boundary_wkt_length = LENGTH(TO_STRING(city_boundary)) +| STATS city_centroid = ST_CENTROID_AGG(city_location), count = COUNT(city_location), min_wkt = MIN(boundary_wkt_length), max_wkt = MAX(boundary_wkt_length) +; +warning:Line 3:30: evaluation of [LENGTH(TO_STRING(city_boundary))] failed, treating result as null. Only first 20 failures recorded. 
+warning:Line 3:30: java.lang.IllegalArgumentException: single-value function encountered multi-value + +city_centroid:geo_point | count:long | min_wkt:integer | max_wkt:integer +POINT(1.396561 24.127649) | 872 | 88 | 1044 +; + + +spatialEnrichmentKeywordMatchAndSpatialPredicate#[skip:-8.13.99, reason:st_intersects added in 8.14] +required_feature: esql.enrich_load + +FROM airports +| ENRICH city_names ON city WITH airport, region, city_boundary +| MV_EXPAND city_boundary +| EVAL airport_in_city = ST_INTERSECTS(location, city_boundary) +| STATS count=COUNT(*) BY airport_in_city +| SORT count ASC +; + +count:long | airport_in_city:boolean +114 | null +396 | true +455 | false +; + + +spatialEnrichmentKeywordMatchAndSpatialAggregation#[skip:-8.13.99, reason:st_intersects added in 8.14] +required_feature: esql.enrich_load + +FROM airports +| ENRICH city_names ON city WITH airport, region, city_boundary +| MV_EXPAND city_boundary +| EVAL airport_in_city = ST_INTERSECTS(location, city_boundary) +| STATS count=COUNT(*), centroid=ST_CENTROID_AGG(location) BY airport_in_city +| SORT count ASC +; + +count:long | centroid:geo_point | airport_in_city:boolean +114 | POINT (-24.750062 31.575549) | null +396 | POINT (-2.534797 20.667712) | true +455 | POINT (3.090752 27.676442) | false +; + + +spatialEnrichmentTextMatch#[skip:-8.13.99, reason:ENRICH extended in 8.14.0] +required_feature: esql.enrich_load + +FROM airports +| WHERE abbrev == "IDR" +| ENRICH city_airports ON name WITH city_name = city, region, city_boundary +| EVAL boundary_wkt_length = LENGTH(TO_STRING(city_boundary)) +| KEEP abbrev, city_name, city_location, country, location, name, name, region, boundary_wkt_length +; + +abbrev:k | city_name:k | city_location:geo_point | country:k | location:geo_point | name:text | region:text | boundary_wkt_length:i +IDR | Indore | POINT(75.8472 22.7167) | India | POINT(75.8092915005895 22.727749187571) | Devi Ahilyabai Holkar Int'l | Indore City | 231 +; diff --git 
a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/EsqlFeatures.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/EsqlFeatures.java index 059eec771efe8..4f852264193b4 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/EsqlFeatures.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/EsqlFeatures.java @@ -136,6 +136,12 @@ public class EsqlFeatures implements FeatureSpecification { */ public static final NodeFeature METADATA_FIELDS = new NodeFeature("esql.metadata_fields"); + /** + * Support for loading values over enrich. This is supported by all versions of ESQL but not + * the unit test CsvTests. + */ + public static final NodeFeature ENRICH_LOAD = new NodeFeature("esql.enrich_load"); + /** * Support for timespan units abbreviations */ @@ -174,7 +180,8 @@ public Map getHistoricalFeatures() { Map.entry(MV_WARN, Version.V_8_12_0), Map.entry(SPATIAL_POINTS, Version.V_8_12_0), Map.entry(CONVERT_WARN, Version.V_8_12_0), - Map.entry(POW_DOUBLE, Version.V_8_12_0) + Map.entry(POW_DOUBLE, Version.V_8_12_0), + Map.entry(ENRICH_LOAD, Version.V_8_12_0) ); } } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java index c865b21723a9e..3539138e670eb 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java @@ -224,6 +224,7 @@ public final void test() throws Throwable { * are tested in integration tests. 
*/ assumeFalse("metadata fields aren't supported", testCase.requiredFeatures.contains(EsqlFeatures.METADATA_FIELDS.id())); + assumeFalse("enrich can't load fields in csv tests", testCase.requiredFeatures.contains(EsqlFeatures.ENRICH_LOAD.id())); doTest(); } catch (Throwable th) { throw reworkException(th); From 87df295817c95c09e0df27fa4da32346b83850b7 Mon Sep 17 00:00:00 2001 From: Dianna Hohensee Date: Wed, 8 May 2024 13:34:56 -0400 Subject: [PATCH 066/117] Brief document blurb about RestClient (#107863) --- docs/internal/DistributedArchitectureGuide.md | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/docs/internal/DistributedArchitectureGuide.md b/docs/internal/DistributedArchitectureGuide.md index b8fb92b1ea15d..7f10a1b3a8cae 100644 --- a/docs/internal/DistributedArchitectureGuide.md +++ b/docs/internal/DistributedArchitectureGuide.md @@ -133,6 +133,14 @@ are only used for internode operations/communications. ### Work Queues +### RestClient + +The `RestClient` is primarily used in testing, to send requests against cluster nodes in the same format as would users. There +are some uses of `RestClient`, via `RestClientBuilder`, in the production code. For example, remote reindex leverages the +`RestClient` internally as the REST client to the remote elasticsearch cluster, and to take advantage of the compatibility of +`RestClient` requests with much older elasticsearch versions. The `RestClient` is also used externally by the `Java API Client` +to communicate with Elasticsearch. + # Cluster Coordination (Sketch of important classes? Might inform more sections to add for details.) From 5a622b0a0719b050f6485fad9385135322d3a720 Mon Sep 17 00:00:00 2001 From: Andrew Wilkins Date: Thu, 9 May 2024 02:06:59 +0800 Subject: [PATCH 067/117] nativeaccess: try to load all located libsystemds (#108238) Linux systems with multiarch (e.g. i386 & x86_64) libraries may have libsystemd.0 in two subdirectories of an entry in java.library.path. 
For example, libsystemd.so.0 may be found in both /usr/lib/i386-linux-gnu and /usr/lib/x86_64-linux-gnu. Instead of attempting to load any library found, attempt all and stop as soon as one is successfully loaded. --- docs/changelog/108238.yaml | 6 ++ .../nativeaccess/jdk/JdkSystemdLibrary.java | 61 +++++++++++++------ 2 files changed, 47 insertions(+), 20 deletions(-) create mode 100644 docs/changelog/108238.yaml diff --git a/docs/changelog/108238.yaml b/docs/changelog/108238.yaml new file mode 100644 index 0000000000000..607979c2eb0ac --- /dev/null +++ b/docs/changelog/108238.yaml @@ -0,0 +1,6 @@ +pr: 108238 +summary: "Nativeaccess: try to load all located libsystemds" +area: Infra/Core +type: bug +issues: + - 107878 diff --git a/libs/native/src/main21/java/org/elasticsearch/nativeaccess/jdk/JdkSystemdLibrary.java b/libs/native/src/main21/java/org/elasticsearch/nativeaccess/jdk/JdkSystemdLibrary.java index 5313984ac6d61..0af87154960ad 100644 --- a/libs/native/src/main21/java/org/elasticsearch/nativeaccess/jdk/JdkSystemdLibrary.java +++ b/libs/native/src/main21/java/org/elasticsearch/nativeaccess/jdk/JdkSystemdLibrary.java @@ -17,7 +17,10 @@ import java.lang.foreign.MemorySegment; import java.lang.invoke.MethodHandle; import java.nio.file.Files; +import java.nio.file.Path; import java.nio.file.Paths; +import java.util.Arrays; +import java.util.List; import static java.lang.foreign.ValueLayout.ADDRESS; import static java.lang.foreign.ValueLayout.JAVA_INT; @@ -26,31 +29,49 @@ class JdkSystemdLibrary implements SystemdLibrary { static { - System.load(findLibSystemd()); - } - - // On some systems libsystemd does not have a non-versioned symlink. System.loadLibrary only knows how to find - // non-versioned library files. So we must manually check the library path to find what we need. 
- static String findLibSystemd() { - final String libsystemd = "libsystemd.so.0"; - String libpath = System.getProperty("java.library.path"); - for (String basepathStr : libpath.split(":")) { - var basepath = Paths.get(basepathStr); - if (Files.exists(basepath) == false) { - continue; + // Find and load libsystemd. We attempt all instances of + // libsystemd in case of multiarch systems, and stop when + // one is successfully loaded. If none can be loaded, + // UnsatisfiedLinkError will be thrown. + List paths = findLibSystemd(); + if (paths.isEmpty()) { + String libpath = System.getProperty("java.library.path"); + throw new UnsatisfiedLinkError("Could not find libsystemd in java.library.path: " + libpath); + } + UnsatisfiedLinkError last = null; + for (String path : paths) { + try { + System.load(path); + last = null; + break; + } catch (UnsatisfiedLinkError e) { + last = e; } - try (var stream = Files.walk(basepath)) { + } + if (last != null) { + throw last; + } + } - var foundpath = stream.filter(Files::isDirectory).map(p -> p.resolve(libsystemd)).filter(Files::exists).findAny(); - if (foundpath.isPresent()) { - return foundpath.get().toAbsolutePath().toString(); - } + // findLibSystemd returns a list of paths to instances of libsystemd + // found within java.library.path. + static List findLibSystemd() { + // Note: on some systems libsystemd does not have a non-versioned symlink. + // System.loadLibrary only knows how to find non-versioned library files, + // so we must manually check the library path to find what we need. 
+ final Path libsystemd = Paths.get("libsystemd.so.0"); + final String libpath = System.getProperty("java.library.path"); + return Arrays.stream(libpath.split(":")).map(Paths::get).filter(Files::exists).flatMap(p -> { + try { + return Files.find( + p, + Integer.MAX_VALUE, + (fp, attrs) -> (attrs.isDirectory() == false && fp.getFileName().equals(libsystemd)) + ); } catch (IOException e) { throw new UncheckedIOException(e); } - - } - throw new UnsatisfiedLinkError("Could not find " + libsystemd + " in java.library.path: " + libpath); + }).map(p -> p.toAbsolutePath().toString()).toList(); } private static final MethodHandle sd_notify$mh = downcallHandle("sd_notify", FunctionDescriptor.of(JAVA_INT, JAVA_INT, ADDRESS)); From ab40808044f582ac76dd859aa2ba30cc9b6bc790 Mon Sep 17 00:00:00 2001 From: Nhat Nguyen Date: Wed, 8 May 2024 11:32:33 -0700 Subject: [PATCH 068/117] Exchange should wait for remote sinks (#108337) Today, we do not wait for remote sinks to stop before completing the main request. While this doesn't affect correctness, it's important that we do not spawn child requests after the parent request is completed. 
Closes #105859 --- .../exchange/ExchangeSourceHandler.java | 24 +++++++++++++++---- .../exchange/ExchangeServiceTests.java | 16 ++++++++++++- .../xpack/esql/plugin/ComputeService.java | 3 +++ 3 files changed, 38 insertions(+), 5 deletions(-) diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/exchange/ExchangeSourceHandler.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/exchange/ExchangeSourceHandler.java index f1698ea401d28..adce8d8a88407 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/exchange/ExchangeSourceHandler.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/exchange/ExchangeSourceHandler.java @@ -10,6 +10,7 @@ import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.support.RefCountingListener; import org.elasticsearch.action.support.SubscribableListener; import org.elasticsearch.common.util.concurrent.AbstractRunnable; import org.elasticsearch.compute.data.Page; @@ -17,6 +18,7 @@ import org.elasticsearch.tasks.TaskCancelledException; import org.elasticsearch.transport.TransportException; +import java.util.List; import java.util.concurrent.Executor; import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicReference; @@ -89,6 +91,20 @@ public int bufferSize() { } } + public void addCompletionListener(ActionListener listener) { + buffer.addCompletionListener(ActionListener.running(() -> { + try (RefCountingListener refs = new RefCountingListener(listener)) { + for (PendingInstances pending : List.of(outstandingSinks, outstandingSources)) { + // Create an outstanding instance and then finish to complete the completionListener + // if we haven't registered any instances of exchange sinks or exchange sources before. 
+ pending.trackNewInstance(); + pending.completion.addListener(refs.acquire()); + pending.finishInstance(); + } + } + })); + } + /** * Create a new {@link ExchangeSource} for exchanging data * @@ -253,10 +269,10 @@ public Releasable addEmptySink() { private static class PendingInstances { private final AtomicInteger instances = new AtomicInteger(); - private final Releasable onComplete; + private final SubscribableListener completion = new SubscribableListener<>(); - PendingInstances(Releasable onComplete) { - this.onComplete = onComplete; + PendingInstances(Runnable onComplete) { + completion.addListener(ActionListener.running(onComplete)); } void trackNewInstance() { @@ -268,7 +284,7 @@ void finishInstance() { int refs = instances.decrementAndGet(); assert refs >= 0; if (refs == 0) { - onComplete.close(); + completion.onResponse(null); } } } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/exchange/ExchangeServiceTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/exchange/ExchangeServiceTests.java index bdaa045633dc0..51332b3c8997a 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/exchange/ExchangeServiceTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/exchange/ExchangeServiceTests.java @@ -55,6 +55,7 @@ import java.util.Collections; import java.util.List; import java.util.Set; +import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicInteger; import java.util.function.Function; import java.util.function.Supplier; @@ -94,6 +95,8 @@ public void testBasic() throws Exception { ExchangeSink sink1 = sinkExchanger.createExchangeSink(); ExchangeSink sink2 = sinkExchanger.createExchangeSink(); ExchangeSourceHandler sourceExchanger = new ExchangeSourceHandler(3, threadPool.executor(ESQL_TEST_EXECUTOR)); + PlainActionFuture sourceCompletion = new PlainActionFuture<>(); + 
sourceExchanger.addCompletionListener(sourceCompletion); ExchangeSource source = sourceExchanger.createExchangeSource(); sourceExchanger.addRemoteSink(sinkExchanger::fetchPageAsync, 1); SubscribableListener waitForReading = source.waitForReading(); @@ -133,7 +136,9 @@ public void testBasic() throws Exception { sink2.finish(); assertTrue(sink2.isFinished()); assertTrue(source.isFinished()); + assertFalse(sourceCompletion.isDone()); source.finish(); + sourceCompletion.actionGet(10, TimeUnit.SECONDS); ESTestCase.terminate(threadPool); for (Page page : pages) { page.releaseBlocks(); @@ -320,7 +325,9 @@ protected void start(Driver driver, ActionListener listener) { public void testConcurrentWithHandlers() { BlockFactory blockFactory = blockFactory(); + PlainActionFuture sourceCompletionFuture = new PlainActionFuture<>(); var sourceExchanger = new ExchangeSourceHandler(randomExchangeBuffer(), threadPool.executor(ESQL_TEST_EXECUTOR)); + sourceExchanger.addCompletionListener(sourceCompletionFuture); List sinkHandlers = new ArrayList<>(); Supplier exchangeSink = () -> { final ExchangeSinkHandler sinkHandler; @@ -336,6 +343,7 @@ public void testConcurrentWithHandlers() { final int maxInputSeqNo = rarely() ? -1 : randomIntBetween(0, 50_000); final int maxOutputSeqNo = rarely() ? 
-1 : randomIntBetween(0, 50_000); runConcurrentTest(maxInputSeqNo, maxOutputSeqNo, sourceExchanger::createExchangeSource, exchangeSink); + sourceCompletionFuture.actionGet(10, TimeUnit.SECONDS); } public void testEarlyTerminate() { @@ -358,7 +366,7 @@ public void testEarlyTerminate() { assertTrue(sink.isFinished()); } - public void testConcurrentWithTransportActions() throws Exception { + public void testConcurrentWithTransportActions() { MockTransportService node0 = newTransportService(); ExchangeService exchange0 = new ExchangeService(Settings.EMPTY, threadPool, ESQL_TEST_EXECUTOR, blockFactory()); exchange0.registerTransportHandler(node0); @@ -371,12 +379,15 @@ public void testConcurrentWithTransportActions() throws Exception { String exchangeId = "exchange"; Task task = new Task(1, "", "", "", null, Collections.emptyMap()); var sourceHandler = new ExchangeSourceHandler(randomExchangeBuffer(), threadPool.executor(ESQL_TEST_EXECUTOR)); + PlainActionFuture sourceCompletionFuture = new PlainActionFuture<>(); + sourceHandler.addCompletionListener(sourceCompletionFuture); ExchangeSinkHandler sinkHandler = exchange1.createSinkHandler(exchangeId, randomExchangeBuffer()); Transport.Connection connection = node0.getConnection(node1.getLocalNode()); sourceHandler.addRemoteSink(exchange0.newRemoteSink(task, exchangeId, node0, connection), randomIntBetween(1, 5)); final int maxInputSeqNo = rarely() ? -1 : randomIntBetween(0, 50_000); final int maxOutputSeqNo = rarely() ? 
-1 : randomIntBetween(0, 50_000); runConcurrentTest(maxInputSeqNo, maxOutputSeqNo, sourceHandler::createExchangeSource, sinkHandler::createExchangeSink); + sourceCompletionFuture.actionGet(10, TimeUnit.SECONDS); } } @@ -427,6 +438,8 @@ public void sendResponse(TransportResponse transportResponse) { String exchangeId = "exchange"; Task task = new Task(1, "", "", "", null, Collections.emptyMap()); var sourceHandler = new ExchangeSourceHandler(randomIntBetween(1, 128), threadPool.executor(ESQL_TEST_EXECUTOR)); + PlainActionFuture sourceCompletionFuture = new PlainActionFuture<>(); + sourceHandler.addCompletionListener(sourceCompletionFuture); ExchangeSinkHandler sinkHandler = exchange1.createSinkHandler(exchangeId, randomIntBetween(1, 128)); Transport.Connection connection = node0.getConnection(node1.getLocalDiscoNode()); sourceHandler.addRemoteSink(exchange0.newRemoteSink(task, exchangeId, node0, connection), randomIntBetween(1, 5)); @@ -438,6 +451,7 @@ public void sendResponse(TransportResponse transportResponse) { assertNotNull(cause); assertThat(cause.getMessage(), equalTo("page is too large")); sinkHandler.onFailure(new RuntimeException(cause)); + sourceCompletionFuture.actionGet(10, TimeUnit.SECONDS); } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeService.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeService.java index 7b38197dde95a..d9005d5997b34 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeService.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeService.java @@ -205,6 +205,7 @@ public void execute( RefCountingListener refs = new RefCountingListener(listener.map(unused -> new Result(collectedPages, collectedProfiles))) ) { // run compute on the coordinator + exchangeSource.addCompletionListener(refs.acquire()); runCompute( rootTask, new ComputeContext(sessionId, 
RemoteClusterAware.LOCAL_CLUSTER_GROUP_KEY, List.of(), configuration, exchangeSource, null), @@ -722,6 +723,7 @@ private void runComputeOnDataNode( var externalSink = exchangeService.getSinkHandler(externalId); task.addListener(() -> exchangeService.finishSinkHandler(externalId, new TaskCancelledException(task.getReasonCancelled()))); var exchangeSource = new ExchangeSourceHandler(1, esqlExecutor); + exchangeSource.addCompletionListener(refs.acquire()); exchangeSource.addRemoteSink(internalSink::fetchPageAsync, 1); ActionListener reductionListener = cancelOnFailure(task, cancelled, refs.acquire()); runCompute( @@ -854,6 +856,7 @@ void runComputeOnRemoteCluster( RefCountingListener refs = new RefCountingListener(listener.map(unused -> new ComputeResponse(collectedProfiles))) ) { exchangeSink.addCompletionListener(refs.acquire()); + exchangeSource.addCompletionListener(refs.acquire()); PhysicalPlan coordinatorPlan = new ExchangeSinkExec( plan.source(), plan.output(), From ef8b6107aa2d3b29ebadef40f4a1a81e9fef0553 Mon Sep 17 00:00:00 2001 From: Lee Hinman Date: Wed, 8 May 2024 14:07:25 -0600 Subject: [PATCH 069/117] Move kibana reporting data stream settings into component template (#107581) Previously these were contained in the index template, however, Kibana needs to be able to make overrides to only the settings, so factoring these out would allow them to do this (in such a way that they can be overridden by the `kibana-reporting@custom` component template as well). 
Relates to #97765 --- .../main/resources/kibana-reporting@settings.json | 14 ++++++++++++++ .../main/resources/kibana-reporting@template.json | 6 +----- .../xpack/stack/StackTemplateRegistry.java | 8 ++++++++ .../xpack/stack/StackTemplateRegistryTests.java | 5 +++++ 4 files changed, 28 insertions(+), 5 deletions(-) create mode 100644 x-pack/plugin/core/template-resources/src/main/resources/kibana-reporting@settings.json diff --git a/x-pack/plugin/core/template-resources/src/main/resources/kibana-reporting@settings.json b/x-pack/plugin/core/template-resources/src/main/resources/kibana-reporting@settings.json new file mode 100644 index 0000000000000..933d7681c92e8 --- /dev/null +++ b/x-pack/plugin/core/template-resources/src/main/resources/kibana-reporting@settings.json @@ -0,0 +1,14 @@ +{ + "template": { + "settings": { + "number_of_shards": 1, + "auto_expand_replicas": "0-1" + } + }, + "_meta": { + "description": "default kibana reporting settings installed by elasticsearch", + "managed": true + }, + "version": ${xpack.stack.template.version}, + "deprecated": ${xpack.stack.template.deprecated} +} diff --git a/x-pack/plugin/core/template-resources/src/main/resources/kibana-reporting@template.json b/x-pack/plugin/core/template-resources/src/main/resources/kibana-reporting@template.json index 9c4da646c3399..240ad36199fe3 100644 --- a/x-pack/plugin/core/template-resources/src/main/resources/kibana-reporting@template.json +++ b/x-pack/plugin/core/template-resources/src/main/resources/kibana-reporting@template.json @@ -5,14 +5,10 @@ "hidden": true }, "allow_auto_create": true, - "composed_of": ["kibana-reporting@custom"], + "composed_of": ["kibana-reporting@settings", "kibana-reporting@custom"], "ignore_missing_component_templates": ["kibana-reporting@custom"], "template": { "lifecycle": {}, - "settings": { - "number_of_shards": 1, - "auto_expand_replicas": "0-1" - }, "mappings": { "properties": { "meta": { diff --git 
a/x-pack/plugin/stack/src/main/java/org/elasticsearch/xpack/stack/StackTemplateRegistry.java b/x-pack/plugin/stack/src/main/java/org/elasticsearch/xpack/stack/StackTemplateRegistry.java index 4fdb2d05c5326..30323a1d7d363 100644 --- a/x-pack/plugin/stack/src/main/java/org/elasticsearch/xpack/stack/StackTemplateRegistry.java +++ b/x-pack/plugin/stack/src/main/java/org/elasticsearch/xpack/stack/StackTemplateRegistry.java @@ -107,6 +107,7 @@ public class StackTemplateRegistry extends IndexTemplateRegistry { // Kibana reporting template /////////////////////////////////// public static final String KIBANA_REPORTING_INDEX_TEMPLATE_NAME = ".kibana-reporting"; + public static final String KIBANA_REPORTING_COMPONENT_TEMPLATE_NAME = "kibana-reporting@settings"; public StackTemplateRegistry( Settings nodeSettings, @@ -229,6 +230,13 @@ protected List getLifecyclePolicies() { REGISTRY_VERSION, TEMPLATE_VERSION_VARIABLE, ADDITIONAL_TEMPLATE_VARIABLES + ), + new IndexTemplateConfig( + KIBANA_REPORTING_COMPONENT_TEMPLATE_NAME, + "/kibana-reporting@settings.json", + REGISTRY_VERSION, + TEMPLATE_VERSION_VARIABLE, + ADDITIONAL_TEMPLATE_VARIABLES ) )) { try { diff --git a/x-pack/plugin/stack/src/test/java/org/elasticsearch/xpack/stack/StackTemplateRegistryTests.java b/x-pack/plugin/stack/src/test/java/org/elasticsearch/xpack/stack/StackTemplateRegistryTests.java index 782fe3b41ae3b..abb2d5765b128 100644 --- a/x-pack/plugin/stack/src/test/java/org/elasticsearch/xpack/stack/StackTemplateRegistryTests.java +++ b/x-pack/plugin/stack/src/test/java/org/elasticsearch/xpack/stack/StackTemplateRegistryTests.java @@ -429,6 +429,7 @@ public void testSameOrHigherVersionTemplateNotUpgraded() { versions.put(StackTemplateRegistry.METRICS_MAPPINGS_COMPONENT_TEMPLATE_NAME, StackTemplateRegistry.REGISTRY_VERSION); versions.put(StackTemplateRegistry.SYNTHETICS_SETTINGS_COMPONENT_TEMPLATE_NAME, StackTemplateRegistry.REGISTRY_VERSION); 
versions.put(StackTemplateRegistry.SYNTHETICS_MAPPINGS_COMPONENT_TEMPLATE_NAME, StackTemplateRegistry.REGISTRY_VERSION); + versions.put(StackTemplateRegistry.KIBANA_REPORTING_COMPONENT_TEMPLATE_NAME, StackTemplateRegistry.REGISTRY_VERSION); ClusterChangedEvent sameVersionEvent = createClusterChangedEvent(versions, nodes); client.setVerifier((action, request, listener) -> { if (action instanceof PutComponentTemplateAction) { @@ -484,6 +485,10 @@ public void testSameOrHigherVersionTemplateNotUpgraded() { StackTemplateRegistry.SYNTHETICS_MAPPINGS_COMPONENT_TEMPLATE_NAME, StackTemplateRegistry.REGISTRY_VERSION + randomIntBetween(1, 1000) ); + versions.put( + StackTemplateRegistry.KIBANA_REPORTING_COMPONENT_TEMPLATE_NAME, + StackTemplateRegistry.REGISTRY_VERSION + randomIntBetween(1, 1000) + ); ClusterChangedEvent higherVersionEvent = createClusterChangedEvent(versions, nodes); registry.clusterChanged(higherVersionEvent); } From 5b9dd3dda04820589df495ed68605d2fd7b2caf2 Mon Sep 17 00:00:00 2001 From: Ryan Ernst Date: Wed, 8 May 2024 14:02:26 -0700 Subject: [PATCH 070/117] Mock empty state task in file settings tests (#108100) When the file watched by file settings is initially missing, a special method in reserved state service is called to write a dummy cluster state entry. In the case of tests, there is no real running master service, so when the task is submitted, the file watcher thread actually barfs and the watcher dies, silently. That then causes the test to timeout as it waits indefinitely but the file watcher is no longer watching for the test file that was written. This commit mocks out writing this empty state in the reserved state service. It also collapses the two tests that check stopping while blocked in processing works since they were almost exactly the same. 
closes #106968 --- .../service/FileSettingsServiceTests.java | 58 +++---------------- 1 file changed, 7 insertions(+), 51 deletions(-) diff --git a/server/src/test/java/org/elasticsearch/reservedstate/service/FileSettingsServiceTests.java b/server/src/test/java/org/elasticsearch/reservedstate/service/FileSettingsServiceTests.java index 53ca55f8a5f81..aca5d2cbee2c9 100644 --- a/server/src/test/java/org/elasticsearch/reservedstate/service/FileSettingsServiceTests.java +++ b/server/src/test/java/org/elasticsearch/reservedstate/service/FileSettingsServiceTests.java @@ -8,8 +8,8 @@ package org.elasticsearch.reservedstate.service; -import org.apache.lucene.tests.util.LuceneTestCase.AwaitsFix; import org.elasticsearch.Version; +import org.elasticsearch.action.ActionListener; import org.elasticsearch.cluster.ClusterChangedEvent; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.ClusterState; @@ -55,7 +55,6 @@ import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; -@AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/106968") public class FileSettingsServiceTests extends ESTestCase { private Environment env; private ClusterService clusterService; @@ -234,54 +233,11 @@ public void testStopWorksInMiddleOfProcessing() throws Exception { return new ReservedStateChunk(Collections.emptyMap(), new ReservedStateVersion(1L, Version.CURRENT)); }).when(spiedController).parse(any(String.class), any()); - service.start(); - service.clusterChanged(new ClusterChangedEvent("test", clusterService.state(), ClusterState.EMPTY_STATE)); - assertTrue(service.watching()); - - Files.createDirectories(service.watchedFileDir()); - - // Make some fake settings file to cause the file settings service to process it - writeTestFile(service.watchedFile(), "{}"); - - // we need to wait a bit, on MacOS it may take up to 10 seconds for the Java watcher service to notice the file, - // on Linux is instantaneous. 
Windows is instantaneous too. - assertTrue(processFileLatch.await(30, TimeUnit.SECONDS)); - - // Stopping the service should interrupt the watcher thread, we should be able to stop - service.stop(); - assertFalse(service.watching()); - service.close(); - // let the deadlocked thread end, so we can cleanly exit the test - deadThreadLatch.countDown(); - } - - public void testStopWorksIfProcessingDidntReturnYet() throws Exception { - var spiedController = spy(controller); - var service = new FileSettingsService(clusterService, spiedController, env); - - CountDownLatch processFileLatch = new CountDownLatch(1); - CountDownLatch deadThreadLatch = new CountDownLatch(1); - - doAnswer((Answer) invocation -> { - // allow the other thread to continue, but hold on a bit to avoid - // completing the task immediately in the main watcher loop - try { - Thread.sleep(1_000); - } catch (InterruptedException e) { - // pass it on - Thread.currentThread().interrupt(); - } - processFileLatch.countDown(); - new Thread(() -> { - // Simulate a thread that never allows the completion to complete - try { - deadThreadLatch.await(); - } catch (InterruptedException e) { - throw new RuntimeException(e); - } - }).start(); - return new ReservedStateChunk(Collections.emptyMap(), new ReservedStateVersion(1L, Version.CURRENT)); - }).when(spiedController).parse(any(String.class), any()); + doAnswer((Answer) invocation -> { + var completionListener = invocation.getArgument(1, ActionListener.class); + completionListener.onResponse(null); + return null; + }).when(spiedController).initEmpty(any(String.class), any()); service.start(); service.clusterChanged(new ClusterChangedEvent("test", clusterService.state(), ClusterState.EMPTY_STATE)); @@ -296,7 +252,7 @@ public void testStopWorksIfProcessingDidntReturnYet() throws Exception { // on Linux is instantaneous. Windows is instantaneous too. 
assertTrue(processFileLatch.await(30, TimeUnit.SECONDS)); - // Stopping the service should interrupt the watcher thread, allowing the whole thing to exit + // Stopping the service should interrupt the watcher thread, we should be able to stop service.stop(); assertFalse(service.watching()); service.close(); From 90b238e9d80ef39bd9786df40655f08c58bb5c0a Mon Sep 17 00:00:00 2001 From: Lee Hinman Date: Wed, 8 May 2024 15:28:25 -0600 Subject: [PATCH 071/117] Mention alias filters don't apply for get-by-id in docs (#108433) Resolves #3861 --- docs/reference/alias.asciidoc | 2 ++ 1 file changed, 2 insertions(+) diff --git a/docs/reference/alias.asciidoc b/docs/reference/alias.asciidoc index e5c2db65778d8..9d784f530d63c 100644 --- a/docs/reference/alias.asciidoc +++ b/docs/reference/alias.asciidoc @@ -358,6 +358,8 @@ POST _aliases ---- // TEST[s/^/PUT my-index-2099.05.06-000001\n/] +NOTE: Filters are only applied when using the <>, and are not applied when <>. + [discrete] [[alias-routing]] === Routing From 31abf3e00c119bd807d69574c9fb18dba4b6d9bb Mon Sep 17 00:00:00 2001 From: Ryan Ernst Date: Wed, 8 May 2024 15:38:38 -0700 Subject: [PATCH 072/117] Make MockLogAppender threadsafe (#108206) Adding and removing appenders in Log4j is not threadsafe. Yet some tests rely on capturing logging by adding an in memory appender, MockLogAppender. This commit makes the mock logging threadsafe by creating a new, singular appender for mock logging that delegates, in a threadsafe way, to the existing appenders created. Confusingly MockLogAppender is no longer really an appender, but I'm leaving clarifying that for a followup so as to limit the scope of this PR. 
closes #106425 --- .../bootstrap/SpawnerNoBootstrapTests.java | 1 + .../common/settings/SettingsFilterTests.java | 1 - .../org/elasticsearch/test/ESTestCase.java | 1 + .../elasticsearch/test/MockLogAppender.java | 84 +++++++++++++------ .../test/MockLogAppenderTests.java | 38 +++++++++ ...LoadAuthorizedIndicesTimeCheckerTests.java | 2 - 6 files changed, 100 insertions(+), 27 deletions(-) create mode 100644 test/framework/src/test/java/org/elasticsearch/test/MockLogAppenderTests.java diff --git a/qa/no-bootstrap-tests/src/test/java/org/elasticsearch/bootstrap/SpawnerNoBootstrapTests.java b/qa/no-bootstrap-tests/src/test/java/org/elasticsearch/bootstrap/SpawnerNoBootstrapTests.java index 81b3a086e9aca..c4aa3c9b1f1e6 100644 --- a/qa/no-bootstrap-tests/src/test/java/org/elasticsearch/bootstrap/SpawnerNoBootstrapTests.java +++ b/qa/no-bootstrap-tests/src/test/java/org/elasticsearch/bootstrap/SpawnerNoBootstrapTests.java @@ -64,6 +64,7 @@ public class SpawnerNoBootstrapTests extends LuceneTestCase { static { // normally done by ESTestCase, but need here because spawner depends on logging LogConfigurator.loadLog4jPlugins(); + MockLogAppender.init(); } static class ExpectedStreamMessage implements MockLogAppender.LoggingExpectation { diff --git a/server/src/test/java/org/elasticsearch/common/settings/SettingsFilterTests.java b/server/src/test/java/org/elasticsearch/common/settings/SettingsFilterTests.java index 4885bbc277cb4..8e62a9306a3d4 100644 --- a/server/src/test/java/org/elasticsearch/common/settings/SettingsFilterTests.java +++ b/server/src/test/java/org/elasticsearch/common/settings/SettingsFilterTests.java @@ -119,7 +119,6 @@ private void assertExpectedLogMessages(Consumer consumer, MockLogAppende Logger testLogger = LogManager.getLogger("org.elasticsearch.test"); MockLogAppender appender = new MockLogAppender(); try (var ignored = appender.capturing("org.elasticsearch.test")) { - appender.start(); Arrays.stream(expectations).forEach(appender::addExpectation); 
consumer.accept(testLogger); appender.assertAllExpectationsMatched(); diff --git a/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java index 804dbfbb2dc47..83f7fdfe386c7 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java @@ -260,6 +260,7 @@ public static void resetPortCounter() { // TODO: consolidate logging initialization for tests so it all occurs in logconfigurator LogConfigurator.loadLog4jPlugins(); LogConfigurator.configureESLogging(); + MockLogAppender.init(); final List testAppenders = new ArrayList<>(3); for (String leakLoggerName : Arrays.asList("io.netty.util.ResourceLeakDetector", LeakTracker.class.getName())) { diff --git a/test/framework/src/main/java/org/elasticsearch/test/MockLogAppender.java b/test/framework/src/main/java/org/elasticsearch/test/MockLogAppender.java index 10a3a8a78e483..bc3723119afa9 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/MockLogAppender.java +++ b/test/framework/src/main/java/org/elasticsearch/test/MockLogAppender.java @@ -9,7 +9,6 @@ import org.apache.logging.log4j.Level; import org.apache.logging.log4j.LogManager; -import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.core.LogEvent; import org.apache.logging.log4j.core.appender.AbstractAppender; import org.apache.logging.log4j.core.config.Property; @@ -19,9 +18,10 @@ import java.util.Arrays; import java.util.List; +import java.util.Map; import java.util.Objects; +import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.CopyOnWriteArrayList; -import java.util.concurrent.atomic.AtomicBoolean; import java.util.regex.Pattern; import static org.hamcrest.CoreMatchers.equalTo; @@ -31,12 +31,38 @@ /** * Test appender that can be used to verify that certain events were logged correctly */ -public class MockLogAppender extends AbstractAppender 
{ +public class MockLogAppender { + private static final Map> mockAppenders = new ConcurrentHashMap<>(); + private static final RealMockAppender parent = new RealMockAppender(); private final List expectations; + private volatile boolean isAlive = true; + + private static class RealMockAppender extends AbstractAppender { + + RealMockAppender() { + super("mock", null, null, false, Property.EMPTY_ARRAY); + } + + @Override + public void append(LogEvent event) { + List appenders = mockAppenders.get(event.getLoggerName()); + if (appenders == null) { + // check if there is a root appender + appenders = mockAppenders.getOrDefault("", List.of()); + } + for (MockLogAppender appender : appenders) { + if (appender.isAlive == false) { + continue; + } + for (LoggingExpectation expectation : appender.expectations) { + expectation.match(event); + } + } + } + } public MockLogAppender() { - super("mock", null, null, false, Property.EMPTY_ARRAY); /* * We use a copy-on-write array list since log messages could be appended while we are setting up expectations. When that occurs, * we would run into a concurrent modification exception from the iteration over the expectations in #append, concurrent with a @@ -45,15 +71,16 @@ public MockLogAppender() { expectations = new CopyOnWriteArrayList<>(); } - public void addExpectation(LoggingExpectation expectation) { - expectations.add(new WrappedLoggingExpectation(expectation)); + /** + * Initialize the mock log appender with the log4j system. 
+ */ + public static void init() { + parent.start(); + Loggers.addAppender(LogManager.getLogger(""), parent); } - @Override - public void append(LogEvent event) { - for (LoggingExpectation expectation : expectations) { - expectation.match(event); - } + public void addExpectation(LoggingExpectation expectation) { + expectations.add(new WrappedLoggingExpectation(expectation)); } public void assertAllExpectationsMatched() { @@ -213,7 +240,7 @@ public void assertMatched() { */ private static class WrappedLoggingExpectation implements LoggingExpectation { - private final AtomicBoolean assertMatchedCalled = new AtomicBoolean(false); + private volatile boolean assertMatchedCalled = false; private final LoggingExpectation delegate; private WrappedLoggingExpectation(LoggingExpectation delegate) { @@ -230,7 +257,7 @@ public void assertMatched() { try { delegate.assertMatched(); } finally { - assertMatchedCalled.set(true); + assertMatchedCalled = true; } } @@ -243,34 +270,43 @@ public String toString() { /** * Adds the list of class loggers to this {@link MockLogAppender}. * - * Stops ({@link #stop()}) and runs some checks on the {@link MockLogAppender} once the returned object is released. + * Stops and runs some checks on the {@link MockLogAppender} once the returned object is released. */ public Releasable capturing(Class... classes) { - return appendToLoggers(Arrays.stream(classes).map(LogManager::getLogger).toList()); + return appendToLoggers(Arrays.stream(classes).map(Class::getCanonicalName).toList()); } /** * Same as above except takes string class names of each logger. */ public Releasable capturing(String... 
names) { - return appendToLoggers(Arrays.stream(names).map(LogManager::getLogger).toList()); + return appendToLoggers(Arrays.asList(names)); } - private Releasable appendToLoggers(List loggers) { - start(); - for (final var logger : loggers) { - Loggers.addAppender(logger, this); + private Releasable appendToLoggers(List loggers) { + for (String logger : loggers) { + mockAppenders.compute(logger, (k, v) -> { + if (v == null) { + v = new CopyOnWriteArrayList<>(); + } + v.add(this); + return v; + }); } return () -> { - for (final var logger : loggers) { - Loggers.removeAppender(logger, this); + isAlive = false; + for (String logger : loggers) { + mockAppenders.compute(logger, (k, v) -> { + assert v != null; + v.remove(this); + return v.isEmpty() ? null : v; + }); } - stop(); // check that all expectations have been evaluated before this is released for (WrappedLoggingExpectation expectation : expectations) { assertThat( "Method assertMatched() not called on LoggingExpectation instance before release: " + expectation, - expectation.assertMatchedCalled.get(), + expectation.assertMatchedCalled, is(true) ); } diff --git a/test/framework/src/test/java/org/elasticsearch/test/MockLogAppenderTests.java b/test/framework/src/test/java/org/elasticsearch/test/MockLogAppenderTests.java new file mode 100644 index 0000000000000..4973bb83311bc --- /dev/null +++ b/test/framework/src/test/java/org/elasticsearch/test/MockLogAppenderTests.java @@ -0,0 +1,38 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. 
+ */ + +package org.elasticsearch.test; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; + +import java.util.concurrent.atomic.AtomicBoolean; + +public class MockLogAppenderTests extends ESTestCase { + + public void testConcurrentLogAndLifecycle() throws Exception { + Logger logger = LogManager.getLogger(MockLogAppenderTests.class); + final var keepGoing = new AtomicBoolean(true); + final var logThread = new Thread(() -> { + while (keepGoing.get()) { + logger.info("test"); + } + }); + logThread.start(); + + final var appender = new MockLogAppender(); + for (int i = 0; i < 1000; i++) { + try (var ignored = appender.capturing(MockLogAppenderTests.class)) { + Thread.yield(); + } + } + + keepGoing.set(false); + logThread.join(); + } +} diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/LoadAuthorizedIndicesTimeCheckerTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/LoadAuthorizedIndicesTimeCheckerTests.java index e06f6f212c687..8295f028588cc 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/LoadAuthorizedIndicesTimeCheckerTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/LoadAuthorizedIndicesTimeCheckerTests.java @@ -12,7 +12,6 @@ import org.apache.logging.log4j.Logger; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.search.TransportSearchAction; -import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.SettingsException; @@ -195,7 +194,6 @@ private void testLogging( ); final MockLogAppender mockAppender = new MockLogAppender(); try (var ignored = mockAppender.capturing(timerLogger.getName())) { - Loggers.addAppender(timerLogger, mockAppender); mockAppender.addExpectation(expectation); 
checker.accept(List.of()); mockAppender.assertAllExpectationsMatched(); From e56ed71ef8beb973c039e52ed2790c8dc119af22 Mon Sep 17 00:00:00 2001 From: Costin Leau Date: Wed, 8 May 2024 17:13:46 -0700 Subject: [PATCH 073/117] ESQL: Disable quoting in FROM command (#108431) Disable location quoting in FROM command before 8.14 release to allow more time to discuss options --- docs/changelog/108431.yaml | 5 + .../esql/src/main/antlr/EsqlBaseLexer.g4 | 4 - .../esql/src/main/antlr/EsqlBaseParser.g4 | 1 - .../xpack/esql/parser/EsqlBaseLexer.interp | 3 +- .../xpack/esql/parser/EsqlBaseLexer.java | 1426 ++++++++--------- .../xpack/esql/parser/EsqlBaseParser.interp | 2 +- .../xpack/esql/parser/EsqlBaseParser.java | 144 +- .../xpack/esql/parser/IdentifierBuilder.java | 2 +- .../esql/parser/StatementParserTests.java | 14 +- 9 files changed, 793 insertions(+), 808 deletions(-) create mode 100644 docs/changelog/108431.yaml diff --git a/docs/changelog/108431.yaml b/docs/changelog/108431.yaml new file mode 100644 index 0000000000000..84607b1b99ac3 --- /dev/null +++ b/docs/changelog/108431.yaml @@ -0,0 +1,5 @@ +pr: 108431 +summary: "ESQL: Disable quoting in FROM command" +area: ES|QL +type: bug +issues: [] diff --git a/x-pack/plugin/esql/src/main/antlr/EsqlBaseLexer.g4 b/x-pack/plugin/esql/src/main/antlr/EsqlBaseLexer.g4 index c4a3dc7c56615..f16afa86199f9 100644 --- a/x-pack/plugin/esql/src/main/antlr/EsqlBaseLexer.g4 +++ b/x-pack/plugin/esql/src/main/antlr/EsqlBaseLexer.g4 @@ -201,10 +201,6 @@ FROM_UNQUOTED_IDENTIFIER : FROM_UNQUOTED_IDENTIFIER_PART+ ; -FROM_QUOTED_IDENTIFIER - : QUOTED_IDENTIFIER -> type(QUOTED_IDENTIFIER) - ; - FROM_LINE_COMMENT : LINE_COMMENT -> channel(HIDDEN) ; diff --git a/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.g4 b/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.g4 index 62dcc6ebd484b..e30bc83595942 100644 --- a/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.g4 +++ b/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.g4 @@ -109,7 +109,6 @@ 
fromCommand fromIdentifier : FROM_UNQUOTED_IDENTIFIER - | QUOTED_IDENTIFIER ; fromOptions diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.interp b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.interp index 866093ef55a6c..d6ad79586fa79 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.interp +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.interp @@ -319,7 +319,6 @@ OPTIONS METADATA FROM_UNQUOTED_IDENTIFIER_PART FROM_UNQUOTED_IDENTIFIER -FROM_QUOTED_IDENTIFIER FROM_LINE_COMMENT FROM_MULTILINE_COMMENT FROM_WS @@ -405,4 +404,4 @@ META_MODE SETTING_MODE atn: -[4, 0, 110, 1203, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2, 4, 7, 4, 2, 5, 7, 5, 2, 6, 7, 6, 2, 7, 7, 7, 2, 8, 7, 8, 2, 9, 7, 9, 2, 10, 7, 10, 2, 11, 7, 11, 2, 12, 7, 12, 2, 13, 7, 13, 2, 14, 7, 14, 2, 15, 7, 15, 2, 16, 7, 16, 2, 17, 7, 17, 2, 18, 7, 18, 2, 19, 7, 19, 2, 20, 7, 20, 2, 21, 7, 21, 2, 22, 7, 22, 2, 23, 7, 23, 2, 24, 7, 24, 2, 25, 7, 25, 2, 26, 7, 26, 2, 27, 7, 27, 2, 28, 7, 28, 2, 29, 7, 29, 2, 30, 7, 30, 2, 31, 7, 31, 2, 32, 7, 32, 2, 33, 7, 33, 2, 34, 7, 34, 2, 35, 7, 35, 2, 36, 7, 36, 2, 37, 7, 37, 2, 38, 7, 38, 2, 39, 7, 39, 2, 40, 7, 40, 2, 41, 7, 41, 2, 42, 7, 42, 2, 43, 7, 43, 2, 44, 7, 44, 2, 45, 7, 45, 2, 46, 7, 46, 2, 47, 7, 47, 2, 48, 7, 48, 2, 49, 7, 49, 2, 50, 7, 50, 2, 51, 7, 51, 2, 52, 7, 52, 2, 53, 7, 53, 2, 54, 7, 54, 2, 55, 7, 55, 2, 56, 7, 56, 2, 57, 7, 57, 2, 58, 7, 58, 2, 59, 7, 59, 2, 60, 7, 60, 2, 61, 7, 61, 2, 62, 7, 62, 2, 63, 7, 63, 2, 64, 7, 64, 2, 65, 7, 65, 2, 66, 7, 66, 2, 67, 7, 67, 2, 68, 7, 68, 2, 69, 7, 69, 2, 70, 7, 70, 2, 71, 7, 71, 2, 72, 7, 72, 2, 73, 7, 73, 2, 74, 7, 74, 2, 75, 7, 75, 2, 76, 7, 76, 2, 77, 7, 77, 2, 78, 7, 78, 2, 79, 7, 79, 2, 80, 7, 80, 2, 81, 7, 81, 2, 82, 7, 82, 2, 83, 7, 83, 2, 84, 
7, 84, 2, 85, 7, 85, 2, 86, 7, 86, 2, 87, 7, 87, 2, 88, 7, 88, 2, 89, 7, 89, 2, 90, 7, 90, 2, 91, 7, 91, 2, 92, 7, 92, 2, 93, 7, 93, 2, 94, 7, 94, 2, 95, 7, 95, 2, 96, 7, 96, 2, 97, 7, 97, 2, 98, 7, 98, 2, 99, 7, 99, 2, 100, 7, 100, 2, 101, 7, 101, 2, 102, 7, 102, 2, 103, 7, 103, 2, 104, 7, 104, 2, 105, 7, 105, 2, 106, 7, 106, 2, 107, 7, 107, 2, 108, 7, 108, 2, 109, 7, 109, 2, 110, 7, 110, 2, 111, 7, 111, 2, 112, 7, 112, 2, 113, 7, 113, 2, 114, 7, 114, 2, 115, 7, 115, 2, 116, 7, 116, 2, 117, 7, 117, 2, 118, 7, 118, 2, 119, 7, 119, 2, 120, 7, 120, 2, 121, 7, 121, 2, 122, 7, 122, 2, 123, 7, 123, 2, 124, 7, 124, 2, 125, 7, 125, 2, 126, 7, 126, 2, 127, 7, 127, 2, 128, 7, 128, 2, 129, 7, 129, 2, 130, 7, 130, 2, 131, 7, 131, 2, 132, 7, 132, 2, 133, 7, 133, 2, 134, 7, 134, 2, 135, 7, 135, 2, 136, 7, 136, 2, 137, 7, 137, 2, 138, 7, 138, 2, 139, 7, 139, 2, 140, 7, 140, 2, 141, 7, 141, 2, 142, 7, 142, 2, 143, 7, 143, 2, 144, 7, 144, 2, 145, 7, 145, 2, 146, 7, 146, 2, 147, 7, 147, 2, 148, 7, 148, 2, 149, 7, 149, 2, 150, 7, 150, 2, 151, 7, 151, 2, 152, 7, 152, 2, 153, 7, 153, 2, 154, 7, 154, 2, 155, 7, 155, 2, 156, 7, 156, 2, 157, 7, 157, 2, 158, 7, 158, 2, 159, 7, 159, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 15, 1, 15, 1, 
15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 18, 4, 18, 484, 8, 18, 11, 18, 12, 18, 485, 1, 18, 1, 18, 1, 19, 1, 19, 1, 19, 1, 19, 5, 19, 494, 8, 19, 10, 19, 12, 19, 497, 9, 19, 1, 19, 3, 19, 500, 8, 19, 1, 19, 3, 19, 503, 8, 19, 1, 19, 1, 19, 1, 20, 1, 20, 1, 20, 1, 20, 1, 20, 5, 20, 512, 8, 20, 10, 20, 12, 20, 515, 9, 20, 1, 20, 1, 20, 1, 20, 1, 20, 1, 20, 1, 21, 4, 21, 523, 8, 21, 11, 21, 12, 21, 524, 1, 21, 1, 21, 1, 22, 1, 22, 1, 22, 1, 22, 1, 22, 1, 23, 1, 23, 1, 23, 1, 23, 1, 23, 1, 24, 1, 24, 1, 24, 1, 24, 1, 25, 1, 25, 1, 25, 1, 25, 1, 26, 1, 26, 1, 26, 1, 26, 1, 27, 1, 27, 1, 27, 1, 27, 1, 28, 1, 28, 1, 29, 1, 29, 1, 30, 1, 30, 1, 30, 1, 31, 1, 31, 1, 32, 1, 32, 3, 32, 566, 8, 32, 1, 32, 4, 32, 569, 8, 32, 11, 32, 12, 32, 570, 1, 33, 1, 33, 1, 34, 1, 34, 1, 35, 1, 35, 1, 35, 3, 35, 580, 8, 35, 1, 36, 1, 36, 1, 37, 1, 37, 1, 37, 3, 37, 587, 8, 37, 1, 38, 1, 38, 1, 38, 5, 38, 592, 8, 38, 10, 38, 12, 38, 595, 9, 38, 1, 38, 1, 38, 1, 38, 1, 38, 1, 38, 1, 38, 5, 38, 603, 8, 38, 10, 38, 12, 38, 606, 9, 38, 1, 38, 1, 38, 1, 38, 1, 38, 1, 38, 3, 38, 613, 8, 38, 1, 38, 3, 38, 616, 8, 38, 3, 38, 618, 8, 38, 1, 39, 4, 39, 621, 8, 39, 11, 39, 12, 39, 622, 1, 40, 4, 40, 626, 8, 40, 11, 40, 12, 40, 627, 1, 40, 1, 40, 5, 40, 632, 8, 40, 10, 40, 12, 40, 635, 9, 40, 1, 40, 1, 40, 4, 40, 639, 8, 40, 11, 40, 12, 40, 640, 1, 40, 4, 40, 644, 8, 40, 11, 40, 12, 40, 645, 1, 40, 1, 40, 5, 40, 650, 8, 40, 10, 40, 12, 40, 653, 9, 40, 3, 40, 655, 8, 40, 1, 40, 1, 40, 1, 40, 1, 40, 4, 40, 661, 8, 40, 11, 40, 12, 40, 662, 1, 40, 1, 40, 3, 40, 667, 8, 40, 1, 41, 1, 41, 1, 41, 1, 42, 1, 42, 1, 42, 1, 42, 1, 43, 1, 43, 1, 43, 1, 43, 1, 44, 1, 44, 1, 45, 1, 45, 1, 45, 1, 46, 1, 46, 1, 47, 1, 47, 1, 47, 1, 47, 1, 47, 1, 48, 1, 48, 1, 49, 1, 49, 1, 49, 1, 49, 1, 49, 1, 49, 1, 50, 1, 50, 1, 50, 1, 50, 1, 50, 1, 50, 1, 51, 1, 51, 1, 51, 1, 51, 1, 51, 1, 52, 1, 52, 1, 53, 1, 53, 1, 53, 1, 54, 
1, 54, 1, 54, 1, 55, 1, 55, 1, 55, 1, 55, 1, 55, 1, 56, 1, 56, 1, 56, 1, 56, 1, 57, 1, 57, 1, 57, 1, 57, 1, 57, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 59, 1, 59, 1, 59, 1, 60, 1, 60, 1, 61, 1, 61, 1, 61, 1, 61, 1, 61, 1, 61, 1, 62, 1, 62, 1, 63, 1, 63, 1, 63, 1, 63, 1, 63, 1, 64, 1, 64, 1, 64, 1, 65, 1, 65, 1, 65, 1, 66, 1, 66, 1, 66, 1, 67, 1, 67, 1, 68, 1, 68, 1, 68, 1, 69, 1, 69, 1, 70, 1, 70, 1, 70, 1, 71, 1, 71, 1, 72, 1, 72, 1, 73, 1, 73, 1, 74, 1, 74, 1, 75, 1, 75, 1, 76, 1, 76, 1, 76, 1, 76, 1, 76, 1, 77, 1, 77, 1, 77, 1, 77, 1, 77, 1, 78, 1, 78, 5, 78, 798, 8, 78, 10, 78, 12, 78, 801, 9, 78, 1, 78, 1, 78, 3, 78, 805, 8, 78, 1, 78, 4, 78, 808, 8, 78, 11, 78, 12, 78, 809, 3, 78, 812, 8, 78, 1, 79, 1, 79, 4, 79, 816, 8, 79, 11, 79, 12, 79, 817, 1, 79, 1, 79, 1, 80, 1, 80, 1, 81, 1, 81, 1, 81, 1, 81, 1, 82, 1, 82, 1, 82, 1, 82, 1, 83, 1, 83, 1, 83, 1, 83, 1, 84, 1, 84, 1, 84, 1, 84, 1, 84, 1, 85, 1, 85, 1, 85, 1, 85, 1, 86, 1, 86, 1, 86, 1, 86, 1, 87, 1, 87, 1, 87, 1, 87, 1, 88, 1, 88, 1, 88, 1, 88, 1, 89, 1, 89, 1, 89, 1, 89, 1, 90, 1, 90, 1, 90, 1, 90, 1, 90, 1, 90, 1, 90, 1, 90, 1, 91, 1, 91, 1, 91, 1, 91, 1, 91, 1, 91, 1, 91, 1, 91, 1, 91, 1, 92, 1, 92, 1, 92, 3, 92, 881, 8, 92, 1, 93, 4, 93, 884, 8, 93, 11, 93, 12, 93, 885, 1, 94, 1, 94, 1, 94, 1, 94, 1, 95, 1, 95, 1, 95, 1, 95, 1, 96, 1, 96, 1, 96, 1, 96, 1, 97, 1, 97, 1, 97, 1, 97, 1, 98, 1, 98, 1, 98, 1, 98, 1, 98, 1, 99, 1, 99, 1, 99, 1, 99, 1, 100, 1, 100, 1, 100, 1, 100, 1, 101, 1, 101, 1, 101, 1, 101, 3, 101, 921, 8, 101, 1, 102, 1, 102, 3, 102, 925, 8, 102, 1, 102, 5, 102, 928, 8, 102, 10, 102, 12, 102, 931, 9, 102, 1, 102, 1, 102, 3, 102, 935, 8, 102, 1, 102, 4, 102, 938, 8, 102, 11, 102, 12, 102, 939, 3, 102, 942, 8, 102, 1, 103, 1, 103, 4, 103, 946, 8, 103, 11, 103, 12, 103, 947, 1, 104, 1, 104, 1, 104, 1, 104, 1, 105, 1, 105, 1, 105, 1, 105, 1, 106, 1, 106, 1, 106, 1, 106, 1, 107, 1, 107, 1, 107, 1, 107, 1, 107, 1, 108, 1, 108, 1, 108, 1, 108, 1, 109, 1, 109, 1, 109, 1, 109, 1, 110, 1, 
110, 1, 110, 1, 110, 1, 111, 1, 111, 1, 111, 1, 112, 1, 112, 1, 112, 1, 112, 1, 113, 1, 113, 1, 113, 1, 113, 1, 114, 1, 114, 1, 114, 1, 114, 1, 115, 1, 115, 1, 115, 1, 115, 1, 116, 1, 116, 1, 116, 1, 116, 1, 116, 1, 117, 1, 117, 1, 117, 1, 117, 1, 117, 1, 118, 1, 118, 1, 118, 1, 118, 1, 118, 1, 119, 1, 119, 1, 119, 1, 119, 1, 119, 1, 119, 1, 119, 1, 120, 1, 120, 1, 121, 4, 121, 1023, 8, 121, 11, 121, 12, 121, 1024, 1, 121, 1, 121, 3, 121, 1029, 8, 121, 1, 121, 4, 121, 1032, 8, 121, 11, 121, 12, 121, 1033, 1, 122, 1, 122, 1, 122, 1, 122, 1, 123, 1, 123, 1, 123, 1, 123, 1, 124, 1, 124, 1, 124, 1, 124, 1, 125, 1, 125, 1, 125, 1, 125, 1, 126, 1, 126, 1, 126, 1, 126, 1, 127, 1, 127, 1, 127, 1, 127, 1, 127, 1, 127, 1, 128, 1, 128, 1, 128, 1, 128, 1, 129, 1, 129, 1, 129, 1, 129, 1, 130, 1, 130, 1, 130, 1, 130, 1, 131, 1, 131, 1, 131, 1, 131, 1, 132, 1, 132, 1, 132, 1, 132, 1, 133, 1, 133, 1, 133, 1, 133, 1, 134, 1, 134, 1, 134, 1, 134, 1, 135, 1, 135, 1, 135, 1, 135, 1, 136, 1, 136, 1, 136, 1, 136, 1, 137, 1, 137, 1, 137, 1, 137, 1, 137, 1, 138, 1, 138, 1, 138, 1, 138, 1, 139, 1, 139, 1, 139, 1, 139, 1, 140, 1, 140, 1, 140, 1, 140, 1, 141, 1, 141, 1, 141, 1, 141, 1, 142, 1, 142, 1, 142, 1, 142, 1, 143, 1, 143, 1, 143, 1, 143, 1, 144, 1, 144, 1, 144, 1, 144, 1, 144, 1, 145, 1, 145, 1, 145, 1, 145, 1, 145, 1, 146, 1, 146, 1, 146, 1, 146, 1, 147, 1, 147, 1, 147, 1, 147, 1, 148, 1, 148, 1, 148, 1, 148, 1, 149, 1, 149, 1, 149, 1, 149, 1, 149, 1, 150, 1, 150, 1, 150, 1, 150, 1, 150, 1, 150, 1, 150, 1, 150, 1, 150, 1, 150, 1, 151, 1, 151, 1, 151, 1, 151, 1, 152, 1, 152, 1, 152, 1, 152, 1, 153, 1, 153, 1, 153, 1, 153, 1, 154, 1, 154, 1, 154, 1, 154, 1, 154, 1, 155, 1, 155, 1, 156, 1, 156, 1, 156, 1, 156, 1, 156, 4, 156, 1188, 8, 156, 11, 156, 12, 156, 1189, 1, 157, 1, 157, 1, 157, 1, 157, 1, 158, 1, 158, 1, 158, 1, 158, 1, 159, 1, 159, 1, 159, 1, 159, 2, 513, 604, 0, 160, 12, 1, 14, 2, 16, 3, 18, 4, 20, 5, 22, 6, 24, 7, 26, 8, 28, 9, 30, 10, 32, 11, 34, 12, 36, 13, 38, 14, 40, 
15, 42, 16, 44, 17, 46, 18, 48, 19, 50, 20, 52, 21, 54, 22, 56, 0, 58, 0, 60, 23, 62, 24, 64, 25, 66, 26, 68, 0, 70, 0, 72, 0, 74, 0, 76, 0, 78, 0, 80, 0, 82, 0, 84, 0, 86, 0, 88, 27, 90, 28, 92, 29, 94, 30, 96, 31, 98, 32, 100, 33, 102, 34, 104, 35, 106, 36, 108, 37, 110, 38, 112, 39, 114, 40, 116, 41, 118, 42, 120, 43, 122, 44, 124, 45, 126, 46, 128, 47, 130, 48, 132, 49, 134, 50, 136, 51, 138, 52, 140, 53, 142, 54, 144, 55, 146, 56, 148, 57, 150, 58, 152, 59, 154, 60, 156, 61, 158, 62, 160, 63, 162, 64, 164, 65, 166, 66, 168, 67, 170, 0, 172, 68, 174, 69, 176, 70, 178, 71, 180, 0, 182, 0, 184, 0, 186, 0, 188, 0, 190, 0, 192, 72, 194, 73, 196, 0, 198, 74, 200, 0, 202, 75, 204, 76, 206, 77, 208, 0, 210, 0, 212, 0, 214, 0, 216, 0, 218, 78, 220, 79, 222, 80, 224, 81, 226, 0, 228, 0, 230, 0, 232, 0, 234, 82, 236, 0, 238, 83, 240, 84, 242, 85, 244, 0, 246, 0, 248, 86, 250, 87, 252, 0, 254, 88, 256, 0, 258, 0, 260, 89, 262, 90, 264, 91, 266, 0, 268, 0, 270, 0, 272, 0, 274, 0, 276, 0, 278, 0, 280, 92, 282, 93, 284, 94, 286, 0, 288, 0, 290, 0, 292, 0, 294, 95, 296, 96, 298, 97, 300, 0, 302, 98, 304, 99, 306, 100, 308, 101, 310, 0, 312, 102, 314, 103, 316, 104, 318, 105, 320, 0, 322, 106, 324, 107, 326, 108, 328, 109, 330, 110, 12, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 13, 6, 0, 9, 10, 13, 13, 32, 32, 47, 47, 91, 91, 93, 93, 2, 0, 10, 10, 13, 13, 3, 0, 9, 10, 13, 13, 32, 32, 1, 0, 48, 57, 2, 0, 65, 90, 97, 122, 5, 0, 34, 34, 92, 92, 110, 110, 114, 114, 116, 116, 4, 0, 10, 10, 13, 13, 34, 34, 92, 92, 2, 0, 69, 69, 101, 101, 2, 0, 43, 43, 45, 45, 1, 0, 96, 96, 10, 0, 9, 10, 13, 13, 32, 32, 44, 44, 47, 47, 61, 61, 91, 91, 93, 93, 96, 96, 124, 124, 2, 0, 42, 42, 47, 47, 11, 0, 9, 10, 13, 13, 32, 32, 34, 35, 44, 44, 47, 47, 58, 58, 60, 60, 62, 63, 92, 92, 124, 124, 1230, 0, 12, 1, 0, 0, 0, 0, 14, 1, 0, 0, 0, 0, 16, 1, 0, 0, 0, 0, 18, 1, 0, 0, 0, 0, 20, 1, 0, 0, 0, 0, 22, 1, 0, 0, 0, 0, 24, 1, 0, 0, 0, 0, 26, 1, 0, 0, 0, 0, 28, 1, 0, 0, 0, 0, 30, 1, 0, 0, 0, 0, 32, 1, 0, 0, 0, 
0, 34, 1, 0, 0, 0, 0, 36, 1, 0, 0, 0, 0, 38, 1, 0, 0, 0, 0, 40, 1, 0, 0, 0, 0, 42, 1, 0, 0, 0, 0, 44, 1, 0, 0, 0, 0, 46, 1, 0, 0, 0, 0, 48, 1, 0, 0, 0, 0, 50, 1, 0, 0, 0, 0, 52, 1, 0, 0, 0, 0, 54, 1, 0, 0, 0, 1, 56, 1, 0, 0, 0, 1, 58, 1, 0, 0, 0, 1, 60, 1, 0, 0, 0, 1, 62, 1, 0, 0, 0, 1, 64, 1, 0, 0, 0, 2, 66, 1, 0, 0, 0, 2, 88, 1, 0, 0, 0, 2, 90, 1, 0, 0, 0, 2, 92, 1, 0, 0, 0, 2, 94, 1, 0, 0, 0, 2, 96, 1, 0, 0, 0, 2, 98, 1, 0, 0, 0, 2, 100, 1, 0, 0, 0, 2, 102, 1, 0, 0, 0, 2, 104, 1, 0, 0, 0, 2, 106, 1, 0, 0, 0, 2, 108, 1, 0, 0, 0, 2, 110, 1, 0, 0, 0, 2, 112, 1, 0, 0, 0, 2, 114, 1, 0, 0, 0, 2, 116, 1, 0, 0, 0, 2, 118, 1, 0, 0, 0, 2, 120, 1, 0, 0, 0, 2, 122, 1, 0, 0, 0, 2, 124, 1, 0, 0, 0, 2, 126, 1, 0, 0, 0, 2, 128, 1, 0, 0, 0, 2, 130, 1, 0, 0, 0, 2, 132, 1, 0, 0, 0, 2, 134, 1, 0, 0, 0, 2, 136, 1, 0, 0, 0, 2, 138, 1, 0, 0, 0, 2, 140, 1, 0, 0, 0, 2, 142, 1, 0, 0, 0, 2, 144, 1, 0, 0, 0, 2, 146, 1, 0, 0, 0, 2, 148, 1, 0, 0, 0, 2, 150, 1, 0, 0, 0, 2, 152, 1, 0, 0, 0, 2, 154, 1, 0, 0, 0, 2, 156, 1, 0, 0, 0, 2, 158, 1, 0, 0, 0, 2, 160, 1, 0, 0, 0, 2, 162, 1, 0, 0, 0, 2, 164, 1, 0, 0, 0, 2, 166, 1, 0, 0, 0, 2, 168, 1, 0, 0, 0, 2, 172, 1, 0, 0, 0, 2, 174, 1, 0, 0, 0, 2, 176, 1, 0, 0, 0, 2, 178, 1, 0, 0, 0, 3, 180, 1, 0, 0, 0, 3, 182, 1, 0, 0, 0, 3, 184, 1, 0, 0, 0, 3, 186, 1, 0, 0, 0, 3, 188, 1, 0, 0, 0, 3, 190, 1, 0, 0, 0, 3, 192, 1, 0, 0, 0, 3, 194, 1, 0, 0, 0, 3, 198, 1, 0, 0, 0, 3, 200, 1, 0, 0, 0, 3, 202, 1, 0, 0, 0, 3, 204, 1, 0, 0, 0, 3, 206, 1, 0, 0, 0, 4, 208, 1, 0, 0, 0, 4, 210, 1, 0, 0, 0, 4, 212, 1, 0, 0, 0, 4, 218, 1, 0, 0, 0, 4, 220, 1, 0, 0, 0, 4, 222, 1, 0, 0, 0, 4, 224, 1, 0, 0, 0, 5, 226, 1, 0, 0, 0, 5, 228, 1, 0, 0, 0, 5, 230, 1, 0, 0, 0, 5, 232, 1, 0, 0, 0, 5, 234, 1, 0, 0, 0, 5, 236, 1, 0, 0, 0, 5, 238, 1, 0, 0, 0, 5, 240, 1, 0, 0, 0, 5, 242, 1, 0, 0, 0, 6, 244, 1, 0, 0, 0, 6, 246, 1, 0, 0, 0, 6, 248, 1, 0, 0, 0, 6, 250, 1, 0, 0, 0, 6, 254, 1, 0, 0, 0, 6, 256, 1, 0, 0, 0, 6, 258, 1, 0, 0, 0, 6, 260, 1, 0, 0, 0, 6, 262, 1, 0, 0, 0, 6, 264, 1, 0, 0, 0, 7, 
266, 1, 0, 0, 0, 7, 268, 1, 0, 0, 0, 7, 270, 1, 0, 0, 0, 7, 272, 1, 0, 0, 0, 7, 274, 1, 0, 0, 0, 7, 276, 1, 0, 0, 0, 7, 278, 1, 0, 0, 0, 7, 280, 1, 0, 0, 0, 7, 282, 1, 0, 0, 0, 7, 284, 1, 0, 0, 0, 8, 286, 1, 0, 0, 0, 8, 288, 1, 0, 0, 0, 8, 290, 1, 0, 0, 0, 8, 292, 1, 0, 0, 0, 8, 294, 1, 0, 0, 0, 8, 296, 1, 0, 0, 0, 8, 298, 1, 0, 0, 0, 9, 300, 1, 0, 0, 0, 9, 302, 1, 0, 0, 0, 9, 304, 1, 0, 0, 0, 9, 306, 1, 0, 0, 0, 9, 308, 1, 0, 0, 0, 10, 310, 1, 0, 0, 0, 10, 312, 1, 0, 0, 0, 10, 314, 1, 0, 0, 0, 10, 316, 1, 0, 0, 0, 10, 318, 1, 0, 0, 0, 11, 320, 1, 0, 0, 0, 11, 322, 1, 0, 0, 0, 11, 324, 1, 0, 0, 0, 11, 326, 1, 0, 0, 0, 11, 328, 1, 0, 0, 0, 11, 330, 1, 0, 0, 0, 12, 332, 1, 0, 0, 0, 14, 342, 1, 0, 0, 0, 16, 349, 1, 0, 0, 0, 18, 358, 1, 0, 0, 0, 20, 365, 1, 0, 0, 0, 22, 375, 1, 0, 0, 0, 24, 382, 1, 0, 0, 0, 26, 389, 1, 0, 0, 0, 28, 403, 1, 0, 0, 0, 30, 410, 1, 0, 0, 0, 32, 418, 1, 0, 0, 0, 34, 425, 1, 0, 0, 0, 36, 437, 1, 0, 0, 0, 38, 446, 1, 0, 0, 0, 40, 452, 1, 0, 0, 0, 42, 459, 1, 0, 0, 0, 44, 466, 1, 0, 0, 0, 46, 474, 1, 0, 0, 0, 48, 483, 1, 0, 0, 0, 50, 489, 1, 0, 0, 0, 52, 506, 1, 0, 0, 0, 54, 522, 1, 0, 0, 0, 56, 528, 1, 0, 0, 0, 58, 533, 1, 0, 0, 0, 60, 538, 1, 0, 0, 0, 62, 542, 1, 0, 0, 0, 64, 546, 1, 0, 0, 0, 66, 550, 1, 0, 0, 0, 68, 554, 1, 0, 0, 0, 70, 556, 1, 0, 0, 0, 72, 558, 1, 0, 0, 0, 74, 561, 1, 0, 0, 0, 76, 563, 1, 0, 0, 0, 78, 572, 1, 0, 0, 0, 80, 574, 1, 0, 0, 0, 82, 579, 1, 0, 0, 0, 84, 581, 1, 0, 0, 0, 86, 586, 1, 0, 0, 0, 88, 617, 1, 0, 0, 0, 90, 620, 1, 0, 0, 0, 92, 666, 1, 0, 0, 0, 94, 668, 1, 0, 0, 0, 96, 671, 1, 0, 0, 0, 98, 675, 1, 0, 0, 0, 100, 679, 1, 0, 0, 0, 102, 681, 1, 0, 0, 0, 104, 684, 1, 0, 0, 0, 106, 686, 1, 0, 0, 0, 108, 691, 1, 0, 0, 0, 110, 693, 1, 0, 0, 0, 112, 699, 1, 0, 0, 0, 114, 705, 1, 0, 0, 0, 116, 710, 1, 0, 0, 0, 118, 712, 1, 0, 0, 0, 120, 715, 1, 0, 0, 0, 122, 718, 1, 0, 0, 0, 124, 723, 1, 0, 0, 0, 126, 727, 1, 0, 0, 0, 128, 732, 1, 0, 0, 0, 130, 738, 1, 0, 0, 0, 132, 741, 1, 0, 0, 0, 134, 743, 1, 0, 0, 0, 136, 749, 
1, 0, 0, 0, 138, 751, 1, 0, 0, 0, 140, 756, 1, 0, 0, 0, 142, 759, 1, 0, 0, 0, 144, 762, 1, 0, 0, 0, 146, 765, 1, 0, 0, 0, 148, 767, 1, 0, 0, 0, 150, 770, 1, 0, 0, 0, 152, 772, 1, 0, 0, 0, 154, 775, 1, 0, 0, 0, 156, 777, 1, 0, 0, 0, 158, 779, 1, 0, 0, 0, 160, 781, 1, 0, 0, 0, 162, 783, 1, 0, 0, 0, 164, 785, 1, 0, 0, 0, 166, 790, 1, 0, 0, 0, 168, 811, 1, 0, 0, 0, 170, 813, 1, 0, 0, 0, 172, 821, 1, 0, 0, 0, 174, 823, 1, 0, 0, 0, 176, 827, 1, 0, 0, 0, 178, 831, 1, 0, 0, 0, 180, 835, 1, 0, 0, 0, 182, 840, 1, 0, 0, 0, 184, 844, 1, 0, 0, 0, 186, 848, 1, 0, 0, 0, 188, 852, 1, 0, 0, 0, 190, 856, 1, 0, 0, 0, 192, 860, 1, 0, 0, 0, 194, 868, 1, 0, 0, 0, 196, 880, 1, 0, 0, 0, 198, 883, 1, 0, 0, 0, 200, 887, 1, 0, 0, 0, 202, 891, 1, 0, 0, 0, 204, 895, 1, 0, 0, 0, 206, 899, 1, 0, 0, 0, 208, 903, 1, 0, 0, 0, 210, 908, 1, 0, 0, 0, 212, 912, 1, 0, 0, 0, 214, 920, 1, 0, 0, 0, 216, 941, 1, 0, 0, 0, 218, 945, 1, 0, 0, 0, 220, 949, 1, 0, 0, 0, 222, 953, 1, 0, 0, 0, 224, 957, 1, 0, 0, 0, 226, 961, 1, 0, 0, 0, 228, 966, 1, 0, 0, 0, 230, 970, 1, 0, 0, 0, 232, 974, 1, 0, 0, 0, 234, 978, 1, 0, 0, 0, 236, 981, 1, 0, 0, 0, 238, 985, 1, 0, 0, 0, 240, 989, 1, 0, 0, 0, 242, 993, 1, 0, 0, 0, 244, 997, 1, 0, 0, 0, 246, 1002, 1, 0, 0, 0, 248, 1007, 1, 0, 0, 0, 250, 1012, 1, 0, 0, 0, 252, 1019, 1, 0, 0, 0, 254, 1028, 1, 0, 0, 0, 256, 1035, 1, 0, 0, 0, 258, 1039, 1, 0, 0, 0, 260, 1043, 1, 0, 0, 0, 262, 1047, 1, 0, 0, 0, 264, 1051, 1, 0, 0, 0, 266, 1055, 1, 0, 0, 0, 268, 1061, 1, 0, 0, 0, 270, 1065, 1, 0, 0, 0, 272, 1069, 1, 0, 0, 0, 274, 1073, 1, 0, 0, 0, 276, 1077, 1, 0, 0, 0, 278, 1081, 1, 0, 0, 0, 280, 1085, 1, 0, 0, 0, 282, 1089, 1, 0, 0, 0, 284, 1093, 1, 0, 0, 0, 286, 1097, 1, 0, 0, 0, 288, 1102, 1, 0, 0, 0, 290, 1106, 1, 0, 0, 0, 292, 1110, 1, 0, 0, 0, 294, 1114, 1, 0, 0, 0, 296, 1118, 1, 0, 0, 0, 298, 1122, 1, 0, 0, 0, 300, 1126, 1, 0, 0, 0, 302, 1131, 1, 0, 0, 0, 304, 1136, 1, 0, 0, 0, 306, 1140, 1, 0, 0, 0, 308, 1144, 1, 0, 0, 0, 310, 1148, 1, 0, 0, 0, 312, 1153, 1, 0, 0, 0, 314, 1163, 1, 0, 
0, 0, 316, 1167, 1, 0, 0, 0, 318, 1171, 1, 0, 0, 0, 320, 1175, 1, 0, 0, 0, 322, 1180, 1, 0, 0, 0, 324, 1187, 1, 0, 0, 0, 326, 1191, 1, 0, 0, 0, 328, 1195, 1, 0, 0, 0, 330, 1199, 1, 0, 0, 0, 332, 333, 5, 100, 0, 0, 333, 334, 5, 105, 0, 0, 334, 335, 5, 115, 0, 0, 335, 336, 5, 115, 0, 0, 336, 337, 5, 101, 0, 0, 337, 338, 5, 99, 0, 0, 338, 339, 5, 116, 0, 0, 339, 340, 1, 0, 0, 0, 340, 341, 6, 0, 0, 0, 341, 13, 1, 0, 0, 0, 342, 343, 5, 100, 0, 0, 343, 344, 5, 114, 0, 0, 344, 345, 5, 111, 0, 0, 345, 346, 5, 112, 0, 0, 346, 347, 1, 0, 0, 0, 347, 348, 6, 1, 1, 0, 348, 15, 1, 0, 0, 0, 349, 350, 5, 101, 0, 0, 350, 351, 5, 110, 0, 0, 351, 352, 5, 114, 0, 0, 352, 353, 5, 105, 0, 0, 353, 354, 5, 99, 0, 0, 354, 355, 5, 104, 0, 0, 355, 356, 1, 0, 0, 0, 356, 357, 6, 2, 2, 0, 357, 17, 1, 0, 0, 0, 358, 359, 5, 101, 0, 0, 359, 360, 5, 118, 0, 0, 360, 361, 5, 97, 0, 0, 361, 362, 5, 108, 0, 0, 362, 363, 1, 0, 0, 0, 363, 364, 6, 3, 0, 0, 364, 19, 1, 0, 0, 0, 365, 366, 5, 101, 0, 0, 366, 367, 5, 120, 0, 0, 367, 368, 5, 112, 0, 0, 368, 369, 5, 108, 0, 0, 369, 370, 5, 97, 0, 0, 370, 371, 5, 105, 0, 0, 371, 372, 5, 110, 0, 0, 372, 373, 1, 0, 0, 0, 373, 374, 6, 4, 3, 0, 374, 21, 1, 0, 0, 0, 375, 376, 5, 102, 0, 0, 376, 377, 5, 114, 0, 0, 377, 378, 5, 111, 0, 0, 378, 379, 5, 109, 0, 0, 379, 380, 1, 0, 0, 0, 380, 381, 6, 5, 4, 0, 381, 23, 1, 0, 0, 0, 382, 383, 5, 103, 0, 0, 383, 384, 5, 114, 0, 0, 384, 385, 5, 111, 0, 0, 385, 386, 5, 107, 0, 0, 386, 387, 1, 0, 0, 0, 387, 388, 6, 6, 0, 0, 388, 25, 1, 0, 0, 0, 389, 390, 5, 105, 0, 0, 390, 391, 5, 110, 0, 0, 391, 392, 5, 108, 0, 0, 392, 393, 5, 105, 0, 0, 393, 394, 5, 110, 0, 0, 394, 395, 5, 101, 0, 0, 395, 396, 5, 115, 0, 0, 396, 397, 5, 116, 0, 0, 397, 398, 5, 97, 0, 0, 398, 399, 5, 116, 0, 0, 399, 400, 5, 115, 0, 0, 400, 401, 1, 0, 0, 0, 401, 402, 6, 7, 0, 0, 402, 27, 1, 0, 0, 0, 403, 404, 5, 107, 0, 0, 404, 405, 5, 101, 0, 0, 405, 406, 5, 101, 0, 0, 406, 407, 5, 112, 0, 0, 407, 408, 1, 0, 0, 0, 408, 409, 6, 8, 1, 0, 409, 29, 1, 0, 0, 0, 410, 
411, 5, 108, 0, 0, 411, 412, 5, 105, 0, 0, 412, 413, 5, 109, 0, 0, 413, 414, 5, 105, 0, 0, 414, 415, 5, 116, 0, 0, 415, 416, 1, 0, 0, 0, 416, 417, 6, 9, 0, 0, 417, 31, 1, 0, 0, 0, 418, 419, 5, 109, 0, 0, 419, 420, 5, 101, 0, 0, 420, 421, 5, 116, 0, 0, 421, 422, 5, 97, 0, 0, 422, 423, 1, 0, 0, 0, 423, 424, 6, 10, 5, 0, 424, 33, 1, 0, 0, 0, 425, 426, 5, 109, 0, 0, 426, 427, 5, 118, 0, 0, 427, 428, 5, 95, 0, 0, 428, 429, 5, 101, 0, 0, 429, 430, 5, 120, 0, 0, 430, 431, 5, 112, 0, 0, 431, 432, 5, 97, 0, 0, 432, 433, 5, 110, 0, 0, 433, 434, 5, 100, 0, 0, 434, 435, 1, 0, 0, 0, 435, 436, 6, 11, 6, 0, 436, 35, 1, 0, 0, 0, 437, 438, 5, 114, 0, 0, 438, 439, 5, 101, 0, 0, 439, 440, 5, 110, 0, 0, 440, 441, 5, 97, 0, 0, 441, 442, 5, 109, 0, 0, 442, 443, 5, 101, 0, 0, 443, 444, 1, 0, 0, 0, 444, 445, 6, 12, 7, 0, 445, 37, 1, 0, 0, 0, 446, 447, 5, 114, 0, 0, 447, 448, 5, 111, 0, 0, 448, 449, 5, 119, 0, 0, 449, 450, 1, 0, 0, 0, 450, 451, 6, 13, 0, 0, 451, 39, 1, 0, 0, 0, 452, 453, 5, 115, 0, 0, 453, 454, 5, 104, 0, 0, 454, 455, 5, 111, 0, 0, 455, 456, 5, 119, 0, 0, 456, 457, 1, 0, 0, 0, 457, 458, 6, 14, 8, 0, 458, 41, 1, 0, 0, 0, 459, 460, 5, 115, 0, 0, 460, 461, 5, 111, 0, 0, 461, 462, 5, 114, 0, 0, 462, 463, 5, 116, 0, 0, 463, 464, 1, 0, 0, 0, 464, 465, 6, 15, 0, 0, 465, 43, 1, 0, 0, 0, 466, 467, 5, 115, 0, 0, 467, 468, 5, 116, 0, 0, 468, 469, 5, 97, 0, 0, 469, 470, 5, 116, 0, 0, 470, 471, 5, 115, 0, 0, 471, 472, 1, 0, 0, 0, 472, 473, 6, 16, 0, 0, 473, 45, 1, 0, 0, 0, 474, 475, 5, 119, 0, 0, 475, 476, 5, 104, 0, 0, 476, 477, 5, 101, 0, 0, 477, 478, 5, 114, 0, 0, 478, 479, 5, 101, 0, 0, 479, 480, 1, 0, 0, 0, 480, 481, 6, 17, 0, 0, 481, 47, 1, 0, 0, 0, 482, 484, 8, 0, 0, 0, 483, 482, 1, 0, 0, 0, 484, 485, 1, 0, 0, 0, 485, 483, 1, 0, 0, 0, 485, 486, 1, 0, 0, 0, 486, 487, 1, 0, 0, 0, 487, 488, 6, 18, 0, 0, 488, 49, 1, 0, 0, 0, 489, 490, 5, 47, 0, 0, 490, 491, 5, 47, 0, 0, 491, 495, 1, 0, 0, 0, 492, 494, 8, 1, 0, 0, 493, 492, 1, 0, 0, 0, 494, 497, 1, 0, 0, 0, 495, 493, 1, 0, 0, 0, 495, 
496, 1, 0, 0, 0, 496, 499, 1, 0, 0, 0, 497, 495, 1, 0, 0, 0, 498, 500, 5, 13, 0, 0, 499, 498, 1, 0, 0, 0, 499, 500, 1, 0, 0, 0, 500, 502, 1, 0, 0, 0, 501, 503, 5, 10, 0, 0, 502, 501, 1, 0, 0, 0, 502, 503, 1, 0, 0, 0, 503, 504, 1, 0, 0, 0, 504, 505, 6, 19, 9, 0, 505, 51, 1, 0, 0, 0, 506, 507, 5, 47, 0, 0, 507, 508, 5, 42, 0, 0, 508, 513, 1, 0, 0, 0, 509, 512, 3, 52, 20, 0, 510, 512, 9, 0, 0, 0, 511, 509, 1, 0, 0, 0, 511, 510, 1, 0, 0, 0, 512, 515, 1, 0, 0, 0, 513, 514, 1, 0, 0, 0, 513, 511, 1, 0, 0, 0, 514, 516, 1, 0, 0, 0, 515, 513, 1, 0, 0, 0, 516, 517, 5, 42, 0, 0, 517, 518, 5, 47, 0, 0, 518, 519, 1, 0, 0, 0, 519, 520, 6, 20, 9, 0, 520, 53, 1, 0, 0, 0, 521, 523, 7, 2, 0, 0, 522, 521, 1, 0, 0, 0, 523, 524, 1, 0, 0, 0, 524, 522, 1, 0, 0, 0, 524, 525, 1, 0, 0, 0, 525, 526, 1, 0, 0, 0, 526, 527, 6, 21, 9, 0, 527, 55, 1, 0, 0, 0, 528, 529, 3, 164, 76, 0, 529, 530, 1, 0, 0, 0, 530, 531, 6, 22, 10, 0, 531, 532, 6, 22, 11, 0, 532, 57, 1, 0, 0, 0, 533, 534, 3, 66, 27, 0, 534, 535, 1, 0, 0, 0, 535, 536, 6, 23, 12, 0, 536, 537, 6, 23, 13, 0, 537, 59, 1, 0, 0, 0, 538, 539, 3, 54, 21, 0, 539, 540, 1, 0, 0, 0, 540, 541, 6, 24, 9, 0, 541, 61, 1, 0, 0, 0, 542, 543, 3, 50, 19, 0, 543, 544, 1, 0, 0, 0, 544, 545, 6, 25, 9, 0, 545, 63, 1, 0, 0, 0, 546, 547, 3, 52, 20, 0, 547, 548, 1, 0, 0, 0, 548, 549, 6, 26, 9, 0, 549, 65, 1, 0, 0, 0, 550, 551, 5, 124, 0, 0, 551, 552, 1, 0, 0, 0, 552, 553, 6, 27, 13, 0, 553, 67, 1, 0, 0, 0, 554, 555, 7, 3, 0, 0, 555, 69, 1, 0, 0, 0, 556, 557, 7, 4, 0, 0, 557, 71, 1, 0, 0, 0, 558, 559, 5, 92, 0, 0, 559, 560, 7, 5, 0, 0, 560, 73, 1, 0, 0, 0, 561, 562, 8, 6, 0, 0, 562, 75, 1, 0, 0, 0, 563, 565, 7, 7, 0, 0, 564, 566, 7, 8, 0, 0, 565, 564, 1, 0, 0, 0, 565, 566, 1, 0, 0, 0, 566, 568, 1, 0, 0, 0, 567, 569, 3, 68, 28, 0, 568, 567, 1, 0, 0, 0, 569, 570, 1, 0, 0, 0, 570, 568, 1, 0, 0, 0, 570, 571, 1, 0, 0, 0, 571, 77, 1, 0, 0, 0, 572, 573, 5, 64, 0, 0, 573, 79, 1, 0, 0, 0, 574, 575, 5, 96, 0, 0, 575, 81, 1, 0, 0, 0, 576, 580, 8, 9, 0, 0, 577, 578, 5, 96, 0, 
0, 578, 580, 5, 96, 0, 0, 579, 576, 1, 0, 0, 0, 579, 577, 1, 0, 0, 0, 580, 83, 1, 0, 0, 0, 581, 582, 5, 95, 0, 0, 582, 85, 1, 0, 0, 0, 583, 587, 3, 70, 29, 0, 584, 587, 3, 68, 28, 0, 585, 587, 3, 84, 36, 0, 586, 583, 1, 0, 0, 0, 586, 584, 1, 0, 0, 0, 586, 585, 1, 0, 0, 0, 587, 87, 1, 0, 0, 0, 588, 593, 5, 34, 0, 0, 589, 592, 3, 72, 30, 0, 590, 592, 3, 74, 31, 0, 591, 589, 1, 0, 0, 0, 591, 590, 1, 0, 0, 0, 592, 595, 1, 0, 0, 0, 593, 591, 1, 0, 0, 0, 593, 594, 1, 0, 0, 0, 594, 596, 1, 0, 0, 0, 595, 593, 1, 0, 0, 0, 596, 618, 5, 34, 0, 0, 597, 598, 5, 34, 0, 0, 598, 599, 5, 34, 0, 0, 599, 600, 5, 34, 0, 0, 600, 604, 1, 0, 0, 0, 601, 603, 8, 1, 0, 0, 602, 601, 1, 0, 0, 0, 603, 606, 1, 0, 0, 0, 604, 605, 1, 0, 0, 0, 604, 602, 1, 0, 0, 0, 605, 607, 1, 0, 0, 0, 606, 604, 1, 0, 0, 0, 607, 608, 5, 34, 0, 0, 608, 609, 5, 34, 0, 0, 609, 610, 5, 34, 0, 0, 610, 612, 1, 0, 0, 0, 611, 613, 5, 34, 0, 0, 612, 611, 1, 0, 0, 0, 612, 613, 1, 0, 0, 0, 613, 615, 1, 0, 0, 0, 614, 616, 5, 34, 0, 0, 615, 614, 1, 0, 0, 0, 615, 616, 1, 0, 0, 0, 616, 618, 1, 0, 0, 0, 617, 588, 1, 0, 0, 0, 617, 597, 1, 0, 0, 0, 618, 89, 1, 0, 0, 0, 619, 621, 3, 68, 28, 0, 620, 619, 1, 0, 0, 0, 621, 622, 1, 0, 0, 0, 622, 620, 1, 0, 0, 0, 622, 623, 1, 0, 0, 0, 623, 91, 1, 0, 0, 0, 624, 626, 3, 68, 28, 0, 625, 624, 1, 0, 0, 0, 626, 627, 1, 0, 0, 0, 627, 625, 1, 0, 0, 0, 627, 628, 1, 0, 0, 0, 628, 629, 1, 0, 0, 0, 629, 633, 3, 108, 48, 0, 630, 632, 3, 68, 28, 0, 631, 630, 1, 0, 0, 0, 632, 635, 1, 0, 0, 0, 633, 631, 1, 0, 0, 0, 633, 634, 1, 0, 0, 0, 634, 667, 1, 0, 0, 0, 635, 633, 1, 0, 0, 0, 636, 638, 3, 108, 48, 0, 637, 639, 3, 68, 28, 0, 638, 637, 1, 0, 0, 0, 639, 640, 1, 0, 0, 0, 640, 638, 1, 0, 0, 0, 640, 641, 1, 0, 0, 0, 641, 667, 1, 0, 0, 0, 642, 644, 3, 68, 28, 0, 643, 642, 1, 0, 0, 0, 644, 645, 1, 0, 0, 0, 645, 643, 1, 0, 0, 0, 645, 646, 1, 0, 0, 0, 646, 654, 1, 0, 0, 0, 647, 651, 3, 108, 48, 0, 648, 650, 3, 68, 28, 0, 649, 648, 1, 0, 0, 0, 650, 653, 1, 0, 0, 0, 651, 649, 1, 0, 0, 0, 651, 652, 1, 0, 0, 0, 
652, 655, 1, 0, 0, 0, 653, 651, 1, 0, 0, 0, 654, 647, 1, 0, 0, 0, 654, 655, 1, 0, 0, 0, 655, 656, 1, 0, 0, 0, 656, 657, 3, 76, 32, 0, 657, 667, 1, 0, 0, 0, 658, 660, 3, 108, 48, 0, 659, 661, 3, 68, 28, 0, 660, 659, 1, 0, 0, 0, 661, 662, 1, 0, 0, 0, 662, 660, 1, 0, 0, 0, 662, 663, 1, 0, 0, 0, 663, 664, 1, 0, 0, 0, 664, 665, 3, 76, 32, 0, 665, 667, 1, 0, 0, 0, 666, 625, 1, 0, 0, 0, 666, 636, 1, 0, 0, 0, 666, 643, 1, 0, 0, 0, 666, 658, 1, 0, 0, 0, 667, 93, 1, 0, 0, 0, 668, 669, 5, 98, 0, 0, 669, 670, 5, 121, 0, 0, 670, 95, 1, 0, 0, 0, 671, 672, 5, 97, 0, 0, 672, 673, 5, 110, 0, 0, 673, 674, 5, 100, 0, 0, 674, 97, 1, 0, 0, 0, 675, 676, 5, 97, 0, 0, 676, 677, 5, 115, 0, 0, 677, 678, 5, 99, 0, 0, 678, 99, 1, 0, 0, 0, 679, 680, 5, 61, 0, 0, 680, 101, 1, 0, 0, 0, 681, 682, 5, 58, 0, 0, 682, 683, 5, 58, 0, 0, 683, 103, 1, 0, 0, 0, 684, 685, 5, 44, 0, 0, 685, 105, 1, 0, 0, 0, 686, 687, 5, 100, 0, 0, 687, 688, 5, 101, 0, 0, 688, 689, 5, 115, 0, 0, 689, 690, 5, 99, 0, 0, 690, 107, 1, 0, 0, 0, 691, 692, 5, 46, 0, 0, 692, 109, 1, 0, 0, 0, 693, 694, 5, 102, 0, 0, 694, 695, 5, 97, 0, 0, 695, 696, 5, 108, 0, 0, 696, 697, 5, 115, 0, 0, 697, 698, 5, 101, 0, 0, 698, 111, 1, 0, 0, 0, 699, 700, 5, 102, 0, 0, 700, 701, 5, 105, 0, 0, 701, 702, 5, 114, 0, 0, 702, 703, 5, 115, 0, 0, 703, 704, 5, 116, 0, 0, 704, 113, 1, 0, 0, 0, 705, 706, 5, 108, 0, 0, 706, 707, 5, 97, 0, 0, 707, 708, 5, 115, 0, 0, 708, 709, 5, 116, 0, 0, 709, 115, 1, 0, 0, 0, 710, 711, 5, 40, 0, 0, 711, 117, 1, 0, 0, 0, 712, 713, 5, 105, 0, 0, 713, 714, 5, 110, 0, 0, 714, 119, 1, 0, 0, 0, 715, 716, 5, 105, 0, 0, 716, 717, 5, 115, 0, 0, 717, 121, 1, 0, 0, 0, 718, 719, 5, 108, 0, 0, 719, 720, 5, 105, 0, 0, 720, 721, 5, 107, 0, 0, 721, 722, 5, 101, 0, 0, 722, 123, 1, 0, 0, 0, 723, 724, 5, 110, 0, 0, 724, 725, 5, 111, 0, 0, 725, 726, 5, 116, 0, 0, 726, 125, 1, 0, 0, 0, 727, 728, 5, 110, 0, 0, 728, 729, 5, 117, 0, 0, 729, 730, 5, 108, 0, 0, 730, 731, 5, 108, 0, 0, 731, 127, 1, 0, 0, 0, 732, 733, 5, 110, 0, 0, 733, 734, 5, 117, 
0, 0, 734, 735, 5, 108, 0, 0, 735, 736, 5, 108, 0, 0, 736, 737, 5, 115, 0, 0, 737, 129, 1, 0, 0, 0, 738, 739, 5, 111, 0, 0, 739, 740, 5, 114, 0, 0, 740, 131, 1, 0, 0, 0, 741, 742, 5, 63, 0, 0, 742, 133, 1, 0, 0, 0, 743, 744, 5, 114, 0, 0, 744, 745, 5, 108, 0, 0, 745, 746, 5, 105, 0, 0, 746, 747, 5, 107, 0, 0, 747, 748, 5, 101, 0, 0, 748, 135, 1, 0, 0, 0, 749, 750, 5, 41, 0, 0, 750, 137, 1, 0, 0, 0, 751, 752, 5, 116, 0, 0, 752, 753, 5, 114, 0, 0, 753, 754, 5, 117, 0, 0, 754, 755, 5, 101, 0, 0, 755, 139, 1, 0, 0, 0, 756, 757, 5, 61, 0, 0, 757, 758, 5, 61, 0, 0, 758, 141, 1, 0, 0, 0, 759, 760, 5, 61, 0, 0, 760, 761, 5, 126, 0, 0, 761, 143, 1, 0, 0, 0, 762, 763, 5, 33, 0, 0, 763, 764, 5, 61, 0, 0, 764, 145, 1, 0, 0, 0, 765, 766, 5, 60, 0, 0, 766, 147, 1, 0, 0, 0, 767, 768, 5, 60, 0, 0, 768, 769, 5, 61, 0, 0, 769, 149, 1, 0, 0, 0, 770, 771, 5, 62, 0, 0, 771, 151, 1, 0, 0, 0, 772, 773, 5, 62, 0, 0, 773, 774, 5, 61, 0, 0, 774, 153, 1, 0, 0, 0, 775, 776, 5, 43, 0, 0, 776, 155, 1, 0, 0, 0, 777, 778, 5, 45, 0, 0, 778, 157, 1, 0, 0, 0, 779, 780, 5, 42, 0, 0, 780, 159, 1, 0, 0, 0, 781, 782, 5, 47, 0, 0, 782, 161, 1, 0, 0, 0, 783, 784, 5, 37, 0, 0, 784, 163, 1, 0, 0, 0, 785, 786, 5, 91, 0, 0, 786, 787, 1, 0, 0, 0, 787, 788, 6, 76, 0, 0, 788, 789, 6, 76, 0, 0, 789, 165, 1, 0, 0, 0, 790, 791, 5, 93, 0, 0, 791, 792, 1, 0, 0, 0, 792, 793, 6, 77, 13, 0, 793, 794, 6, 77, 13, 0, 794, 167, 1, 0, 0, 0, 795, 799, 3, 70, 29, 0, 796, 798, 3, 86, 37, 0, 797, 796, 1, 0, 0, 0, 798, 801, 1, 0, 0, 0, 799, 797, 1, 0, 0, 0, 799, 800, 1, 0, 0, 0, 800, 812, 1, 0, 0, 0, 801, 799, 1, 0, 0, 0, 802, 805, 3, 84, 36, 0, 803, 805, 3, 78, 33, 0, 804, 802, 1, 0, 0, 0, 804, 803, 1, 0, 0, 0, 805, 807, 1, 0, 0, 0, 806, 808, 3, 86, 37, 0, 807, 806, 1, 0, 0, 0, 808, 809, 1, 0, 0, 0, 809, 807, 1, 0, 0, 0, 809, 810, 1, 0, 0, 0, 810, 812, 1, 0, 0, 0, 811, 795, 1, 0, 0, 0, 811, 804, 1, 0, 0, 0, 812, 169, 1, 0, 0, 0, 813, 815, 3, 80, 34, 0, 814, 816, 3, 82, 35, 0, 815, 814, 1, 0, 0, 0, 816, 817, 1, 0, 0, 0, 817, 815, 
1, 0, 0, 0, 817, 818, 1, 0, 0, 0, 818, 819, 1, 0, 0, 0, 819, 820, 3, 80, 34, 0, 820, 171, 1, 0, 0, 0, 821, 822, 3, 170, 79, 0, 822, 173, 1, 0, 0, 0, 823, 824, 3, 50, 19, 0, 824, 825, 1, 0, 0, 0, 825, 826, 6, 81, 9, 0, 826, 175, 1, 0, 0, 0, 827, 828, 3, 52, 20, 0, 828, 829, 1, 0, 0, 0, 829, 830, 6, 82, 9, 0, 830, 177, 1, 0, 0, 0, 831, 832, 3, 54, 21, 0, 832, 833, 1, 0, 0, 0, 833, 834, 6, 83, 9, 0, 834, 179, 1, 0, 0, 0, 835, 836, 3, 66, 27, 0, 836, 837, 1, 0, 0, 0, 837, 838, 6, 84, 12, 0, 838, 839, 6, 84, 13, 0, 839, 181, 1, 0, 0, 0, 840, 841, 3, 164, 76, 0, 841, 842, 1, 0, 0, 0, 842, 843, 6, 85, 10, 0, 843, 183, 1, 0, 0, 0, 844, 845, 3, 166, 77, 0, 845, 846, 1, 0, 0, 0, 846, 847, 6, 86, 14, 0, 847, 185, 1, 0, 0, 0, 848, 849, 3, 104, 46, 0, 849, 850, 1, 0, 0, 0, 850, 851, 6, 87, 15, 0, 851, 187, 1, 0, 0, 0, 852, 853, 3, 100, 44, 0, 853, 854, 1, 0, 0, 0, 854, 855, 6, 88, 16, 0, 855, 189, 1, 0, 0, 0, 856, 857, 3, 88, 38, 0, 857, 858, 1, 0, 0, 0, 858, 859, 6, 89, 17, 0, 859, 191, 1, 0, 0, 0, 860, 861, 5, 111, 0, 0, 861, 862, 5, 112, 0, 0, 862, 863, 5, 116, 0, 0, 863, 864, 5, 105, 0, 0, 864, 865, 5, 111, 0, 0, 865, 866, 5, 110, 0, 0, 866, 867, 5, 115, 0, 0, 867, 193, 1, 0, 0, 0, 868, 869, 5, 109, 0, 0, 869, 870, 5, 101, 0, 0, 870, 871, 5, 116, 0, 0, 871, 872, 5, 97, 0, 0, 872, 873, 5, 100, 0, 0, 873, 874, 5, 97, 0, 0, 874, 875, 5, 116, 0, 0, 875, 876, 5, 97, 0, 0, 876, 195, 1, 0, 0, 0, 877, 881, 8, 10, 0, 0, 878, 879, 5, 47, 0, 0, 879, 881, 8, 11, 0, 0, 880, 877, 1, 0, 0, 0, 880, 878, 1, 0, 0, 0, 881, 197, 1, 0, 0, 0, 882, 884, 3, 196, 92, 0, 883, 882, 1, 0, 0, 0, 884, 885, 1, 0, 0, 0, 885, 883, 1, 0, 0, 0, 885, 886, 1, 0, 0, 0, 886, 199, 1, 0, 0, 0, 887, 888, 3, 172, 80, 0, 888, 889, 1, 0, 0, 0, 889, 890, 6, 94, 18, 0, 890, 201, 1, 0, 0, 0, 891, 892, 3, 50, 19, 0, 892, 893, 1, 0, 0, 0, 893, 894, 6, 95, 9, 0, 894, 203, 1, 0, 0, 0, 895, 896, 3, 52, 20, 0, 896, 897, 1, 0, 0, 0, 897, 898, 6, 96, 9, 0, 898, 205, 1, 0, 0, 0, 899, 900, 3, 54, 21, 0, 900, 901, 1, 0, 0, 0, 901, 
902, 6, 97, 9, 0, 902, 207, 1, 0, 0, 0, 903, 904, 3, 66, 27, 0, 904, 905, 1, 0, 0, 0, 905, 906, 6, 98, 12, 0, 906, 907, 6, 98, 13, 0, 907, 209, 1, 0, 0, 0, 908, 909, 3, 108, 48, 0, 909, 910, 1, 0, 0, 0, 910, 911, 6, 99, 19, 0, 911, 211, 1, 0, 0, 0, 912, 913, 3, 104, 46, 0, 913, 914, 1, 0, 0, 0, 914, 915, 6, 100, 15, 0, 915, 213, 1, 0, 0, 0, 916, 921, 3, 70, 29, 0, 917, 921, 3, 68, 28, 0, 918, 921, 3, 84, 36, 0, 919, 921, 3, 158, 73, 0, 920, 916, 1, 0, 0, 0, 920, 917, 1, 0, 0, 0, 920, 918, 1, 0, 0, 0, 920, 919, 1, 0, 0, 0, 921, 215, 1, 0, 0, 0, 922, 925, 3, 70, 29, 0, 923, 925, 3, 158, 73, 0, 924, 922, 1, 0, 0, 0, 924, 923, 1, 0, 0, 0, 925, 929, 1, 0, 0, 0, 926, 928, 3, 214, 101, 0, 927, 926, 1, 0, 0, 0, 928, 931, 1, 0, 0, 0, 929, 927, 1, 0, 0, 0, 929, 930, 1, 0, 0, 0, 930, 942, 1, 0, 0, 0, 931, 929, 1, 0, 0, 0, 932, 935, 3, 84, 36, 0, 933, 935, 3, 78, 33, 0, 934, 932, 1, 0, 0, 0, 934, 933, 1, 0, 0, 0, 935, 937, 1, 0, 0, 0, 936, 938, 3, 214, 101, 0, 937, 936, 1, 0, 0, 0, 938, 939, 1, 0, 0, 0, 939, 937, 1, 0, 0, 0, 939, 940, 1, 0, 0, 0, 940, 942, 1, 0, 0, 0, 941, 924, 1, 0, 0, 0, 941, 934, 1, 0, 0, 0, 942, 217, 1, 0, 0, 0, 943, 946, 3, 216, 102, 0, 944, 946, 3, 170, 79, 0, 945, 943, 1, 0, 0, 0, 945, 944, 1, 0, 0, 0, 946, 947, 1, 0, 0, 0, 947, 945, 1, 0, 0, 0, 947, 948, 1, 0, 0, 0, 948, 219, 1, 0, 0, 0, 949, 950, 3, 50, 19, 0, 950, 951, 1, 0, 0, 0, 951, 952, 6, 104, 9, 0, 952, 221, 1, 0, 0, 0, 953, 954, 3, 52, 20, 0, 954, 955, 1, 0, 0, 0, 955, 956, 6, 105, 9, 0, 956, 223, 1, 0, 0, 0, 957, 958, 3, 54, 21, 0, 958, 959, 1, 0, 0, 0, 959, 960, 6, 106, 9, 0, 960, 225, 1, 0, 0, 0, 961, 962, 3, 66, 27, 0, 962, 963, 1, 0, 0, 0, 963, 964, 6, 107, 12, 0, 964, 965, 6, 107, 13, 0, 965, 227, 1, 0, 0, 0, 966, 967, 3, 100, 44, 0, 967, 968, 1, 0, 0, 0, 968, 969, 6, 108, 16, 0, 969, 229, 1, 0, 0, 0, 970, 971, 3, 104, 46, 0, 971, 972, 1, 0, 0, 0, 972, 973, 6, 109, 15, 0, 973, 231, 1, 0, 0, 0, 974, 975, 3, 108, 48, 0, 975, 976, 1, 0, 0, 0, 976, 977, 6, 110, 19, 0, 977, 233, 1, 0, 0, 0, 
978, 979, 5, 97, 0, 0, 979, 980, 5, 115, 0, 0, 980, 235, 1, 0, 0, 0, 981, 982, 3, 218, 103, 0, 982, 983, 1, 0, 0, 0, 983, 984, 6, 112, 20, 0, 984, 237, 1, 0, 0, 0, 985, 986, 3, 50, 19, 0, 986, 987, 1, 0, 0, 0, 987, 988, 6, 113, 9, 0, 988, 239, 1, 0, 0, 0, 989, 990, 3, 52, 20, 0, 990, 991, 1, 0, 0, 0, 991, 992, 6, 114, 9, 0, 992, 241, 1, 0, 0, 0, 993, 994, 3, 54, 21, 0, 994, 995, 1, 0, 0, 0, 995, 996, 6, 115, 9, 0, 996, 243, 1, 0, 0, 0, 997, 998, 3, 66, 27, 0, 998, 999, 1, 0, 0, 0, 999, 1000, 6, 116, 12, 0, 1000, 1001, 6, 116, 13, 0, 1001, 245, 1, 0, 0, 0, 1002, 1003, 3, 164, 76, 0, 1003, 1004, 1, 0, 0, 0, 1004, 1005, 6, 117, 10, 0, 1005, 1006, 6, 117, 21, 0, 1006, 247, 1, 0, 0, 0, 1007, 1008, 5, 111, 0, 0, 1008, 1009, 5, 110, 0, 0, 1009, 1010, 1, 0, 0, 0, 1010, 1011, 6, 118, 22, 0, 1011, 249, 1, 0, 0, 0, 1012, 1013, 5, 119, 0, 0, 1013, 1014, 5, 105, 0, 0, 1014, 1015, 5, 116, 0, 0, 1015, 1016, 5, 104, 0, 0, 1016, 1017, 1, 0, 0, 0, 1017, 1018, 6, 119, 22, 0, 1018, 251, 1, 0, 0, 0, 1019, 1020, 8, 12, 0, 0, 1020, 253, 1, 0, 0, 0, 1021, 1023, 3, 252, 120, 0, 1022, 1021, 1, 0, 0, 0, 1023, 1024, 1, 0, 0, 0, 1024, 1022, 1, 0, 0, 0, 1024, 1025, 1, 0, 0, 0, 1025, 1026, 1, 0, 0, 0, 1026, 1027, 3, 322, 155, 0, 1027, 1029, 1, 0, 0, 0, 1028, 1022, 1, 0, 0, 0, 1028, 1029, 1, 0, 0, 0, 1029, 1031, 1, 0, 0, 0, 1030, 1032, 3, 252, 120, 0, 1031, 1030, 1, 0, 0, 0, 1032, 1033, 1, 0, 0, 0, 1033, 1031, 1, 0, 0, 0, 1033, 1034, 1, 0, 0, 0, 1034, 255, 1, 0, 0, 0, 1035, 1036, 3, 172, 80, 0, 1036, 1037, 1, 0, 0, 0, 1037, 1038, 6, 122, 18, 0, 1038, 257, 1, 0, 0, 0, 1039, 1040, 3, 254, 121, 0, 1040, 1041, 1, 0, 0, 0, 1041, 1042, 6, 123, 23, 0, 1042, 259, 1, 0, 0, 0, 1043, 1044, 3, 50, 19, 0, 1044, 1045, 1, 0, 0, 0, 1045, 1046, 6, 124, 9, 0, 1046, 261, 1, 0, 0, 0, 1047, 1048, 3, 52, 20, 0, 1048, 1049, 1, 0, 0, 0, 1049, 1050, 6, 125, 9, 0, 1050, 263, 1, 0, 0, 0, 1051, 1052, 3, 54, 21, 0, 1052, 1053, 1, 0, 0, 0, 1053, 1054, 6, 126, 9, 0, 1054, 265, 1, 0, 0, 0, 1055, 1056, 3, 66, 27, 0, 1056, 1057, 
1, 0, 0, 0, 1057, 1058, 6, 127, 12, 0, 1058, 1059, 6, 127, 13, 0, 1059, 1060, 6, 127, 13, 0, 1060, 267, 1, 0, 0, 0, 1061, 1062, 3, 100, 44, 0, 1062, 1063, 1, 0, 0, 0, 1063, 1064, 6, 128, 16, 0, 1064, 269, 1, 0, 0, 0, 1065, 1066, 3, 104, 46, 0, 1066, 1067, 1, 0, 0, 0, 1067, 1068, 6, 129, 15, 0, 1068, 271, 1, 0, 0, 0, 1069, 1070, 3, 108, 48, 0, 1070, 1071, 1, 0, 0, 0, 1071, 1072, 6, 130, 19, 0, 1072, 273, 1, 0, 0, 0, 1073, 1074, 3, 250, 119, 0, 1074, 1075, 1, 0, 0, 0, 1075, 1076, 6, 131, 24, 0, 1076, 275, 1, 0, 0, 0, 1077, 1078, 3, 218, 103, 0, 1078, 1079, 1, 0, 0, 0, 1079, 1080, 6, 132, 20, 0, 1080, 277, 1, 0, 0, 0, 1081, 1082, 3, 172, 80, 0, 1082, 1083, 1, 0, 0, 0, 1083, 1084, 6, 133, 18, 0, 1084, 279, 1, 0, 0, 0, 1085, 1086, 3, 50, 19, 0, 1086, 1087, 1, 0, 0, 0, 1087, 1088, 6, 134, 9, 0, 1088, 281, 1, 0, 0, 0, 1089, 1090, 3, 52, 20, 0, 1090, 1091, 1, 0, 0, 0, 1091, 1092, 6, 135, 9, 0, 1092, 283, 1, 0, 0, 0, 1093, 1094, 3, 54, 21, 0, 1094, 1095, 1, 0, 0, 0, 1095, 1096, 6, 136, 9, 0, 1096, 285, 1, 0, 0, 0, 1097, 1098, 3, 66, 27, 0, 1098, 1099, 1, 0, 0, 0, 1099, 1100, 6, 137, 12, 0, 1100, 1101, 6, 137, 13, 0, 1101, 287, 1, 0, 0, 0, 1102, 1103, 3, 108, 48, 0, 1103, 1104, 1, 0, 0, 0, 1104, 1105, 6, 138, 19, 0, 1105, 289, 1, 0, 0, 0, 1106, 1107, 3, 172, 80, 0, 1107, 1108, 1, 0, 0, 0, 1108, 1109, 6, 139, 18, 0, 1109, 291, 1, 0, 0, 0, 1110, 1111, 3, 168, 78, 0, 1111, 1112, 1, 0, 0, 0, 1112, 1113, 6, 140, 25, 0, 1113, 293, 1, 0, 0, 0, 1114, 1115, 3, 50, 19, 0, 1115, 1116, 1, 0, 0, 0, 1116, 1117, 6, 141, 9, 0, 1117, 295, 1, 0, 0, 0, 1118, 1119, 3, 52, 20, 0, 1119, 1120, 1, 0, 0, 0, 1120, 1121, 6, 142, 9, 0, 1121, 297, 1, 0, 0, 0, 1122, 1123, 3, 54, 21, 0, 1123, 1124, 1, 0, 0, 0, 1124, 1125, 6, 143, 9, 0, 1125, 299, 1, 0, 0, 0, 1126, 1127, 3, 66, 27, 0, 1127, 1128, 1, 0, 0, 0, 1128, 1129, 6, 144, 12, 0, 1129, 1130, 6, 144, 13, 0, 1130, 301, 1, 0, 0, 0, 1131, 1132, 5, 105, 0, 0, 1132, 1133, 5, 110, 0, 0, 1133, 1134, 5, 102, 0, 0, 1134, 1135, 5, 111, 0, 0, 1135, 303, 1, 0, 0, 
0, 1136, 1137, 3, 50, 19, 0, 1137, 1138, 1, 0, 0, 0, 1138, 1139, 6, 146, 9, 0, 1139, 305, 1, 0, 0, 0, 1140, 1141, 3, 52, 20, 0, 1141, 1142, 1, 0, 0, 0, 1142, 1143, 6, 147, 9, 0, 1143, 307, 1, 0, 0, 0, 1144, 1145, 3, 54, 21, 0, 1145, 1146, 1, 0, 0, 0, 1146, 1147, 6, 148, 9, 0, 1147, 309, 1, 0, 0, 0, 1148, 1149, 3, 66, 27, 0, 1149, 1150, 1, 0, 0, 0, 1150, 1151, 6, 149, 12, 0, 1151, 1152, 6, 149, 13, 0, 1152, 311, 1, 0, 0, 0, 1153, 1154, 5, 102, 0, 0, 1154, 1155, 5, 117, 0, 0, 1155, 1156, 5, 110, 0, 0, 1156, 1157, 5, 99, 0, 0, 1157, 1158, 5, 116, 0, 0, 1158, 1159, 5, 105, 0, 0, 1159, 1160, 5, 111, 0, 0, 1160, 1161, 5, 110, 0, 0, 1161, 1162, 5, 115, 0, 0, 1162, 313, 1, 0, 0, 0, 1163, 1164, 3, 50, 19, 0, 1164, 1165, 1, 0, 0, 0, 1165, 1166, 6, 151, 9, 0, 1166, 315, 1, 0, 0, 0, 1167, 1168, 3, 52, 20, 0, 1168, 1169, 1, 0, 0, 0, 1169, 1170, 6, 152, 9, 0, 1170, 317, 1, 0, 0, 0, 1171, 1172, 3, 54, 21, 0, 1172, 1173, 1, 0, 0, 0, 1173, 1174, 6, 153, 9, 0, 1174, 319, 1, 0, 0, 0, 1175, 1176, 3, 166, 77, 0, 1176, 1177, 1, 0, 0, 0, 1177, 1178, 6, 154, 14, 0, 1178, 1179, 6, 154, 13, 0, 1179, 321, 1, 0, 0, 0, 1180, 1181, 5, 58, 0, 0, 1181, 323, 1, 0, 0, 0, 1182, 1188, 3, 78, 33, 0, 1183, 1188, 3, 68, 28, 0, 1184, 1188, 3, 108, 48, 0, 1185, 1188, 3, 70, 29, 0, 1186, 1188, 3, 84, 36, 0, 1187, 1182, 1, 0, 0, 0, 1187, 1183, 1, 0, 0, 0, 1187, 1184, 1, 0, 0, 0, 1187, 1185, 1, 0, 0, 0, 1187, 1186, 1, 0, 0, 0, 1188, 1189, 1, 0, 0, 0, 1189, 1187, 1, 0, 0, 0, 1189, 1190, 1, 0, 0, 0, 1190, 325, 1, 0, 0, 0, 1191, 1192, 3, 50, 19, 0, 1192, 1193, 1, 0, 0, 0, 1193, 1194, 6, 157, 9, 0, 1194, 327, 1, 0, 0, 0, 1195, 1196, 3, 52, 20, 0, 1196, 1197, 1, 0, 0, 0, 1197, 1198, 6, 158, 9, 0, 1198, 329, 1, 0, 0, 0, 1199, 1200, 3, 54, 21, 0, 1200, 1201, 1, 0, 0, 0, 1201, 1202, 6, 159, 9, 0, 1202, 331, 1, 0, 0, 0, 58, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 485, 495, 499, 502, 511, 513, 524, 565, 570, 579, 586, 591, 593, 604, 612, 615, 617, 622, 627, 633, 640, 645, 651, 654, 662, 666, 799, 804, 809, 811, 817, 
880, 885, 920, 924, 929, 934, 939, 941, 945, 947, 1024, 1028, 1033, 1187, 1189, 26, 5, 2, 0, 5, 4, 0, 5, 6, 0, 5, 1, 0, 5, 3, 0, 5, 10, 0, 5, 8, 0, 5, 5, 0, 5, 9, 0, 0, 1, 0, 7, 65, 0, 5, 0, 0, 7, 26, 0, 4, 0, 0, 7, 66, 0, 7, 35, 0, 7, 33, 0, 7, 27, 0, 7, 68, 0, 7, 37, 0, 7, 78, 0, 5, 11, 0, 5, 7, 0, 7, 88, 0, 7, 87, 0, 7, 67, 0] \ No newline at end of file +[4, 0, 110, 1197, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2, 4, 7, 4, 2, 5, 7, 5, 2, 6, 7, 6, 2, 7, 7, 7, 2, 8, 7, 8, 2, 9, 7, 9, 2, 10, 7, 10, 2, 11, 7, 11, 2, 12, 7, 12, 2, 13, 7, 13, 2, 14, 7, 14, 2, 15, 7, 15, 2, 16, 7, 16, 2, 17, 7, 17, 2, 18, 7, 18, 2, 19, 7, 19, 2, 20, 7, 20, 2, 21, 7, 21, 2, 22, 7, 22, 2, 23, 7, 23, 2, 24, 7, 24, 2, 25, 7, 25, 2, 26, 7, 26, 2, 27, 7, 27, 2, 28, 7, 28, 2, 29, 7, 29, 2, 30, 7, 30, 2, 31, 7, 31, 2, 32, 7, 32, 2, 33, 7, 33, 2, 34, 7, 34, 2, 35, 7, 35, 2, 36, 7, 36, 2, 37, 7, 37, 2, 38, 7, 38, 2, 39, 7, 39, 2, 40, 7, 40, 2, 41, 7, 41, 2, 42, 7, 42, 2, 43, 7, 43, 2, 44, 7, 44, 2, 45, 7, 45, 2, 46, 7, 46, 2, 47, 7, 47, 2, 48, 7, 48, 2, 49, 7, 49, 2, 50, 7, 50, 2, 51, 7, 51, 2, 52, 7, 52, 2, 53, 7, 53, 2, 54, 7, 54, 2, 55, 7, 55, 2, 56, 7, 56, 2, 57, 7, 57, 2, 58, 7, 58, 2, 59, 7, 59, 2, 60, 7, 60, 2, 61, 7, 61, 2, 62, 7, 62, 2, 63, 7, 63, 2, 64, 7, 64, 2, 65, 7, 65, 2, 66, 7, 66, 2, 67, 7, 67, 2, 68, 7, 68, 2, 69, 7, 69, 2, 70, 7, 70, 2, 71, 7, 71, 2, 72, 7, 72, 2, 73, 7, 73, 2, 74, 7, 74, 2, 75, 7, 75, 2, 76, 7, 76, 2, 77, 7, 77, 2, 78, 7, 78, 2, 79, 7, 79, 2, 80, 7, 80, 2, 81, 7, 81, 2, 82, 7, 82, 2, 83, 7, 83, 2, 84, 7, 84, 2, 85, 7, 85, 2, 86, 7, 86, 2, 87, 7, 87, 2, 88, 7, 88, 2, 89, 7, 89, 2, 90, 7, 90, 2, 91, 7, 91, 2, 92, 7, 92, 2, 93, 7, 93, 2, 94, 7, 94, 2, 95, 7, 95, 2, 96, 7, 96, 2, 97, 7, 97, 2, 98, 7, 98, 2, 99, 7, 99, 2, 100, 7, 100, 2, 101, 7, 101, 2, 102, 7, 102, 2, 103, 7, 103, 2, 104, 7, 104, 2, 105, 7, 105, 2, 106, 7, 106, 2, 107, 7, 107, 2, 108, 7, 108, 2, 109, 7, 
109, 2, 110, 7, 110, 2, 111, 7, 111, 2, 112, 7, 112, 2, 113, 7, 113, 2, 114, 7, 114, 2, 115, 7, 115, 2, 116, 7, 116, 2, 117, 7, 117, 2, 118, 7, 118, 2, 119, 7, 119, 2, 120, 7, 120, 2, 121, 7, 121, 2, 122, 7, 122, 2, 123, 7, 123, 2, 124, 7, 124, 2, 125, 7, 125, 2, 126, 7, 126, 2, 127, 7, 127, 2, 128, 7, 128, 2, 129, 7, 129, 2, 130, 7, 130, 2, 131, 7, 131, 2, 132, 7, 132, 2, 133, 7, 133, 2, 134, 7, 134, 2, 135, 7, 135, 2, 136, 7, 136, 2, 137, 7, 137, 2, 138, 7, 138, 2, 139, 7, 139, 2, 140, 7, 140, 2, 141, 7, 141, 2, 142, 7, 142, 2, 143, 7, 143, 2, 144, 7, 144, 2, 145, 7, 145, 2, 146, 7, 146, 2, 147, 7, 147, 2, 148, 7, 148, 2, 149, 7, 149, 2, 150, 7, 150, 2, 151, 7, 151, 2, 152, 7, 152, 2, 153, 7, 153, 2, 154, 7, 154, 2, 155, 7, 155, 2, 156, 7, 156, 2, 157, 7, 157, 2, 158, 7, 158, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 18, 4, 18, 482, 8, 18, 11, 18, 12, 18, 483, 1, 18, 1, 18, 1, 19, 1, 19, 1, 19, 1, 19, 5, 19, 492, 8, 19, 10, 19, 12, 19, 495, 9, 19, 1, 19, 3, 19, 498, 8, 19, 1, 19, 3, 19, 501, 8, 19, 1, 19, 1, 19, 1, 20, 1, 20, 1, 20, 1, 20, 1, 20, 5, 20, 
510, 8, 20, 10, 20, 12, 20, 513, 9, 20, 1, 20, 1, 20, 1, 20, 1, 20, 1, 20, 1, 21, 4, 21, 521, 8, 21, 11, 21, 12, 21, 522, 1, 21, 1, 21, 1, 22, 1, 22, 1, 22, 1, 22, 1, 22, 1, 23, 1, 23, 1, 23, 1, 23, 1, 23, 1, 24, 1, 24, 1, 24, 1, 24, 1, 25, 1, 25, 1, 25, 1, 25, 1, 26, 1, 26, 1, 26, 1, 26, 1, 27, 1, 27, 1, 27, 1, 27, 1, 28, 1, 28, 1, 29, 1, 29, 1, 30, 1, 30, 1, 30, 1, 31, 1, 31, 1, 32, 1, 32, 3, 32, 564, 8, 32, 1, 32, 4, 32, 567, 8, 32, 11, 32, 12, 32, 568, 1, 33, 1, 33, 1, 34, 1, 34, 1, 35, 1, 35, 1, 35, 3, 35, 578, 8, 35, 1, 36, 1, 36, 1, 37, 1, 37, 1, 37, 3, 37, 585, 8, 37, 1, 38, 1, 38, 1, 38, 5, 38, 590, 8, 38, 10, 38, 12, 38, 593, 9, 38, 1, 38, 1, 38, 1, 38, 1, 38, 1, 38, 1, 38, 5, 38, 601, 8, 38, 10, 38, 12, 38, 604, 9, 38, 1, 38, 1, 38, 1, 38, 1, 38, 1, 38, 3, 38, 611, 8, 38, 1, 38, 3, 38, 614, 8, 38, 3, 38, 616, 8, 38, 1, 39, 4, 39, 619, 8, 39, 11, 39, 12, 39, 620, 1, 40, 4, 40, 624, 8, 40, 11, 40, 12, 40, 625, 1, 40, 1, 40, 5, 40, 630, 8, 40, 10, 40, 12, 40, 633, 9, 40, 1, 40, 1, 40, 4, 40, 637, 8, 40, 11, 40, 12, 40, 638, 1, 40, 4, 40, 642, 8, 40, 11, 40, 12, 40, 643, 1, 40, 1, 40, 5, 40, 648, 8, 40, 10, 40, 12, 40, 651, 9, 40, 3, 40, 653, 8, 40, 1, 40, 1, 40, 1, 40, 1, 40, 4, 40, 659, 8, 40, 11, 40, 12, 40, 660, 1, 40, 1, 40, 3, 40, 665, 8, 40, 1, 41, 1, 41, 1, 41, 1, 42, 1, 42, 1, 42, 1, 42, 1, 43, 1, 43, 1, 43, 1, 43, 1, 44, 1, 44, 1, 45, 1, 45, 1, 45, 1, 46, 1, 46, 1, 47, 1, 47, 1, 47, 1, 47, 1, 47, 1, 48, 1, 48, 1, 49, 1, 49, 1, 49, 1, 49, 1, 49, 1, 49, 1, 50, 1, 50, 1, 50, 1, 50, 1, 50, 1, 50, 1, 51, 1, 51, 1, 51, 1, 51, 1, 51, 1, 52, 1, 52, 1, 53, 1, 53, 1, 53, 1, 54, 1, 54, 1, 54, 1, 55, 1, 55, 1, 55, 1, 55, 1, 55, 1, 56, 1, 56, 1, 56, 1, 56, 1, 57, 1, 57, 1, 57, 1, 57, 1, 57, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 59, 1, 59, 1, 59, 1, 60, 1, 60, 1, 61, 1, 61, 1, 61, 1, 61, 1, 61, 1, 61, 1, 62, 1, 62, 1, 63, 1, 63, 1, 63, 1, 63, 1, 63, 1, 64, 1, 64, 1, 64, 1, 65, 1, 65, 1, 65, 1, 66, 1, 66, 1, 66, 1, 67, 1, 67, 1, 68, 1, 68, 1, 68, 1, 69, 1, 
69, 1, 70, 1, 70, 1, 70, 1, 71, 1, 71, 1, 72, 1, 72, 1, 73, 1, 73, 1, 74, 1, 74, 1, 75, 1, 75, 1, 76, 1, 76, 1, 76, 1, 76, 1, 76, 1, 77, 1, 77, 1, 77, 1, 77, 1, 77, 1, 78, 1, 78, 5, 78, 796, 8, 78, 10, 78, 12, 78, 799, 9, 78, 1, 78, 1, 78, 3, 78, 803, 8, 78, 1, 78, 4, 78, 806, 8, 78, 11, 78, 12, 78, 807, 3, 78, 810, 8, 78, 1, 79, 1, 79, 4, 79, 814, 8, 79, 11, 79, 12, 79, 815, 1, 79, 1, 79, 1, 80, 1, 80, 1, 81, 1, 81, 1, 81, 1, 81, 1, 82, 1, 82, 1, 82, 1, 82, 1, 83, 1, 83, 1, 83, 1, 83, 1, 84, 1, 84, 1, 84, 1, 84, 1, 84, 1, 85, 1, 85, 1, 85, 1, 85, 1, 86, 1, 86, 1, 86, 1, 86, 1, 87, 1, 87, 1, 87, 1, 87, 1, 88, 1, 88, 1, 88, 1, 88, 1, 89, 1, 89, 1, 89, 1, 89, 1, 90, 1, 90, 1, 90, 1, 90, 1, 90, 1, 90, 1, 90, 1, 90, 1, 91, 1, 91, 1, 91, 1, 91, 1, 91, 1, 91, 1, 91, 1, 91, 1, 91, 1, 92, 1, 92, 1, 92, 3, 92, 879, 8, 92, 1, 93, 4, 93, 882, 8, 93, 11, 93, 12, 93, 883, 1, 94, 1, 94, 1, 94, 1, 94, 1, 95, 1, 95, 1, 95, 1, 95, 1, 96, 1, 96, 1, 96, 1, 96, 1, 97, 1, 97, 1, 97, 1, 97, 1, 97, 1, 98, 1, 98, 1, 98, 1, 98, 1, 99, 1, 99, 1, 99, 1, 99, 1, 100, 1, 100, 1, 100, 1, 100, 3, 100, 915, 8, 100, 1, 101, 1, 101, 3, 101, 919, 8, 101, 1, 101, 5, 101, 922, 8, 101, 10, 101, 12, 101, 925, 9, 101, 1, 101, 1, 101, 3, 101, 929, 8, 101, 1, 101, 4, 101, 932, 8, 101, 11, 101, 12, 101, 933, 3, 101, 936, 8, 101, 1, 102, 1, 102, 4, 102, 940, 8, 102, 11, 102, 12, 102, 941, 1, 103, 1, 103, 1, 103, 1, 103, 1, 104, 1, 104, 1, 104, 1, 104, 1, 105, 1, 105, 1, 105, 1, 105, 1, 106, 1, 106, 1, 106, 1, 106, 1, 106, 1, 107, 1, 107, 1, 107, 1, 107, 1, 108, 1, 108, 1, 108, 1, 108, 1, 109, 1, 109, 1, 109, 1, 109, 1, 110, 1, 110, 1, 110, 1, 111, 1, 111, 1, 111, 1, 111, 1, 112, 1, 112, 1, 112, 1, 112, 1, 113, 1, 113, 1, 113, 1, 113, 1, 114, 1, 114, 1, 114, 1, 114, 1, 115, 1, 115, 1, 115, 1, 115, 1, 115, 1, 116, 1, 116, 1, 116, 1, 116, 1, 116, 1, 117, 1, 117, 1, 117, 1, 117, 1, 117, 1, 118, 1, 118, 1, 118, 1, 118, 1, 118, 1, 118, 1, 118, 1, 119, 1, 119, 1, 120, 4, 120, 1017, 8, 120, 11, 120, 12, 120, 1018, 1, 
120, 1, 120, 3, 120, 1023, 8, 120, 1, 120, 4, 120, 1026, 8, 120, 11, 120, 12, 120, 1027, 1, 121, 1, 121, 1, 121, 1, 121, 1, 122, 1, 122, 1, 122, 1, 122, 1, 123, 1, 123, 1, 123, 1, 123, 1, 124, 1, 124, 1, 124, 1, 124, 1, 125, 1, 125, 1, 125, 1, 125, 1, 126, 1, 126, 1, 126, 1, 126, 1, 126, 1, 126, 1, 127, 1, 127, 1, 127, 1, 127, 1, 128, 1, 128, 1, 128, 1, 128, 1, 129, 1, 129, 1, 129, 1, 129, 1, 130, 1, 130, 1, 130, 1, 130, 1, 131, 1, 131, 1, 131, 1, 131, 1, 132, 1, 132, 1, 132, 1, 132, 1, 133, 1, 133, 1, 133, 1, 133, 1, 134, 1, 134, 1, 134, 1, 134, 1, 135, 1, 135, 1, 135, 1, 135, 1, 136, 1, 136, 1, 136, 1, 136, 1, 136, 1, 137, 1, 137, 1, 137, 1, 137, 1, 138, 1, 138, 1, 138, 1, 138, 1, 139, 1, 139, 1, 139, 1, 139, 1, 140, 1, 140, 1, 140, 1, 140, 1, 141, 1, 141, 1, 141, 1, 141, 1, 142, 1, 142, 1, 142, 1, 142, 1, 143, 1, 143, 1, 143, 1, 143, 1, 143, 1, 144, 1, 144, 1, 144, 1, 144, 1, 144, 1, 145, 1, 145, 1, 145, 1, 145, 1, 146, 1, 146, 1, 146, 1, 146, 1, 147, 1, 147, 1, 147, 1, 147, 1, 148, 1, 148, 1, 148, 1, 148, 1, 148, 1, 149, 1, 149, 1, 149, 1, 149, 1, 149, 1, 149, 1, 149, 1, 149, 1, 149, 1, 149, 1, 150, 1, 150, 1, 150, 1, 150, 1, 151, 1, 151, 1, 151, 1, 151, 1, 152, 1, 152, 1, 152, 1, 152, 1, 153, 1, 153, 1, 153, 1, 153, 1, 153, 1, 154, 1, 154, 1, 155, 1, 155, 1, 155, 1, 155, 1, 155, 4, 155, 1182, 8, 155, 11, 155, 12, 155, 1183, 1, 156, 1, 156, 1, 156, 1, 156, 1, 157, 1, 157, 1, 157, 1, 157, 1, 158, 1, 158, 1, 158, 1, 158, 2, 511, 602, 0, 159, 12, 1, 14, 2, 16, 3, 18, 4, 20, 5, 22, 6, 24, 7, 26, 8, 28, 9, 30, 10, 32, 11, 34, 12, 36, 13, 38, 14, 40, 15, 42, 16, 44, 17, 46, 18, 48, 19, 50, 20, 52, 21, 54, 22, 56, 0, 58, 0, 60, 23, 62, 24, 64, 25, 66, 26, 68, 0, 70, 0, 72, 0, 74, 0, 76, 0, 78, 0, 80, 0, 82, 0, 84, 0, 86, 0, 88, 27, 90, 28, 92, 29, 94, 30, 96, 31, 98, 32, 100, 33, 102, 34, 104, 35, 106, 36, 108, 37, 110, 38, 112, 39, 114, 40, 116, 41, 118, 42, 120, 43, 122, 44, 124, 45, 126, 46, 128, 47, 130, 48, 132, 49, 134, 50, 136, 51, 138, 52, 140, 53, 142, 54, 
144, 55, 146, 56, 148, 57, 150, 58, 152, 59, 154, 60, 156, 61, 158, 62, 160, 63, 162, 64, 164, 65, 166, 66, 168, 67, 170, 0, 172, 68, 174, 69, 176, 70, 178, 71, 180, 0, 182, 0, 184, 0, 186, 0, 188, 0, 190, 0, 192, 72, 194, 73, 196, 0, 198, 74, 200, 75, 202, 76, 204, 77, 206, 0, 208, 0, 210, 0, 212, 0, 214, 0, 216, 78, 218, 79, 220, 80, 222, 81, 224, 0, 226, 0, 228, 0, 230, 0, 232, 82, 234, 0, 236, 83, 238, 84, 240, 85, 242, 0, 244, 0, 246, 86, 248, 87, 250, 0, 252, 88, 254, 0, 256, 0, 258, 89, 260, 90, 262, 91, 264, 0, 266, 0, 268, 0, 270, 0, 272, 0, 274, 0, 276, 0, 278, 92, 280, 93, 282, 94, 284, 0, 286, 0, 288, 0, 290, 0, 292, 95, 294, 96, 296, 97, 298, 0, 300, 98, 302, 99, 304, 100, 306, 101, 308, 0, 310, 102, 312, 103, 314, 104, 316, 105, 318, 0, 320, 106, 322, 107, 324, 108, 326, 109, 328, 110, 12, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 13, 6, 0, 9, 10, 13, 13, 32, 32, 47, 47, 91, 91, 93, 93, 2, 0, 10, 10, 13, 13, 3, 0, 9, 10, 13, 13, 32, 32, 1, 0, 48, 57, 2, 0, 65, 90, 97, 122, 5, 0, 34, 34, 92, 92, 110, 110, 114, 114, 116, 116, 4, 0, 10, 10, 13, 13, 34, 34, 92, 92, 2, 0, 69, 69, 101, 101, 2, 0, 43, 43, 45, 45, 1, 0, 96, 96, 10, 0, 9, 10, 13, 13, 32, 32, 44, 44, 47, 47, 61, 61, 91, 91, 93, 93, 96, 96, 124, 124, 2, 0, 42, 42, 47, 47, 11, 0, 9, 10, 13, 13, 32, 32, 34, 35, 44, 44, 47, 47, 58, 58, 60, 60, 62, 63, 92, 92, 124, 124, 1224, 0, 12, 1, 0, 0, 0, 0, 14, 1, 0, 0, 0, 0, 16, 1, 0, 0, 0, 0, 18, 1, 0, 0, 0, 0, 20, 1, 0, 0, 0, 0, 22, 1, 0, 0, 0, 0, 24, 1, 0, 0, 0, 0, 26, 1, 0, 0, 0, 0, 28, 1, 0, 0, 0, 0, 30, 1, 0, 0, 0, 0, 32, 1, 0, 0, 0, 0, 34, 1, 0, 0, 0, 0, 36, 1, 0, 0, 0, 0, 38, 1, 0, 0, 0, 0, 40, 1, 0, 0, 0, 0, 42, 1, 0, 0, 0, 0, 44, 1, 0, 0, 0, 0, 46, 1, 0, 0, 0, 0, 48, 1, 0, 0, 0, 0, 50, 1, 0, 0, 0, 0, 52, 1, 0, 0, 0, 0, 54, 1, 0, 0, 0, 1, 56, 1, 0, 0, 0, 1, 58, 1, 0, 0, 0, 1, 60, 1, 0, 0, 0, 1, 62, 1, 0, 0, 0, 1, 64, 1, 0, 0, 0, 2, 66, 1, 0, 0, 0, 2, 88, 1, 0, 0, 0, 2, 90, 1, 0, 0, 0, 2, 92, 1, 0, 0, 0, 2, 94, 1, 0, 0, 0, 2, 96, 1, 0, 0, 0, 2, 98, 1, 0, 
0, 0, 2, 100, 1, 0, 0, 0, 2, 102, 1, 0, 0, 0, 2, 104, 1, 0, 0, 0, 2, 106, 1, 0, 0, 0, 2, 108, 1, 0, 0, 0, 2, 110, 1, 0, 0, 0, 2, 112, 1, 0, 0, 0, 2, 114, 1, 0, 0, 0, 2, 116, 1, 0, 0, 0, 2, 118, 1, 0, 0, 0, 2, 120, 1, 0, 0, 0, 2, 122, 1, 0, 0, 0, 2, 124, 1, 0, 0, 0, 2, 126, 1, 0, 0, 0, 2, 128, 1, 0, 0, 0, 2, 130, 1, 0, 0, 0, 2, 132, 1, 0, 0, 0, 2, 134, 1, 0, 0, 0, 2, 136, 1, 0, 0, 0, 2, 138, 1, 0, 0, 0, 2, 140, 1, 0, 0, 0, 2, 142, 1, 0, 0, 0, 2, 144, 1, 0, 0, 0, 2, 146, 1, 0, 0, 0, 2, 148, 1, 0, 0, 0, 2, 150, 1, 0, 0, 0, 2, 152, 1, 0, 0, 0, 2, 154, 1, 0, 0, 0, 2, 156, 1, 0, 0, 0, 2, 158, 1, 0, 0, 0, 2, 160, 1, 0, 0, 0, 2, 162, 1, 0, 0, 0, 2, 164, 1, 0, 0, 0, 2, 166, 1, 0, 0, 0, 2, 168, 1, 0, 0, 0, 2, 172, 1, 0, 0, 0, 2, 174, 1, 0, 0, 0, 2, 176, 1, 0, 0, 0, 2, 178, 1, 0, 0, 0, 3, 180, 1, 0, 0, 0, 3, 182, 1, 0, 0, 0, 3, 184, 1, 0, 0, 0, 3, 186, 1, 0, 0, 0, 3, 188, 1, 0, 0, 0, 3, 190, 1, 0, 0, 0, 3, 192, 1, 0, 0, 0, 3, 194, 1, 0, 0, 0, 3, 198, 1, 0, 0, 0, 3, 200, 1, 0, 0, 0, 3, 202, 1, 0, 0, 0, 3, 204, 1, 0, 0, 0, 4, 206, 1, 0, 0, 0, 4, 208, 1, 0, 0, 0, 4, 210, 1, 0, 0, 0, 4, 216, 1, 0, 0, 0, 4, 218, 1, 0, 0, 0, 4, 220, 1, 0, 0, 0, 4, 222, 1, 0, 0, 0, 5, 224, 1, 0, 0, 0, 5, 226, 1, 0, 0, 0, 5, 228, 1, 0, 0, 0, 5, 230, 1, 0, 0, 0, 5, 232, 1, 0, 0, 0, 5, 234, 1, 0, 0, 0, 5, 236, 1, 0, 0, 0, 5, 238, 1, 0, 0, 0, 5, 240, 1, 0, 0, 0, 6, 242, 1, 0, 0, 0, 6, 244, 1, 0, 0, 0, 6, 246, 1, 0, 0, 0, 6, 248, 1, 0, 0, 0, 6, 252, 1, 0, 0, 0, 6, 254, 1, 0, 0, 0, 6, 256, 1, 0, 0, 0, 6, 258, 1, 0, 0, 0, 6, 260, 1, 0, 0, 0, 6, 262, 1, 0, 0, 0, 7, 264, 1, 0, 0, 0, 7, 266, 1, 0, 0, 0, 7, 268, 1, 0, 0, 0, 7, 270, 1, 0, 0, 0, 7, 272, 1, 0, 0, 0, 7, 274, 1, 0, 0, 0, 7, 276, 1, 0, 0, 0, 7, 278, 1, 0, 0, 0, 7, 280, 1, 0, 0, 0, 7, 282, 1, 0, 0, 0, 8, 284, 1, 0, 0, 0, 8, 286, 1, 0, 0, 0, 8, 288, 1, 0, 0, 0, 8, 290, 1, 0, 0, 0, 8, 292, 1, 0, 0, 0, 8, 294, 1, 0, 0, 0, 8, 296, 1, 0, 0, 0, 9, 298, 1, 0, 0, 0, 9, 300, 1, 0, 0, 0, 9, 302, 1, 0, 0, 0, 9, 304, 1, 0, 0, 0, 9, 306, 1, 0, 0, 0, 10, 308, 1, 
0, 0, 0, 10, 310, 1, 0, 0, 0, 10, 312, 1, 0, 0, 0, 10, 314, 1, 0, 0, 0, 10, 316, 1, 0, 0, 0, 11, 318, 1, 0, 0, 0, 11, 320, 1, 0, 0, 0, 11, 322, 1, 0, 0, 0, 11, 324, 1, 0, 0, 0, 11, 326, 1, 0, 0, 0, 11, 328, 1, 0, 0, 0, 12, 330, 1, 0, 0, 0, 14, 340, 1, 0, 0, 0, 16, 347, 1, 0, 0, 0, 18, 356, 1, 0, 0, 0, 20, 363, 1, 0, 0, 0, 22, 373, 1, 0, 0, 0, 24, 380, 1, 0, 0, 0, 26, 387, 1, 0, 0, 0, 28, 401, 1, 0, 0, 0, 30, 408, 1, 0, 0, 0, 32, 416, 1, 0, 0, 0, 34, 423, 1, 0, 0, 0, 36, 435, 1, 0, 0, 0, 38, 444, 1, 0, 0, 0, 40, 450, 1, 0, 0, 0, 42, 457, 1, 0, 0, 0, 44, 464, 1, 0, 0, 0, 46, 472, 1, 0, 0, 0, 48, 481, 1, 0, 0, 0, 50, 487, 1, 0, 0, 0, 52, 504, 1, 0, 0, 0, 54, 520, 1, 0, 0, 0, 56, 526, 1, 0, 0, 0, 58, 531, 1, 0, 0, 0, 60, 536, 1, 0, 0, 0, 62, 540, 1, 0, 0, 0, 64, 544, 1, 0, 0, 0, 66, 548, 1, 0, 0, 0, 68, 552, 1, 0, 0, 0, 70, 554, 1, 0, 0, 0, 72, 556, 1, 0, 0, 0, 74, 559, 1, 0, 0, 0, 76, 561, 1, 0, 0, 0, 78, 570, 1, 0, 0, 0, 80, 572, 1, 0, 0, 0, 82, 577, 1, 0, 0, 0, 84, 579, 1, 0, 0, 0, 86, 584, 1, 0, 0, 0, 88, 615, 1, 0, 0, 0, 90, 618, 1, 0, 0, 0, 92, 664, 1, 0, 0, 0, 94, 666, 1, 0, 0, 0, 96, 669, 1, 0, 0, 0, 98, 673, 1, 0, 0, 0, 100, 677, 1, 0, 0, 0, 102, 679, 1, 0, 0, 0, 104, 682, 1, 0, 0, 0, 106, 684, 1, 0, 0, 0, 108, 689, 1, 0, 0, 0, 110, 691, 1, 0, 0, 0, 112, 697, 1, 0, 0, 0, 114, 703, 1, 0, 0, 0, 116, 708, 1, 0, 0, 0, 118, 710, 1, 0, 0, 0, 120, 713, 1, 0, 0, 0, 122, 716, 1, 0, 0, 0, 124, 721, 1, 0, 0, 0, 126, 725, 1, 0, 0, 0, 128, 730, 1, 0, 0, 0, 130, 736, 1, 0, 0, 0, 132, 739, 1, 0, 0, 0, 134, 741, 1, 0, 0, 0, 136, 747, 1, 0, 0, 0, 138, 749, 1, 0, 0, 0, 140, 754, 1, 0, 0, 0, 142, 757, 1, 0, 0, 0, 144, 760, 1, 0, 0, 0, 146, 763, 1, 0, 0, 0, 148, 765, 1, 0, 0, 0, 150, 768, 1, 0, 0, 0, 152, 770, 1, 0, 0, 0, 154, 773, 1, 0, 0, 0, 156, 775, 1, 0, 0, 0, 158, 777, 1, 0, 0, 0, 160, 779, 1, 0, 0, 0, 162, 781, 1, 0, 0, 0, 164, 783, 1, 0, 0, 0, 166, 788, 1, 0, 0, 0, 168, 809, 1, 0, 0, 0, 170, 811, 1, 0, 0, 0, 172, 819, 1, 0, 0, 0, 174, 821, 1, 0, 0, 0, 176, 825, 1, 0, 0, 
0, 178, 829, 1, 0, 0, 0, 180, 833, 1, 0, 0, 0, 182, 838, 1, 0, 0, 0, 184, 842, 1, 0, 0, 0, 186, 846, 1, 0, 0, 0, 188, 850, 1, 0, 0, 0, 190, 854, 1, 0, 0, 0, 192, 858, 1, 0, 0, 0, 194, 866, 1, 0, 0, 0, 196, 878, 1, 0, 0, 0, 198, 881, 1, 0, 0, 0, 200, 885, 1, 0, 0, 0, 202, 889, 1, 0, 0, 0, 204, 893, 1, 0, 0, 0, 206, 897, 1, 0, 0, 0, 208, 902, 1, 0, 0, 0, 210, 906, 1, 0, 0, 0, 212, 914, 1, 0, 0, 0, 214, 935, 1, 0, 0, 0, 216, 939, 1, 0, 0, 0, 218, 943, 1, 0, 0, 0, 220, 947, 1, 0, 0, 0, 222, 951, 1, 0, 0, 0, 224, 955, 1, 0, 0, 0, 226, 960, 1, 0, 0, 0, 228, 964, 1, 0, 0, 0, 230, 968, 1, 0, 0, 0, 232, 972, 1, 0, 0, 0, 234, 975, 1, 0, 0, 0, 236, 979, 1, 0, 0, 0, 238, 983, 1, 0, 0, 0, 240, 987, 1, 0, 0, 0, 242, 991, 1, 0, 0, 0, 244, 996, 1, 0, 0, 0, 246, 1001, 1, 0, 0, 0, 248, 1006, 1, 0, 0, 0, 250, 1013, 1, 0, 0, 0, 252, 1022, 1, 0, 0, 0, 254, 1029, 1, 0, 0, 0, 256, 1033, 1, 0, 0, 0, 258, 1037, 1, 0, 0, 0, 260, 1041, 1, 0, 0, 0, 262, 1045, 1, 0, 0, 0, 264, 1049, 1, 0, 0, 0, 266, 1055, 1, 0, 0, 0, 268, 1059, 1, 0, 0, 0, 270, 1063, 1, 0, 0, 0, 272, 1067, 1, 0, 0, 0, 274, 1071, 1, 0, 0, 0, 276, 1075, 1, 0, 0, 0, 278, 1079, 1, 0, 0, 0, 280, 1083, 1, 0, 0, 0, 282, 1087, 1, 0, 0, 0, 284, 1091, 1, 0, 0, 0, 286, 1096, 1, 0, 0, 0, 288, 1100, 1, 0, 0, 0, 290, 1104, 1, 0, 0, 0, 292, 1108, 1, 0, 0, 0, 294, 1112, 1, 0, 0, 0, 296, 1116, 1, 0, 0, 0, 298, 1120, 1, 0, 0, 0, 300, 1125, 1, 0, 0, 0, 302, 1130, 1, 0, 0, 0, 304, 1134, 1, 0, 0, 0, 306, 1138, 1, 0, 0, 0, 308, 1142, 1, 0, 0, 0, 310, 1147, 1, 0, 0, 0, 312, 1157, 1, 0, 0, 0, 314, 1161, 1, 0, 0, 0, 316, 1165, 1, 0, 0, 0, 318, 1169, 1, 0, 0, 0, 320, 1174, 1, 0, 0, 0, 322, 1181, 1, 0, 0, 0, 324, 1185, 1, 0, 0, 0, 326, 1189, 1, 0, 0, 0, 328, 1193, 1, 0, 0, 0, 330, 331, 5, 100, 0, 0, 331, 332, 5, 105, 0, 0, 332, 333, 5, 115, 0, 0, 333, 334, 5, 115, 0, 0, 334, 335, 5, 101, 0, 0, 335, 336, 5, 99, 0, 0, 336, 337, 5, 116, 0, 0, 337, 338, 1, 0, 0, 0, 338, 339, 6, 0, 0, 0, 339, 13, 1, 0, 0, 0, 340, 341, 5, 100, 0, 0, 341, 342, 5, 114, 0, 0, 
342, 343, 5, 111, 0, 0, 343, 344, 5, 112, 0, 0, 344, 345, 1, 0, 0, 0, 345, 346, 6, 1, 1, 0, 346, 15, 1, 0, 0, 0, 347, 348, 5, 101, 0, 0, 348, 349, 5, 110, 0, 0, 349, 350, 5, 114, 0, 0, 350, 351, 5, 105, 0, 0, 351, 352, 5, 99, 0, 0, 352, 353, 5, 104, 0, 0, 353, 354, 1, 0, 0, 0, 354, 355, 6, 2, 2, 0, 355, 17, 1, 0, 0, 0, 356, 357, 5, 101, 0, 0, 357, 358, 5, 118, 0, 0, 358, 359, 5, 97, 0, 0, 359, 360, 5, 108, 0, 0, 360, 361, 1, 0, 0, 0, 361, 362, 6, 3, 0, 0, 362, 19, 1, 0, 0, 0, 363, 364, 5, 101, 0, 0, 364, 365, 5, 120, 0, 0, 365, 366, 5, 112, 0, 0, 366, 367, 5, 108, 0, 0, 367, 368, 5, 97, 0, 0, 368, 369, 5, 105, 0, 0, 369, 370, 5, 110, 0, 0, 370, 371, 1, 0, 0, 0, 371, 372, 6, 4, 3, 0, 372, 21, 1, 0, 0, 0, 373, 374, 5, 102, 0, 0, 374, 375, 5, 114, 0, 0, 375, 376, 5, 111, 0, 0, 376, 377, 5, 109, 0, 0, 377, 378, 1, 0, 0, 0, 378, 379, 6, 5, 4, 0, 379, 23, 1, 0, 0, 0, 380, 381, 5, 103, 0, 0, 381, 382, 5, 114, 0, 0, 382, 383, 5, 111, 0, 0, 383, 384, 5, 107, 0, 0, 384, 385, 1, 0, 0, 0, 385, 386, 6, 6, 0, 0, 386, 25, 1, 0, 0, 0, 387, 388, 5, 105, 0, 0, 388, 389, 5, 110, 0, 0, 389, 390, 5, 108, 0, 0, 390, 391, 5, 105, 0, 0, 391, 392, 5, 110, 0, 0, 392, 393, 5, 101, 0, 0, 393, 394, 5, 115, 0, 0, 394, 395, 5, 116, 0, 0, 395, 396, 5, 97, 0, 0, 396, 397, 5, 116, 0, 0, 397, 398, 5, 115, 0, 0, 398, 399, 1, 0, 0, 0, 399, 400, 6, 7, 0, 0, 400, 27, 1, 0, 0, 0, 401, 402, 5, 107, 0, 0, 402, 403, 5, 101, 0, 0, 403, 404, 5, 101, 0, 0, 404, 405, 5, 112, 0, 0, 405, 406, 1, 0, 0, 0, 406, 407, 6, 8, 1, 0, 407, 29, 1, 0, 0, 0, 408, 409, 5, 108, 0, 0, 409, 410, 5, 105, 0, 0, 410, 411, 5, 109, 0, 0, 411, 412, 5, 105, 0, 0, 412, 413, 5, 116, 0, 0, 413, 414, 1, 0, 0, 0, 414, 415, 6, 9, 0, 0, 415, 31, 1, 0, 0, 0, 416, 417, 5, 109, 0, 0, 417, 418, 5, 101, 0, 0, 418, 419, 5, 116, 0, 0, 419, 420, 5, 97, 0, 0, 420, 421, 1, 0, 0, 0, 421, 422, 6, 10, 5, 0, 422, 33, 1, 0, 0, 0, 423, 424, 5, 109, 0, 0, 424, 425, 5, 118, 0, 0, 425, 426, 5, 95, 0, 0, 426, 427, 5, 101, 0, 0, 427, 428, 5, 120, 0, 0, 428, 429, 
5, 112, 0, 0, 429, 430, 5, 97, 0, 0, 430, 431, 5, 110, 0, 0, 431, 432, 5, 100, 0, 0, 432, 433, 1, 0, 0, 0, 433, 434, 6, 11, 6, 0, 434, 35, 1, 0, 0, 0, 435, 436, 5, 114, 0, 0, 436, 437, 5, 101, 0, 0, 437, 438, 5, 110, 0, 0, 438, 439, 5, 97, 0, 0, 439, 440, 5, 109, 0, 0, 440, 441, 5, 101, 0, 0, 441, 442, 1, 0, 0, 0, 442, 443, 6, 12, 7, 0, 443, 37, 1, 0, 0, 0, 444, 445, 5, 114, 0, 0, 445, 446, 5, 111, 0, 0, 446, 447, 5, 119, 0, 0, 447, 448, 1, 0, 0, 0, 448, 449, 6, 13, 0, 0, 449, 39, 1, 0, 0, 0, 450, 451, 5, 115, 0, 0, 451, 452, 5, 104, 0, 0, 452, 453, 5, 111, 0, 0, 453, 454, 5, 119, 0, 0, 454, 455, 1, 0, 0, 0, 455, 456, 6, 14, 8, 0, 456, 41, 1, 0, 0, 0, 457, 458, 5, 115, 0, 0, 458, 459, 5, 111, 0, 0, 459, 460, 5, 114, 0, 0, 460, 461, 5, 116, 0, 0, 461, 462, 1, 0, 0, 0, 462, 463, 6, 15, 0, 0, 463, 43, 1, 0, 0, 0, 464, 465, 5, 115, 0, 0, 465, 466, 5, 116, 0, 0, 466, 467, 5, 97, 0, 0, 467, 468, 5, 116, 0, 0, 468, 469, 5, 115, 0, 0, 469, 470, 1, 0, 0, 0, 470, 471, 6, 16, 0, 0, 471, 45, 1, 0, 0, 0, 472, 473, 5, 119, 0, 0, 473, 474, 5, 104, 0, 0, 474, 475, 5, 101, 0, 0, 475, 476, 5, 114, 0, 0, 476, 477, 5, 101, 0, 0, 477, 478, 1, 0, 0, 0, 478, 479, 6, 17, 0, 0, 479, 47, 1, 0, 0, 0, 480, 482, 8, 0, 0, 0, 481, 480, 1, 0, 0, 0, 482, 483, 1, 0, 0, 0, 483, 481, 1, 0, 0, 0, 483, 484, 1, 0, 0, 0, 484, 485, 1, 0, 0, 0, 485, 486, 6, 18, 0, 0, 486, 49, 1, 0, 0, 0, 487, 488, 5, 47, 0, 0, 488, 489, 5, 47, 0, 0, 489, 493, 1, 0, 0, 0, 490, 492, 8, 1, 0, 0, 491, 490, 1, 0, 0, 0, 492, 495, 1, 0, 0, 0, 493, 491, 1, 0, 0, 0, 493, 494, 1, 0, 0, 0, 494, 497, 1, 0, 0, 0, 495, 493, 1, 0, 0, 0, 496, 498, 5, 13, 0, 0, 497, 496, 1, 0, 0, 0, 497, 498, 1, 0, 0, 0, 498, 500, 1, 0, 0, 0, 499, 501, 5, 10, 0, 0, 500, 499, 1, 0, 0, 0, 500, 501, 1, 0, 0, 0, 501, 502, 1, 0, 0, 0, 502, 503, 6, 19, 9, 0, 503, 51, 1, 0, 0, 0, 504, 505, 5, 47, 0, 0, 505, 506, 5, 42, 0, 0, 506, 511, 1, 0, 0, 0, 507, 510, 3, 52, 20, 0, 508, 510, 9, 0, 0, 0, 509, 507, 1, 0, 0, 0, 509, 508, 1, 0, 0, 0, 510, 513, 1, 0, 0, 0, 511, 
512, 1, 0, 0, 0, 511, 509, 1, 0, 0, 0, 512, 514, 1, 0, 0, 0, 513, 511, 1, 0, 0, 0, 514, 515, 5, 42, 0, 0, 515, 516, 5, 47, 0, 0, 516, 517, 1, 0, 0, 0, 517, 518, 6, 20, 9, 0, 518, 53, 1, 0, 0, 0, 519, 521, 7, 2, 0, 0, 520, 519, 1, 0, 0, 0, 521, 522, 1, 0, 0, 0, 522, 520, 1, 0, 0, 0, 522, 523, 1, 0, 0, 0, 523, 524, 1, 0, 0, 0, 524, 525, 6, 21, 9, 0, 525, 55, 1, 0, 0, 0, 526, 527, 3, 164, 76, 0, 527, 528, 1, 0, 0, 0, 528, 529, 6, 22, 10, 0, 529, 530, 6, 22, 11, 0, 530, 57, 1, 0, 0, 0, 531, 532, 3, 66, 27, 0, 532, 533, 1, 0, 0, 0, 533, 534, 6, 23, 12, 0, 534, 535, 6, 23, 13, 0, 535, 59, 1, 0, 0, 0, 536, 537, 3, 54, 21, 0, 537, 538, 1, 0, 0, 0, 538, 539, 6, 24, 9, 0, 539, 61, 1, 0, 0, 0, 540, 541, 3, 50, 19, 0, 541, 542, 1, 0, 0, 0, 542, 543, 6, 25, 9, 0, 543, 63, 1, 0, 0, 0, 544, 545, 3, 52, 20, 0, 545, 546, 1, 0, 0, 0, 546, 547, 6, 26, 9, 0, 547, 65, 1, 0, 0, 0, 548, 549, 5, 124, 0, 0, 549, 550, 1, 0, 0, 0, 550, 551, 6, 27, 13, 0, 551, 67, 1, 0, 0, 0, 552, 553, 7, 3, 0, 0, 553, 69, 1, 0, 0, 0, 554, 555, 7, 4, 0, 0, 555, 71, 1, 0, 0, 0, 556, 557, 5, 92, 0, 0, 557, 558, 7, 5, 0, 0, 558, 73, 1, 0, 0, 0, 559, 560, 8, 6, 0, 0, 560, 75, 1, 0, 0, 0, 561, 563, 7, 7, 0, 0, 562, 564, 7, 8, 0, 0, 563, 562, 1, 0, 0, 0, 563, 564, 1, 0, 0, 0, 564, 566, 1, 0, 0, 0, 565, 567, 3, 68, 28, 0, 566, 565, 1, 0, 0, 0, 567, 568, 1, 0, 0, 0, 568, 566, 1, 0, 0, 0, 568, 569, 1, 0, 0, 0, 569, 77, 1, 0, 0, 0, 570, 571, 5, 64, 0, 0, 571, 79, 1, 0, 0, 0, 572, 573, 5, 96, 0, 0, 573, 81, 1, 0, 0, 0, 574, 578, 8, 9, 0, 0, 575, 576, 5, 96, 0, 0, 576, 578, 5, 96, 0, 0, 577, 574, 1, 0, 0, 0, 577, 575, 1, 0, 0, 0, 578, 83, 1, 0, 0, 0, 579, 580, 5, 95, 0, 0, 580, 85, 1, 0, 0, 0, 581, 585, 3, 70, 29, 0, 582, 585, 3, 68, 28, 0, 583, 585, 3, 84, 36, 0, 584, 581, 1, 0, 0, 0, 584, 582, 1, 0, 0, 0, 584, 583, 1, 0, 0, 0, 585, 87, 1, 0, 0, 0, 586, 591, 5, 34, 0, 0, 587, 590, 3, 72, 30, 0, 588, 590, 3, 74, 31, 0, 589, 587, 1, 0, 0, 0, 589, 588, 1, 0, 0, 0, 590, 593, 1, 0, 0, 0, 591, 589, 1, 0, 0, 0, 591, 592, 1, 0, 
0, 0, 592, 594, 1, 0, 0, 0, 593, 591, 1, 0, 0, 0, 594, 616, 5, 34, 0, 0, 595, 596, 5, 34, 0, 0, 596, 597, 5, 34, 0, 0, 597, 598, 5, 34, 0, 0, 598, 602, 1, 0, 0, 0, 599, 601, 8, 1, 0, 0, 600, 599, 1, 0, 0, 0, 601, 604, 1, 0, 0, 0, 602, 603, 1, 0, 0, 0, 602, 600, 1, 0, 0, 0, 603, 605, 1, 0, 0, 0, 604, 602, 1, 0, 0, 0, 605, 606, 5, 34, 0, 0, 606, 607, 5, 34, 0, 0, 607, 608, 5, 34, 0, 0, 608, 610, 1, 0, 0, 0, 609, 611, 5, 34, 0, 0, 610, 609, 1, 0, 0, 0, 610, 611, 1, 0, 0, 0, 611, 613, 1, 0, 0, 0, 612, 614, 5, 34, 0, 0, 613, 612, 1, 0, 0, 0, 613, 614, 1, 0, 0, 0, 614, 616, 1, 0, 0, 0, 615, 586, 1, 0, 0, 0, 615, 595, 1, 0, 0, 0, 616, 89, 1, 0, 0, 0, 617, 619, 3, 68, 28, 0, 618, 617, 1, 0, 0, 0, 619, 620, 1, 0, 0, 0, 620, 618, 1, 0, 0, 0, 620, 621, 1, 0, 0, 0, 621, 91, 1, 0, 0, 0, 622, 624, 3, 68, 28, 0, 623, 622, 1, 0, 0, 0, 624, 625, 1, 0, 0, 0, 625, 623, 1, 0, 0, 0, 625, 626, 1, 0, 0, 0, 626, 627, 1, 0, 0, 0, 627, 631, 3, 108, 48, 0, 628, 630, 3, 68, 28, 0, 629, 628, 1, 0, 0, 0, 630, 633, 1, 0, 0, 0, 631, 629, 1, 0, 0, 0, 631, 632, 1, 0, 0, 0, 632, 665, 1, 0, 0, 0, 633, 631, 1, 0, 0, 0, 634, 636, 3, 108, 48, 0, 635, 637, 3, 68, 28, 0, 636, 635, 1, 0, 0, 0, 637, 638, 1, 0, 0, 0, 638, 636, 1, 0, 0, 0, 638, 639, 1, 0, 0, 0, 639, 665, 1, 0, 0, 0, 640, 642, 3, 68, 28, 0, 641, 640, 1, 0, 0, 0, 642, 643, 1, 0, 0, 0, 643, 641, 1, 0, 0, 0, 643, 644, 1, 0, 0, 0, 644, 652, 1, 0, 0, 0, 645, 649, 3, 108, 48, 0, 646, 648, 3, 68, 28, 0, 647, 646, 1, 0, 0, 0, 648, 651, 1, 0, 0, 0, 649, 647, 1, 0, 0, 0, 649, 650, 1, 0, 0, 0, 650, 653, 1, 0, 0, 0, 651, 649, 1, 0, 0, 0, 652, 645, 1, 0, 0, 0, 652, 653, 1, 0, 0, 0, 653, 654, 1, 0, 0, 0, 654, 655, 3, 76, 32, 0, 655, 665, 1, 0, 0, 0, 656, 658, 3, 108, 48, 0, 657, 659, 3, 68, 28, 0, 658, 657, 1, 0, 0, 0, 659, 660, 1, 0, 0, 0, 660, 658, 1, 0, 0, 0, 660, 661, 1, 0, 0, 0, 661, 662, 1, 0, 0, 0, 662, 663, 3, 76, 32, 0, 663, 665, 1, 0, 0, 0, 664, 623, 1, 0, 0, 0, 664, 634, 1, 0, 0, 0, 664, 641, 1, 0, 0, 0, 664, 656, 1, 0, 0, 0, 665, 93, 1, 0, 0, 0, 
666, 667, 5, 98, 0, 0, 667, 668, 5, 121, 0, 0, 668, 95, 1, 0, 0, 0, 669, 670, 5, 97, 0, 0, 670, 671, 5, 110, 0, 0, 671, 672, 5, 100, 0, 0, 672, 97, 1, 0, 0, 0, 673, 674, 5, 97, 0, 0, 674, 675, 5, 115, 0, 0, 675, 676, 5, 99, 0, 0, 676, 99, 1, 0, 0, 0, 677, 678, 5, 61, 0, 0, 678, 101, 1, 0, 0, 0, 679, 680, 5, 58, 0, 0, 680, 681, 5, 58, 0, 0, 681, 103, 1, 0, 0, 0, 682, 683, 5, 44, 0, 0, 683, 105, 1, 0, 0, 0, 684, 685, 5, 100, 0, 0, 685, 686, 5, 101, 0, 0, 686, 687, 5, 115, 0, 0, 687, 688, 5, 99, 0, 0, 688, 107, 1, 0, 0, 0, 689, 690, 5, 46, 0, 0, 690, 109, 1, 0, 0, 0, 691, 692, 5, 102, 0, 0, 692, 693, 5, 97, 0, 0, 693, 694, 5, 108, 0, 0, 694, 695, 5, 115, 0, 0, 695, 696, 5, 101, 0, 0, 696, 111, 1, 0, 0, 0, 697, 698, 5, 102, 0, 0, 698, 699, 5, 105, 0, 0, 699, 700, 5, 114, 0, 0, 700, 701, 5, 115, 0, 0, 701, 702, 5, 116, 0, 0, 702, 113, 1, 0, 0, 0, 703, 704, 5, 108, 0, 0, 704, 705, 5, 97, 0, 0, 705, 706, 5, 115, 0, 0, 706, 707, 5, 116, 0, 0, 707, 115, 1, 0, 0, 0, 708, 709, 5, 40, 0, 0, 709, 117, 1, 0, 0, 0, 710, 711, 5, 105, 0, 0, 711, 712, 5, 110, 0, 0, 712, 119, 1, 0, 0, 0, 713, 714, 5, 105, 0, 0, 714, 715, 5, 115, 0, 0, 715, 121, 1, 0, 0, 0, 716, 717, 5, 108, 0, 0, 717, 718, 5, 105, 0, 0, 718, 719, 5, 107, 0, 0, 719, 720, 5, 101, 0, 0, 720, 123, 1, 0, 0, 0, 721, 722, 5, 110, 0, 0, 722, 723, 5, 111, 0, 0, 723, 724, 5, 116, 0, 0, 724, 125, 1, 0, 0, 0, 725, 726, 5, 110, 0, 0, 726, 727, 5, 117, 0, 0, 727, 728, 5, 108, 0, 0, 728, 729, 5, 108, 0, 0, 729, 127, 1, 0, 0, 0, 730, 731, 5, 110, 0, 0, 731, 732, 5, 117, 0, 0, 732, 733, 5, 108, 0, 0, 733, 734, 5, 108, 0, 0, 734, 735, 5, 115, 0, 0, 735, 129, 1, 0, 0, 0, 736, 737, 5, 111, 0, 0, 737, 738, 5, 114, 0, 0, 738, 131, 1, 0, 0, 0, 739, 740, 5, 63, 0, 0, 740, 133, 1, 0, 0, 0, 741, 742, 5, 114, 0, 0, 742, 743, 5, 108, 0, 0, 743, 744, 5, 105, 0, 0, 744, 745, 5, 107, 0, 0, 745, 746, 5, 101, 0, 0, 746, 135, 1, 0, 0, 0, 747, 748, 5, 41, 0, 0, 748, 137, 1, 0, 0, 0, 749, 750, 5, 116, 0, 0, 750, 751, 5, 114, 0, 0, 751, 752, 5, 117, 0, 
0, 752, 753, 5, 101, 0, 0, 753, 139, 1, 0, 0, 0, 754, 755, 5, 61, 0, 0, 755, 756, 5, 61, 0, 0, 756, 141, 1, 0, 0, 0, 757, 758, 5, 61, 0, 0, 758, 759, 5, 126, 0, 0, 759, 143, 1, 0, 0, 0, 760, 761, 5, 33, 0, 0, 761, 762, 5, 61, 0, 0, 762, 145, 1, 0, 0, 0, 763, 764, 5, 60, 0, 0, 764, 147, 1, 0, 0, 0, 765, 766, 5, 60, 0, 0, 766, 767, 5, 61, 0, 0, 767, 149, 1, 0, 0, 0, 768, 769, 5, 62, 0, 0, 769, 151, 1, 0, 0, 0, 770, 771, 5, 62, 0, 0, 771, 772, 5, 61, 0, 0, 772, 153, 1, 0, 0, 0, 773, 774, 5, 43, 0, 0, 774, 155, 1, 0, 0, 0, 775, 776, 5, 45, 0, 0, 776, 157, 1, 0, 0, 0, 777, 778, 5, 42, 0, 0, 778, 159, 1, 0, 0, 0, 779, 780, 5, 47, 0, 0, 780, 161, 1, 0, 0, 0, 781, 782, 5, 37, 0, 0, 782, 163, 1, 0, 0, 0, 783, 784, 5, 91, 0, 0, 784, 785, 1, 0, 0, 0, 785, 786, 6, 76, 0, 0, 786, 787, 6, 76, 0, 0, 787, 165, 1, 0, 0, 0, 788, 789, 5, 93, 0, 0, 789, 790, 1, 0, 0, 0, 790, 791, 6, 77, 13, 0, 791, 792, 6, 77, 13, 0, 792, 167, 1, 0, 0, 0, 793, 797, 3, 70, 29, 0, 794, 796, 3, 86, 37, 0, 795, 794, 1, 0, 0, 0, 796, 799, 1, 0, 0, 0, 797, 795, 1, 0, 0, 0, 797, 798, 1, 0, 0, 0, 798, 810, 1, 0, 0, 0, 799, 797, 1, 0, 0, 0, 800, 803, 3, 84, 36, 0, 801, 803, 3, 78, 33, 0, 802, 800, 1, 0, 0, 0, 802, 801, 1, 0, 0, 0, 803, 805, 1, 0, 0, 0, 804, 806, 3, 86, 37, 0, 805, 804, 1, 0, 0, 0, 806, 807, 1, 0, 0, 0, 807, 805, 1, 0, 0, 0, 807, 808, 1, 0, 0, 0, 808, 810, 1, 0, 0, 0, 809, 793, 1, 0, 0, 0, 809, 802, 1, 0, 0, 0, 810, 169, 1, 0, 0, 0, 811, 813, 3, 80, 34, 0, 812, 814, 3, 82, 35, 0, 813, 812, 1, 0, 0, 0, 814, 815, 1, 0, 0, 0, 815, 813, 1, 0, 0, 0, 815, 816, 1, 0, 0, 0, 816, 817, 1, 0, 0, 0, 817, 818, 3, 80, 34, 0, 818, 171, 1, 0, 0, 0, 819, 820, 3, 170, 79, 0, 820, 173, 1, 0, 0, 0, 821, 822, 3, 50, 19, 0, 822, 823, 1, 0, 0, 0, 823, 824, 6, 81, 9, 0, 824, 175, 1, 0, 0, 0, 825, 826, 3, 52, 20, 0, 826, 827, 1, 0, 0, 0, 827, 828, 6, 82, 9, 0, 828, 177, 1, 0, 0, 0, 829, 830, 3, 54, 21, 0, 830, 831, 1, 0, 0, 0, 831, 832, 6, 83, 9, 0, 832, 179, 1, 0, 0, 0, 833, 834, 3, 66, 27, 0, 834, 835, 1, 0, 0, 0, 
835, 836, 6, 84, 12, 0, 836, 837, 6, 84, 13, 0, 837, 181, 1, 0, 0, 0, 838, 839, 3, 164, 76, 0, 839, 840, 1, 0, 0, 0, 840, 841, 6, 85, 10, 0, 841, 183, 1, 0, 0, 0, 842, 843, 3, 166, 77, 0, 843, 844, 1, 0, 0, 0, 844, 845, 6, 86, 14, 0, 845, 185, 1, 0, 0, 0, 846, 847, 3, 104, 46, 0, 847, 848, 1, 0, 0, 0, 848, 849, 6, 87, 15, 0, 849, 187, 1, 0, 0, 0, 850, 851, 3, 100, 44, 0, 851, 852, 1, 0, 0, 0, 852, 853, 6, 88, 16, 0, 853, 189, 1, 0, 0, 0, 854, 855, 3, 88, 38, 0, 855, 856, 1, 0, 0, 0, 856, 857, 6, 89, 17, 0, 857, 191, 1, 0, 0, 0, 858, 859, 5, 111, 0, 0, 859, 860, 5, 112, 0, 0, 860, 861, 5, 116, 0, 0, 861, 862, 5, 105, 0, 0, 862, 863, 5, 111, 0, 0, 863, 864, 5, 110, 0, 0, 864, 865, 5, 115, 0, 0, 865, 193, 1, 0, 0, 0, 866, 867, 5, 109, 0, 0, 867, 868, 5, 101, 0, 0, 868, 869, 5, 116, 0, 0, 869, 870, 5, 97, 0, 0, 870, 871, 5, 100, 0, 0, 871, 872, 5, 97, 0, 0, 872, 873, 5, 116, 0, 0, 873, 874, 5, 97, 0, 0, 874, 195, 1, 0, 0, 0, 875, 879, 8, 10, 0, 0, 876, 877, 5, 47, 0, 0, 877, 879, 8, 11, 0, 0, 878, 875, 1, 0, 0, 0, 878, 876, 1, 0, 0, 0, 879, 197, 1, 0, 0, 0, 880, 882, 3, 196, 92, 0, 881, 880, 1, 0, 0, 0, 882, 883, 1, 0, 0, 0, 883, 881, 1, 0, 0, 0, 883, 884, 1, 0, 0, 0, 884, 199, 1, 0, 0, 0, 885, 886, 3, 50, 19, 0, 886, 887, 1, 0, 0, 0, 887, 888, 6, 94, 9, 0, 888, 201, 1, 0, 0, 0, 889, 890, 3, 52, 20, 0, 890, 891, 1, 0, 0, 0, 891, 892, 6, 95, 9, 0, 892, 203, 1, 0, 0, 0, 893, 894, 3, 54, 21, 0, 894, 895, 1, 0, 0, 0, 895, 896, 6, 96, 9, 0, 896, 205, 1, 0, 0, 0, 897, 898, 3, 66, 27, 0, 898, 899, 1, 0, 0, 0, 899, 900, 6, 97, 12, 0, 900, 901, 6, 97, 13, 0, 901, 207, 1, 0, 0, 0, 902, 903, 3, 108, 48, 0, 903, 904, 1, 0, 0, 0, 904, 905, 6, 98, 18, 0, 905, 209, 1, 0, 0, 0, 906, 907, 3, 104, 46, 0, 907, 908, 1, 0, 0, 0, 908, 909, 6, 99, 15, 0, 909, 211, 1, 0, 0, 0, 910, 915, 3, 70, 29, 0, 911, 915, 3, 68, 28, 0, 912, 915, 3, 84, 36, 0, 913, 915, 3, 158, 73, 0, 914, 910, 1, 0, 0, 0, 914, 911, 1, 0, 0, 0, 914, 912, 1, 0, 0, 0, 914, 913, 1, 0, 0, 0, 915, 213, 1, 0, 0, 0, 916, 919, 3, 
70, 29, 0, 917, 919, 3, 158, 73, 0, 918, 916, 1, 0, 0, 0, 918, 917, 1, 0, 0, 0, 919, 923, 1, 0, 0, 0, 920, 922, 3, 212, 100, 0, 921, 920, 1, 0, 0, 0, 922, 925, 1, 0, 0, 0, 923, 921, 1, 0, 0, 0, 923, 924, 1, 0, 0, 0, 924, 936, 1, 0, 0, 0, 925, 923, 1, 0, 0, 0, 926, 929, 3, 84, 36, 0, 927, 929, 3, 78, 33, 0, 928, 926, 1, 0, 0, 0, 928, 927, 1, 0, 0, 0, 929, 931, 1, 0, 0, 0, 930, 932, 3, 212, 100, 0, 931, 930, 1, 0, 0, 0, 932, 933, 1, 0, 0, 0, 933, 931, 1, 0, 0, 0, 933, 934, 1, 0, 0, 0, 934, 936, 1, 0, 0, 0, 935, 918, 1, 0, 0, 0, 935, 928, 1, 0, 0, 0, 936, 215, 1, 0, 0, 0, 937, 940, 3, 214, 101, 0, 938, 940, 3, 170, 79, 0, 939, 937, 1, 0, 0, 0, 939, 938, 1, 0, 0, 0, 940, 941, 1, 0, 0, 0, 941, 939, 1, 0, 0, 0, 941, 942, 1, 0, 0, 0, 942, 217, 1, 0, 0, 0, 943, 944, 3, 50, 19, 0, 944, 945, 1, 0, 0, 0, 945, 946, 6, 103, 9, 0, 946, 219, 1, 0, 0, 0, 947, 948, 3, 52, 20, 0, 948, 949, 1, 0, 0, 0, 949, 950, 6, 104, 9, 0, 950, 221, 1, 0, 0, 0, 951, 952, 3, 54, 21, 0, 952, 953, 1, 0, 0, 0, 953, 954, 6, 105, 9, 0, 954, 223, 1, 0, 0, 0, 955, 956, 3, 66, 27, 0, 956, 957, 1, 0, 0, 0, 957, 958, 6, 106, 12, 0, 958, 959, 6, 106, 13, 0, 959, 225, 1, 0, 0, 0, 960, 961, 3, 100, 44, 0, 961, 962, 1, 0, 0, 0, 962, 963, 6, 107, 16, 0, 963, 227, 1, 0, 0, 0, 964, 965, 3, 104, 46, 0, 965, 966, 1, 0, 0, 0, 966, 967, 6, 108, 15, 0, 967, 229, 1, 0, 0, 0, 968, 969, 3, 108, 48, 0, 969, 970, 1, 0, 0, 0, 970, 971, 6, 109, 18, 0, 971, 231, 1, 0, 0, 0, 972, 973, 5, 97, 0, 0, 973, 974, 5, 115, 0, 0, 974, 233, 1, 0, 0, 0, 975, 976, 3, 216, 102, 0, 976, 977, 1, 0, 0, 0, 977, 978, 6, 111, 19, 0, 978, 235, 1, 0, 0, 0, 979, 980, 3, 50, 19, 0, 980, 981, 1, 0, 0, 0, 981, 982, 6, 112, 9, 0, 982, 237, 1, 0, 0, 0, 983, 984, 3, 52, 20, 0, 984, 985, 1, 0, 0, 0, 985, 986, 6, 113, 9, 0, 986, 239, 1, 0, 0, 0, 987, 988, 3, 54, 21, 0, 988, 989, 1, 0, 0, 0, 989, 990, 6, 114, 9, 0, 990, 241, 1, 0, 0, 0, 991, 992, 3, 66, 27, 0, 992, 993, 1, 0, 0, 0, 993, 994, 6, 115, 12, 0, 994, 995, 6, 115, 13, 0, 995, 243, 1, 0, 0, 0, 996, 
997, 3, 164, 76, 0, 997, 998, 1, 0, 0, 0, 998, 999, 6, 116, 10, 0, 999, 1000, 6, 116, 20, 0, 1000, 245, 1, 0, 0, 0, 1001, 1002, 5, 111, 0, 0, 1002, 1003, 5, 110, 0, 0, 1003, 1004, 1, 0, 0, 0, 1004, 1005, 6, 117, 21, 0, 1005, 247, 1, 0, 0, 0, 1006, 1007, 5, 119, 0, 0, 1007, 1008, 5, 105, 0, 0, 1008, 1009, 5, 116, 0, 0, 1009, 1010, 5, 104, 0, 0, 1010, 1011, 1, 0, 0, 0, 1011, 1012, 6, 118, 21, 0, 1012, 249, 1, 0, 0, 0, 1013, 1014, 8, 12, 0, 0, 1014, 251, 1, 0, 0, 0, 1015, 1017, 3, 250, 119, 0, 1016, 1015, 1, 0, 0, 0, 1017, 1018, 1, 0, 0, 0, 1018, 1016, 1, 0, 0, 0, 1018, 1019, 1, 0, 0, 0, 1019, 1020, 1, 0, 0, 0, 1020, 1021, 3, 320, 154, 0, 1021, 1023, 1, 0, 0, 0, 1022, 1016, 1, 0, 0, 0, 1022, 1023, 1, 0, 0, 0, 1023, 1025, 1, 0, 0, 0, 1024, 1026, 3, 250, 119, 0, 1025, 1024, 1, 0, 0, 0, 1026, 1027, 1, 0, 0, 0, 1027, 1025, 1, 0, 0, 0, 1027, 1028, 1, 0, 0, 0, 1028, 253, 1, 0, 0, 0, 1029, 1030, 3, 172, 80, 0, 1030, 1031, 1, 0, 0, 0, 1031, 1032, 6, 121, 22, 0, 1032, 255, 1, 0, 0, 0, 1033, 1034, 3, 252, 120, 0, 1034, 1035, 1, 0, 0, 0, 1035, 1036, 6, 122, 23, 0, 1036, 257, 1, 0, 0, 0, 1037, 1038, 3, 50, 19, 0, 1038, 1039, 1, 0, 0, 0, 1039, 1040, 6, 123, 9, 0, 1040, 259, 1, 0, 0, 0, 1041, 1042, 3, 52, 20, 0, 1042, 1043, 1, 0, 0, 0, 1043, 1044, 6, 124, 9, 0, 1044, 261, 1, 0, 0, 0, 1045, 1046, 3, 54, 21, 0, 1046, 1047, 1, 0, 0, 0, 1047, 1048, 6, 125, 9, 0, 1048, 263, 1, 0, 0, 0, 1049, 1050, 3, 66, 27, 0, 1050, 1051, 1, 0, 0, 0, 1051, 1052, 6, 126, 12, 0, 1052, 1053, 6, 126, 13, 0, 1053, 1054, 6, 126, 13, 0, 1054, 265, 1, 0, 0, 0, 1055, 1056, 3, 100, 44, 0, 1056, 1057, 1, 0, 0, 0, 1057, 1058, 6, 127, 16, 0, 1058, 267, 1, 0, 0, 0, 1059, 1060, 3, 104, 46, 0, 1060, 1061, 1, 0, 0, 0, 1061, 1062, 6, 128, 15, 0, 1062, 269, 1, 0, 0, 0, 1063, 1064, 3, 108, 48, 0, 1064, 1065, 1, 0, 0, 0, 1065, 1066, 6, 129, 18, 0, 1066, 271, 1, 0, 0, 0, 1067, 1068, 3, 248, 118, 0, 1068, 1069, 1, 0, 0, 0, 1069, 1070, 6, 130, 24, 0, 1070, 273, 1, 0, 0, 0, 1071, 1072, 3, 216, 102, 0, 1072, 1073, 1, 0, 0, 0, 
1073, 1074, 6, 131, 19, 0, 1074, 275, 1, 0, 0, 0, 1075, 1076, 3, 172, 80, 0, 1076, 1077, 1, 0, 0, 0, 1077, 1078, 6, 132, 22, 0, 1078, 277, 1, 0, 0, 0, 1079, 1080, 3, 50, 19, 0, 1080, 1081, 1, 0, 0, 0, 1081, 1082, 6, 133, 9, 0, 1082, 279, 1, 0, 0, 0, 1083, 1084, 3, 52, 20, 0, 1084, 1085, 1, 0, 0, 0, 1085, 1086, 6, 134, 9, 0, 1086, 281, 1, 0, 0, 0, 1087, 1088, 3, 54, 21, 0, 1088, 1089, 1, 0, 0, 0, 1089, 1090, 6, 135, 9, 0, 1090, 283, 1, 0, 0, 0, 1091, 1092, 3, 66, 27, 0, 1092, 1093, 1, 0, 0, 0, 1093, 1094, 6, 136, 12, 0, 1094, 1095, 6, 136, 13, 0, 1095, 285, 1, 0, 0, 0, 1096, 1097, 3, 108, 48, 0, 1097, 1098, 1, 0, 0, 0, 1098, 1099, 6, 137, 18, 0, 1099, 287, 1, 0, 0, 0, 1100, 1101, 3, 172, 80, 0, 1101, 1102, 1, 0, 0, 0, 1102, 1103, 6, 138, 22, 0, 1103, 289, 1, 0, 0, 0, 1104, 1105, 3, 168, 78, 0, 1105, 1106, 1, 0, 0, 0, 1106, 1107, 6, 139, 25, 0, 1107, 291, 1, 0, 0, 0, 1108, 1109, 3, 50, 19, 0, 1109, 1110, 1, 0, 0, 0, 1110, 1111, 6, 140, 9, 0, 1111, 293, 1, 0, 0, 0, 1112, 1113, 3, 52, 20, 0, 1113, 1114, 1, 0, 0, 0, 1114, 1115, 6, 141, 9, 0, 1115, 295, 1, 0, 0, 0, 1116, 1117, 3, 54, 21, 0, 1117, 1118, 1, 0, 0, 0, 1118, 1119, 6, 142, 9, 0, 1119, 297, 1, 0, 0, 0, 1120, 1121, 3, 66, 27, 0, 1121, 1122, 1, 0, 0, 0, 1122, 1123, 6, 143, 12, 0, 1123, 1124, 6, 143, 13, 0, 1124, 299, 1, 0, 0, 0, 1125, 1126, 5, 105, 0, 0, 1126, 1127, 5, 110, 0, 0, 1127, 1128, 5, 102, 0, 0, 1128, 1129, 5, 111, 0, 0, 1129, 301, 1, 0, 0, 0, 1130, 1131, 3, 50, 19, 0, 1131, 1132, 1, 0, 0, 0, 1132, 1133, 6, 145, 9, 0, 1133, 303, 1, 0, 0, 0, 1134, 1135, 3, 52, 20, 0, 1135, 1136, 1, 0, 0, 0, 1136, 1137, 6, 146, 9, 0, 1137, 305, 1, 0, 0, 0, 1138, 1139, 3, 54, 21, 0, 1139, 1140, 1, 0, 0, 0, 1140, 1141, 6, 147, 9, 0, 1141, 307, 1, 0, 0, 0, 1142, 1143, 3, 66, 27, 0, 1143, 1144, 1, 0, 0, 0, 1144, 1145, 6, 148, 12, 0, 1145, 1146, 6, 148, 13, 0, 1146, 309, 1, 0, 0, 0, 1147, 1148, 5, 102, 0, 0, 1148, 1149, 5, 117, 0, 0, 1149, 1150, 5, 110, 0, 0, 1150, 1151, 5, 99, 0, 0, 1151, 1152, 5, 116, 0, 0, 1152, 1153, 5, 
105, 0, 0, 1153, 1154, 5, 111, 0, 0, 1154, 1155, 5, 110, 0, 0, 1155, 1156, 5, 115, 0, 0, 1156, 311, 1, 0, 0, 0, 1157, 1158, 3, 50, 19, 0, 1158, 1159, 1, 0, 0, 0, 1159, 1160, 6, 150, 9, 0, 1160, 313, 1, 0, 0, 0, 1161, 1162, 3, 52, 20, 0, 1162, 1163, 1, 0, 0, 0, 1163, 1164, 6, 151, 9, 0, 1164, 315, 1, 0, 0, 0, 1165, 1166, 3, 54, 21, 0, 1166, 1167, 1, 0, 0, 0, 1167, 1168, 6, 152, 9, 0, 1168, 317, 1, 0, 0, 0, 1169, 1170, 3, 166, 77, 0, 1170, 1171, 1, 0, 0, 0, 1171, 1172, 6, 153, 14, 0, 1172, 1173, 6, 153, 13, 0, 1173, 319, 1, 0, 0, 0, 1174, 1175, 5, 58, 0, 0, 1175, 321, 1, 0, 0, 0, 1176, 1182, 3, 78, 33, 0, 1177, 1182, 3, 68, 28, 0, 1178, 1182, 3, 108, 48, 0, 1179, 1182, 3, 70, 29, 0, 1180, 1182, 3, 84, 36, 0, 1181, 1176, 1, 0, 0, 0, 1181, 1177, 1, 0, 0, 0, 1181, 1178, 1, 0, 0, 0, 1181, 1179, 1, 0, 0, 0, 1181, 1180, 1, 0, 0, 0, 1182, 1183, 1, 0, 0, 0, 1183, 1181, 1, 0, 0, 0, 1183, 1184, 1, 0, 0, 0, 1184, 323, 1, 0, 0, 0, 1185, 1186, 3, 50, 19, 0, 1186, 1187, 1, 0, 0, 0, 1187, 1188, 6, 156, 9, 0, 1188, 325, 1, 0, 0, 0, 1189, 1190, 3, 52, 20, 0, 1190, 1191, 1, 0, 0, 0, 1191, 1192, 6, 157, 9, 0, 1192, 327, 1, 0, 0, 0, 1193, 1194, 3, 54, 21, 0, 1194, 1195, 1, 0, 0, 0, 1195, 1196, 6, 158, 9, 0, 1196, 329, 1, 0, 0, 0, 58, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 483, 493, 497, 500, 509, 511, 522, 563, 568, 577, 584, 589, 591, 602, 610, 613, 615, 620, 625, 631, 638, 643, 649, 652, 660, 664, 797, 802, 807, 809, 815, 878, 883, 914, 918, 923, 928, 933, 935, 939, 941, 1018, 1022, 1027, 1181, 1183, 26, 5, 2, 0, 5, 4, 0, 5, 6, 0, 5, 1, 0, 5, 3, 0, 5, 10, 0, 5, 8, 0, 5, 5, 0, 5, 9, 0, 0, 1, 0, 7, 65, 0, 5, 0, 0, 7, 26, 0, 4, 0, 0, 7, 66, 0, 7, 35, 0, 7, 33, 0, 7, 27, 0, 7, 37, 0, 7, 78, 0, 5, 11, 0, 5, 7, 0, 7, 68, 0, 7, 88, 0, 7, 87, 0, 7, 67, 0] \ No newline at end of file diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.java index 
ac3354d0aa907..75fa8061fa48b 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.java @@ -69,11 +69,11 @@ private static String[] makeRuleNames() { "QUOTED_ID", "QUOTED_IDENTIFIER", "EXPR_LINE_COMMENT", "EXPR_MULTILINE_COMMENT", "EXPR_WS", "FROM_PIPE", "FROM_OPENING_BRACKET", "FROM_CLOSING_BRACKET", "FROM_COMMA", "FROM_ASSIGN", "FROM_QUOTED_STRING", "OPTIONS", "METADATA", - "FROM_UNQUOTED_IDENTIFIER_PART", "FROM_UNQUOTED_IDENTIFIER", "FROM_QUOTED_IDENTIFIER", - "FROM_LINE_COMMENT", "FROM_MULTILINE_COMMENT", "FROM_WS", "PROJECT_PIPE", - "PROJECT_DOT", "PROJECT_COMMA", "UNQUOTED_ID_BODY_WITH_PATTERN", "UNQUOTED_ID_PATTERN", - "ID_PATTERN", "PROJECT_LINE_COMMENT", "PROJECT_MULTILINE_COMMENT", "PROJECT_WS", - "RENAME_PIPE", "RENAME_ASSIGN", "RENAME_COMMA", "RENAME_DOT", "AS", "RENAME_ID_PATTERN", + "FROM_UNQUOTED_IDENTIFIER_PART", "FROM_UNQUOTED_IDENTIFIER", "FROM_LINE_COMMENT", + "FROM_MULTILINE_COMMENT", "FROM_WS", "PROJECT_PIPE", "PROJECT_DOT", "PROJECT_COMMA", + "UNQUOTED_ID_BODY_WITH_PATTERN", "UNQUOTED_ID_PATTERN", "ID_PATTERN", + "PROJECT_LINE_COMMENT", "PROJECT_MULTILINE_COMMENT", "PROJECT_WS", "RENAME_PIPE", + "RENAME_ASSIGN", "RENAME_COMMA", "RENAME_DOT", "AS", "RENAME_ID_PATTERN", "RENAME_LINE_COMMENT", "RENAME_MULTILINE_COMMENT", "RENAME_WS", "ENRICH_PIPE", "ENRICH_OPENING_BRACKET", "ON", "WITH", "ENRICH_POLICY_NAME_BODY", "ENRICH_POLICY_NAME", "ENRICH_QUOTED_IDENTIFIER", "ENRICH_MODE_UNQUOTED_VALUE", "ENRICH_LINE_COMMENT", @@ -191,7 +191,7 @@ public EsqlBaseLexer(CharStream input) { public ATN getATN() { return _ATN; } public static final String _serializedATN = - "\u0004\u0000n\u04b3\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff\uffff"+ + "\u0004\u0000n\u04ad\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff\uffff"+ "\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff\uffff"+ 
"\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff\uffff"+ "\u0006\uffff\uffff\u0002\u0000\u0007\u0000\u0002\u0001\u0007\u0001\u0002"+ @@ -234,720 +234,716 @@ public EsqlBaseLexer(CharStream input) { "\u0002\u0096\u0007\u0096\u0002\u0097\u0007\u0097\u0002\u0098\u0007\u0098"+ "\u0002\u0099\u0007\u0099\u0002\u009a\u0007\u009a\u0002\u009b\u0007\u009b"+ "\u0002\u009c\u0007\u009c\u0002\u009d\u0007\u009d\u0002\u009e\u0007\u009e"+ - "\u0002\u009f\u0007\u009f\u0001\u0000\u0001\u0000\u0001\u0000\u0001\u0000"+ "\u0001\u0000\u0001\u0000\u0001\u0000\u0001\u0000\u0001\u0000\u0001\u0000"+ - "\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001"+ - "\u0001\u0001\u0001\u0002\u0001\u0002\u0001\u0002\u0001\u0002\u0001\u0002"+ - "\u0001\u0002\u0001\u0002\u0001\u0002\u0001\u0002\u0001\u0003\u0001\u0003"+ - "\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0004"+ + "\u0001\u0000\u0001\u0000\u0001\u0000\u0001\u0000\u0001\u0001\u0001\u0001"+ + "\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0002"+ + "\u0001\u0002\u0001\u0002\u0001\u0002\u0001\u0002\u0001\u0002\u0001\u0002"+ + "\u0001\u0002\u0001\u0002\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003"+ + "\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0004\u0001\u0004\u0001\u0004"+ "\u0001\u0004\u0001\u0004\u0001\u0004\u0001\u0004\u0001\u0004\u0001\u0004"+ - "\u0001\u0004\u0001\u0004\u0001\u0004\u0001\u0005\u0001\u0005\u0001\u0005"+ - "\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0006\u0001\u0006"+ - "\u0001\u0006\u0001\u0006\u0001\u0006\u0001\u0006\u0001\u0006\u0001\u0007"+ + "\u0001\u0004\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005"+ + "\u0001\u0005\u0001\u0005\u0001\u0006\u0001\u0006\u0001\u0006\u0001\u0006"+ + "\u0001\u0006\u0001\u0006\u0001\u0006\u0001\u0007\u0001\u0007\u0001\u0007"+ "\u0001\u0007\u0001\u0007\u0001\u0007\u0001\u0007\u0001\u0007\u0001\u0007"+ - "\u0001\u0007\u0001\u0007\u0001\u0007\u0001\u0007\u0001\u0007\u0001\u0007"+ 
- "\u0001\u0007\u0001\b\u0001\b\u0001\b\u0001\b\u0001\b\u0001\b\u0001\b\u0001"+ - "\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001\n\u0001"+ - "\n\u0001\n\u0001\n\u0001\n\u0001\n\u0001\n\u0001\u000b\u0001\u000b\u0001"+ + "\u0001\u0007\u0001\u0007\u0001\u0007\u0001\u0007\u0001\u0007\u0001\b\u0001"+ + "\b\u0001\b\u0001\b\u0001\b\u0001\b\u0001\b\u0001\t\u0001\t\u0001\t\u0001"+ + "\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001\n\u0001\n\u0001\n\u0001\n\u0001"+ + "\n\u0001\n\u0001\n\u0001\u000b\u0001\u000b\u0001\u000b\u0001\u000b\u0001"+ "\u000b\u0001\u000b\u0001\u000b\u0001\u000b\u0001\u000b\u0001\u000b\u0001"+ - "\u000b\u0001\u000b\u0001\u000b\u0001\u000b\u0001\f\u0001\f\u0001\f\u0001"+ - "\f\u0001\f\u0001\f\u0001\f\u0001\f\u0001\f\u0001\r\u0001\r\u0001\r\u0001"+ - "\r\u0001\r\u0001\r\u0001\u000e\u0001\u000e\u0001\u000e\u0001\u000e\u0001"+ - "\u000e\u0001\u000e\u0001\u000e\u0001\u000f\u0001\u000f\u0001\u000f\u0001"+ - "\u000f\u0001\u000f\u0001\u000f\u0001\u000f\u0001\u0010\u0001\u0010\u0001"+ - "\u0010\u0001\u0010\u0001\u0010\u0001\u0010\u0001\u0010\u0001\u0010\u0001"+ + "\u000b\u0001\u000b\u0001\f\u0001\f\u0001\f\u0001\f\u0001\f\u0001\f\u0001"+ + "\f\u0001\f\u0001\f\u0001\r\u0001\r\u0001\r\u0001\r\u0001\r\u0001\r\u0001"+ + "\u000e\u0001\u000e\u0001\u000e\u0001\u000e\u0001\u000e\u0001\u000e\u0001"+ + "\u000e\u0001\u000f\u0001\u000f\u0001\u000f\u0001\u000f\u0001\u000f\u0001"+ + "\u000f\u0001\u000f\u0001\u0010\u0001\u0010\u0001\u0010\u0001\u0010\u0001"+ + "\u0010\u0001\u0010\u0001\u0010\u0001\u0010\u0001\u0011\u0001\u0011\u0001"+ "\u0011\u0001\u0011\u0001\u0011\u0001\u0011\u0001\u0011\u0001\u0011\u0001"+ - "\u0011\u0001\u0011\u0001\u0012\u0004\u0012\u01e4\b\u0012\u000b\u0012\f"+ - "\u0012\u01e5\u0001\u0012\u0001\u0012\u0001\u0013\u0001\u0013\u0001\u0013"+ - "\u0001\u0013\u0005\u0013\u01ee\b\u0013\n\u0013\f\u0013\u01f1\t\u0013\u0001"+ - "\u0013\u0003\u0013\u01f4\b\u0013\u0001\u0013\u0003\u0013\u01f7\b\u0013"+ - 
"\u0001\u0013\u0001\u0013\u0001\u0014\u0001\u0014\u0001\u0014\u0001\u0014"+ - "\u0001\u0014\u0005\u0014\u0200\b\u0014\n\u0014\f\u0014\u0203\t\u0014\u0001"+ - "\u0014\u0001\u0014\u0001\u0014\u0001\u0014\u0001\u0014\u0001\u0015\u0004"+ - "\u0015\u020b\b\u0015\u000b\u0015\f\u0015\u020c\u0001\u0015\u0001\u0015"+ - "\u0001\u0016\u0001\u0016\u0001\u0016\u0001\u0016\u0001\u0016\u0001\u0017"+ - "\u0001\u0017\u0001\u0017\u0001\u0017\u0001\u0017\u0001\u0018\u0001\u0018"+ - "\u0001\u0018\u0001\u0018\u0001\u0019\u0001\u0019\u0001\u0019\u0001\u0019"+ - "\u0001\u001a\u0001\u001a\u0001\u001a\u0001\u001a\u0001\u001b\u0001\u001b"+ - "\u0001\u001b\u0001\u001b\u0001\u001c\u0001\u001c\u0001\u001d\u0001\u001d"+ - "\u0001\u001e\u0001\u001e\u0001\u001e\u0001\u001f\u0001\u001f\u0001 \u0001"+ - " \u0003 \u0236\b \u0001 \u0004 \u0239\b \u000b \f \u023a\u0001!\u0001"+ - "!\u0001\"\u0001\"\u0001#\u0001#\u0001#\u0003#\u0244\b#\u0001$\u0001$\u0001"+ - "%\u0001%\u0001%\u0003%\u024b\b%\u0001&\u0001&\u0001&\u0005&\u0250\b&\n"+ - "&\f&\u0253\t&\u0001&\u0001&\u0001&\u0001&\u0001&\u0001&\u0005&\u025b\b"+ - "&\n&\f&\u025e\t&\u0001&\u0001&\u0001&\u0001&\u0001&\u0003&\u0265\b&\u0001"+ - "&\u0003&\u0268\b&\u0003&\u026a\b&\u0001\'\u0004\'\u026d\b\'\u000b\'\f"+ - "\'\u026e\u0001(\u0004(\u0272\b(\u000b(\f(\u0273\u0001(\u0001(\u0005(\u0278"+ - "\b(\n(\f(\u027b\t(\u0001(\u0001(\u0004(\u027f\b(\u000b(\f(\u0280\u0001"+ - "(\u0004(\u0284\b(\u000b(\f(\u0285\u0001(\u0001(\u0005(\u028a\b(\n(\f("+ - "\u028d\t(\u0003(\u028f\b(\u0001(\u0001(\u0001(\u0001(\u0004(\u0295\b("+ - "\u000b(\f(\u0296\u0001(\u0001(\u0003(\u029b\b(\u0001)\u0001)\u0001)\u0001"+ - "*\u0001*\u0001*\u0001*\u0001+\u0001+\u0001+\u0001+\u0001,\u0001,\u0001"+ - "-\u0001-\u0001-\u0001.\u0001.\u0001/\u0001/\u0001/\u0001/\u0001/\u0001"+ - "0\u00010\u00011\u00011\u00011\u00011\u00011\u00011\u00012\u00012\u0001"+ - "2\u00012\u00012\u00012\u00013\u00013\u00013\u00013\u00013\u00014\u0001"+ - 
"4\u00015\u00015\u00015\u00016\u00016\u00016\u00017\u00017\u00017\u0001"+ - "7\u00017\u00018\u00018\u00018\u00018\u00019\u00019\u00019\u00019\u0001"+ - "9\u0001:\u0001:\u0001:\u0001:\u0001:\u0001:\u0001;\u0001;\u0001;\u0001"+ - "<\u0001<\u0001=\u0001=\u0001=\u0001=\u0001=\u0001=\u0001>\u0001>\u0001"+ - "?\u0001?\u0001?\u0001?\u0001?\u0001@\u0001@\u0001@\u0001A\u0001A\u0001"+ - "A\u0001B\u0001B\u0001B\u0001C\u0001C\u0001D\u0001D\u0001D\u0001E\u0001"+ - "E\u0001F\u0001F\u0001F\u0001G\u0001G\u0001H\u0001H\u0001I\u0001I\u0001"+ - "J\u0001J\u0001K\u0001K\u0001L\u0001L\u0001L\u0001L\u0001L\u0001M\u0001"+ - "M\u0001M\u0001M\u0001M\u0001N\u0001N\u0005N\u031e\bN\nN\fN\u0321\tN\u0001"+ - "N\u0001N\u0003N\u0325\bN\u0001N\u0004N\u0328\bN\u000bN\fN\u0329\u0003"+ - "N\u032c\bN\u0001O\u0001O\u0004O\u0330\bO\u000bO\fO\u0331\u0001O\u0001"+ - "O\u0001P\u0001P\u0001Q\u0001Q\u0001Q\u0001Q\u0001R\u0001R\u0001R\u0001"+ - "R\u0001S\u0001S\u0001S\u0001S\u0001T\u0001T\u0001T\u0001T\u0001T\u0001"+ - "U\u0001U\u0001U\u0001U\u0001V\u0001V\u0001V\u0001V\u0001W\u0001W\u0001"+ - "W\u0001W\u0001X\u0001X\u0001X\u0001X\u0001Y\u0001Y\u0001Y\u0001Y\u0001"+ - "Z\u0001Z\u0001Z\u0001Z\u0001Z\u0001Z\u0001Z\u0001Z\u0001[\u0001[\u0001"+ - "[\u0001[\u0001[\u0001[\u0001[\u0001[\u0001[\u0001\\\u0001\\\u0001\\\u0003"+ - "\\\u0371\b\\\u0001]\u0004]\u0374\b]\u000b]\f]\u0375\u0001^\u0001^\u0001"+ - "^\u0001^\u0001_\u0001_\u0001_\u0001_\u0001`\u0001`\u0001`\u0001`\u0001"+ - "a\u0001a\u0001a\u0001a\u0001b\u0001b\u0001b\u0001b\u0001b\u0001c\u0001"+ - "c\u0001c\u0001c\u0001d\u0001d\u0001d\u0001d\u0001e\u0001e\u0001e\u0001"+ - "e\u0003e\u0399\be\u0001f\u0001f\u0003f\u039d\bf\u0001f\u0005f\u03a0\b"+ - "f\nf\ff\u03a3\tf\u0001f\u0001f\u0003f\u03a7\bf\u0001f\u0004f\u03aa\bf"+ - "\u000bf\ff\u03ab\u0003f\u03ae\bf\u0001g\u0001g\u0004g\u03b2\bg\u000bg"+ - "\fg\u03b3\u0001h\u0001h\u0001h\u0001h\u0001i\u0001i\u0001i\u0001i\u0001"+ - "j\u0001j\u0001j\u0001j\u0001k\u0001k\u0001k\u0001k\u0001k\u0001l\u0001"+ - 
"l\u0001l\u0001l\u0001m\u0001m\u0001m\u0001m\u0001n\u0001n\u0001n\u0001"+ - "n\u0001o\u0001o\u0001o\u0001p\u0001p\u0001p\u0001p\u0001q\u0001q\u0001"+ - "q\u0001q\u0001r\u0001r\u0001r\u0001r\u0001s\u0001s\u0001s\u0001s\u0001"+ - "t\u0001t\u0001t\u0001t\u0001t\u0001u\u0001u\u0001u\u0001u\u0001u\u0001"+ - "v\u0001v\u0001v\u0001v\u0001v\u0001w\u0001w\u0001w\u0001w\u0001w\u0001"+ - "w\u0001w\u0001x\u0001x\u0001y\u0004y\u03ff\by\u000by\fy\u0400\u0001y\u0001"+ - "y\u0003y\u0405\by\u0001y\u0004y\u0408\by\u000by\fy\u0409\u0001z\u0001"+ - "z\u0001z\u0001z\u0001{\u0001{\u0001{\u0001{\u0001|\u0001|\u0001|\u0001"+ - "|\u0001}\u0001}\u0001}\u0001}\u0001~\u0001~\u0001~\u0001~\u0001\u007f"+ - "\u0001\u007f\u0001\u007f\u0001\u007f\u0001\u007f\u0001\u007f\u0001\u0080"+ - "\u0001\u0080\u0001\u0080\u0001\u0080\u0001\u0081\u0001\u0081\u0001\u0081"+ - "\u0001\u0081\u0001\u0082\u0001\u0082\u0001\u0082\u0001\u0082\u0001\u0083"+ - "\u0001\u0083\u0001\u0083\u0001\u0083\u0001\u0084\u0001\u0084\u0001\u0084"+ - "\u0001\u0084\u0001\u0085\u0001\u0085\u0001\u0085\u0001\u0085\u0001\u0086"+ - "\u0001\u0086\u0001\u0086\u0001\u0086\u0001\u0087\u0001\u0087\u0001\u0087"+ - "\u0001\u0087\u0001\u0088\u0001\u0088\u0001\u0088\u0001\u0088\u0001\u0089"+ - "\u0001\u0089\u0001\u0089\u0001\u0089\u0001\u0089\u0001\u008a\u0001\u008a"+ - "\u0001\u008a\u0001\u008a\u0001\u008b\u0001\u008b\u0001\u008b\u0001\u008b"+ - "\u0001\u008c\u0001\u008c\u0001\u008c\u0001\u008c\u0001\u008d\u0001\u008d"+ - "\u0001\u008d\u0001\u008d\u0001\u008e\u0001\u008e\u0001\u008e\u0001\u008e"+ - "\u0001\u008f\u0001\u008f\u0001\u008f\u0001\u008f\u0001\u0090\u0001\u0090"+ - "\u0001\u0090\u0001\u0090\u0001\u0090\u0001\u0091\u0001\u0091\u0001\u0091"+ - "\u0001\u0091\u0001\u0091\u0001\u0092\u0001\u0092\u0001\u0092\u0001\u0092"+ - "\u0001\u0093\u0001\u0093\u0001\u0093\u0001\u0093\u0001\u0094\u0001\u0094"+ - "\u0001\u0094\u0001\u0094\u0001\u0095\u0001\u0095\u0001\u0095\u0001\u0095"+ - 
"\u0001\u0095\u0001\u0096\u0001\u0096\u0001\u0096\u0001\u0096\u0001\u0096"+ - "\u0001\u0096\u0001\u0096\u0001\u0096\u0001\u0096\u0001\u0096\u0001\u0097"+ - "\u0001\u0097\u0001\u0097\u0001\u0097\u0001\u0098\u0001\u0098\u0001\u0098"+ - "\u0001\u0098\u0001\u0099\u0001\u0099\u0001\u0099\u0001\u0099\u0001\u009a"+ - "\u0001\u009a\u0001\u009a\u0001\u009a\u0001\u009a\u0001\u009b\u0001\u009b"+ - "\u0001\u009c\u0001\u009c\u0001\u009c\u0001\u009c\u0001\u009c\u0004\u009c"+ - "\u04a4\b\u009c\u000b\u009c\f\u009c\u04a5\u0001\u009d\u0001\u009d\u0001"+ - "\u009d\u0001\u009d\u0001\u009e\u0001\u009e\u0001\u009e\u0001\u009e\u0001"+ - "\u009f\u0001\u009f\u0001\u009f\u0001\u009f\u0002\u0201\u025c\u0000\u00a0"+ - "\f\u0001\u000e\u0002\u0010\u0003\u0012\u0004\u0014\u0005\u0016\u0006\u0018"+ - "\u0007\u001a\b\u001c\t\u001e\n \u000b\"\f$\r&\u000e(\u000f*\u0010,\u0011"+ - ".\u00120\u00132\u00144\u00156\u00168\u0000:\u0000<\u0017>\u0018@\u0019"+ - "B\u001aD\u0000F\u0000H\u0000J\u0000L\u0000N\u0000P\u0000R\u0000T\u0000"+ - "V\u0000X\u001bZ\u001c\\\u001d^\u001e`\u001fb d!f\"h#j$l%n&p\'r(t)v*x+"+ - "z,|-~.\u0080/\u00820\u00841\u00862\u00883\u008a4\u008c5\u008e6\u00907"+ - "\u00928\u00949\u0096:\u0098;\u009a<\u009c=\u009e>\u00a0?\u00a2@\u00a4"+ - "A\u00a6B\u00a8C\u00aa\u0000\u00acD\u00aeE\u00b0F\u00b2G\u00b4\u0000\u00b6"+ - "\u0000\u00b8\u0000\u00ba\u0000\u00bc\u0000\u00be\u0000\u00c0H\u00c2I\u00c4"+ - "\u0000\u00c6J\u00c8\u0000\u00caK\u00ccL\u00ceM\u00d0\u0000\u00d2\u0000"+ - "\u00d4\u0000\u00d6\u0000\u00d8\u0000\u00daN\u00dcO\u00deP\u00e0Q\u00e2"+ - "\u0000\u00e4\u0000\u00e6\u0000\u00e8\u0000\u00eaR\u00ec\u0000\u00eeS\u00f0"+ - "T\u00f2U\u00f4\u0000\u00f6\u0000\u00f8V\u00faW\u00fc\u0000\u00feX\u0100"+ - "\u0000\u0102\u0000\u0104Y\u0106Z\u0108[\u010a\u0000\u010c\u0000\u010e"+ - "\u0000\u0110\u0000\u0112\u0000\u0114\u0000\u0116\u0000\u0118\\\u011a]"+ - "\u011c^\u011e\u0000\u0120\u0000\u0122\u0000\u0124\u0000\u0126_\u0128`"+ - 
"\u012aa\u012c\u0000\u012eb\u0130c\u0132d\u0134e\u0136\u0000\u0138f\u013a"+ - "g\u013ch\u013ei\u0140\u0000\u0142j\u0144k\u0146l\u0148m\u014an\f\u0000"+ - "\u0001\u0002\u0003\u0004\u0005\u0006\u0007\b\t\n\u000b\r\u0006\u0000\t"+ - "\n\r\r //[[]]\u0002\u0000\n\n\r\r\u0003\u0000\t\n\r\r \u0001\u00000"+ - "9\u0002\u0000AZaz\u0005\u0000\"\"\\\\nnrrtt\u0004\u0000\n\n\r\r\"\"\\"+ - "\\\u0002\u0000EEee\u0002\u0000++--\u0001\u0000``\n\u0000\t\n\r\r ,,/"+ - "/==[[]]``||\u0002\u0000**//\u000b\u0000\t\n\r\r \"#,,//::<<>?\\\\||\u04ce"+ - "\u0000\f\u0001\u0000\u0000\u0000\u0000\u000e\u0001\u0000\u0000\u0000\u0000"+ - "\u0010\u0001\u0000\u0000\u0000\u0000\u0012\u0001\u0000\u0000\u0000\u0000"+ - "\u0014\u0001\u0000\u0000\u0000\u0000\u0016\u0001\u0000\u0000\u0000\u0000"+ - "\u0018\u0001\u0000\u0000\u0000\u0000\u001a\u0001\u0000\u0000\u0000\u0000"+ - "\u001c\u0001\u0000\u0000\u0000\u0000\u001e\u0001\u0000\u0000\u0000\u0000"+ - " \u0001\u0000\u0000\u0000\u0000\"\u0001\u0000\u0000\u0000\u0000$\u0001"+ - "\u0000\u0000\u0000\u0000&\u0001\u0000\u0000\u0000\u0000(\u0001\u0000\u0000"+ - "\u0000\u0000*\u0001\u0000\u0000\u0000\u0000,\u0001\u0000\u0000\u0000\u0000"+ - ".\u0001\u0000\u0000\u0000\u00000\u0001\u0000\u0000\u0000\u00002\u0001"+ - "\u0000\u0000\u0000\u00004\u0001\u0000\u0000\u0000\u00006\u0001\u0000\u0000"+ - "\u0000\u00018\u0001\u0000\u0000\u0000\u0001:\u0001\u0000\u0000\u0000\u0001"+ - "<\u0001\u0000\u0000\u0000\u0001>\u0001\u0000\u0000\u0000\u0001@\u0001"+ - "\u0000\u0000\u0000\u0002B\u0001\u0000\u0000\u0000\u0002X\u0001\u0000\u0000"+ - "\u0000\u0002Z\u0001\u0000\u0000\u0000\u0002\\\u0001\u0000\u0000\u0000"+ - "\u0002^\u0001\u0000\u0000\u0000\u0002`\u0001\u0000\u0000\u0000\u0002b"+ - "\u0001\u0000\u0000\u0000\u0002d\u0001\u0000\u0000\u0000\u0002f\u0001\u0000"+ - "\u0000\u0000\u0002h\u0001\u0000\u0000\u0000\u0002j\u0001\u0000\u0000\u0000"+ - "\u0002l\u0001\u0000\u0000\u0000\u0002n\u0001\u0000\u0000\u0000\u0002p"+ - 
"\u0001\u0000\u0000\u0000\u0002r\u0001\u0000\u0000\u0000\u0002t\u0001\u0000"+ - "\u0000\u0000\u0002v\u0001\u0000\u0000\u0000\u0002x\u0001\u0000\u0000\u0000"+ - "\u0002z\u0001\u0000\u0000\u0000\u0002|\u0001\u0000\u0000\u0000\u0002~"+ - "\u0001\u0000\u0000\u0000\u0002\u0080\u0001\u0000\u0000\u0000\u0002\u0082"+ - "\u0001\u0000\u0000\u0000\u0002\u0084\u0001\u0000\u0000\u0000\u0002\u0086"+ - "\u0001\u0000\u0000\u0000\u0002\u0088\u0001\u0000\u0000\u0000\u0002\u008a"+ - "\u0001\u0000\u0000\u0000\u0002\u008c\u0001\u0000\u0000\u0000\u0002\u008e"+ - "\u0001\u0000\u0000\u0000\u0002\u0090\u0001\u0000\u0000\u0000\u0002\u0092"+ - "\u0001\u0000\u0000\u0000\u0002\u0094\u0001\u0000\u0000\u0000\u0002\u0096"+ - "\u0001\u0000\u0000\u0000\u0002\u0098\u0001\u0000\u0000\u0000\u0002\u009a"+ - "\u0001\u0000\u0000\u0000\u0002\u009c\u0001\u0000\u0000\u0000\u0002\u009e"+ - "\u0001\u0000\u0000\u0000\u0002\u00a0\u0001\u0000\u0000\u0000\u0002\u00a2"+ - "\u0001\u0000\u0000\u0000\u0002\u00a4\u0001\u0000\u0000\u0000\u0002\u00a6"+ - "\u0001\u0000\u0000\u0000\u0002\u00a8\u0001\u0000\u0000\u0000\u0002\u00ac"+ - "\u0001\u0000\u0000\u0000\u0002\u00ae\u0001\u0000\u0000\u0000\u0002\u00b0"+ - "\u0001\u0000\u0000\u0000\u0002\u00b2\u0001\u0000\u0000\u0000\u0003\u00b4"+ - "\u0001\u0000\u0000\u0000\u0003\u00b6\u0001\u0000\u0000\u0000\u0003\u00b8"+ - "\u0001\u0000\u0000\u0000\u0003\u00ba\u0001\u0000\u0000\u0000\u0003\u00bc"+ - "\u0001\u0000\u0000\u0000\u0003\u00be\u0001\u0000\u0000\u0000\u0003\u00c0"+ - "\u0001\u0000\u0000\u0000\u0003\u00c2\u0001\u0000\u0000\u0000\u0003\u00c6"+ - "\u0001\u0000\u0000\u0000\u0003\u00c8\u0001\u0000\u0000\u0000\u0003\u00ca"+ - "\u0001\u0000\u0000\u0000\u0003\u00cc\u0001\u0000\u0000\u0000\u0003\u00ce"+ - "\u0001\u0000\u0000\u0000\u0004\u00d0\u0001\u0000\u0000\u0000\u0004\u00d2"+ - "\u0001\u0000\u0000\u0000\u0004\u00d4\u0001\u0000\u0000\u0000\u0004\u00da"+ - "\u0001\u0000\u0000\u0000\u0004\u00dc\u0001\u0000\u0000\u0000\u0004\u00de"+ - 
"\u0001\u0000\u0000\u0000\u0004\u00e0\u0001\u0000\u0000\u0000\u0005\u00e2"+ - "\u0001\u0000\u0000\u0000\u0005\u00e4\u0001\u0000\u0000\u0000\u0005\u00e6"+ - "\u0001\u0000\u0000\u0000\u0005\u00e8\u0001\u0000\u0000\u0000\u0005\u00ea"+ - "\u0001\u0000\u0000\u0000\u0005\u00ec\u0001\u0000\u0000\u0000\u0005\u00ee"+ - "\u0001\u0000\u0000\u0000\u0005\u00f0\u0001\u0000\u0000\u0000\u0005\u00f2"+ - "\u0001\u0000\u0000\u0000\u0006\u00f4\u0001\u0000\u0000\u0000\u0006\u00f6"+ - "\u0001\u0000\u0000\u0000\u0006\u00f8\u0001\u0000\u0000\u0000\u0006\u00fa"+ - "\u0001\u0000\u0000\u0000\u0006\u00fe\u0001\u0000\u0000\u0000\u0006\u0100"+ - "\u0001\u0000\u0000\u0000\u0006\u0102\u0001\u0000\u0000\u0000\u0006\u0104"+ - "\u0001\u0000\u0000\u0000\u0006\u0106\u0001\u0000\u0000\u0000\u0006\u0108"+ - "\u0001\u0000\u0000\u0000\u0007\u010a\u0001\u0000\u0000\u0000\u0007\u010c"+ - "\u0001\u0000\u0000\u0000\u0007\u010e\u0001\u0000\u0000\u0000\u0007\u0110"+ - "\u0001\u0000\u0000\u0000\u0007\u0112\u0001\u0000\u0000\u0000\u0007\u0114"+ - "\u0001\u0000\u0000\u0000\u0007\u0116\u0001\u0000\u0000\u0000\u0007\u0118"+ - "\u0001\u0000\u0000\u0000\u0007\u011a\u0001\u0000\u0000\u0000\u0007\u011c"+ - "\u0001\u0000\u0000\u0000\b\u011e\u0001\u0000\u0000\u0000\b\u0120\u0001"+ - "\u0000\u0000\u0000\b\u0122\u0001\u0000\u0000\u0000\b\u0124\u0001\u0000"+ - "\u0000\u0000\b\u0126\u0001\u0000\u0000\u0000\b\u0128\u0001\u0000\u0000"+ - "\u0000\b\u012a\u0001\u0000\u0000\u0000\t\u012c\u0001\u0000\u0000\u0000"+ - "\t\u012e\u0001\u0000\u0000\u0000\t\u0130\u0001\u0000\u0000\u0000\t\u0132"+ - "\u0001\u0000\u0000\u0000\t\u0134\u0001\u0000\u0000\u0000\n\u0136\u0001"+ - "\u0000\u0000\u0000\n\u0138\u0001\u0000\u0000\u0000\n\u013a\u0001\u0000"+ - "\u0000\u0000\n\u013c\u0001\u0000\u0000\u0000\n\u013e\u0001\u0000\u0000"+ - "\u0000\u000b\u0140\u0001\u0000\u0000\u0000\u000b\u0142\u0001\u0000\u0000"+ - "\u0000\u000b\u0144\u0001\u0000\u0000\u0000\u000b\u0146\u0001\u0000\u0000"+ - 
"\u0000\u000b\u0148\u0001\u0000\u0000\u0000\u000b\u014a\u0001\u0000\u0000"+ - "\u0000\f\u014c\u0001\u0000\u0000\u0000\u000e\u0156\u0001\u0000\u0000\u0000"+ - "\u0010\u015d\u0001\u0000\u0000\u0000\u0012\u0166\u0001\u0000\u0000\u0000"+ - "\u0014\u016d\u0001\u0000\u0000\u0000\u0016\u0177\u0001\u0000\u0000\u0000"+ - "\u0018\u017e\u0001\u0000\u0000\u0000\u001a\u0185\u0001\u0000\u0000\u0000"+ - "\u001c\u0193\u0001\u0000\u0000\u0000\u001e\u019a\u0001\u0000\u0000\u0000"+ - " \u01a2\u0001\u0000\u0000\u0000\"\u01a9\u0001\u0000\u0000\u0000$\u01b5"+ - "\u0001\u0000\u0000\u0000&\u01be\u0001\u0000\u0000\u0000(\u01c4\u0001\u0000"+ - "\u0000\u0000*\u01cb\u0001\u0000\u0000\u0000,\u01d2\u0001\u0000\u0000\u0000"+ - ".\u01da\u0001\u0000\u0000\u00000\u01e3\u0001\u0000\u0000\u00002\u01e9"+ - "\u0001\u0000\u0000\u00004\u01fa\u0001\u0000\u0000\u00006\u020a\u0001\u0000"+ - "\u0000\u00008\u0210\u0001\u0000\u0000\u0000:\u0215\u0001\u0000\u0000\u0000"+ - "<\u021a\u0001\u0000\u0000\u0000>\u021e\u0001\u0000\u0000\u0000@\u0222"+ - "\u0001\u0000\u0000\u0000B\u0226\u0001\u0000\u0000\u0000D\u022a\u0001\u0000"+ - "\u0000\u0000F\u022c\u0001\u0000\u0000\u0000H\u022e\u0001\u0000\u0000\u0000"+ - "J\u0231\u0001\u0000\u0000\u0000L\u0233\u0001\u0000\u0000\u0000N\u023c"+ - "\u0001\u0000\u0000\u0000P\u023e\u0001\u0000\u0000\u0000R\u0243\u0001\u0000"+ - "\u0000\u0000T\u0245\u0001\u0000\u0000\u0000V\u024a\u0001\u0000\u0000\u0000"+ - "X\u0269\u0001\u0000\u0000\u0000Z\u026c\u0001\u0000\u0000\u0000\\\u029a"+ - "\u0001\u0000\u0000\u0000^\u029c\u0001\u0000\u0000\u0000`\u029f\u0001\u0000"+ - "\u0000\u0000b\u02a3\u0001\u0000\u0000\u0000d\u02a7\u0001\u0000\u0000\u0000"+ - "f\u02a9\u0001\u0000\u0000\u0000h\u02ac\u0001\u0000\u0000\u0000j\u02ae"+ - "\u0001\u0000\u0000\u0000l\u02b3\u0001\u0000\u0000\u0000n\u02b5\u0001\u0000"+ - "\u0000\u0000p\u02bb\u0001\u0000\u0000\u0000r\u02c1\u0001\u0000\u0000\u0000"+ - "t\u02c6\u0001\u0000\u0000\u0000v\u02c8\u0001\u0000\u0000\u0000x\u02cb"+ - 
"\u0001\u0000\u0000\u0000z\u02ce\u0001\u0000\u0000\u0000|\u02d3\u0001\u0000"+ - "\u0000\u0000~\u02d7\u0001\u0000\u0000\u0000\u0080\u02dc\u0001\u0000\u0000"+ - "\u0000\u0082\u02e2\u0001\u0000\u0000\u0000\u0084\u02e5\u0001\u0000\u0000"+ - "\u0000\u0086\u02e7\u0001\u0000\u0000\u0000\u0088\u02ed\u0001\u0000\u0000"+ - "\u0000\u008a\u02ef\u0001\u0000\u0000\u0000\u008c\u02f4\u0001\u0000\u0000"+ - "\u0000\u008e\u02f7\u0001\u0000\u0000\u0000\u0090\u02fa\u0001\u0000\u0000"+ - "\u0000\u0092\u02fd\u0001\u0000\u0000\u0000\u0094\u02ff\u0001\u0000\u0000"+ - "\u0000\u0096\u0302\u0001\u0000\u0000\u0000\u0098\u0304\u0001\u0000\u0000"+ - "\u0000\u009a\u0307\u0001\u0000\u0000\u0000\u009c\u0309\u0001\u0000\u0000"+ - "\u0000\u009e\u030b\u0001\u0000\u0000\u0000\u00a0\u030d\u0001\u0000\u0000"+ - "\u0000\u00a2\u030f\u0001\u0000\u0000\u0000\u00a4\u0311\u0001\u0000\u0000"+ - "\u0000\u00a6\u0316\u0001\u0000\u0000\u0000\u00a8\u032b\u0001\u0000\u0000"+ - "\u0000\u00aa\u032d\u0001\u0000\u0000\u0000\u00ac\u0335\u0001\u0000\u0000"+ - "\u0000\u00ae\u0337\u0001\u0000\u0000\u0000\u00b0\u033b\u0001\u0000\u0000"+ - "\u0000\u00b2\u033f\u0001\u0000\u0000\u0000\u00b4\u0343\u0001\u0000\u0000"+ - "\u0000\u00b6\u0348\u0001\u0000\u0000\u0000\u00b8\u034c\u0001\u0000\u0000"+ - "\u0000\u00ba\u0350\u0001\u0000\u0000\u0000\u00bc\u0354\u0001\u0000\u0000"+ - "\u0000\u00be\u0358\u0001\u0000\u0000\u0000\u00c0\u035c\u0001\u0000\u0000"+ - "\u0000\u00c2\u0364\u0001\u0000\u0000\u0000\u00c4\u0370\u0001\u0000\u0000"+ - "\u0000\u00c6\u0373\u0001\u0000\u0000\u0000\u00c8\u0377\u0001\u0000\u0000"+ - "\u0000\u00ca\u037b\u0001\u0000\u0000\u0000\u00cc\u037f\u0001\u0000\u0000"+ - "\u0000\u00ce\u0383\u0001\u0000\u0000\u0000\u00d0\u0387\u0001\u0000\u0000"+ - "\u0000\u00d2\u038c\u0001\u0000\u0000\u0000\u00d4\u0390\u0001\u0000\u0000"+ - "\u0000\u00d6\u0398\u0001\u0000\u0000\u0000\u00d8\u03ad\u0001\u0000\u0000"+ - "\u0000\u00da\u03b1\u0001\u0000\u0000\u0000\u00dc\u03b5\u0001\u0000\u0000"+ - 
"\u0000\u00de\u03b9\u0001\u0000\u0000\u0000\u00e0\u03bd\u0001\u0000\u0000"+ - "\u0000\u00e2\u03c1\u0001\u0000\u0000\u0000\u00e4\u03c6\u0001\u0000\u0000"+ - "\u0000\u00e6\u03ca\u0001\u0000\u0000\u0000\u00e8\u03ce\u0001\u0000\u0000"+ - "\u0000\u00ea\u03d2\u0001\u0000\u0000\u0000\u00ec\u03d5\u0001\u0000\u0000"+ - "\u0000\u00ee\u03d9\u0001\u0000\u0000\u0000\u00f0\u03dd\u0001\u0000\u0000"+ - "\u0000\u00f2\u03e1\u0001\u0000\u0000\u0000\u00f4\u03e5\u0001\u0000\u0000"+ - "\u0000\u00f6\u03ea\u0001\u0000\u0000\u0000\u00f8\u03ef\u0001\u0000\u0000"+ - "\u0000\u00fa\u03f4\u0001\u0000\u0000\u0000\u00fc\u03fb\u0001\u0000\u0000"+ - "\u0000\u00fe\u0404\u0001\u0000\u0000\u0000\u0100\u040b\u0001\u0000\u0000"+ - "\u0000\u0102\u040f\u0001\u0000\u0000\u0000\u0104\u0413\u0001\u0000\u0000"+ - "\u0000\u0106\u0417\u0001\u0000\u0000\u0000\u0108\u041b\u0001\u0000\u0000"+ - "\u0000\u010a\u041f\u0001\u0000\u0000\u0000\u010c\u0425\u0001\u0000\u0000"+ - "\u0000\u010e\u0429\u0001\u0000\u0000\u0000\u0110\u042d\u0001\u0000\u0000"+ - "\u0000\u0112\u0431\u0001\u0000\u0000\u0000\u0114\u0435\u0001\u0000\u0000"+ - "\u0000\u0116\u0439\u0001\u0000\u0000\u0000\u0118\u043d\u0001\u0000\u0000"+ - "\u0000\u011a\u0441\u0001\u0000\u0000\u0000\u011c\u0445\u0001\u0000\u0000"+ - "\u0000\u011e\u0449\u0001\u0000\u0000\u0000\u0120\u044e\u0001\u0000\u0000"+ - "\u0000\u0122\u0452\u0001\u0000\u0000\u0000\u0124\u0456\u0001\u0000\u0000"+ - "\u0000\u0126\u045a\u0001\u0000\u0000\u0000\u0128\u045e\u0001\u0000\u0000"+ - "\u0000\u012a\u0462\u0001\u0000\u0000\u0000\u012c\u0466\u0001\u0000\u0000"+ - "\u0000\u012e\u046b\u0001\u0000\u0000\u0000\u0130\u0470\u0001\u0000\u0000"+ - "\u0000\u0132\u0474\u0001\u0000\u0000\u0000\u0134\u0478\u0001\u0000\u0000"+ - "\u0000\u0136\u047c\u0001\u0000\u0000\u0000\u0138\u0481\u0001\u0000\u0000"+ - "\u0000\u013a\u048b\u0001\u0000\u0000\u0000\u013c\u048f\u0001\u0000\u0000"+ - "\u0000\u013e\u0493\u0001\u0000\u0000\u0000\u0140\u0497\u0001\u0000\u0000"+ - 
"\u0000\u0142\u049c\u0001\u0000\u0000\u0000\u0144\u04a3\u0001\u0000\u0000"+ - "\u0000\u0146\u04a7\u0001\u0000\u0000\u0000\u0148\u04ab\u0001\u0000\u0000"+ - "\u0000\u014a\u04af\u0001\u0000\u0000\u0000\u014c\u014d\u0005d\u0000\u0000"+ - "\u014d\u014e\u0005i\u0000\u0000\u014e\u014f\u0005s\u0000\u0000\u014f\u0150"+ - "\u0005s\u0000\u0000\u0150\u0151\u0005e\u0000\u0000\u0151\u0152\u0005c"+ - "\u0000\u0000\u0152\u0153\u0005t\u0000\u0000\u0153\u0154\u0001\u0000\u0000"+ - "\u0000\u0154\u0155\u0006\u0000\u0000\u0000\u0155\r\u0001\u0000\u0000\u0000"+ - "\u0156\u0157\u0005d\u0000\u0000\u0157\u0158\u0005r\u0000\u0000\u0158\u0159"+ - "\u0005o\u0000\u0000\u0159\u015a\u0005p\u0000\u0000\u015a\u015b\u0001\u0000"+ - "\u0000\u0000\u015b\u015c\u0006\u0001\u0001\u0000\u015c\u000f\u0001\u0000"+ - "\u0000\u0000\u015d\u015e\u0005e\u0000\u0000\u015e\u015f\u0005n\u0000\u0000"+ - "\u015f\u0160\u0005r\u0000\u0000\u0160\u0161\u0005i\u0000\u0000\u0161\u0162"+ - "\u0005c\u0000\u0000\u0162\u0163\u0005h\u0000\u0000\u0163\u0164\u0001\u0000"+ - "\u0000\u0000\u0164\u0165\u0006\u0002\u0002\u0000\u0165\u0011\u0001\u0000"+ - "\u0000\u0000\u0166\u0167\u0005e\u0000\u0000\u0167\u0168\u0005v\u0000\u0000"+ - "\u0168\u0169\u0005a\u0000\u0000\u0169\u016a\u0005l\u0000\u0000\u016a\u016b"+ - "\u0001\u0000\u0000\u0000\u016b\u016c\u0006\u0003\u0000\u0000\u016c\u0013"+ - "\u0001\u0000\u0000\u0000\u016d\u016e\u0005e\u0000\u0000\u016e\u016f\u0005"+ - "x\u0000\u0000\u016f\u0170\u0005p\u0000\u0000\u0170\u0171\u0005l\u0000"+ - "\u0000\u0171\u0172\u0005a\u0000\u0000\u0172\u0173\u0005i\u0000\u0000\u0173"+ - "\u0174\u0005n\u0000\u0000\u0174\u0175\u0001\u0000\u0000\u0000\u0175\u0176"+ - "\u0006\u0004\u0003\u0000\u0176\u0015\u0001\u0000\u0000\u0000\u0177\u0178"+ - "\u0005f\u0000\u0000\u0178\u0179\u0005r\u0000\u0000\u0179\u017a\u0005o"+ - "\u0000\u0000\u017a\u017b\u0005m\u0000\u0000\u017b\u017c\u0001\u0000\u0000"+ - "\u0000\u017c\u017d\u0006\u0005\u0004\u0000\u017d\u0017\u0001\u0000\u0000"+ - 
"\u0000\u017e\u017f\u0005g\u0000\u0000\u017f\u0180\u0005r\u0000\u0000\u0180"+ - "\u0181\u0005o\u0000\u0000\u0181\u0182\u0005k\u0000\u0000\u0182\u0183\u0001"+ - "\u0000\u0000\u0000\u0183\u0184\u0006\u0006\u0000\u0000\u0184\u0019\u0001"+ - "\u0000\u0000\u0000\u0185\u0186\u0005i\u0000\u0000\u0186\u0187\u0005n\u0000"+ - "\u0000\u0187\u0188\u0005l\u0000\u0000\u0188\u0189\u0005i\u0000\u0000\u0189"+ - "\u018a\u0005n\u0000\u0000\u018a\u018b\u0005e\u0000\u0000\u018b\u018c\u0005"+ - "s\u0000\u0000\u018c\u018d\u0005t\u0000\u0000\u018d\u018e\u0005a\u0000"+ - "\u0000\u018e\u018f\u0005t\u0000\u0000\u018f\u0190\u0005s\u0000\u0000\u0190"+ - "\u0191\u0001\u0000\u0000\u0000\u0191\u0192\u0006\u0007\u0000\u0000\u0192"+ - "\u001b\u0001\u0000\u0000\u0000\u0193\u0194\u0005k\u0000\u0000\u0194\u0195"+ - "\u0005e\u0000\u0000\u0195\u0196\u0005e\u0000\u0000\u0196\u0197\u0005p"+ - "\u0000\u0000\u0197\u0198\u0001\u0000\u0000\u0000\u0198\u0199\u0006\b\u0001"+ - "\u0000\u0199\u001d\u0001\u0000\u0000\u0000\u019a\u019b\u0005l\u0000\u0000"+ - "\u019b\u019c\u0005i\u0000\u0000\u019c\u019d\u0005m\u0000\u0000\u019d\u019e"+ - "\u0005i\u0000\u0000\u019e\u019f\u0005t\u0000\u0000\u019f\u01a0\u0001\u0000"+ - "\u0000\u0000\u01a0\u01a1\u0006\t\u0000\u0000\u01a1\u001f\u0001\u0000\u0000"+ - "\u0000\u01a2\u01a3\u0005m\u0000\u0000\u01a3\u01a4\u0005e\u0000\u0000\u01a4"+ - "\u01a5\u0005t\u0000\u0000\u01a5\u01a6\u0005a\u0000\u0000\u01a6\u01a7\u0001"+ - "\u0000\u0000\u0000\u01a7\u01a8\u0006\n\u0005\u0000\u01a8!\u0001\u0000"+ - "\u0000\u0000\u01a9\u01aa\u0005m\u0000\u0000\u01aa\u01ab\u0005v\u0000\u0000"+ - "\u01ab\u01ac\u0005_\u0000\u0000\u01ac\u01ad\u0005e\u0000\u0000\u01ad\u01ae"+ - "\u0005x\u0000\u0000\u01ae\u01af\u0005p\u0000\u0000\u01af\u01b0\u0005a"+ - "\u0000\u0000\u01b0\u01b1\u0005n\u0000\u0000\u01b1\u01b2\u0005d\u0000\u0000"+ - "\u01b2\u01b3\u0001\u0000\u0000\u0000\u01b3\u01b4\u0006\u000b\u0006\u0000"+ - "\u01b4#\u0001\u0000\u0000\u0000\u01b5\u01b6\u0005r\u0000\u0000\u01b6\u01b7"+ - 
"\u0005e\u0000\u0000\u01b7\u01b8\u0005n\u0000\u0000\u01b8\u01b9\u0005a"+ - "\u0000\u0000\u01b9\u01ba\u0005m\u0000\u0000\u01ba\u01bb\u0005e\u0000\u0000"+ - "\u01bb\u01bc\u0001\u0000\u0000\u0000\u01bc\u01bd\u0006\f\u0007\u0000\u01bd"+ - "%\u0001\u0000\u0000\u0000\u01be\u01bf\u0005r\u0000\u0000\u01bf\u01c0\u0005"+ - "o\u0000\u0000\u01c0\u01c1\u0005w\u0000\u0000\u01c1\u01c2\u0001\u0000\u0000"+ - "\u0000\u01c2\u01c3\u0006\r\u0000\u0000\u01c3\'\u0001\u0000\u0000\u0000"+ - "\u01c4\u01c5\u0005s\u0000\u0000\u01c5\u01c6\u0005h\u0000\u0000\u01c6\u01c7"+ - "\u0005o\u0000\u0000\u01c7\u01c8\u0005w\u0000\u0000\u01c8\u01c9\u0001\u0000"+ - "\u0000\u0000\u01c9\u01ca\u0006\u000e\b\u0000\u01ca)\u0001\u0000\u0000"+ - "\u0000\u01cb\u01cc\u0005s\u0000\u0000\u01cc\u01cd\u0005o\u0000\u0000\u01cd"+ - "\u01ce\u0005r\u0000\u0000\u01ce\u01cf\u0005t\u0000\u0000\u01cf\u01d0\u0001"+ - "\u0000\u0000\u0000\u01d0\u01d1\u0006\u000f\u0000\u0000\u01d1+\u0001\u0000"+ - "\u0000\u0000\u01d2\u01d3\u0005s\u0000\u0000\u01d3\u01d4\u0005t\u0000\u0000"+ - "\u01d4\u01d5\u0005a\u0000\u0000\u01d5\u01d6\u0005t\u0000\u0000\u01d6\u01d7"+ - "\u0005s\u0000\u0000\u01d7\u01d8\u0001\u0000\u0000\u0000\u01d8\u01d9\u0006"+ - "\u0010\u0000\u0000\u01d9-\u0001\u0000\u0000\u0000\u01da\u01db\u0005w\u0000"+ - "\u0000\u01db\u01dc\u0005h\u0000\u0000\u01dc\u01dd\u0005e\u0000\u0000\u01dd"+ - "\u01de\u0005r\u0000\u0000\u01de\u01df\u0005e\u0000\u0000\u01df\u01e0\u0001"+ - "\u0000\u0000\u0000\u01e0\u01e1\u0006\u0011\u0000\u0000\u01e1/\u0001\u0000"+ - "\u0000\u0000\u01e2\u01e4\b\u0000\u0000\u0000\u01e3\u01e2\u0001\u0000\u0000"+ - "\u0000\u01e4\u01e5\u0001\u0000\u0000\u0000\u01e5\u01e3\u0001\u0000\u0000"+ - "\u0000\u01e5\u01e6\u0001\u0000\u0000\u0000\u01e6\u01e7\u0001\u0000\u0000"+ - "\u0000\u01e7\u01e8\u0006\u0012\u0000\u0000\u01e81\u0001\u0000\u0000\u0000"+ - "\u01e9\u01ea\u0005/\u0000\u0000\u01ea\u01eb\u0005/\u0000\u0000\u01eb\u01ef"+ - "\u0001\u0000\u0000\u0000\u01ec\u01ee\b\u0001\u0000\u0000\u01ed\u01ec\u0001"+ - 
"\u0000\u0000\u0000\u01ee\u01f1\u0001\u0000\u0000\u0000\u01ef\u01ed\u0001"+ - "\u0000\u0000\u0000\u01ef\u01f0\u0001\u0000\u0000\u0000\u01f0\u01f3\u0001"+ - "\u0000\u0000\u0000\u01f1\u01ef\u0001\u0000\u0000\u0000\u01f2\u01f4\u0005"+ - "\r\u0000\u0000\u01f3\u01f2\u0001\u0000\u0000\u0000\u01f3\u01f4\u0001\u0000"+ - "\u0000\u0000\u01f4\u01f6\u0001\u0000\u0000\u0000\u01f5\u01f7\u0005\n\u0000"+ - "\u0000\u01f6\u01f5\u0001\u0000\u0000\u0000\u01f6\u01f7\u0001\u0000\u0000"+ - "\u0000\u01f7\u01f8\u0001\u0000\u0000\u0000\u01f8\u01f9\u0006\u0013\t\u0000"+ - "\u01f93\u0001\u0000\u0000\u0000\u01fa\u01fb\u0005/\u0000\u0000\u01fb\u01fc"+ - "\u0005*\u0000\u0000\u01fc\u0201\u0001\u0000\u0000\u0000\u01fd\u0200\u0003"+ - "4\u0014\u0000\u01fe\u0200\t\u0000\u0000\u0000\u01ff\u01fd\u0001\u0000"+ - "\u0000\u0000\u01ff\u01fe\u0001\u0000\u0000\u0000\u0200\u0203\u0001\u0000"+ - "\u0000\u0000\u0201\u0202\u0001\u0000\u0000\u0000\u0201\u01ff\u0001\u0000"+ - "\u0000\u0000\u0202\u0204\u0001\u0000\u0000\u0000\u0203\u0201\u0001\u0000"+ - "\u0000\u0000\u0204\u0205\u0005*\u0000\u0000\u0205\u0206\u0005/\u0000\u0000"+ - "\u0206\u0207\u0001\u0000\u0000\u0000\u0207\u0208\u0006\u0014\t\u0000\u0208"+ - "5\u0001\u0000\u0000\u0000\u0209\u020b\u0007\u0002\u0000\u0000\u020a\u0209"+ - "\u0001\u0000\u0000\u0000\u020b\u020c\u0001\u0000\u0000\u0000\u020c\u020a"+ - "\u0001\u0000\u0000\u0000\u020c\u020d\u0001\u0000\u0000\u0000\u020d\u020e"+ - "\u0001\u0000\u0000\u0000\u020e\u020f\u0006\u0015\t\u0000\u020f7\u0001"+ - "\u0000\u0000\u0000\u0210\u0211\u0003\u00a4L\u0000\u0211\u0212\u0001\u0000"+ - "\u0000\u0000\u0212\u0213\u0006\u0016\n\u0000\u0213\u0214\u0006\u0016\u000b"+ - "\u0000\u02149\u0001\u0000\u0000\u0000\u0215\u0216\u0003B\u001b\u0000\u0216"+ - "\u0217\u0001\u0000\u0000\u0000\u0217\u0218\u0006\u0017\f\u0000\u0218\u0219"+ - "\u0006\u0017\r\u0000\u0219;\u0001\u0000\u0000\u0000\u021a\u021b\u0003"+ - "6\u0015\u0000\u021b\u021c\u0001\u0000\u0000\u0000\u021c\u021d\u0006\u0018"+ - 
"\t\u0000\u021d=\u0001\u0000\u0000\u0000\u021e\u021f\u00032\u0013\u0000"+ - "\u021f\u0220\u0001\u0000\u0000\u0000\u0220\u0221\u0006\u0019\t\u0000\u0221"+ - "?\u0001\u0000\u0000\u0000\u0222\u0223\u00034\u0014\u0000\u0223\u0224\u0001"+ - "\u0000\u0000\u0000\u0224\u0225\u0006\u001a\t\u0000\u0225A\u0001\u0000"+ - "\u0000\u0000\u0226\u0227\u0005|\u0000\u0000\u0227\u0228\u0001\u0000\u0000"+ - "\u0000\u0228\u0229\u0006\u001b\r\u0000\u0229C\u0001\u0000\u0000\u0000"+ - "\u022a\u022b\u0007\u0003\u0000\u0000\u022bE\u0001\u0000\u0000\u0000\u022c"+ - "\u022d\u0007\u0004\u0000\u0000\u022dG\u0001\u0000\u0000\u0000\u022e\u022f"+ - "\u0005\\\u0000\u0000\u022f\u0230\u0007\u0005\u0000\u0000\u0230I\u0001"+ - "\u0000\u0000\u0000\u0231\u0232\b\u0006\u0000\u0000\u0232K\u0001\u0000"+ - "\u0000\u0000\u0233\u0235\u0007\u0007\u0000\u0000\u0234\u0236\u0007\b\u0000"+ - "\u0000\u0235\u0234\u0001\u0000\u0000\u0000\u0235\u0236\u0001\u0000\u0000"+ - "\u0000\u0236\u0238\u0001\u0000\u0000\u0000\u0237\u0239\u0003D\u001c\u0000"+ - "\u0238\u0237\u0001\u0000\u0000\u0000\u0239\u023a\u0001\u0000\u0000\u0000"+ - "\u023a\u0238\u0001\u0000\u0000\u0000\u023a\u023b\u0001\u0000\u0000\u0000"+ - "\u023bM\u0001\u0000\u0000\u0000\u023c\u023d\u0005@\u0000\u0000\u023dO"+ - "\u0001\u0000\u0000\u0000\u023e\u023f\u0005`\u0000\u0000\u023fQ\u0001\u0000"+ - "\u0000\u0000\u0240\u0244\b\t\u0000\u0000\u0241\u0242\u0005`\u0000\u0000"+ - "\u0242\u0244\u0005`\u0000\u0000\u0243\u0240\u0001\u0000\u0000\u0000\u0243"+ - "\u0241\u0001\u0000\u0000\u0000\u0244S\u0001\u0000\u0000\u0000\u0245\u0246"+ - "\u0005_\u0000\u0000\u0246U\u0001\u0000\u0000\u0000\u0247\u024b\u0003F"+ - "\u001d\u0000\u0248\u024b\u0003D\u001c\u0000\u0249\u024b\u0003T$\u0000"+ - "\u024a\u0247\u0001\u0000\u0000\u0000\u024a\u0248\u0001\u0000\u0000\u0000"+ - "\u024a\u0249\u0001\u0000\u0000\u0000\u024bW\u0001\u0000\u0000\u0000\u024c"+ - "\u0251\u0005\"\u0000\u0000\u024d\u0250\u0003H\u001e\u0000\u024e\u0250"+ - 
"\u0003J\u001f\u0000\u024f\u024d\u0001\u0000\u0000\u0000\u024f\u024e\u0001"+ - "\u0000\u0000\u0000\u0250\u0253\u0001\u0000\u0000\u0000\u0251\u024f\u0001"+ - "\u0000\u0000\u0000\u0251\u0252\u0001\u0000\u0000\u0000\u0252\u0254\u0001"+ - "\u0000\u0000\u0000\u0253\u0251\u0001\u0000\u0000\u0000\u0254\u026a\u0005"+ - "\"\u0000\u0000\u0255\u0256\u0005\"\u0000\u0000\u0256\u0257\u0005\"\u0000"+ - "\u0000\u0257\u0258\u0005\"\u0000\u0000\u0258\u025c\u0001\u0000\u0000\u0000"+ - "\u0259\u025b\b\u0001\u0000\u0000\u025a\u0259\u0001\u0000\u0000\u0000\u025b"+ - "\u025e\u0001\u0000\u0000\u0000\u025c\u025d\u0001\u0000\u0000\u0000\u025c"+ - "\u025a\u0001\u0000\u0000\u0000\u025d\u025f\u0001\u0000\u0000\u0000\u025e"+ - "\u025c\u0001\u0000\u0000\u0000\u025f\u0260\u0005\"\u0000\u0000\u0260\u0261"+ - "\u0005\"\u0000\u0000\u0261\u0262\u0005\"\u0000\u0000\u0262\u0264\u0001"+ - "\u0000\u0000\u0000\u0263\u0265\u0005\"\u0000\u0000\u0264\u0263\u0001\u0000"+ - "\u0000\u0000\u0264\u0265\u0001\u0000\u0000\u0000\u0265\u0267\u0001\u0000"+ - "\u0000\u0000\u0266\u0268\u0005\"\u0000\u0000\u0267\u0266\u0001\u0000\u0000"+ - "\u0000\u0267\u0268\u0001\u0000\u0000\u0000\u0268\u026a\u0001\u0000\u0000"+ - "\u0000\u0269\u024c\u0001\u0000\u0000\u0000\u0269\u0255\u0001\u0000\u0000"+ - "\u0000\u026aY\u0001\u0000\u0000\u0000\u026b\u026d\u0003D\u001c\u0000\u026c"+ - "\u026b\u0001\u0000\u0000\u0000\u026d\u026e\u0001\u0000\u0000\u0000\u026e"+ - "\u026c\u0001\u0000\u0000\u0000\u026e\u026f\u0001\u0000\u0000\u0000\u026f"+ - "[\u0001\u0000\u0000\u0000\u0270\u0272\u0003D\u001c\u0000\u0271\u0270\u0001"+ - "\u0000\u0000\u0000\u0272\u0273\u0001\u0000\u0000\u0000\u0273\u0271\u0001"+ - "\u0000\u0000\u0000\u0273\u0274\u0001\u0000\u0000\u0000\u0274\u0275\u0001"+ - "\u0000\u0000\u0000\u0275\u0279\u0003l0\u0000\u0276\u0278\u0003D\u001c"+ - "\u0000\u0277\u0276\u0001\u0000\u0000\u0000\u0278\u027b\u0001\u0000\u0000"+ - "\u0000\u0279\u0277\u0001\u0000\u0000\u0000\u0279\u027a\u0001\u0000\u0000"+ - 
"\u0000\u027a\u029b\u0001\u0000\u0000\u0000\u027b\u0279\u0001\u0000\u0000"+ - "\u0000\u027c\u027e\u0003l0\u0000\u027d\u027f\u0003D\u001c\u0000\u027e"+ - "\u027d\u0001\u0000\u0000\u0000\u027f\u0280\u0001\u0000\u0000\u0000\u0280"+ - "\u027e\u0001\u0000\u0000\u0000\u0280\u0281\u0001\u0000\u0000\u0000\u0281"+ - "\u029b\u0001\u0000\u0000\u0000\u0282\u0284\u0003D\u001c\u0000\u0283\u0282"+ - "\u0001\u0000\u0000\u0000\u0284\u0285\u0001\u0000\u0000\u0000\u0285\u0283"+ - "\u0001\u0000\u0000\u0000\u0285\u0286\u0001\u0000\u0000\u0000\u0286\u028e"+ - "\u0001\u0000\u0000\u0000\u0287\u028b\u0003l0\u0000\u0288\u028a\u0003D"+ - "\u001c\u0000\u0289\u0288\u0001\u0000\u0000\u0000\u028a\u028d\u0001\u0000"+ - "\u0000\u0000\u028b\u0289\u0001\u0000\u0000\u0000\u028b\u028c\u0001\u0000"+ - "\u0000\u0000\u028c\u028f\u0001\u0000\u0000\u0000\u028d\u028b\u0001\u0000"+ - "\u0000\u0000\u028e\u0287\u0001\u0000\u0000\u0000\u028e\u028f\u0001\u0000"+ - "\u0000\u0000\u028f\u0290\u0001\u0000\u0000\u0000\u0290\u0291\u0003L \u0000"+ - "\u0291\u029b\u0001\u0000\u0000\u0000\u0292\u0294\u0003l0\u0000\u0293\u0295"+ - "\u0003D\u001c\u0000\u0294\u0293\u0001\u0000\u0000\u0000\u0295\u0296\u0001"+ - "\u0000\u0000\u0000\u0296\u0294\u0001\u0000\u0000\u0000\u0296\u0297\u0001"+ - "\u0000\u0000\u0000\u0297\u0298\u0001\u0000\u0000\u0000\u0298\u0299\u0003"+ - "L \u0000\u0299\u029b\u0001\u0000\u0000\u0000\u029a\u0271\u0001\u0000\u0000"+ - "\u0000\u029a\u027c\u0001\u0000\u0000\u0000\u029a\u0283\u0001\u0000\u0000"+ - "\u0000\u029a\u0292\u0001\u0000\u0000\u0000\u029b]\u0001\u0000\u0000\u0000"+ - "\u029c\u029d\u0005b\u0000\u0000\u029d\u029e\u0005y\u0000\u0000\u029e_"+ - "\u0001\u0000\u0000\u0000\u029f\u02a0\u0005a\u0000\u0000\u02a0\u02a1\u0005"+ - "n\u0000\u0000\u02a1\u02a2\u0005d\u0000\u0000\u02a2a\u0001\u0000\u0000"+ - "\u0000\u02a3\u02a4\u0005a\u0000\u0000\u02a4\u02a5\u0005s\u0000\u0000\u02a5"+ - "\u02a6\u0005c\u0000\u0000\u02a6c\u0001\u0000\u0000\u0000\u02a7\u02a8\u0005"+ - 
"=\u0000\u0000\u02a8e\u0001\u0000\u0000\u0000\u02a9\u02aa\u0005:\u0000"+ - "\u0000\u02aa\u02ab\u0005:\u0000\u0000\u02abg\u0001\u0000\u0000\u0000\u02ac"+ - "\u02ad\u0005,\u0000\u0000\u02adi\u0001\u0000\u0000\u0000\u02ae\u02af\u0005"+ - "d\u0000\u0000\u02af\u02b0\u0005e\u0000\u0000\u02b0\u02b1\u0005s\u0000"+ - "\u0000\u02b1\u02b2\u0005c\u0000\u0000\u02b2k\u0001\u0000\u0000\u0000\u02b3"+ - "\u02b4\u0005.\u0000\u0000\u02b4m\u0001\u0000\u0000\u0000\u02b5\u02b6\u0005"+ - "f\u0000\u0000\u02b6\u02b7\u0005a\u0000\u0000\u02b7\u02b8\u0005l\u0000"+ - "\u0000\u02b8\u02b9\u0005s\u0000\u0000\u02b9\u02ba\u0005e\u0000\u0000\u02ba"+ - "o\u0001\u0000\u0000\u0000\u02bb\u02bc\u0005f\u0000\u0000\u02bc\u02bd\u0005"+ - "i\u0000\u0000\u02bd\u02be\u0005r\u0000\u0000\u02be\u02bf\u0005s\u0000"+ - "\u0000\u02bf\u02c0\u0005t\u0000\u0000\u02c0q\u0001\u0000\u0000\u0000\u02c1"+ - "\u02c2\u0005l\u0000\u0000\u02c2\u02c3\u0005a\u0000\u0000\u02c3\u02c4\u0005"+ - "s\u0000\u0000\u02c4\u02c5\u0005t\u0000\u0000\u02c5s\u0001\u0000\u0000"+ - "\u0000\u02c6\u02c7\u0005(\u0000\u0000\u02c7u\u0001\u0000\u0000\u0000\u02c8"+ - "\u02c9\u0005i\u0000\u0000\u02c9\u02ca\u0005n\u0000\u0000\u02caw\u0001"+ - "\u0000\u0000\u0000\u02cb\u02cc\u0005i\u0000\u0000\u02cc\u02cd\u0005s\u0000"+ - "\u0000\u02cdy\u0001\u0000\u0000\u0000\u02ce\u02cf\u0005l\u0000\u0000\u02cf"+ - "\u02d0\u0005i\u0000\u0000\u02d0\u02d1\u0005k\u0000\u0000\u02d1\u02d2\u0005"+ - "e\u0000\u0000\u02d2{\u0001\u0000\u0000\u0000\u02d3\u02d4\u0005n\u0000"+ - "\u0000\u02d4\u02d5\u0005o\u0000\u0000\u02d5\u02d6\u0005t\u0000\u0000\u02d6"+ - "}\u0001\u0000\u0000\u0000\u02d7\u02d8\u0005n\u0000\u0000\u02d8\u02d9\u0005"+ - "u\u0000\u0000\u02d9\u02da\u0005l\u0000\u0000\u02da\u02db\u0005l\u0000"+ - "\u0000\u02db\u007f\u0001\u0000\u0000\u0000\u02dc\u02dd\u0005n\u0000\u0000"+ - "\u02dd\u02de\u0005u\u0000\u0000\u02de\u02df\u0005l\u0000\u0000\u02df\u02e0"+ - "\u0005l\u0000\u0000\u02e0\u02e1\u0005s\u0000\u0000\u02e1\u0081\u0001\u0000"+ - 
"\u0000\u0000\u02e2\u02e3\u0005o\u0000\u0000\u02e3\u02e4\u0005r\u0000\u0000"+ - "\u02e4\u0083\u0001\u0000\u0000\u0000\u02e5\u02e6\u0005?\u0000\u0000\u02e6"+ - "\u0085\u0001\u0000\u0000\u0000\u02e7\u02e8\u0005r\u0000\u0000\u02e8\u02e9"+ - "\u0005l\u0000\u0000\u02e9\u02ea\u0005i\u0000\u0000\u02ea\u02eb\u0005k"+ - "\u0000\u0000\u02eb\u02ec\u0005e\u0000\u0000\u02ec\u0087\u0001\u0000\u0000"+ - "\u0000\u02ed\u02ee\u0005)\u0000\u0000\u02ee\u0089\u0001\u0000\u0000\u0000"+ - "\u02ef\u02f0\u0005t\u0000\u0000\u02f0\u02f1\u0005r\u0000\u0000\u02f1\u02f2"+ - "\u0005u\u0000\u0000\u02f2\u02f3\u0005e\u0000\u0000\u02f3\u008b\u0001\u0000"+ - "\u0000\u0000\u02f4\u02f5\u0005=\u0000\u0000\u02f5\u02f6\u0005=\u0000\u0000"+ - "\u02f6\u008d\u0001\u0000\u0000\u0000\u02f7\u02f8\u0005=\u0000\u0000\u02f8"+ - "\u02f9\u0005~\u0000\u0000\u02f9\u008f\u0001\u0000\u0000\u0000\u02fa\u02fb"+ - "\u0005!\u0000\u0000\u02fb\u02fc\u0005=\u0000\u0000\u02fc\u0091\u0001\u0000"+ - "\u0000\u0000\u02fd\u02fe\u0005<\u0000\u0000\u02fe\u0093\u0001\u0000\u0000"+ - "\u0000\u02ff\u0300\u0005<\u0000\u0000\u0300\u0301\u0005=\u0000\u0000\u0301"+ - "\u0095\u0001\u0000\u0000\u0000\u0302\u0303\u0005>\u0000\u0000\u0303\u0097"+ - "\u0001\u0000\u0000\u0000\u0304\u0305\u0005>\u0000\u0000\u0305\u0306\u0005"+ - "=\u0000\u0000\u0306\u0099\u0001\u0000\u0000\u0000\u0307\u0308\u0005+\u0000"+ - "\u0000\u0308\u009b\u0001\u0000\u0000\u0000\u0309\u030a\u0005-\u0000\u0000"+ - "\u030a\u009d\u0001\u0000\u0000\u0000\u030b\u030c\u0005*\u0000\u0000\u030c"+ - "\u009f\u0001\u0000\u0000\u0000\u030d\u030e\u0005/\u0000\u0000\u030e\u00a1"+ - "\u0001\u0000\u0000\u0000\u030f\u0310\u0005%\u0000\u0000\u0310\u00a3\u0001"+ - "\u0000\u0000\u0000\u0311\u0312\u0005[\u0000\u0000\u0312\u0313\u0001\u0000"+ - "\u0000\u0000\u0313\u0314\u0006L\u0000\u0000\u0314\u0315\u0006L\u0000\u0000"+ - "\u0315\u00a5\u0001\u0000\u0000\u0000\u0316\u0317\u0005]\u0000\u0000\u0317"+ - "\u0318\u0001\u0000\u0000\u0000\u0318\u0319\u0006M\r\u0000\u0319\u031a"+ - 
"\u0006M\r\u0000\u031a\u00a7\u0001\u0000\u0000\u0000\u031b\u031f\u0003"+ - "F\u001d\u0000\u031c\u031e\u0003V%\u0000\u031d\u031c\u0001\u0000\u0000"+ - "\u0000\u031e\u0321\u0001\u0000\u0000\u0000\u031f\u031d\u0001\u0000\u0000"+ - "\u0000\u031f\u0320\u0001\u0000\u0000\u0000\u0320\u032c\u0001\u0000\u0000"+ - "\u0000\u0321\u031f\u0001\u0000\u0000\u0000\u0322\u0325\u0003T$\u0000\u0323"+ - "\u0325\u0003N!\u0000\u0324\u0322\u0001\u0000\u0000\u0000\u0324\u0323\u0001"+ - "\u0000\u0000\u0000\u0325\u0327\u0001\u0000\u0000\u0000\u0326\u0328\u0003"+ - "V%\u0000\u0327\u0326\u0001\u0000\u0000\u0000\u0328\u0329\u0001\u0000\u0000"+ - "\u0000\u0329\u0327\u0001\u0000\u0000\u0000\u0329\u032a\u0001\u0000\u0000"+ - "\u0000\u032a\u032c\u0001\u0000\u0000\u0000\u032b\u031b\u0001\u0000\u0000"+ - "\u0000\u032b\u0324\u0001\u0000\u0000\u0000\u032c\u00a9\u0001\u0000\u0000"+ - "\u0000\u032d\u032f\u0003P\"\u0000\u032e\u0330\u0003R#\u0000\u032f\u032e"+ - "\u0001\u0000\u0000\u0000\u0330\u0331\u0001\u0000\u0000\u0000\u0331\u032f"+ - "\u0001\u0000\u0000\u0000\u0331\u0332\u0001\u0000\u0000\u0000\u0332\u0333"+ - "\u0001\u0000\u0000\u0000\u0333\u0334\u0003P\"\u0000\u0334\u00ab\u0001"+ - "\u0000\u0000\u0000\u0335\u0336\u0003\u00aaO\u0000\u0336\u00ad\u0001\u0000"+ - "\u0000\u0000\u0337\u0338\u00032\u0013\u0000\u0338\u0339\u0001\u0000\u0000"+ - "\u0000\u0339\u033a\u0006Q\t\u0000\u033a\u00af\u0001\u0000\u0000\u0000"+ - "\u033b\u033c\u00034\u0014\u0000\u033c\u033d\u0001\u0000\u0000\u0000\u033d"+ - "\u033e\u0006R\t\u0000\u033e\u00b1\u0001\u0000\u0000\u0000\u033f\u0340"+ - "\u00036\u0015\u0000\u0340\u0341\u0001\u0000\u0000\u0000\u0341\u0342\u0006"+ - "S\t\u0000\u0342\u00b3\u0001\u0000\u0000\u0000\u0343\u0344\u0003B\u001b"+ - "\u0000\u0344\u0345\u0001\u0000\u0000\u0000\u0345\u0346\u0006T\f\u0000"+ - "\u0346\u0347\u0006T\r\u0000\u0347\u00b5\u0001\u0000\u0000\u0000\u0348"+ - "\u0349\u0003\u00a4L\u0000\u0349\u034a\u0001\u0000\u0000\u0000\u034a\u034b"+ - 
"\u0006U\n\u0000\u034b\u00b7\u0001\u0000\u0000\u0000\u034c\u034d\u0003"+ - "\u00a6M\u0000\u034d\u034e\u0001\u0000\u0000\u0000\u034e\u034f\u0006V\u000e"+ - "\u0000\u034f\u00b9\u0001\u0000\u0000\u0000\u0350\u0351\u0003h.\u0000\u0351"+ - "\u0352\u0001\u0000\u0000\u0000\u0352\u0353\u0006W\u000f\u0000\u0353\u00bb"+ - "\u0001\u0000\u0000\u0000\u0354\u0355\u0003d,\u0000\u0355\u0356\u0001\u0000"+ - "\u0000\u0000\u0356\u0357\u0006X\u0010\u0000\u0357\u00bd\u0001\u0000\u0000"+ - "\u0000\u0358\u0359\u0003X&\u0000\u0359\u035a\u0001\u0000\u0000\u0000\u035a"+ - "\u035b\u0006Y\u0011\u0000\u035b\u00bf\u0001\u0000\u0000\u0000\u035c\u035d"+ - "\u0005o\u0000\u0000\u035d\u035e\u0005p\u0000\u0000\u035e\u035f\u0005t"+ - "\u0000\u0000\u035f\u0360\u0005i\u0000\u0000\u0360\u0361\u0005o\u0000\u0000"+ - "\u0361\u0362\u0005n\u0000\u0000\u0362\u0363\u0005s\u0000\u0000\u0363\u00c1"+ - "\u0001\u0000\u0000\u0000\u0364\u0365\u0005m\u0000\u0000\u0365\u0366\u0005"+ - "e\u0000\u0000\u0366\u0367\u0005t\u0000\u0000\u0367\u0368\u0005a\u0000"+ - "\u0000\u0368\u0369\u0005d\u0000\u0000\u0369\u036a\u0005a\u0000\u0000\u036a"+ - "\u036b\u0005t\u0000\u0000\u036b\u036c\u0005a\u0000\u0000\u036c\u00c3\u0001"+ - "\u0000\u0000\u0000\u036d\u0371\b\n\u0000\u0000\u036e\u036f\u0005/\u0000"+ - "\u0000\u036f\u0371\b\u000b\u0000\u0000\u0370\u036d\u0001\u0000\u0000\u0000"+ - "\u0370\u036e\u0001\u0000\u0000\u0000\u0371\u00c5\u0001\u0000\u0000\u0000"+ - "\u0372\u0374\u0003\u00c4\\\u0000\u0373\u0372\u0001\u0000\u0000\u0000\u0374"+ - "\u0375\u0001\u0000\u0000\u0000\u0375\u0373\u0001\u0000\u0000\u0000\u0375"+ - "\u0376\u0001\u0000\u0000\u0000\u0376\u00c7\u0001\u0000\u0000\u0000\u0377"+ - "\u0378\u0003\u00acP\u0000\u0378\u0379\u0001\u0000\u0000\u0000\u0379\u037a"+ - "\u0006^\u0012\u0000\u037a\u00c9\u0001\u0000\u0000\u0000\u037b\u037c\u0003"+ - "2\u0013\u0000\u037c\u037d\u0001\u0000\u0000\u0000\u037d\u037e\u0006_\t"+ - "\u0000\u037e\u00cb\u0001\u0000\u0000\u0000\u037f\u0380\u00034\u0014\u0000"+ - 
"\u0380\u0381\u0001\u0000\u0000\u0000\u0381\u0382\u0006`\t\u0000\u0382"+ - "\u00cd\u0001\u0000\u0000\u0000\u0383\u0384\u00036\u0015\u0000\u0384\u0385"+ - "\u0001\u0000\u0000\u0000\u0385\u0386\u0006a\t\u0000\u0386\u00cf\u0001"+ - "\u0000\u0000\u0000\u0387\u0388\u0003B\u001b\u0000\u0388\u0389\u0001\u0000"+ - "\u0000\u0000\u0389\u038a\u0006b\f\u0000\u038a\u038b\u0006b\r\u0000\u038b"+ - "\u00d1\u0001\u0000\u0000\u0000\u038c\u038d\u0003l0\u0000\u038d\u038e\u0001"+ - "\u0000\u0000\u0000\u038e\u038f\u0006c\u0013\u0000\u038f\u00d3\u0001\u0000"+ - "\u0000\u0000\u0390\u0391\u0003h.\u0000\u0391\u0392\u0001\u0000\u0000\u0000"+ - "\u0392\u0393\u0006d\u000f\u0000\u0393\u00d5\u0001\u0000\u0000\u0000\u0394"+ - "\u0399\u0003F\u001d\u0000\u0395\u0399\u0003D\u001c\u0000\u0396\u0399\u0003"+ - "T$\u0000\u0397\u0399\u0003\u009eI\u0000\u0398\u0394\u0001\u0000\u0000"+ - "\u0000\u0398\u0395\u0001\u0000\u0000\u0000\u0398\u0396\u0001\u0000\u0000"+ - "\u0000\u0398\u0397\u0001\u0000\u0000\u0000\u0399\u00d7\u0001\u0000\u0000"+ - "\u0000\u039a\u039d\u0003F\u001d\u0000\u039b\u039d\u0003\u009eI\u0000\u039c"+ - "\u039a\u0001\u0000\u0000\u0000\u039c\u039b\u0001\u0000\u0000\u0000\u039d"+ - "\u03a1\u0001\u0000\u0000\u0000\u039e\u03a0\u0003\u00d6e\u0000\u039f\u039e"+ - "\u0001\u0000\u0000\u0000\u03a0\u03a3\u0001\u0000\u0000\u0000\u03a1\u039f"+ - "\u0001\u0000\u0000\u0000\u03a1\u03a2\u0001\u0000\u0000\u0000\u03a2\u03ae"+ - "\u0001\u0000\u0000\u0000\u03a3\u03a1\u0001\u0000\u0000\u0000\u03a4\u03a7"+ - "\u0003T$\u0000\u03a5\u03a7\u0003N!\u0000\u03a6\u03a4\u0001\u0000\u0000"+ - "\u0000\u03a6\u03a5\u0001\u0000\u0000\u0000\u03a7\u03a9\u0001\u0000\u0000"+ - "\u0000\u03a8\u03aa\u0003\u00d6e\u0000\u03a9\u03a8\u0001\u0000\u0000\u0000"+ - "\u03aa\u03ab\u0001\u0000\u0000\u0000\u03ab\u03a9\u0001\u0000\u0000\u0000"+ - "\u03ab\u03ac\u0001\u0000\u0000\u0000\u03ac\u03ae\u0001\u0000\u0000\u0000"+ - "\u03ad\u039c\u0001\u0000\u0000\u0000\u03ad\u03a6\u0001\u0000\u0000\u0000"+ - 
"\u03ae\u00d9\u0001\u0000\u0000\u0000\u03af\u03b2\u0003\u00d8f\u0000\u03b0"+ - "\u03b2\u0003\u00aaO\u0000\u03b1\u03af\u0001\u0000\u0000\u0000\u03b1\u03b0"+ - "\u0001\u0000\u0000\u0000\u03b2\u03b3\u0001\u0000\u0000\u0000\u03b3\u03b1"+ - "\u0001\u0000\u0000\u0000\u03b3\u03b4\u0001\u0000\u0000\u0000\u03b4\u00db"+ - "\u0001\u0000\u0000\u0000\u03b5\u03b6\u00032\u0013\u0000\u03b6\u03b7\u0001"+ - "\u0000\u0000\u0000\u03b7\u03b8\u0006h\t\u0000\u03b8\u00dd\u0001\u0000"+ - "\u0000\u0000\u03b9\u03ba\u00034\u0014\u0000\u03ba\u03bb\u0001\u0000\u0000"+ - "\u0000\u03bb\u03bc\u0006i\t\u0000\u03bc\u00df\u0001\u0000\u0000\u0000"+ - "\u03bd\u03be\u00036\u0015\u0000\u03be\u03bf\u0001\u0000\u0000\u0000\u03bf"+ - "\u03c0\u0006j\t\u0000\u03c0\u00e1\u0001\u0000\u0000\u0000\u03c1\u03c2"+ - "\u0003B\u001b\u0000\u03c2\u03c3\u0001\u0000\u0000\u0000\u03c3\u03c4\u0006"+ - "k\f\u0000\u03c4\u03c5\u0006k\r\u0000\u03c5\u00e3\u0001\u0000\u0000\u0000"+ - "\u03c6\u03c7\u0003d,\u0000\u03c7\u03c8\u0001\u0000\u0000\u0000\u03c8\u03c9"+ - "\u0006l\u0010\u0000\u03c9\u00e5\u0001\u0000\u0000\u0000\u03ca\u03cb\u0003"+ - "h.\u0000\u03cb\u03cc\u0001\u0000\u0000\u0000\u03cc\u03cd\u0006m\u000f"+ - "\u0000\u03cd\u00e7\u0001\u0000\u0000\u0000\u03ce\u03cf\u0003l0\u0000\u03cf"+ - "\u03d0\u0001\u0000\u0000\u0000\u03d0\u03d1\u0006n\u0013\u0000\u03d1\u00e9"+ - "\u0001\u0000\u0000\u0000\u03d2\u03d3\u0005a\u0000\u0000\u03d3\u03d4\u0005"+ - "s\u0000\u0000\u03d4\u00eb\u0001\u0000\u0000\u0000\u03d5\u03d6\u0003\u00da"+ - "g\u0000\u03d6\u03d7\u0001\u0000\u0000\u0000\u03d7\u03d8\u0006p\u0014\u0000"+ - "\u03d8\u00ed\u0001\u0000\u0000\u0000\u03d9\u03da\u00032\u0013\u0000\u03da"+ - "\u03db\u0001\u0000\u0000\u0000\u03db\u03dc\u0006q\t\u0000\u03dc\u00ef"+ - "\u0001\u0000\u0000\u0000\u03dd\u03de\u00034\u0014\u0000\u03de\u03df\u0001"+ - "\u0000\u0000\u0000\u03df\u03e0\u0006r\t\u0000\u03e0\u00f1\u0001\u0000"+ - "\u0000\u0000\u03e1\u03e2\u00036\u0015\u0000\u03e2\u03e3\u0001\u0000\u0000"+ - 
"\u0000\u03e3\u03e4\u0006s\t\u0000\u03e4\u00f3\u0001\u0000\u0000\u0000"+ - "\u03e5\u03e6\u0003B\u001b\u0000\u03e6\u03e7\u0001\u0000\u0000\u0000\u03e7"+ - "\u03e8\u0006t\f\u0000\u03e8\u03e9\u0006t\r\u0000\u03e9\u00f5\u0001\u0000"+ - "\u0000\u0000\u03ea\u03eb\u0003\u00a4L\u0000\u03eb\u03ec\u0001\u0000\u0000"+ - "\u0000\u03ec\u03ed\u0006u\n\u0000\u03ed\u03ee\u0006u\u0015\u0000\u03ee"+ - "\u00f7\u0001\u0000\u0000\u0000\u03ef\u03f0\u0005o\u0000\u0000\u03f0\u03f1"+ - "\u0005n\u0000\u0000\u03f1\u03f2\u0001\u0000\u0000\u0000\u03f2\u03f3\u0006"+ - "v\u0016\u0000\u03f3\u00f9\u0001\u0000\u0000\u0000\u03f4\u03f5\u0005w\u0000"+ - "\u0000\u03f5\u03f6\u0005i\u0000\u0000\u03f6\u03f7\u0005t\u0000\u0000\u03f7"+ - "\u03f8\u0005h\u0000\u0000\u03f8\u03f9\u0001\u0000\u0000\u0000\u03f9\u03fa"+ - "\u0006w\u0016\u0000\u03fa\u00fb\u0001\u0000\u0000\u0000\u03fb\u03fc\b"+ - "\f\u0000\u0000\u03fc\u00fd\u0001\u0000\u0000\u0000\u03fd\u03ff\u0003\u00fc"+ - "x\u0000\u03fe\u03fd\u0001\u0000\u0000\u0000\u03ff\u0400\u0001\u0000\u0000"+ - "\u0000\u0400\u03fe\u0001\u0000\u0000\u0000\u0400\u0401\u0001\u0000\u0000"+ - "\u0000\u0401\u0402\u0001\u0000\u0000\u0000\u0402\u0403\u0003\u0142\u009b"+ - "\u0000\u0403\u0405\u0001\u0000\u0000\u0000\u0404\u03fe\u0001\u0000\u0000"+ - "\u0000\u0404\u0405\u0001\u0000\u0000\u0000\u0405\u0407\u0001\u0000\u0000"+ - "\u0000\u0406\u0408\u0003\u00fcx\u0000\u0407\u0406\u0001\u0000\u0000\u0000"+ - "\u0408\u0409\u0001\u0000\u0000\u0000\u0409\u0407\u0001\u0000\u0000\u0000"+ - "\u0409\u040a\u0001\u0000\u0000\u0000\u040a\u00ff\u0001\u0000\u0000\u0000"+ - "\u040b\u040c\u0003\u00acP\u0000\u040c\u040d\u0001\u0000\u0000\u0000\u040d"+ - "\u040e\u0006z\u0012\u0000\u040e\u0101\u0001\u0000\u0000\u0000\u040f\u0410"+ - "\u0003\u00fey\u0000\u0410\u0411\u0001\u0000\u0000\u0000\u0411\u0412\u0006"+ - "{\u0017\u0000\u0412\u0103\u0001\u0000\u0000\u0000\u0413\u0414\u00032\u0013"+ - "\u0000\u0414\u0415\u0001\u0000\u0000\u0000\u0415\u0416\u0006|\t\u0000"+ - 
"\u0416\u0105\u0001\u0000\u0000\u0000\u0417\u0418\u00034\u0014\u0000\u0418"+ - "\u0419\u0001\u0000\u0000\u0000\u0419\u041a\u0006}\t\u0000\u041a\u0107"+ - "\u0001\u0000\u0000\u0000\u041b\u041c\u00036\u0015\u0000\u041c\u041d\u0001"+ - "\u0000\u0000\u0000\u041d\u041e\u0006~\t\u0000\u041e\u0109\u0001\u0000"+ - "\u0000\u0000\u041f\u0420\u0003B\u001b\u0000\u0420\u0421\u0001\u0000\u0000"+ - "\u0000\u0421\u0422\u0006\u007f\f\u0000\u0422\u0423\u0006\u007f\r\u0000"+ - "\u0423\u0424\u0006\u007f\r\u0000\u0424\u010b\u0001\u0000\u0000\u0000\u0425"+ - "\u0426\u0003d,\u0000\u0426\u0427\u0001\u0000\u0000\u0000\u0427\u0428\u0006"+ - "\u0080\u0010\u0000\u0428\u010d\u0001\u0000\u0000\u0000\u0429\u042a\u0003"+ - "h.\u0000\u042a\u042b\u0001\u0000\u0000\u0000\u042b\u042c\u0006\u0081\u000f"+ - "\u0000\u042c\u010f\u0001\u0000\u0000\u0000\u042d\u042e\u0003l0\u0000\u042e"+ - "\u042f\u0001\u0000\u0000\u0000\u042f\u0430\u0006\u0082\u0013\u0000\u0430"+ - "\u0111\u0001\u0000\u0000\u0000\u0431\u0432\u0003\u00faw\u0000\u0432\u0433"+ - "\u0001\u0000\u0000\u0000\u0433\u0434\u0006\u0083\u0018\u0000\u0434\u0113"+ - "\u0001\u0000\u0000\u0000\u0435\u0436\u0003\u00dag\u0000\u0436\u0437\u0001"+ - "\u0000\u0000\u0000\u0437\u0438\u0006\u0084\u0014\u0000\u0438\u0115\u0001"+ - "\u0000\u0000\u0000\u0439\u043a\u0003\u00acP\u0000\u043a\u043b\u0001\u0000"+ - "\u0000\u0000\u043b\u043c\u0006\u0085\u0012\u0000\u043c\u0117\u0001\u0000"+ - "\u0000\u0000\u043d\u043e\u00032\u0013\u0000\u043e\u043f\u0001\u0000\u0000"+ - "\u0000\u043f\u0440\u0006\u0086\t\u0000\u0440\u0119\u0001\u0000\u0000\u0000"+ - "\u0441\u0442\u00034\u0014\u0000\u0442\u0443\u0001\u0000\u0000\u0000\u0443"+ - "\u0444\u0006\u0087\t\u0000\u0444\u011b\u0001\u0000\u0000\u0000\u0445\u0446"+ - "\u00036\u0015\u0000\u0446\u0447\u0001\u0000\u0000\u0000\u0447\u0448\u0006"+ - "\u0088\t\u0000\u0448\u011d\u0001\u0000\u0000\u0000\u0449\u044a\u0003B"+ - "\u001b\u0000\u044a\u044b\u0001\u0000\u0000\u0000\u044b\u044c\u0006\u0089"+ - 
"\f\u0000\u044c\u044d\u0006\u0089\r\u0000\u044d\u011f\u0001\u0000\u0000"+ - "\u0000\u044e\u044f\u0003l0\u0000\u044f\u0450\u0001\u0000\u0000\u0000\u0450"+ - "\u0451\u0006\u008a\u0013\u0000\u0451\u0121\u0001\u0000\u0000\u0000\u0452"+ - "\u0453\u0003\u00acP\u0000\u0453\u0454\u0001\u0000\u0000\u0000\u0454\u0455"+ - "\u0006\u008b\u0012\u0000\u0455\u0123\u0001\u0000\u0000\u0000\u0456\u0457"+ - "\u0003\u00a8N\u0000\u0457\u0458\u0001\u0000\u0000\u0000\u0458\u0459\u0006"+ - "\u008c\u0019\u0000\u0459\u0125\u0001\u0000\u0000\u0000\u045a\u045b\u0003"+ - "2\u0013\u0000\u045b\u045c\u0001\u0000\u0000\u0000\u045c\u045d\u0006\u008d"+ - "\t\u0000\u045d\u0127\u0001\u0000\u0000\u0000\u045e\u045f\u00034\u0014"+ - "\u0000\u045f\u0460\u0001\u0000\u0000\u0000\u0460\u0461\u0006\u008e\t\u0000"+ - "\u0461\u0129\u0001\u0000\u0000\u0000\u0462\u0463\u00036\u0015\u0000\u0463"+ - "\u0464\u0001\u0000\u0000\u0000\u0464\u0465\u0006\u008f\t\u0000\u0465\u012b"+ - "\u0001\u0000\u0000\u0000\u0466\u0467\u0003B\u001b\u0000\u0467\u0468\u0001"+ - "\u0000\u0000\u0000\u0468\u0469\u0006\u0090\f\u0000\u0469\u046a\u0006\u0090"+ - "\r\u0000\u046a\u012d\u0001\u0000\u0000\u0000\u046b\u046c\u0005i\u0000"+ - "\u0000\u046c\u046d\u0005n\u0000\u0000\u046d\u046e\u0005f\u0000\u0000\u046e"+ - "\u046f\u0005o\u0000\u0000\u046f\u012f\u0001\u0000\u0000\u0000\u0470\u0471"+ - "\u00032\u0013\u0000\u0471\u0472\u0001\u0000\u0000\u0000\u0472\u0473\u0006"+ - "\u0092\t\u0000\u0473\u0131\u0001\u0000\u0000\u0000\u0474\u0475\u00034"+ - "\u0014\u0000\u0475\u0476\u0001\u0000\u0000\u0000\u0476\u0477\u0006\u0093"+ - "\t\u0000\u0477\u0133\u0001\u0000\u0000\u0000\u0478\u0479\u00036\u0015"+ - "\u0000\u0479\u047a\u0001\u0000\u0000\u0000\u047a\u047b\u0006\u0094\t\u0000"+ - "\u047b\u0135\u0001\u0000\u0000\u0000\u047c\u047d\u0003B\u001b\u0000\u047d"+ - "\u047e\u0001\u0000\u0000\u0000\u047e\u047f\u0006\u0095\f\u0000\u047f\u0480"+ - "\u0006\u0095\r\u0000\u0480\u0137\u0001\u0000\u0000\u0000\u0481\u0482\u0005"+ - 
"f\u0000\u0000\u0482\u0483\u0005u\u0000\u0000\u0483\u0484\u0005n\u0000"+ - "\u0000\u0484\u0485\u0005c\u0000\u0000\u0485\u0486\u0005t\u0000\u0000\u0486"+ - "\u0487\u0005i\u0000\u0000\u0487\u0488\u0005o\u0000\u0000\u0488\u0489\u0005"+ - "n\u0000\u0000\u0489\u048a\u0005s\u0000\u0000\u048a\u0139\u0001\u0000\u0000"+ - "\u0000\u048b\u048c\u00032\u0013\u0000\u048c\u048d\u0001\u0000\u0000\u0000"+ - "\u048d\u048e\u0006\u0097\t\u0000\u048e\u013b\u0001\u0000\u0000\u0000\u048f"+ - "\u0490\u00034\u0014\u0000\u0490\u0491\u0001\u0000\u0000\u0000\u0491\u0492"+ - "\u0006\u0098\t\u0000\u0492\u013d\u0001\u0000\u0000\u0000\u0493\u0494\u0003"+ - "6\u0015\u0000\u0494\u0495\u0001\u0000\u0000\u0000\u0495\u0496\u0006\u0099"+ - "\t\u0000\u0496\u013f\u0001\u0000\u0000\u0000\u0497\u0498\u0003\u00a6M"+ - "\u0000\u0498\u0499\u0001\u0000\u0000\u0000\u0499\u049a\u0006\u009a\u000e"+ - "\u0000\u049a\u049b\u0006\u009a\r\u0000\u049b\u0141\u0001\u0000\u0000\u0000"+ - "\u049c\u049d\u0005:\u0000\u0000\u049d\u0143\u0001\u0000\u0000\u0000\u049e"+ - "\u04a4\u0003N!\u0000\u049f\u04a4\u0003D\u001c\u0000\u04a0\u04a4\u0003"+ - "l0\u0000\u04a1\u04a4\u0003F\u001d\u0000\u04a2\u04a4\u0003T$\u0000\u04a3"+ - "\u049e\u0001\u0000\u0000\u0000\u04a3\u049f\u0001\u0000\u0000\u0000\u04a3"+ - "\u04a0\u0001\u0000\u0000\u0000\u04a3\u04a1\u0001\u0000\u0000\u0000\u04a3"+ - "\u04a2\u0001\u0000\u0000\u0000\u04a4\u04a5\u0001\u0000\u0000\u0000\u04a5"+ - "\u04a3\u0001\u0000\u0000\u0000\u04a5\u04a6\u0001\u0000\u0000\u0000\u04a6"+ - "\u0145\u0001\u0000\u0000\u0000\u04a7\u04a8\u00032\u0013\u0000\u04a8\u04a9"+ - "\u0001\u0000\u0000\u0000\u04a9\u04aa\u0006\u009d\t\u0000\u04aa\u0147\u0001"+ - "\u0000\u0000\u0000\u04ab\u04ac\u00034\u0014\u0000\u04ac\u04ad\u0001\u0000"+ - "\u0000\u0000\u04ad\u04ae\u0006\u009e\t\u0000\u04ae\u0149\u0001\u0000\u0000"+ - "\u0000\u04af\u04b0\u00036\u0015\u0000\u04b0\u04b1\u0001\u0000\u0000\u0000"+ - "\u04b1\u04b2\u0006\u009f\t\u0000\u04b2\u014b\u0001\u0000\u0000\u0000:"+ - 
"\u0000\u0001\u0002\u0003\u0004\u0005\u0006\u0007\b\t\n\u000b\u01e5\u01ef"+ - "\u01f3\u01f6\u01ff\u0201\u020c\u0235\u023a\u0243\u024a\u024f\u0251\u025c"+ - "\u0264\u0267\u0269\u026e\u0273\u0279\u0280\u0285\u028b\u028e\u0296\u029a"+ - "\u031f\u0324\u0329\u032b\u0331\u0370\u0375\u0398\u039c\u03a1\u03a6\u03ab"+ - "\u03ad\u03b1\u03b3\u0400\u0404\u0409\u04a3\u04a5\u001a\u0005\u0002\u0000"+ - "\u0005\u0004\u0000\u0005\u0006\u0000\u0005\u0001\u0000\u0005\u0003\u0000"+ - "\u0005\n\u0000\u0005\b\u0000\u0005\u0005\u0000\u0005\t\u0000\u0000\u0001"+ - "\u0000\u0007A\u0000\u0005\u0000\u0000\u0007\u001a\u0000\u0004\u0000\u0000"+ - "\u0007B\u0000\u0007#\u0000\u0007!\u0000\u0007\u001b\u0000\u0007D\u0000"+ - "\u0007%\u0000\u0007N\u0000\u0005\u000b\u0000\u0005\u0007\u0000\u0007X"+ - "\u0000\u0007W\u0000\u0007C\u0000"; + "\u0012\u0004\u0012\u01e2\b\u0012\u000b\u0012\f\u0012\u01e3\u0001\u0012"+ + "\u0001\u0012\u0001\u0013\u0001\u0013\u0001\u0013\u0001\u0013\u0005\u0013"+ + "\u01ec\b\u0013\n\u0013\f\u0013\u01ef\t\u0013\u0001\u0013\u0003\u0013\u01f2"+ + "\b\u0013\u0001\u0013\u0003\u0013\u01f5\b\u0013\u0001\u0013\u0001\u0013"+ + "\u0001\u0014\u0001\u0014\u0001\u0014\u0001\u0014\u0001\u0014\u0005\u0014"+ + "\u01fe\b\u0014\n\u0014\f\u0014\u0201\t\u0014\u0001\u0014\u0001\u0014\u0001"+ + "\u0014\u0001\u0014\u0001\u0014\u0001\u0015\u0004\u0015\u0209\b\u0015\u000b"+ + "\u0015\f\u0015\u020a\u0001\u0015\u0001\u0015\u0001\u0016\u0001\u0016\u0001"+ + "\u0016\u0001\u0016\u0001\u0016\u0001\u0017\u0001\u0017\u0001\u0017\u0001"+ + "\u0017\u0001\u0017\u0001\u0018\u0001\u0018\u0001\u0018\u0001\u0018\u0001"+ + "\u0019\u0001\u0019\u0001\u0019\u0001\u0019\u0001\u001a\u0001\u001a\u0001"+ + "\u001a\u0001\u001a\u0001\u001b\u0001\u001b\u0001\u001b\u0001\u001b\u0001"+ + "\u001c\u0001\u001c\u0001\u001d\u0001\u001d\u0001\u001e\u0001\u001e\u0001"+ + "\u001e\u0001\u001f\u0001\u001f\u0001 \u0001 \u0003 \u0234\b \u0001 \u0004"+ + " \u0237\b \u000b \f \u0238\u0001!\u0001!\u0001\"\u0001\"\u0001#\u0001"+ + 
"#\u0001#\u0003#\u0242\b#\u0001$\u0001$\u0001%\u0001%\u0001%\u0003%\u0249"+ + "\b%\u0001&\u0001&\u0001&\u0005&\u024e\b&\n&\f&\u0251\t&\u0001&\u0001&"+ + "\u0001&\u0001&\u0001&\u0001&\u0005&\u0259\b&\n&\f&\u025c\t&\u0001&\u0001"+ + "&\u0001&\u0001&\u0001&\u0003&\u0263\b&\u0001&\u0003&\u0266\b&\u0003&\u0268"+ + "\b&\u0001\'\u0004\'\u026b\b\'\u000b\'\f\'\u026c\u0001(\u0004(\u0270\b"+ + "(\u000b(\f(\u0271\u0001(\u0001(\u0005(\u0276\b(\n(\f(\u0279\t(\u0001("+ + "\u0001(\u0004(\u027d\b(\u000b(\f(\u027e\u0001(\u0004(\u0282\b(\u000b("+ + "\f(\u0283\u0001(\u0001(\u0005(\u0288\b(\n(\f(\u028b\t(\u0003(\u028d\b"+ + "(\u0001(\u0001(\u0001(\u0001(\u0004(\u0293\b(\u000b(\f(\u0294\u0001(\u0001"+ + "(\u0003(\u0299\b(\u0001)\u0001)\u0001)\u0001*\u0001*\u0001*\u0001*\u0001"+ + "+\u0001+\u0001+\u0001+\u0001,\u0001,\u0001-\u0001-\u0001-\u0001.\u0001"+ + ".\u0001/\u0001/\u0001/\u0001/\u0001/\u00010\u00010\u00011\u00011\u0001"+ + "1\u00011\u00011\u00011\u00012\u00012\u00012\u00012\u00012\u00012\u0001"+ + "3\u00013\u00013\u00013\u00013\u00014\u00014\u00015\u00015\u00015\u0001"+ + "6\u00016\u00016\u00017\u00017\u00017\u00017\u00017\u00018\u00018\u0001"+ + "8\u00018\u00019\u00019\u00019\u00019\u00019\u0001:\u0001:\u0001:\u0001"+ + ":\u0001:\u0001:\u0001;\u0001;\u0001;\u0001<\u0001<\u0001=\u0001=\u0001"+ + "=\u0001=\u0001=\u0001=\u0001>\u0001>\u0001?\u0001?\u0001?\u0001?\u0001"+ + "?\u0001@\u0001@\u0001@\u0001A\u0001A\u0001A\u0001B\u0001B\u0001B\u0001"+ + "C\u0001C\u0001D\u0001D\u0001D\u0001E\u0001E\u0001F\u0001F\u0001F\u0001"+ + "G\u0001G\u0001H\u0001H\u0001I\u0001I\u0001J\u0001J\u0001K\u0001K\u0001"+ + "L\u0001L\u0001L\u0001L\u0001L\u0001M\u0001M\u0001M\u0001M\u0001M\u0001"+ + "N\u0001N\u0005N\u031c\bN\nN\fN\u031f\tN\u0001N\u0001N\u0003N\u0323\bN"+ + "\u0001N\u0004N\u0326\bN\u000bN\fN\u0327\u0003N\u032a\bN\u0001O\u0001O"+ + "\u0004O\u032e\bO\u000bO\fO\u032f\u0001O\u0001O\u0001P\u0001P\u0001Q\u0001"+ + "Q\u0001Q\u0001Q\u0001R\u0001R\u0001R\u0001R\u0001S\u0001S\u0001S\u0001"+ + 
"S\u0001T\u0001T\u0001T\u0001T\u0001T\u0001U\u0001U\u0001U\u0001U\u0001"+ + "V\u0001V\u0001V\u0001V\u0001W\u0001W\u0001W\u0001W\u0001X\u0001X\u0001"+ + "X\u0001X\u0001Y\u0001Y\u0001Y\u0001Y\u0001Z\u0001Z\u0001Z\u0001Z\u0001"+ + "Z\u0001Z\u0001Z\u0001Z\u0001[\u0001[\u0001[\u0001[\u0001[\u0001[\u0001"+ + "[\u0001[\u0001[\u0001\\\u0001\\\u0001\\\u0003\\\u036f\b\\\u0001]\u0004"+ + "]\u0372\b]\u000b]\f]\u0373\u0001^\u0001^\u0001^\u0001^\u0001_\u0001_\u0001"+ + "_\u0001_\u0001`\u0001`\u0001`\u0001`\u0001a\u0001a\u0001a\u0001a\u0001"+ + "a\u0001b\u0001b\u0001b\u0001b\u0001c\u0001c\u0001c\u0001c\u0001d\u0001"+ + "d\u0001d\u0001d\u0003d\u0393\bd\u0001e\u0001e\u0003e\u0397\be\u0001e\u0005"+ + "e\u039a\be\ne\fe\u039d\te\u0001e\u0001e\u0003e\u03a1\be\u0001e\u0004e"+ + "\u03a4\be\u000be\fe\u03a5\u0003e\u03a8\be\u0001f\u0001f\u0004f\u03ac\b"+ + "f\u000bf\ff\u03ad\u0001g\u0001g\u0001g\u0001g\u0001h\u0001h\u0001h\u0001"+ + "h\u0001i\u0001i\u0001i\u0001i\u0001j\u0001j\u0001j\u0001j\u0001j\u0001"+ + "k\u0001k\u0001k\u0001k\u0001l\u0001l\u0001l\u0001l\u0001m\u0001m\u0001"+ + "m\u0001m\u0001n\u0001n\u0001n\u0001o\u0001o\u0001o\u0001o\u0001p\u0001"+ + "p\u0001p\u0001p\u0001q\u0001q\u0001q\u0001q\u0001r\u0001r\u0001r\u0001"+ + "r\u0001s\u0001s\u0001s\u0001s\u0001s\u0001t\u0001t\u0001t\u0001t\u0001"+ + "t\u0001u\u0001u\u0001u\u0001u\u0001u\u0001v\u0001v\u0001v\u0001v\u0001"+ + "v\u0001v\u0001v\u0001w\u0001w\u0001x\u0004x\u03f9\bx\u000bx\fx\u03fa\u0001"+ + "x\u0001x\u0003x\u03ff\bx\u0001x\u0004x\u0402\bx\u000bx\fx\u0403\u0001"+ + "y\u0001y\u0001y\u0001y\u0001z\u0001z\u0001z\u0001z\u0001{\u0001{\u0001"+ + "{\u0001{\u0001|\u0001|\u0001|\u0001|\u0001}\u0001}\u0001}\u0001}\u0001"+ + "~\u0001~\u0001~\u0001~\u0001~\u0001~\u0001\u007f\u0001\u007f\u0001\u007f"+ + "\u0001\u007f\u0001\u0080\u0001\u0080\u0001\u0080\u0001\u0080\u0001\u0081"+ + "\u0001\u0081\u0001\u0081\u0001\u0081\u0001\u0082\u0001\u0082\u0001\u0082"+ + "\u0001\u0082\u0001\u0083\u0001\u0083\u0001\u0083\u0001\u0083\u0001\u0084"+ + 
"\u0001\u0084\u0001\u0084\u0001\u0084\u0001\u0085\u0001\u0085\u0001\u0085"+ + "\u0001\u0085\u0001\u0086\u0001\u0086\u0001\u0086\u0001\u0086\u0001\u0087"+ + "\u0001\u0087\u0001\u0087\u0001\u0087\u0001\u0088\u0001\u0088\u0001\u0088"+ + "\u0001\u0088\u0001\u0088\u0001\u0089\u0001\u0089\u0001\u0089\u0001\u0089"+ + "\u0001\u008a\u0001\u008a\u0001\u008a\u0001\u008a\u0001\u008b\u0001\u008b"+ + "\u0001\u008b\u0001\u008b\u0001\u008c\u0001\u008c\u0001\u008c\u0001\u008c"+ + "\u0001\u008d\u0001\u008d\u0001\u008d\u0001\u008d\u0001\u008e\u0001\u008e"+ + "\u0001\u008e\u0001\u008e\u0001\u008f\u0001\u008f\u0001\u008f\u0001\u008f"+ + "\u0001\u008f\u0001\u0090\u0001\u0090\u0001\u0090\u0001\u0090\u0001\u0090"+ + "\u0001\u0091\u0001\u0091\u0001\u0091\u0001\u0091\u0001\u0092\u0001\u0092"+ + "\u0001\u0092\u0001\u0092\u0001\u0093\u0001\u0093\u0001\u0093\u0001\u0093"+ + "\u0001\u0094\u0001\u0094\u0001\u0094\u0001\u0094\u0001\u0094\u0001\u0095"+ + "\u0001\u0095\u0001\u0095\u0001\u0095\u0001\u0095\u0001\u0095\u0001\u0095"+ + "\u0001\u0095\u0001\u0095\u0001\u0095\u0001\u0096\u0001\u0096\u0001\u0096"+ + "\u0001\u0096\u0001\u0097\u0001\u0097\u0001\u0097\u0001\u0097\u0001\u0098"+ + "\u0001\u0098\u0001\u0098\u0001\u0098\u0001\u0099\u0001\u0099\u0001\u0099"+ + "\u0001\u0099\u0001\u0099\u0001\u009a\u0001\u009a\u0001\u009b\u0001\u009b"+ + "\u0001\u009b\u0001\u009b\u0001\u009b\u0004\u009b\u049e\b\u009b\u000b\u009b"+ + "\f\u009b\u049f\u0001\u009c\u0001\u009c\u0001\u009c\u0001\u009c\u0001\u009d"+ + "\u0001\u009d\u0001\u009d\u0001\u009d\u0001\u009e\u0001\u009e\u0001\u009e"+ + "\u0001\u009e\u0002\u01ff\u025a\u0000\u009f\f\u0001\u000e\u0002\u0010\u0003"+ + "\u0012\u0004\u0014\u0005\u0016\u0006\u0018\u0007\u001a\b\u001c\t\u001e"+ + "\n \u000b\"\f$\r&\u000e(\u000f*\u0010,\u0011.\u00120\u00132\u00144\u0015"+ + "6\u00168\u0000:\u0000<\u0017>\u0018@\u0019B\u001aD\u0000F\u0000H\u0000"+ + "J\u0000L\u0000N\u0000P\u0000R\u0000T\u0000V\u0000X\u001bZ\u001c\\\u001d"+ + "^\u001e`\u001fb 
d!f\"h#j$l%n&p\'r(t)v*x+z,|-~.\u0080/\u00820\u00841\u0086"+ + "2\u00883\u008a4\u008c5\u008e6\u00907\u00928\u00949\u0096:\u0098;\u009a"+ + "<\u009c=\u009e>\u00a0?\u00a2@\u00a4A\u00a6B\u00a8C\u00aa\u0000\u00acD"+ + "\u00aeE\u00b0F\u00b2G\u00b4\u0000\u00b6\u0000\u00b8\u0000\u00ba\u0000"+ + "\u00bc\u0000\u00be\u0000\u00c0H\u00c2I\u00c4\u0000\u00c6J\u00c8K\u00ca"+ + "L\u00ccM\u00ce\u0000\u00d0\u0000\u00d2\u0000\u00d4\u0000\u00d6\u0000\u00d8"+ + "N\u00daO\u00dcP\u00deQ\u00e0\u0000\u00e2\u0000\u00e4\u0000\u00e6\u0000"+ + "\u00e8R\u00ea\u0000\u00ecS\u00eeT\u00f0U\u00f2\u0000\u00f4\u0000\u00f6"+ + "V\u00f8W\u00fa\u0000\u00fcX\u00fe\u0000\u0100\u0000\u0102Y\u0104Z\u0106"+ + "[\u0108\u0000\u010a\u0000\u010c\u0000\u010e\u0000\u0110\u0000\u0112\u0000"+ + "\u0114\u0000\u0116\\\u0118]\u011a^\u011c\u0000\u011e\u0000\u0120\u0000"+ + "\u0122\u0000\u0124_\u0126`\u0128a\u012a\u0000\u012cb\u012ec\u0130d\u0132"+ + "e\u0134\u0000\u0136f\u0138g\u013ah\u013ci\u013e\u0000\u0140j\u0142k\u0144"+ + "l\u0146m\u0148n\f\u0000\u0001\u0002\u0003\u0004\u0005\u0006\u0007\b\t"+ + "\n\u000b\r\u0006\u0000\t\n\r\r //[[]]\u0002\u0000\n\n\r\r\u0003\u0000"+ + "\t\n\r\r \u0001\u000009\u0002\u0000AZaz\u0005\u0000\"\"\\\\nnrrtt\u0004"+ + "\u0000\n\n\r\r\"\"\\\\\u0002\u0000EEee\u0002\u0000++--\u0001\u0000``\n"+ + "\u0000\t\n\r\r ,,//==[[]]``||\u0002\u0000**//\u000b\u0000\t\n\r\r \""+ + "#,,//::<<>?\\\\||\u04c8\u0000\f\u0001\u0000\u0000\u0000\u0000\u000e\u0001"+ + "\u0000\u0000\u0000\u0000\u0010\u0001\u0000\u0000\u0000\u0000\u0012\u0001"+ + "\u0000\u0000\u0000\u0000\u0014\u0001\u0000\u0000\u0000\u0000\u0016\u0001"+ + "\u0000\u0000\u0000\u0000\u0018\u0001\u0000\u0000\u0000\u0000\u001a\u0001"+ + "\u0000\u0000\u0000\u0000\u001c\u0001\u0000\u0000\u0000\u0000\u001e\u0001"+ + "\u0000\u0000\u0000\u0000 \u0001\u0000\u0000\u0000\u0000\"\u0001\u0000"+ + "\u0000\u0000\u0000$\u0001\u0000\u0000\u0000\u0000&\u0001\u0000\u0000\u0000"+ + "\u0000(\u0001\u0000\u0000\u0000\u0000*\u0001\u0000\u0000\u0000\u0000,"+ + 
"\u0001\u0000\u0000\u0000\u0000.\u0001\u0000\u0000\u0000\u00000\u0001\u0000"+ + "\u0000\u0000\u00002\u0001\u0000\u0000\u0000\u00004\u0001\u0000\u0000\u0000"+ + "\u00006\u0001\u0000\u0000\u0000\u00018\u0001\u0000\u0000\u0000\u0001:"+ + "\u0001\u0000\u0000\u0000\u0001<\u0001\u0000\u0000\u0000\u0001>\u0001\u0000"+ + "\u0000\u0000\u0001@\u0001\u0000\u0000\u0000\u0002B\u0001\u0000\u0000\u0000"+ + "\u0002X\u0001\u0000\u0000\u0000\u0002Z\u0001\u0000\u0000\u0000\u0002\\"+ + "\u0001\u0000\u0000\u0000\u0002^\u0001\u0000\u0000\u0000\u0002`\u0001\u0000"+ + "\u0000\u0000\u0002b\u0001\u0000\u0000\u0000\u0002d\u0001\u0000\u0000\u0000"+ + "\u0002f\u0001\u0000\u0000\u0000\u0002h\u0001\u0000\u0000\u0000\u0002j"+ + "\u0001\u0000\u0000\u0000\u0002l\u0001\u0000\u0000\u0000\u0002n\u0001\u0000"+ + "\u0000\u0000\u0002p\u0001\u0000\u0000\u0000\u0002r\u0001\u0000\u0000\u0000"+ + "\u0002t\u0001\u0000\u0000\u0000\u0002v\u0001\u0000\u0000\u0000\u0002x"+ + "\u0001\u0000\u0000\u0000\u0002z\u0001\u0000\u0000\u0000\u0002|\u0001\u0000"+ + "\u0000\u0000\u0002~\u0001\u0000\u0000\u0000\u0002\u0080\u0001\u0000\u0000"+ + "\u0000\u0002\u0082\u0001\u0000\u0000\u0000\u0002\u0084\u0001\u0000\u0000"+ + "\u0000\u0002\u0086\u0001\u0000\u0000\u0000\u0002\u0088\u0001\u0000\u0000"+ + "\u0000\u0002\u008a\u0001\u0000\u0000\u0000\u0002\u008c\u0001\u0000\u0000"+ + "\u0000\u0002\u008e\u0001\u0000\u0000\u0000\u0002\u0090\u0001\u0000\u0000"+ + "\u0000\u0002\u0092\u0001\u0000\u0000\u0000\u0002\u0094\u0001\u0000\u0000"+ + "\u0000\u0002\u0096\u0001\u0000\u0000\u0000\u0002\u0098\u0001\u0000\u0000"+ + "\u0000\u0002\u009a\u0001\u0000\u0000\u0000\u0002\u009c\u0001\u0000\u0000"+ + "\u0000\u0002\u009e\u0001\u0000\u0000\u0000\u0002\u00a0\u0001\u0000\u0000"+ + "\u0000\u0002\u00a2\u0001\u0000\u0000\u0000\u0002\u00a4\u0001\u0000\u0000"+ + "\u0000\u0002\u00a6\u0001\u0000\u0000\u0000\u0002\u00a8\u0001\u0000\u0000"+ + "\u0000\u0002\u00ac\u0001\u0000\u0000\u0000\u0002\u00ae\u0001\u0000\u0000"+ + 
"\u0000\u0002\u00b0\u0001\u0000\u0000\u0000\u0002\u00b2\u0001\u0000\u0000"+ + "\u0000\u0003\u00b4\u0001\u0000\u0000\u0000\u0003\u00b6\u0001\u0000\u0000"+ + "\u0000\u0003\u00b8\u0001\u0000\u0000\u0000\u0003\u00ba\u0001\u0000\u0000"+ + "\u0000\u0003\u00bc\u0001\u0000\u0000\u0000\u0003\u00be\u0001\u0000\u0000"+ + "\u0000\u0003\u00c0\u0001\u0000\u0000\u0000\u0003\u00c2\u0001\u0000\u0000"+ + "\u0000\u0003\u00c6\u0001\u0000\u0000\u0000\u0003\u00c8\u0001\u0000\u0000"+ + "\u0000\u0003\u00ca\u0001\u0000\u0000\u0000\u0003\u00cc\u0001\u0000\u0000"+ + "\u0000\u0004\u00ce\u0001\u0000\u0000\u0000\u0004\u00d0\u0001\u0000\u0000"+ + "\u0000\u0004\u00d2\u0001\u0000\u0000\u0000\u0004\u00d8\u0001\u0000\u0000"+ + "\u0000\u0004\u00da\u0001\u0000\u0000\u0000\u0004\u00dc\u0001\u0000\u0000"+ + "\u0000\u0004\u00de\u0001\u0000\u0000\u0000\u0005\u00e0\u0001\u0000\u0000"+ + "\u0000\u0005\u00e2\u0001\u0000\u0000\u0000\u0005\u00e4\u0001\u0000\u0000"+ + "\u0000\u0005\u00e6\u0001\u0000\u0000\u0000\u0005\u00e8\u0001\u0000\u0000"+ + "\u0000\u0005\u00ea\u0001\u0000\u0000\u0000\u0005\u00ec\u0001\u0000\u0000"+ + "\u0000\u0005\u00ee\u0001\u0000\u0000\u0000\u0005\u00f0\u0001\u0000\u0000"+ + "\u0000\u0006\u00f2\u0001\u0000\u0000\u0000\u0006\u00f4\u0001\u0000\u0000"+ + "\u0000\u0006\u00f6\u0001\u0000\u0000\u0000\u0006\u00f8\u0001\u0000\u0000"+ + "\u0000\u0006\u00fc\u0001\u0000\u0000\u0000\u0006\u00fe\u0001\u0000\u0000"+ + "\u0000\u0006\u0100\u0001\u0000\u0000\u0000\u0006\u0102\u0001\u0000\u0000"+ + "\u0000\u0006\u0104\u0001\u0000\u0000\u0000\u0006\u0106\u0001\u0000\u0000"+ + "\u0000\u0007\u0108\u0001\u0000\u0000\u0000\u0007\u010a\u0001\u0000\u0000"+ + "\u0000\u0007\u010c\u0001\u0000\u0000\u0000\u0007\u010e\u0001\u0000\u0000"+ + "\u0000\u0007\u0110\u0001\u0000\u0000\u0000\u0007\u0112\u0001\u0000\u0000"+ + "\u0000\u0007\u0114\u0001\u0000\u0000\u0000\u0007\u0116\u0001\u0000\u0000"+ + "\u0000\u0007\u0118\u0001\u0000\u0000\u0000\u0007\u011a\u0001\u0000\u0000"+ + 
"\u0000\b\u011c\u0001\u0000\u0000\u0000\b\u011e\u0001\u0000\u0000\u0000"+ + "\b\u0120\u0001\u0000\u0000\u0000\b\u0122\u0001\u0000\u0000\u0000\b\u0124"+ + "\u0001\u0000\u0000\u0000\b\u0126\u0001\u0000\u0000\u0000\b\u0128\u0001"+ + "\u0000\u0000\u0000\t\u012a\u0001\u0000\u0000\u0000\t\u012c\u0001\u0000"+ + "\u0000\u0000\t\u012e\u0001\u0000\u0000\u0000\t\u0130\u0001\u0000\u0000"+ + "\u0000\t\u0132\u0001\u0000\u0000\u0000\n\u0134\u0001\u0000\u0000\u0000"+ + "\n\u0136\u0001\u0000\u0000\u0000\n\u0138\u0001\u0000\u0000\u0000\n\u013a"+ + "\u0001\u0000\u0000\u0000\n\u013c\u0001\u0000\u0000\u0000\u000b\u013e\u0001"+ + "\u0000\u0000\u0000\u000b\u0140\u0001\u0000\u0000\u0000\u000b\u0142\u0001"+ + "\u0000\u0000\u0000\u000b\u0144\u0001\u0000\u0000\u0000\u000b\u0146\u0001"+ + "\u0000\u0000\u0000\u000b\u0148\u0001\u0000\u0000\u0000\f\u014a\u0001\u0000"+ + "\u0000\u0000\u000e\u0154\u0001\u0000\u0000\u0000\u0010\u015b\u0001\u0000"+ + "\u0000\u0000\u0012\u0164\u0001\u0000\u0000\u0000\u0014\u016b\u0001\u0000"+ + "\u0000\u0000\u0016\u0175\u0001\u0000\u0000\u0000\u0018\u017c\u0001\u0000"+ + "\u0000\u0000\u001a\u0183\u0001\u0000\u0000\u0000\u001c\u0191\u0001\u0000"+ + "\u0000\u0000\u001e\u0198\u0001\u0000\u0000\u0000 \u01a0\u0001\u0000\u0000"+ + "\u0000\"\u01a7\u0001\u0000\u0000\u0000$\u01b3\u0001\u0000\u0000\u0000"+ + "&\u01bc\u0001\u0000\u0000\u0000(\u01c2\u0001\u0000\u0000\u0000*\u01c9"+ + "\u0001\u0000\u0000\u0000,\u01d0\u0001\u0000\u0000\u0000.\u01d8\u0001\u0000"+ + "\u0000\u00000\u01e1\u0001\u0000\u0000\u00002\u01e7\u0001\u0000\u0000\u0000"+ + "4\u01f8\u0001\u0000\u0000\u00006\u0208\u0001\u0000\u0000\u00008\u020e"+ + "\u0001\u0000\u0000\u0000:\u0213\u0001\u0000\u0000\u0000<\u0218\u0001\u0000"+ + "\u0000\u0000>\u021c\u0001\u0000\u0000\u0000@\u0220\u0001\u0000\u0000\u0000"+ + "B\u0224\u0001\u0000\u0000\u0000D\u0228\u0001\u0000\u0000\u0000F\u022a"+ + "\u0001\u0000\u0000\u0000H\u022c\u0001\u0000\u0000\u0000J\u022f\u0001\u0000"+ + 
"\u0000\u0000L\u0231\u0001\u0000\u0000\u0000N\u023a\u0001\u0000\u0000\u0000"+ + "P\u023c\u0001\u0000\u0000\u0000R\u0241\u0001\u0000\u0000\u0000T\u0243"+ + "\u0001\u0000\u0000\u0000V\u0248\u0001\u0000\u0000\u0000X\u0267\u0001\u0000"+ + "\u0000\u0000Z\u026a\u0001\u0000\u0000\u0000\\\u0298\u0001\u0000\u0000"+ + "\u0000^\u029a\u0001\u0000\u0000\u0000`\u029d\u0001\u0000\u0000\u0000b"+ + "\u02a1\u0001\u0000\u0000\u0000d\u02a5\u0001\u0000\u0000\u0000f\u02a7\u0001"+ + "\u0000\u0000\u0000h\u02aa\u0001\u0000\u0000\u0000j\u02ac\u0001\u0000\u0000"+ + "\u0000l\u02b1\u0001\u0000\u0000\u0000n\u02b3\u0001\u0000\u0000\u0000p"+ + "\u02b9\u0001\u0000\u0000\u0000r\u02bf\u0001\u0000\u0000\u0000t\u02c4\u0001"+ + "\u0000\u0000\u0000v\u02c6\u0001\u0000\u0000\u0000x\u02c9\u0001\u0000\u0000"+ + "\u0000z\u02cc\u0001\u0000\u0000\u0000|\u02d1\u0001\u0000\u0000\u0000~"+ + "\u02d5\u0001\u0000\u0000\u0000\u0080\u02da\u0001\u0000\u0000\u0000\u0082"+ + "\u02e0\u0001\u0000\u0000\u0000\u0084\u02e3\u0001\u0000\u0000\u0000\u0086"+ + "\u02e5\u0001\u0000\u0000\u0000\u0088\u02eb\u0001\u0000\u0000\u0000\u008a"+ + "\u02ed\u0001\u0000\u0000\u0000\u008c\u02f2\u0001\u0000\u0000\u0000\u008e"+ + "\u02f5\u0001\u0000\u0000\u0000\u0090\u02f8\u0001\u0000\u0000\u0000\u0092"+ + "\u02fb\u0001\u0000\u0000\u0000\u0094\u02fd\u0001\u0000\u0000\u0000\u0096"+ + "\u0300\u0001\u0000\u0000\u0000\u0098\u0302\u0001\u0000\u0000\u0000\u009a"+ + "\u0305\u0001\u0000\u0000\u0000\u009c\u0307\u0001\u0000\u0000\u0000\u009e"+ + "\u0309\u0001\u0000\u0000\u0000\u00a0\u030b\u0001\u0000\u0000\u0000\u00a2"+ + "\u030d\u0001\u0000\u0000\u0000\u00a4\u030f\u0001\u0000\u0000\u0000\u00a6"+ + "\u0314\u0001\u0000\u0000\u0000\u00a8\u0329\u0001\u0000\u0000\u0000\u00aa"+ + "\u032b\u0001\u0000\u0000\u0000\u00ac\u0333\u0001\u0000\u0000\u0000\u00ae"+ + "\u0335\u0001\u0000\u0000\u0000\u00b0\u0339\u0001\u0000\u0000\u0000\u00b2"+ + "\u033d\u0001\u0000\u0000\u0000\u00b4\u0341\u0001\u0000\u0000\u0000\u00b6"+ + 
"\u0346\u0001\u0000\u0000\u0000\u00b8\u034a\u0001\u0000\u0000\u0000\u00ba"+ + "\u034e\u0001\u0000\u0000\u0000\u00bc\u0352\u0001\u0000\u0000\u0000\u00be"+ + "\u0356\u0001\u0000\u0000\u0000\u00c0\u035a\u0001\u0000\u0000\u0000\u00c2"+ + "\u0362\u0001\u0000\u0000\u0000\u00c4\u036e\u0001\u0000\u0000\u0000\u00c6"+ + "\u0371\u0001\u0000\u0000\u0000\u00c8\u0375\u0001\u0000\u0000\u0000\u00ca"+ + "\u0379\u0001\u0000\u0000\u0000\u00cc\u037d\u0001\u0000\u0000\u0000\u00ce"+ + "\u0381\u0001\u0000\u0000\u0000\u00d0\u0386\u0001\u0000\u0000\u0000\u00d2"+ + "\u038a\u0001\u0000\u0000\u0000\u00d4\u0392\u0001\u0000\u0000\u0000\u00d6"+ + "\u03a7\u0001\u0000\u0000\u0000\u00d8\u03ab\u0001\u0000\u0000\u0000\u00da"+ + "\u03af\u0001\u0000\u0000\u0000\u00dc\u03b3\u0001\u0000\u0000\u0000\u00de"+ + "\u03b7\u0001\u0000\u0000\u0000\u00e0\u03bb\u0001\u0000\u0000\u0000\u00e2"+ + "\u03c0\u0001\u0000\u0000\u0000\u00e4\u03c4\u0001\u0000\u0000\u0000\u00e6"+ + "\u03c8\u0001\u0000\u0000\u0000\u00e8\u03cc\u0001\u0000\u0000\u0000\u00ea"+ + "\u03cf\u0001\u0000\u0000\u0000\u00ec\u03d3\u0001\u0000\u0000\u0000\u00ee"+ + "\u03d7\u0001\u0000\u0000\u0000\u00f0\u03db\u0001\u0000\u0000\u0000\u00f2"+ + "\u03df\u0001\u0000\u0000\u0000\u00f4\u03e4\u0001\u0000\u0000\u0000\u00f6"+ + "\u03e9\u0001\u0000\u0000\u0000\u00f8\u03ee\u0001\u0000\u0000\u0000\u00fa"+ + "\u03f5\u0001\u0000\u0000\u0000\u00fc\u03fe\u0001\u0000\u0000\u0000\u00fe"+ + "\u0405\u0001\u0000\u0000\u0000\u0100\u0409\u0001\u0000\u0000\u0000\u0102"+ + "\u040d\u0001\u0000\u0000\u0000\u0104\u0411\u0001\u0000\u0000\u0000\u0106"+ + "\u0415\u0001\u0000\u0000\u0000\u0108\u0419\u0001\u0000\u0000\u0000\u010a"+ + "\u041f\u0001\u0000\u0000\u0000\u010c\u0423\u0001\u0000\u0000\u0000\u010e"+ + "\u0427\u0001\u0000\u0000\u0000\u0110\u042b\u0001\u0000\u0000\u0000\u0112"+ + "\u042f\u0001\u0000\u0000\u0000\u0114\u0433\u0001\u0000\u0000\u0000\u0116"+ + "\u0437\u0001\u0000\u0000\u0000\u0118\u043b\u0001\u0000\u0000\u0000\u011a"+ + 
"\u043f\u0001\u0000\u0000\u0000\u011c\u0443\u0001\u0000\u0000\u0000\u011e"+ + "\u0448\u0001\u0000\u0000\u0000\u0120\u044c\u0001\u0000\u0000\u0000\u0122"+ + "\u0450\u0001\u0000\u0000\u0000\u0124\u0454\u0001\u0000\u0000\u0000\u0126"+ + "\u0458\u0001\u0000\u0000\u0000\u0128\u045c\u0001\u0000\u0000\u0000\u012a"+ + "\u0460\u0001\u0000\u0000\u0000\u012c\u0465\u0001\u0000\u0000\u0000\u012e"+ + "\u046a\u0001\u0000\u0000\u0000\u0130\u046e\u0001\u0000\u0000\u0000\u0132"+ + "\u0472\u0001\u0000\u0000\u0000\u0134\u0476\u0001\u0000\u0000\u0000\u0136"+ + "\u047b\u0001\u0000\u0000\u0000\u0138\u0485\u0001\u0000\u0000\u0000\u013a"+ + "\u0489\u0001\u0000\u0000\u0000\u013c\u048d\u0001\u0000\u0000\u0000\u013e"+ + "\u0491\u0001\u0000\u0000\u0000\u0140\u0496\u0001\u0000\u0000\u0000\u0142"+ + "\u049d\u0001\u0000\u0000\u0000\u0144\u04a1\u0001\u0000\u0000\u0000\u0146"+ + "\u04a5\u0001\u0000\u0000\u0000\u0148\u04a9\u0001\u0000\u0000\u0000\u014a"+ + "\u014b\u0005d\u0000\u0000\u014b\u014c\u0005i\u0000\u0000\u014c\u014d\u0005"+ + "s\u0000\u0000\u014d\u014e\u0005s\u0000\u0000\u014e\u014f\u0005e\u0000"+ + "\u0000\u014f\u0150\u0005c\u0000\u0000\u0150\u0151\u0005t\u0000\u0000\u0151"+ + "\u0152\u0001\u0000\u0000\u0000\u0152\u0153\u0006\u0000\u0000\u0000\u0153"+ + "\r\u0001\u0000\u0000\u0000\u0154\u0155\u0005d\u0000\u0000\u0155\u0156"+ + "\u0005r\u0000\u0000\u0156\u0157\u0005o\u0000\u0000\u0157\u0158\u0005p"+ + "\u0000\u0000\u0158\u0159\u0001\u0000\u0000\u0000\u0159\u015a\u0006\u0001"+ + "\u0001\u0000\u015a\u000f\u0001\u0000\u0000\u0000\u015b\u015c\u0005e\u0000"+ + "\u0000\u015c\u015d\u0005n\u0000\u0000\u015d\u015e\u0005r\u0000\u0000\u015e"+ + "\u015f\u0005i\u0000\u0000\u015f\u0160\u0005c\u0000\u0000\u0160\u0161\u0005"+ + "h\u0000\u0000\u0161\u0162\u0001\u0000\u0000\u0000\u0162\u0163\u0006\u0002"+ + "\u0002\u0000\u0163\u0011\u0001\u0000\u0000\u0000\u0164\u0165\u0005e\u0000"+ + "\u0000\u0165\u0166\u0005v\u0000\u0000\u0166\u0167\u0005a\u0000\u0000\u0167"+ + 
"\u0168\u0005l\u0000\u0000\u0168\u0169\u0001\u0000\u0000\u0000\u0169\u016a"+ + "\u0006\u0003\u0000\u0000\u016a\u0013\u0001\u0000\u0000\u0000\u016b\u016c"+ + "\u0005e\u0000\u0000\u016c\u016d\u0005x\u0000\u0000\u016d\u016e\u0005p"+ + "\u0000\u0000\u016e\u016f\u0005l\u0000\u0000\u016f\u0170\u0005a\u0000\u0000"+ + "\u0170\u0171\u0005i\u0000\u0000\u0171\u0172\u0005n\u0000\u0000\u0172\u0173"+ + "\u0001\u0000\u0000\u0000\u0173\u0174\u0006\u0004\u0003\u0000\u0174\u0015"+ + "\u0001\u0000\u0000\u0000\u0175\u0176\u0005f\u0000\u0000\u0176\u0177\u0005"+ + "r\u0000\u0000\u0177\u0178\u0005o\u0000\u0000\u0178\u0179\u0005m\u0000"+ + "\u0000\u0179\u017a\u0001\u0000\u0000\u0000\u017a\u017b\u0006\u0005\u0004"+ + "\u0000\u017b\u0017\u0001\u0000\u0000\u0000\u017c\u017d\u0005g\u0000\u0000"+ + "\u017d\u017e\u0005r\u0000\u0000\u017e\u017f\u0005o\u0000\u0000\u017f\u0180"+ + "\u0005k\u0000\u0000\u0180\u0181\u0001\u0000\u0000\u0000\u0181\u0182\u0006"+ + "\u0006\u0000\u0000\u0182\u0019\u0001\u0000\u0000\u0000\u0183\u0184\u0005"+ + "i\u0000\u0000\u0184\u0185\u0005n\u0000\u0000\u0185\u0186\u0005l\u0000"+ + "\u0000\u0186\u0187\u0005i\u0000\u0000\u0187\u0188\u0005n\u0000\u0000\u0188"+ + "\u0189\u0005e\u0000\u0000\u0189\u018a\u0005s\u0000\u0000\u018a\u018b\u0005"+ + "t\u0000\u0000\u018b\u018c\u0005a\u0000\u0000\u018c\u018d\u0005t\u0000"+ + "\u0000\u018d\u018e\u0005s\u0000\u0000\u018e\u018f\u0001\u0000\u0000\u0000"+ + "\u018f\u0190\u0006\u0007\u0000\u0000\u0190\u001b\u0001\u0000\u0000\u0000"+ + "\u0191\u0192\u0005k\u0000\u0000\u0192\u0193\u0005e\u0000\u0000\u0193\u0194"+ + "\u0005e\u0000\u0000\u0194\u0195\u0005p\u0000\u0000\u0195\u0196\u0001\u0000"+ + "\u0000\u0000\u0196\u0197\u0006\b\u0001\u0000\u0197\u001d\u0001\u0000\u0000"+ + "\u0000\u0198\u0199\u0005l\u0000\u0000\u0199\u019a\u0005i\u0000\u0000\u019a"+ + "\u019b\u0005m\u0000\u0000\u019b\u019c\u0005i\u0000\u0000\u019c\u019d\u0005"+ + "t\u0000\u0000\u019d\u019e\u0001\u0000\u0000\u0000\u019e\u019f\u0006\t"+ + 
"\u0000\u0000\u019f\u001f\u0001\u0000\u0000\u0000\u01a0\u01a1\u0005m\u0000"+ + "\u0000\u01a1\u01a2\u0005e\u0000\u0000\u01a2\u01a3\u0005t\u0000\u0000\u01a3"+ + "\u01a4\u0005a\u0000\u0000\u01a4\u01a5\u0001\u0000\u0000\u0000\u01a5\u01a6"+ + "\u0006\n\u0005\u0000\u01a6!\u0001\u0000\u0000\u0000\u01a7\u01a8\u0005"+ + "m\u0000\u0000\u01a8\u01a9\u0005v\u0000\u0000\u01a9\u01aa\u0005_\u0000"+ + "\u0000\u01aa\u01ab\u0005e\u0000\u0000\u01ab\u01ac\u0005x\u0000\u0000\u01ac"+ + "\u01ad\u0005p\u0000\u0000\u01ad\u01ae\u0005a\u0000\u0000\u01ae\u01af\u0005"+ + "n\u0000\u0000\u01af\u01b0\u0005d\u0000\u0000\u01b0\u01b1\u0001\u0000\u0000"+ + "\u0000\u01b1\u01b2\u0006\u000b\u0006\u0000\u01b2#\u0001\u0000\u0000\u0000"+ + "\u01b3\u01b4\u0005r\u0000\u0000\u01b4\u01b5\u0005e\u0000\u0000\u01b5\u01b6"+ + "\u0005n\u0000\u0000\u01b6\u01b7\u0005a\u0000\u0000\u01b7\u01b8\u0005m"+ + "\u0000\u0000\u01b8\u01b9\u0005e\u0000\u0000\u01b9\u01ba\u0001\u0000\u0000"+ + "\u0000\u01ba\u01bb\u0006\f\u0007\u0000\u01bb%\u0001\u0000\u0000\u0000"+ + "\u01bc\u01bd\u0005r\u0000\u0000\u01bd\u01be\u0005o\u0000\u0000\u01be\u01bf"+ + "\u0005w\u0000\u0000\u01bf\u01c0\u0001\u0000\u0000\u0000\u01c0\u01c1\u0006"+ + "\r\u0000\u0000\u01c1\'\u0001\u0000\u0000\u0000\u01c2\u01c3\u0005s\u0000"+ + "\u0000\u01c3\u01c4\u0005h\u0000\u0000\u01c4\u01c5\u0005o\u0000\u0000\u01c5"+ + "\u01c6\u0005w\u0000\u0000\u01c6\u01c7\u0001\u0000\u0000\u0000\u01c7\u01c8"+ + "\u0006\u000e\b\u0000\u01c8)\u0001\u0000\u0000\u0000\u01c9\u01ca\u0005"+ + "s\u0000\u0000\u01ca\u01cb\u0005o\u0000\u0000\u01cb\u01cc\u0005r\u0000"+ + "\u0000\u01cc\u01cd\u0005t\u0000\u0000\u01cd\u01ce\u0001\u0000\u0000\u0000"+ + "\u01ce\u01cf\u0006\u000f\u0000\u0000\u01cf+\u0001\u0000\u0000\u0000\u01d0"+ + "\u01d1\u0005s\u0000\u0000\u01d1\u01d2\u0005t\u0000\u0000\u01d2\u01d3\u0005"+ + "a\u0000\u0000\u01d3\u01d4\u0005t\u0000\u0000\u01d4\u01d5\u0005s\u0000"+ + "\u0000\u01d5\u01d6\u0001\u0000\u0000\u0000\u01d6\u01d7\u0006\u0010\u0000"+ + 
"\u0000\u01d7-\u0001\u0000\u0000\u0000\u01d8\u01d9\u0005w\u0000\u0000\u01d9"+ + "\u01da\u0005h\u0000\u0000\u01da\u01db\u0005e\u0000\u0000\u01db\u01dc\u0005"+ + "r\u0000\u0000\u01dc\u01dd\u0005e\u0000\u0000\u01dd\u01de\u0001\u0000\u0000"+ + "\u0000\u01de\u01df\u0006\u0011\u0000\u0000\u01df/\u0001\u0000\u0000\u0000"+ + "\u01e0\u01e2\b\u0000\u0000\u0000\u01e1\u01e0\u0001\u0000\u0000\u0000\u01e2"+ + "\u01e3\u0001\u0000\u0000\u0000\u01e3\u01e1\u0001\u0000\u0000\u0000\u01e3"+ + "\u01e4\u0001\u0000\u0000\u0000\u01e4\u01e5\u0001\u0000\u0000\u0000\u01e5"+ + "\u01e6\u0006\u0012\u0000\u0000\u01e61\u0001\u0000\u0000\u0000\u01e7\u01e8"+ + "\u0005/\u0000\u0000\u01e8\u01e9\u0005/\u0000\u0000\u01e9\u01ed\u0001\u0000"+ + "\u0000\u0000\u01ea\u01ec\b\u0001\u0000\u0000\u01eb\u01ea\u0001\u0000\u0000"+ + "\u0000\u01ec\u01ef\u0001\u0000\u0000\u0000\u01ed\u01eb\u0001\u0000\u0000"+ + "\u0000\u01ed\u01ee\u0001\u0000\u0000\u0000\u01ee\u01f1\u0001\u0000\u0000"+ + "\u0000\u01ef\u01ed\u0001\u0000\u0000\u0000\u01f0\u01f2\u0005\r\u0000\u0000"+ + "\u01f1\u01f0\u0001\u0000\u0000\u0000\u01f1\u01f2\u0001\u0000\u0000\u0000"+ + "\u01f2\u01f4\u0001\u0000\u0000\u0000\u01f3\u01f5\u0005\n\u0000\u0000\u01f4"+ + "\u01f3\u0001\u0000\u0000\u0000\u01f4\u01f5\u0001\u0000\u0000\u0000\u01f5"+ + "\u01f6\u0001\u0000\u0000\u0000\u01f6\u01f7\u0006\u0013\t\u0000\u01f73"+ + "\u0001\u0000\u0000\u0000\u01f8\u01f9\u0005/\u0000\u0000\u01f9\u01fa\u0005"+ + "*\u0000\u0000\u01fa\u01ff\u0001\u0000\u0000\u0000\u01fb\u01fe\u00034\u0014"+ + "\u0000\u01fc\u01fe\t\u0000\u0000\u0000\u01fd\u01fb\u0001\u0000\u0000\u0000"+ + "\u01fd\u01fc\u0001\u0000\u0000\u0000\u01fe\u0201\u0001\u0000\u0000\u0000"+ + "\u01ff\u0200\u0001\u0000\u0000\u0000\u01ff\u01fd\u0001\u0000\u0000\u0000"+ + "\u0200\u0202\u0001\u0000\u0000\u0000\u0201\u01ff\u0001\u0000\u0000\u0000"+ + "\u0202\u0203\u0005*\u0000\u0000\u0203\u0204\u0005/\u0000\u0000\u0204\u0205"+ + "\u0001\u0000\u0000\u0000\u0205\u0206\u0006\u0014\t\u0000\u02065\u0001"+ + 
"\u0000\u0000\u0000\u0207\u0209\u0007\u0002\u0000\u0000\u0208\u0207\u0001"+ + "\u0000\u0000\u0000\u0209\u020a\u0001\u0000\u0000\u0000\u020a\u0208\u0001"+ + "\u0000\u0000\u0000\u020a\u020b\u0001\u0000\u0000\u0000\u020b\u020c\u0001"+ + "\u0000\u0000\u0000\u020c\u020d\u0006\u0015\t\u0000\u020d7\u0001\u0000"+ + "\u0000\u0000\u020e\u020f\u0003\u00a4L\u0000\u020f\u0210\u0001\u0000\u0000"+ + "\u0000\u0210\u0211\u0006\u0016\n\u0000\u0211\u0212\u0006\u0016\u000b\u0000"+ + "\u02129\u0001\u0000\u0000\u0000\u0213\u0214\u0003B\u001b\u0000\u0214\u0215"+ + "\u0001\u0000\u0000\u0000\u0215\u0216\u0006\u0017\f\u0000\u0216\u0217\u0006"+ + "\u0017\r\u0000\u0217;\u0001\u0000\u0000\u0000\u0218\u0219\u00036\u0015"+ + "\u0000\u0219\u021a\u0001\u0000\u0000\u0000\u021a\u021b\u0006\u0018\t\u0000"+ + "\u021b=\u0001\u0000\u0000\u0000\u021c\u021d\u00032\u0013\u0000\u021d\u021e"+ + "\u0001\u0000\u0000\u0000\u021e\u021f\u0006\u0019\t\u0000\u021f?\u0001"+ + "\u0000\u0000\u0000\u0220\u0221\u00034\u0014\u0000\u0221\u0222\u0001\u0000"+ + "\u0000\u0000\u0222\u0223\u0006\u001a\t\u0000\u0223A\u0001\u0000\u0000"+ + "\u0000\u0224\u0225\u0005|\u0000\u0000\u0225\u0226\u0001\u0000\u0000\u0000"+ + "\u0226\u0227\u0006\u001b\r\u0000\u0227C\u0001\u0000\u0000\u0000\u0228"+ + "\u0229\u0007\u0003\u0000\u0000\u0229E\u0001\u0000\u0000\u0000\u022a\u022b"+ + "\u0007\u0004\u0000\u0000\u022bG\u0001\u0000\u0000\u0000\u022c\u022d\u0005"+ + "\\\u0000\u0000\u022d\u022e\u0007\u0005\u0000\u0000\u022eI\u0001\u0000"+ + "\u0000\u0000\u022f\u0230\b\u0006\u0000\u0000\u0230K\u0001\u0000\u0000"+ + "\u0000\u0231\u0233\u0007\u0007\u0000\u0000\u0232\u0234\u0007\b\u0000\u0000"+ + "\u0233\u0232\u0001\u0000\u0000\u0000\u0233\u0234\u0001\u0000\u0000\u0000"+ + "\u0234\u0236\u0001\u0000\u0000\u0000\u0235\u0237\u0003D\u001c\u0000\u0236"+ + "\u0235\u0001\u0000\u0000\u0000\u0237\u0238\u0001\u0000\u0000\u0000\u0238"+ + "\u0236\u0001\u0000\u0000\u0000\u0238\u0239\u0001\u0000\u0000\u0000\u0239"+ + 
"M\u0001\u0000\u0000\u0000\u023a\u023b\u0005@\u0000\u0000\u023bO\u0001"+ + "\u0000\u0000\u0000\u023c\u023d\u0005`\u0000\u0000\u023dQ\u0001\u0000\u0000"+ + "\u0000\u023e\u0242\b\t\u0000\u0000\u023f\u0240\u0005`\u0000\u0000\u0240"+ + "\u0242\u0005`\u0000\u0000\u0241\u023e\u0001\u0000\u0000\u0000\u0241\u023f"+ + "\u0001\u0000\u0000\u0000\u0242S\u0001\u0000\u0000\u0000\u0243\u0244\u0005"+ + "_\u0000\u0000\u0244U\u0001\u0000\u0000\u0000\u0245\u0249\u0003F\u001d"+ + "\u0000\u0246\u0249\u0003D\u001c\u0000\u0247\u0249\u0003T$\u0000\u0248"+ + "\u0245\u0001\u0000\u0000\u0000\u0248\u0246\u0001\u0000\u0000\u0000\u0248"+ + "\u0247\u0001\u0000\u0000\u0000\u0249W\u0001\u0000\u0000\u0000\u024a\u024f"+ + "\u0005\"\u0000\u0000\u024b\u024e\u0003H\u001e\u0000\u024c\u024e\u0003"+ + "J\u001f\u0000\u024d\u024b\u0001\u0000\u0000\u0000\u024d\u024c\u0001\u0000"+ + "\u0000\u0000\u024e\u0251\u0001\u0000\u0000\u0000\u024f\u024d\u0001\u0000"+ + "\u0000\u0000\u024f\u0250\u0001\u0000\u0000\u0000\u0250\u0252\u0001\u0000"+ + "\u0000\u0000\u0251\u024f\u0001\u0000\u0000\u0000\u0252\u0268\u0005\"\u0000"+ + "\u0000\u0253\u0254\u0005\"\u0000\u0000\u0254\u0255\u0005\"\u0000\u0000"+ + "\u0255\u0256\u0005\"\u0000\u0000\u0256\u025a\u0001\u0000\u0000\u0000\u0257"+ + "\u0259\b\u0001\u0000\u0000\u0258\u0257\u0001\u0000\u0000\u0000\u0259\u025c"+ + "\u0001\u0000\u0000\u0000\u025a\u025b\u0001\u0000\u0000\u0000\u025a\u0258"+ + "\u0001\u0000\u0000\u0000\u025b\u025d\u0001\u0000\u0000\u0000\u025c\u025a"+ + "\u0001\u0000\u0000\u0000\u025d\u025e\u0005\"\u0000\u0000\u025e\u025f\u0005"+ + "\"\u0000\u0000\u025f\u0260\u0005\"\u0000\u0000\u0260\u0262\u0001\u0000"+ + "\u0000\u0000\u0261\u0263\u0005\"\u0000\u0000\u0262\u0261\u0001\u0000\u0000"+ + "\u0000\u0262\u0263\u0001\u0000\u0000\u0000\u0263\u0265\u0001\u0000\u0000"+ + "\u0000\u0264\u0266\u0005\"\u0000\u0000\u0265\u0264\u0001\u0000\u0000\u0000"+ + "\u0265\u0266\u0001\u0000\u0000\u0000\u0266\u0268\u0001\u0000\u0000\u0000"+ + 
"\u0267\u024a\u0001\u0000\u0000\u0000\u0267\u0253\u0001\u0000\u0000\u0000"+ + "\u0268Y\u0001\u0000\u0000\u0000\u0269\u026b\u0003D\u001c\u0000\u026a\u0269"+ + "\u0001\u0000\u0000\u0000\u026b\u026c\u0001\u0000\u0000\u0000\u026c\u026a"+ + "\u0001\u0000\u0000\u0000\u026c\u026d\u0001\u0000\u0000\u0000\u026d[\u0001"+ + "\u0000\u0000\u0000\u026e\u0270\u0003D\u001c\u0000\u026f\u026e\u0001\u0000"+ + "\u0000\u0000\u0270\u0271\u0001\u0000\u0000\u0000\u0271\u026f\u0001\u0000"+ + "\u0000\u0000\u0271\u0272\u0001\u0000\u0000\u0000\u0272\u0273\u0001\u0000"+ + "\u0000\u0000\u0273\u0277\u0003l0\u0000\u0274\u0276\u0003D\u001c\u0000"+ + "\u0275\u0274\u0001\u0000\u0000\u0000\u0276\u0279\u0001\u0000\u0000\u0000"+ + "\u0277\u0275\u0001\u0000\u0000\u0000\u0277\u0278\u0001\u0000\u0000\u0000"+ + "\u0278\u0299\u0001\u0000\u0000\u0000\u0279\u0277\u0001\u0000\u0000\u0000"+ + "\u027a\u027c\u0003l0\u0000\u027b\u027d\u0003D\u001c\u0000\u027c\u027b"+ + "\u0001\u0000\u0000\u0000\u027d\u027e\u0001\u0000\u0000\u0000\u027e\u027c"+ + "\u0001\u0000\u0000\u0000\u027e\u027f\u0001\u0000\u0000\u0000\u027f\u0299"+ + "\u0001\u0000\u0000\u0000\u0280\u0282\u0003D\u001c\u0000\u0281\u0280\u0001"+ + "\u0000\u0000\u0000\u0282\u0283\u0001\u0000\u0000\u0000\u0283\u0281\u0001"+ + "\u0000\u0000\u0000\u0283\u0284\u0001\u0000\u0000\u0000\u0284\u028c\u0001"+ + "\u0000\u0000\u0000\u0285\u0289\u0003l0\u0000\u0286\u0288\u0003D\u001c"+ + "\u0000\u0287\u0286\u0001\u0000\u0000\u0000\u0288\u028b\u0001\u0000\u0000"+ + "\u0000\u0289\u0287\u0001\u0000\u0000\u0000\u0289\u028a\u0001\u0000\u0000"+ + "\u0000\u028a\u028d\u0001\u0000\u0000\u0000\u028b\u0289\u0001\u0000\u0000"+ + "\u0000\u028c\u0285\u0001\u0000\u0000\u0000\u028c\u028d\u0001\u0000\u0000"+ + "\u0000\u028d\u028e\u0001\u0000\u0000\u0000\u028e\u028f\u0003L \u0000\u028f"+ + "\u0299\u0001\u0000\u0000\u0000\u0290\u0292\u0003l0\u0000\u0291\u0293\u0003"+ + "D\u001c\u0000\u0292\u0291\u0001\u0000\u0000\u0000\u0293\u0294\u0001\u0000"+ + 
"\u0000\u0000\u0294\u0292\u0001\u0000\u0000\u0000\u0294\u0295\u0001\u0000"+ + "\u0000\u0000\u0295\u0296\u0001\u0000\u0000\u0000\u0296\u0297\u0003L \u0000"+ + "\u0297\u0299\u0001\u0000\u0000\u0000\u0298\u026f\u0001\u0000\u0000\u0000"+ + "\u0298\u027a\u0001\u0000\u0000\u0000\u0298\u0281\u0001\u0000\u0000\u0000"+ + "\u0298\u0290\u0001\u0000\u0000\u0000\u0299]\u0001\u0000\u0000\u0000\u029a"+ + "\u029b\u0005b\u0000\u0000\u029b\u029c\u0005y\u0000\u0000\u029c_\u0001"+ + "\u0000\u0000\u0000\u029d\u029e\u0005a\u0000\u0000\u029e\u029f\u0005n\u0000"+ + "\u0000\u029f\u02a0\u0005d\u0000\u0000\u02a0a\u0001\u0000\u0000\u0000\u02a1"+ + "\u02a2\u0005a\u0000\u0000\u02a2\u02a3\u0005s\u0000\u0000\u02a3\u02a4\u0005"+ + "c\u0000\u0000\u02a4c\u0001\u0000\u0000\u0000\u02a5\u02a6\u0005=\u0000"+ + "\u0000\u02a6e\u0001\u0000\u0000\u0000\u02a7\u02a8\u0005:\u0000\u0000\u02a8"+ + "\u02a9\u0005:\u0000\u0000\u02a9g\u0001\u0000\u0000\u0000\u02aa\u02ab\u0005"+ + ",\u0000\u0000\u02abi\u0001\u0000\u0000\u0000\u02ac\u02ad\u0005d\u0000"+ + "\u0000\u02ad\u02ae\u0005e\u0000\u0000\u02ae\u02af\u0005s\u0000\u0000\u02af"+ + "\u02b0\u0005c\u0000\u0000\u02b0k\u0001\u0000\u0000\u0000\u02b1\u02b2\u0005"+ + ".\u0000\u0000\u02b2m\u0001\u0000\u0000\u0000\u02b3\u02b4\u0005f\u0000"+ + "\u0000\u02b4\u02b5\u0005a\u0000\u0000\u02b5\u02b6\u0005l\u0000\u0000\u02b6"+ + "\u02b7\u0005s\u0000\u0000\u02b7\u02b8\u0005e\u0000\u0000\u02b8o\u0001"+ + "\u0000\u0000\u0000\u02b9\u02ba\u0005f\u0000\u0000\u02ba\u02bb\u0005i\u0000"+ + "\u0000\u02bb\u02bc\u0005r\u0000\u0000\u02bc\u02bd\u0005s\u0000\u0000\u02bd"+ + "\u02be\u0005t\u0000\u0000\u02beq\u0001\u0000\u0000\u0000\u02bf\u02c0\u0005"+ + "l\u0000\u0000\u02c0\u02c1\u0005a\u0000\u0000\u02c1\u02c2\u0005s\u0000"+ + "\u0000\u02c2\u02c3\u0005t\u0000\u0000\u02c3s\u0001\u0000\u0000\u0000\u02c4"+ + "\u02c5\u0005(\u0000\u0000\u02c5u\u0001\u0000\u0000\u0000\u02c6\u02c7\u0005"+ + "i\u0000\u0000\u02c7\u02c8\u0005n\u0000\u0000\u02c8w\u0001\u0000\u0000"+ + 
"\u0000\u02c9\u02ca\u0005i\u0000\u0000\u02ca\u02cb\u0005s\u0000\u0000\u02cb"+ + "y\u0001\u0000\u0000\u0000\u02cc\u02cd\u0005l\u0000\u0000\u02cd\u02ce\u0005"+ + "i\u0000\u0000\u02ce\u02cf\u0005k\u0000\u0000\u02cf\u02d0\u0005e\u0000"+ + "\u0000\u02d0{\u0001\u0000\u0000\u0000\u02d1\u02d2\u0005n\u0000\u0000\u02d2"+ + "\u02d3\u0005o\u0000\u0000\u02d3\u02d4\u0005t\u0000\u0000\u02d4}\u0001"+ + "\u0000\u0000\u0000\u02d5\u02d6\u0005n\u0000\u0000\u02d6\u02d7\u0005u\u0000"+ + "\u0000\u02d7\u02d8\u0005l\u0000\u0000\u02d8\u02d9\u0005l\u0000\u0000\u02d9"+ + "\u007f\u0001\u0000\u0000\u0000\u02da\u02db\u0005n\u0000\u0000\u02db\u02dc"+ + "\u0005u\u0000\u0000\u02dc\u02dd\u0005l\u0000\u0000\u02dd\u02de\u0005l"+ + "\u0000\u0000\u02de\u02df\u0005s\u0000\u0000\u02df\u0081\u0001\u0000\u0000"+ + "\u0000\u02e0\u02e1\u0005o\u0000\u0000\u02e1\u02e2\u0005r\u0000\u0000\u02e2"+ + "\u0083\u0001\u0000\u0000\u0000\u02e3\u02e4\u0005?\u0000\u0000\u02e4\u0085"+ + "\u0001\u0000\u0000\u0000\u02e5\u02e6\u0005r\u0000\u0000\u02e6\u02e7\u0005"+ + "l\u0000\u0000\u02e7\u02e8\u0005i\u0000\u0000\u02e8\u02e9\u0005k\u0000"+ + "\u0000\u02e9\u02ea\u0005e\u0000\u0000\u02ea\u0087\u0001\u0000\u0000\u0000"+ + "\u02eb\u02ec\u0005)\u0000\u0000\u02ec\u0089\u0001\u0000\u0000\u0000\u02ed"+ + "\u02ee\u0005t\u0000\u0000\u02ee\u02ef\u0005r\u0000\u0000\u02ef\u02f0\u0005"+ + "u\u0000\u0000\u02f0\u02f1\u0005e\u0000\u0000\u02f1\u008b\u0001\u0000\u0000"+ + "\u0000\u02f2\u02f3\u0005=\u0000\u0000\u02f3\u02f4\u0005=\u0000\u0000\u02f4"+ + "\u008d\u0001\u0000\u0000\u0000\u02f5\u02f6\u0005=\u0000\u0000\u02f6\u02f7"+ + "\u0005~\u0000\u0000\u02f7\u008f\u0001\u0000\u0000\u0000\u02f8\u02f9\u0005"+ + "!\u0000\u0000\u02f9\u02fa\u0005=\u0000\u0000\u02fa\u0091\u0001\u0000\u0000"+ + "\u0000\u02fb\u02fc\u0005<\u0000\u0000\u02fc\u0093\u0001\u0000\u0000\u0000"+ + "\u02fd\u02fe\u0005<\u0000\u0000\u02fe\u02ff\u0005=\u0000\u0000\u02ff\u0095"+ + "\u0001\u0000\u0000\u0000\u0300\u0301\u0005>\u0000\u0000\u0301\u0097\u0001"+ + 
"\u0000\u0000\u0000\u0302\u0303\u0005>\u0000\u0000\u0303\u0304\u0005=\u0000"+ + "\u0000\u0304\u0099\u0001\u0000\u0000\u0000\u0305\u0306\u0005+\u0000\u0000"+ + "\u0306\u009b\u0001\u0000\u0000\u0000\u0307\u0308\u0005-\u0000\u0000\u0308"+ + "\u009d\u0001\u0000\u0000\u0000\u0309\u030a\u0005*\u0000\u0000\u030a\u009f"+ + "\u0001\u0000\u0000\u0000\u030b\u030c\u0005/\u0000\u0000\u030c\u00a1\u0001"+ + "\u0000\u0000\u0000\u030d\u030e\u0005%\u0000\u0000\u030e\u00a3\u0001\u0000"+ + "\u0000\u0000\u030f\u0310\u0005[\u0000\u0000\u0310\u0311\u0001\u0000\u0000"+ + "\u0000\u0311\u0312\u0006L\u0000\u0000\u0312\u0313\u0006L\u0000\u0000\u0313"+ + "\u00a5\u0001\u0000\u0000\u0000\u0314\u0315\u0005]\u0000\u0000\u0315\u0316"+ + "\u0001\u0000\u0000\u0000\u0316\u0317\u0006M\r\u0000\u0317\u0318\u0006"+ + "M\r\u0000\u0318\u00a7\u0001\u0000\u0000\u0000\u0319\u031d\u0003F\u001d"+ + "\u0000\u031a\u031c\u0003V%\u0000\u031b\u031a\u0001\u0000\u0000\u0000\u031c"+ + "\u031f\u0001\u0000\u0000\u0000\u031d\u031b\u0001\u0000\u0000\u0000\u031d"+ + "\u031e\u0001\u0000\u0000\u0000\u031e\u032a\u0001\u0000\u0000\u0000\u031f"+ + "\u031d\u0001\u0000\u0000\u0000\u0320\u0323\u0003T$\u0000\u0321\u0323\u0003"+ + "N!\u0000\u0322\u0320\u0001\u0000\u0000\u0000\u0322\u0321\u0001\u0000\u0000"+ + "\u0000\u0323\u0325\u0001\u0000\u0000\u0000\u0324\u0326\u0003V%\u0000\u0325"+ + "\u0324\u0001\u0000\u0000\u0000\u0326\u0327\u0001\u0000\u0000\u0000\u0327"+ + "\u0325\u0001\u0000\u0000\u0000\u0327\u0328\u0001\u0000\u0000\u0000\u0328"+ + "\u032a\u0001\u0000\u0000\u0000\u0329\u0319\u0001\u0000\u0000\u0000\u0329"+ + "\u0322\u0001\u0000\u0000\u0000\u032a\u00a9\u0001\u0000\u0000\u0000\u032b"+ + "\u032d\u0003P\"\u0000\u032c\u032e\u0003R#\u0000\u032d\u032c\u0001\u0000"+ + "\u0000\u0000\u032e\u032f\u0001\u0000\u0000\u0000\u032f\u032d\u0001\u0000"+ + "\u0000\u0000\u032f\u0330\u0001\u0000\u0000\u0000\u0330\u0331\u0001\u0000"+ + "\u0000\u0000\u0331\u0332\u0003P\"\u0000\u0332\u00ab\u0001\u0000\u0000"+ + 
"\u0000\u0333\u0334\u0003\u00aaO\u0000\u0334\u00ad\u0001\u0000\u0000\u0000"+ + "\u0335\u0336\u00032\u0013\u0000\u0336\u0337\u0001\u0000\u0000\u0000\u0337"+ + "\u0338\u0006Q\t\u0000\u0338\u00af\u0001\u0000\u0000\u0000\u0339\u033a"+ + "\u00034\u0014\u0000\u033a\u033b\u0001\u0000\u0000\u0000\u033b\u033c\u0006"+ + "R\t\u0000\u033c\u00b1\u0001\u0000\u0000\u0000\u033d\u033e\u00036\u0015"+ + "\u0000\u033e\u033f\u0001\u0000\u0000\u0000\u033f\u0340\u0006S\t\u0000"+ + "\u0340\u00b3\u0001\u0000\u0000\u0000\u0341\u0342\u0003B\u001b\u0000\u0342"+ + "\u0343\u0001\u0000\u0000\u0000\u0343\u0344\u0006T\f\u0000\u0344\u0345"+ + "\u0006T\r\u0000\u0345\u00b5\u0001\u0000\u0000\u0000\u0346\u0347\u0003"+ + "\u00a4L\u0000\u0347\u0348\u0001\u0000\u0000\u0000\u0348\u0349\u0006U\n"+ + "\u0000\u0349\u00b7\u0001\u0000\u0000\u0000\u034a\u034b\u0003\u00a6M\u0000"+ + "\u034b\u034c\u0001\u0000\u0000\u0000\u034c\u034d\u0006V\u000e\u0000\u034d"+ + "\u00b9\u0001\u0000\u0000\u0000\u034e\u034f\u0003h.\u0000\u034f\u0350\u0001"+ + "\u0000\u0000\u0000\u0350\u0351\u0006W\u000f\u0000\u0351\u00bb\u0001\u0000"+ + "\u0000\u0000\u0352\u0353\u0003d,\u0000\u0353\u0354\u0001\u0000\u0000\u0000"+ + "\u0354\u0355\u0006X\u0010\u0000\u0355\u00bd\u0001\u0000\u0000\u0000\u0356"+ + "\u0357\u0003X&\u0000\u0357\u0358\u0001\u0000\u0000\u0000\u0358\u0359\u0006"+ + "Y\u0011\u0000\u0359\u00bf\u0001\u0000\u0000\u0000\u035a\u035b\u0005o\u0000"+ + "\u0000\u035b\u035c\u0005p\u0000\u0000\u035c\u035d\u0005t\u0000\u0000\u035d"+ + "\u035e\u0005i\u0000\u0000\u035e\u035f\u0005o\u0000\u0000\u035f\u0360\u0005"+ + "n\u0000\u0000\u0360\u0361\u0005s\u0000\u0000\u0361\u00c1\u0001\u0000\u0000"+ + "\u0000\u0362\u0363\u0005m\u0000\u0000\u0363\u0364\u0005e\u0000\u0000\u0364"+ + "\u0365\u0005t\u0000\u0000\u0365\u0366\u0005a\u0000\u0000\u0366\u0367\u0005"+ + "d\u0000\u0000\u0367\u0368\u0005a\u0000\u0000\u0368\u0369\u0005t\u0000"+ + "\u0000\u0369\u036a\u0005a\u0000\u0000\u036a\u00c3\u0001\u0000\u0000\u0000"+ + 
"\u036b\u036f\b\n\u0000\u0000\u036c\u036d\u0005/\u0000\u0000\u036d\u036f"+ + "\b\u000b\u0000\u0000\u036e\u036b\u0001\u0000\u0000\u0000\u036e\u036c\u0001"+ + "\u0000\u0000\u0000\u036f\u00c5\u0001\u0000\u0000\u0000\u0370\u0372\u0003"+ + "\u00c4\\\u0000\u0371\u0370\u0001\u0000\u0000\u0000\u0372\u0373\u0001\u0000"+ + "\u0000\u0000\u0373\u0371\u0001\u0000\u0000\u0000\u0373\u0374\u0001\u0000"+ + "\u0000\u0000\u0374\u00c7\u0001\u0000\u0000\u0000\u0375\u0376\u00032\u0013"+ + "\u0000\u0376\u0377\u0001\u0000\u0000\u0000\u0377\u0378\u0006^\t\u0000"+ + "\u0378\u00c9\u0001\u0000\u0000\u0000\u0379\u037a\u00034\u0014\u0000\u037a"+ + "\u037b\u0001\u0000\u0000\u0000\u037b\u037c\u0006_\t\u0000\u037c\u00cb"+ + "\u0001\u0000\u0000\u0000\u037d\u037e\u00036\u0015\u0000\u037e\u037f\u0001"+ + "\u0000\u0000\u0000\u037f\u0380\u0006`\t\u0000\u0380\u00cd\u0001\u0000"+ + "\u0000\u0000\u0381\u0382\u0003B\u001b\u0000\u0382\u0383\u0001\u0000\u0000"+ + "\u0000\u0383\u0384\u0006a\f\u0000\u0384\u0385\u0006a\r\u0000\u0385\u00cf"+ + "\u0001\u0000\u0000\u0000\u0386\u0387\u0003l0\u0000\u0387\u0388\u0001\u0000"+ + "\u0000\u0000\u0388\u0389\u0006b\u0012\u0000\u0389\u00d1\u0001\u0000\u0000"+ + "\u0000\u038a\u038b\u0003h.\u0000\u038b\u038c\u0001\u0000\u0000\u0000\u038c"+ + "\u038d\u0006c\u000f\u0000\u038d\u00d3\u0001\u0000\u0000\u0000\u038e\u0393"+ + "\u0003F\u001d\u0000\u038f\u0393\u0003D\u001c\u0000\u0390\u0393\u0003T"+ + "$\u0000\u0391\u0393\u0003\u009eI\u0000\u0392\u038e\u0001\u0000\u0000\u0000"+ + "\u0392\u038f\u0001\u0000\u0000\u0000\u0392\u0390\u0001\u0000\u0000\u0000"+ + "\u0392\u0391\u0001\u0000\u0000\u0000\u0393\u00d5\u0001\u0000\u0000\u0000"+ + "\u0394\u0397\u0003F\u001d\u0000\u0395\u0397\u0003\u009eI\u0000\u0396\u0394"+ + "\u0001\u0000\u0000\u0000\u0396\u0395\u0001\u0000\u0000\u0000\u0397\u039b"+ + "\u0001\u0000\u0000\u0000\u0398\u039a\u0003\u00d4d\u0000\u0399\u0398\u0001"+ + "\u0000\u0000\u0000\u039a\u039d\u0001\u0000\u0000\u0000\u039b\u0399\u0001"+ + 
"\u0000\u0000\u0000\u039b\u039c\u0001\u0000\u0000\u0000\u039c\u03a8\u0001"+ + "\u0000\u0000\u0000\u039d\u039b\u0001\u0000\u0000\u0000\u039e\u03a1\u0003"+ + "T$\u0000\u039f\u03a1\u0003N!\u0000\u03a0\u039e\u0001\u0000\u0000\u0000"+ + "\u03a0\u039f\u0001\u0000\u0000\u0000\u03a1\u03a3\u0001\u0000\u0000\u0000"+ + "\u03a2\u03a4\u0003\u00d4d\u0000\u03a3\u03a2\u0001\u0000\u0000\u0000\u03a4"+ + "\u03a5\u0001\u0000\u0000\u0000\u03a5\u03a3\u0001\u0000\u0000\u0000\u03a5"+ + "\u03a6\u0001\u0000\u0000\u0000\u03a6\u03a8\u0001\u0000\u0000\u0000\u03a7"+ + "\u0396\u0001\u0000\u0000\u0000\u03a7\u03a0\u0001\u0000\u0000\u0000\u03a8"+ + "\u00d7\u0001\u0000\u0000\u0000\u03a9\u03ac\u0003\u00d6e\u0000\u03aa\u03ac"+ + "\u0003\u00aaO\u0000\u03ab\u03a9\u0001\u0000\u0000\u0000\u03ab\u03aa\u0001"+ + "\u0000\u0000\u0000\u03ac\u03ad\u0001\u0000\u0000\u0000\u03ad\u03ab\u0001"+ + "\u0000\u0000\u0000\u03ad\u03ae\u0001\u0000\u0000\u0000\u03ae\u00d9\u0001"+ + "\u0000\u0000\u0000\u03af\u03b0\u00032\u0013\u0000\u03b0\u03b1\u0001\u0000"+ + "\u0000\u0000\u03b1\u03b2\u0006g\t\u0000\u03b2\u00db\u0001\u0000\u0000"+ + "\u0000\u03b3\u03b4\u00034\u0014\u0000\u03b4\u03b5\u0001\u0000\u0000\u0000"+ + "\u03b5\u03b6\u0006h\t\u0000\u03b6\u00dd\u0001\u0000\u0000\u0000\u03b7"+ + "\u03b8\u00036\u0015\u0000\u03b8\u03b9\u0001\u0000\u0000\u0000\u03b9\u03ba"+ + "\u0006i\t\u0000\u03ba\u00df\u0001\u0000\u0000\u0000\u03bb\u03bc\u0003"+ + "B\u001b\u0000\u03bc\u03bd\u0001\u0000\u0000\u0000\u03bd\u03be\u0006j\f"+ + "\u0000\u03be\u03bf\u0006j\r\u0000\u03bf\u00e1\u0001\u0000\u0000\u0000"+ + "\u03c0\u03c1\u0003d,\u0000\u03c1\u03c2\u0001\u0000\u0000\u0000\u03c2\u03c3"+ + "\u0006k\u0010\u0000\u03c3\u00e3\u0001\u0000\u0000\u0000\u03c4\u03c5\u0003"+ + "h.\u0000\u03c5\u03c6\u0001\u0000\u0000\u0000\u03c6\u03c7\u0006l\u000f"+ + "\u0000\u03c7\u00e5\u0001\u0000\u0000\u0000\u03c8\u03c9\u0003l0\u0000\u03c9"+ + "\u03ca\u0001\u0000\u0000\u0000\u03ca\u03cb\u0006m\u0012\u0000\u03cb\u00e7"+ + 
"\u0001\u0000\u0000\u0000\u03cc\u03cd\u0005a\u0000\u0000\u03cd\u03ce\u0005"+ + "s\u0000\u0000\u03ce\u00e9\u0001\u0000\u0000\u0000\u03cf\u03d0\u0003\u00d8"+ + "f\u0000\u03d0\u03d1\u0001\u0000\u0000\u0000\u03d1\u03d2\u0006o\u0013\u0000"+ + "\u03d2\u00eb\u0001\u0000\u0000\u0000\u03d3\u03d4\u00032\u0013\u0000\u03d4"+ + "\u03d5\u0001\u0000\u0000\u0000\u03d5\u03d6\u0006p\t\u0000\u03d6\u00ed"+ + "\u0001\u0000\u0000\u0000\u03d7\u03d8\u00034\u0014\u0000\u03d8\u03d9\u0001"+ + "\u0000\u0000\u0000\u03d9\u03da\u0006q\t\u0000\u03da\u00ef\u0001\u0000"+ + "\u0000\u0000\u03db\u03dc\u00036\u0015\u0000\u03dc\u03dd\u0001\u0000\u0000"+ + "\u0000\u03dd\u03de\u0006r\t\u0000\u03de\u00f1\u0001\u0000\u0000\u0000"+ + "\u03df\u03e0\u0003B\u001b\u0000\u03e0\u03e1\u0001\u0000\u0000\u0000\u03e1"+ + "\u03e2\u0006s\f\u0000\u03e2\u03e3\u0006s\r\u0000\u03e3\u00f3\u0001\u0000"+ + "\u0000\u0000\u03e4\u03e5\u0003\u00a4L\u0000\u03e5\u03e6\u0001\u0000\u0000"+ + "\u0000\u03e6\u03e7\u0006t\n\u0000\u03e7\u03e8\u0006t\u0014\u0000\u03e8"+ + "\u00f5\u0001\u0000\u0000\u0000\u03e9\u03ea\u0005o\u0000\u0000\u03ea\u03eb"+ + "\u0005n\u0000\u0000\u03eb\u03ec\u0001\u0000\u0000\u0000\u03ec\u03ed\u0006"+ + "u\u0015\u0000\u03ed\u00f7\u0001\u0000\u0000\u0000\u03ee\u03ef\u0005w\u0000"+ + "\u0000\u03ef\u03f0\u0005i\u0000\u0000\u03f0\u03f1\u0005t\u0000\u0000\u03f1"+ + "\u03f2\u0005h\u0000\u0000\u03f2\u03f3\u0001\u0000\u0000\u0000\u03f3\u03f4"+ + "\u0006v\u0015\u0000\u03f4\u00f9\u0001\u0000\u0000\u0000\u03f5\u03f6\b"+ + "\f\u0000\u0000\u03f6\u00fb\u0001\u0000\u0000\u0000\u03f7\u03f9\u0003\u00fa"+ + "w\u0000\u03f8\u03f7\u0001\u0000\u0000\u0000\u03f9\u03fa\u0001\u0000\u0000"+ + "\u0000\u03fa\u03f8\u0001\u0000\u0000\u0000\u03fa\u03fb\u0001\u0000\u0000"+ + "\u0000\u03fb\u03fc\u0001\u0000\u0000\u0000\u03fc\u03fd\u0003\u0140\u009a"+ + "\u0000\u03fd\u03ff\u0001\u0000\u0000\u0000\u03fe\u03f8\u0001\u0000\u0000"+ + "\u0000\u03fe\u03ff\u0001\u0000\u0000\u0000\u03ff\u0401\u0001\u0000\u0000"+ + 
"\u0000\u0400\u0402\u0003\u00faw\u0000\u0401\u0400\u0001\u0000\u0000\u0000"+ + "\u0402\u0403\u0001\u0000\u0000\u0000\u0403\u0401\u0001\u0000\u0000\u0000"+ + "\u0403\u0404\u0001\u0000\u0000\u0000\u0404\u00fd\u0001\u0000\u0000\u0000"+ + "\u0405\u0406\u0003\u00acP\u0000\u0406\u0407\u0001\u0000\u0000\u0000\u0407"+ + "\u0408\u0006y\u0016\u0000\u0408\u00ff\u0001\u0000\u0000\u0000\u0409\u040a"+ + "\u0003\u00fcx\u0000\u040a\u040b\u0001\u0000\u0000\u0000\u040b\u040c\u0006"+ + "z\u0017\u0000\u040c\u0101\u0001\u0000\u0000\u0000\u040d\u040e\u00032\u0013"+ + "\u0000\u040e\u040f\u0001\u0000\u0000\u0000\u040f\u0410\u0006{\t\u0000"+ + "\u0410\u0103\u0001\u0000\u0000\u0000\u0411\u0412\u00034\u0014\u0000\u0412"+ + "\u0413\u0001\u0000\u0000\u0000\u0413\u0414\u0006|\t\u0000\u0414\u0105"+ + "\u0001\u0000\u0000\u0000\u0415\u0416\u00036\u0015\u0000\u0416\u0417\u0001"+ + "\u0000\u0000\u0000\u0417\u0418\u0006}\t\u0000\u0418\u0107\u0001\u0000"+ + "\u0000\u0000\u0419\u041a\u0003B\u001b\u0000\u041a\u041b\u0001\u0000\u0000"+ + "\u0000\u041b\u041c\u0006~\f\u0000\u041c\u041d\u0006~\r\u0000\u041d\u041e"+ + "\u0006~\r\u0000\u041e\u0109\u0001\u0000\u0000\u0000\u041f\u0420\u0003"+ + "d,\u0000\u0420\u0421\u0001\u0000\u0000\u0000\u0421\u0422\u0006\u007f\u0010"+ + "\u0000\u0422\u010b\u0001\u0000\u0000\u0000\u0423\u0424\u0003h.\u0000\u0424"+ + "\u0425\u0001\u0000\u0000\u0000\u0425\u0426\u0006\u0080\u000f\u0000\u0426"+ + "\u010d\u0001\u0000\u0000\u0000\u0427\u0428\u0003l0\u0000\u0428\u0429\u0001"+ + "\u0000\u0000\u0000\u0429\u042a\u0006\u0081\u0012\u0000\u042a\u010f\u0001"+ + "\u0000\u0000\u0000\u042b\u042c\u0003\u00f8v\u0000\u042c\u042d\u0001\u0000"+ + "\u0000\u0000\u042d\u042e\u0006\u0082\u0018\u0000\u042e\u0111\u0001\u0000"+ + "\u0000\u0000\u042f\u0430\u0003\u00d8f\u0000\u0430\u0431\u0001\u0000\u0000"+ + "\u0000\u0431\u0432\u0006\u0083\u0013\u0000\u0432\u0113\u0001\u0000\u0000"+ + "\u0000\u0433\u0434\u0003\u00acP\u0000\u0434\u0435\u0001\u0000\u0000\u0000"+ + 
"\u0435\u0436\u0006\u0084\u0016\u0000\u0436\u0115\u0001\u0000\u0000\u0000"+ + "\u0437\u0438\u00032\u0013\u0000\u0438\u0439\u0001\u0000\u0000\u0000\u0439"+ + "\u043a\u0006\u0085\t\u0000\u043a\u0117\u0001\u0000\u0000\u0000\u043b\u043c"+ + "\u00034\u0014\u0000\u043c\u043d\u0001\u0000\u0000\u0000\u043d\u043e\u0006"+ + "\u0086\t\u0000\u043e\u0119\u0001\u0000\u0000\u0000\u043f\u0440\u00036"+ + "\u0015\u0000\u0440\u0441\u0001\u0000\u0000\u0000\u0441\u0442\u0006\u0087"+ + "\t\u0000\u0442\u011b\u0001\u0000\u0000\u0000\u0443\u0444\u0003B\u001b"+ + "\u0000\u0444\u0445\u0001\u0000\u0000\u0000\u0445\u0446\u0006\u0088\f\u0000"+ + "\u0446\u0447\u0006\u0088\r\u0000\u0447\u011d\u0001\u0000\u0000\u0000\u0448"+ + "\u0449\u0003l0\u0000\u0449\u044a\u0001\u0000\u0000\u0000\u044a\u044b\u0006"+ + "\u0089\u0012\u0000\u044b\u011f\u0001\u0000\u0000\u0000\u044c\u044d\u0003"+ + "\u00acP\u0000\u044d\u044e\u0001\u0000\u0000\u0000\u044e\u044f\u0006\u008a"+ + "\u0016\u0000\u044f\u0121\u0001\u0000\u0000\u0000\u0450\u0451\u0003\u00a8"+ + "N\u0000\u0451\u0452\u0001\u0000\u0000\u0000\u0452\u0453\u0006\u008b\u0019"+ + "\u0000\u0453\u0123\u0001\u0000\u0000\u0000\u0454\u0455\u00032\u0013\u0000"+ + "\u0455\u0456\u0001\u0000\u0000\u0000\u0456\u0457\u0006\u008c\t\u0000\u0457"+ + "\u0125\u0001\u0000\u0000\u0000\u0458\u0459\u00034\u0014\u0000\u0459\u045a"+ + "\u0001\u0000\u0000\u0000\u045a\u045b\u0006\u008d\t\u0000\u045b\u0127\u0001"+ + "\u0000\u0000\u0000\u045c\u045d\u00036\u0015\u0000\u045d\u045e\u0001\u0000"+ + "\u0000\u0000\u045e\u045f\u0006\u008e\t\u0000\u045f\u0129\u0001\u0000\u0000"+ + "\u0000\u0460\u0461\u0003B\u001b\u0000\u0461\u0462\u0001\u0000\u0000\u0000"+ + "\u0462\u0463\u0006\u008f\f\u0000\u0463\u0464\u0006\u008f\r\u0000\u0464"+ + "\u012b\u0001\u0000\u0000\u0000\u0465\u0466\u0005i\u0000\u0000\u0466\u0467"+ + "\u0005n\u0000\u0000\u0467\u0468\u0005f\u0000\u0000\u0468\u0469\u0005o"+ + "\u0000\u0000\u0469\u012d\u0001\u0000\u0000\u0000\u046a\u046b\u00032\u0013"+ + 
"\u0000\u046b\u046c\u0001\u0000\u0000\u0000\u046c\u046d\u0006\u0091\t\u0000"+ + "\u046d\u012f\u0001\u0000\u0000\u0000\u046e\u046f\u00034\u0014\u0000\u046f"+ + "\u0470\u0001\u0000\u0000\u0000\u0470\u0471\u0006\u0092\t\u0000\u0471\u0131"+ + "\u0001\u0000\u0000\u0000\u0472\u0473\u00036\u0015\u0000\u0473\u0474\u0001"+ + "\u0000\u0000\u0000\u0474\u0475\u0006\u0093\t\u0000\u0475\u0133\u0001\u0000"+ + "\u0000\u0000\u0476\u0477\u0003B\u001b\u0000\u0477\u0478\u0001\u0000\u0000"+ + "\u0000\u0478\u0479\u0006\u0094\f\u0000\u0479\u047a\u0006\u0094\r\u0000"+ + "\u047a\u0135\u0001\u0000\u0000\u0000\u047b\u047c\u0005f\u0000\u0000\u047c"+ + "\u047d\u0005u\u0000\u0000\u047d\u047e\u0005n\u0000\u0000\u047e\u047f\u0005"+ + "c\u0000\u0000\u047f\u0480\u0005t\u0000\u0000\u0480\u0481\u0005i\u0000"+ + "\u0000\u0481\u0482\u0005o\u0000\u0000\u0482\u0483\u0005n\u0000\u0000\u0483"+ + "\u0484\u0005s\u0000\u0000\u0484\u0137\u0001\u0000\u0000\u0000\u0485\u0486"+ + "\u00032\u0013\u0000\u0486\u0487\u0001\u0000\u0000\u0000\u0487\u0488\u0006"+ + "\u0096\t\u0000\u0488\u0139\u0001\u0000\u0000\u0000\u0489\u048a\u00034"+ + "\u0014\u0000\u048a\u048b\u0001\u0000\u0000\u0000\u048b\u048c\u0006\u0097"+ + "\t\u0000\u048c\u013b\u0001\u0000\u0000\u0000\u048d\u048e\u00036\u0015"+ + "\u0000\u048e\u048f\u0001\u0000\u0000\u0000\u048f\u0490\u0006\u0098\t\u0000"+ + "\u0490\u013d\u0001\u0000\u0000\u0000\u0491\u0492\u0003\u00a6M\u0000\u0492"+ + "\u0493\u0001\u0000\u0000\u0000\u0493\u0494\u0006\u0099\u000e\u0000\u0494"+ + "\u0495\u0006\u0099\r\u0000\u0495\u013f\u0001\u0000\u0000\u0000\u0496\u0497"+ + "\u0005:\u0000\u0000\u0497\u0141\u0001\u0000\u0000\u0000\u0498\u049e\u0003"+ + "N!\u0000\u0499\u049e\u0003D\u001c\u0000\u049a\u049e\u0003l0\u0000\u049b"+ + "\u049e\u0003F\u001d\u0000\u049c\u049e\u0003T$\u0000\u049d\u0498\u0001"+ + "\u0000\u0000\u0000\u049d\u0499\u0001\u0000\u0000\u0000\u049d\u049a\u0001"+ + "\u0000\u0000\u0000\u049d\u049b\u0001\u0000\u0000\u0000\u049d\u049c\u0001"+ + 
"\u0000\u0000\u0000\u049e\u049f\u0001\u0000\u0000\u0000\u049f\u049d\u0001"+ + "\u0000\u0000\u0000\u049f\u04a0\u0001\u0000\u0000\u0000\u04a0\u0143\u0001"+ + "\u0000\u0000\u0000\u04a1\u04a2\u00032\u0013\u0000\u04a2\u04a3\u0001\u0000"+ + "\u0000\u0000\u04a3\u04a4\u0006\u009c\t\u0000\u04a4\u0145\u0001\u0000\u0000"+ + "\u0000\u04a5\u04a6\u00034\u0014\u0000\u04a6\u04a7\u0001\u0000\u0000\u0000"+ + "\u04a7\u04a8\u0006\u009d\t\u0000\u04a8\u0147\u0001\u0000\u0000\u0000\u04a9"+ + "\u04aa\u00036\u0015\u0000\u04aa\u04ab\u0001\u0000\u0000\u0000\u04ab\u04ac"+ + "\u0006\u009e\t\u0000\u04ac\u0149\u0001\u0000\u0000\u0000:\u0000\u0001"+ + "\u0002\u0003\u0004\u0005\u0006\u0007\b\t\n\u000b\u01e3\u01ed\u01f1\u01f4"+ + "\u01fd\u01ff\u020a\u0233\u0238\u0241\u0248\u024d\u024f\u025a\u0262\u0265"+ + "\u0267\u026c\u0271\u0277\u027e\u0283\u0289\u028c\u0294\u0298\u031d\u0322"+ + "\u0327\u0329\u032f\u036e\u0373\u0392\u0396\u039b\u03a0\u03a5\u03a7\u03ab"+ + "\u03ad\u03fa\u03fe\u0403\u049d\u049f\u001a\u0005\u0002\u0000\u0005\u0004"+ + "\u0000\u0005\u0006\u0000\u0005\u0001\u0000\u0005\u0003\u0000\u0005\n\u0000"+ + "\u0005\b\u0000\u0005\u0005\u0000\u0005\t\u0000\u0000\u0001\u0000\u0007"+ + "A\u0000\u0005\u0000\u0000\u0007\u001a\u0000\u0004\u0000\u0000\u0007B\u0000"+ + "\u0007#\u0000\u0007!\u0000\u0007\u001b\u0000\u0007%\u0000\u0007N\u0000"+ + "\u0005\u000b\u0000\u0005\u0007\u0000\u0007D\u0000\u0007X\u0000\u0007W"+ + "\u0000\u0007C\u0000"; public static final ATN _ATN = new ATNDeserializer().deserialize(_serializedATN.toCharArray()); static { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.interp b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.interp index b4a8e60dd69aa..2b887065985d3 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.interp +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.interp @@ -282,4 +282,4 @@ 
enrichWithClause atn: -[4, 1, 110, 543, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2, 4, 7, 4, 2, 5, 7, 5, 2, 6, 7, 6, 2, 7, 7, 7, 2, 8, 7, 8, 2, 9, 7, 9, 2, 10, 7, 10, 2, 11, 7, 11, 2, 12, 7, 12, 2, 13, 7, 13, 2, 14, 7, 14, 2, 15, 7, 15, 2, 16, 7, 16, 2, 17, 7, 17, 2, 18, 7, 18, 2, 19, 7, 19, 2, 20, 7, 20, 2, 21, 7, 21, 2, 22, 7, 22, 2, 23, 7, 23, 2, 24, 7, 24, 2, 25, 7, 25, 2, 26, 7, 26, 2, 27, 7, 27, 2, 28, 7, 28, 2, 29, 7, 29, 2, 30, 7, 30, 2, 31, 7, 31, 2, 32, 7, 32, 2, 33, 7, 33, 2, 34, 7, 34, 2, 35, 7, 35, 2, 36, 7, 36, 2, 37, 7, 37, 2, 38, 7, 38, 2, 39, 7, 39, 2, 40, 7, 40, 2, 41, 7, 41, 2, 42, 7, 42, 2, 43, 7, 43, 2, 44, 7, 44, 2, 45, 7, 45, 2, 46, 7, 46, 2, 47, 7, 47, 2, 48, 7, 48, 2, 49, 7, 49, 2, 50, 7, 50, 2, 51, 7, 51, 2, 52, 7, 52, 2, 53, 7, 53, 1, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 5, 1, 118, 8, 1, 10, 1, 12, 1, 121, 9, 1, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 3, 2, 128, 8, 2, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 3, 3, 143, 8, 3, 1, 4, 1, 4, 1, 4, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 3, 5, 155, 8, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 5, 5, 162, 8, 5, 10, 5, 12, 5, 165, 9, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 3, 5, 172, 8, 5, 1, 5, 1, 5, 3, 5, 176, 8, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 5, 5, 184, 8, 5, 10, 5, 12, 5, 187, 9, 5, 1, 6, 1, 6, 3, 6, 191, 8, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 3, 6, 198, 8, 6, 1, 6, 1, 6, 1, 6, 3, 6, 203, 8, 6, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 3, 7, 210, 8, 7, 1, 8, 1, 8, 1, 8, 1, 8, 3, 8, 216, 8, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 5, 8, 224, 8, 8, 10, 8, 12, 8, 227, 9, 8, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 3, 9, 237, 8, 9, 1, 9, 1, 9, 1, 9, 5, 9, 242, 8, 9, 10, 9, 12, 9, 245, 9, 9, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 5, 10, 253, 8, 10, 10, 10, 12, 10, 256, 9, 10, 3, 10, 258, 8, 10, 1, 10, 1, 10, 1, 11, 1, 11, 1, 12, 1, 12, 1, 12, 1, 13, 1, 13, 1, 13, 5, 13, 270, 8, 13, 10, 13, 12, 13, 273, 9, 13, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 3, 14, 280, 8, 14, 
1, 15, 1, 15, 1, 15, 1, 15, 5, 15, 286, 8, 15, 10, 15, 12, 15, 289, 9, 15, 1, 15, 3, 15, 292, 8, 15, 1, 15, 3, 15, 295, 8, 15, 1, 16, 1, 16, 1, 17, 1, 17, 1, 17, 1, 17, 5, 17, 303, 8, 17, 10, 17, 12, 17, 306, 9, 17, 1, 18, 1, 18, 1, 18, 1, 18, 1, 19, 1, 19, 3, 19, 314, 8, 19, 1, 20, 1, 20, 1, 20, 1, 20, 5, 20, 320, 8, 20, 10, 20, 12, 20, 323, 9, 20, 1, 21, 1, 21, 1, 21, 1, 21, 1, 22, 1, 22, 1, 22, 1, 23, 1, 23, 3, 23, 334, 8, 23, 1, 23, 1, 23, 3, 23, 338, 8, 23, 1, 24, 1, 24, 1, 24, 1, 24, 3, 24, 344, 8, 24, 1, 25, 1, 25, 1, 25, 5, 25, 349, 8, 25, 10, 25, 12, 25, 352, 9, 25, 1, 26, 1, 26, 1, 26, 5, 26, 357, 8, 26, 10, 26, 12, 26, 360, 9, 26, 1, 27, 1, 27, 1, 28, 1, 28, 1, 29, 1, 29, 1, 29, 1, 29, 1, 29, 1, 29, 1, 29, 1, 29, 1, 29, 1, 29, 1, 29, 1, 29, 1, 29, 5, 29, 379, 8, 29, 10, 29, 12, 29, 382, 9, 29, 1, 29, 1, 29, 1, 29, 1, 29, 1, 29, 1, 29, 5, 29, 390, 8, 29, 10, 29, 12, 29, 393, 9, 29, 1, 29, 1, 29, 1, 29, 1, 29, 1, 29, 1, 29, 5, 29, 401, 8, 29, 10, 29, 12, 29, 404, 9, 29, 1, 29, 1, 29, 3, 29, 408, 8, 29, 1, 30, 1, 30, 1, 30, 1, 31, 1, 31, 1, 31, 1, 31, 5, 31, 417, 8, 31, 10, 31, 12, 31, 420, 9, 31, 1, 32, 1, 32, 3, 32, 424, 8, 32, 1, 32, 1, 32, 3, 32, 428, 8, 32, 1, 33, 1, 33, 1, 33, 1, 33, 5, 33, 434, 8, 33, 10, 33, 12, 33, 437, 9, 33, 1, 34, 1, 34, 1, 34, 1, 34, 5, 34, 443, 8, 34, 10, 34, 12, 34, 446, 9, 34, 1, 35, 1, 35, 1, 35, 1, 35, 5, 35, 452, 8, 35, 10, 35, 12, 35, 455, 9, 35, 1, 36, 1, 36, 1, 36, 1, 36, 1, 37, 1, 37, 1, 37, 1, 37, 3, 37, 465, 8, 37, 1, 38, 1, 38, 1, 38, 1, 38, 1, 39, 1, 39, 1, 39, 1, 40, 1, 40, 1, 40, 5, 40, 477, 8, 40, 10, 40, 12, 40, 480, 9, 40, 1, 41, 1, 41, 1, 41, 1, 41, 1, 42, 1, 42, 1, 43, 1, 43, 3, 43, 490, 8, 43, 1, 44, 3, 44, 493, 8, 44, 1, 44, 1, 44, 1, 45, 3, 45, 498, 8, 45, 1, 45, 1, 45, 1, 46, 1, 46, 1, 47, 1, 47, 1, 48, 1, 48, 1, 48, 1, 49, 1, 49, 1, 49, 1, 49, 1, 50, 1, 50, 1, 50, 1, 51, 1, 51, 1, 51, 1, 52, 1, 52, 1, 52, 1, 52, 3, 52, 523, 8, 52, 1, 52, 1, 52, 1, 52, 1, 52, 5, 52, 529, 8, 52, 10, 52, 12, 52, 532, 9, 
52, 3, 52, 534, 8, 52, 1, 53, 1, 53, 1, 53, 3, 53, 539, 8, 53, 1, 53, 1, 53, 1, 53, 0, 4, 2, 10, 16, 18, 54, 0, 2, 4, 6, 8, 10, 12, 14, 16, 18, 20, 22, 24, 26, 28, 30, 32, 34, 36, 38, 40, 42, 44, 46, 48, 50, 52, 54, 56, 58, 60, 62, 64, 66, 68, 70, 72, 74, 76, 78, 80, 82, 84, 86, 88, 90, 92, 94, 96, 98, 100, 102, 104, 106, 0, 8, 1, 0, 60, 61, 1, 0, 62, 64, 2, 0, 68, 68, 74, 74, 1, 0, 67, 68, 2, 0, 32, 32, 36, 36, 1, 0, 39, 40, 2, 0, 38, 38, 52, 52, 2, 0, 53, 53, 55, 59, 568, 0, 108, 1, 0, 0, 0, 2, 111, 1, 0, 0, 0, 4, 127, 1, 0, 0, 0, 6, 142, 1, 0, 0, 0, 8, 144, 1, 0, 0, 0, 10, 175, 1, 0, 0, 0, 12, 202, 1, 0, 0, 0, 14, 209, 1, 0, 0, 0, 16, 215, 1, 0, 0, 0, 18, 236, 1, 0, 0, 0, 20, 246, 1, 0, 0, 0, 22, 261, 1, 0, 0, 0, 24, 263, 1, 0, 0, 0, 26, 266, 1, 0, 0, 0, 28, 279, 1, 0, 0, 0, 30, 281, 1, 0, 0, 0, 32, 296, 1, 0, 0, 0, 34, 298, 1, 0, 0, 0, 36, 307, 1, 0, 0, 0, 38, 313, 1, 0, 0, 0, 40, 315, 1, 0, 0, 0, 42, 324, 1, 0, 0, 0, 44, 328, 1, 0, 0, 0, 46, 331, 1, 0, 0, 0, 48, 339, 1, 0, 0, 0, 50, 345, 1, 0, 0, 0, 52, 353, 1, 0, 0, 0, 54, 361, 1, 0, 0, 0, 56, 363, 1, 0, 0, 0, 58, 407, 1, 0, 0, 0, 60, 409, 1, 0, 0, 0, 62, 412, 1, 0, 0, 0, 64, 421, 1, 0, 0, 0, 66, 429, 1, 0, 0, 0, 68, 438, 1, 0, 0, 0, 70, 447, 1, 0, 0, 0, 72, 456, 1, 0, 0, 0, 74, 460, 1, 0, 0, 0, 76, 466, 1, 0, 0, 0, 78, 470, 1, 0, 0, 0, 80, 473, 1, 0, 0, 0, 82, 481, 1, 0, 0, 0, 84, 485, 1, 0, 0, 0, 86, 489, 1, 0, 0, 0, 88, 492, 1, 0, 0, 0, 90, 497, 1, 0, 0, 0, 92, 501, 1, 0, 0, 0, 94, 503, 1, 0, 0, 0, 96, 505, 1, 0, 0, 0, 98, 508, 1, 0, 0, 0, 100, 512, 1, 0, 0, 0, 102, 515, 1, 0, 0, 0, 104, 518, 1, 0, 0, 0, 106, 538, 1, 0, 0, 0, 108, 109, 3, 2, 1, 0, 109, 110, 5, 0, 0, 1, 110, 1, 1, 0, 0, 0, 111, 112, 6, 1, -1, 0, 112, 113, 3, 4, 2, 0, 113, 119, 1, 0, 0, 0, 114, 115, 10, 1, 0, 0, 115, 116, 5, 26, 0, 0, 116, 118, 3, 6, 3, 0, 117, 114, 1, 0, 0, 0, 118, 121, 1, 0, 0, 0, 119, 117, 1, 0, 0, 0, 119, 120, 1, 0, 0, 0, 120, 3, 1, 0, 0, 0, 121, 119, 1, 0, 0, 0, 122, 128, 3, 96, 48, 0, 123, 128, 3, 30, 15, 0, 124, 128, 
3, 24, 12, 0, 125, 128, 3, 100, 50, 0, 126, 128, 3, 102, 51, 0, 127, 122, 1, 0, 0, 0, 127, 123, 1, 0, 0, 0, 127, 124, 1, 0, 0, 0, 127, 125, 1, 0, 0, 0, 127, 126, 1, 0, 0, 0, 128, 5, 1, 0, 0, 0, 129, 143, 3, 44, 22, 0, 130, 143, 3, 48, 24, 0, 131, 143, 3, 60, 30, 0, 132, 143, 3, 66, 33, 0, 133, 143, 3, 62, 31, 0, 134, 143, 3, 46, 23, 0, 135, 143, 3, 8, 4, 0, 136, 143, 3, 68, 34, 0, 137, 143, 3, 70, 35, 0, 138, 143, 3, 74, 37, 0, 139, 143, 3, 76, 38, 0, 140, 143, 3, 104, 52, 0, 141, 143, 3, 78, 39, 0, 142, 129, 1, 0, 0, 0, 142, 130, 1, 0, 0, 0, 142, 131, 1, 0, 0, 0, 142, 132, 1, 0, 0, 0, 142, 133, 1, 0, 0, 0, 142, 134, 1, 0, 0, 0, 142, 135, 1, 0, 0, 0, 142, 136, 1, 0, 0, 0, 142, 137, 1, 0, 0, 0, 142, 138, 1, 0, 0, 0, 142, 139, 1, 0, 0, 0, 142, 140, 1, 0, 0, 0, 142, 141, 1, 0, 0, 0, 143, 7, 1, 0, 0, 0, 144, 145, 5, 18, 0, 0, 145, 146, 3, 10, 5, 0, 146, 9, 1, 0, 0, 0, 147, 148, 6, 5, -1, 0, 148, 149, 5, 45, 0, 0, 149, 176, 3, 10, 5, 7, 150, 176, 3, 14, 7, 0, 151, 176, 3, 12, 6, 0, 152, 154, 3, 14, 7, 0, 153, 155, 5, 45, 0, 0, 154, 153, 1, 0, 0, 0, 154, 155, 1, 0, 0, 0, 155, 156, 1, 0, 0, 0, 156, 157, 5, 42, 0, 0, 157, 158, 5, 41, 0, 0, 158, 163, 3, 14, 7, 0, 159, 160, 5, 35, 0, 0, 160, 162, 3, 14, 7, 0, 161, 159, 1, 0, 0, 0, 162, 165, 1, 0, 0, 0, 163, 161, 1, 0, 0, 0, 163, 164, 1, 0, 0, 0, 164, 166, 1, 0, 0, 0, 165, 163, 1, 0, 0, 0, 166, 167, 5, 51, 0, 0, 167, 176, 1, 0, 0, 0, 168, 169, 3, 14, 7, 0, 169, 171, 5, 43, 0, 0, 170, 172, 5, 45, 0, 0, 171, 170, 1, 0, 0, 0, 171, 172, 1, 0, 0, 0, 172, 173, 1, 0, 0, 0, 173, 174, 5, 46, 0, 0, 174, 176, 1, 0, 0, 0, 175, 147, 1, 0, 0, 0, 175, 150, 1, 0, 0, 0, 175, 151, 1, 0, 0, 0, 175, 152, 1, 0, 0, 0, 175, 168, 1, 0, 0, 0, 176, 185, 1, 0, 0, 0, 177, 178, 10, 4, 0, 0, 178, 179, 5, 31, 0, 0, 179, 184, 3, 10, 5, 5, 180, 181, 10, 3, 0, 0, 181, 182, 5, 48, 0, 0, 182, 184, 3, 10, 5, 4, 183, 177, 1, 0, 0, 0, 183, 180, 1, 0, 0, 0, 184, 187, 1, 0, 0, 0, 185, 183, 1, 0, 0, 0, 185, 186, 1, 0, 0, 0, 186, 11, 1, 0, 0, 0, 187, 185, 1, 0, 0, 0, 
188, 190, 3, 14, 7, 0, 189, 191, 5, 45, 0, 0, 190, 189, 1, 0, 0, 0, 190, 191, 1, 0, 0, 0, 191, 192, 1, 0, 0, 0, 192, 193, 5, 44, 0, 0, 193, 194, 3, 92, 46, 0, 194, 203, 1, 0, 0, 0, 195, 197, 3, 14, 7, 0, 196, 198, 5, 45, 0, 0, 197, 196, 1, 0, 0, 0, 197, 198, 1, 0, 0, 0, 198, 199, 1, 0, 0, 0, 199, 200, 5, 50, 0, 0, 200, 201, 3, 92, 46, 0, 201, 203, 1, 0, 0, 0, 202, 188, 1, 0, 0, 0, 202, 195, 1, 0, 0, 0, 203, 13, 1, 0, 0, 0, 204, 210, 3, 16, 8, 0, 205, 206, 3, 16, 8, 0, 206, 207, 3, 94, 47, 0, 207, 208, 3, 16, 8, 0, 208, 210, 1, 0, 0, 0, 209, 204, 1, 0, 0, 0, 209, 205, 1, 0, 0, 0, 210, 15, 1, 0, 0, 0, 211, 212, 6, 8, -1, 0, 212, 216, 3, 18, 9, 0, 213, 214, 7, 0, 0, 0, 214, 216, 3, 16, 8, 3, 215, 211, 1, 0, 0, 0, 215, 213, 1, 0, 0, 0, 216, 225, 1, 0, 0, 0, 217, 218, 10, 2, 0, 0, 218, 219, 7, 1, 0, 0, 219, 224, 3, 16, 8, 3, 220, 221, 10, 1, 0, 0, 221, 222, 7, 0, 0, 0, 222, 224, 3, 16, 8, 2, 223, 217, 1, 0, 0, 0, 223, 220, 1, 0, 0, 0, 224, 227, 1, 0, 0, 0, 225, 223, 1, 0, 0, 0, 225, 226, 1, 0, 0, 0, 226, 17, 1, 0, 0, 0, 227, 225, 1, 0, 0, 0, 228, 229, 6, 9, -1, 0, 229, 237, 3, 58, 29, 0, 230, 237, 3, 50, 25, 0, 231, 237, 3, 20, 10, 0, 232, 233, 5, 41, 0, 0, 233, 234, 3, 10, 5, 0, 234, 235, 5, 51, 0, 0, 235, 237, 1, 0, 0, 0, 236, 228, 1, 0, 0, 0, 236, 230, 1, 0, 0, 0, 236, 231, 1, 0, 0, 0, 236, 232, 1, 0, 0, 0, 237, 243, 1, 0, 0, 0, 238, 239, 10, 1, 0, 0, 239, 240, 5, 34, 0, 0, 240, 242, 3, 22, 11, 0, 241, 238, 1, 0, 0, 0, 242, 245, 1, 0, 0, 0, 243, 241, 1, 0, 0, 0, 243, 244, 1, 0, 0, 0, 244, 19, 1, 0, 0, 0, 245, 243, 1, 0, 0, 0, 246, 247, 3, 54, 27, 0, 247, 257, 5, 41, 0, 0, 248, 258, 5, 62, 0, 0, 249, 254, 3, 10, 5, 0, 250, 251, 5, 35, 0, 0, 251, 253, 3, 10, 5, 0, 252, 250, 1, 0, 0, 0, 253, 256, 1, 0, 0, 0, 254, 252, 1, 0, 0, 0, 254, 255, 1, 0, 0, 0, 255, 258, 1, 0, 0, 0, 256, 254, 1, 0, 0, 0, 257, 248, 1, 0, 0, 0, 257, 249, 1, 0, 0, 0, 257, 258, 1, 0, 0, 0, 258, 259, 1, 0, 0, 0, 259, 260, 5, 51, 0, 0, 260, 21, 1, 0, 0, 0, 261, 262, 3, 54, 27, 0, 262, 23, 1, 0, 0, 0, 
263, 264, 5, 14, 0, 0, 264, 265, 3, 26, 13, 0, 265, 25, 1, 0, 0, 0, 266, 271, 3, 28, 14, 0, 267, 268, 5, 35, 0, 0, 268, 270, 3, 28, 14, 0, 269, 267, 1, 0, 0, 0, 270, 273, 1, 0, 0, 0, 271, 269, 1, 0, 0, 0, 271, 272, 1, 0, 0, 0, 272, 27, 1, 0, 0, 0, 273, 271, 1, 0, 0, 0, 274, 280, 3, 10, 5, 0, 275, 276, 3, 50, 25, 0, 276, 277, 5, 33, 0, 0, 277, 278, 3, 10, 5, 0, 278, 280, 1, 0, 0, 0, 279, 274, 1, 0, 0, 0, 279, 275, 1, 0, 0, 0, 280, 29, 1, 0, 0, 0, 281, 282, 5, 6, 0, 0, 282, 287, 3, 32, 16, 0, 283, 284, 5, 35, 0, 0, 284, 286, 3, 32, 16, 0, 285, 283, 1, 0, 0, 0, 286, 289, 1, 0, 0, 0, 287, 285, 1, 0, 0, 0, 287, 288, 1, 0, 0, 0, 288, 291, 1, 0, 0, 0, 289, 287, 1, 0, 0, 0, 290, 292, 3, 38, 19, 0, 291, 290, 1, 0, 0, 0, 291, 292, 1, 0, 0, 0, 292, 294, 1, 0, 0, 0, 293, 295, 3, 34, 17, 0, 294, 293, 1, 0, 0, 0, 294, 295, 1, 0, 0, 0, 295, 31, 1, 0, 0, 0, 296, 297, 7, 2, 0, 0, 297, 33, 1, 0, 0, 0, 298, 299, 5, 72, 0, 0, 299, 304, 3, 36, 18, 0, 300, 301, 5, 35, 0, 0, 301, 303, 3, 36, 18, 0, 302, 300, 1, 0, 0, 0, 303, 306, 1, 0, 0, 0, 304, 302, 1, 0, 0, 0, 304, 305, 1, 0, 0, 0, 305, 35, 1, 0, 0, 0, 306, 304, 1, 0, 0, 0, 307, 308, 3, 92, 46, 0, 308, 309, 5, 33, 0, 0, 309, 310, 3, 92, 46, 0, 310, 37, 1, 0, 0, 0, 311, 314, 3, 40, 20, 0, 312, 314, 3, 42, 21, 0, 313, 311, 1, 0, 0, 0, 313, 312, 1, 0, 0, 0, 314, 39, 1, 0, 0, 0, 315, 316, 5, 73, 0, 0, 316, 321, 3, 32, 16, 0, 317, 318, 5, 35, 0, 0, 318, 320, 3, 32, 16, 0, 319, 317, 1, 0, 0, 0, 320, 323, 1, 0, 0, 0, 321, 319, 1, 0, 0, 0, 321, 322, 1, 0, 0, 0, 322, 41, 1, 0, 0, 0, 323, 321, 1, 0, 0, 0, 324, 325, 5, 65, 0, 0, 325, 326, 3, 40, 20, 0, 326, 327, 5, 66, 0, 0, 327, 43, 1, 0, 0, 0, 328, 329, 5, 4, 0, 0, 329, 330, 3, 26, 13, 0, 330, 45, 1, 0, 0, 0, 331, 333, 5, 17, 0, 0, 332, 334, 3, 26, 13, 0, 333, 332, 1, 0, 0, 0, 333, 334, 1, 0, 0, 0, 334, 337, 1, 0, 0, 0, 335, 336, 5, 30, 0, 0, 336, 338, 3, 26, 13, 0, 337, 335, 1, 0, 0, 0, 337, 338, 1, 0, 0, 0, 338, 47, 1, 0, 0, 0, 339, 340, 5, 8, 0, 0, 340, 343, 3, 26, 13, 0, 341, 342, 5, 30, 
0, 0, 342, 344, 3, 26, 13, 0, 343, 341, 1, 0, 0, 0, 343, 344, 1, 0, 0, 0, 344, 49, 1, 0, 0, 0, 345, 350, 3, 54, 27, 0, 346, 347, 5, 37, 0, 0, 347, 349, 3, 54, 27, 0, 348, 346, 1, 0, 0, 0, 349, 352, 1, 0, 0, 0, 350, 348, 1, 0, 0, 0, 350, 351, 1, 0, 0, 0, 351, 51, 1, 0, 0, 0, 352, 350, 1, 0, 0, 0, 353, 358, 3, 56, 28, 0, 354, 355, 5, 37, 0, 0, 355, 357, 3, 56, 28, 0, 356, 354, 1, 0, 0, 0, 357, 360, 1, 0, 0, 0, 358, 356, 1, 0, 0, 0, 358, 359, 1, 0, 0, 0, 359, 53, 1, 0, 0, 0, 360, 358, 1, 0, 0, 0, 361, 362, 7, 3, 0, 0, 362, 55, 1, 0, 0, 0, 363, 364, 5, 78, 0, 0, 364, 57, 1, 0, 0, 0, 365, 408, 5, 46, 0, 0, 366, 367, 3, 90, 45, 0, 367, 368, 5, 67, 0, 0, 368, 408, 1, 0, 0, 0, 369, 408, 3, 88, 44, 0, 370, 408, 3, 90, 45, 0, 371, 408, 3, 84, 42, 0, 372, 408, 5, 49, 0, 0, 373, 408, 3, 92, 46, 0, 374, 375, 5, 65, 0, 0, 375, 380, 3, 86, 43, 0, 376, 377, 5, 35, 0, 0, 377, 379, 3, 86, 43, 0, 378, 376, 1, 0, 0, 0, 379, 382, 1, 0, 0, 0, 380, 378, 1, 0, 0, 0, 380, 381, 1, 0, 0, 0, 381, 383, 1, 0, 0, 0, 382, 380, 1, 0, 0, 0, 383, 384, 5, 66, 0, 0, 384, 408, 1, 0, 0, 0, 385, 386, 5, 65, 0, 0, 386, 391, 3, 84, 42, 0, 387, 388, 5, 35, 0, 0, 388, 390, 3, 84, 42, 0, 389, 387, 1, 0, 0, 0, 390, 393, 1, 0, 0, 0, 391, 389, 1, 0, 0, 0, 391, 392, 1, 0, 0, 0, 392, 394, 1, 0, 0, 0, 393, 391, 1, 0, 0, 0, 394, 395, 5, 66, 0, 0, 395, 408, 1, 0, 0, 0, 396, 397, 5, 65, 0, 0, 397, 402, 3, 92, 46, 0, 398, 399, 5, 35, 0, 0, 399, 401, 3, 92, 46, 0, 400, 398, 1, 0, 0, 0, 401, 404, 1, 0, 0, 0, 402, 400, 1, 0, 0, 0, 402, 403, 1, 0, 0, 0, 403, 405, 1, 0, 0, 0, 404, 402, 1, 0, 0, 0, 405, 406, 5, 66, 0, 0, 406, 408, 1, 0, 0, 0, 407, 365, 1, 0, 0, 0, 407, 366, 1, 0, 0, 0, 407, 369, 1, 0, 0, 0, 407, 370, 1, 0, 0, 0, 407, 371, 1, 0, 0, 0, 407, 372, 1, 0, 0, 0, 407, 373, 1, 0, 0, 0, 407, 374, 1, 0, 0, 0, 407, 385, 1, 0, 0, 0, 407, 396, 1, 0, 0, 0, 408, 59, 1, 0, 0, 0, 409, 410, 5, 10, 0, 0, 410, 411, 5, 28, 0, 0, 411, 61, 1, 0, 0, 0, 412, 413, 5, 16, 0, 0, 413, 418, 3, 64, 32, 0, 414, 415, 5, 35, 0, 0, 415, 417, 
3, 64, 32, 0, 416, 414, 1, 0, 0, 0, 417, 420, 1, 0, 0, 0, 418, 416, 1, 0, 0, 0, 418, 419, 1, 0, 0, 0, 419, 63, 1, 0, 0, 0, 420, 418, 1, 0, 0, 0, 421, 423, 3, 10, 5, 0, 422, 424, 7, 4, 0, 0, 423, 422, 1, 0, 0, 0, 423, 424, 1, 0, 0, 0, 424, 427, 1, 0, 0, 0, 425, 426, 5, 47, 0, 0, 426, 428, 7, 5, 0, 0, 427, 425, 1, 0, 0, 0, 427, 428, 1, 0, 0, 0, 428, 65, 1, 0, 0, 0, 429, 430, 5, 9, 0, 0, 430, 435, 3, 52, 26, 0, 431, 432, 5, 35, 0, 0, 432, 434, 3, 52, 26, 0, 433, 431, 1, 0, 0, 0, 434, 437, 1, 0, 0, 0, 435, 433, 1, 0, 0, 0, 435, 436, 1, 0, 0, 0, 436, 67, 1, 0, 0, 0, 437, 435, 1, 0, 0, 0, 438, 439, 5, 2, 0, 0, 439, 444, 3, 52, 26, 0, 440, 441, 5, 35, 0, 0, 441, 443, 3, 52, 26, 0, 442, 440, 1, 0, 0, 0, 443, 446, 1, 0, 0, 0, 444, 442, 1, 0, 0, 0, 444, 445, 1, 0, 0, 0, 445, 69, 1, 0, 0, 0, 446, 444, 1, 0, 0, 0, 447, 448, 5, 13, 0, 0, 448, 453, 3, 72, 36, 0, 449, 450, 5, 35, 0, 0, 450, 452, 3, 72, 36, 0, 451, 449, 1, 0, 0, 0, 452, 455, 1, 0, 0, 0, 453, 451, 1, 0, 0, 0, 453, 454, 1, 0, 0, 0, 454, 71, 1, 0, 0, 0, 455, 453, 1, 0, 0, 0, 456, 457, 3, 52, 26, 0, 457, 458, 5, 82, 0, 0, 458, 459, 3, 52, 26, 0, 459, 73, 1, 0, 0, 0, 460, 461, 5, 1, 0, 0, 461, 462, 3, 18, 9, 0, 462, 464, 3, 92, 46, 0, 463, 465, 3, 80, 40, 0, 464, 463, 1, 0, 0, 0, 464, 465, 1, 0, 0, 0, 465, 75, 1, 0, 0, 0, 466, 467, 5, 7, 0, 0, 467, 468, 3, 18, 9, 0, 468, 469, 3, 92, 46, 0, 469, 77, 1, 0, 0, 0, 470, 471, 5, 12, 0, 0, 471, 472, 3, 50, 25, 0, 472, 79, 1, 0, 0, 0, 473, 478, 3, 82, 41, 0, 474, 475, 5, 35, 0, 0, 475, 477, 3, 82, 41, 0, 476, 474, 1, 0, 0, 0, 477, 480, 1, 0, 0, 0, 478, 476, 1, 0, 0, 0, 478, 479, 1, 0, 0, 0, 479, 81, 1, 0, 0, 0, 480, 478, 1, 0, 0, 0, 481, 482, 3, 54, 27, 0, 482, 483, 5, 33, 0, 0, 483, 484, 3, 58, 29, 0, 484, 83, 1, 0, 0, 0, 485, 486, 7, 6, 0, 0, 486, 85, 1, 0, 0, 0, 487, 490, 3, 88, 44, 0, 488, 490, 3, 90, 45, 0, 489, 487, 1, 0, 0, 0, 489, 488, 1, 0, 0, 0, 490, 87, 1, 0, 0, 0, 491, 493, 7, 0, 0, 0, 492, 491, 1, 0, 0, 0, 492, 493, 1, 0, 0, 0, 493, 494, 1, 0, 0, 0, 494, 495, 5, 
29, 0, 0, 495, 89, 1, 0, 0, 0, 496, 498, 7, 0, 0, 0, 497, 496, 1, 0, 0, 0, 497, 498, 1, 0, 0, 0, 498, 499, 1, 0, 0, 0, 499, 500, 5, 28, 0, 0, 500, 91, 1, 0, 0, 0, 501, 502, 5, 27, 0, 0, 502, 93, 1, 0, 0, 0, 503, 504, 7, 7, 0, 0, 504, 95, 1, 0, 0, 0, 505, 506, 5, 5, 0, 0, 506, 507, 3, 98, 49, 0, 507, 97, 1, 0, 0, 0, 508, 509, 5, 65, 0, 0, 509, 510, 3, 2, 1, 0, 510, 511, 5, 66, 0, 0, 511, 99, 1, 0, 0, 0, 512, 513, 5, 15, 0, 0, 513, 514, 5, 98, 0, 0, 514, 101, 1, 0, 0, 0, 515, 516, 5, 11, 0, 0, 516, 517, 5, 102, 0, 0, 517, 103, 1, 0, 0, 0, 518, 519, 5, 3, 0, 0, 519, 522, 5, 88, 0, 0, 520, 521, 5, 86, 0, 0, 521, 523, 3, 52, 26, 0, 522, 520, 1, 0, 0, 0, 522, 523, 1, 0, 0, 0, 523, 533, 1, 0, 0, 0, 524, 525, 5, 87, 0, 0, 525, 530, 3, 106, 53, 0, 526, 527, 5, 35, 0, 0, 527, 529, 3, 106, 53, 0, 528, 526, 1, 0, 0, 0, 529, 532, 1, 0, 0, 0, 530, 528, 1, 0, 0, 0, 530, 531, 1, 0, 0, 0, 531, 534, 1, 0, 0, 0, 532, 530, 1, 0, 0, 0, 533, 524, 1, 0, 0, 0, 533, 534, 1, 0, 0, 0, 534, 105, 1, 0, 0, 0, 535, 536, 3, 52, 26, 0, 536, 537, 5, 33, 0, 0, 537, 539, 1, 0, 0, 0, 538, 535, 1, 0, 0, 0, 538, 539, 1, 0, 0, 0, 539, 540, 1, 0, 0, 0, 540, 541, 3, 52, 26, 0, 541, 107, 1, 0, 0, 0, 52, 119, 127, 142, 154, 163, 171, 175, 183, 185, 190, 197, 202, 209, 215, 223, 225, 236, 243, 254, 257, 271, 279, 287, 291, 294, 304, 313, 321, 333, 337, 343, 350, 358, 380, 391, 402, 407, 418, 423, 427, 435, 444, 453, 464, 478, 489, 492, 497, 522, 530, 533, 538] \ No newline at end of file +[4, 1, 110, 543, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2, 4, 7, 4, 2, 5, 7, 5, 2, 6, 7, 6, 2, 7, 7, 7, 2, 8, 7, 8, 2, 9, 7, 9, 2, 10, 7, 10, 2, 11, 7, 11, 2, 12, 7, 12, 2, 13, 7, 13, 2, 14, 7, 14, 2, 15, 7, 15, 2, 16, 7, 16, 2, 17, 7, 17, 2, 18, 7, 18, 2, 19, 7, 19, 2, 20, 7, 20, 2, 21, 7, 21, 2, 22, 7, 22, 2, 23, 7, 23, 2, 24, 7, 24, 2, 25, 7, 25, 2, 26, 7, 26, 2, 27, 7, 27, 2, 28, 7, 28, 2, 29, 7, 29, 2, 30, 7, 30, 2, 31, 7, 31, 2, 32, 7, 32, 2, 33, 7, 33, 2, 34, 7, 34, 2, 35, 7, 35, 2, 36, 7, 36, 2, 37, 7, 37, 
2, 38, 7, 38, 2, 39, 7, 39, 2, 40, 7, 40, 2, 41, 7, 41, 2, 42, 7, 42, 2, 43, 7, 43, 2, 44, 7, 44, 2, 45, 7, 45, 2, 46, 7, 46, 2, 47, 7, 47, 2, 48, 7, 48, 2, 49, 7, 49, 2, 50, 7, 50, 2, 51, 7, 51, 2, 52, 7, 52, 2, 53, 7, 53, 1, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 5, 1, 118, 8, 1, 10, 1, 12, 1, 121, 9, 1, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 3, 2, 128, 8, 2, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 3, 3, 143, 8, 3, 1, 4, 1, 4, 1, 4, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 3, 5, 155, 8, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 5, 5, 162, 8, 5, 10, 5, 12, 5, 165, 9, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 3, 5, 172, 8, 5, 1, 5, 1, 5, 3, 5, 176, 8, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 5, 5, 184, 8, 5, 10, 5, 12, 5, 187, 9, 5, 1, 6, 1, 6, 3, 6, 191, 8, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 3, 6, 198, 8, 6, 1, 6, 1, 6, 1, 6, 3, 6, 203, 8, 6, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 3, 7, 210, 8, 7, 1, 8, 1, 8, 1, 8, 1, 8, 3, 8, 216, 8, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 5, 8, 224, 8, 8, 10, 8, 12, 8, 227, 9, 8, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 3, 9, 237, 8, 9, 1, 9, 1, 9, 1, 9, 5, 9, 242, 8, 9, 10, 9, 12, 9, 245, 9, 9, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 5, 10, 253, 8, 10, 10, 10, 12, 10, 256, 9, 10, 3, 10, 258, 8, 10, 1, 10, 1, 10, 1, 11, 1, 11, 1, 12, 1, 12, 1, 12, 1, 13, 1, 13, 1, 13, 5, 13, 270, 8, 13, 10, 13, 12, 13, 273, 9, 13, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 3, 14, 280, 8, 14, 1, 15, 1, 15, 1, 15, 1, 15, 5, 15, 286, 8, 15, 10, 15, 12, 15, 289, 9, 15, 1, 15, 3, 15, 292, 8, 15, 1, 15, 3, 15, 295, 8, 15, 1, 16, 1, 16, 1, 17, 1, 17, 1, 17, 1, 17, 5, 17, 303, 8, 17, 10, 17, 12, 17, 306, 9, 17, 1, 18, 1, 18, 1, 18, 1, 18, 1, 19, 1, 19, 3, 19, 314, 8, 19, 1, 20, 1, 20, 1, 20, 1, 20, 5, 20, 320, 8, 20, 10, 20, 12, 20, 323, 9, 20, 1, 21, 1, 21, 1, 21, 1, 21, 1, 22, 1, 22, 1, 22, 1, 23, 1, 23, 3, 23, 334, 8, 23, 1, 23, 1, 23, 3, 23, 338, 8, 23, 1, 24, 1, 24, 1, 24, 1, 24, 3, 24, 344, 8, 24, 1, 25, 1, 25, 1, 25, 5, 25, 349, 8, 25, 
10, 25, 12, 25, 352, 9, 25, 1, 26, 1, 26, 1, 26, 5, 26, 357, 8, 26, 10, 26, 12, 26, 360, 9, 26, 1, 27, 1, 27, 1, 28, 1, 28, 1, 29, 1, 29, 1, 29, 1, 29, 1, 29, 1, 29, 1, 29, 1, 29, 1, 29, 1, 29, 1, 29, 1, 29, 1, 29, 5, 29, 379, 8, 29, 10, 29, 12, 29, 382, 9, 29, 1, 29, 1, 29, 1, 29, 1, 29, 1, 29, 1, 29, 5, 29, 390, 8, 29, 10, 29, 12, 29, 393, 9, 29, 1, 29, 1, 29, 1, 29, 1, 29, 1, 29, 1, 29, 5, 29, 401, 8, 29, 10, 29, 12, 29, 404, 9, 29, 1, 29, 1, 29, 3, 29, 408, 8, 29, 1, 30, 1, 30, 1, 30, 1, 31, 1, 31, 1, 31, 1, 31, 5, 31, 417, 8, 31, 10, 31, 12, 31, 420, 9, 31, 1, 32, 1, 32, 3, 32, 424, 8, 32, 1, 32, 1, 32, 3, 32, 428, 8, 32, 1, 33, 1, 33, 1, 33, 1, 33, 5, 33, 434, 8, 33, 10, 33, 12, 33, 437, 9, 33, 1, 34, 1, 34, 1, 34, 1, 34, 5, 34, 443, 8, 34, 10, 34, 12, 34, 446, 9, 34, 1, 35, 1, 35, 1, 35, 1, 35, 5, 35, 452, 8, 35, 10, 35, 12, 35, 455, 9, 35, 1, 36, 1, 36, 1, 36, 1, 36, 1, 37, 1, 37, 1, 37, 1, 37, 3, 37, 465, 8, 37, 1, 38, 1, 38, 1, 38, 1, 38, 1, 39, 1, 39, 1, 39, 1, 40, 1, 40, 1, 40, 5, 40, 477, 8, 40, 10, 40, 12, 40, 480, 9, 40, 1, 41, 1, 41, 1, 41, 1, 41, 1, 42, 1, 42, 1, 43, 1, 43, 3, 43, 490, 8, 43, 1, 44, 3, 44, 493, 8, 44, 1, 44, 1, 44, 1, 45, 3, 45, 498, 8, 45, 1, 45, 1, 45, 1, 46, 1, 46, 1, 47, 1, 47, 1, 48, 1, 48, 1, 48, 1, 49, 1, 49, 1, 49, 1, 49, 1, 50, 1, 50, 1, 50, 1, 51, 1, 51, 1, 51, 1, 52, 1, 52, 1, 52, 1, 52, 3, 52, 523, 8, 52, 1, 52, 1, 52, 1, 52, 1, 52, 5, 52, 529, 8, 52, 10, 52, 12, 52, 532, 9, 52, 3, 52, 534, 8, 52, 1, 53, 1, 53, 1, 53, 3, 53, 539, 8, 53, 1, 53, 1, 53, 1, 53, 0, 4, 2, 10, 16, 18, 54, 0, 2, 4, 6, 8, 10, 12, 14, 16, 18, 20, 22, 24, 26, 28, 30, 32, 34, 36, 38, 40, 42, 44, 46, 48, 50, 52, 54, 56, 58, 60, 62, 64, 66, 68, 70, 72, 74, 76, 78, 80, 82, 84, 86, 88, 90, 92, 94, 96, 98, 100, 102, 104, 106, 0, 7, 1, 0, 60, 61, 1, 0, 62, 64, 1, 0, 67, 68, 2, 0, 32, 32, 36, 36, 1, 0, 39, 40, 2, 0, 38, 38, 52, 52, 2, 0, 53, 53, 55, 59, 568, 0, 108, 1, 0, 0, 0, 2, 111, 1, 0, 0, 0, 4, 127, 1, 0, 0, 0, 6, 142, 1, 0, 0, 0, 8, 144, 1, 0, 0, 
0, 10, 175, 1, 0, 0, 0, 12, 202, 1, 0, 0, 0, 14, 209, 1, 0, 0, 0, 16, 215, 1, 0, 0, 0, 18, 236, 1, 0, 0, 0, 20, 246, 1, 0, 0, 0, 22, 261, 1, 0, 0, 0, 24, 263, 1, 0, 0, 0, 26, 266, 1, 0, 0, 0, 28, 279, 1, 0, 0, 0, 30, 281, 1, 0, 0, 0, 32, 296, 1, 0, 0, 0, 34, 298, 1, 0, 0, 0, 36, 307, 1, 0, 0, 0, 38, 313, 1, 0, 0, 0, 40, 315, 1, 0, 0, 0, 42, 324, 1, 0, 0, 0, 44, 328, 1, 0, 0, 0, 46, 331, 1, 0, 0, 0, 48, 339, 1, 0, 0, 0, 50, 345, 1, 0, 0, 0, 52, 353, 1, 0, 0, 0, 54, 361, 1, 0, 0, 0, 56, 363, 1, 0, 0, 0, 58, 407, 1, 0, 0, 0, 60, 409, 1, 0, 0, 0, 62, 412, 1, 0, 0, 0, 64, 421, 1, 0, 0, 0, 66, 429, 1, 0, 0, 0, 68, 438, 1, 0, 0, 0, 70, 447, 1, 0, 0, 0, 72, 456, 1, 0, 0, 0, 74, 460, 1, 0, 0, 0, 76, 466, 1, 0, 0, 0, 78, 470, 1, 0, 0, 0, 80, 473, 1, 0, 0, 0, 82, 481, 1, 0, 0, 0, 84, 485, 1, 0, 0, 0, 86, 489, 1, 0, 0, 0, 88, 492, 1, 0, 0, 0, 90, 497, 1, 0, 0, 0, 92, 501, 1, 0, 0, 0, 94, 503, 1, 0, 0, 0, 96, 505, 1, 0, 0, 0, 98, 508, 1, 0, 0, 0, 100, 512, 1, 0, 0, 0, 102, 515, 1, 0, 0, 0, 104, 518, 1, 0, 0, 0, 106, 538, 1, 0, 0, 0, 108, 109, 3, 2, 1, 0, 109, 110, 5, 0, 0, 1, 110, 1, 1, 0, 0, 0, 111, 112, 6, 1, -1, 0, 112, 113, 3, 4, 2, 0, 113, 119, 1, 0, 0, 0, 114, 115, 10, 1, 0, 0, 115, 116, 5, 26, 0, 0, 116, 118, 3, 6, 3, 0, 117, 114, 1, 0, 0, 0, 118, 121, 1, 0, 0, 0, 119, 117, 1, 0, 0, 0, 119, 120, 1, 0, 0, 0, 120, 3, 1, 0, 0, 0, 121, 119, 1, 0, 0, 0, 122, 128, 3, 96, 48, 0, 123, 128, 3, 30, 15, 0, 124, 128, 3, 24, 12, 0, 125, 128, 3, 100, 50, 0, 126, 128, 3, 102, 51, 0, 127, 122, 1, 0, 0, 0, 127, 123, 1, 0, 0, 0, 127, 124, 1, 0, 0, 0, 127, 125, 1, 0, 0, 0, 127, 126, 1, 0, 0, 0, 128, 5, 1, 0, 0, 0, 129, 143, 3, 44, 22, 0, 130, 143, 3, 48, 24, 0, 131, 143, 3, 60, 30, 0, 132, 143, 3, 66, 33, 0, 133, 143, 3, 62, 31, 0, 134, 143, 3, 46, 23, 0, 135, 143, 3, 8, 4, 0, 136, 143, 3, 68, 34, 0, 137, 143, 3, 70, 35, 0, 138, 143, 3, 74, 37, 0, 139, 143, 3, 76, 38, 0, 140, 143, 3, 104, 52, 0, 141, 143, 3, 78, 39, 0, 142, 129, 1, 0, 0, 0, 142, 130, 1, 0, 0, 0, 142, 131, 1, 0, 0, 0, 142, 
132, 1, 0, 0, 0, 142, 133, 1, 0, 0, 0, 142, 134, 1, 0, 0, 0, 142, 135, 1, 0, 0, 0, 142, 136, 1, 0, 0, 0, 142, 137, 1, 0, 0, 0, 142, 138, 1, 0, 0, 0, 142, 139, 1, 0, 0, 0, 142, 140, 1, 0, 0, 0, 142, 141, 1, 0, 0, 0, 143, 7, 1, 0, 0, 0, 144, 145, 5, 18, 0, 0, 145, 146, 3, 10, 5, 0, 146, 9, 1, 0, 0, 0, 147, 148, 6, 5, -1, 0, 148, 149, 5, 45, 0, 0, 149, 176, 3, 10, 5, 7, 150, 176, 3, 14, 7, 0, 151, 176, 3, 12, 6, 0, 152, 154, 3, 14, 7, 0, 153, 155, 5, 45, 0, 0, 154, 153, 1, 0, 0, 0, 154, 155, 1, 0, 0, 0, 155, 156, 1, 0, 0, 0, 156, 157, 5, 42, 0, 0, 157, 158, 5, 41, 0, 0, 158, 163, 3, 14, 7, 0, 159, 160, 5, 35, 0, 0, 160, 162, 3, 14, 7, 0, 161, 159, 1, 0, 0, 0, 162, 165, 1, 0, 0, 0, 163, 161, 1, 0, 0, 0, 163, 164, 1, 0, 0, 0, 164, 166, 1, 0, 0, 0, 165, 163, 1, 0, 0, 0, 166, 167, 5, 51, 0, 0, 167, 176, 1, 0, 0, 0, 168, 169, 3, 14, 7, 0, 169, 171, 5, 43, 0, 0, 170, 172, 5, 45, 0, 0, 171, 170, 1, 0, 0, 0, 171, 172, 1, 0, 0, 0, 172, 173, 1, 0, 0, 0, 173, 174, 5, 46, 0, 0, 174, 176, 1, 0, 0, 0, 175, 147, 1, 0, 0, 0, 175, 150, 1, 0, 0, 0, 175, 151, 1, 0, 0, 0, 175, 152, 1, 0, 0, 0, 175, 168, 1, 0, 0, 0, 176, 185, 1, 0, 0, 0, 177, 178, 10, 4, 0, 0, 178, 179, 5, 31, 0, 0, 179, 184, 3, 10, 5, 5, 180, 181, 10, 3, 0, 0, 181, 182, 5, 48, 0, 0, 182, 184, 3, 10, 5, 4, 183, 177, 1, 0, 0, 0, 183, 180, 1, 0, 0, 0, 184, 187, 1, 0, 0, 0, 185, 183, 1, 0, 0, 0, 185, 186, 1, 0, 0, 0, 186, 11, 1, 0, 0, 0, 187, 185, 1, 0, 0, 0, 188, 190, 3, 14, 7, 0, 189, 191, 5, 45, 0, 0, 190, 189, 1, 0, 0, 0, 190, 191, 1, 0, 0, 0, 191, 192, 1, 0, 0, 0, 192, 193, 5, 44, 0, 0, 193, 194, 3, 92, 46, 0, 194, 203, 1, 0, 0, 0, 195, 197, 3, 14, 7, 0, 196, 198, 5, 45, 0, 0, 197, 196, 1, 0, 0, 0, 197, 198, 1, 0, 0, 0, 198, 199, 1, 0, 0, 0, 199, 200, 5, 50, 0, 0, 200, 201, 3, 92, 46, 0, 201, 203, 1, 0, 0, 0, 202, 188, 1, 0, 0, 0, 202, 195, 1, 0, 0, 0, 203, 13, 1, 0, 0, 0, 204, 210, 3, 16, 8, 0, 205, 206, 3, 16, 8, 0, 206, 207, 3, 94, 47, 0, 207, 208, 3, 16, 8, 0, 208, 210, 1, 0, 0, 0, 209, 204, 1, 0, 0, 0, 209, 205, 1, 
0, 0, 0, 210, 15, 1, 0, 0, 0, 211, 212, 6, 8, -1, 0, 212, 216, 3, 18, 9, 0, 213, 214, 7, 0, 0, 0, 214, 216, 3, 16, 8, 3, 215, 211, 1, 0, 0, 0, 215, 213, 1, 0, 0, 0, 216, 225, 1, 0, 0, 0, 217, 218, 10, 2, 0, 0, 218, 219, 7, 1, 0, 0, 219, 224, 3, 16, 8, 3, 220, 221, 10, 1, 0, 0, 221, 222, 7, 0, 0, 0, 222, 224, 3, 16, 8, 2, 223, 217, 1, 0, 0, 0, 223, 220, 1, 0, 0, 0, 224, 227, 1, 0, 0, 0, 225, 223, 1, 0, 0, 0, 225, 226, 1, 0, 0, 0, 226, 17, 1, 0, 0, 0, 227, 225, 1, 0, 0, 0, 228, 229, 6, 9, -1, 0, 229, 237, 3, 58, 29, 0, 230, 237, 3, 50, 25, 0, 231, 237, 3, 20, 10, 0, 232, 233, 5, 41, 0, 0, 233, 234, 3, 10, 5, 0, 234, 235, 5, 51, 0, 0, 235, 237, 1, 0, 0, 0, 236, 228, 1, 0, 0, 0, 236, 230, 1, 0, 0, 0, 236, 231, 1, 0, 0, 0, 236, 232, 1, 0, 0, 0, 237, 243, 1, 0, 0, 0, 238, 239, 10, 1, 0, 0, 239, 240, 5, 34, 0, 0, 240, 242, 3, 22, 11, 0, 241, 238, 1, 0, 0, 0, 242, 245, 1, 0, 0, 0, 243, 241, 1, 0, 0, 0, 243, 244, 1, 0, 0, 0, 244, 19, 1, 0, 0, 0, 245, 243, 1, 0, 0, 0, 246, 247, 3, 54, 27, 0, 247, 257, 5, 41, 0, 0, 248, 258, 5, 62, 0, 0, 249, 254, 3, 10, 5, 0, 250, 251, 5, 35, 0, 0, 251, 253, 3, 10, 5, 0, 252, 250, 1, 0, 0, 0, 253, 256, 1, 0, 0, 0, 254, 252, 1, 0, 0, 0, 254, 255, 1, 0, 0, 0, 255, 258, 1, 0, 0, 0, 256, 254, 1, 0, 0, 0, 257, 248, 1, 0, 0, 0, 257, 249, 1, 0, 0, 0, 257, 258, 1, 0, 0, 0, 258, 259, 1, 0, 0, 0, 259, 260, 5, 51, 0, 0, 260, 21, 1, 0, 0, 0, 261, 262, 3, 54, 27, 0, 262, 23, 1, 0, 0, 0, 263, 264, 5, 14, 0, 0, 264, 265, 3, 26, 13, 0, 265, 25, 1, 0, 0, 0, 266, 271, 3, 28, 14, 0, 267, 268, 5, 35, 0, 0, 268, 270, 3, 28, 14, 0, 269, 267, 1, 0, 0, 0, 270, 273, 1, 0, 0, 0, 271, 269, 1, 0, 0, 0, 271, 272, 1, 0, 0, 0, 272, 27, 1, 0, 0, 0, 273, 271, 1, 0, 0, 0, 274, 280, 3, 10, 5, 0, 275, 276, 3, 50, 25, 0, 276, 277, 5, 33, 0, 0, 277, 278, 3, 10, 5, 0, 278, 280, 1, 0, 0, 0, 279, 274, 1, 0, 0, 0, 279, 275, 1, 0, 0, 0, 280, 29, 1, 0, 0, 0, 281, 282, 5, 6, 0, 0, 282, 287, 3, 32, 16, 0, 283, 284, 5, 35, 0, 0, 284, 286, 3, 32, 16, 0, 285, 283, 1, 0, 0, 0, 286, 289, 1, 
0, 0, 0, 287, 285, 1, 0, 0, 0, 287, 288, 1, 0, 0, 0, 288, 291, 1, 0, 0, 0, 289, 287, 1, 0, 0, 0, 290, 292, 3, 38, 19, 0, 291, 290, 1, 0, 0, 0, 291, 292, 1, 0, 0, 0, 292, 294, 1, 0, 0, 0, 293, 295, 3, 34, 17, 0, 294, 293, 1, 0, 0, 0, 294, 295, 1, 0, 0, 0, 295, 31, 1, 0, 0, 0, 296, 297, 5, 74, 0, 0, 297, 33, 1, 0, 0, 0, 298, 299, 5, 72, 0, 0, 299, 304, 3, 36, 18, 0, 300, 301, 5, 35, 0, 0, 301, 303, 3, 36, 18, 0, 302, 300, 1, 0, 0, 0, 303, 306, 1, 0, 0, 0, 304, 302, 1, 0, 0, 0, 304, 305, 1, 0, 0, 0, 305, 35, 1, 0, 0, 0, 306, 304, 1, 0, 0, 0, 307, 308, 3, 92, 46, 0, 308, 309, 5, 33, 0, 0, 309, 310, 3, 92, 46, 0, 310, 37, 1, 0, 0, 0, 311, 314, 3, 40, 20, 0, 312, 314, 3, 42, 21, 0, 313, 311, 1, 0, 0, 0, 313, 312, 1, 0, 0, 0, 314, 39, 1, 0, 0, 0, 315, 316, 5, 73, 0, 0, 316, 321, 3, 32, 16, 0, 317, 318, 5, 35, 0, 0, 318, 320, 3, 32, 16, 0, 319, 317, 1, 0, 0, 0, 320, 323, 1, 0, 0, 0, 321, 319, 1, 0, 0, 0, 321, 322, 1, 0, 0, 0, 322, 41, 1, 0, 0, 0, 323, 321, 1, 0, 0, 0, 324, 325, 5, 65, 0, 0, 325, 326, 3, 40, 20, 0, 326, 327, 5, 66, 0, 0, 327, 43, 1, 0, 0, 0, 328, 329, 5, 4, 0, 0, 329, 330, 3, 26, 13, 0, 330, 45, 1, 0, 0, 0, 331, 333, 5, 17, 0, 0, 332, 334, 3, 26, 13, 0, 333, 332, 1, 0, 0, 0, 333, 334, 1, 0, 0, 0, 334, 337, 1, 0, 0, 0, 335, 336, 5, 30, 0, 0, 336, 338, 3, 26, 13, 0, 337, 335, 1, 0, 0, 0, 337, 338, 1, 0, 0, 0, 338, 47, 1, 0, 0, 0, 339, 340, 5, 8, 0, 0, 340, 343, 3, 26, 13, 0, 341, 342, 5, 30, 0, 0, 342, 344, 3, 26, 13, 0, 343, 341, 1, 0, 0, 0, 343, 344, 1, 0, 0, 0, 344, 49, 1, 0, 0, 0, 345, 350, 3, 54, 27, 0, 346, 347, 5, 37, 0, 0, 347, 349, 3, 54, 27, 0, 348, 346, 1, 0, 0, 0, 349, 352, 1, 0, 0, 0, 350, 348, 1, 0, 0, 0, 350, 351, 1, 0, 0, 0, 351, 51, 1, 0, 0, 0, 352, 350, 1, 0, 0, 0, 353, 358, 3, 56, 28, 0, 354, 355, 5, 37, 0, 0, 355, 357, 3, 56, 28, 0, 356, 354, 1, 0, 0, 0, 357, 360, 1, 0, 0, 0, 358, 356, 1, 0, 0, 0, 358, 359, 1, 0, 0, 0, 359, 53, 1, 0, 0, 0, 360, 358, 1, 0, 0, 0, 361, 362, 7, 2, 0, 0, 362, 55, 1, 0, 0, 0, 363, 364, 5, 78, 0, 0, 364, 57, 1, 
0, 0, 0, 365, 408, 5, 46, 0, 0, 366, 367, 3, 90, 45, 0, 367, 368, 5, 67, 0, 0, 368, 408, 1, 0, 0, 0, 369, 408, 3, 88, 44, 0, 370, 408, 3, 90, 45, 0, 371, 408, 3, 84, 42, 0, 372, 408, 5, 49, 0, 0, 373, 408, 3, 92, 46, 0, 374, 375, 5, 65, 0, 0, 375, 380, 3, 86, 43, 0, 376, 377, 5, 35, 0, 0, 377, 379, 3, 86, 43, 0, 378, 376, 1, 0, 0, 0, 379, 382, 1, 0, 0, 0, 380, 378, 1, 0, 0, 0, 380, 381, 1, 0, 0, 0, 381, 383, 1, 0, 0, 0, 382, 380, 1, 0, 0, 0, 383, 384, 5, 66, 0, 0, 384, 408, 1, 0, 0, 0, 385, 386, 5, 65, 0, 0, 386, 391, 3, 84, 42, 0, 387, 388, 5, 35, 0, 0, 388, 390, 3, 84, 42, 0, 389, 387, 1, 0, 0, 0, 390, 393, 1, 0, 0, 0, 391, 389, 1, 0, 0, 0, 391, 392, 1, 0, 0, 0, 392, 394, 1, 0, 0, 0, 393, 391, 1, 0, 0, 0, 394, 395, 5, 66, 0, 0, 395, 408, 1, 0, 0, 0, 396, 397, 5, 65, 0, 0, 397, 402, 3, 92, 46, 0, 398, 399, 5, 35, 0, 0, 399, 401, 3, 92, 46, 0, 400, 398, 1, 0, 0, 0, 401, 404, 1, 0, 0, 0, 402, 400, 1, 0, 0, 0, 402, 403, 1, 0, 0, 0, 403, 405, 1, 0, 0, 0, 404, 402, 1, 0, 0, 0, 405, 406, 5, 66, 0, 0, 406, 408, 1, 0, 0, 0, 407, 365, 1, 0, 0, 0, 407, 366, 1, 0, 0, 0, 407, 369, 1, 0, 0, 0, 407, 370, 1, 0, 0, 0, 407, 371, 1, 0, 0, 0, 407, 372, 1, 0, 0, 0, 407, 373, 1, 0, 0, 0, 407, 374, 1, 0, 0, 0, 407, 385, 1, 0, 0, 0, 407, 396, 1, 0, 0, 0, 408, 59, 1, 0, 0, 0, 409, 410, 5, 10, 0, 0, 410, 411, 5, 28, 0, 0, 411, 61, 1, 0, 0, 0, 412, 413, 5, 16, 0, 0, 413, 418, 3, 64, 32, 0, 414, 415, 5, 35, 0, 0, 415, 417, 3, 64, 32, 0, 416, 414, 1, 0, 0, 0, 417, 420, 1, 0, 0, 0, 418, 416, 1, 0, 0, 0, 418, 419, 1, 0, 0, 0, 419, 63, 1, 0, 0, 0, 420, 418, 1, 0, 0, 0, 421, 423, 3, 10, 5, 0, 422, 424, 7, 3, 0, 0, 423, 422, 1, 0, 0, 0, 423, 424, 1, 0, 0, 0, 424, 427, 1, 0, 0, 0, 425, 426, 5, 47, 0, 0, 426, 428, 7, 4, 0, 0, 427, 425, 1, 0, 0, 0, 427, 428, 1, 0, 0, 0, 428, 65, 1, 0, 0, 0, 429, 430, 5, 9, 0, 0, 430, 435, 3, 52, 26, 0, 431, 432, 5, 35, 0, 0, 432, 434, 3, 52, 26, 0, 433, 431, 1, 0, 0, 0, 434, 437, 1, 0, 0, 0, 435, 433, 1, 0, 0, 0, 435, 436, 1, 0, 0, 0, 436, 67, 1, 0, 0, 0, 437, 435, 
1, 0, 0, 0, 438, 439, 5, 2, 0, 0, 439, 444, 3, 52, 26, 0, 440, 441, 5, 35, 0, 0, 441, 443, 3, 52, 26, 0, 442, 440, 1, 0, 0, 0, 443, 446, 1, 0, 0, 0, 444, 442, 1, 0, 0, 0, 444, 445, 1, 0, 0, 0, 445, 69, 1, 0, 0, 0, 446, 444, 1, 0, 0, 0, 447, 448, 5, 13, 0, 0, 448, 453, 3, 72, 36, 0, 449, 450, 5, 35, 0, 0, 450, 452, 3, 72, 36, 0, 451, 449, 1, 0, 0, 0, 452, 455, 1, 0, 0, 0, 453, 451, 1, 0, 0, 0, 453, 454, 1, 0, 0, 0, 454, 71, 1, 0, 0, 0, 455, 453, 1, 0, 0, 0, 456, 457, 3, 52, 26, 0, 457, 458, 5, 82, 0, 0, 458, 459, 3, 52, 26, 0, 459, 73, 1, 0, 0, 0, 460, 461, 5, 1, 0, 0, 461, 462, 3, 18, 9, 0, 462, 464, 3, 92, 46, 0, 463, 465, 3, 80, 40, 0, 464, 463, 1, 0, 0, 0, 464, 465, 1, 0, 0, 0, 465, 75, 1, 0, 0, 0, 466, 467, 5, 7, 0, 0, 467, 468, 3, 18, 9, 0, 468, 469, 3, 92, 46, 0, 469, 77, 1, 0, 0, 0, 470, 471, 5, 12, 0, 0, 471, 472, 3, 50, 25, 0, 472, 79, 1, 0, 0, 0, 473, 478, 3, 82, 41, 0, 474, 475, 5, 35, 0, 0, 475, 477, 3, 82, 41, 0, 476, 474, 1, 0, 0, 0, 477, 480, 1, 0, 0, 0, 478, 476, 1, 0, 0, 0, 478, 479, 1, 0, 0, 0, 479, 81, 1, 0, 0, 0, 480, 478, 1, 0, 0, 0, 481, 482, 3, 54, 27, 0, 482, 483, 5, 33, 0, 0, 483, 484, 3, 58, 29, 0, 484, 83, 1, 0, 0, 0, 485, 486, 7, 5, 0, 0, 486, 85, 1, 0, 0, 0, 487, 490, 3, 88, 44, 0, 488, 490, 3, 90, 45, 0, 489, 487, 1, 0, 0, 0, 489, 488, 1, 0, 0, 0, 490, 87, 1, 0, 0, 0, 491, 493, 7, 0, 0, 0, 492, 491, 1, 0, 0, 0, 492, 493, 1, 0, 0, 0, 493, 494, 1, 0, 0, 0, 494, 495, 5, 29, 0, 0, 495, 89, 1, 0, 0, 0, 496, 498, 7, 0, 0, 0, 497, 496, 1, 0, 0, 0, 497, 498, 1, 0, 0, 0, 498, 499, 1, 0, 0, 0, 499, 500, 5, 28, 0, 0, 500, 91, 1, 0, 0, 0, 501, 502, 5, 27, 0, 0, 502, 93, 1, 0, 0, 0, 503, 504, 7, 6, 0, 0, 504, 95, 1, 0, 0, 0, 505, 506, 5, 5, 0, 0, 506, 507, 3, 98, 49, 0, 507, 97, 1, 0, 0, 0, 508, 509, 5, 65, 0, 0, 509, 510, 3, 2, 1, 0, 510, 511, 5, 66, 0, 0, 511, 99, 1, 0, 0, 0, 512, 513, 5, 15, 0, 0, 513, 514, 5, 98, 0, 0, 514, 101, 1, 0, 0, 0, 515, 516, 5, 11, 0, 0, 516, 517, 5, 102, 0, 0, 517, 103, 1, 0, 0, 0, 518, 519, 5, 3, 0, 0, 519, 522, 5, 
88, 0, 0, 520, 521, 5, 86, 0, 0, 521, 523, 3, 52, 26, 0, 522, 520, 1, 0, 0, 0, 522, 523, 1, 0, 0, 0, 523, 533, 1, 0, 0, 0, 524, 525, 5, 87, 0, 0, 525, 530, 3, 106, 53, 0, 526, 527, 5, 35, 0, 0, 527, 529, 3, 106, 53, 0, 528, 526, 1, 0, 0, 0, 529, 532, 1, 0, 0, 0, 530, 528, 1, 0, 0, 0, 530, 531, 1, 0, 0, 0, 531, 534, 1, 0, 0, 0, 532, 530, 1, 0, 0, 0, 533, 524, 1, 0, 0, 0, 533, 534, 1, 0, 0, 0, 534, 105, 1, 0, 0, 0, 535, 536, 3, 52, 26, 0, 536, 537, 5, 33, 0, 0, 537, 539, 1, 0, 0, 0, 538, 535, 1, 0, 0, 0, 538, 539, 1, 0, 0, 0, 539, 540, 1, 0, 0, 0, 540, 541, 3, 52, 26, 0, 541, 107, 1, 0, 0, 0, 52, 119, 127, 142, 154, 163, 171, 175, 183, 185, 190, 197, 202, 209, 215, 223, 225, 236, 243, 254, 257, 271, 279, 287, 291, 294, 304, 313, 321, 333, 337, 343, 350, 358, 380, 391, 402, 407, 418, 423, 427, 435, 444, 453, 464, 478, 489, 492, 497, 522, 530, 533, 538] \ No newline at end of file diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.java index 1f9c13c16cdd4..2f7f0468e455a 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.java @@ -2101,7 +2101,6 @@ public final FromCommandContext fromCommand() throws RecognitionException { @SuppressWarnings("CheckReturnValue") public static class FromIdentifierContext extends ParserRuleContext { public TerminalNode FROM_UNQUOTED_IDENTIFIER() { return getToken(EsqlBaseParser.FROM_UNQUOTED_IDENTIFIER, 0); } - public TerminalNode QUOTED_IDENTIFIER() { return getToken(EsqlBaseParser.QUOTED_IDENTIFIER, 0); } @SuppressWarnings("this-escape") public FromIdentifierContext(ParserRuleContext parent, int invokingState) { super(parent, invokingState); @@ -2125,20 +2124,11 @@ public T accept(ParseTreeVisitor visitor) { public final FromIdentifierContext fromIdentifier() throws 
RecognitionException { FromIdentifierContext _localctx = new FromIdentifierContext(_ctx, getState()); enterRule(_localctx, 32, RULE_fromIdentifier); - int _la; try { enterOuterAlt(_localctx, 1); { setState(296); - _la = _input.LA(1); - if ( !(_la==QUOTED_IDENTIFIER || _la==FROM_UNQUOTED_IDENTIFIER) ) { - _errHandler.recoverInline(this); - } - else { - if ( _input.LA(1)==Token.EOF ) matchedEOF = true; - _errHandler.reportMatch(this); - consume(); - } + match(FROM_UNQUOTED_IDENTIFIER); } } catch (RecognitionException re) { @@ -4971,32 +4961,32 @@ private boolean primaryExpression_sempred(PrimaryExpressionContext _localctx, in "\u00015\u00015\u00035\u021b\b5\u00015\u00015\u00015\u0000\u0004\u0002"+ "\n\u0010\u00126\u0000\u0002\u0004\u0006\b\n\f\u000e\u0010\u0012\u0014"+ "\u0016\u0018\u001a\u001c\u001e \"$&(*,.02468:<>@BDFHJLNPRTVXZ\\^`bdfh"+ - "j\u0000\b\u0001\u0000<=\u0001\u0000>@\u0002\u0000DDJJ\u0001\u0000CD\u0002"+ - "\u0000 $$\u0001\u0000\'(\u0002\u0000&&44\u0002\u0000557;\u0238\u0000"+ - "l\u0001\u0000\u0000\u0000\u0002o\u0001\u0000\u0000\u0000\u0004\u007f\u0001"+ - "\u0000\u0000\u0000\u0006\u008e\u0001\u0000\u0000\u0000\b\u0090\u0001\u0000"+ - "\u0000\u0000\n\u00af\u0001\u0000\u0000\u0000\f\u00ca\u0001\u0000\u0000"+ - "\u0000\u000e\u00d1\u0001\u0000\u0000\u0000\u0010\u00d7\u0001\u0000\u0000"+ - "\u0000\u0012\u00ec\u0001\u0000\u0000\u0000\u0014\u00f6\u0001\u0000\u0000"+ - "\u0000\u0016\u0105\u0001\u0000\u0000\u0000\u0018\u0107\u0001\u0000\u0000"+ - "\u0000\u001a\u010a\u0001\u0000\u0000\u0000\u001c\u0117\u0001\u0000\u0000"+ - "\u0000\u001e\u0119\u0001\u0000\u0000\u0000 \u0128\u0001\u0000\u0000\u0000"+ - "\"\u012a\u0001\u0000\u0000\u0000$\u0133\u0001\u0000\u0000\u0000&\u0139"+ - "\u0001\u0000\u0000\u0000(\u013b\u0001\u0000\u0000\u0000*\u0144\u0001\u0000"+ - "\u0000\u0000,\u0148\u0001\u0000\u0000\u0000.\u014b\u0001\u0000\u0000\u0000"+ - "0\u0153\u0001\u0000\u0000\u00002\u0159\u0001\u0000\u0000\u00004\u0161"+ - 
"\u0001\u0000\u0000\u00006\u0169\u0001\u0000\u0000\u00008\u016b\u0001\u0000"+ - "\u0000\u0000:\u0197\u0001\u0000\u0000\u0000<\u0199\u0001\u0000\u0000\u0000"+ - ">\u019c\u0001\u0000\u0000\u0000@\u01a5\u0001\u0000\u0000\u0000B\u01ad"+ - "\u0001\u0000\u0000\u0000D\u01b6\u0001\u0000\u0000\u0000F\u01bf\u0001\u0000"+ - "\u0000\u0000H\u01c8\u0001\u0000\u0000\u0000J\u01cc\u0001\u0000\u0000\u0000"+ - "L\u01d2\u0001\u0000\u0000\u0000N\u01d6\u0001\u0000\u0000\u0000P\u01d9"+ - "\u0001\u0000\u0000\u0000R\u01e1\u0001\u0000\u0000\u0000T\u01e5\u0001\u0000"+ - "\u0000\u0000V\u01e9\u0001\u0000\u0000\u0000X\u01ec\u0001\u0000\u0000\u0000"+ - "Z\u01f1\u0001\u0000\u0000\u0000\\\u01f5\u0001\u0000\u0000\u0000^\u01f7"+ - "\u0001\u0000\u0000\u0000`\u01f9\u0001\u0000\u0000\u0000b\u01fc\u0001\u0000"+ - "\u0000\u0000d\u0200\u0001\u0000\u0000\u0000f\u0203\u0001\u0000\u0000\u0000"+ - "h\u0206\u0001\u0000\u0000\u0000j\u021a\u0001\u0000\u0000\u0000lm\u0003"+ + "j\u0000\u0007\u0001\u0000<=\u0001\u0000>@\u0001\u0000CD\u0002\u0000 "+ + "$$\u0001\u0000\'(\u0002\u0000&&44\u0002\u0000557;\u0238\u0000l\u0001\u0000"+ + "\u0000\u0000\u0002o\u0001\u0000\u0000\u0000\u0004\u007f\u0001\u0000\u0000"+ + "\u0000\u0006\u008e\u0001\u0000\u0000\u0000\b\u0090\u0001\u0000\u0000\u0000"+ + "\n\u00af\u0001\u0000\u0000\u0000\f\u00ca\u0001\u0000\u0000\u0000\u000e"+ + "\u00d1\u0001\u0000\u0000\u0000\u0010\u00d7\u0001\u0000\u0000\u0000\u0012"+ + "\u00ec\u0001\u0000\u0000\u0000\u0014\u00f6\u0001\u0000\u0000\u0000\u0016"+ + "\u0105\u0001\u0000\u0000\u0000\u0018\u0107\u0001\u0000\u0000\u0000\u001a"+ + "\u010a\u0001\u0000\u0000\u0000\u001c\u0117\u0001\u0000\u0000\u0000\u001e"+ + "\u0119\u0001\u0000\u0000\u0000 \u0128\u0001\u0000\u0000\u0000\"\u012a"+ + "\u0001\u0000\u0000\u0000$\u0133\u0001\u0000\u0000\u0000&\u0139\u0001\u0000"+ + "\u0000\u0000(\u013b\u0001\u0000\u0000\u0000*\u0144\u0001\u0000\u0000\u0000"+ + ",\u0148\u0001\u0000\u0000\u0000.\u014b\u0001\u0000\u0000\u00000\u0153"+ + 
"\u0001\u0000\u0000\u00002\u0159\u0001\u0000\u0000\u00004\u0161\u0001\u0000"+ + "\u0000\u00006\u0169\u0001\u0000\u0000\u00008\u016b\u0001\u0000\u0000\u0000"+ + ":\u0197\u0001\u0000\u0000\u0000<\u0199\u0001\u0000\u0000\u0000>\u019c"+ + "\u0001\u0000\u0000\u0000@\u01a5\u0001\u0000\u0000\u0000B\u01ad\u0001\u0000"+ + "\u0000\u0000D\u01b6\u0001\u0000\u0000\u0000F\u01bf\u0001\u0000\u0000\u0000"+ + "H\u01c8\u0001\u0000\u0000\u0000J\u01cc\u0001\u0000\u0000\u0000L\u01d2"+ + "\u0001\u0000\u0000\u0000N\u01d6\u0001\u0000\u0000\u0000P\u01d9\u0001\u0000"+ + "\u0000\u0000R\u01e1\u0001\u0000\u0000\u0000T\u01e5\u0001\u0000\u0000\u0000"+ + "V\u01e9\u0001\u0000\u0000\u0000X\u01ec\u0001\u0000\u0000\u0000Z\u01f1"+ + "\u0001\u0000\u0000\u0000\\\u01f5\u0001\u0000\u0000\u0000^\u01f7\u0001"+ + "\u0000\u0000\u0000`\u01f9\u0001\u0000\u0000\u0000b\u01fc\u0001\u0000\u0000"+ + "\u0000d\u0200\u0001\u0000\u0000\u0000f\u0203\u0001\u0000\u0000\u0000h"+ + "\u0206\u0001\u0000\u0000\u0000j\u021a\u0001\u0000\u0000\u0000lm\u0003"+ "\u0002\u0001\u0000mn\u0005\u0000\u0000\u0001n\u0001\u0001\u0000\u0000"+ "\u0000op\u0006\u0001\uffff\uffff\u0000pq\u0003\u0004\u0002\u0000qw\u0001"+ "\u0000\u0000\u0000rs\n\u0001\u0000\u0000st\u0005\u001a\u0000\u0000tv\u0003"+ @@ -5105,42 +5095,42 @@ private boolean primaryExpression_sempred(PrimaryExpressionContext _localctx, in "\u0000\u0000\u0123\u0124\u0001\u0000\u0000\u0000\u0124\u0126\u0001\u0000"+ "\u0000\u0000\u0125\u0127\u0003\"\u0011\u0000\u0126\u0125\u0001\u0000\u0000"+ "\u0000\u0126\u0127\u0001\u0000\u0000\u0000\u0127\u001f\u0001\u0000\u0000"+ - "\u0000\u0128\u0129\u0007\u0002\u0000\u0000\u0129!\u0001\u0000\u0000\u0000"+ - "\u012a\u012b\u0005H\u0000\u0000\u012b\u0130\u0003$\u0012\u0000\u012c\u012d"+ - "\u0005#\u0000\u0000\u012d\u012f\u0003$\u0012\u0000\u012e\u012c\u0001\u0000"+ - "\u0000\u0000\u012f\u0132\u0001\u0000\u0000\u0000\u0130\u012e\u0001\u0000"+ - "\u0000\u0000\u0130\u0131\u0001\u0000\u0000\u0000\u0131#\u0001\u0000\u0000"+ - 
"\u0000\u0132\u0130\u0001\u0000\u0000\u0000\u0133\u0134\u0003\\.\u0000"+ - "\u0134\u0135\u0005!\u0000\u0000\u0135\u0136\u0003\\.\u0000\u0136%\u0001"+ - "\u0000\u0000\u0000\u0137\u013a\u0003(\u0014\u0000\u0138\u013a\u0003*\u0015"+ - "\u0000\u0139\u0137\u0001\u0000\u0000\u0000\u0139\u0138\u0001\u0000\u0000"+ - "\u0000\u013a\'\u0001\u0000\u0000\u0000\u013b\u013c\u0005I\u0000\u0000"+ - "\u013c\u0141\u0003 \u0010\u0000\u013d\u013e\u0005#\u0000\u0000\u013e\u0140"+ - "\u0003 \u0010\u0000\u013f\u013d\u0001\u0000\u0000\u0000\u0140\u0143\u0001"+ - "\u0000\u0000\u0000\u0141\u013f\u0001\u0000\u0000\u0000\u0141\u0142\u0001"+ - "\u0000\u0000\u0000\u0142)\u0001\u0000\u0000\u0000\u0143\u0141\u0001\u0000"+ - "\u0000\u0000\u0144\u0145\u0005A\u0000\u0000\u0145\u0146\u0003(\u0014\u0000"+ - "\u0146\u0147\u0005B\u0000\u0000\u0147+\u0001\u0000\u0000\u0000\u0148\u0149"+ - "\u0005\u0004\u0000\u0000\u0149\u014a\u0003\u001a\r\u0000\u014a-\u0001"+ - "\u0000\u0000\u0000\u014b\u014d\u0005\u0011\u0000\u0000\u014c\u014e\u0003"+ - "\u001a\r\u0000\u014d\u014c\u0001\u0000\u0000\u0000\u014d\u014e\u0001\u0000"+ - "\u0000\u0000\u014e\u0151\u0001\u0000\u0000\u0000\u014f\u0150\u0005\u001e"+ - "\u0000\u0000\u0150\u0152\u0003\u001a\r\u0000\u0151\u014f\u0001\u0000\u0000"+ - "\u0000\u0151\u0152\u0001\u0000\u0000\u0000\u0152/\u0001\u0000\u0000\u0000"+ - "\u0153\u0154\u0005\b\u0000\u0000\u0154\u0157\u0003\u001a\r\u0000\u0155"+ - "\u0156\u0005\u001e\u0000\u0000\u0156\u0158\u0003\u001a\r\u0000\u0157\u0155"+ - "\u0001\u0000\u0000\u0000\u0157\u0158\u0001\u0000\u0000\u0000\u01581\u0001"+ - "\u0000\u0000\u0000\u0159\u015e\u00036\u001b\u0000\u015a\u015b\u0005%\u0000"+ - "\u0000\u015b\u015d\u00036\u001b\u0000\u015c\u015a\u0001\u0000\u0000\u0000"+ - "\u015d\u0160\u0001\u0000\u0000\u0000\u015e\u015c\u0001\u0000\u0000\u0000"+ - "\u015e\u015f\u0001\u0000\u0000\u0000\u015f3\u0001\u0000\u0000\u0000\u0160"+ - "\u015e\u0001\u0000\u0000\u0000\u0161\u0166\u00038\u001c\u0000\u0162\u0163"+ - 
"\u0005%\u0000\u0000\u0163\u0165\u00038\u001c\u0000\u0164\u0162\u0001\u0000"+ - "\u0000\u0000\u0165\u0168\u0001\u0000\u0000\u0000\u0166\u0164\u0001\u0000"+ - "\u0000\u0000\u0166\u0167\u0001\u0000\u0000\u0000\u01675\u0001\u0000\u0000"+ - "\u0000\u0168\u0166\u0001\u0000\u0000\u0000\u0169\u016a\u0007\u0003\u0000"+ - "\u0000\u016a7\u0001\u0000\u0000\u0000\u016b\u016c\u0005N\u0000\u0000\u016c"+ - "9\u0001\u0000\u0000\u0000\u016d\u0198\u0005.\u0000\u0000\u016e\u016f\u0003"+ + "\u0000\u0128\u0129\u0005J\u0000\u0000\u0129!\u0001\u0000\u0000\u0000\u012a"+ + "\u012b\u0005H\u0000\u0000\u012b\u0130\u0003$\u0012\u0000\u012c\u012d\u0005"+ + "#\u0000\u0000\u012d\u012f\u0003$\u0012\u0000\u012e\u012c\u0001\u0000\u0000"+ + "\u0000\u012f\u0132\u0001\u0000\u0000\u0000\u0130\u012e\u0001\u0000\u0000"+ + "\u0000\u0130\u0131\u0001\u0000\u0000\u0000\u0131#\u0001\u0000\u0000\u0000"+ + "\u0132\u0130\u0001\u0000\u0000\u0000\u0133\u0134\u0003\\.\u0000\u0134"+ + "\u0135\u0005!\u0000\u0000\u0135\u0136\u0003\\.\u0000\u0136%\u0001\u0000"+ + "\u0000\u0000\u0137\u013a\u0003(\u0014\u0000\u0138\u013a\u0003*\u0015\u0000"+ + "\u0139\u0137\u0001\u0000\u0000\u0000\u0139\u0138\u0001\u0000\u0000\u0000"+ + "\u013a\'\u0001\u0000\u0000\u0000\u013b\u013c\u0005I\u0000\u0000\u013c"+ + "\u0141\u0003 \u0010\u0000\u013d\u013e\u0005#\u0000\u0000\u013e\u0140\u0003"+ + " \u0010\u0000\u013f\u013d\u0001\u0000\u0000\u0000\u0140\u0143\u0001\u0000"+ + "\u0000\u0000\u0141\u013f\u0001\u0000\u0000\u0000\u0141\u0142\u0001\u0000"+ + "\u0000\u0000\u0142)\u0001\u0000\u0000\u0000\u0143\u0141\u0001\u0000\u0000"+ + "\u0000\u0144\u0145\u0005A\u0000\u0000\u0145\u0146\u0003(\u0014\u0000\u0146"+ + "\u0147\u0005B\u0000\u0000\u0147+\u0001\u0000\u0000\u0000\u0148\u0149\u0005"+ + "\u0004\u0000\u0000\u0149\u014a\u0003\u001a\r\u0000\u014a-\u0001\u0000"+ + "\u0000\u0000\u014b\u014d\u0005\u0011\u0000\u0000\u014c\u014e\u0003\u001a"+ + "\r\u0000\u014d\u014c\u0001\u0000\u0000\u0000\u014d\u014e\u0001\u0000\u0000"+ + 
"\u0000\u014e\u0151\u0001\u0000\u0000\u0000\u014f\u0150\u0005\u001e\u0000"+ + "\u0000\u0150\u0152\u0003\u001a\r\u0000\u0151\u014f\u0001\u0000\u0000\u0000"+ + "\u0151\u0152\u0001\u0000\u0000\u0000\u0152/\u0001\u0000\u0000\u0000\u0153"+ + "\u0154\u0005\b\u0000\u0000\u0154\u0157\u0003\u001a\r\u0000\u0155\u0156"+ + "\u0005\u001e\u0000\u0000\u0156\u0158\u0003\u001a\r\u0000\u0157\u0155\u0001"+ + "\u0000\u0000\u0000\u0157\u0158\u0001\u0000\u0000\u0000\u01581\u0001\u0000"+ + "\u0000\u0000\u0159\u015e\u00036\u001b\u0000\u015a\u015b\u0005%\u0000\u0000"+ + "\u015b\u015d\u00036\u001b\u0000\u015c\u015a\u0001\u0000\u0000\u0000\u015d"+ + "\u0160\u0001\u0000\u0000\u0000\u015e\u015c\u0001\u0000\u0000\u0000\u015e"+ + "\u015f\u0001\u0000\u0000\u0000\u015f3\u0001\u0000\u0000\u0000\u0160\u015e"+ + "\u0001\u0000\u0000\u0000\u0161\u0166\u00038\u001c\u0000\u0162\u0163\u0005"+ + "%\u0000\u0000\u0163\u0165\u00038\u001c\u0000\u0164\u0162\u0001\u0000\u0000"+ + "\u0000\u0165\u0168\u0001\u0000\u0000\u0000\u0166\u0164\u0001\u0000\u0000"+ + "\u0000\u0166\u0167\u0001\u0000\u0000\u0000\u01675\u0001\u0000\u0000\u0000"+ + "\u0168\u0166\u0001\u0000\u0000\u0000\u0169\u016a\u0007\u0002\u0000\u0000"+ + "\u016a7\u0001\u0000\u0000\u0000\u016b\u016c\u0005N\u0000\u0000\u016c9"+ + "\u0001\u0000\u0000\u0000\u016d\u0198\u0005.\u0000\u0000\u016e\u016f\u0003"+ "Z-\u0000\u016f\u0170\u0005C\u0000\u0000\u0170\u0198\u0001\u0000\u0000"+ "\u0000\u0171\u0198\u0003X,\u0000\u0172\u0198\u0003Z-\u0000\u0173\u0198"+ "\u0003T*\u0000\u0174\u0198\u00051\u0000\u0000\u0175\u0198\u0003\\.\u0000"+ @@ -5172,10 +5162,10 @@ private boolean primaryExpression_sempred(PrimaryExpressionContext _localctx, in "@ \u0000\u01a0\u019e\u0001\u0000\u0000\u0000\u01a1\u01a4\u0001\u0000\u0000"+ "\u0000\u01a2\u01a0\u0001\u0000\u0000\u0000\u01a2\u01a3\u0001\u0000\u0000"+ "\u0000\u01a3?\u0001\u0000\u0000\u0000\u01a4\u01a2\u0001\u0000\u0000\u0000"+ - "\u01a5\u01a7\u0003\n\u0005\u0000\u01a6\u01a8\u0007\u0004\u0000\u0000\u01a7"+ + 
"\u01a5\u01a7\u0003\n\u0005\u0000\u01a6\u01a8\u0007\u0003\u0000\u0000\u01a7"+ "\u01a6\u0001\u0000\u0000\u0000\u01a7\u01a8\u0001\u0000\u0000\u0000\u01a8"+ "\u01ab\u0001\u0000\u0000\u0000\u01a9\u01aa\u0005/\u0000\u0000\u01aa\u01ac"+ - "\u0007\u0005\u0000\u0000\u01ab\u01a9\u0001\u0000\u0000\u0000\u01ab\u01ac"+ + "\u0007\u0004\u0000\u0000\u01ab\u01a9\u0001\u0000\u0000\u0000\u01ab\u01ac"+ "\u0001\u0000\u0000\u0000\u01acA\u0001\u0000\u0000\u0000\u01ad\u01ae\u0005"+ "\t\u0000\u0000\u01ae\u01b3\u00034\u001a\u0000\u01af\u01b0\u0005#\u0000"+ "\u0000\u01b0\u01b2\u00034\u001a\u0000\u01b1\u01af\u0001\u0000\u0000\u0000"+ @@ -5204,7 +5194,7 @@ private boolean primaryExpression_sempred(PrimaryExpressionContext _localctx, in "\u0000\u0000\u0000\u01de\u01df\u0001\u0000\u0000\u0000\u01dfQ\u0001\u0000"+ "\u0000\u0000\u01e0\u01de\u0001\u0000\u0000\u0000\u01e1\u01e2\u00036\u001b"+ "\u0000\u01e2\u01e3\u0005!\u0000\u0000\u01e3\u01e4\u0003:\u001d\u0000\u01e4"+ - "S\u0001\u0000\u0000\u0000\u01e5\u01e6\u0007\u0006\u0000\u0000\u01e6U\u0001"+ + "S\u0001\u0000\u0000\u0000\u01e5\u01e6\u0007\u0005\u0000\u0000\u01e6U\u0001"+ "\u0000\u0000\u0000\u01e7\u01ea\u0003X,\u0000\u01e8\u01ea\u0003Z-\u0000"+ "\u01e9\u01e7\u0001\u0000\u0000\u0000\u01e9\u01e8\u0001\u0000\u0000\u0000"+ "\u01eaW\u0001\u0000\u0000\u0000\u01eb\u01ed\u0007\u0000\u0000\u0000\u01ec"+ @@ -5214,7 +5204,7 @@ private boolean primaryExpression_sempred(PrimaryExpressionContext _localctx, in "\u0001\u0000\u0000\u0000\u01f1\u01f2\u0001\u0000\u0000\u0000\u01f2\u01f3"+ "\u0001\u0000\u0000\u0000\u01f3\u01f4\u0005\u001c\u0000\u0000\u01f4[\u0001"+ "\u0000\u0000\u0000\u01f5\u01f6\u0005\u001b\u0000\u0000\u01f6]\u0001\u0000"+ - "\u0000\u0000\u01f7\u01f8\u0007\u0007\u0000\u0000\u01f8_\u0001\u0000\u0000"+ + "\u0000\u0000\u01f7\u01f8\u0007\u0006\u0000\u0000\u01f8_\u0001\u0000\u0000"+ "\u0000\u01f9\u01fa\u0005\u0005\u0000\u0000\u01fa\u01fb\u0003b1\u0000\u01fb"+ "a\u0001\u0000\u0000\u0000\u01fc\u01fd\u0005A\u0000\u0000\u01fd\u01fe\u0003"+ 
"\u0002\u0001\u0000\u01fe\u01ff\u0005B\u0000\u0000\u01ffc\u0001\u0000\u0000"+ diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/IdentifierBuilder.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/IdentifierBuilder.java index 67f8eb407ee11..7f0b5c73b9fb7 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/IdentifierBuilder.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/IdentifierBuilder.java @@ -25,7 +25,7 @@ public String visitIdentifier(IdentifierContext ctx) { @Override public String visitFromIdentifier(FromIdentifierContext ctx) { - return ctx == null ? null : unquoteIdentifier(ctx.QUOTED_IDENTIFIER(), ctx.FROM_UNQUOTED_IDENTIFIER()); + return ctx == null ? null : unquoteIdentifier(null, ctx.FROM_UNQUOTED_IDENTIFIER()); } protected static String unquoteIdentifier(TerminalNode quotedNode, TerminalNode unquotedNode) { diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/StatementParserTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/StatementParserTests.java index cf0dfa372ea3f..1a36616cb647b 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/StatementParserTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/StatementParserTests.java @@ -338,17 +338,17 @@ public void testInlineStatsWithoutGroups() { } public void testIdentifiersAsIndexPattern() { - assertIdentifierAsIndexPattern("foo", "from `foo`"); - assertIdentifierAsIndexPattern("foo,test-*", "from `foo`,`test-*`"); + // assertIdentifierAsIndexPattern("foo", "from `foo`"); + // assertIdentifierAsIndexPattern("foo,test-*", "from `foo`,`test-*`"); assertIdentifierAsIndexPattern("foo,test-*", "from foo,test-*"); assertIdentifierAsIndexPattern("123-test@foo_bar+baz1", "from 123-test@foo_bar+baz1"); - assertIdentifierAsIndexPattern("foo,test-*,abc", "from 
`foo`,`test-*`,abc"); - assertIdentifierAsIndexPattern("foo, test-*, abc, xyz", "from `foo, test-*, abc, xyz`"); - assertIdentifierAsIndexPattern("foo, test-*, abc, xyz,test123", "from `foo, test-*, abc, xyz`, test123"); + // assertIdentifierAsIndexPattern("foo,test-*,abc", "from `foo`,`test-*`,abc"); + // assertIdentifierAsIndexPattern("foo, test-*, abc, xyz", "from `foo, test-*, abc, xyz`"); + // assertIdentifierAsIndexPattern("foo, test-*, abc, xyz,test123", "from `foo, test-*, abc, xyz`, test123"); assertIdentifierAsIndexPattern("foo,test,xyz", "from foo, test,xyz"); assertIdentifierAsIndexPattern( - ",", - "from , ``" + "", // , + "from " // , `` ); } From 155e7c58cd85ee9178cb818de0f637c2235feb5b Mon Sep 17 00:00:00 2001 From: Nhat Nguyen Date: Wed, 8 May 2024 21:34:36 -0700 Subject: [PATCH 074/117] Add factories for time series aggregation (#107803) This change introduces operator factories for time-series aggregations. A time-series aggregation executes in three stages, deviating from the typical two-stage aggregation. For example: `sum(rate(write_requests)), avg(cpu) BY cluster, time-bucket` **1. Initial Stage:** In this stage, a standard hash aggregation is executed, grouped by tsid and time-bucket. The `values` aggregations are added to collect values of the grouping keys excluding the time-bucket, which are then used for final result grouping. ``` rate[INITIAL](write_requests), avg[INITIAL](cpu), values[SINGLE](cluster) BY tsid, time-bucket ``` **2. Intermediate Stage:** Equivalent to the final mode of a standard hash aggregation. This stage merges and reduces the result of the rate aggregations, but merges without reducing the results of non-rate aggregations. Certain aggregations, such as count_distinct, cannot have their final results combined. ``` rate[FINAL](write_requests), avg[INTERMEDIATE](cpu), values[SINGLE](cluster) BY tsid, time-bucket ``` **3. 
Final Stage:** This extra stage performs outer aggregations over the rate results and combines the intermediate results of non-rate aggregations using the specified user-defined grouping keys. ``` sum[SINGLE](rate_result), avg[FINAL](cpu) BY cluster, bucket ``` --- ...imeSeriesAggregationOperatorFactories.java | 157 +++++++++ .../TimeSeriesAggregationOperatorFactory.java | 48 --- .../TimeSeriesAggregationOperatorTests.java | 314 +++++++++++------- 3 files changed, 349 insertions(+), 170 deletions(-) create mode 100644 x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/TimeSeriesAggregationOperatorFactories.java delete mode 100644 x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/TimeSeriesAggregationOperatorFactory.java diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/TimeSeriesAggregationOperatorFactories.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/TimeSeriesAggregationOperatorFactories.java new file mode 100644 index 0000000000000..bb8d3fd269a8a --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/TimeSeriesAggregationOperatorFactories.java @@ -0,0 +1,157 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.compute.operator; + +import org.elasticsearch.compute.aggregation.AggregatorFunctionSupplier; +import org.elasticsearch.compute.aggregation.AggregatorMode; +import org.elasticsearch.compute.aggregation.GroupingAggregator; +import org.elasticsearch.compute.aggregation.blockhash.BlockHash; +import org.elasticsearch.compute.aggregation.blockhash.TimeSeriesBlockHash; +import org.elasticsearch.compute.data.ElementType; + +import java.util.ArrayList; +import java.util.List; + +/** + * This class provides operator factories for time-series aggregations. + * A time-series aggregation executes in three stages, deviating from the typical two-stage aggregation. + * For example: {@code sum(rate(write_requests)), avg(cpu) BY cluster, time-bucket} + * + * 1. Initial Stage: + * In this stage, a standard hash aggregation is executed, grouped by tsid and time-bucket. + * The {@code values} aggregations are added to collect values of the grouping keys excluding the time-bucket, + * which are then used for final result grouping. + * {@code rate[INITIAL](write_requests), avg[INITIAL](cpu), values[SINGLE](cluster) BY tsid, time-bucket} + * + * 2. Intermediate Stage: + * Equivalent to the final mode of a standard hash aggregation. + * This stage merges and reduces the result of the rate aggregations, + * but merges (without reducing) the results of non-rate aggregations. + * {@code rate[FINAL](write_requests), avg[INTERMEDIATE](cpu), values[SINGLE](cluster) BY tsid, time-bucket} + * + * 3. Final Stage: + * This extra stage performs outer aggregations over the rate results + * and combines the intermediate results of non-rate aggregations using the specified user-defined grouping keys. 
+ * {@code sum[SINGLE](rate_result), avg[FINAL](cpu) BY cluster, bucket} + */ +public final class TimeSeriesAggregationOperatorFactories { + + public record Initial( + int tsHashChannel, + int timeBucketChannel, + List groupings, + List rates, + List nonRates, + int maxPageSize + ) implements Operator.OperatorFactory { + @Override + public Operator get(DriverContext driverContext) { + List aggregators = new ArrayList<>(groupings.size() + rates.size() + nonRates.size()); + for (AggregatorFunctionSupplier f : rates) { + aggregators.add(f.groupingAggregatorFactory(AggregatorMode.INITIAL)); + } + for (AggregatorFunctionSupplier f : nonRates) { + aggregators.add(f.groupingAggregatorFactory(AggregatorMode.INITIAL)); + } + aggregators.addAll(valuesAggregatorForGroupings(groupings, timeBucketChannel)); + return new HashAggregationOperator( + aggregators, + () -> new TimeSeriesBlockHash(tsHashChannel, timeBucketChannel, driverContext), + driverContext + ); + } + + @Override + public String describe() { + return "TimeSeriesInitialAggregationOperatorFactory"; + } + } + + public record Intermediate( + int tsHashChannel, + int timeBucketChannel, + List groupings, + List rates, + List nonRates, + int maxPageSize + ) implements Operator.OperatorFactory { + @Override + public Operator get(DriverContext driverContext) { + List aggregators = new ArrayList<>(groupings.size() + rates.size() + nonRates.size()); + for (AggregatorFunctionSupplier f : rates) { + aggregators.add(f.groupingAggregatorFactory(AggregatorMode.FINAL)); + } + for (AggregatorFunctionSupplier f : nonRates) { + aggregators.add(f.groupingAggregatorFactory(AggregatorMode.INTERMEDIATE)); + } + aggregators.addAll(valuesAggregatorForGroupings(groupings, timeBucketChannel)); + List hashGroups = List.of( + new BlockHash.GroupSpec(tsHashChannel, ElementType.BYTES_REF), + new BlockHash.GroupSpec(timeBucketChannel, ElementType.LONG) + ); + return new HashAggregationOperator( + aggregators, + () -> BlockHash.build(hashGroups, 
driverContext.blockFactory(), maxPageSize, false), + driverContext + ); + } + + @Override + public String describe() { + return "TimeSeriesIntermediateAggregationOperatorFactory"; + } + } + + public record Final( + List groupings, + List outerRates, + List nonRates, + int maxPageSize + ) implements Operator.OperatorFactory { + @Override + public Operator get(DriverContext driverContext) { + List aggregators = new ArrayList<>(outerRates.size() + nonRates.size()); + for (AggregatorFunctionSupplier f : outerRates) { + aggregators.add(f.groupingAggregatorFactory(AggregatorMode.SINGLE)); + } + for (AggregatorFunctionSupplier f : nonRates) { + aggregators.add(f.groupingAggregatorFactory(AggregatorMode.FINAL)); + } + return new HashAggregationOperator( + aggregators, + () -> BlockHash.build(groupings, driverContext.blockFactory(), maxPageSize, false), + driverContext + ); + } + + @Override + public String describe() { + return "TimeSeriesFinalAggregationOperatorFactory"; + } + } + + static List valuesAggregatorForGroupings(List groupings, int timeBucketChannel) { + List aggregators = new ArrayList<>(); + for (BlockHash.GroupSpec g : groupings) { + if (g.channel() != timeBucketChannel) { + final List channels = List.of(g.channel()); + // TODO: perhaps introduce a specialized aggregator for this? 
+ var aggregatorSupplier = (switch (g.elementType()) { + case BYTES_REF -> new org.elasticsearch.compute.aggregation.ValuesBytesRefAggregatorFunctionSupplier(channels); + case DOUBLE -> new org.elasticsearch.compute.aggregation.ValuesDoubleAggregatorFunctionSupplier(channels); + case INT -> new org.elasticsearch.compute.aggregation.ValuesIntAggregatorFunctionSupplier(channels); + case LONG -> new org.elasticsearch.compute.aggregation.ValuesLongAggregatorFunctionSupplier(channels); + case BOOLEAN -> new org.elasticsearch.compute.aggregation.ValuesBooleanAggregatorFunctionSupplier(channels); + case NULL, DOC, UNKNOWN -> throw new IllegalArgumentException("unsupported grouping type"); + }); + aggregators.add(aggregatorSupplier.groupingAggregatorFactory(AggregatorMode.SINGLE)); + } + } + return aggregators; + } +} diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/TimeSeriesAggregationOperatorFactory.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/TimeSeriesAggregationOperatorFactory.java deleted file mode 100644 index 0cf0854a9b0c7..0000000000000 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/TimeSeriesAggregationOperatorFactory.java +++ /dev/null @@ -1,48 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. 
- */ - -package org.elasticsearch.compute.operator; - -import org.elasticsearch.compute.aggregation.AggregatorMode; -import org.elasticsearch.compute.aggregation.GroupingAggregator; -import org.elasticsearch.compute.aggregation.blockhash.BlockHash; -import org.elasticsearch.compute.aggregation.blockhash.TimeSeriesBlockHash; -import org.elasticsearch.core.TimeValue; - -import java.util.List; - -public record TimeSeriesAggregationOperatorFactory( - AggregatorMode mode, - int tsHashChannel, - int timestampIntervalChannel, - TimeValue timeSeriesPeriod, - List aggregators, - int maxPageSize -) implements Operator.OperatorFactory { - - @Override - public String describe() { - return "TimeSeriesAggregationOperator[mode=" - + mode - + ", tsHashChannel = " - + tsHashChannel - + ", timestampIntervalChannel = " - + timestampIntervalChannel - + ", timeSeriesPeriod = " - + timeSeriesPeriod - + ", maxPageSize = " - + maxPageSize - + "]"; - } - - @Override - public Operator get(DriverContext driverContext) { - BlockHash blockHash = new TimeSeriesBlockHash(tsHashChannel, timestampIntervalChannel, driverContext); - return new HashAggregationOperator(aggregators, () -> blockHash, driverContext); - } - -} diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/TimeSeriesAggregationOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/TimeSeriesAggregationOperatorTests.java index 79135b12b2a83..573c960e86b9c 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/TimeSeriesAggregationOperatorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/TimeSeriesAggregationOperatorTests.java @@ -11,65 +11,49 @@ import org.apache.lucene.store.Directory; import org.apache.lucene.util.BytesRef; import org.elasticsearch.common.Randomness; -import org.elasticsearch.compute.aggregation.AggregatorMode; +import 
org.elasticsearch.common.util.CollectionUtils; import org.elasticsearch.compute.aggregation.RateLongAggregatorFunctionSupplier; -import org.elasticsearch.compute.data.BytesRefBlock; -import org.elasticsearch.compute.data.DoubleBlock; +import org.elasticsearch.compute.aggregation.SumDoubleAggregatorFunctionSupplier; +import org.elasticsearch.compute.aggregation.blockhash.BlockHash; +import org.elasticsearch.compute.data.BlockFactory; +import org.elasticsearch.compute.data.BlockUtils; import org.elasticsearch.compute.data.ElementType; -import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.Page; import org.elasticsearch.compute.lucene.ValuesSourceReaderOperatorTests; import org.elasticsearch.core.IOUtils; import org.elasticsearch.core.TimeValue; +import org.elasticsearch.index.mapper.KeywordFieldMapper; import org.elasticsearch.index.mapper.NumberFieldMapper; -import org.hamcrest.Matcher; import org.junit.After; import java.io.IOException; import java.util.ArrayList; -import java.util.HashMap; import java.util.List; -import java.util.Map; +import java.util.stream.IntStream; import static org.elasticsearch.compute.lucene.TimeSeriesSortedSourceOperatorTests.createTimeSeriesSourceOperator; import static org.elasticsearch.compute.lucene.TimeSeriesSortedSourceOperatorTests.writeTS; -import static org.elasticsearch.index.mapper.DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER; -import static org.elasticsearch.test.MapMatcher.assertMap; -import static org.elasticsearch.test.MapMatcher.matchesMap; import static org.hamcrest.Matchers.equalTo; -public class TimeSeriesAggregationOperatorTests extends AnyOperatorTestCase { +public class TimeSeriesAggregationOperatorTests extends ComputeTestCase { - private IndexReader reader; - private final Directory directory = newDirectory(); + private IndexReader reader = null; + private Directory directory = null; @After public void cleanup() throws IOException { IOUtils.close(reader, directory); } - @Override - 
protected Operator.OperatorFactory simple() { - return new TimeSeriesAggregationOperatorFactory(AggregatorMode.FINAL, 0, 1, TimeValue.ZERO, List.of(), 100); + /** + * A {@link DriverContext} with a nonBreakingBigArrays. + */ + protected DriverContext driverContext() { // TODO make this final once all operators support memory tracking + BlockFactory blockFactory = blockFactory(); + return new DriverContext(blockFactory.bigArrays(), blockFactory); } - @Override - protected Matcher expectedDescriptionOfSimple() { - return equalTo( - "TimeSeriesAggregationOperator[mode=FINAL, tsHashChannel = 0, timestampIntervalChannel = 1, " - + "timeSeriesPeriod = 0s, maxPageSize = 100]" - ); - } - - @Override - protected Matcher expectedToStringOfSimple() { - return equalTo( - "HashAggregationOperator[blockHash=TimeSeriesBlockHash{keys=[BytesRefKey[channel=0], " - + "LongKey[channel=1]], entries=-1b}, aggregators=[]]" - ); - } - - public void testBasicRate() { + public void testBasicRate() throws Exception { long[] v1 = { 1, 1, 3, 0, 2, 9, 21, 3, 7, 7, 9, 12 }; long[] t1 = { 1, 5, 11, 20, 21, 59, 88, 91, 92, 97, 99, 112 }; @@ -78,25 +62,51 @@ public void testBasicRate() { long[] v3 = { 0, 1, 0, 1, 1, 4, 2, 2, 2, 2, 3, 5, 5 }; long[] t3 = { 2, 3, 5, 7, 8, 9, 10, 12, 14, 15, 18, 20, 22 }; - List pods = List.of(new Pod("p1", t1, v1), new Pod("p2", t2, v2), new Pod("p3", t3, v3)); - long unit = between(1, 5); - Map actualRates = runRateTest(pods, TimeValue.timeValueMillis(unit), TimeValue.ZERO); - assertThat( - actualRates, - equalTo( - Map.of( - new Group("\u0001\u0003pods\u0002p1", 0), - 35.0 * unit / 111.0, - new Group("\u0001\u0003pods\u0002p2", 0), - 42.0 * unit / 13.0, - new Group("\u0001\u0003pods\u0002p3", 0), - 10.0 * unit / 20.0 - ) - ) + List pods = List.of( + new Pod("p1", "cluster_1", new Interval(2100, t1, v1)), + new Pod("p2", "cluster_1", new Interval(600, t2, v2)), + new Pod("p3", "cluster_2", new Interval(1100, t3, v3)) ); + long unit = between(1, 5); + { + List> 
actual = runRateTest( + pods, + List.of("cluster"), + TimeValue.timeValueMillis(unit), + TimeValue.timeValueMillis(500) + ); + List> expected = List.of( + List.of(new BytesRef("cluster_1"), 35.0 * unit / 111.0 + 42.0 * unit / 13.0), + List.of(new BytesRef("cluster_2"), 10.0 * unit / 20.0) + ); + assertThat(actual, equalTo(expected)); + } + { + List> actual = runRateTest(pods, List.of("pod"), TimeValue.timeValueMillis(unit), TimeValue.timeValueMillis(500)); + List> expected = List.of( + List.of(new BytesRef("p1"), 35.0 * unit / 111.0), + List.of(new BytesRef("p2"), 42.0 * unit / 13.0), + List.of(new BytesRef("p3"), 10.0 * unit / 20.0) + ); + assertThat(actual, equalTo(expected)); + } + { + List> actual = runRateTest( + pods, + List.of("cluster", "bucket"), + TimeValue.timeValueMillis(unit), + TimeValue.timeValueMillis(500) + ); + List> expected = List.of( + List.of(new BytesRef("cluster_1"), 2000L, 35.0 * unit / 111.0), + List.of(new BytesRef("cluster_1"), 500L, 42.0 * unit / 13.0), + List.of(new BytesRef("cluster_2"), 1000L, 10.0 * unit / 20.0) + ); + assertThat(actual, equalTo(expected)); + } } - public void testRateWithInterval() { + public void testRateWithInterval() throws Exception { long[] v1 = { 1, 2, 3, 0, 1, 2, 3, 4, 5, 0, 1, 2, 3 }; long[] t1 = { 0, 10_000, 20_000, 30_000, 40_000, 50_000, 60_000, 70_000, 80_000, 90_000, 100_000, 110_000, 120_000 }; @@ -105,59 +115,71 @@ public void testRateWithInterval() { long[] v3 = { 2, 4, 8, 16, 32, 64, 128, 256, 512, 1024, 2048, 4096, 8192 }; long[] t3 = { 0, 10_000, 20_000, 30_000, 40_000, 50_000, 60_000, 70_000, 80_000, 90_000, 100_000, 110_000, 120_000 }; - List pods = List.of(new Pod("p1", t1, v1), new Pod("p2", t2, v2), new Pod("p3", t3, v3)); - Map actualRates = runRateTest(pods, TimeValue.timeValueMillis(1), TimeValue.timeValueMinutes(1)); - assertMap( - actualRates, - matchesMap().entry(new Group("\u0001\u0003pods\u0002p1", 120_000), 0.0D) - .entry(new Group("\u0001\u0003pods\u0002p1", 60_000), 8.0E-5D) - 
.entry(new Group("\u0001\u0003pods\u0002p1", 0), 8.0E-5D) - .entry(new Group("\u0001\u0003pods\u0002p2", 120_000), 0.0D) - .entry(new Group("\u0001\u0003pods\u0002p2", 60_000), 0.0D) - .entry(new Group("\u0001\u0003pods\u0002p2", 0), 0.0D) - .entry(new Group("\u0001\u0003pods\u0002p3", 120_000), 0.0D) - .entry(new Group("\u0001\u0003pods\u0002p3", 60_000), 0.07936D) - .entry(new Group("\u0001\u0003pods\u0002p3", 0), 0.00124D) + List pods = List.of( + new Pod("p1", "cluster_1", new Interval(0, t1, v1)), + new Pod("p2", "cluster_2", new Interval(0, t2, v2)), + new Pod("p3", "cluster_2", new Interval(0, t3, v3)) + ); + List> actual = runRateTest( + pods, + List.of("pod", "bucket"), + TimeValue.timeValueMillis(1), + TimeValue.timeValueMinutes(1) + ); + List> expected = List.of( + List.of(new BytesRef("p1]"), 120_000L, 0.0D), + List.of(new BytesRef("p1"), 60_000L, 8.0E-5D), + List.of(new BytesRef("p1"), 0, 8.0E-5D), + List.of(new BytesRef("p2"), 120_000L, 0.0D), + List.of(new BytesRef("p2"), 60_000L, 0.0D), + List.of(new BytesRef("p2"), 0L, 0.0D), + List.of(new BytesRef("p3"), 120_000L, 0.0D), + List.of(new BytesRef("p3"), 60_000L, 0.07936D), + List.of(new BytesRef("p3"), 0L, 0.00124D) ); } - public void testRandomRate() { + public void testRandomRate() throws Exception { int numPods = between(1, 10); List pods = new ArrayList<>(); - Map expectedRates = new HashMap<>(); TimeValue unit = TimeValue.timeValueSeconds(1); + List> expected = new ArrayList<>(); for (int p = 0; p < numPods; p++) { - int numValues = between(2, 100); - long[] values = new long[numValues]; - long[] times = new long[numValues]; - long t = DEFAULT_DATE_TIME_FORMATTER.parseMillis("2024-01-01T00:00:00Z"); - for (int i = 0; i < numValues; i++) { - values[i] = randomIntBetween(0, 100); - t += TimeValue.timeValueSeconds(between(1, 10)).millis(); - times[i] = t; + int numIntervals = randomIntBetween(1, 3); + Interval[] intervals = new Interval[numIntervals]; + long startTimeInHours = between(10, 100); + 
String podName = "p" + p; + for (int interval = 0; interval < numIntervals; interval++) { + final long startInterval = TimeValue.timeValueHours(--startTimeInHours).millis(); + int numValues = between(2, 100); + long[] values = new long[numValues]; + long[] times = new long[numValues]; + long delta = 0; + for (int i = 0; i < numValues; i++) { + values[i] = randomIntBetween(0, 100); + delta += TimeValue.timeValueSeconds(between(1, 10)).millis(); + times[i] = delta; + } + intervals[interval] = new Interval(startInterval, times, values); + if (numValues == 1) { + expected.add(List.of(new BytesRef(podName), startInterval, null)); + } else { + expected.add(List.of(new BytesRef(podName), startInterval, intervals[interval].expectedRate(unit))); + } } - Pod pod = new Pod("p" + p, times, values); + Pod pod = new Pod(podName, "cluster", intervals); pods.add(pod); - if (numValues == 1) { - expectedRates.put(new Group("\u0001\u0003pods\u0002" + pod.name, 0), null); - } else { - expectedRates.put(new Group("\u0001\u0003pods\u0002" + pod.name, 0), pod.expectedRate(unit)); - } } - Map actualRates = runRateTest(pods, unit, TimeValue.ZERO); - assertThat(actualRates, equalTo(expectedRates)); + List> actual = runRateTest(pods, List.of("pod", "bucket"), unit, TimeValue.timeValueHours(1)); + assertThat(actual, equalTo(expected)); } - record Pod(String name, long[] times, long[] values) { - Pod { - assert times.length == values.length : times.length + "!=" + values.length; - } - + record Interval(long offset, long[] times, long[] values) { double expectedRate(TimeValue unit) { double dv = 0; - for (int i = 0; i < values.length - 1; i++) { - if (values[i + 1] < values[i]) { - dv += values[i]; + for (int v = 0; v < values.length - 1; v++) { + if (values[v + 1] < values[v]) { + dv += values[v]; } } dv += (values[values.length - 1] - values[0]); @@ -166,9 +188,13 @@ record Pod(String name, long[] times, long[] values) { } } - Map runRateTest(List pods, TimeValue unit, TimeValue interval) { + 
record Pod(String name, String cluster, Interval... intervals) {} + + List> runRateTest(List pods, List groupings, TimeValue unit, TimeValue bucketInterval) throws IOException { + cleanup(); + directory = newDirectory(); long unitInMillis = unit.millis(); - record Doc(String pod, long timestamp, long requests) { + record Doc(String pod, String cluster, long timestamp, long requests) { } var sourceOperatorFactory = createTimeSeriesSourceOperator( @@ -177,70 +203,114 @@ record Doc(String pod, long timestamp, long requests) { Integer.MAX_VALUE, between(1, 100), randomBoolean(), - interval, + bucketInterval, writer -> { List docs = new ArrayList<>(); for (Pod pod : pods) { - for (int i = 0; i < pod.times.length; i++) { - docs.add(new Doc(pod.name, pod.times[i], pod.values[i])); + for (Interval interval : pod.intervals) { + for (int i = 0; i < interval.times.length; i++) { + docs.add(new Doc(pod.name, pod.cluster, interval.offset + interval.times[i], interval.values[i])); + } } } Randomness.shuffle(docs); for (Doc doc : docs) { - writeTS(writer, doc.timestamp, new Object[] { "pod", doc.pod }, new Object[] { "requests", doc.requests }); + writeTS( + writer, + doc.timestamp, + new Object[] { "pod", doc.pod, "cluster", doc.cluster }, + new Object[] { "requests", doc.requests } + ); } return docs.size(); } ); var ctx = driverContext(); - var aggregators = List.of( - new RateLongAggregatorFunctionSupplier(List.of(4, 2), unitInMillis).groupingAggregatorFactory(AggregatorMode.INITIAL) - ); - Operator initialHash = new TimeSeriesAggregationOperatorFactory( - AggregatorMode.INITIAL, + List extractOperators = new ArrayList<>(); + var rateField = new NumberFieldMapper.NumberFieldType("requests", NumberFieldMapper.NumberType.LONG); + Operator extractRate = (ValuesSourceReaderOperatorTests.factory(reader, rateField, ElementType.LONG).get(ctx)); + extractOperators.add(extractRate); + List nonBucketGroupings = new ArrayList<>(groupings); + nonBucketGroupings.remove("bucket"); + for 
(String grouping : nonBucketGroupings) { + var groupingField = new KeywordFieldMapper.KeywordFieldType(grouping); + extractOperators.add(ValuesSourceReaderOperatorTests.factory(reader, groupingField, ElementType.BYTES_REF).get(ctx)); + } + // _doc, tsid, timestamp, bucket, requests, grouping1, grouping2 + Operator intialAgg = new TimeSeriesAggregationOperatorFactories.Initial( 1, 3, - interval, - aggregators, - randomIntBetween(1, 1000) + IntStream.range(0, nonBucketGroupings.size()).mapToObj(n -> new BlockHash.GroupSpec(5 + n, ElementType.BYTES_REF)).toList(), + List.of(new RateLongAggregatorFunctionSupplier(List.of(4, 2), unitInMillis)), + List.of(), + between(1, 100) ).get(ctx); - aggregators = List.of( - new RateLongAggregatorFunctionSupplier(List.of(2, 3, 4), unitInMillis).groupingAggregatorFactory(AggregatorMode.FINAL) - ); - Operator finalHash = new TimeSeriesAggregationOperatorFactory( - AggregatorMode.FINAL, + // tsid, bucket, rate[0][0],rate[0][1],rate[0][2], grouping1, grouping2 + Operator intermediateAgg = new TimeSeriesAggregationOperatorFactories.Intermediate( 0, 1, - interval, - aggregators, - randomIntBetween(1, 1000) + IntStream.range(0, nonBucketGroupings.size()).mapToObj(n -> new BlockHash.GroupSpec(5 + n, ElementType.BYTES_REF)).toList(), + List.of(new RateLongAggregatorFunctionSupplier(List.of(2, 3, 4), unitInMillis)), + List.of(), + between(1, 100) ).get(ctx); + // tsid, bucket, rate, grouping1, grouping2 + List finalGroups = new ArrayList<>(); + int groupChannel = 3; + for (String grouping : groupings) { + if (grouping.equals("bucket")) { + finalGroups.add(new BlockHash.GroupSpec(1, ElementType.LONG)); + } else { + finalGroups.add(new BlockHash.GroupSpec(groupChannel++, ElementType.BYTES_REF)); + } + } + Operator finalAgg = new TimeSeriesAggregationOperatorFactories.Final( + finalGroups, + List.of(new SumDoubleAggregatorFunctionSupplier(List.of(2))), + List.of(), + between(1, 100) + ).get(ctx); + List results = new ArrayList<>(); - var 
requestsField = new NumberFieldMapper.NumberFieldType("requests", NumberFieldMapper.NumberType.LONG); OperatorTestCase.runDriver( new Driver( ctx, sourceOperatorFactory.get(ctx), - List.of(ValuesSourceReaderOperatorTests.factory(reader, requestsField, ElementType.LONG).get(ctx), initialHash, finalHash), + CollectionUtils.concatLists(extractOperators, List.of(intialAgg, intermediateAgg, finalAgg)), new TestResultPageSinkOperator(results::add), () -> {} ) ); - Map rates = new HashMap<>(); + List> values = new ArrayList<>(); for (Page result : results) { - BytesRefBlock keysBlock = result.getBlock(0); - LongBlock timestampIntervalsBock = result.getBlock(1); - DoubleBlock ratesBlock = result.getBlock(2); - for (int i = 0; i < result.getPositionCount(); i++) { - var key = new Group(keysBlock.getBytesRef(i, new BytesRef()).utf8ToString(), timestampIntervalsBock.getLong(i)); - rates.put(key, ratesBlock.getDouble(i)); + for (int p = 0; p < result.getPositionCount(); p++) { + int blockCount = result.getBlockCount(); + List row = new ArrayList<>(); + for (int b = 0; b < blockCount; b++) { + row.add(BlockUtils.toJavaObject(result.getBlock(b), p)); + } + values.add(row); } result.releaseBlocks(); } - return rates; + values.sort((v1, v2) -> { + for (int i = 0; i < v1.size(); i++) { + if (v1.get(i) instanceof BytesRef b1) { + int cmp = b1.compareTo((BytesRef) v2.get(i)); + if (cmp != 0) { + return cmp; + } + } else if (v1.get(i) instanceof Long b1) { + int cmp = b1.compareTo((Long) v2.get(i)); + if (cmp != 0) { + return -cmp; + } + } + } + return 0; + }); + return values; } - - record Group(String tsidHash, long timestampInterval) {} } From c9b8d7239f038078f0736614d25f25ba19c319e5 Mon Sep 17 00:00:00 2001 From: Luigi Dell'Aquila Date: Thu, 9 May 2024 08:56:07 +0200 Subject: [PATCH 075/117] ES|QL: account for page overhead when calculating memory used by blocks (#108347) --- .../xpack/esql/heap_attack/HeapAttackIT.java | 1 - .../compute/data/BooleanArrayVector.java | 4 +- 
.../compute/data/BytesRefArrayVector.java | 4 +- .../compute/data/DoubleArrayVector.java | 4 +- .../compute/data/IntArrayVector.java | 4 +- .../compute/data/LongArrayVector.java | 4 +- .../org/elasticsearch/compute/data/Block.java | 12 ++++ .../compute/data/X-ArrayVector.java.st | 4 +- .../compute/data/BlockAccountingTests.java | 65 ++++++++----------- 9 files changed, 56 insertions(+), 46 deletions(-) diff --git a/test/external-modules/esql-heap-attack/src/javaRestTest/java/org/elasticsearch/xpack/esql/heap_attack/HeapAttackIT.java b/test/external-modules/esql-heap-attack/src/javaRestTest/java/org/elasticsearch/xpack/esql/heap_attack/HeapAttackIT.java index 38f8ad4766b7e..5c034a81fc9cd 100644 --- a/test/external-modules/esql-heap-attack/src/javaRestTest/java/org/elasticsearch/xpack/esql/heap_attack/HeapAttackIT.java +++ b/test/external-modules/esql-heap-attack/src/javaRestTest/java/org/elasticsearch/xpack/esql/heap_attack/HeapAttackIT.java @@ -269,7 +269,6 @@ public void testManyEval() throws IOException { assertMap(map, matchesMap().entry("columns", columns).entry("values", hasSize(10_000))); } - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/108104") public void testTooManyEval() throws IOException { initManyLongs(); assertCircuitBreaks(() -> manyEval(490)); diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanArrayVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanArrayVector.java index 3cebcd75cbe7a..e195bda3a6dbb 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanArrayVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanArrayVector.java @@ -23,7 +23,9 @@ final class BooleanArrayVector extends AbstractVector implements BooleanVector { static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(BooleanArrayVector.class) 
// TODO: remove these extra bytes once `asBlock` returns a block with a separate reference to the vector. - + RamUsageEstimator.shallowSizeOfInstance(BooleanVectorBlock.class); + + RamUsageEstimator.shallowSizeOfInstance(BooleanVectorBlock.class) + // TODO: remove this if/when we account for memory used by Pages + + Block.PAGE_MEM_OVERHEAD_PER_BLOCK; private final boolean[] values; diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefArrayVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefArrayVector.java index 81f507a4fa55a..75cf4a2e1fe5a 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefArrayVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefArrayVector.java @@ -25,7 +25,9 @@ final class BytesRefArrayVector extends AbstractVector implements BytesRefVector static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(BytesRefArrayVector.class) // TODO: remove these extra bytes once `asBlock` returns a block with a separate reference to the vector. 
- + RamUsageEstimator.shallowSizeOfInstance(BytesRefVectorBlock.class); + + RamUsageEstimator.shallowSizeOfInstance(BytesRefVectorBlock.class) + // TODO: remove this if/when we account for memory used by Pages + + Block.PAGE_MEM_OVERHEAD_PER_BLOCK; private final BytesRefArray values; diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleArrayVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleArrayVector.java index 451b6cc7b655b..476d5e55c55a0 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleArrayVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleArrayVector.java @@ -23,7 +23,9 @@ final class DoubleArrayVector extends AbstractVector implements DoubleVector { static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(DoubleArrayVector.class) // TODO: remove these extra bytes once `asBlock` returns a block with a separate reference to the vector. 
- + RamUsageEstimator.shallowSizeOfInstance(DoubleVectorBlock.class); + + RamUsageEstimator.shallowSizeOfInstance(DoubleVectorBlock.class) + // TODO: remove this if/when we account for memory used by Pages + + Block.PAGE_MEM_OVERHEAD_PER_BLOCK; private final double[] values; diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntArrayVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntArrayVector.java index a2b6697a38634..97bf1675a9a37 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntArrayVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntArrayVector.java @@ -23,7 +23,9 @@ final class IntArrayVector extends AbstractVector implements IntVector { static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(IntArrayVector.class) // TODO: remove these extra bytes once `asBlock` returns a block with a separate reference to the vector. 
- + RamUsageEstimator.shallowSizeOfInstance(IntVectorBlock.class); + + RamUsageEstimator.shallowSizeOfInstance(IntVectorBlock.class) + // TODO: remove this if/when we account for memory used by Pages + + Block.PAGE_MEM_OVERHEAD_PER_BLOCK; private final int[] values; diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongArrayVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongArrayVector.java index 6eec82528c8da..4b504943b760a 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongArrayVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongArrayVector.java @@ -23,7 +23,9 @@ final class LongArrayVector extends AbstractVector implements LongVector { static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(LongArrayVector.class) // TODO: remove these extra bytes once `asBlock` returns a block with a separate reference to the vector. 
- + RamUsageEstimator.shallowSizeOfInstance(LongVectorBlock.class); + + RamUsageEstimator.shallowSizeOfInstance(LongVectorBlock.class) + // TODO: remove this if/when we account for memory used by Pages + + Block.PAGE_MEM_OVERHEAD_PER_BLOCK; private final long[] values; diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/Block.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/Block.java index ed7ee93c99325..cfa1d3656ba3a 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/Block.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/Block.java @@ -8,6 +8,7 @@ package org.elasticsearch.compute.data; import org.apache.lucene.util.Accountable; +import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.common.io.stream.NamedWriteable; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.unit.ByteSizeValue; @@ -44,6 +45,17 @@ public interface Block extends Accountable, BlockLoader.Block, NamedWriteable, R */ long MAX_LOOKUP = 100_000; + /** + * We do not track memory for pages directly (only for single blocks), + * but the page memory overhead can still be significant, especially for pages containing thousands of blocks. + * For now, we approximate this overhead, per block, using this value. + * + * The exact overhead per block would be (more correctly) {@link RamUsageEstimator#NUM_BYTES_OBJECT_REF}, + * but we approximate it with {@link RamUsageEstimator#NUM_BYTES_OBJECT_ALIGNMENT} to avoid further alignments + * to object size (at the end of the alignment, it would make no practical difference). + */ + int PAGE_MEM_OVERHEAD_PER_BLOCK = RamUsageEstimator.NUM_BYTES_OBJECT_ALIGNMENT; + /** * {@return an efficient dense single-value view of this block}. * Null, if the block is not dense single-valued. 
That is, if diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-ArrayVector.java.st b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-ArrayVector.java.st index 7eeb7765e3b1e..4afd8db62f848 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-ArrayVector.java.st +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-ArrayVector.java.st @@ -38,7 +38,9 @@ final class $Type$ArrayVector extends AbstractVector implements $Type$Vector { static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance($Type$ArrayVector.class) // TODO: remove these extra bytes once `asBlock` returns a block with a separate reference to the vector. - + RamUsageEstimator.shallowSizeOfInstance($Type$VectorBlock.class); + + RamUsageEstimator.shallowSizeOfInstance($Type$VectorBlock.class) + // TODO: remove this if/when we account for memory used by Pages + + Block.PAGE_MEM_OVERHEAD_PER_BLOCK; $if(BytesRef)$ private final BytesRefArray values; diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BlockAccountingTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BlockAccountingTests.java index ae43e3954935d..86bfec5120945 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BlockAccountingTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BlockAccountingTests.java @@ -42,9 +42,8 @@ public class BlockAccountingTests extends ComputeTestCase { public void testBooleanVector() { BlockFactory blockFactory = blockFactory(); Vector empty = blockFactory.newBooleanArrayVector(new boolean[] {}, 0); - long expectedEmptyUsed = RamUsageTester.ramUsed(empty, RAM_USAGE_ACCUMULATOR) + RamUsageEstimator.shallowSizeOfInstance( - BooleanVectorBlock.class - ); + long expectedEmptyUsed = Block.PAGE_MEM_OVERHEAD_PER_BLOCK + RamUsageTester.ramUsed(empty, 
RAM_USAGE_ACCUMULATOR) + + RamUsageEstimator.shallowSizeOfInstance(BooleanVectorBlock.class); assertThat(empty.ramBytesUsed(), is(expectedEmptyUsed)); Vector emptyPlusOne = blockFactory.newBooleanArrayVector(new boolean[] { randomBoolean() }, 1); @@ -62,9 +61,8 @@ public void testBooleanVector() { public void testIntVector() { BlockFactory blockFactory = blockFactory(); Vector empty = blockFactory.newIntArrayVector(new int[] {}, 0); - long expectedEmptyUsed = RamUsageTester.ramUsed(empty, RAM_USAGE_ACCUMULATOR) + RamUsageEstimator.shallowSizeOfInstance( - IntVectorBlock.class - ); + long expectedEmptyUsed = Block.PAGE_MEM_OVERHEAD_PER_BLOCK + RamUsageTester.ramUsed(empty, RAM_USAGE_ACCUMULATOR) + + RamUsageEstimator.shallowSizeOfInstance(IntVectorBlock.class); assertThat(empty.ramBytesUsed(), is(expectedEmptyUsed)); Vector emptyPlusOne = blockFactory.newIntArrayVector(new int[] { randomInt() }, 1); @@ -82,9 +80,8 @@ public void testIntVector() { public void testLongVector() { BlockFactory blockFactory = blockFactory(); Vector empty = blockFactory.newLongArrayVector(new long[] {}, 0); - long expectedEmptyUsed = RamUsageTester.ramUsed(empty, RAM_USAGE_ACCUMULATOR) + RamUsageEstimator.shallowSizeOfInstance( - LongVectorBlock.class - ); + long expectedEmptyUsed = Block.PAGE_MEM_OVERHEAD_PER_BLOCK + RamUsageTester.ramUsed(empty, RAM_USAGE_ACCUMULATOR) + + RamUsageEstimator.shallowSizeOfInstance(LongVectorBlock.class); assertThat(empty.ramBytesUsed(), is(expectedEmptyUsed)); Vector emptyPlusOne = blockFactory.newLongArrayVector(new long[] { randomLong() }, 1); @@ -103,9 +100,8 @@ public void testLongVector() { public void testDoubleVector() { BlockFactory blockFactory = blockFactory(); Vector empty = blockFactory.newDoubleArrayVector(new double[] {}, 0); - long expectedEmptyUsed = RamUsageTester.ramUsed(empty, RAM_USAGE_ACCUMULATOR) + RamUsageEstimator.shallowSizeOfInstance( - DoubleVectorBlock.class - ); + long expectedEmptyUsed = Block.PAGE_MEM_OVERHEAD_PER_BLOCK + 
RamUsageTester.ramUsed(empty, RAM_USAGE_ACCUMULATOR) + + RamUsageEstimator.shallowSizeOfInstance(DoubleVectorBlock.class); assertThat(empty.ramBytesUsed(), is(expectedEmptyUsed)); Vector emptyPlusOne = blockFactory.newDoubleArrayVector(new double[] { randomDouble() }, 1); @@ -127,9 +123,8 @@ public void testBytesRefVector() { var emptyArray = new BytesRefArray(0, blockFactory.bigArrays()); var arrayWithOne = new BytesRefArray(0, blockFactory.bigArrays()); Vector emptyVector = blockFactory.newBytesRefArrayVector(emptyArray, 0); - long expectedEmptyVectorUsed = RamUsageTester.ramUsed(emptyVector, RAM_USAGE_ACCUMULATOR) + RamUsageEstimator.shallowSizeOfInstance( - BytesRefVectorBlock.class - ); + long expectedEmptyVectorUsed = Block.PAGE_MEM_OVERHEAD_PER_BLOCK + RamUsageTester.ramUsed(emptyVector, RAM_USAGE_ACCUMULATOR) + + RamUsageEstimator.shallowSizeOfInstance(BytesRefVectorBlock.class); assertThat(emptyVector.ramBytesUsed(), is(expectedEmptyVectorUsed)); var bytesRef = new BytesRef(randomAlphaOfLengthBetween(1, 16)); @@ -146,9 +141,8 @@ public void testBytesRefVector() { public void testBooleanBlock() { BlockFactory blockFactory = blockFactory(); Block empty = new BooleanArrayBlock(new boolean[] {}, 0, new int[] { 0 }, null, Block.MvOrdering.UNORDERED, blockFactory); - long expectedEmptyUsed = RamUsageTester.ramUsed(empty, RAM_USAGE_ACCUMULATOR) + RamUsageEstimator.shallowSizeOfInstance( - BooleanVectorBlock.class - ); + long expectedEmptyUsed = Block.PAGE_MEM_OVERHEAD_PER_BLOCK + RamUsageTester.ramUsed(empty, RAM_USAGE_ACCUMULATOR) + + RamUsageEstimator.shallowSizeOfInstance(BooleanVectorBlock.class); assertThat(empty.ramBytesUsed(), is(expectedEmptyUsed)); Block emptyPlusOne = new BooleanArrayBlock( @@ -194,18 +188,16 @@ public void testBooleanBlockWithNullFirstValues() { Block.MvOrdering.UNORDERED, blockFactory() ); - long expectedEmptyUsed = RamUsageTester.ramUsed(empty, RAM_USAGE_ACCUMULATOR) + RamUsageEstimator.shallowSizeOfInstance( - 
BooleanVectorBlock.class - ); + long expectedEmptyUsed = Block.PAGE_MEM_OVERHEAD_PER_BLOCK + RamUsageTester.ramUsed(empty, RAM_USAGE_ACCUMULATOR) + + RamUsageEstimator.shallowSizeOfInstance(BooleanVectorBlock.class); assertThat(empty.ramBytesUsed(), lessThanOrEqualTo(expectedEmptyUsed)); } public void testIntBlock() { BlockFactory blockFactory = blockFactory(); Block empty = new IntArrayBlock(new int[] {}, 0, new int[] { 0 }, null, Block.MvOrdering.UNORDERED, blockFactory); - long expectedEmptyUsed = RamUsageTester.ramUsed(empty, RAM_USAGE_ACCUMULATOR) + RamUsageEstimator.shallowSizeOfInstance( - IntVectorBlock.class - ); + long expectedEmptyUsed = Block.PAGE_MEM_OVERHEAD_PER_BLOCK + RamUsageTester.ramUsed(empty, RAM_USAGE_ACCUMULATOR) + + RamUsageEstimator.shallowSizeOfInstance(IntVectorBlock.class); assertThat(empty.ramBytesUsed(), is(expectedEmptyUsed)); Block emptyPlusOne = new IntArrayBlock( @@ -242,18 +234,16 @@ public void testIntBlock() { public void testIntBlockWithNullFirstValues() { BlockFactory blockFactory = blockFactory(); Block empty = new IntArrayBlock(new int[] {}, 0, null, BitSet.valueOf(new byte[] { 1 }), Block.MvOrdering.UNORDERED, blockFactory); - long expectedEmptyUsed = RamUsageTester.ramUsed(empty, RAM_USAGE_ACCUMULATOR) + RamUsageEstimator.shallowSizeOfInstance( - IntVectorBlock.class - ); + long expectedEmptyUsed = Block.PAGE_MEM_OVERHEAD_PER_BLOCK + RamUsageTester.ramUsed(empty, RAM_USAGE_ACCUMULATOR) + + RamUsageEstimator.shallowSizeOfInstance(IntVectorBlock.class); assertThat(empty.ramBytesUsed(), is(expectedEmptyUsed)); } public void testLongBlock() { BlockFactory blockFactory = blockFactory(); Block empty = new LongArrayBlock(new long[] {}, 0, new int[] { 0 }, null, Block.MvOrdering.UNORDERED, blockFactory); - long expectedEmptyUsed = RamUsageTester.ramUsed(empty, RAM_USAGE_ACCUMULATOR) + RamUsageEstimator.shallowSizeOfInstance( - LongVectorBlock.class - ); + long expectedEmptyUsed = Block.PAGE_MEM_OVERHEAD_PER_BLOCK + 
RamUsageTester.ramUsed(empty, RAM_USAGE_ACCUMULATOR) + + RamUsageEstimator.shallowSizeOfInstance(LongVectorBlock.class); assertThat(empty.ramBytesUsed(), is(expectedEmptyUsed)); Block emptyPlusOne = new LongArrayBlock( @@ -299,18 +289,16 @@ public void testLongBlockWithNullFirstValues() { Block.MvOrdering.UNORDERED, blockFactory() ); - long expectedEmptyUsed = RamUsageTester.ramUsed(empty, RAM_USAGE_ACCUMULATOR) + RamUsageEstimator.shallowSizeOfInstance( - LongVectorBlock.class - ); + long expectedEmptyUsed = Block.PAGE_MEM_OVERHEAD_PER_BLOCK + RamUsageTester.ramUsed(empty, RAM_USAGE_ACCUMULATOR) + + RamUsageEstimator.shallowSizeOfInstance(LongVectorBlock.class); assertThat(empty.ramBytesUsed(), is(expectedEmptyUsed)); } public void testDoubleBlock() { BlockFactory blockFactory = blockFactory(); Block empty = new DoubleArrayBlock(new double[] {}, 0, new int[] { 0 }, null, Block.MvOrdering.UNORDERED, blockFactory); - long expectedEmptyUsed = RamUsageTester.ramUsed(empty, RAM_USAGE_ACCUMULATOR) + RamUsageEstimator.shallowSizeOfInstance( - DoubleVectorBlock.class - ); + long expectedEmptyUsed = Block.PAGE_MEM_OVERHEAD_PER_BLOCK + RamUsageTester.ramUsed(empty, RAM_USAGE_ACCUMULATOR) + + RamUsageEstimator.shallowSizeOfInstance(DoubleVectorBlock.class); assertThat(empty.ramBytesUsed(), is(expectedEmptyUsed)); Block emptyPlusOne = new DoubleArrayBlock( @@ -356,9 +344,8 @@ public void testDoubleBlockWithNullFirstValues() { Block.MvOrdering.UNORDERED, blockFactory() ); - long expectedEmptyUsed = RamUsageTester.ramUsed(empty, RAM_USAGE_ACCUMULATOR) + RamUsageEstimator.shallowSizeOfInstance( - DoubleVectorBlock.class - ); + long expectedEmptyUsed = Block.PAGE_MEM_OVERHEAD_PER_BLOCK + RamUsageTester.ramUsed(empty, RAM_USAGE_ACCUMULATOR) + + RamUsageEstimator.shallowSizeOfInstance(DoubleVectorBlock.class); assertThat(empty.ramBytesUsed(), is(expectedEmptyUsed)); } From 68a8664c2105bff5765c3fc0f6701ad21f23c2b1 Mon Sep 17 00:00:00 2001 From: Nick Tindall Date: Thu, 9 May 2024 
17:17:10 +1000 Subject: [PATCH 076/117] [DOCS] Fix stored_fields parameter description (#98385) (#108445) (referenced from get and multi_get API docs) Closes #98385 --- docs/reference/rest-api/common-parms.asciidoc | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/reference/rest-api/common-parms.asciidoc b/docs/reference/rest-api/common-parms.asciidoc index dd264c0e5bcd2..a2a397c4efe65 100644 --- a/docs/reference/rest-api/common-parms.asciidoc +++ b/docs/reference/rest-api/common-parms.asciidoc @@ -1062,8 +1062,8 @@ end::stats[] tag::stored_fields[] `stored_fields`:: -(Optional, Boolean) If `true`, retrieves the document fields stored in the -index rather than the document `_source`. Defaults to `false`. +(Optional, string) +A comma-separated list of <> to include in the response. end::stored_fields[] tag::sync[] From 05d5abe94e76d26044936c282dcdc28033cad827 Mon Sep 17 00:00:00 2001 From: Andrew Wilkins Date: Thu, 9 May 2024 15:33:07 +0800 Subject: [PATCH 077/117] apm-data: ignore malformed fields, and too many dynamic fields (#108444) * apm-data: ignore_{malformed,dynamic_beyond_limit} Enable ignore_malformed on all non-metrics APM data streams, and enable ignore_dynamic_beyond_limit for all APM data streams. We can enable ignore_malformed on metrics data streams when https://github.com/elastic/elasticsearch/issues/90007 is fixed. 
* Update docs/changelog/108444.yaml --- docs/changelog/108444.yaml | 5 + .../component-templates/apm@settings.yaml | 4 + .../metrics-apm@settings.yaml | 6 ++ .../src/main/resources/resources.yaml | 2 +- .../test/30_lenient_mappings.yml | 100 ++++++++++++++++++ 5 files changed, 116 insertions(+), 1 deletion(-) create mode 100644 docs/changelog/108444.yaml create mode 100644 x-pack/plugin/apm-data/src/yamlRestTest/resources/rest-api-spec/test/30_lenient_mappings.yml diff --git a/docs/changelog/108444.yaml b/docs/changelog/108444.yaml new file mode 100644 index 0000000000000..c946ab24f939a --- /dev/null +++ b/docs/changelog/108444.yaml @@ -0,0 +1,5 @@ +pr: 108444 +summary: "Apm-data: ignore malformed fields, and too many dynamic fields" +area: Data streams +type: enhancement +issues: [] diff --git a/x-pack/plugin/apm-data/src/main/resources/component-templates/apm@settings.yaml b/x-pack/plugin/apm-data/src/main/resources/component-templates/apm@settings.yaml index 3ca15224dafc4..75671948de11a 100644 --- a/x-pack/plugin/apm-data/src/main/resources/component-templates/apm@settings.yaml +++ b/x-pack/plugin/apm-data/src/main/resources/component-templates/apm@settings.yaml @@ -8,3 +8,7 @@ template: sort: field: "@timestamp" order: desc + mapping: + ignore_malformed: true + total_fields: + ignore_dynamic_beyond_limit: true diff --git a/x-pack/plugin/apm-data/src/main/resources/component-templates/metrics-apm@settings.yaml b/x-pack/plugin/apm-data/src/main/resources/component-templates/metrics-apm@settings.yaml index e6c84b6ed06f9..819d5d7eafb8e 100644 --- a/x-pack/plugin/apm-data/src/main/resources/component-templates/metrics-apm@settings.yaml +++ b/x-pack/plugin/apm-data/src/main/resources/component-templates/metrics-apm@settings.yaml @@ -6,3 +6,9 @@ _meta: template: settings: codec: best_compression + mapping: + # apm@settings sets `ignore_malformed: true`, but we need + # to disable this for metrics since they use synthetic source, + # and this combination is 
incompatible with the + # aggregate_metric_double field type. + ignore_malformed: false diff --git a/x-pack/plugin/apm-data/src/main/resources/resources.yaml b/x-pack/plugin/apm-data/src/main/resources/resources.yaml index 0e27e454f867d..772057d4931a3 100644 --- a/x-pack/plugin/apm-data/src/main/resources/resources.yaml +++ b/x-pack/plugin/apm-data/src/main/resources/resources.yaml @@ -1,7 +1,7 @@ # "version" holds the version of the templates and ingest pipelines installed # by xpack-plugin apm-data. This must be increased whenever an existing template or # pipeline is changed, in order for it to be updated on Elasticsearch upgrade. -version: 3 +version: 4 component-templates: # Data lifecycle. diff --git a/x-pack/plugin/apm-data/src/yamlRestTest/resources/rest-api-spec/test/30_lenient_mappings.yml b/x-pack/plugin/apm-data/src/yamlRestTest/resources/rest-api-spec/test/30_lenient_mappings.yml new file mode 100644 index 0000000000000..97265a9b81a75 --- /dev/null +++ b/x-pack/plugin/apm-data/src/yamlRestTest/resources/rest-api-spec/test/30_lenient_mappings.yml @@ -0,0 +1,100 @@ +--- +setup: + - do: + cluster.health: + wait_for_events: languid + + - do: + cluster.put_component_template: + name: "logs-apm.app@custom" + body: + template: + settings: + mapping: + total_fields: + limit: 20 + +--- +"Test ignore_malformed": + - do: + bulk: + index: traces-apm-testing + refresh: true + body: + # Passing a (non-coercable) string into a numeric field should not + # cause an indexing failure; it should just not be indexed. 
+ - create: {} + - '{"@timestamp": "2017-06-22", "numeric_labels": {"key": "string"}}' + - create: {} + - '{"@timestamp": "2017-06-22", "numeric_labels": {"key": 123}}' + + - is_false: errors + + - do: + search: + index: traces-apm-testing + body: + fields: ["numeric_labels.*", "_ignored"] + - length: { hits.hits: 2 } + - match: { hits.hits.0.fields: {"_ignored": ["numeric_labels.key"]} } + - match: { hits.hits.1.fields: {"numeric_labels.key": [123.0]} } + +--- +"Test ignore_dynamic_beyond_limit": + - do: + bulk: + index: logs-apm.app.svc1-testing + refresh: true + body: + - create: {} + - {"@timestamp": "2017-06-22", "k1": ""} + - create: {} + - {"@timestamp": "2017-06-22", "k2": ""} + - create: {} + - {"@timestamp": "2017-06-22", "k3": ""} + - create: {} + - {"@timestamp": "2017-06-22", "k4": ""} + - create: {} + - {"@timestamp": "2017-06-22", "k5": ""} + - create: {} + - {"@timestamp": "2017-06-22", "k6": ""} + - create: {} + - {"@timestamp": "2017-06-22", "k7": ""} + - create: {} + - {"@timestamp": "2017-06-22", "k8": ""} + - create: {} + - {"@timestamp": "2017-06-22", "k9": ""} + - create: {} + - {"@timestamp": "2017-06-22", "k10": ""} + - create: {} + - {"@timestamp": "2017-06-22", "k11": ""} + - create: {} + - {"@timestamp": "2017-06-22", "k12": ""} + - create: {} + - {"@timestamp": "2017-06-22", "k13": ""} + - create: {} + - {"@timestamp": "2017-06-22", "k14": ""} + - create: {} + - {"@timestamp": "2017-06-22", "k15": ""} + - create: {} + - {"@timestamp": "2017-06-22", "k16": ""} + - create: {} + - {"@timestamp": "2017-06-22", "k17": ""} + - create: {} + - {"@timestamp": "2017-06-22", "k18": ""} + - create: {} + - {"@timestamp": "2017-06-22", "k19": ""} + - create: {} + - {"@timestamp": "2017-06-22", "k20": ""} + + - is_false: errors + + - do: + search: + index: logs-apm.app.svc1-testing + body: + query: + term: + _ignored: + value: k20 + - length: { hits.hits: 1 } From fa2f81353e3ec44b3945bfbf3792db2d9c5a6030 Mon Sep 17 00:00:00 2001 From: 
=?UTF-8?q?Istv=C3=A1n=20Zolt=C3=A1n=20Szab=C3=B3?= Date: Thu, 9 May 2024 09:59:56 +0200 Subject: [PATCH 078/117] [DOCS] Adds complete Cohere tutorial (#108415) --- .../search-your-data/cohere-es.asciidoc | 371 ++++++++++++++++++ .../search-your-data/semantic-search.asciidoc | 1 + 2 files changed, 372 insertions(+) create mode 100644 docs/reference/search/search-your-data/cohere-es.asciidoc diff --git a/docs/reference/search/search-your-data/cohere-es.asciidoc b/docs/reference/search/search-your-data/cohere-es.asciidoc new file mode 100644 index 0000000000000..751cfebca8c78 --- /dev/null +++ b/docs/reference/search/search-your-data/cohere-es.asciidoc @@ -0,0 +1,371 @@ +[[cohere-es]] +=== Tutorial: Using Cohere with {es} +++++ +Using Cohere with {es} +++++ + +The instructions in this tutorial shows you how to compute embeddings with +Cohere using the {infer} API and store them for efficient vector or hybrid +search in {es}. This tutorial will use the Python {es} client to perform the +operations. + +You'll learn how to: + +* create an {infer} endpoint for text embedding using the Cohere service, +* create the necessary index mapping for the {es} index, +* build an {infer} pipeline to ingest documents into the index together with the +embeddings, +* perform hybrid search on the data, +* rerank search results by using Cohere's rerank model, +* design a RAG system with Cohere's Chat API. + +The tutorial uses the https://huggingface.co/datasets/mteb/scifact[SciFact] data +set. + +Refer to https://docs.cohere.com/docs/elasticsearch-and-cohere[Cohere's tutorial] +for an example using a different data set. + + +[discrete] +[[cohere-es-req]] +==== Requirements + +* A https://cohere.com/[Cohere account], +* an https://www.elastic.co/guide/en/cloud/current/ec-getting-started.html[Elastic Cloud] +account, +* Python 3.7 or higher. 
+ + +[discrete] +[[cohere-es-packages]] +==== Install required packages + +Install {es} and Cohere: + +[source,py] +------------------------------------------------------------ +!pip install elasticsearch +!pip install cohere +------------------------------------------------------------ + +Import the required packages: + +[source,py] +------------------------------------------------------------ +from elasticsearch import Elasticsearch, helpers +import cohere +import json +import requests +------------------------------------------------------------ + +[discrete] +[[cohere-es-client]] +==== Create the {es} client + +To create your {es} client, you need: +* https://www.elastic.co/search-labs/tutorials/install-elasticsearch/elastic-cloud#finding-your-cloud-id[your Cloud ID], +* https://www.elastic.co/search-labs/tutorials/install-elasticsearch/elastic-cloud#creating-an-api-key[an encoded API key]. + +[source,py] +------------------------------------------------------------ +ELASTICSEARCH_ENDPOINT = "elastic_endpoint" +ELASTIC_API_KEY = "elastic_api_key" + +client = Elasticsearch( + cloud_id=ELASTICSEARCH_ENDPOINT, + api_key=ELASTIC_API_KEY +) + +# Confirm the client has connected +print(client.info()) +------------------------------------------------------------ + + +[discrete] +[[cohere-es-infer-endpoint]] +==== Create the {infer} endpoint + +<> first. In this example, the +{infer} endpoint uses Cohere's `embed-english-v3.0` model and the +`embedding_type` is set to `byte`. 
+ +[source,py] +------------------------------------------------------------ +COHERE_API_KEY = "cohere_api_key" + +client.inference.put_model( + task_type="text_embedding", + inference_id="cohere_embeddings", + body={ + "service": "cohere", + "service_settings": { + "api_key": COHERE_API_KEY, + "model_id": "embed-english-v3.0", + "embedding_type": "byte" + } + }, +) +------------------------------------------------------------ + +You can find your API keys in your Cohere dashboard under the +https://dashboard.cohere.com/api-keys[API keys section]. + + +[discrete] +[[cohere-es-index-mapping]] +==== Create the index mapping + +Create the index mapping for the index that will contain the embeddings. + +[source,py] +------------------------------------------------------------ +client.indices.create( + index="cohere-embeddings", + settings={"index": {"default_pipeline": "cohere_embeddings"}}, + mappings={ + "properties": { + "text_embedding": { + "type": "dense_vector", + "dims": 1024, + "element_type": "byte", + }, + "text": {"type": "text"}, + "id": {"type": "integer"}, + "title": {"type": "text"} + } + }, +) +------------------------------------------------------------ + + +[discrete] +[[cohere-es-infer-pipeline]] +==== Create the {infer} pipeline + +Now you have an {infer} endpoint and an index ready to store embeddings. The +next step is to create an <> with an +<> that will create the embeddings using +the {infer} endpoint and stores them in the index. 
+ +[source,py] +-------------------------------------------------- +client.ingest.put_pipeline( + id="cohere_embeddings", + description="Ingest pipeline for Cohere inference.", + processors=[ + { + "inference": { + "model_id": "cohere_embeddings", + "input_output": { + "input_field": "text", + "output_field": "text_embedding", + }, + } + } + ], +) +-------------------------------------------------- + + +[discrete] +[[cohere-es-insert-documents]] +==== Prepare data and insert documents + +This example uses the https://huggingface.co/datasets/mteb/scifact[SciFact] data +set that you can find on HuggingFace. + +[source,py] +-------------------------------------------------- +url = 'https://huggingface.co/datasets/mteb/scifact/raw/main/corpus.jsonl' + +# Fetch the JSONL data from the URL +response = requests.get(url) +response.raise_for_status() # Ensure we notice bad responses + +# Split the content by new lines and parse each line as JSON +data = [json.loads(line) for line in response.text.strip().split('\n') if line] +# Now data is a list of dictionaries + +# Change `_id` key to `id` as `_id` is a reserved key in Elasticsearch. +for item in data: + if '_id' in item: + item['id'] = item.pop('_id') + +# Prepare the documents to be indexed +documents = [] +for line in data: + data_dict = line + documents.append({ + "_index": "cohere-embeddings", + "_source": data_dict, + } + ) + +# Use the bulk endpoint to index +helpers.bulk(client, documents) + +print("Data ingestion completed, text embeddings generated!") +-------------------------------------------------- + +Your index is populated with the SciFact data and text embeddings for the text +field. + + +[discrete] +[[cohere-es-hybrid-search]] +==== Hybrid search + +Let's start querying the index! + +The code below performs a hybrid search. 
The `kNN` query computes the relevance +of search results based on vector similarity using the `text_embedding` field, +the lexical search query uses BM25 retrieval to compute keyword similarity on +the `title` and `text` fields. + +[source,py] +-------------------------------------------------- +query = "What is biosimilarity?" + +response = client.search( + index="cohere-embeddings", + size=100, + knn={ + "field": "text_embedding", + "query_vector_builder": { + "text_embedding": { + "model_id": "cohere_embeddings", + "model_text": query, + } + }, + "k": 10, + "num_candidates": 50, + }, + query={ + "multi_match": { + "query": query, + "fields": ["text", "title"] + } + } +) + +raw_documents = response["hits"]["hits"] + +# Display the first 10 results +for document in raw_documents[0:10]: + print(f'Title: {document["_source"]["title"]}\nText: {document["_source"]["text"]}\n') + +# Format the documents for ranking +documents = [] +for hit in response["hits"]["hits"]: + documents.append(hit["_source"]["text"]) +-------------------------------------------------- + + +[discrete] +[[cohere-es-rerank-results]] +===== Rerank search results + +To combine the results more effectively, use +https://docs.cohere.com/docs/rerank-2[Cohere's Rerank v3] model through the +{infer} API to provide a more precise semantic reranking of the results. + +Create an {infer} endpoint with your Cohere API key and the used model name as +the `model_id` (`rerank-english-v3.0` in this example). + +[source,py] +-------------------------------------------------- +client.inference.put_model( + task_type="rerank", + inference_id="cohere_rerank", + body={ + "service": "cohere", + "service_settings":{ + "api_key": COHERE_API_KEY, + "model_id": "rerank-english-v3.0" + }, + "task_settings": { + "top_n": 10, + }, + } +) +-------------------------------------------------- + +Rerank the results using the new {infer} endpoint. 
+
+[source,py]
+--------------------------------------------------
+# Pass the query and the search results to the service
+response = client.inference.inference(
+    inference_id="cohere_rerank",
+    body={
+        "query": query,
+        "input": documents,
+        "task_settings": {
+            "return_documents": False
+        }
+    }
+)
+
+# Reconstruct the input documents based on the index provided in the rerank response
+ranked_documents = []
+for document in response.body["rerank"]:
+    ranked_documents.append({
+        "title": raw_documents[int(document["index"])]["_source"]["title"],
+        "text": raw_documents[int(document["index"])]["_source"]["text"]
+    })
+
+# Print the top 10 results
+for document in ranked_documents[0:10]:
+    print(f"Title: {document['title']}\nText: {document['text']}\n")
+--------------------------------------------------
+
+The response is a list of documents in descending order of relevance. Each
+document has a corresponding index that reflects the order of the documents when
+they were sent to the {infer} endpoint.
+
+
+[discrete]
+[[cohere-es-rag]]
+==== Retrieval Augmented Generation (RAG) with Cohere and {es}
+
+RAG is a method for generating text using additional information fetched from an
+external data source. With the ranked results, you can build a RAG system on the
+top of what you previously created by using
+https://docs.cohere.com/docs/chat-api[Cohere's Chat API].
+
+Pass in the retrieved documents and the query to receive a grounded response
+using Cohere's newest generative model
+https://docs.cohere.com/docs/command-r-plus[Command R+].
+
+Then pass in the query and the documents to the Chat API, and print out the
+response.
+
+[source,py]
+--------------------------------------------------
+response = co.chat(message=query, documents=ranked_documents, model='command-r-plus')
+
+source_documents = []
+for citation in response.citations:
+    for document_id in citation.document_ids:
+        if document_id not in source_documents:
+            source_documents.append(document_id)
+
+print(f"Query: {query}")
+print(f"Response: {response.text}")
+print("Sources:")
+for document in response.documents:
+    if document['id'] in source_documents:
+        print(f"{document['title']}: {document['text']}")
+
+--------------------------------------------------
+
+The response will look similar to this:
+
+[source,console-result]
+--------------------------------------------------
+Query: What is biosimilarity?
+Response: Biosimilarity is based on the comparability concept, which has been used successfully for several decades to ensure close similarity of a biological product before and after a manufacturing change. Over the last 10 years, experience with biosimilars has shown that even complex biotechnology-derived proteins can be copied successfully.
+Sources:
+Interchangeability of Biosimilars: A European Perspective: (...)
+-------------------------------------------------- +// NOTCONSOLE diff --git a/docs/reference/search/search-your-data/semantic-search.asciidoc b/docs/reference/search/search-your-data/semantic-search.asciidoc index a4d892c98645b..a1197e7bbbd3a 100644 --- a/docs/reference/search/search-your-data/semantic-search.asciidoc +++ b/docs/reference/search/search-your-data/semantic-search.asciidoc @@ -136,3 +136,4 @@ include::{es-ref-dir}/tab-widgets/semantic-search/hybrid-search-widget.asciidoc[ include::semantic-search-elser.asciidoc[] include::semantic-search-inference.asciidoc[] +include::cohere-es.asciidoc[] From 864543b305da2eb16a5ade5e1453cdba245e7283 Mon Sep 17 00:00:00 2001 From: David Turner Date: Thu, 9 May 2024 09:37:56 +0100 Subject: [PATCH 079/117] Move conceptual docs about `ActionListener` (#107875) This information is more discoverable as the class-level javadocs for `ActionListener` itself rather than hidden away in a separate Markdown file. Also this way the links all stay up to date. --- docs/internal/DistributedArchitectureGuide.md | 65 +------------- .../elasticsearch/action/ActionListener.java | 85 ++++++++++++++++++- 2 files changed, 82 insertions(+), 68 deletions(-) diff --git a/docs/internal/DistributedArchitectureGuide.md b/docs/internal/DistributedArchitectureGuide.md index 7f10a1b3a8cae..59305c6305737 100644 --- a/docs/internal/DistributedArchitectureGuide.md +++ b/docs/internal/DistributedArchitectureGuide.md @@ -10,70 +10,7 @@ ### ActionListener -Callbacks are used extensively throughout Elasticsearch because they enable us to write asynchronous and nonblocking code, i.e. code which -doesn't necessarily compute a result straight away but also doesn't block the calling thread waiting for the result to become available. -They support several useful control flows: - -- They can be completed immediately on the calling thread. -- They can be completed concurrently on a different thread. 
-- They can be stored in a data structure and completed later on when the system reaches a particular state. -- Most commonly, they can be passed on to other methods that themselves require a callback. -- They can be wrapped in another callback which modifies the behaviour of the original callback, perhaps adding some extra code to run - before or after completion, before passing them on. - -`ActionListener` is a general-purpose callback interface that is used extensively across the Elasticsearch codebase. `ActionListener` is -used pretty much everywhere that needs to perform some asynchronous and nonblocking computation. The uniformity makes it easier to compose -parts of the system together without needing to build adapters to convert back and forth between different kinds of callback. It also makes -it easier to develop the skills needed to read and understand all the asynchronous code, although this definitely takes practice and is -certainly not easy in an absolute sense. Finally, it has allowed us to build a rich library for working with `ActionListener` instances -themselves, creating new instances out of existing ones and completing them in interesting ways. 
See for instance: - -- all the static methods on [ActionListener](https://github.com/elastic/elasticsearch/blob/v8.12.2/server/src/main/java/org/elasticsearch/action/ActionListener.java) itself -- [`ThreadedActionListener`](https://github.com/elastic/elasticsearch/blob/v8.12.2/server/src/main/java/org/elasticsearch/action/support/ThreadedActionListener.java) for forking work elsewhere -- [`RefCountingListener`](https://github.com/elastic/elasticsearch/blob/v8.12.2/server/src/main/java/org/elasticsearch/action/support/RefCountingListener.java) for running work in parallel -- [`SubscribableListener`](https://github.com/elastic/elasticsearch/blob/v8.12.2/server/src/main/java/org/elasticsearch/action/support/SubscribableListener.java) for constructing flexible workflows - -Callback-based asynchronous code can easily call regular synchronous code, but synchronous code cannot run callback-based asynchronous code -without blocking the calling thread until the callback is called back. This blocking is at best undesirable (threads are too expensive to -waste with unnecessary blocking) and at worst outright broken (the blocking can lead to deadlock). Unfortunately this means that most of our -code ends up having to be written with callbacks, simply because it's ultimately calling into some other code that takes a callback. The -entry points for all Elasticsearch APIs are callback-based (e.g. REST APIs all start at -[`org.elasticsearch.rest.BaseRestHandler#prepareRequest`](https://github.com/elastic/elasticsearch/blob/v8.12.2/server/src/main/java/org/elasticsearch/rest/BaseRestHandler.java#L158-L171), -and transport APIs all start at -[`org.elasticsearch.action.support.TransportAction#doExecute`](https://github.com/elastic/elasticsearch/blob/v8.12.2/server/src/main/java/org/elasticsearch/action/support/TransportAction.java#L65)) -and the whole system fundamentally works in terms of an event loop (a `io.netty.channel.EventLoop`) which processes network events via -callbacks. 
- -`ActionListener` is not an _ad-hoc_ invention. Formally speaking, it is our implementation of the general concept of a continuation in the -sense of [_continuation-passing style_](https://en.wikipedia.org/wiki/Continuation-passing_style) (CPS): an extra argument to a function -which defines how to continue the computation when the result is available. This is in contrast to _direct style_ which is the more usual -style of calling methods that return values directly back to the caller so they can continue executing as normal. There's essentially two -ways that computation can continue in Java (it can return a value or it can throw an exception) which is why `ActionListener` has both an -`onResponse()` and an `onFailure()` method. - -CPS is strictly more expressive than direct style: direct code can be mechanically translated into continuation-passing style, but CPS also -enables all sorts of other useful control structures such as forking work onto separate threads, possibly to be executed in parallel, -perhaps even across multiple nodes, or possibly collecting a list of continuations all waiting for the same condition to be satisfied before -proceeding (e.g. -[`SubscribableListener`](https://github.com/elastic/elasticsearch/blob/v8.12.2/server/src/main/java/org/elasticsearch/action/support/SubscribableListener.java) -amongst many others). Some languages have first-class support for continuations (e.g. the `async` and `await` primitives in C#) allowing the -programmer to write code in direct style away from those exotic control structures, but Java does not. That's why we have to manipulate all -the callbacks ourselves. - -Strictly speaking, CPS requires that a computation _only_ continues by calling the continuation. In Elasticsearch, this means that -asynchronous methods must have `void` return type and may not throw any exceptions. 
This is mostly the case in our code as written today, -and is a good guiding principle, but we don't enforce void exceptionless methods and there are some deviations from this rule. In -particular, it's not uncommon to permit some methods to throw an exception, using things like -[`ActionListener#run`](https://github.com/elastic/elasticsearch/blob/v8.12.2/server/src/main/java/org/elasticsearch/action/ActionListener.java#L381-L390) -(or an equivalent `try ... catch ...` block) further up the stack to handle it. Some methods also take (and may complete) an -`ActionListener` parameter, but still return a value separately for other local synchronous work. - -This pattern is often used in the transport action layer with the use of the -[ChannelActionListener](https://github.com/elastic/elasticsearch/blob/v8.12.2/server/src/main/java/org/elasticsearch/action/support/ChannelActionListener.java) -class, which wraps a `TransportChannel` produced by the transport layer. `TransportChannel` implementations can hold a reference to a Netty -channel with which to pass the response back to the network caller. Netty has a many-to-one association of network callers to channels, so a -call taking a long time generally won't hog resources: it's cheap. A transport action can take hours to respond and that's alright, barring -caller timeouts. +See the [Javadocs for `ActionListener`](https://github.com/elastic/elasticsearch/blob/main/server/src/main/java/org/elasticsearch/action/ActionListener.java) (TODO: add useful starter references and explanations for a range of Listener classes. Reference the Netty section.) 
diff --git a/server/src/main/java/org/elasticsearch/action/ActionListener.java b/server/src/main/java/org/elasticsearch/action/ActionListener.java index d07717857169b..21f3df2ab7175 100644 --- a/server/src/main/java/org/elasticsearch/action/ActionListener.java +++ b/server/src/main/java/org/elasticsearch/action/ActionListener.java @@ -31,17 +31,94 @@ import static org.elasticsearch.action.ActionListenerImplementations.safeOnFailure; /** - * A listener for action responses or failures. + *

+ * Callbacks are used extensively throughout Elasticsearch because they enable us to write asynchronous and nonblocking code, i.e. code + * which doesn't necessarily compute a result straight away but also doesn't block the calling thread waiting for the result to become + * available. They support several useful control flows: + *

+ *
    + *
  • They can be completed immediately on the calling thread.
  • + *
  • They can be completed concurrently on a different thread.
  • + *
  • They can be stored in a data structure and completed later on when the system reaches a particular state.
  • + *
  • Most commonly, they can be passed on to other methods that themselves require a callback.
  • + *
  • They can be wrapped in another callback which modifies the behaviour of the original callback, perhaps adding some extra code to run + * before or after completion, before passing them on.
  • + *
+ *

+ * {@link ActionListener} is a general-purpose callback interface that is used extensively across the Elasticsearch codebase. {@link + * ActionListener} is used pretty much everywhere that needs to perform some asynchronous and nonblocking computation. The uniformity makes + * it easier to compose parts of the system together without needing to build adapters to convert back and forth between different kinds of + * callback. It also makes it easier to develop the skills needed to read and understand all the asynchronous code, although this definitely + * takes practice and is certainly not easy in an absolute sense. Finally, it has allowed us to build a rich library for working with {@link + * ActionListener} instances themselves, creating new instances out of existing ones and completing them in interesting ways. See for + * instance: + *

+ *
    + *
  • All the static methods on {@link ActionListener} itself.
  • + *
  • {@link org.elasticsearch.action.support.ThreadedActionListener} for forking work elsewhere.
  • + *
  • {@link org.elasticsearch.action.support.RefCountingListener} for running work in parallel.
  • + *
  • {@link org.elasticsearch.action.support.SubscribableListener} for constructing flexible workflows.
  • + *
+ *

+ * Callback-based asynchronous code can easily call regular synchronous code, but synchronous code cannot run callback-based asynchronous
+ * code without blocking the calling thread until the callback is called back. This blocking is at best undesirable (threads are too
+ * expensive to waste with unnecessary blocking) and at worst outright broken (the blocking can lead to deadlock). Unfortunately this means
+ * that most of our code ends up having to be written with callbacks, simply because it's ultimately calling into some other code that takes
+ * a callback. The entry points for all Elasticsearch APIs are callback-based (e.g. REST APIs all start at {@link
+ * org.elasticsearch.rest.BaseRestHandler}{@code #prepareRequest} and transport APIs all start at {@link
+ * org.elasticsearch.action.support.TransportAction}{@code #doExecute}) and the whole system fundamentally works in terms of an event loop
+ * (an {@code io.netty.channel.EventLoop}) which processes network events via callbacks.

+ *

+ * {@link ActionListener} is not an ad-hoc invention. Formally speaking, it is our implementation of the general concept of a + * continuation in the sense of continuation-passing style + * (CPS): an extra argument to a function which defines how to continue the computation when the result is available. This is in contrast to + * direct style which is the more usual style of calling methods that return values directly back to the caller so they can continue + * executing as normal. There's essentially two ways that computation can continue in Java (it can return a value or it can throw an + * exception) which is why {@link ActionListener} has both an {@link #onResponse} and an {@link #onFailure} method. + *

+ *

+ * CPS is strictly more expressive than direct style: direct code can be mechanically translated into continuation-passing style, but CPS + * also enables all sorts of other useful control structures such as forking work onto separate threads, possibly to be executed in + * parallel, perhaps even across multiple nodes, or possibly collecting a list of continuations all waiting for the same condition to be + * satisfied before proceeding (e.g. {@link org.elasticsearch.action.support.SubscribableListener} amongst many others). Some languages have + * first-class support for continuations (e.g. the {@code async} and {@code await} primitives in C#) allowing the programmer to write code + * in direct style away from those exotic control structures, but Java does not. That's why we have to manipulate all the callbacks + * ourselves. + *

+ *

+ * Strictly speaking, CPS requires that a computation only continues by calling the continuation. In Elasticsearch, this means that + * asynchronous methods must have {@code void} return type and may not throw any exceptions. This is mostly the case in our code as written + * today, and is a good guiding principle, but we don't enforce void exceptionless methods and there are some deviations from this rule. In + * particular, it's not uncommon to permit some methods to throw an exception, using things like {@link ActionListener#run} (or an + * equivalent {@code try ... catch ...} block) further up the stack to handle it. Some methods also take (and may complete) an {@link + * ActionListener} parameter, but still return a value separately for other local synchronous work. + *

+ *

+ * This pattern is often used in the transport action layer with the use of the {@link
+ * org.elasticsearch.action.support.ChannelActionListener} class, which wraps a {@link org.elasticsearch.transport.TransportChannel}
+ * produced by the transport layer. {@link org.elasticsearch.transport.TransportChannel} implementations can hold a reference to a Netty
+ * channel with which to pass the response back to the network caller. Netty has a many-to-one association of network callers to channels,
+ * so a call taking a long time generally won't hog resources: it's cheap. A transport action can take hours to respond and that's alright,
+ * barring caller timeouts.

+ *

+ * Note that we explicitly avoid {@link java.util.concurrent.CompletableFuture} and other similar mechanisms as much as possible. They + * can achieve the same goals as {@link ActionListener}, but can also easily be misused in various ways that lead to severe bugs. In + * particular, futures support blocking while waiting for a result, but this is almost never appropriate in Elasticsearch's production code + * where threads are such a precious resource. Moreover if something throws an {@link Error} then the JVM should exit pretty much straight + * away, but {@link java.util.concurrent.CompletableFuture} can catch an {@link Error} which delays the JVM exit until its result is + * observed. This may be much later, or possibly even never. It's not possible to introduce such bugs when using {@link ActionListener}. + *

*/ public interface ActionListener { /** - * Handle action response. This response may constitute a failure or a - * success but it is up to the listener to make that decision. + * Complete this listener with a successful (or at least, non-exceptional) response. */ void onResponse(Response response); /** - * A failure caused by an exception at some phase of the task. + * Complete this listener with an exceptional response. */ void onFailure(Exception e); From f5b356d11c3a6760a9487ef38587db0f5a8d06b7 Mon Sep 17 00:00:00 2001 From: Simon Cooper Date: Thu, 9 May 2024 10:15:56 +0100 Subject: [PATCH 080/117] Fix race in SpawnerNoBootstrapTests (#108416) --- .../bootstrap/SpawnerNoBootstrapTests.java | 16 +++++++++------- 1 file changed, 9 insertions(+), 7 deletions(-) diff --git a/qa/no-bootstrap-tests/src/test/java/org/elasticsearch/bootstrap/SpawnerNoBootstrapTests.java b/qa/no-bootstrap-tests/src/test/java/org/elasticsearch/bootstrap/SpawnerNoBootstrapTests.java index c4aa3c9b1f1e6..08e3ac2cbce8c 100644 --- a/qa/no-bootstrap-tests/src/test/java/org/elasticsearch/bootstrap/SpawnerNoBootstrapTests.java +++ b/qa/no-bootstrap-tests/src/test/java/org/elasticsearch/bootstrap/SpawnerNoBootstrapTests.java @@ -39,8 +39,10 @@ import java.util.concurrent.TimeUnit; import java.util.function.Function; +import static org.hamcrest.Matchers.empty; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasSize; +import static org.hamcrest.Matchers.is; /** * Create a simple "daemon controller", put it in the right place and check that it runs. 
@@ -70,13 +72,13 @@ public class SpawnerNoBootstrapTests extends LuceneTestCase { static class ExpectedStreamMessage implements MockLogAppender.LoggingExpectation { final String expectedLogger; final String expectedMessage; - final CountDownLatch matchCalledLatch; - boolean saw; + final CountDownLatch matched; + volatile boolean saw; - ExpectedStreamMessage(String logger, String message, CountDownLatch matchCalledLatch) { + ExpectedStreamMessage(String logger, String message, CountDownLatch matched) { this.expectedLogger = logger; this.expectedMessage = message; - this.matchCalledLatch = matchCalledLatch; + this.matched = matched; } @Override @@ -85,8 +87,8 @@ public void match(LogEvent event) { && event.getLevel().equals(Level.WARN) && event.getMessage().getFormattedMessage().equals(expectedMessage)) { saw = true; + matched.countDown(); } - matchCalledLatch.countDown(); } @Override @@ -130,7 +132,7 @@ public void testNoControllerSpawn() throws IOException { try (Spawner spawner = new Spawner()) { spawner.spawnNativeControllers(environment); - assertThat(spawner.getProcesses(), hasSize(0)); + assertThat(spawner.getProcesses(), is(empty())); } } @@ -229,7 +231,7 @@ private void assertControllerSpawns(final Function pluginsDir // fail if the process does not die within one second; usually it will be even quicker but it depends on OS scheduling assertTrue(process.waitFor(1, TimeUnit.SECONDS)); } else { - assertThat(processes, hasSize(0)); + assertThat(processes, is(empty())); } appender.assertAllExpectationsMatched(); } From 06a07587699f9c39b4c793e45f3576c44fa81885 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Istv=C3=A1n=20Zolt=C3=A1n=20Szab=C3=B3?= Date: Thu, 9 May 2024 15:09:11 +0200 Subject: [PATCH 081/117] [DOCS] Fixes typo in Cohere ES tutorial (#108456) * [DOCS] Fixes typo in Cohere ES tutorial. * [DOCS] Fixes list. 
--- docs/reference/search/search-your-data/cohere-es.asciidoc | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/docs/reference/search/search-your-data/cohere-es.asciidoc b/docs/reference/search/search-your-data/cohere-es.asciidoc index 751cfebca8c78..f12f23ad2c5dc 100644 --- a/docs/reference/search/search-your-data/cohere-es.asciidoc +++ b/docs/reference/search/search-your-data/cohere-es.asciidoc @@ -38,7 +38,7 @@ account, [discrete] [[cohere-es-packages]] -==== Istall required packages +==== Install required packages Install {es} and Cohere: @@ -63,6 +63,7 @@ import requests ==== Create the {es} client To create your {es} client, you need: + * https://www.elastic.co/search-labs/tutorials/install-elasticsearch/elastic-cloud#finding-your-cloud-id[your Cloud ID], * https://www.elastic.co/search-labs/tutorials/install-elasticsearch/elastic-cloud#creating-an-api-key[an encoded API key]. From 5a612d4100133b28730dfacb1eaa94a749183ce1 Mon Sep 17 00:00:00 2001 From: Nik Everett Date: Thu, 9 May 2024 09:32:46 -0400 Subject: [PATCH 082/117] ESQL: Remove remaining IT_tests_only (#108434) This moves examples from files marked to run in integration tests only to the files where they belong and disables this pattern matching. We now use supported features. 
--- .../esql/processing-commands/enrich.asciidoc | 16 ++--- .../resources/docs-IT_tests_only.csv-spec | 67 ------------------- .../src/main/resources/enrich.csv-spec | 54 ++++++++++++++- .../elasticsearch/xpack/esql/CsvTests.java | 9 ++- 4 files changed, 64 insertions(+), 82 deletions(-) delete mode 100644 x-pack/plugin/esql/qa/testFixtures/src/main/resources/docs-IT_tests_only.csv-spec diff --git a/docs/reference/esql/processing-commands/enrich.asciidoc b/docs/reference/esql/processing-commands/enrich.asciidoc index f73eea6018cbc..5470d81b2f40b 100644 --- a/docs/reference/esql/processing-commands/enrich.asciidoc +++ b/docs/reference/esql/processing-commands/enrich.asciidoc @@ -57,11 +57,11 @@ in this example). `ENRICH` will look for records in the [source.merge.styled,esql] ---- -include::{esql-specs}/docs-IT_tests_only.csv-spec[tag=enrich] +include::{esql-specs}/enrich.csv-spec[tag=enrich] ---- [%header.monospaced.styled,format=dsv,separator=|] |=== -include::{esql-specs}/docs-IT_tests_only.csv-spec[tag=enrich-result] +include::{esql-specs}/enrich.csv-spec[tag=enrich-result] |=== To use a column with a different name than the `match_field` defined in the @@ -69,11 +69,11 @@ policy as the match field, use `ON `: [source.merge.styled,esql] ---- -include::{esql-specs}/docs-IT_tests_only.csv-spec[tag=enrich_on] +include::{esql-specs}/enrich.csv-spec[tag=enrich_on] ---- [%header.monospaced.styled,format=dsv,separator=|] |=== -include::{esql-specs}/docs-IT_tests_only.csv-spec[tag=enrich_on-result] +include::{esql-specs}/enrich.csv-spec[tag=enrich_on-result] |=== By default, each of the enrich fields defined in the policy is added as a @@ -82,22 +82,22 @@ column. 
To explicitly select the enrich fields that are added, use [source.merge.styled,esql] ---- -include::{esql-specs}/docs-IT_tests_only.csv-spec[tag=enrich_with] +include::{esql-specs}/enrich.csv-spec[tag=enrich_with] ---- [%header.monospaced.styled,format=dsv,separator=|] |=== -include::{esql-specs}/docs-IT_tests_only.csv-spec[tag=enrich_with-result] +include::{esql-specs}/enrich.csv-spec[tag=enrich_with-result] |=== You can rename the columns that are added using `WITH new_name=`: [source.merge.styled,esql] ---- -include::{esql-specs}/docs-IT_tests_only.csv-spec[tag=enrich_rename] +include::{esql-specs}/enrich.csv-spec[tag=enrich_rename] ---- [%header.monospaced.styled,format=dsv,separator=|] |=== -include::{esql-specs}/docs-IT_tests_only.csv-spec[tag=enrich_rename-result] +include::{esql-specs}/enrich.csv-spec[tag=enrich_rename-result] |=== In case of name collisions, the newly created columns will override existing diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/docs-IT_tests_only.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/docs-IT_tests_only.csv-spec deleted file mode 100644 index f4bf2333cae86..0000000000000 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/docs-IT_tests_only.csv-spec +++ /dev/null @@ -1,67 +0,0 @@ -// This file contains any ESQL snippets from the docs that don't have a home -// anywhere else. The Isle of Misfit Toys. When you need to add new examples -// for the docs you should try to convert an existing test first. Just add -// the comments in whatever file the test already lives in. If you have to -// write a new test to make an example in the docs then put it in whatever -// file matches it's "theme" best. Put it next to similar tests. Not here. - -// Also! When Nik originally extracted examples from the docs to make them -// testable he didn't spend a lot of time putting the docs into appropriate -// files. He just made this one. He didn't put his toys away. 
We'd be better -// off not adding to this strange toy-pile and instead moving things into -// the appropriate files. - -enrich -// tag::enrich[] -ROW language_code = "1" -| ENRICH languages_policy -// end::enrich[] -; - -// tag::enrich-result[] -language_code:keyword | language_name:keyword -1 | English -// end::enrich-result[] -; - - -enrichOn -// tag::enrich_on[] -ROW a = "1" -| ENRICH languages_policy ON a -// end::enrich_on[] -; - -// tag::enrich_on-result[] -a:keyword | language_name:keyword -1 | English -// end::enrich_on-result[] -; - - -enrichWith -// tag::enrich_with[] -ROW a = "1" -| ENRICH languages_policy ON a WITH language_name -// end::enrich_with[] -; - -// tag::enrich_with-result[] -a:keyword | language_name:keyword -1 | English -// end::enrich_with-result[] -; - - -enrichRename -// tag::enrich_rename[] -ROW a = "1" -| ENRICH languages_policy ON a WITH name = language_name -// end::enrich_rename[] -; - -// tag::enrich_rename-result[] -a:keyword | name:keyword -1 | English -// end::enrich_rename-result[] -; diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/enrich.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/enrich.csv-spec index e84e79748c179..f044989ec9cce 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/enrich.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/enrich.csv-spec @@ -34,12 +34,31 @@ median_duration:double | env:keyword simple required_feature: esql.enrich_load -row language_code = "1" -| enrich languages_policy +// tag::enrich[] +ROW language_code = "1" +| ENRICH languages_policy +// end::enrich[] ; +// tag::enrich-result[] language_code:keyword | language_name:keyword 1 | English +// end::enrich-result[] +; + +enrichOnSimple +required_feature: esql.enrich_load + +// tag::enrich_on[] +ROW a = "1" +| ENRICH languages_policy ON a +// end::enrich_on[] +; + +// tag::enrich_on-result[] +a:keyword | language_name:keyword +1 | English +// end::enrich_on-result[] ; @@ 
-83,6 +102,22 @@ emp_no:integer | x:keyword | language_name:keyword ; +withSimple +required_feature: esql.enrich_load + +// tag::enrich_with[] +ROW a = "1" +| ENRICH languages_policy ON a WITH language_name +// end::enrich_with[] +; + +// tag::enrich_with-result[] +a:keyword | language_name:keyword +1 | English +// end::enrich_with-result[] +; + + withAlias required_feature: esql.enrich_load @@ -95,6 +130,21 @@ emp_no:integer | x:keyword | lang:keyword 10003 | 4 | German ; +withAliasSimple +required_feature: esql.enrich_load + +// tag::enrich_rename[] +ROW a = "1" +| ENRICH languages_policy ON a WITH name = language_name +// end::enrich_rename[] +; + +// tag::enrich_rename-result[] +a:keyword | name:keyword +1 | English +// end::enrich_rename-result[] +; + withAliasSort required_feature: esql.enrich_load diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java index 3539138e670eb..86f595810a49c 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java @@ -110,6 +110,8 @@ import static org.elasticsearch.xpack.ql.CsvSpecReader.specParser; import static org.elasticsearch.xpack.ql.TestUtils.classpathResources; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.greaterThan; +import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.notNullValue; @@ -144,7 +146,6 @@ public class CsvTests extends ESTestCase { private static final Logger LOGGER = LogManager.getLogger(CsvTests.class); - private static final String IGNORED_CSV_FILE_NAMES_PATTERN = "-IT_tests_only"; private final String fileName; private final String groupName; @@ -164,10 +165,8 @@ public class CsvTests extends ESTestCase { @ParametersFactory(argumentFormatting = "%2$s.%3$s") public static List 
readScriptSpec() throws Exception { - List urls = classpathResources("/*.csv-spec").stream() - .filter(x -> x.toString().contains(IGNORED_CSV_FILE_NAMES_PATTERN) == false) - .toList(); - assertTrue("Not enough specs found " + urls, urls.size() > 0); + List urls = classpathResources("/*.csv-spec"); + assertThat("Not enough specs found " + urls, urls, hasSize(greaterThan(0))); return SpecReader.readScriptSpec(urls, specParser()); } From e178684d32a32b7222547cd09bc6c21ce58235c2 Mon Sep 17 00:00:00 2001 From: Mayya Sharipova Date: Thu, 9 May 2024 10:33:14 -0400 Subject: [PATCH 083/117] Correct typo in documentation (#108462) Correct a small typo: one closing ">" was missing. --- .../tab-widgets/semantic-search/hybrid-search.asciidoc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/reference/tab-widgets/semantic-search/hybrid-search.asciidoc b/docs/reference/tab-widgets/semantic-search/hybrid-search.asciidoc index 47403df450bd2..93edc0918614d 100644 --- a/docs/reference/tab-widgets/semantic-search/hybrid-search.asciidoc +++ b/docs/reference/tab-widgets/semantic-search/hybrid-search.asciidoc @@ -1,7 +1,7 @@ // tag::elser[] Hybrid search between a semantic and lexical query can be achieved by using an -< as part of your search request. Provide a +<> as part of your search request. Provide a `text_expansion` query and a full-text query as <> for the `rrf` retriever. The `rrf` retriever uses <> to rank the top documents. 
From 4dcbc3bd76eeb9f25334d9fadf7c6e8d8bb3d6eb Mon Sep 17 00:00:00 2001 From: Kostas Krikellas <131142368+kkrik-es@users.noreply.github.com> Date: Thu, 9 May 2024 17:34:06 +0300 Subject: [PATCH 084/117] Version-guard checking for lossy params in _source (#108460) --- .../elasticsearch/index/IndexVersions.java | 1 + .../index/mapper/SourceFieldMapper.java | 13 ++++-- .../mapper/DynamicFieldsBuilderTests.java | 2 +- .../index/mapper/SourceFieldMapperTests.java | 42 +++++++++++++++++++ .../query/SearchExecutionContextTests.java | 2 +- 5 files changed, 54 insertions(+), 6 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/index/IndexVersions.java b/server/src/main/java/org/elasticsearch/index/IndexVersions.java index 78f07c8a137b9..f076ee0be5540 100644 --- a/server/src/main/java/org/elasticsearch/index/IndexVersions.java +++ b/server/src/main/java/org/elasticsearch/index/IndexVersions.java @@ -105,6 +105,7 @@ private static IndexVersion def(int id, Version luceneVersion) { public static final IndexVersion TIME_SERIES_ROUTING_HASH_IN_ID = def(8_504_00_0, Version.LUCENE_9_10_0); public static final IndexVersion DEFAULT_DENSE_VECTOR_TO_INT8_HNSW = def(8_505_00_0, Version.LUCENE_9_10_0); public static final IndexVersion DOC_VALUES_FOR_IGNORED_META_FIELD = def(8_505_00_1, Version.LUCENE_9_10_0); + public static final IndexVersion SOURCE_MAPPER_LOSSY_PARAMS_CHECK = def(8_506_00_0, Version.LUCENE_9_10_0); /* * STOP! READ THIS FIRST! 
No, really, diff --git a/server/src/main/java/org/elasticsearch/index/mapper/SourceFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/SourceFieldMapper.java index 233faf462400b..1b6d6dd1141f4 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/SourceFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/SourceFieldMapper.java @@ -134,10 +134,11 @@ public static class Builder extends MetadataFieldMapper.Builder { private final boolean supportsNonDefaultParameterValues; - public Builder(IndexMode indexMode, final Settings settings) { + public Builder(IndexMode indexMode, final Settings settings, boolean supportsCheckForNonDefaultParams) { super(Defaults.NAME); this.indexMode = indexMode; - this.supportsNonDefaultParameterValues = settings.getAsBoolean(LOSSY_PARAMETERS_ALLOWED_SETTING_NAME, true); + this.supportsNonDefaultParameterValues = supportsCheckForNonDefaultParams == false + || settings.getAsBoolean(LOSSY_PARAMETERS_ALLOWED_SETTING_NAME, true); } public Builder setSynthetic() { @@ -212,7 +213,11 @@ public SourceFieldMapper build() { c -> c.getIndexSettings().getMode() == IndexMode.TIME_SERIES ? c.getIndexSettings().getIndexVersionCreated().onOrAfter(IndexVersions.V_8_7_0) ? 
TSDB_DEFAULT : TSDB_LEGACY_DEFAULT : DEFAULT, - c -> new Builder(c.getIndexSettings().getMode(), c.getSettings()) + c -> new Builder( + c.getIndexSettings().getMode(), + c.getSettings(), + c.indexVersionCreated().onOrAfter(IndexVersions.SOURCE_MAPPER_LOSSY_PARAMS_CHECK) + ) ); static final class SourceFieldType extends MappedFieldType { @@ -347,7 +352,7 @@ protected String contentType() { @Override public FieldMapper.Builder getMergeBuilder() { - return new Builder(indexMode, Settings.EMPTY).init(this); + return new Builder(indexMode, Settings.EMPTY, false).init(this); } /** diff --git a/server/src/test/java/org/elasticsearch/index/mapper/DynamicFieldsBuilderTests.java b/server/src/test/java/org/elasticsearch/index/mapper/DynamicFieldsBuilderTests.java index 229e2e6f72cc1..bab046d41b6e5 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/DynamicFieldsBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/DynamicFieldsBuilderTests.java @@ -68,7 +68,7 @@ public void testCreateDynamicStringFieldAsKeywordForDimension() throws IOExcepti XContentParser parser = createParser(JsonXContent.jsonXContent, source); SourceToParse sourceToParse = new SourceToParse("test", new BytesArray(source), XContentType.JSON); - SourceFieldMapper sourceMapper = new SourceFieldMapper.Builder(null, Settings.EMPTY).setSynthetic().build(); + SourceFieldMapper sourceMapper = new SourceFieldMapper.Builder(null, Settings.EMPTY, false).setSynthetic().build(); RootObjectMapper root = new RootObjectMapper.Builder("_doc", Explicit.IMPLICIT_TRUE).add( new PassThroughObjectMapper.Builder("labels").setContainsDimensions().dynamic(ObjectMapper.Dynamic.TRUE) ).build(MapperBuilderContext.root(false, false)); diff --git a/server/src/test/java/org/elasticsearch/index/mapper/SourceFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/SourceFieldMapperTests.java index a5264512d8086..802a18645eab6 100644 --- 
a/server/src/test/java/org/elasticsearch/index/mapper/SourceFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/SourceFieldMapperTests.java @@ -13,6 +13,8 @@ import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentHelper; +import org.elasticsearch.index.IndexVersions; +import org.elasticsearch.test.index.IndexVersionUtils; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentFactory; import org.elasticsearch.xcontent.XContentParser; @@ -298,4 +300,44 @@ public void testSupportsNonDefaultParameterValues() throws IOException { ); assertThat(e.getMessage(), containsString("Parameters [enabled,includes,excludes] are not allowed in source")); } + + public void testBypassCheckForNonDefaultParameterValuesInEarlierVersions() throws IOException { + Settings settings = Settings.builder().put(SourceFieldMapper.LOSSY_PARAMETERS_ALLOWED_SETTING_NAME, false).build(); + { + var sourceFieldMapper = createMapperService( + IndexVersionUtils.getPreviousVersion(IndexVersions.SOURCE_MAPPER_LOSSY_PARAMS_CHECK), + settings, + () -> true, + topMapping(b -> b.startObject("_source").field("enabled", false).endObject()) + ).documentMapper().sourceMapper(); + assertThat(sourceFieldMapper, notNullValue()); + } + { + var sourceFieldMapper = createMapperService( + IndexVersionUtils.getPreviousVersion(IndexVersions.SOURCE_MAPPER_LOSSY_PARAMS_CHECK), + settings, + () -> true, + topMapping(b -> b.startObject("_source").array("includes", "foo").endObject()) + ).documentMapper().sourceMapper(); + assertThat(sourceFieldMapper, notNullValue()); + } + { + var sourceFieldMapper = createMapperService( + IndexVersionUtils.getPreviousVersion(IndexVersions.SOURCE_MAPPER_LOSSY_PARAMS_CHECK), + settings, + () -> true, + topMapping(b -> b.startObject("_source").array("excludes", "foo").endObject()) + ).documentMapper().sourceMapper(); + 
assertThat(sourceFieldMapper, notNullValue()); + } + { + var sourceFieldMapper = createMapperService( + IndexVersionUtils.getPreviousVersion(IndexVersions.SOURCE_MAPPER_LOSSY_PARAMS_CHECK), + settings, + () -> true, + topMapping(b -> b.startObject("_source").field("mode", "disabled").endObject()) + ).documentMapper().sourceMapper(); + assertThat(sourceFieldMapper, notNullValue()); + } + } } diff --git a/server/src/test/java/org/elasticsearch/index/query/SearchExecutionContextTests.java b/server/src/test/java/org/elasticsearch/index/query/SearchExecutionContextTests.java index 3085ff89603ce..e541c680ada1b 100644 --- a/server/src/test/java/org/elasticsearch/index/query/SearchExecutionContextTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/SearchExecutionContextTests.java @@ -382,7 +382,7 @@ public void testSearchRequestRuntimeFieldsAndMultifieldDetection() { public void testSyntheticSourceSearchLookup() throws IOException { // Build a mapping using synthetic source - SourceFieldMapper sourceMapper = new SourceFieldMapper.Builder(null, Settings.EMPTY).setSynthetic().build(); + SourceFieldMapper sourceMapper = new SourceFieldMapper.Builder(null, Settings.EMPTY, false).setSynthetic().build(); RootObjectMapper root = new RootObjectMapper.Builder("_doc", Explicit.IMPLICIT_TRUE).add( new KeywordFieldMapper.Builder("cat", IndexVersion.current()).ignoreAbove(100) ).build(MapperBuilderContext.root(true, false)); From 0a8c6d28c108c0979a10d572a9a2bf3cc882696d Mon Sep 17 00:00:00 2001 From: Nhat Nguyen Date: Thu, 9 May 2024 07:52:04 -0700 Subject: [PATCH 085/117] Log running tasks in EsqlDisruptionIT (#108440) This PR logs tasks that are running after the disruption is cleared, allowing us to investigate why the disruption tests failed in #107347. 
Relates #107347 --- .../xpack/esql/action/EsqlActionIT.java | 1 - .../xpack/esql/action/EsqlDisruptionIT.java | 17 +++++++++++++++++ 2 files changed, 17 insertions(+), 1 deletion(-) diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionIT.java index 1bc9bd4766c2e..686fb831aa042 100644 --- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionIT.java +++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionIT.java @@ -121,7 +121,6 @@ public void testRow() { } } - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/107347") public void testFromStatsGroupingAvgWithSort() { testFromStatsGroupingAvgImpl("from test | stats avg(count) by data | sort data | limit 2", "data", "avg(count)"); } diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlDisruptionIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlDisruptionIT.java index 4bbcff44ec740..e005e2143522b 100644 --- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlDisruptionIT.java +++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlDisruptionIT.java @@ -8,12 +8,14 @@ package org.elasticsearch.xpack.esql.action; import org.elasticsearch.action.ActionFuture; +import org.elasticsearch.action.admin.cluster.node.tasks.list.TransportListTasksAction; import org.elasticsearch.cluster.coordination.Coordinator; import org.elasticsearch.cluster.coordination.FollowersChecker; import org.elasticsearch.cluster.coordination.LeaderChecker; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.compute.operator.exchange.ExchangeService; import org.elasticsearch.core.TimeValue; +import 
org.elasticsearch.health.node.selection.HealthNode; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.disruption.NetworkDisruption; @@ -91,6 +93,21 @@ private EsqlQueryResponse runQueryWithDisruption(EsqlQueryRequest request) { try { return future.actionGet(2, TimeUnit.MINUTES); } catch (Exception e) { + logger.info( + "running tasks: {}", + client().admin() + .cluster() + .prepareListTasks() + .get() + .getTasks() + .stream() + .filter( + // Skip the tasks we that'd get in the way while debugging + t -> false == t.action().contains(TransportListTasksAction.TYPE.name()) + && false == t.action().contains(HealthNode.TASK_NAME) + ) + .toList() + ); assertTrue("request must be failed or completed after clearing disruption", future.isDone()); ensureBlocksReleased(); logger.info("--> failed to execute esql query with disruption; retrying...", e); From 1b7cad185ee015a93d62c419d3d29a79ef3ae567 Mon Sep 17 00:00:00 2001 From: Andrei Stefan Date: Thu, 9 May 2024 17:57:53 +0300 Subject: [PATCH 086/117] ESQL: Add aggregates node level reduction (#107876) * Add aggregation intermediate reduction level and estimatedRowSize computed value --- docs/changelog/107876.yaml | 5 +++ .../xpack/esql/action/EsqlActionTaskIT.java | 40 +++++++++++++++++-- .../esql/plan/physical/AggregateExec.java | 4 ++ .../AbstractPhysicalOperationProviders.java | 37 ++++++++++------- .../xpack/esql/planner/Mapper.java | 2 +- .../xpack/esql/planner/PlannerUtils.java | 23 +++++------ 6 files changed, 79 insertions(+), 32 deletions(-) create mode 100644 docs/changelog/107876.yaml diff --git a/docs/changelog/107876.yaml b/docs/changelog/107876.yaml new file mode 100644 index 0000000000000..21624cacf7e1d --- /dev/null +++ b/docs/changelog/107876.yaml @@ -0,0 +1,5 @@ +pr: 107876 +summary: "ESQL: Add aggregates node level reduction" +area: ES|QL +type: enhancement +issues: [] diff --git 
a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionTaskIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionTaskIT.java index a1cd71da6c63b..d18bf0e23fd29 100644 --- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionTaskIT.java +++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionTaskIT.java @@ -81,6 +81,7 @@ public class EsqlActionTaskIT extends AbstractPausableIntegTestCase { @Before public void setup() { assumeTrue("requires query pragmas", canUseQueryPragmas()); + nodeLevelReduction = randomBoolean(); READ_DESCRIPTION = """ \\_LuceneSourceOperator[dataPartitioning = SHARD, maxPageSize = pageSize(), limit = 2147483647] \\_ValuesSourceReaderOperator[fields = [pause_me]] @@ -92,10 +93,10 @@ public void setup() { \\_ProjectOperator[projection = [0]] \\_LimitOperator[limit = 1000] \\_OutputOperator[columns = [sum(pause_me)]]"""; - REDUCE_DESCRIPTION = """ - \\_ExchangeSourceOperator[] - \\_ExchangeSinkOperator"""; - nodeLevelReduction = randomBoolean(); + REDUCE_DESCRIPTION = "\\_ExchangeSourceOperator[]\n" + + (nodeLevelReduction ? 
"\\_AggregationOperator[mode = INTERMEDIATE, aggs = sum of longs]\n" : "") + + "\\_ExchangeSinkOperator"; + } public void testTaskContents() throws Exception { @@ -480,6 +481,37 @@ public void testTaskContentsForLimitQuery() throws Exception { } } + public void testTaskContentsForGroupingStatsQuery() throws Exception { + READ_DESCRIPTION = """ + \\_LuceneSourceOperator[dataPartitioning = SHARD, maxPageSize = pageSize(), limit = 2147483647] + \\_ValuesSourceReaderOperator[fields = [foo]] + \\_OrdinalsGroupingOperator(aggs = max of longs) + \\_ExchangeSinkOperator""".replace("pageSize()", Integer.toString(pageSize())); + MERGE_DESCRIPTION = """ + \\_ExchangeSourceOperator[] + \\_HashAggregationOperator[mode = , aggs = max of longs] + \\_ProjectOperator[projection = [1, 0]] + \\_LimitOperator[limit = 1000] + \\_OutputOperator[columns = [max(foo), pause_me]]"""; + REDUCE_DESCRIPTION = "\\_ExchangeSourceOperator[]\n" + + (nodeLevelReduction ? "\\_HashAggregationOperator[mode = , aggs = max of longs]\n" : "") + + "\\_ExchangeSinkOperator"; + + ActionFuture response = startEsql("from test | stats max(foo) by pause_me"); + try { + getTasksStarting(); + scriptPermits.release(pageSize()); + getTasksRunning(); + } finally { + scriptPermits.release(numberOfDocs()); + try (EsqlQueryResponse esqlResponse = response.get()) { + var it = Iterators.flatMap(esqlResponse.values(), i -> i); + assertThat(it.next(), equalTo(numberOfDocs() - 1L)); // max of numberOfDocs() generated int values + assertThat(it.next(), equalTo(1L)); // pause_me always emits 1 + } + } + } + @Override protected Collection> nodePlugins() { return CollectionUtils.appendToCopy(super.nodePlugins(), MockTransportService.TestPlugin.class); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/physical/AggregateExec.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/physical/AggregateExec.java index 9feb5e9b009d1..490ec174eea5a 100644 --- 
a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/physical/AggregateExec.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/physical/AggregateExec.java @@ -68,6 +68,10 @@ public List aggregates() { return aggregates; } + public AggregateExec withMode(Mode newMode) { + return new AggregateExec(source(), child(), groupings, aggregates, newMode, estimatedRowSize); + } + /** * Estimate of the number of bytes that'll be loaded per position before * the stream of pages is consumed. diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/AbstractPhysicalOperationProviders.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/AbstractPhysicalOperationProviders.java index 8c5392ccc1781..f5e4dead67347 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/AbstractPhysicalOperationProviders.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/AbstractPhysicalOperationProviders.java @@ -20,6 +20,7 @@ import org.elasticsearch.xpack.esql.expression.function.aggregate.AggregateFunction; import org.elasticsearch.xpack.esql.expression.function.aggregate.Count; import org.elasticsearch.xpack.esql.plan.physical.AggregateExec; +import org.elasticsearch.xpack.esql.plan.physical.ExchangeSourceExec; import org.elasticsearch.xpack.esql.planner.LocalExecutionPlanner.LocalExecutionPlannerContext; import org.elasticsearch.xpack.esql.planner.LocalExecutionPlanner.PhysicalOperation; import org.elasticsearch.xpack.ql.InvalidArgumentException; @@ -54,6 +55,20 @@ public final PhysicalOperation groupingPhysicalOperation( var aggregates = aggregateExec.aggregates(); var sourceLayout = source.layout; + AggregatorMode aggregatorMode; + + if (mode == AggregateExec.Mode.FINAL) { + aggregatorMode = AggregatorMode.FINAL; + } else if (mode == AggregateExec.Mode.PARTIAL) { + if (aggregateExec.child() instanceof ExchangeSourceExec) {// the reducer step at 
data node (local) level + aggregatorMode = AggregatorMode.INTERMEDIATE; + } else { + aggregatorMode = AggregatorMode.INITIAL; + } + } else { + assert false : "Invalid aggregator mode [" + mode + "]"; + aggregatorMode = AggregatorMode.SINGLE; + } if (aggregateExec.groupings().isEmpty()) { // not grouping @@ -65,20 +80,18 @@ public final PhysicalOperation groupingPhysicalOperation( } else { layout.append(aggregateMapper.mapNonGrouping(aggregates)); } + // create the agg factories aggregatesToFactory( aggregates, - mode, + aggregatorMode, sourceLayout, false, // non-grouping s -> aggregatorFactories.add(s.supplier.aggregatorFactory(s.mode)) ); if (aggregatorFactories.isEmpty() == false) { - operatorFactory = new AggregationOperator.AggregationOperatorFactory( - aggregatorFactories, - mode == AggregateExec.Mode.FINAL ? AggregatorMode.FINAL : AggregatorMode.INITIAL - ); + operatorFactory = new AggregationOperator.AggregationOperatorFactory(aggregatorFactories, aggregatorMode); } } else { // grouping @@ -136,7 +149,7 @@ else if (mode == AggregateExec.Mode.PARTIAL) { // create the agg factories aggregatesToFactory( aggregates, - mode, + aggregatorMode, sourceLayout, true, // grouping s -> aggregatorFactories.add(s.supplier.groupingAggregatorFactory(s.mode)) @@ -219,7 +232,7 @@ private record AggFunctionSupplierContext(AggregatorFunctionSupplier supplier, A private void aggregatesToFactory( List aggregates, - AggregateExec.Mode mode, + AggregatorMode mode, Layout layout, boolean grouping, Consumer consumer @@ -228,11 +241,9 @@ private void aggregatesToFactory( if (ne instanceof Alias alias) { var child = alias.child(); if (child instanceof AggregateFunction aggregateFunction) { - AggregatorMode aggMode = null; List sourceAttr; - if (mode == AggregateExec.Mode.PARTIAL) { - aggMode = AggregatorMode.INITIAL; + if (mode == AggregatorMode.INITIAL) { // TODO: this needs to be made more reliable - use casting to blow up when dealing with expressions (e+1) Expression field = 
aggregateFunction.field(); // Only count can now support literals - all the other aggs should be optimized away @@ -257,9 +268,7 @@ private void aggregatesToFactory( } sourceAttr = List.of(attr); } - - } else if (mode == AggregateExec.Mode.FINAL) { - aggMode = AggregatorMode.FINAL; + } else if (mode == AggregatorMode.FINAL || mode == AggregatorMode.INTERMEDIATE) { if (grouping) { sourceAttr = aggregateMapper.mapGrouping(aggregateFunction); } else { @@ -279,7 +288,7 @@ private void aggregatesToFactory( assert inputChannels.size() > 0 && inputChannels.stream().allMatch(i -> i >= 0); } if (aggregateFunction instanceof ToAggregator agg) { - consumer.accept(new AggFunctionSupplierContext(agg.supplier(inputChannels), aggMode)); + consumer.accept(new AggFunctionSupplierContext(agg.supplier(inputChannels), mode)); } else { throw new EsqlIllegalArgumentException("aggregate functions must extend ToAggregator"); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/Mapper.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/Mapper.java index 62b305a68bc28..1212e77557ca6 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/Mapper.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/Mapper.java @@ -54,7 +54,7 @@ public class Mapper { private final FunctionRegistry functionRegistry; - private final boolean localMode; + private final boolean localMode; // non-coordinator (data node) mode public Mapper(FunctionRegistry functionRegistry) { this.functionRegistry = functionRegistry; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/PlannerUtils.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/PlannerUtils.java index 26c57f13e16c4..fbfc57261bc40 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/PlannerUtils.java +++ 
b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/PlannerUtils.java @@ -24,6 +24,7 @@ import org.elasticsearch.xpack.esql.plan.logical.Aggregate; import org.elasticsearch.xpack.esql.plan.logical.EsRelation; import org.elasticsearch.xpack.esql.plan.logical.TopN; +import org.elasticsearch.xpack.esql.plan.physical.AggregateExec; import org.elasticsearch.xpack.esql.plan.physical.EsQueryExec; import org.elasticsearch.xpack.esql.plan.physical.EsSourceExec; import org.elasticsearch.xpack.esql.plan.physical.EstimatesRowSize; @@ -87,23 +88,19 @@ public static PhysicalPlan dataNodeReductionPlan(LogicalPlan plan, PhysicalPlan if (pipelineBreakers.isEmpty() == false) { UnaryPlan pipelineBreaker = (UnaryPlan) pipelineBreakers.get(0); - if (pipelineBreaker instanceof TopN topN) { - return new TopNExec(topN.source(), unused, topN.order(), topN.limit(), 2000); + if (pipelineBreaker instanceof TopN) { + Mapper mapper = new Mapper(true); + var physicalPlan = EstimatesRowSize.estimateRowSize(0, mapper.map(plan)); + return physicalPlan.collectFirstChildren(TopNExec.class::isInstance).get(0); } else if (pipelineBreaker instanceof Limit limit) { return new LimitExec(limit.source(), unused, limit.limit()); } else if (pipelineBreaker instanceof OrderBy order) { return new OrderExec(order.source(), unused, order.order()); - } else if (pipelineBreaker instanceof Aggregate aggregate) { - // TODO handle this as a special PARTIAL step (intermediate) - /*return new AggregateExec( - aggregate.source(), - unused, - aggregate.groupings(), - aggregate.aggregates(), - AggregateExec.Mode.PARTIAL, - 0 - );*/ - return null; + } else if (pipelineBreaker instanceof Aggregate) { + Mapper mapper = new Mapper(true); + var physicalPlan = EstimatesRowSize.estimateRowSize(0, mapper.map(plan)); + var aggregate = (AggregateExec) physicalPlan.collectFirstChildren(AggregateExec.class::isInstance).get(0); + return aggregate.withMode(AggregateExec.Mode.PARTIAL); } else { throw new 
EsqlIllegalArgumentException("unsupported unary physical plan node [" + pipelineBreaker.nodeName() + "]"); } From 6308bbf032a2dbf7f6671634bd520c5310014f66 Mon Sep 17 00:00:00 2001 From: Mark Tozzi Date: Thu, 9 May 2024 11:41:38 -0400 Subject: [PATCH 087/117] Add an APM metric to aggregations usage (#108110) This wires up the "new" APM metrics integration to the existing Aggregations usage tracking system. It introduces one new metric, a LongCounter named es.search.query.aggregations.total, which has dimensions for the specific aggregation being run, and the values source type we resolved it to. --------- Co-authored-by: Elastic Machine --- .../elasticsearch/node/NodeConstruction.java | 6 ++--- .../elasticsearch/search/SearchModule.java | 20 ++++++++++++++++- .../support/AggregationUsageService.java | 22 +++++++++++++++++++ .../support/ValuesSourceRegistry.java | 7 +++++- 4 files changed, 50 insertions(+), 5 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/node/NodeConstruction.java b/server/src/main/java/org/elasticsearch/node/NodeConstruction.java index 14e8ce80fcf26..9585711b5562e 100644 --- a/server/src/main/java/org/elasticsearch/node/NodeConstruction.java +++ b/server/src/main/java/org/elasticsearch/node/NodeConstruction.java @@ -257,7 +257,7 @@ static NodeConstruction prepareConstruction( ThreadPool threadPool = constructor.createThreadPool(settings, telemetryProvider.getMeterRegistry()); SettingsModule settingsModule = constructor.validateSettings(initialEnvironment.settings(), settings, threadPool); - SearchModule searchModule = constructor.createSearchModule(settingsModule.getSettings(), threadPool); + SearchModule searchModule = constructor.createSearchModule(settingsModule.getSettings(), threadPool, telemetryProvider); constructor.createClientAndRegistries(settingsModule.getSettings(), threadPool, searchModule); DocumentParsingProvider documentParsingProvider = constructor.getDocumentParsingProvider(); @@ -525,9 +525,9 @@ private 
SettingsModule validateSettings(Settings envSettings, Settings settings, return settingsModule; } - private SearchModule createSearchModule(Settings settings, ThreadPool threadPool) { + private SearchModule createSearchModule(Settings settings, ThreadPool threadPool, TelemetryProvider telemetryProvider) { IndexSearcher.setMaxClauseCount(SearchUtils.calculateMaxClauseValue(threadPool)); - return new SearchModule(settings, pluginsService.filterPlugins(SearchPlugin.class).toList()); + return new SearchModule(settings, pluginsService.filterPlugins(SearchPlugin.class).toList(), telemetryProvider); } /** diff --git a/server/src/main/java/org/elasticsearch/search/SearchModule.java b/server/src/main/java/org/elasticsearch/search/SearchModule.java index 97b747c650c1b..8d5fa0a7ac155 100644 --- a/server/src/main/java/org/elasticsearch/search/SearchModule.java +++ b/server/src/main/java/org/elasticsearch/search/SearchModule.java @@ -254,6 +254,7 @@ import org.elasticsearch.search.vectors.KnnScoreDocQueryBuilder; import org.elasticsearch.search.vectors.KnnVectorQueryBuilder; import org.elasticsearch.search.vectors.QueryVectorBuilder; +import org.elasticsearch.telemetry.TelemetryProvider; import org.elasticsearch.xcontent.NamedXContentRegistry; import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.XContentParser; @@ -289,6 +290,11 @@ public class SearchModule { Setting.Property.NodeScope ); + /** + * Metric name for aggregation usage statistics + */ + private final TelemetryProvider telemetryProvider; + private final Map highlighters; private final List fetchSubPhases = new ArrayList<>(); @@ -306,7 +312,19 @@ public class SearchModule { * @param plugins List of included {@link SearchPlugin} objects. 
*/ public SearchModule(Settings settings, List plugins) { + this(settings, plugins, TelemetryProvider.NOOP); + } + + /** + * Constructs a new SearchModule object + * + * @param settings Current settings + * @param plugins List of included {@link SearchPlugin} objects. + * @param telemetryProvider + */ + public SearchModule(Settings settings, List plugins, TelemetryProvider telemetryProvider) { this.settings = settings; + this.telemetryProvider = telemetryProvider; registerSuggesters(plugins); highlighters = setupHighlighters(settings, plugins); registerScoreFunctions(plugins); @@ -352,7 +370,7 @@ public Map getHighlighters() { } private ValuesSourceRegistry registerAggregations(List plugins) { - ValuesSourceRegistry.Builder builder = new ValuesSourceRegistry.Builder(); + ValuesSourceRegistry.Builder builder = new ValuesSourceRegistry.Builder(telemetryProvider.getMeterRegistry()); registerAggregation( new AggregationSpec(AvgAggregationBuilder.NAME, AvgAggregationBuilder::new, AvgAggregationBuilder.PARSER).addResultReader( diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/support/AggregationUsageService.java b/server/src/main/java/org/elasticsearch/search/aggregations/support/AggregationUsageService.java index 853aa152db036..28ef6f934d287 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/support/AggregationUsageService.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/support/AggregationUsageService.java @@ -9,12 +9,18 @@ package org.elasticsearch.search.aggregations.support; import org.elasticsearch.node.ReportingService; +import org.elasticsearch.telemetry.metric.LongCounter; +import org.elasticsearch.telemetry.metric.MeterRegistry; import java.util.HashMap; import java.util.Map; import java.util.concurrent.atomic.LongAdder; public class AggregationUsageService implements ReportingService { + private static final String ES_SEARCH_QUERY_AGGREGATIONS_TOTAL_COUNT = "es.search.query.aggregations.total"; + 
private final String AGGREGATION_NAME_KEY = "aggregation_name"; + private final String VALUES_SOURCE_KEY = "values_source"; + private final LongCounter aggregationsUsageCounter; private final Map> aggs; private final AggregationInfo info; @@ -22,9 +28,16 @@ public class AggregationUsageService implements ReportingService> aggs; + private final MeterRegistry meterRegistry; public Builder() { + this(MeterRegistry.NOOP); + } + + public Builder(MeterRegistry meterRegistry) { aggs = new HashMap<>(); + assert meterRegistry != null; + this.meterRegistry = meterRegistry; } public void registerAggregationUsage(String aggregationName) { @@ -45,9 +58,16 @@ public AggregationUsageService build() { } } + // Attribute names for the metric + private AggregationUsageService(Builder builder) { this.aggs = builder.aggs; info = new AggregationInfo(aggs); + this.aggregationsUsageCounter = builder.meterRegistry.registerLongCounter( + ES_SEARCH_QUERY_AGGREGATIONS_TOTAL_COUNT, + "Aggregations usage", + "count" + ); } public void incAggregationUsage(String aggregationName, String valuesSourceType) { @@ -61,6 +81,8 @@ public void incAggregationUsage(String aggregationName, String valuesSourceType) assert adder != null : "Unknown subtype [" + aggregationName + "][" + valuesSourceType + "]"; } assert valuesSourceMap != null : "Unknown aggregation [" + aggregationName + "][" + valuesSourceType + "]"; + // tests will have a no-op implementation here + aggregationsUsageCounter.incrementBy(1, Map.of(AGGREGATION_NAME_KEY, aggregationName, VALUES_SOURCE_KEY, valuesSourceType)); } public Map getUsageStats() { diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/support/ValuesSourceRegistry.java b/server/src/main/java/org/elasticsearch/search/aggregations/support/ValuesSourceRegistry.java index 44e66d98f0258..fcfcad96d9fbf 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/support/ValuesSourceRegistry.java +++ 
b/server/src/main/java/org/elasticsearch/search/aggregations/support/ValuesSourceRegistry.java @@ -10,6 +10,7 @@ import org.elasticsearch.index.query.SearchExecutionContext; import org.elasticsearch.search.SearchModule; import org.elasticsearch.search.aggregations.AggregationExecutionException; +import org.elasticsearch.telemetry.metric.MeterRegistry; import java.util.AbstractMap; import java.util.ArrayList; @@ -58,7 +59,11 @@ public static class Builder { private final Map, List>> aggregatorRegistry = new HashMap<>(); public Builder() { - this.usageServiceBuilder = new AggregationUsageService.Builder(); + this(MeterRegistry.NOOP); + } + + public Builder(MeterRegistry meterRegistry) { + this.usageServiceBuilder = new AggregationUsageService.Builder(meterRegistry); } /** From 9f438edb43cbc95f4156ce4bc8b4251a2f75da9d Mon Sep 17 00:00:00 2001 From: Mark Tozzi Date: Thu, 9 May 2024 11:44:24 -0400 Subject: [PATCH 088/117] Fix ClassCastException in Significant Terms (#108429) Prior to this PR, if a SignificantTerms aggregation targeted a field existing on two indices (that were included in the aggregation) but mapped to different field types, the query would fail at reduce time with a somewhat obscure ClassCastException. This change brings the behavior in line with the Terms aggregation, which returns a 400 class IllegalArgumentException with a useful message in this situation. 
Resolves #108427 --- docs/changelog/108429.yaml | 6 ++++++ .../terms/InternalSignificantTerms.java | 19 +++++++++++++++++++ 2 files changed, 25 insertions(+) create mode 100644 docs/changelog/108429.yaml diff --git a/docs/changelog/108429.yaml b/docs/changelog/108429.yaml new file mode 100644 index 0000000000000..562454a0de256 --- /dev/null +++ b/docs/changelog/108429.yaml @@ -0,0 +1,6 @@ +pr: 108429 +summary: Fix `ClassCastException` in Significant Terms +area: Aggregations +type: bug +issues: + - 108427 diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalSignificantTerms.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalSignificantTerms.java index f8e7f3cf3a69c..91bb4c3f0cd74 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalSignificantTerms.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalSignificantTerms.java @@ -12,6 +12,7 @@ import org.elasticsearch.common.util.ObjectObjectPagedHashMap; import org.elasticsearch.core.Releasables; import org.elasticsearch.search.DocValueFormat; +import org.elasticsearch.search.aggregations.AggregationErrors; import org.elasticsearch.search.aggregations.AggregationReduceContext; import org.elasticsearch.search.aggregations.Aggregator; import org.elasticsearch.search.aggregations.AggregatorReducer; @@ -29,6 +30,7 @@ import java.util.List; import java.util.Map; import java.util.Objects; +import java.util.Optional; /** * Result of the significant terms aggregation. @@ -208,10 +210,27 @@ protected AggregatorReducer getLeaderReducer(AggregationReduceContext reduceCont reduceContext.bigArrays() ); + private InternalAggregation referenceAgg = null; + @Override public void accept(InternalAggregation aggregation) { + /* + canLeadReduction here is essentially checking if this shard returned data. Unmapped shards (that didn't + specify a missing value) will be false. 
Since they didn't return data, we can safely skip them, and + doing so prevents us from accidentally taking one as the reference agg for type checking, which would cause + shards that actually returned data to fail. + */ + if (aggregation.canLeadReduction() == false) { + return; + } @SuppressWarnings("unchecked") final InternalSignificantTerms terms = (InternalSignificantTerms) aggregation; + if (referenceAgg == null) { + referenceAgg = terms; + } else if (referenceAgg.getClass().equals(terms.getClass()) == false) { + // We got here because shards had different mappings for the same field (presumably different indices) + throw AggregationErrors.reduceTypeMismatch(referenceAgg.getName(), Optional.empty()); + } // Compute the overall result set size and the corpus size using the // top-level Aggregations from each shard globalSubsetSize += terms.getSubsetSize(); From 1a55e2fa76b938f9a354daa2d8c64f0c86db2077 Mon Sep 17 00:00:00 2001 From: Stef Nestor <26751266+stefnestor@users.noreply.github.com> Date: Thu, 9 May 2024 10:27:19 -0600 Subject: [PATCH 089/117] (Doc+) Capture Elasticsearch diagnostic (#108259) * (Doc+) Capture Elasticsearch diagnostic * add diagnostic topic to nav, chunk content, style edits * fix test --------- Co-authored-by: shainaraskas --- docs/reference/troubleshooting.asciidoc | 2 + .../troubleshooting/diagnostic.asciidoc | 152 ++++++++++++++++++ 2 files changed, 154 insertions(+) create mode 100644 docs/reference/troubleshooting/diagnostic.asciidoc diff --git a/docs/reference/troubleshooting.asciidoc b/docs/reference/troubleshooting.asciidoc index 01ef39b69c529..ceff8619062c4 100644 --- a/docs/reference/troubleshooting.asciidoc +++ b/docs/reference/troubleshooting.asciidoc @@ -138,3 +138,5 @@ include::troubleshooting/troubleshooting-searches.asciidoc[] include::troubleshooting/troubleshooting-shards-capacity.asciidoc[] include::troubleshooting/troubleshooting-unbalanced-cluster.asciidoc[] + +include::troubleshooting/diagnostic.asciidoc[] diff 
--git a/docs/reference/troubleshooting/diagnostic.asciidoc b/docs/reference/troubleshooting/diagnostic.asciidoc new file mode 100644 index 0000000000000..a944ca88d285d --- /dev/null +++ b/docs/reference/troubleshooting/diagnostic.asciidoc @@ -0,0 +1,152 @@ +[[diagnostic]] +== Capturing diagnostics +++++ +Capture diagnostics +++++ +:keywords: Elasticsearch diagnostic, diagnostics + +The {es} https://github.com/elastic/support-diagnostics[Support Diagnostic] tool captures a point-in-time snapshot of cluster statistics and most settings. +It works against all {es} versions. + +This information can be used to troubleshoot problems with your cluster. For examples of issues that you can troubleshoot using Support Diagnostic tool output, refer to https://www.elastic.co/blog/why-does-elastic-support-keep-asking-for-diagnostic-files[the Elastic blog]. + +You can generate diagnostic information using this tool before you contact https://support.elastic.co[Elastic Support] or +https://discuss.elastic.co[Elastic Discuss] to minimize turnaround time. + +[discrete] +[[diagnostic-tool-requirements]] +=== Requirements + +- Java Runtime Environment or Java Development Kit v1.8 or higher + +[discrete] +[[diagnostic-tool-access]] +=== Access the tool + +The Support Diagnostic tool is included as a sub-library in some Elastic deployments: + +* {ece}: Located under **{ece}** > **Deployment** > **Operations** > +**Prepare Bundle** > **{es}**. +* {eck}: Run as https://www.elastic.co/guide/en/cloud-on-k8s/current/k8s-take-eck-dump.html[`eck-diagnostics`]. + +You can also directly download the `diagnostics-X.X.X-dist.zip` file for the latest Support Diagnostic release +from https://github.com/elastic/support-diagnostics/releases/latest[the `support-diagnostic` repo]. + + +[discrete] +[[diagnostic-capture]] +=== Capture diagnostic information + +To capture an {es} diagnostic: + +. 
In a terminal, verify that your network and user permissions are sufficient to connect to your {es} +cluster by polling the cluster's <>. ++ +For example, with the parameters `host:localhost`, `port:9200`, and `username:elastic`, you'd use the following curl request: ++ +[source,sh] +---- +curl -X GET -k -u elastic -p https://localhost:9200/_cluster/health +---- +// NOTCONSOLE ++ +If you receive an HTTP 200 `OK` response, then you can proceed to the next step. If you receive a different +response code, then <> before proceeding. + +. Using the same environment parameters, run the diagnostic tool script. ++ +For information about the parameters that you can pass to the tool, refer to the https://github.com/elastic/support-diagnostics#standard-options[diagnostic +parameter reference]. ++ +The following command options are recommended: ++ +**Unix-based systems** ++ +[source,sh] +---- +sudo ./diagnostics.sh --type local --host localhost --port 9200 -u elastic -p --bypassDiagVerify --ssl --noVerify +---- ++ +**Windows** ++ +[source,sh] +---- +sudo .\diagnostics.bat --type local --host localhost --port 9200 -u elastic -p --bypassDiagVerify --ssl --noVerify +---- ++ +[TIP] +.Script execution modes +==== +You can execute the script in three https://github.com/elastic/support-diagnostics#diagnostic-types[modes]: + +* `local` (default, recommended): Polls the <>, +gathers operating system info, and captures cluster and GC logs. + +* `remote`: Establishes an ssh session +to the applicable target server to pull the same information as `local`. + +* `api`: Polls the <>. All other data must be +collected manually. +==== + +. When the script has completed, verify that no errors were logged to `diagnostic.log`. +If the log file contains errors, then refer to <>. + +. If the script completed without errors, then an archive with the format `-diagnostics-.zip` is created in the working directory, or an output directory you have specified. 
You can review or share the diagnostic archive as needed. + +[discrete] +[[diagnostic-non-200]] +=== Diagnose a non-200 cluster health response + +When you poll your cluster health, if you receive any response other than `200 OK`, then the diagnostic tool +might not work as intended. The following are possible error codes and their resolutions: + +HTTP 401 `UNAUTHENTICATED`:: +Additional information in the error will usually indicate either +that your `username:password` pair is invalid, or that your `.security` +index is unavailable and you need to set up a temporary +<> user with `role:superuser` to authenticate. + +HTTP 403 `UNAUTHORIZED`:: +Your `username` is recognized but +has insufficient permissions to run the diagnostic. Either use a different +username or elevate the user's privileges. + +HTTP 429 `TOO_MANY_REQUESTS` (for example, `circuit_breaking_exception`):: +Your username authenticated and authorized, but the cluster is under +sufficiently high strain that it's not responding to API calls. These +responses are usually intermittent. You can proceed with running the diagnostic, +but the diagnostic results might be incomplete. + +HTTP 504 `BAD_GATEWAY`:: +Your network is experiencing issues reaching the cluster. You might be using a proxy or firewall. +Consider running the diagnostic tool from a different location, confirming your port, or using an IP +instead of a URL domain. + +HTTP 503 `SERVICE_UNAVAILABLE` (for example, `master_not_discovered_exception`):: +Your cluster does not currently have an elected master node, which is +required for it to be API-responsive. This might be temporary while the master +node rotates. If the issue persists, then <> +before proceeding. 
+ +[discrete] +[[diagnostic-log-errors]] +=== Diagnose errors in `diagnostic.log` + +The following are common errors that you might encounter when running the diagnostic tool: + +* `Error: Could not find or load main class com.elastic.support.diagnostics.DiagnosticApp` ++ +This indicates that you accidentally downloaded the source code file +instead of `diagnostics-X.X.X-dist.zip` from the releases page. + +* `Could not retrieve the Elasticsearch version due to a system or network error - unable to continue.` ++ +This indicates that the diagnostic couldn't run commands against the cluster. +Poll the cluster's health again, and ensure that you're using the same parameters +when you run the diagnostic batch or shell file. + +* A `security_exception` that includes `is unauthorized for user`: ++ +The provided user has insufficient admin permissions to run the diagnostic tool. Use another +user, or grant the user `role:superuser` privileges. \ No newline at end of file From 91bed34e72c6fe91773f5af81e1d8be0e028f514 Mon Sep 17 00:00:00 2001 From: David Turner Date: Thu, 9 May 2024 18:37:31 +0100 Subject: [PATCH 090/117] Clarify docs on deleting searchable snapshots (#108451) The word `cannot` implies Elasticsearch prevents you from doing these things, but it doesn't have this protection today (see #73947). This commit clarifies this by saying `must not` instead. Closes #108450 --- docs/reference/searchable-snapshots/index.asciidoc | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/docs/reference/searchable-snapshots/index.asciidoc b/docs/reference/searchable-snapshots/index.asciidoc index 4a56961246c2b..794496c8b24ad 100644 --- a/docs/reference/searchable-snapshots/index.asciidoc +++ b/docs/reference/searchable-snapshots/index.asciidoc @@ -310,9 +310,9 @@ of {search-snap} indices. The sole copy of the data in a {search-snap} index is the underlying snapshot, stored in the repository. 
For example: -* You cannot unregister a repository while any of the searchable snapshots it -contains are mounted in {es}. You also cannot delete a snapshot if any of its -indices are mounted as a searchable snapshot in the same cluster. +* You must not unregister a repository while any of the searchable snapshots it +contains are mounted in {es}. You also must not delete a snapshot if any of its +indices are mounted as searchable snapshots. * If you mount indices from snapshots held in a repository to which a different cluster has write access then you must make sure that the other cluster does not From 6ecb295ff1c7bc473e043ce896d06525973dd5b7 Mon Sep 17 00:00:00 2001 From: David Turner Date: Thu, 9 May 2024 19:04:45 +0100 Subject: [PATCH 091/117] Document `transport.compress` trade-offs more clearly (#108458) Spells out explicitly that setting `transport.compress: true` may cost extra CPU. --- .../cluster/remote-clusters-settings.asciidoc | 53 ++++++++++--------- docs/reference/modules/transport.asciidoc | 42 +++++++++++---- 2 files changed, 62 insertions(+), 33 deletions(-) diff --git a/docs/reference/modules/cluster/remote-clusters-settings.asciidoc b/docs/reference/modules/cluster/remote-clusters-settings.asciidoc index 848a29c64279c..2308ec259da48 100644 --- a/docs/reference/modules/cluster/remote-clusters-settings.asciidoc +++ b/docs/reference/modules/cluster/remote-clusters-settings.asciidoc @@ -59,35 +59,40 @@ you configure the remotes. `cluster.remote..transport.compress`:: - Per cluster setting that enables you to configure compression for requests - to a specific remote cluster. This setting impacts only requests - sent to the remote cluster. If the inbound request is compressed, - Elasticsearch compresses the response. The setting options are `true`, - `indexing_data`, and `false`. If unset, the global `transport.compress` is - used as the fallback setting. 
+ Per-cluster setting that enables you to configure compression for requests to + a specific remote cluster. The handling cluster will automatically compress + responses to compressed requests. The setting options are `true`, + `indexing_data`, and `false`. If unset, defaults to the behaviour specified + by the node-wide `transport.compress` setting. See the + <> for further information. `cluster.remote..transport.compression_scheme`:: - Per cluster setting that enables you to configure compression scheme for - requests to a specific remote cluster. This setting impacts only requests - sent to the remote cluster. If an inbound request is compressed, {es} - compresses the response using the same compression scheme. The setting options - are `deflate` and `lz4`. If unset, the global `transport.compression_scheme` - is used as the fallback setting. + Per-cluster setting that enables you to configure the compression scheme for + requests to a specific cluster if those requests are selected to be + compressed by the `cluster.remote..transport.compress` + setting. The handling cluster will automatically use the same compression + scheme for responses as for the corresponding requests. The setting options + are `deflate` and `lz4`. If unset, defaults to the behaviour specified by the + node-wide `transport.compression_scheme` setting. See the + <> for further information. - -`cluster.remote..credentials` (<>, <>):: [[remote-cluster-credentials-setting]] - - Per cluster setting for configuring <>. - This setting takes the encoded value of a - <> and must be set - in the <> on each node in the cluster. - The presence (or not) of this setting determines which model a remote cluster uses. - If present, the remote cluster uses the API key based model. - Otherwise, it uses the certificate based model. - If the setting is added, removed, or updated in the <> and reloaded via the - <> API, the cluster will automatically rebuild its connection to the remote. 
+`cluster.remote..credentials`:: + + (<>, <>) + Per-cluster setting for configuring <>. This setting takes the encoded value of a + <> and must + be set in the <> on each node in the cluster. + The presence (or not) of this setting determines which model a remote cluster + uses. If present, the remote cluster uses the API key based model. Otherwise, + it uses the certificate based model. If the setting is added, removed, or + updated in the <> and reloaded via the + <> API, the cluster will automatically + rebuild its connection to the remote. [[remote-cluster-sniff-settings]] ==== Sniff mode remote cluster settings diff --git a/docs/reference/modules/transport.asciidoc b/docs/reference/modules/transport.asciidoc index 2ec574544f9bb..d08da2cfc1d2f 100644 --- a/docs/reference/modules/transport.asciidoc +++ b/docs/reference/modules/transport.asciidoc @@ -47,20 +47,44 @@ different from `transport.port`. Defaults to the port assigned via The connect timeout for initiating a new connection (in time setting format). Defaults to `30s`. +[[transport-settings-compress]] `transport.compress`:: (<>, string) -Set to `true`, `indexing_data`, or `false` to configure transport compression -between nodes. The option `true` will compress all data. The option -`indexing_data` will compress only the raw index data sent between nodes during -ingest, ccr following (excluding bootstrap), and operations based shard recovery -(excluding transferring lucene files). Defaults to `indexing_data`. +Determines which transport requests are compressed before sending them to +another node. {es} will compress transport responses if and only if the +corresponding request was compressed. See also `transport.compression_scheme`, +which specifies the compression scheme which is used. Accepts the following +values: ++ +-- +`false`:: + +No transport requests are compressed. This option uses the most network +bandwidth, but avoids the CPU overhead of compression and decompression. 
+ +`indexing_data`:: + +Compresses only the raw indexing data sent between nodes during ingest, CCR +following (excluding bootstrapping) and operations-based shard recovery +(excluding file-based recovery which copies the raw Lucene data). This option +is a good trade-off between network bandwidth savings and the extra CPU +required for compression and decompression. This option is the default. + +`true`:: + +All transport requests are compressed. This option may perform better than +`indexing_data` in terms of network bandwidth, but will require the most CPU +for compression and decompression work. +-- +[[transport-settings-compression-scheme]] +`transport.compression_scheme`:: (<>, string) -Configures the compression scheme for `transport.compress`. The options are -`deflate` or `lz4`. If `lz4` is configured and the remote node has not been +upgraded to a version supporting `lz4`, the traffic will be sent uncompressed. -Defaults to `lz4`. +Configures the compression scheme for requests which are selected for +compression by the `transport.compress` setting. Accepts either `deflate` or +`lz4`, which offer different trade-offs between compression ratio and CPU +usage. {es} will use the same compression scheme for responses as for the +corresponding requests. Defaults to `lz4`. `transport.tcp.keep_alive`:: (<>, boolean) From ff201646f0c977c72421275f5d57478b04cbcb05 Mon Sep 17 00:00:00 2001 From: Parker Timmins Date: Thu, 9 May 2024 12:17:23 -0600 Subject: [PATCH 092/117] Allow read_slm to call GET /_slm/status (#108333) Add the ability to access the SLM status api to the read_slm privilege. 
--- docs/changelog/108333.yaml | 5 +++++ .../security/authz/privilege/ClusterPrivilegeResolver.java | 7 ++++++- .../core/security/authz/privilege/PrivilegeTests.java | 7 ++++++- 3 files changed, 17 insertions(+), 2 deletions(-) create mode 100644 docs/changelog/108333.yaml diff --git a/docs/changelog/108333.yaml b/docs/changelog/108333.yaml new file mode 100644 index 0000000000000..c3152500ce1b2 --- /dev/null +++ b/docs/changelog/108333.yaml @@ -0,0 +1,5 @@ +pr: 108333 +summary: Allow `read_slm` to call GET /_slm/status +area: ILM+SLM +type: bug +issues: [] diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/ClusterPrivilegeResolver.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/ClusterPrivilegeResolver.java index 372b62cffeaea..7f927d45a2375 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/ClusterPrivilegeResolver.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/ClusterPrivilegeResolver.java @@ -53,6 +53,7 @@ import org.elasticsearch.xpack.core.security.action.user.ProfileHasPrivilegesAction; import org.elasticsearch.xpack.core.security.authc.Authentication; import org.elasticsearch.xpack.core.security.support.Automatons; +import org.elasticsearch.xpack.core.slm.action.GetSLMStatusAction; import org.elasticsearch.xpack.core.slm.action.GetSnapshotLifecycleAction; import java.util.Collection; @@ -165,7 +166,11 @@ public class ClusterPrivilegeResolver { ILMActions.STOP.name(), GetStatusAction.NAME ); - private static final Set READ_SLM_PATTERN = Set.of(GetSnapshotLifecycleAction.NAME, GetStatusAction.NAME); + private static final Set READ_SLM_PATTERN = Set.of( + GetSLMStatusAction.NAME, + GetSnapshotLifecycleAction.NAME, + GetStatusAction.NAME + ); private static final Set MANAGE_SEARCH_APPLICATION_PATTERN = Set.of("cluster:admin/xpack/application/search_application/*"); 
private static final Set MANAGE_SEARCH_QUERY_RULES_PATTERN = Set.of("cluster:admin/xpack/query_rules/*"); diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/privilege/PrivilegeTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/privilege/PrivilegeTests.java index d15fb9a1409dd..ad73944f4c64d 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/privilege/PrivilegeTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/privilege/PrivilegeTests.java @@ -460,7 +460,12 @@ public void testSlmPrivileges() { } { - verifyClusterActionAllowed(ClusterPrivilegeResolver.READ_SLM, "cluster:admin/slm/get", "cluster:admin/ilm/operation_mode/get"); + verifyClusterActionAllowed( + ClusterPrivilegeResolver.READ_SLM, + "cluster:admin/slm/get", + "cluster:admin/slm/status", + "cluster:admin/ilm/operation_mode/get" + ); verifyClusterActionDenied( ClusterPrivilegeResolver.READ_SLM, "cluster:admin/slm/delete", From 0b71746d96acde1397a11a99ea466f1078124eb3 Mon Sep 17 00:00:00 2001 From: Pat Whelan Date: Thu, 9 May 2024 14:19:40 -0400 Subject: [PATCH 093/117] [Transform] Retry Destination IndexNotFoundException (#108394) A Destination Index can be removed from its previous shard in the middle of a Transform run. Ideally, this happens as part of the Delete API, and the Transform has already been stopped, but in the case that it isn't, we want to retry the checkpoint. If the Transform had been stopped, the retry will move the Indexer into a graceful shutdown. If the Transform had not been stopped, the retry will check if the Index exists or recreate the Index if it does not exist. This is currently how unattended Transforms work, and this change will make it so regular Transforms can also auto-recover from this error. 
Fix #107263 --- docs/changelog/108394.yaml | 6 + .../transforms/ClientTransformIndexer.java | 6 +- .../transforms/TransformContext.java | 9 + .../transforms/TransformIndexer.java | 33 ++- .../utils/ExceptionRootCauseFinder.java | 12 +- .../TransformIndexerFailureHandlingTests.java | 204 +++++++++++++++++- .../utils/ExceptionRootCauseFinderTests.java | 129 +++-------- 7 files changed, 278 insertions(+), 121 deletions(-) create mode 100644 docs/changelog/108394.yaml diff --git a/docs/changelog/108394.yaml b/docs/changelog/108394.yaml new file mode 100644 index 0000000000000..58f48fa548c6e --- /dev/null +++ b/docs/changelog/108394.yaml @@ -0,0 +1,6 @@ +pr: 108394 +summary: Handle `IndexNotFoundException` +area: Transform +type: bug +issues: + - 107263 diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/ClientTransformIndexer.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/ClientTransformIndexer.java index ed0f721f5f7f0..df8c3f62034e5 100644 --- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/ClientTransformIndexer.java +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/ClientTransformIndexer.java @@ -193,7 +193,11 @@ protected void handleBulkResponse(BulkResponse bulkResponse, ActionListener listener) { }, listener::onFailure); var deducedDestIndexMappings = new SetOnce>(); - var shouldMaybeCreateDestIndexForUnattended = context.getCheckpoint() == 0 - && TransformEffectiveSettings.isUnattended(transformConfig.getSettings()); + + // if the unattended transform had not created the destination index yet, or if the destination index was deleted for any + // type of transform during the last run, then we try to create the destination index. + // This is important to create the destination index explicitly before indexing documents. Otherwise, the destination + // index aliases may be missing. 
+ var shouldMaybeCreateDestIndex = isFirstUnattendedRun() || context.shouldRecreateDestinationIndex(); ActionListener> fieldMappingsListener = ActionListener.wrap(destIndexMappings -> { if (destIndexMappings.isEmpty() == false) { @@ -359,11 +363,12 @@ protected void onStart(long now, ActionListener listener) { // ... otherwise we fall back to index mappings deduced based on source indices this.fieldMappings = deducedDestIndexMappings.get(); } - // Since the unattended transform could not have created the destination index yet, we do it here. - // This is important to create the destination index explicitly before indexing first documents. Otherwise, the destination - // index aliases may be missing. - if (destIndexMappings.isEmpty() && shouldMaybeCreateDestIndexForUnattended) { - doMaybeCreateDestIndex(deducedDestIndexMappings.get(), configurationReadyListener); + + if (destIndexMappings.isEmpty() && shouldMaybeCreateDestIndex) { + doMaybeCreateDestIndex(deducedDestIndexMappings.get(), configurationReadyListener.delegateFailure((delegate, response) -> { + context.setShouldRecreateDestinationIndex(false); + delegate.onResponse(response); + })); } else { configurationReadyListener.onResponse(null); } @@ -380,7 +385,7 @@ protected void onStart(long now, ActionListener listener) { deducedDestIndexMappings.set(validationResponse.getDestIndexMappings()); if (isContinuous()) { transformsConfigManager.getTransformConfiguration(getJobId(), ActionListener.wrap(config -> { - if (transformConfig.equals(config) && fieldMappings != null && shouldMaybeCreateDestIndexForUnattended == false) { + if (transformConfig.equals(config) && fieldMappings != null && shouldMaybeCreateDestIndex == false) { logger.trace("[{}] transform config has not changed.", getJobId()); configurationReadyListener.onResponse(null); } else { @@ -415,7 +420,7 @@ protected void onStart(long now, ActionListener listener) { }, listener::onFailure); Instant instantOfTrigger = Instant.ofEpochMilli(now); - // If we 
are not on the initial batch checkpoint and its the first pass of whatever continuous checkpoint we are on, + // If we are not on the initial batch checkpoint and it's the first pass of whatever continuous checkpoint we are on, // we should verify if there are local changes based on the sync config. If not, do not proceed further and exit. if (context.getCheckpoint() > 0 && initialRun()) { checkpointProvider.sourceHasChanged(getLastCheckpoint(), ActionListener.wrap(hasChanged -> { @@ -436,8 +441,7 @@ protected void onStart(long now, ActionListener listener) { hasSourceChanged = true; listener.onFailure(failure); })); - } else if (context.getCheckpoint() == 0 && TransformEffectiveSettings.isUnattended(transformConfig.getSettings())) { - // this transform runs in unattended mode and has never run, to go on + } else if (shouldMaybeCreateDestIndex) { validate(changedSourceListener); } else { hasSourceChanged = true; @@ -447,6 +451,13 @@ protected void onStart(long now, ActionListener listener) { } } + /** + * Returns true if this transform runs in unattended mode and has never run. 
+ */ + private boolean isFirstUnattendedRun() { + return context.getCheckpoint() == 0 && TransformEffectiveSettings.isUnattended(transformConfig.getSettings()); + } + protected void initializeFunction() { // create the function function = FunctionFactory.create(getConfig()); diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/utils/ExceptionRootCauseFinder.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/utils/ExceptionRootCauseFinder.java index 8618b01a0440b..8bf859a020ba4 100644 --- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/utils/ExceptionRootCauseFinder.java +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/utils/ExceptionRootCauseFinder.java @@ -9,6 +9,7 @@ import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.bulk.BulkItemResponse; +import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.search.SearchContextMissingException; import org.elasticsearch.tasks.TaskCancelledException; @@ -63,7 +64,7 @@ public static Throwable getFirstIrrecoverableExceptionFromBulkResponses(Collecti } if (unwrappedThrowable instanceof ElasticsearchException elasticsearchException) { - if (isExceptionIrrecoverable(elasticsearchException)) { + if (isExceptionIrrecoverable(elasticsearchException) && isNotIndexNotFoundException(elasticsearchException)) { return elasticsearchException; } } @@ -72,6 +73,15 @@ public static Throwable getFirstIrrecoverableExceptionFromBulkResponses(Collecti return null; } + /** + * We can safely recover from IndexNotFoundExceptions on Bulk responses. + * If the transform is running, the next checkpoint will recreate the index. + * If the transform is not running, the next start request will recreate the index. 
+ */ + private static boolean isNotIndexNotFoundException(ElasticsearchException elasticsearchException) { + return elasticsearchException instanceof IndexNotFoundException == false; + } + public static boolean isExceptionIrrecoverable(ElasticsearchException elasticsearchException) { if (IRRECOVERABLE_REST_STATUSES.contains(elasticsearchException.status())) { diff --git a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformIndexerFailureHandlingTests.java b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformIndexerFailureHandlingTests.java index fe54847af0404..f39a4329f2bb1 100644 --- a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformIndexerFailureHandlingTests.java +++ b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformIndexerFailureHandlingTests.java @@ -10,10 +10,13 @@ import org.apache.lucene.search.TotalHits; import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.ElasticsearchTimeoutException; +import org.elasticsearch.ResourceNotFoundException; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.DocWriteRequest; import org.elasticsearch.action.bulk.BulkItemResponse; import org.elasticsearch.action.bulk.BulkRequest; import org.elasticsearch.action.bulk.BulkResponse; +import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.search.SearchPhaseExecutionException; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.search.SearchResponse; @@ -27,6 +30,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException; import org.elasticsearch.core.TimeValue; +import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.index.reindex.BulkByScrollResponse; import 
org.elasticsearch.index.reindex.DeleteByQueryRequest; import org.elasticsearch.script.ScriptException; @@ -75,6 +79,7 @@ import java.util.concurrent.atomic.AtomicReference; import java.util.function.Function; import java.util.stream.Collectors; +import java.util.stream.Stream; import static java.util.Collections.singletonList; import static org.elasticsearch.xpack.core.transform.transforms.DestConfigTests.randomDestConfig; @@ -85,6 +90,7 @@ import static org.hamcrest.Matchers.empty; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThan; +import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.matchesRegex; import static org.mockito.ArgumentMatchers.any; import static org.mockito.Mockito.doAnswer; @@ -101,6 +107,10 @@ public class TransformIndexerFailureHandlingTests extends ESTestCase { private Client client; private ThreadPool threadPool; + private static final Function EMPTY_BULK_RESPONSE = bulkRequest -> new BulkResponse( + new BulkItemResponse[0], + 100 + ); static class MockedTransformIndexer extends ClientTransformIndexer { @@ -110,6 +120,7 @@ static class MockedTransformIndexer extends ClientTransformIndexer { // used for synchronizing with the test private CountDownLatch latch; + private int doProcessCount; MockedTransformIndexer( ThreadPool threadPool, @@ -127,7 +138,8 @@ static class MockedTransformIndexer extends ClientTransformIndexer { TransformContext context, Function searchFunction, Function bulkFunction, - Function deleteByQueryFunction + Function deleteByQueryFunction, + int doProcessCount ) { super( threadPool, @@ -157,6 +169,7 @@ static class MockedTransformIndexer extends ClientTransformIndexer { this.searchFunction = searchFunction; this.bulkFunction = bulkFunction; this.deleteByQueryFunction = deleteByQueryFunction; + this.doProcessCount = doProcessCount; } public void initialize() { @@ -278,6 +291,17 @@ void doGetFieldMappings(ActionListener> fieldMappingsListene protected 
void persistState(TransformState state, ActionListener listener) { listener.onResponse(null); } + + @Override + protected IterationResult doProcess(SearchResponse searchResponse) { + if (doProcessCount > 0) { + doProcessCount -= 1; + // pretend that we processed 10k documents for each call + getStats().incrementNumDocuments(10_000); + return new IterationResult<>(Stream.of(new IndexRequest()), new TransformIndexerPosition(null, null), false); + } + return super.doProcess(searchResponse); + } } @Before @@ -936,6 +960,152 @@ public void testHandleFailureAuditing() { auditor.assertAllExpectationsMatched(); } + /** + * Given no bulk upload errors + * When we run the indexer + * Then we should not fail or recreate the destination index + */ + public void testHandleBulkResponseWithNoFailures() throws Exception { + var indexer = runIndexer(createMockIndexer(returnHit(), EMPTY_BULK_RESPONSE)); + assertThat(indexer.getStats().getIndexFailures(), is(0L)); + assertFalse(indexer.context.shouldRecreateDestinationIndex()); + assertNull(indexer.context.getLastFailure()); + } + + private static TransformIndexer runIndexer(MockedTransformIndexer indexer) throws Exception { + var latch = indexer.newLatch(1); + indexer.start(); + assertThat(indexer.getState(), equalTo(IndexerState.STARTED)); + assertTrue(indexer.maybeTriggerAsyncJob(System.currentTimeMillis())); + assertThat(indexer.getState(), equalTo(IndexerState.INDEXING)); + latch.countDown(); + assertBusy(() -> assertThat(indexer.getState(), equalTo(IndexerState.STARTED)), 10, TimeUnit.SECONDS); + return indexer; + } + + private MockedTransformIndexer createMockIndexer( + Function searchFunction, + Function bulkFunction + ) { + return createMockIndexer(searchFunction, bulkFunction, mock(TransformContext.Listener.class)); + } + + private static Function returnHit() { + return request -> new SearchResponse( + new SearchHits(new SearchHit[] { new SearchHit(1) }, new TotalHits(1L, TotalHits.Relation.EQUAL_TO), 1.0f), + // Simulate 
completely null aggs + null, + new Suggest(Collections.emptyList()), + false, + false, + new SearchProfileResults(Collections.emptyMap()), + 1, + "", + 1, + 1, + 0, + 0, + ShardSearchFailure.EMPTY_ARRAY, + SearchResponse.Clusters.EMPTY + ); + } + + /** + * Given an irrecoverable bulk upload error + * When we run the indexer + * Then we should fail without retries and not recreate the destination index + */ + public void testHandleBulkResponseWithIrrecoverableFailures() throws Exception { + var failCalled = new AtomicBoolean(); + var indexer = runIndexer( + createMockIndexer( + returnHit(), + bulkResponseWithError(new ResourceNotFoundException("resource not found error")), + createContextListener(failCalled, new AtomicReference<>()) + ) + ); + assertThat(indexer.getStats().getIndexFailures(), is(1L)); + assertFalse(indexer.context.shouldRecreateDestinationIndex()); + assertTrue(failCalled.get()); + } + + private MockedTransformIndexer createMockIndexer( + Function searchFunction, + Function bulkFunction, + TransformContext.Listener listener + ) { + return createMockIndexer( + new TransformConfig( + randomAlphaOfLength(10), + randomSourceConfig(), + randomDestConfig(), + null, + null, + null, + randomPivotConfig(), + null, + randomBoolean() ? null : randomAlphaOfLengthBetween(1, 1000), + new SettingsConfig.Builder().setMaxPageSearchSize(randomBoolean() ? 
null : randomIntBetween(500, 10_000)).build(), + null, + null, + null, + null + ), + new AtomicReference<>(IndexerState.STOPPED), + searchFunction, + bulkFunction, + null, + threadPool, + ThreadPool.Names.GENERIC, + mock(TransformAuditor.class), + new TransformContext(TransformTaskState.STARTED, "", 0, listener), + 1 + ); + } + + private static Function bulkResponseWithError(Exception e) { + return bulkRequest -> new BulkResponse( + new BulkItemResponse[] { + BulkItemResponse.failure(1, DocWriteRequest.OpType.INDEX, new BulkItemResponse.Failure("the_index", "id", e)) }, + 100 + ); + } + + /** + * Given an IndexNotFound bulk upload error + * When we run the indexer + * Then we should fail with retries and recreate the destination index + */ + public void testHandleBulkResponseWithIndexNotFound() throws Exception { + var indexer = runIndexerWithBulkResponseError(new IndexNotFoundException("Some Error")); + assertThat(indexer.getStats().getIndexFailures(), is(1L)); + assertTrue(indexer.context.shouldRecreateDestinationIndex()); + assertFalse(bulkIndexingException(indexer).isIrrecoverable()); + } + + private TransformIndexer runIndexerWithBulkResponseError(Exception e) throws Exception { + return runIndexer(createMockIndexer(returnHit(), bulkResponseWithError(e))); + } + + private static BulkIndexingException bulkIndexingException(TransformIndexer indexer) { + var lastFailure = indexer.context.getLastFailure(); + assertNotNull(lastFailure); + assertThat(lastFailure, instanceOf(BulkIndexingException.class)); + return (BulkIndexingException) lastFailure; + } + + /** + * Given a recoverable bulk upload error + * When we run the indexer + * Then we should fail with retries and not recreate the destination index + */ + public void testHandleBulkResponseWithNoIrrecoverableFailures() throws Exception { + var indexer = runIndexerWithBulkResponseError(new EsRejectedExecutionException("es rejected execution")); + assertThat(indexer.getStats().getIndexFailures(), is(1L)); + 
assertFalse(indexer.context.shouldRecreateDestinationIndex()); + assertFalse(bulkIndexingException(indexer).isIrrecoverable()); + } + public void testHandleFailure() { testHandleFailure(0, 5, 0, 0); testHandleFailure(5, 0, 5, 2); @@ -1042,11 +1212,36 @@ private MockedTransformIndexer createMockIndexer( String executorName, TransformAuditor auditor, TransformContext context + ) { + return createMockIndexer( + config, + state, + searchFunction, + bulkFunction, + deleteByQueryFunction, + threadPool, + executorName, + auditor, + context, + 0 + ); + } + + private MockedTransformIndexer createMockIndexer( + TransformConfig config, + AtomicReference state, + Function searchFunction, + Function bulkFunction, + Function deleteByQueryFunction, + ThreadPool threadPool, + String executorName, + TransformAuditor auditor, + TransformContext context, + int doProcessCount ) { IndexBasedTransformConfigManager transformConfigManager = mock(IndexBasedTransformConfigManager.class); doAnswer(invocationOnMock -> { - @SuppressWarnings("unchecked") - ActionListener listener = (ActionListener) invocationOnMock.getArguments()[1]; + ActionListener listener = invocationOnMock.getArgument(1); listener.onResponse(config); return null; }).when(transformConfigManager).getTransformConfiguration(any(), any()); @@ -1066,7 +1261,8 @@ private MockedTransformIndexer createMockIndexer( context, searchFunction, bulkFunction, - deleteByQueryFunction + deleteByQueryFunction, + doProcessCount ); indexer.initialize(); diff --git a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/utils/ExceptionRootCauseFinderTests.java b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/utils/ExceptionRootCauseFinderTests.java index b71156cad5adf..9a0431d40a972 100644 --- a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/utils/ExceptionRootCauseFinderTests.java +++ 
b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/utils/ExceptionRootCauseFinderTests.java @@ -15,6 +15,7 @@ import org.elasticsearch.common.breaker.CircuitBreakingException; import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException; import org.elasticsearch.index.Index; +import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.index.mapper.DocumentParsingException; import org.elasticsearch.index.mapper.MapperException; import org.elasticsearch.index.shard.ShardId; @@ -27,116 +28,27 @@ import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xcontent.XContentLocation; +import java.util.Arrays; import java.util.Collection; -import java.util.HashMap; import java.util.Map; +import java.util.concurrent.atomic.AtomicInteger; +import java.util.function.Function; +import java.util.stream.Collectors; public class ExceptionRootCauseFinderTests extends ESTestCase { public void testGetFirstIrrecoverableExceptionFromBulkResponses() { - Map bulkItemResponses = new HashMap<>(); - - int id = 1; - // 1 - bulkItemResponses.put( - id, - BulkItemResponse.failure( - id++, - OpType.INDEX, - new BulkItemResponse.Failure( - "the_index", - "id", - new DocumentParsingException(XContentLocation.UNKNOWN, "document parsing error") - ) - ) - ); - // 2 - bulkItemResponses.put( - id, - BulkItemResponse.failure( - id++, - OpType.INDEX, - new BulkItemResponse.Failure("the_index", "id", new ResourceNotFoundException("resource not found error")) - ) - ); - // 3 - bulkItemResponses.put( - id, - BulkItemResponse.failure( - id++, - OpType.INDEX, - new BulkItemResponse.Failure("the_index", "id", new IllegalArgumentException("illegal argument error")) - ) - ); - // 4 not irrecoverable - bulkItemResponses.put( - id, - BulkItemResponse.failure( - id++, - OpType.INDEX, - new BulkItemResponse.Failure("the_index", "id", new EsRejectedExecutionException("es rejected execution")) - ) - ); - // 5 not irrecoverable - 
bulkItemResponses.put( - id, - BulkItemResponse.failure( - id++, - OpType.INDEX, - new BulkItemResponse.Failure("the_index", "id", new TranslogException(new ShardId("the_index", "uid", 0), "translog error")) - ) - ); - // 6 - bulkItemResponses.put( - id, - BulkItemResponse.failure( - id++, - OpType.INDEX, - new BulkItemResponse.Failure( - "the_index", - "id", - new ElasticsearchSecurityException("Authentication required", RestStatus.UNAUTHORIZED) - ) - ) - ); - // 7 - bulkItemResponses.put( - id, - BulkItemResponse.failure( - id++, - OpType.INDEX, - new BulkItemResponse.Failure( - "the_index", - "id", - new ElasticsearchSecurityException("current license is non-compliant for [transform]", RestStatus.FORBIDDEN) - ) - ) - ); - // 8 not irrecoverable - bulkItemResponses.put( - id, - BulkItemResponse.failure( - id++, - OpType.INDEX, - new BulkItemResponse.Failure( - "the_index", - "id", - new ElasticsearchSecurityException("overloaded, to many requests", RestStatus.TOO_MANY_REQUESTS) - ) - ) - ); - // 9 not irrecoverable - bulkItemResponses.put( - id, - BulkItemResponse.failure( - id++, - OpType.INDEX, - new BulkItemResponse.Failure( - "the_index", - "id", - new ElasticsearchSecurityException("internal error", RestStatus.INTERNAL_SERVER_ERROR) - ) - ) + Map bulkItemResponses = bulkItemResponses( + new DocumentParsingException(XContentLocation.UNKNOWN, "document parsing error"), + new ResourceNotFoundException("resource not found error"), + new IllegalArgumentException("illegal argument error"), + new EsRejectedExecutionException("es rejected execution"), + new TranslogException(new ShardId("the_index", "uid", 0), "translog error"), + new ElasticsearchSecurityException("Authentication required", RestStatus.UNAUTHORIZED), + new ElasticsearchSecurityException("current license is non-compliant for [transform]", RestStatus.FORBIDDEN), + new ElasticsearchSecurityException("overloaded, to many requests", RestStatus.TOO_MANY_REQUESTS), + new 
ElasticsearchSecurityException("internal error", RestStatus.INTERNAL_SERVER_ERROR), + new IndexNotFoundException("some missing index") ); assertFirstException(bulkItemResponses.values(), DocumentParsingException.class, "document parsing error"); @@ -157,6 +69,14 @@ public void testGetFirstIrrecoverableExceptionFromBulkResponses() { assertNull(ExceptionRootCauseFinder.getFirstIrrecoverableExceptionFromBulkResponses(bulkItemResponses.values())); } + private static Map bulkItemResponses(Exception... exceptions) { + var id = new AtomicInteger(1); + return Arrays.stream(exceptions) + .map(exception -> new BulkItemResponse.Failure("the_index", "id", exception)) + .map(failure -> BulkItemResponse.failure(id.get(), OpType.INDEX, failure)) + .collect(Collectors.toMap(response -> id.getAndIncrement(), Function.identity())); + } + public void testIsIrrecoverable() { assertFalse(ExceptionRootCauseFinder.isExceptionIrrecoverable(new MapperException("mappings problem"))); assertFalse(ExceptionRootCauseFinder.isExceptionIrrecoverable(new TaskCancelledException("cancelled task"))); @@ -174,6 +94,7 @@ public void testIsIrrecoverable() { assertTrue( ExceptionRootCauseFinder.isExceptionIrrecoverable(new DocumentParsingException(new XContentLocation(1, 2), "parse error")) ); + assertTrue(ExceptionRootCauseFinder.isExceptionIrrecoverable(new IndexNotFoundException("some missing index"))); } private static void assertFirstException(Collection bulkItemResponses, Class expectedClass, String message) { From 1bc64745f2c397886efd3555e365f88f5089be51 Mon Sep 17 00:00:00 2001 From: Ryan Ernst Date: Thu, 9 May 2024 13:30:21 -0700 Subject: [PATCH 094/117] Add number of nodes accessor to test cluster handle (#108484) Local test clusters have several methods allowing interaction with nodes by ordinal number. However, there is currently no way to know how many nodes were actually configured for the cluster. This commit adds an accessor for the number of nodes the cluster handle knows about. 
--- .../test/cluster/local/DefaultLocalClusterHandle.java | 5 +++++ .../cluster/local/DefaultLocalElasticsearchCluster.java | 5 +++++ .../test/cluster/local/LocalClusterHandle.java | 6 ++++++ 3 files changed, 16 insertions(+) diff --git a/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/local/DefaultLocalClusterHandle.java b/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/local/DefaultLocalClusterHandle.java index 718c9c1bb0042..5292d917df630 100644 --- a/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/local/DefaultLocalClusterHandle.java +++ b/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/local/DefaultLocalClusterHandle.java @@ -65,6 +65,11 @@ public DefaultLocalClusterHandle(String name, List nodes) { this.nodes = nodes; } + @Override + public int getNumNodes() { + return nodes.size(); + } + @Override public void start() { if (started.getAndSet(true) == false) { diff --git a/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/local/DefaultLocalElasticsearchCluster.java b/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/local/DefaultLocalElasticsearchCluster.java index 77b73e7b6ce86..7b24709b18a90 100644 --- a/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/local/DefaultLocalElasticsearchCluster.java +++ b/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/local/DefaultLocalElasticsearchCluster.java @@ -54,6 +54,11 @@ public void evaluate() throws Throwable { }; } + @Override + public int getNumNodes() { + return handle.getNumNodes(); + } + @Override public void start() { checkHandle(); diff --git a/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/local/LocalClusterHandle.java b/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/local/LocalClusterHandle.java index 7a95d682e9ddc..acb9ef77b9e41 100644 --- a/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/local/LocalClusterHandle.java +++ 
b/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/local/LocalClusterHandle.java @@ -16,6 +16,12 @@ import java.io.InputStream; public interface LocalClusterHandle extends ClusterHandle { + + /** + * Returns the number of nodes that are part of this cluster. + */ + int getNumNodes(); + /** * Stops the node at a given index. * @param index of the node to stop From b26dc840cf7036d62275ede0a9cd4665016ddadd Mon Sep 17 00:00:00 2001 From: Dianna Hohensee Date: Thu, 9 May 2024 16:58:26 -0400 Subject: [PATCH 095/117] Explain Settings in the arch. guide (#107379) --- docs/internal/GeneralArchitectureGuide.md | 60 +++++++++++++++++++ .../common/settings/Setting.java | 2 +- 2 files changed, 61 insertions(+), 1 deletion(-) diff --git a/docs/internal/GeneralArchitectureGuide.md b/docs/internal/GeneralArchitectureGuide.md index f865277d07f8f..a2dadb70bf975 100644 --- a/docs/internal/GeneralArchitectureGuide.md +++ b/docs/internal/GeneralArchitectureGuide.md @@ -6,6 +6,66 @@ ## Settings +Elasticsearch supports [cluster-level settings][] and [index-level settings][], configurable via [node-level file settings][] +(e.g. `elasticsearch.yml` file), command line arguments and REST APIs. + +### Declaring a Setting + +[cluster-level settings]: https://www.elastic.co/guide/en/elasticsearch/reference/current/cluster-update-settings.html +[index-level settings]: https://www.elastic.co/guide/en/elasticsearch/reference/current/indices-update-settings.html +[node-level file settings]: https://www.elastic.co/guide/en/elasticsearch/reference/current/settings.html + +The [Setting][] class is the building block for Elasticsearch server settings. Each `Setting` can take multiple [Property][] +declarations to define setting characteristics. All setting values first come from the node-local `elasticsearch.yml` file, +if they are set therein, before falling back to the default specified in their `Setting` declaration. 
[A setting][] with +`Property.Dynamic` can be updated during runtime, but must be paired with a [local volatile variable like this one][] and +registered in the `ClusterSettings` via a utility like [ClusterSettings#initializeAndWatch()][] to catch and immediately +apply dynamic changes. NB that a common dynamic Setting bug is always reading the value directly from [Metadata#settings()][], +which holds the default and dynamically updated values, but _not_ the node-local `elasticsearch.yml` value. The scope of a +Setting must also be declared, such as `Property.IndexScope` for a setting that applies to indexes, or `Property.NodeScope` +for a cluster-level setting. + +[Setting]: https://github.com/elastic/elasticsearch/blob/v8.13.2/server/src/main/java/org/elasticsearch/common/settings/Setting.java#L57-L80 +[Property]: https://github.com/elastic/elasticsearch/blob/v8.13.2/server/src/main/java/org/elasticsearch/common/settings/Setting.java#L82 +[A setting]: https://github.com/elastic/elasticsearch/blob/v8.13.2/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/BalancedShardsAllocator.java#L111-L117 +[local volatile variable like this one]: https://github.com/elastic/elasticsearch/blob/v8.13.2/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/BalancedShardsAllocator.java#L123 +[ClusterSettings#initializeAndWatch()]: https://github.com/elastic/elasticsearch/blob/v8.13.2/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/BalancedShardsAllocator.java#L145 +[Metadata#settings()]: https://github.com/elastic/elasticsearch/blob/v8.13.2/server/src/main/java/org/elasticsearch/cluster/metadata/Metadata.java#L713-L715 + +[ClusterSettings][] tracks the [core Elasticsearch settings][]. Ultimately the `ClusterSettings` get loaded via the +[SettingsModule][]. Additional settings from the various plugins are [collected during node construction] and passed into the +[SettingsModule constructor][]. 
The Plugin interface has a [getSettings()][] method via which each plugin can declare additional +settings. + +[ClusterSettings]: https://github.com/elastic/elasticsearch/blob/v8.13.2/server/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java#L138 +[core Elasticsearch settings]: https://github.com/elastic/elasticsearch/blob/v8.13.2/server/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java#L204-L586 +[SettingsModule]: https://github.com/elastic/elasticsearch/blob/v8.13.2/server/src/main/java/org/elasticsearch/common/settings/SettingsModule.java#L54 +[collected during node construction]: https://github.com/elastic/elasticsearch/blob/v8.13.2/server/src/main/java/org/elasticsearch/node/NodeConstruction.java#L483 +[SettingsModule constructor]: https://github.com/elastic/elasticsearch/blob/v8.13.2/server/src/main/java/org/elasticsearch/node/NodeConstruction.java#L491-L495 +[getSettings()]: https://github.com/elastic/elasticsearch/blob/v8.13.2/server/src/main/java/org/elasticsearch/plugins/Plugin.java#L203-L208 + +### Dynamically updating a Setting + +Externally, [TransportClusterUpdateSettingsAction][] and [TransportUpdateSettingsAction][] (and the corresponding REST endpoints) +allow users to dynamically change cluster and index settings, respectively. Internally, `AbstractScopedSettings` (parent class +of `ClusterSettings`) has various helper methods to track dynamic changes: it keeps a [registry of `SettingUpdater`][] consumer +lambdas to run updates when settings are changed in the cluster state. The `ClusterApplierService` [sends setting updates][] +through to the `AbstractScopedSettings`, invoking the consumers registered therein for each updated setting. 
+ +[TransportClusterUpdateSettingsAction]: https://github.com/elastic/elasticsearch/blob/v8.13.2/server/src/main/java/org/elasticsearch/action/admin/cluster/settings/TransportClusterUpdateSettingsAction.java#L154-L160 +[TransportUpdateSettingsAction]: https://github.com/elastic/elasticsearch/blob/v8.13.2/server/src/main/java/org/elasticsearch/action/admin/indices/settings/put/TransportUpdateSettingsAction.java#L96-L101 +[registry of `SettingUpdater`]: https://github.com/elastic/elasticsearch/blob/v8.13.2/server/src/main/java/org/elasticsearch/common/settings/AbstractScopedSettings.java#L379-L381 +[sends setting updates]: https://github.com/elastic/elasticsearch/blob/v8.13.2/server/src/main/java/org/elasticsearch/cluster/service/ClusterApplierService.java#L490-L494 + +Index settings are always persisted. They can only be modified on an existing index, and setting values are persisted as part +of the `IndexMetadata`. Cluster settings, however, can be either persisted or transient depending on how they are tied to +[Metadata][] ([applied here][]). Changes to persisted cluster settings will survive a full cluster restart; whereas changes +made to transient cluster settings will reset to their default values, or the `elasticsearch.yml` values, if the cluster +state must ever be reloaded from persisted state. 
+ +[Metadata]: https://github.com/elastic/elasticsearch/blob/v8.13.2/server/src/main/java/org/elasticsearch/cluster/metadata/Metadata.java#L212-L213 +[applied here]: https://github.com/elastic/elasticsearch/blob/v8.13.2/server/src/main/java/org/elasticsearch/cluster/metadata/Metadata.java#L2437 + ## Deprecations ## Plugins diff --git a/server/src/main/java/org/elasticsearch/common/settings/Setting.java b/server/src/main/java/org/elasticsearch/common/settings/Setting.java index 4fb02fdaac7b4..a385950e10922 100644 --- a/server/src/main/java/org/elasticsearch/common/settings/Setting.java +++ b/server/src/main/java/org/elasticsearch/common/settings/Setting.java @@ -113,7 +113,7 @@ public enum Property { DeprecatedWarning, /** - * Node scope + * Cluster-level or configuration file-level setting. Not an index setting. */ NodeScope, From 0081c1cd2b0ef5de5d97c25a47d598deb4a3a2fb Mon Sep 17 00:00:00 2001 From: Mark Vieira Date: Thu, 9 May 2024 16:33:08 -0700 Subject: [PATCH 096/117] Bump esql test suite timeout (#108488) --- .../elasticsearch/xpack/esql/qa/rest/EsqlSpecTestCase.java | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/EsqlSpecTestCase.java b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/EsqlSpecTestCase.java index 448d39913a8f6..fc65cb990f82b 100644 --- a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/EsqlSpecTestCase.java +++ b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/EsqlSpecTestCase.java @@ -7,8 +7,10 @@ package org.elasticsearch.xpack.esql.qa.rest; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; +import com.carrotsearch.randomizedtesting.annotations.TimeoutSuite; import org.apache.http.HttpEntity; +import org.apache.lucene.tests.util.TimeUnits; import org.elasticsearch.Build; import org.elasticsearch.Version; import 
org.elasticsearch.client.Request; @@ -56,6 +58,8 @@ import static org.elasticsearch.xpack.ql.CsvSpecReader.specParser; import static org.elasticsearch.xpack.ql.TestUtils.classpathResources; +// This test can run very long in serverless configurations +@TimeoutSuite(millis = 30 * TimeUnits.MINUTE) public abstract class EsqlSpecTestCase extends ESRestTestCase { // To avoid referencing the main module, we replicate EsqlFeatures.ASYNC_QUERY.id() here From e10b3d4b2b51ea095ec841625d7ec74e6ad5d7da Mon Sep 17 00:00:00 2001 From: Mark Vieira Date: Thu, 9 May 2024 16:33:26 -0700 Subject: [PATCH 097/117] Increase startup timeout in packaging tests (#108487) --- .../test/java/org/elasticsearch/packaging/util/Archives.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/qa/packaging/src/test/java/org/elasticsearch/packaging/util/Archives.java b/qa/packaging/src/test/java/org/elasticsearch/packaging/util/Archives.java index ecc043906bd1a..787069eb2605c 100644 --- a/qa/packaging/src/test/java/org/elasticsearch/packaging/util/Archives.java +++ b/qa/packaging/src/test/java/org/elasticsearch/packaging/util/Archives.java @@ -264,7 +264,7 @@ public static Shell.Result startElasticsearchWithTty( Locale.ROOT, """ expect - < Date: Fri, 10 May 2024 08:47:40 +0100 Subject: [PATCH 098/117] Handle must_not clauses when disabling the weight matches highlighting mode (#108453) This change makes sure we check all queries, even the must_not ones, to decide if we should disable weight matches highlighting or not. 
Closes #101667 Closes #106693 --- .../test/search.highlight/10_unified.yml | 115 ++++++++++++------ .../uhighlight/CustomUnifiedHighlighter.java | 3 +- 2 files changed, 82 insertions(+), 36 deletions(-) diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.highlight/10_unified.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.highlight/10_unified.yml index 3ae8f8b09aa4a..ca1d22e4a1ce7 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.highlight/10_unified.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.highlight/10_unified.yml @@ -14,12 +14,26 @@ setup: "postings": "type": "text" "index_options": "offsets" + "nested": + "type": "nested" + "properties": + "text": + "type": "text" + "vectors": + "type": "dense_vector" + "dims": 2 + "index": true + "similarity": "l2_norm" + - do: index: index: test id: "1" body: "text" : "The quick brown fox is brown." + "nested": + "text": "The quick brown fox is brown." 
+ "vectors": [1, 2] - do: indices.refresh: {} @@ -43,6 +57,7 @@ teardown: "query" : { "multi_match" : { "query" : "quick brown fox", "fields" : [ "text*"] } }, "highlight" : { "type" : "unified", "fields" : { "*" : {} } } } + - length: { hits.hits.0.highlight: 3 } - match: {hits.hits.0.highlight.text.0: "The quick brown fox is brown."} - match: {hits.hits.0.highlight.text\.fvh.0: "The quick brown fox is brown."} - match: {hits.hits.0.highlight.text\.postings.0: "The quick brown fox is brown."} @@ -58,6 +73,7 @@ teardown: "query" : { "combined_fields" : { "query" : "quick brown fox", "fields" : [ "text*"] } }, "highlight" : { "type" : "unified", "fields" : { "*" : {} } } } + - length: { hits.hits.0.highlight: 3 } - match: {hits.hits.0.highlight.text.0: "The quick brown fox is brown."} - match: {hits.hits.0.highlight.text\.fvh.0: "The quick brown fox is brown."} - match: {hits.hits.0.highlight.text\.postings.0: "The quick brown fox is brown."} @@ -72,11 +88,13 @@ teardown: search: body: { "query": { "multi_match": { "query": "quick brown fox", "type": "phrase", "fields": [ "text*" ] } }, - "highlight": { "type": "unified", "fields": { "*": { } } } } + "highlight": { "type": "unified", "fields": { "*": { } } } + } - - match: { hits.hits.0.highlight.text.0: "The quick brown fox is brown." } - - match: { hits.hits.0.highlight.text\.fvh.0: "The quick brown fox is brown." } - - match: { hits.hits.0.highlight.text\.postings.0: "The quick brown fox is brown." } + - length: { hits.hits.0.highlight: 3 } + - match: { hits.hits.0.highlight.text.0: "The quick brown fox is brown." } + - match: { hits.hits.0.highlight.text\.fvh.0: "The quick brown fox is brown." } + - match: { hits.hits.0.highlight.text\.postings.0: "The quick brown fox is brown." 
} - do: indices.put_settings: @@ -90,6 +108,7 @@ teardown: "query" : { "multi_match" : { "query" : "quick brown fox", "type": "phrase", "fields" : [ "text*"] } }, "highlight" : { "type" : "unified", "fields" : { "*" : {} } } } + - length: { hits.hits.0.highlight: 3 } - match: {hits.hits.0.highlight.text.0: "The quick brown fox is brown."} - match: {hits.hits.0.highlight.text\.fvh.0: "The quick brown fox is brown."} - match: {hits.hits.0.highlight.text\.postings.0: "The quick brown fox is brown."} @@ -100,43 +119,69 @@ teardown: reason: 'kNN was not correctly skipped until 8.12' - do: - indices.create: - index: test-highlighting-knn - body: - mappings: - "properties": - "vectors": - "type": "dense_vector" - "dims": 2 - "index": true - "similarity": "l2_norm" - "text": - "type": "text" - "fields": - "fvh": - "type": "text" - "term_vector": "with_positions_offsets" - "postings": - "type": "text" - "index_options": "offsets" - - do: - index: - index: test-highlighting-knn - id: "1" - body: - "text" : "The quick brown fox is brown." - "vectors": [1, 2] + search: + index: test + body: { + "query": { "multi_match": { "query": "quick brown fox", "type": "phrase", "fields": [ "text*" ] } }, + "highlight": { "type": "unified", "fields": { "text*": { } } }, + "knn": { "field": "vectors", "query_vector": [1, 2], "k": 10, "num_candidates": 10 } } + + - length: { hits.hits.0.highlight: 3 } + - match: { hits.hits.0.highlight.text.0: "The quick brown fox is brown." } + - match: { hits.hits.0.highlight.text\.fvh.0: "The quick brown fox is brown." } + - match: { hits.hits.0.highlight.text\.postings.0: "The quick brown fox is brown." 
} + +--- +"Test nested queries automatically disable weighted mode": + - requires: + cluster_features: "gte_v8.15.0" + reason: 'nested was not correctly skipped until 8.15' + - do: - indices.refresh: {} + search: + index: test + body: { + "query": { + "nested": { + "path": "nested", + "query": { + "multi_match": { + "query": "quick brown fox", + "type": "phrase", + "fields": [ "nested.text" ] + } + } + } + }, + "highlight": { "type": "unified", "fields": { "*": { } } } + } + + - length: { hits.hits.0.highlight: 1 } + - match: { hits.hits.0.highlight.nested\.text.0: "The quick brown fox is brown." } - do: search: - index: test-highlighting-knn + index: test body: { - "query": { "multi_match": { "query": "quick brown fox", "type": "phrase", "fields": [ "text*" ] } }, - "highlight": { "type": "unified", "fields": { "*": { } } }, - "knn": { "field": "vectors", "query_vector": [1, 2], "k": 10, "num_candidates": 10 } } + "query": { + "bool": { + "must_not": { + "nested": { + "path": "nested", + "query": { + "multi_match": { "query": "quick red fox", "type": "phrase", "fields": [ "nested.text" ] } + } + } + }, + "should": { + "multi_match": { "query": "quick brown fox", "type": "phrase", "fields": [ "text*" ] } + } + } + }, + "highlight": { "type": "unified", "fields": { "text*": { } } } + } + - length: { hits.hits.0.highlight: 3 } - match: { hits.hits.0.highlight.text.0: "The quick brown fox is brown." } - match: { hits.hits.0.highlight.text\.fvh.0: "The quick brown fox is brown." } - match: { hits.hits.0.highlight.text\.postings.0: "The quick brown fox is brown." 
} diff --git a/server/src/main/java/org/elasticsearch/lucene/search/uhighlight/CustomUnifiedHighlighter.java b/server/src/main/java/org/elasticsearch/lucene/search/uhighlight/CustomUnifiedHighlighter.java index 5c1381f730013..c29e248b1a689 100644 --- a/server/src/main/java/org/elasticsearch/lucene/search/uhighlight/CustomUnifiedHighlighter.java +++ b/server/src/main/java/org/elasticsearch/lucene/search/uhighlight/CustomUnifiedHighlighter.java @@ -293,7 +293,8 @@ public QueryVisitor getSubVisitor(BooleanClause.Occur occur, Query parent) { if (parent instanceof ESToParentBlockJoinQuery) { hasUnknownLeaf[0] = true; } - return super.getSubVisitor(occur, parent); + // we want to visit all queries, including those within the must_not clauses. + return this; } }); return hasUnknownLeaf[0]; From bc37ecfbafefd7cb84976cd17a8129bd7c24afac Mon Sep 17 00:00:00 2001 From: Simon Cooper Date: Fri, 10 May 2024 09:48:37 +0100 Subject: [PATCH 099/117] Specify some parameters as always supported by capabilities (#108461) --- .../java/org/elasticsearch/rest/BaseRestHandler.java | 11 ++++++++--- 1 file changed, 8 insertions(+), 3 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/rest/BaseRestHandler.java b/server/src/main/java/org/elasticsearch/rest/BaseRestHandler.java index 70801cdef560b..b142e4d567c04 100644 --- a/server/src/main/java/org/elasticsearch/rest/BaseRestHandler.java +++ b/server/src/main/java/org/elasticsearch/rest/BaseRestHandler.java @@ -76,13 +76,18 @@ public final long getUsageCount() { @Override public abstract List routes(); + private static final Set ALWAYS_SUPPORTED = Set.of("format", "filter_path", "pretty", "human"); + @Override public final void handleRequest(RestRequest request, RestChannel channel, NodeClient client) throws Exception { // check if the query has any parameters that are not in the supported set (if declared) Set supported = supportedQueryParameters(); - if (supported != null && supported.containsAll(request.params().keySet()) == 
false) { - Set unsupported = Sets.difference(request.params().keySet(), supported); - throw new IllegalArgumentException(unrecognized(request, unsupported, supported, "parameter")); + if (supported != null) { + var allSupported = Sets.union(ALWAYS_SUPPORTED, supported); + if (allSupported.containsAll(request.params().keySet()) == false) { + Set unsupported = Sets.difference(request.params().keySet(), allSupported); + throw new IllegalArgumentException(unrecognized(request, unsupported, allSupported, "parameter")); + } } // prepare the request for execution; has the side effect of touching the request parameters From 0eae05633684c6b2c974cd0272713bf52c4ab66d Mon Sep 17 00:00:00 2001 From: Tim Grein Date: Fri, 10 May 2024 11:10:06 +0200 Subject: [PATCH 100/117] [Inference API] Add AzureOpenAiCompletionServiceSettings and AzureOpenAiCompletionTaskSettings to InferenceNamedWriteablesProvider (#108491) --- .../InferenceNamedWriteablesProvider.java | 17 +++++++++++++++++ 1 file changed, 17 insertions(+) diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferenceNamedWriteablesProvider.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferenceNamedWriteablesProvider.java index 8d01b25aa2795..41bef3521cdf2 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferenceNamedWriteablesProvider.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferenceNamedWriteablesProvider.java @@ -26,6 +26,8 @@ import org.elasticsearch.xpack.core.inference.results.TextEmbeddingByteResults; import org.elasticsearch.xpack.core.inference.results.TextEmbeddingResults; import org.elasticsearch.xpack.inference.services.azureopenai.AzureOpenAiSecretSettings; +import org.elasticsearch.xpack.inference.services.azureopenai.completion.AzureOpenAiCompletionServiceSettings; +import 
org.elasticsearch.xpack.inference.services.azureopenai.completion.AzureOpenAiCompletionTaskSettings; import org.elasticsearch.xpack.inference.services.azureopenai.embeddings.AzureOpenAiEmbeddingsServiceSettings; import org.elasticsearch.xpack.inference.services.azureopenai.embeddings.AzureOpenAiEmbeddingsTaskSettings; import org.elasticsearch.xpack.inference.services.cohere.CohereServiceSettings; @@ -237,6 +239,21 @@ public static List getNamedWriteables() { ) ); + namedWriteables.add( + new NamedWriteableRegistry.Entry( + ServiceSettings.class, + AzureOpenAiCompletionServiceSettings.NAME, + AzureOpenAiCompletionServiceSettings::new + ) + ); + namedWriteables.add( + new NamedWriteableRegistry.Entry( + TaskSettings.class, + AzureOpenAiCompletionTaskSettings.NAME, + AzureOpenAiCompletionTaskSettings::new + ) + ); + return namedWriteables; } } From 2541ce9c4d37191f43cfc0be3c9462adbb8dc1fb Mon Sep 17 00:00:00 2001 From: Pooya Salehi Date: Fri, 10 May 2024 11:47:31 +0200 Subject: [PATCH 101/117] Log skipped prevoting as INFO (#108411) Relates ES-6576 --- .../org/elasticsearch/cluster/coordination/Coordinator.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/server/src/main/java/org/elasticsearch/cluster/coordination/Coordinator.java b/server/src/main/java/org/elasticsearch/cluster/coordination/Coordinator.java index 156ba88a7d2b1..daff05f0fb19b 100644 --- a/server/src/main/java/org/elasticsearch/cluster/coordination/Coordinator.java +++ b/server/src/main/java/org/elasticsearch/cluster/coordination/Coordinator.java @@ -1781,7 +1781,7 @@ public void run() { final var nodeEligibility = localNodeMayWinElection(lastAcceptedState, electionStrategy); if (nodeEligibility.mayWin() == false) { assert nodeEligibility.reason().isEmpty() == false; - logger.trace( + logger.info( "skip prevoting as local node may not win election ({}): {}", nodeEligibility.reason(), lastAcceptedState.coordinationMetadata() From 2e0f8d087c370c43d258c2e1ac4e5ac91a2a9c2d Mon Sep 17 
00:00:00 2001 From: =?UTF-8?q?Lorenzo=20Dematt=C3=A9?= Date: Fri, 10 May 2024 11:58:34 +0200 Subject: [PATCH 102/117] Add a SIMD (AVX2) optimised vector distance function for int7 on x64 (#108088) * Adding support for x64 to native vec library * Fix: aarch64 sqr7u dims * Fix: add symbol stripping (deb lintian) --------- Co-authored-by: Chris Hegarty <62058229+ChrisHegarty@users.noreply.github.com> Co-authored-by: Elastic Machine --- docs/changelog/108088.yaml | 5 + libs/native/libraries/build.gradle | 2 +- .../nativeaccess/PosixNativeAccess.java | 10 +- .../VectorSimilarityFunctionsTests.java | 4 +- libs/vec/native/Dockerfile | 5 +- libs/vec/native/build.gradle | 76 +++++++-- libs/vec/native/publish_vec_binaries.sh | 16 +- libs/vec/native/src/vec/c/{ => aarch64}/vec.c | 2 +- libs/vec/native/src/vec/c/amd64/vec.c | 150 ++++++++++++++++++ libs/vec/native/src/vec/headers/vec.h | 2 +- .../vec/AbstractVectorTestCase.java | 4 +- 11 files changed, 254 insertions(+), 22 deletions(-) create mode 100644 docs/changelog/108088.yaml rename libs/vec/native/src/vec/c/{ => aarch64}/vec.c (99%) create mode 100644 libs/vec/native/src/vec/c/amd64/vec.c diff --git a/docs/changelog/108088.yaml b/docs/changelog/108088.yaml new file mode 100644 index 0000000000000..95c58f6dc19f1 --- /dev/null +++ b/docs/changelog/108088.yaml @@ -0,0 +1,5 @@ +pr: 108088 +summary: Add a SIMD (AVX2) optimised vector distance function for int7 on x64 +area: "Search" +type: enhancement +issues: [] diff --git a/libs/native/libraries/build.gradle b/libs/native/libraries/build.gradle index 168eb533fea74..7a545787bbdae 100644 --- a/libs/native/libraries/build.gradle +++ b/libs/native/libraries/build.gradle @@ -18,7 +18,7 @@ configurations { } var zstdVersion = "1.5.5" -var vecVersion = "1.0.6" +var vecVersion = "1.0.8" repositories { exclusiveContent { diff --git a/libs/native/src/main/java/org/elasticsearch/nativeaccess/PosixNativeAccess.java 
b/libs/native/src/main/java/org/elasticsearch/nativeaccess/PosixNativeAccess.java index 56017d3a8a20a..c390cfc9289c6 100644 --- a/libs/native/src/main/java/org/elasticsearch/nativeaccess/PosixNativeAccess.java +++ b/libs/native/src/main/java/org/elasticsearch/nativeaccess/PosixNativeAccess.java @@ -45,7 +45,15 @@ public Optional getVectorSimilarityFunctions() { } static boolean isNativeVectorLibSupported() { - return Runtime.version().feature() >= 21 && isMacOrLinuxAarch64() && checkEnableSystemProperty(); + return Runtime.version().feature() >= 21 && (isMacOrLinuxAarch64() || isLinuxAmd64()) && checkEnableSystemProperty(); + } + + /** + * Returns true iff the architecture is x64 (amd64) and the OS Linux (the OS we currently support for the native lib). + */ + static boolean isLinuxAmd64() { + String name = System.getProperty("os.name"); + return (name.startsWith("Linux")) && System.getProperty("os.arch").equals("amd64"); } /** Returns true iff the OS is Mac or Linux, and the architecture is aarch64. 
*/ diff --git a/libs/native/src/test/java/org/elasticsearch/nativeaccess/VectorSimilarityFunctionsTests.java b/libs/native/src/test/java/org/elasticsearch/nativeaccess/VectorSimilarityFunctionsTests.java index adf32874c04f1..8c4cbb688abcd 100644 --- a/libs/native/src/test/java/org/elasticsearch/nativeaccess/VectorSimilarityFunctionsTests.java +++ b/libs/native/src/test/java/org/elasticsearch/nativeaccess/VectorSimilarityFunctionsTests.java @@ -37,7 +37,9 @@ public boolean supported() { var arch = System.getProperty("os.arch"); var osName = System.getProperty("os.name"); - if (jdkVersion >= 21 && arch.equals("aarch64") && (osName.startsWith("Mac") || osName.equals("Linux"))) { + if (jdkVersion >= 21 + && ((arch.equals("aarch64") && (osName.startsWith("Mac") || osName.equals("Linux"))) + || (arch.equals("amd64") && osName.equals("Linux")))) { assertThat(vectorSimilarityFunctions, isPresent()); return true; } else { diff --git a/libs/vec/native/Dockerfile b/libs/vec/native/Dockerfile index 25dcf4d4854d0..66eb7e92ef479 100644 --- a/libs/vec/native/Dockerfile +++ b/libs/vec/native/Dockerfile @@ -4,6 +4,7 @@ RUN apt update RUN apt install -y gcc g++ openjdk-17-jdk COPY . /workspace WORKDIR /workspace -RUN ./gradlew --quiet --console=plain clean vecSharedLibrary +RUN ./gradlew --quiet --console=plain clean buildSharedLibrary +RUN strip --strip-unneeded build/output/libvec.so -CMD cat build/libs/vec/shared/libvec.so +CMD cat build/output/libvec.so diff --git a/libs/vec/native/build.gradle b/libs/vec/native/build.gradle index 6a658da0644b7..7edf46d406862 100644 --- a/libs/vec/native/build.gradle +++ b/libs/vec/native/build.gradle @@ -12,9 +12,10 @@ var os = org.gradle.internal.os.OperatingSystem.current() // To update this library run publish_vec_binaries.sh ( or ./gradlew vecSharedLibrary ) // Or // For local development, build the docker image with: -// docker build --platform linux/arm64 --progress=plain . +// docker build --platform linux/arm64 --progress=plain . 
(for aarch64) +// docker build --platform linux/amd64 --progress=plain . (for x64) // Grab the image id from the console output, then, e.g. -// docker run 9c9f36564c148b275aeecc42749e7b4580ded79dcf51ff6ccc008c8861e7a979 > build/libs/vec/shared/libvec.so +// docker run 9c9f36564c148b275aeecc42749e7b4580ded79dcf51ff6ccc008c8861e7a979 > build/libs/vec/shared/$arch/libvec.so // // To run tests and benchmarks on a locally built libvec, // 1. Temporarily comment out the download in libs/native/library/build.gradle @@ -30,26 +31,83 @@ var os = org.gradle.internal.os.OperatingSystem.current() group = 'org.elasticsearch' +def platformName = System.getProperty("os.arch"); + model { + platforms { + aarch64 { + architecture "aarch64" + } + amd64 { + architecture "x86-64" + } + } toolChains { gcc(Gcc) { target("aarch64") { cCompiler.executable = "/usr/bin/gcc" + cCompiler.withArguments { args -> args.addAll(["-O3", "-std=c99", "-march=armv8-a"]) } + } + target("amd64") { + cCompiler.executable = "/usr/bin/gcc" + cCompiler.withArguments { args -> args.addAll(["-O3", "-std=c99", "-march=core-avx2", "-Wno-incompatible-pointer-types"]) } } } - clang(Clang) - } - platforms { - aarch64 { - architecture "aarch64" + cl(VisualCpp) { + eachPlatform { toolchain -> + def platform = toolchain.getPlatform() + if (platform.name == "x64") { + cCompiler.withArguments { args -> args.addAll(["/O2", "/LD", "-march=core-avx2"]) } + } + } + } + clang(Clang) { + target("amd64") { + cCompiler.withArguments { args -> args.addAll(["-O3", "-std=c99", "-march=core-avx2"]) } + } } } components { vec(NativeLibrarySpec) { targetPlatform "aarch64" - binaries.withType(SharedLibraryBinarySpec) { - cCompiler.args "-O3", "-std=c99", "-march=armv8-a" + targetPlatform "amd64" + + sources { + c { + source { + srcDir "src/vec/c/${platformName}/" + include "*.c" + } + exportedHeaders { + srcDir "src/vec/headers/" + } + } + } + } + } +} + +tasks.register('buildSharedLibrary') { + description = 'Assembles native shared 
library for the host architecture' + if (platformName.equals("aarch64")) { + dependsOn tasks.vecAarch64SharedLibrary + doLast { + copy { + from tasks.linkVecAarch64SharedLibrary.outputs.files.files + into layout.buildDirectory.dir('output'); + duplicatesStrategy = 'INCLUDE' + } + } + } else if (platformName.equals("amd64")) { + dependsOn tasks.vecAmd64SharedLibrary + doLast { + copy { + from tasks.linkVecAmd64SharedLibrary.outputs.files.files + into layout.buildDirectory.dir('output'); + duplicatesStrategy = 'INCLUDE' } } + } else { + throw new GradleException("Unsupported platform: " + platformName) } } diff --git a/libs/vec/native/publish_vec_binaries.sh b/libs/vec/native/publish_vec_binaries.sh index e17690160e253..2ed6c750ab9e8 100755 --- a/libs/vec/native/publish_vec_binaries.sh +++ b/libs/vec/native/publish_vec_binaries.sh @@ -19,7 +19,7 @@ if [ -z "$ARTIFACTORY_API_KEY" ]; then exit 1; fi -VERSION="1.0.6" +VERSION="1.0.8" ARTIFACTORY_REPOSITORY="${ARTIFACTORY_REPOSITORY:-https://artifactory.elastic.dev/artifactory/elasticsearch-native/}" TEMP=$(mktemp -d) @@ -29,16 +29,22 @@ if curl -sS -I --fail --location "${ARTIFACTORY_REPOSITORY}/org/elasticsearch/ve fi echo 'Building Darwin binary...' -./gradlew --quiet --console=plain vecSharedLibrary +./gradlew --quiet --console=plain vecAarch64SharedLibrary echo 'Building Linux binary...' DOCKER_IMAGE=$(docker build --platform linux/arm64 --quiet .) -docker run $DOCKER_IMAGE > build/libs/vec/shared/libvec.so +docker run $DOCKER_IMAGE > build/libs/vec/shared/aarch64/libvec.so + +echo 'Building Linux x64 binary...' +DOCKER_IMAGE=$(docker build --platform linux/amd64 --quiet .) 
+docker run --platform linux/amd64 $DOCKER_IMAGE > build/libs/vec/shared/amd64/libvec.so mkdir -p $TEMP/darwin-aarch64 mkdir -p $TEMP/linux-aarch64 -cp build/libs/vec/shared/libvec.dylib $TEMP/darwin-aarch64/ -cp build/libs/vec/shared/libvec.so $TEMP/linux-aarch64/ +mkdir -p $TEMP/linux-x64 +cp build/libs/vec/shared/aarch64/libvec.dylib $TEMP/darwin-aarch64/ +cp build/libs/vec/shared/aarch64/libvec.so $TEMP/linux-aarch64/ +cp build/libs/vec/shared/amd64/libvec.so $TEMP/linux-x64/ echo 'Uploading to Artifactory...' (cd $TEMP && zip -rq - .) | curl -sS -X PUT -H "X-JFrog-Art-Api: ${ARTIFACTORY_API_KEY}" --data-binary @- --location "${ARTIFACTORY_REPOSITORY}/org/elasticsearch/vec/${VERSION}/vec-${VERSION}.zip" diff --git a/libs/vec/native/src/vec/c/vec.c b/libs/vec/native/src/vec/c/aarch64/vec.c similarity index 99% rename from libs/vec/native/src/vec/c/vec.c rename to libs/vec/native/src/vec/c/aarch64/vec.c index 05dfe64a3be9b..478e5e84d3859 100644 --- a/libs/vec/native/src/vec/c/vec.c +++ b/libs/vec/native/src/vec/c/aarch64/vec.c @@ -121,7 +121,7 @@ static inline int32_t sqr7u_inner(int8_t *a, int8_t *b, size_t dims) { EXPORT int32_t sqr7u(int8_t* a, int8_t* b, size_t dims) { int32_t res = 0; int i = 0; - if (i > SQR7U_STRIDE_BYTES_LEN) { + if (dims > SQR7U_STRIDE_BYTES_LEN) { i += dims & ~(SQR7U_STRIDE_BYTES_LEN - 1); res = sqr7u_inner(a, b, i); } diff --git a/libs/vec/native/src/vec/c/amd64/vec.c b/libs/vec/native/src/vec/c/amd64/vec.c new file mode 100644 index 0000000000000..c9a49ad2d1d4d --- /dev/null +++ b/libs/vec/native/src/vec/c/amd64/vec.c @@ -0,0 +1,150 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. 
+ */ + +#include +#include +#include "vec.h" + +#include +#include + +#ifndef DOT7U_STRIDE_BYTES_LEN +#define DOT7U_STRIDE_BYTES_LEN 32 // Must be a power of 2 +#endif + +#ifndef SQR7U_STRIDE_BYTES_LEN +#define SQR7U_STRIDE_BYTES_LEN 32 // Must be a power of 2 +#endif + +#ifdef _MSC_VER +#include +#elif __GNUC__ +#include +#elif __clang__ +#include +#endif + +// Multi-platform CPUID "intrinsic"; it takes as input a "functionNumber" (or "leaf", the eax registry). "Subleaf" +// is always 0. Output is stored in the passed output parameter: output[0] = eax, output[1] = ebx, output[2] = ecx, +// output[3] = edx +static inline void cpuid(int output[4], int functionNumber) { +#if defined(__GNUC__) || defined(__clang__) + // use inline assembly, Gnu/AT&T syntax + int a, b, c, d; + __asm("cpuid" : "=a"(a), "=b"(b), "=c"(c), "=d"(d) : "a"(functionNumber), "c"(0) : ); + output[0] = a; + output[1] = b; + output[2] = c; + output[3] = d; + +#elif defined (_MSC_VER) + __cpuidex(output, functionNumber, 0); +#else + #error Unsupported compiler +#endif +} + +// Utility function to horizontally add 8 32-bit integers +static inline int hsum_i32_8(const __m256i a) { + const __m128i sum128 = _mm_add_epi32(_mm256_castsi256_si128(a), _mm256_extractf128_si256(a, 1)); + const __m128i hi64 = _mm_unpackhi_epi64(sum128, sum128); + const __m128i sum64 = _mm_add_epi32(hi64, sum128); + const __m128i hi32 = _mm_shuffle_epi32(sum64, _MM_SHUFFLE(2, 3, 0, 1)); + return _mm_cvtsi128_si32(_mm_add_epi32(sum64, hi32)); +} + +EXPORT int vec_caps() { + int cpuInfo[4] = {-1}; + // Calling __cpuid with 0x0 as the function_id argument + // gets the number of the highest valid function ID. 
+ cpuid(cpuInfo, 0); + int functionIds = cpuInfo[0]; + if (functionIds >= 7) { + cpuid(cpuInfo, 7); + int ebx = cpuInfo[1]; + // AVX2 flag is the 5th bit + // We assume that all processors that have AVX2 also have FMA3 + return (ebx & (1 << 5)) != 0; + } + return 0; +} + +static inline int32_t dot7u_inner(int8_t* a, int8_t* b, size_t dims) { + const __m256i ones = _mm256_set1_epi16(1); + + // Init accumulator(s) with 0 + __m256i acc1 = _mm256_setzero_si256(); + +#pragma GCC unroll 4 + for(int i = 0; i < dims; i += DOT7U_STRIDE_BYTES_LEN) { + // Load packed 8-bit integers + __m256i va1 = _mm256_loadu_si256(a + i); + __m256i vb1 = _mm256_loadu_si256(b + i); + + // Perform multiplication and create 16-bit values + // Vertically multiply each unsigned 8-bit integer from va with the corresponding + // 8-bit integer from vb, producing intermediate signed 16-bit integers. + const __m256i vab = _mm256_maddubs_epi16(va1, vb1); + // Horizontally add adjacent pairs of intermediate signed 16-bit integers, and pack the results. 
+ acc1 = _mm256_add_epi32(_mm256_madd_epi16(ones, vab), acc1); + } + + // reduce (horizontally add all) + return hsum_i32_8(acc1); +} + +EXPORT int32_t dot7u(int8_t* a, int8_t* b, size_t dims) { + int32_t res = 0; + int i = 0; + if (dims > DOT7U_STRIDE_BYTES_LEN) { + i += dims & ~(DOT7U_STRIDE_BYTES_LEN - 1); + res = dot7u_inner(a, b, i); + } + for (; i < dims; i++) { + res += a[i] * b[i]; + } + return res; +} + +static inline int32_t sqr7u_inner(int8_t *a, int8_t *b, size_t dims) { + // Init accumulator(s) with 0 + __m256i acc1 = _mm256_setzero_si256(); + + const __m256i ones = _mm256_set1_epi16(1); + +#pragma GCC unroll 4 + for(int i = 0; i < dims; i += SQR7U_STRIDE_BYTES_LEN) { + // Load packed 8-bit integers + __m256i va1 = _mm256_loadu_si256(a + i); + __m256i vb1 = _mm256_loadu_si256(b + i); + + const __m256i dist1 = _mm256_sub_epi8(va1, vb1); + const __m256i abs_dist1 = _mm256_sign_epi8(dist1, dist1); + const __m256i sqr1 = _mm256_maddubs_epi16(abs_dist1, abs_dist1); + + acc1 = _mm256_add_epi32(_mm256_madd_epi16(ones, sqr1), acc1); + } + + // reduce (accumulate all) + return hsum_i32_8(acc1); +} + +EXPORT int32_t sqr7u(int8_t* a, int8_t* b, size_t dims) { + int32_t res = 0; + int i = 0; + if (dims > SQR7U_STRIDE_BYTES_LEN) { + i += dims & ~(SQR7U_STRIDE_BYTES_LEN - 1); + res = sqr7u_inner(a, b, i); + } + for (; i < dims; i++) { + int32_t dist = a[i] - b[i]; + res += dist * dist; + } + return res; +} + diff --git a/libs/vec/native/src/vec/headers/vec.h b/libs/vec/native/src/vec/headers/vec.h index 5d3806dfccbe6..49fa29ec6fae9 100644 --- a/libs/vec/native/src/vec/headers/vec.h +++ b/libs/vec/native/src/vec/headers/vec.h @@ -7,7 +7,7 @@ */ #ifdef _MSC_VER -#define EXPORT extern "C" __declspec(dllexport) +#define EXPORT __declspec(dllexport) #elif defined(__GNUC__) && !defined(__clang__) #define EXPORT __attribute__((externally_visible,visibility("default"))) #elif __clang__ diff --git a/libs/vec/src/test/java/org/elasticsearch/vec/AbstractVectorTestCase.java 
b/libs/vec/src/test/java/org/elasticsearch/vec/AbstractVectorTestCase.java index 771f665fb4084..13f2d5a03ec76 100644 --- a/libs/vec/src/test/java/org/elasticsearch/vec/AbstractVectorTestCase.java +++ b/libs/vec/src/test/java/org/elasticsearch/vec/AbstractVectorTestCase.java @@ -39,7 +39,9 @@ public static boolean supported() { var arch = System.getProperty("os.arch"); var osName = System.getProperty("os.name"); - if (jdkVersion >= 21 && arch.equals("aarch64") && (osName.startsWith("Mac") || osName.equals("Linux"))) { + if (jdkVersion >= 21 + && (arch.equals("aarch64") && (osName.startsWith("Mac") || osName.equals("Linux")) + || arch.equals("amd64") && osName.equals("Linux"))) { assertThat(factory, isPresent()); return true; } else { From d2d1357a334df228dd59878d844bf3870e1efc8b Mon Sep 17 00:00:00 2001 From: Moritz Mack Date: Fri, 10 May 2024 12:37:54 +0200 Subject: [PATCH 103/117] Expose capability checks for YAML REST tests (#108425) Co-authored-by: Simon Cooper --- .../rest-api-spec/api/capabilities.json | 47 ++++++++++ .../test/capabilities/10_basic.yml | 28 ++++++ .../SimpleNodesCapabilitiesIT.java | 10 +-- .../NodesCapabilitiesResponse.java | 10 ++- .../yaml/ClientYamlTestExecutionContext.java | 43 ++++++++- .../yaml/section/PrerequisiteSection.java | 90 ++++++++++++++++--- .../test/rest/yaml/section/Prerequisites.java | 20 ++++- .../section/PrerequisiteSectionTests.java | 83 ++++++++++++++++- 8 files changed, 307 insertions(+), 24 deletions(-) create mode 100644 rest-api-spec/src/main/resources/rest-api-spec/api/capabilities.json create mode 100644 rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/capabilities/10_basic.yml diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/capabilities.json b/rest-api-spec/src/main/resources/rest-api-spec/api/capabilities.json new file mode 100644 index 0000000000000..28c341d9983cc --- /dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/capabilities.json @@ -0,0 +1,47 @@ +{ + 
"capabilities": { + "documentation": { + "url": "https://www.elastic.co/guide/en/elasticsearch/reference/master/capabilities.html", + "description": "Checks if the specified combination of method, API, parameters, and arbitrary capabilities are supported" + }, + "stability": "experimental", + "visibility": "private", + "headers": { + "accept": [ + "application/json" + ] + }, + "url": { + "paths": [ + { + "path": "/_capabilities", + "methods": [ + "GET" + ] + } + ] + }, + "params": { + "method": { + "type": "enum", + "description": "REST method to check", + "options": [ + "GET", "HEAD", "POST", "PUT", "DELETE" + ], + "default": "GET" + }, + "path": { + "type": "string", + "description": "API path to check" + }, + "parameters": { + "type": "string", + "description": "Comma-separated list of API parameters to check" + }, + "capabilities": { + "type": "string", + "description": "Comma-separated list of arbitrary API capabilities to check" + } + } + } +} diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/capabilities/10_basic.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/capabilities/10_basic.yml new file mode 100644 index 0000000000000..715e696bd1032 --- /dev/null +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/capabilities/10_basic.yml @@ -0,0 +1,28 @@ +--- +"Capabilities API": + + - requires: + capabilities: + - method: GET + path: /_capabilities + parameters: [method, path, parameters, capabilities] + capabilities: [] + reason: "capabilities api requires itself to be supported" + + - do: + capabilities: + method: GET + path: /_capabilities + parameters: method,path,parameters,capabilities + error_trace: false + + - match: { supported: true } + + - do: + capabilities: + method: GET + path: /_capabilities + parameters: unknown + error_trace: false + + - match: { supported: false } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/nodescapabilities/SimpleNodesCapabilitiesIT.java 
b/server/src/internalClusterTest/java/org/elasticsearch/nodescapabilities/SimpleNodesCapabilitiesIT.java index 7e4ae040caeca..9b60044c94f70 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/nodescapabilities/SimpleNodesCapabilitiesIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/nodescapabilities/SimpleNodesCapabilitiesIT.java @@ -15,8 +15,8 @@ import java.io.IOException; +import static org.elasticsearch.test.hamcrest.OptionalMatchers.isPresentWith; import static org.hamcrest.Matchers.hasSize; -import static org.hamcrest.Matchers.is; @ESIntegTestCase.ClusterScope(scope = ESIntegTestCase.Scope.TEST, numDataNodes = 0) public class SimpleNodesCapabilitiesIT extends ESIntegTestCase { @@ -31,25 +31,25 @@ public void testNodesCapabilities() throws IOException { NodesCapabilitiesResponse response = clusterAdmin().nodesCapabilities(new NodesCapabilitiesRequest().path("_capabilities")) .actionGet(); assertThat(response.getNodes(), hasSize(2)); - assertThat(response.isSupported(), is(true)); + assertThat(response.isSupported(), isPresentWith(true)); // check we support some parameters of the capabilities API response = clusterAdmin().nodesCapabilities(new NodesCapabilitiesRequest().path("_capabilities").parameters("method", "path")) .actionGet(); assertThat(response.getNodes(), hasSize(2)); - assertThat(response.isSupported(), is(true)); + assertThat(response.isSupported(), isPresentWith(true)); // check we don't support some other parameters of the capabilities API response = clusterAdmin().nodesCapabilities(new NodesCapabilitiesRequest().path("_capabilities").parameters("method", "invalid")) .actionGet(); assertThat(response.getNodes(), hasSize(2)); - assertThat(response.isSupported(), is(false)); + assertThat(response.isSupported(), isPresentWith(false)); // check we don't support a random invalid api // TODO this is not working yet - see https://github.com/elastic/elasticsearch/issues/107425 /*response = 
clusterAdmin().nodesCapabilities(new NodesCapabilitiesRequest().path("_invalid")) .actionGet(); assertThat(response.getNodes(), hasSize(2)); - assertThat(response.isSupported(), is(false));*/ + assertThat(response.isSupported(), isPresentWith(false));*/ } } diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/capabilities/NodesCapabilitiesResponse.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/capabilities/NodesCapabilitiesResponse.java index 63fdb9f7da08a..c2acbf65f6e57 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/capabilities/NodesCapabilitiesResponse.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/capabilities/NodesCapabilitiesResponse.java @@ -19,6 +19,7 @@ import java.io.IOException; import java.util.List; +import java.util.Optional; public class NodesCapabilitiesResponse extends BaseNodesResponse implements ToXContentFragment { protected NodesCapabilitiesResponse(ClusterName clusterName, List nodes, List failures) { @@ -35,12 +36,15 @@ protected void writeNodesTo(StreamOutput out, List nodes) throws TransportAction.localOnly(); } - public boolean isSupported() { - return getNodes().isEmpty() == false && getNodes().stream().allMatch(NodeCapability::isSupported); + public Optional isSupported() { + // if there are any failures, we don't know if it is fully supported by all nodes in the cluster + if (hasFailures() || getNodes().isEmpty()) return Optional.empty(); + return Optional.of(getNodes().stream().allMatch(NodeCapability::isSupported)); } @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - return builder.field("supported", isSupported()); + Optional supported = isSupported(); + return builder.field("supported", supported.orElse(null)); } } diff --git a/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/ClientYamlTestExecutionContext.java 
b/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/ClientYamlTestExecutionContext.java index 10bf2fb4b0a9f..4954065369ad9 100644 --- a/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/ClientYamlTestExecutionContext.java +++ b/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/ClientYamlTestExecutionContext.java @@ -16,7 +16,9 @@ import org.apache.logging.log4j.Logger; import org.apache.lucene.util.BytesRef; import org.elasticsearch.client.NodeSelector; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.util.Maps; import org.elasticsearch.test.rest.Stash; import org.elasticsearch.test.rest.TestFeatureService; import org.elasticsearch.test.rest.yaml.restspec.ClientYamlSuiteRestApi; @@ -25,14 +27,19 @@ import org.elasticsearch.xcontent.XContentType; import java.io.IOException; +import java.io.UncheckedIOException; import java.nio.charset.StandardCharsets; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; +import java.util.Optional; import java.util.Set; import java.util.function.BiPredicate; +import static java.util.Collections.emptyList; +import static java.util.Collections.emptyMap; + /** * Execution context passed across the REST tests. * Holds the REST client used to communicate with elasticsearch. 
@@ -122,7 +129,15 @@ public ClientYamlTestResponse callApi( ) throws IOException { // makes a copy of the parameters before modifying them for this specific request Map requestParams = new HashMap<>(params); - requestParams.putIfAbsent("error_trace", "true"); // By default ask for error traces, this my be overridden by params + requestParams.compute("error_trace", (k, v) -> { + if (v == null) { + return "true"; // By default ask for error traces, this my be overridden by params + } else if (v.equals("false")) { + return null; + } else { + return v; + } + }); for (Map.Entry entry : requestParams.entrySet()) { if (stash.containsStashedValue(entry.getValue())) { entry.setValue(stash.getValue(entry.getValue()).toString()); @@ -264,4 +279,30 @@ public ClientYamlTestCandidate getClientYamlTestCandidate() { public boolean clusterHasFeature(String featureId) { return testFeatureService.clusterHasFeature(featureId); } + + public Optional clusterHasCapabilities(String method, String path, String parametersString, String capabilitiesString) { + Map params = Maps.newMapWithExpectedSize(5); + params.put("method", method); + params.put("path", path); + if (Strings.hasLength(parametersString)) { + params.put("parameters", parametersString); + } + if (Strings.hasLength(capabilitiesString)) { + params.put("capabilities", capabilitiesString); + } + params.put("error_trace", "false"); // disable error trace + try { + ClientYamlTestResponse resp = callApi("capabilities", params, emptyList(), emptyMap()); + // anything other than 200 should result in an exception, handled below + assert resp.getStatusCode() == 200 : "Unknown response code " + resp.getStatusCode(); + return Optional.ofNullable(resp.evaluate("supported")); + } catch (ClientYamlTestResponseException responseException) { + if (responseException.getRestTestResponse().getStatusCode() / 100 == 4) { + return Optional.empty(); // we don't know, the capabilities API is unsupported + } + throw new 
UncheckedIOException(responseException); + } catch (IOException ioException) { + throw new UncheckedIOException(ioException); + } + } } diff --git a/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/section/PrerequisiteSection.java b/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/section/PrerequisiteSection.java index 1ee447da1f111..c12de7e1155a7 100644 --- a/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/section/PrerequisiteSection.java +++ b/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/section/PrerequisiteSection.java @@ -19,6 +19,7 @@ import java.io.IOException; import java.util.ArrayList; +import java.util.Collection; import java.util.HashSet; import java.util.List; import java.util.Map; @@ -27,6 +28,7 @@ import java.util.function.Predicate; import static java.util.Collections.emptyList; +import static java.util.stream.Collectors.joining; /** * Represents a section where prerequisites to run a specific test section or suite are specified. 
It is possible to specify preconditions @@ -43,16 +45,23 @@ record KnownIssue(String clusterFeature, String fixedBy) { private static final Set FIELD_NAMES = Set.of("cluster_feature", "fixed_by"); } + record CapabilitiesCheck(String method, String path, String parameters, String capabilities) { + private static final Set FIELD_NAMES = Set.of("method", "path", "parameters", "capabilities"); + } + static class PrerequisiteSectionBuilder { - String skipVersionRange = null; String skipReason = null; - String requiresReason = null; - List requiredYamlRunnerFeatures = new ArrayList<>(); + String skipVersionRange = null; List skipOperatingSystems = new ArrayList<>(); List skipKnownIssues = new ArrayList<>(); String skipAwaitsFix = null; Set skipClusterFeatures = new HashSet<>(); + List skipCapabilities = new ArrayList<>(); + + String requiresReason = null; + List requiredYamlRunnerFeatures = new ArrayList<>(); Set requiredClusterFeatures = new HashSet<>(); + List requiredCapabilities = new ArrayList<>(); enum XPackRequired { NOT_SPECIFIED, @@ -116,11 +125,21 @@ public PrerequisiteSectionBuilder skipKnownIssue(KnownIssue knownIssue) { return this; } + public PrerequisiteSectionBuilder skipIfCapabilities(CapabilitiesCheck capabilitiesCheck) { + skipCapabilities.add(capabilitiesCheck); + return this; + } + public PrerequisiteSectionBuilder requireClusterFeature(String featureName) { requiredClusterFeatures.add(featureName); return this; } + public PrerequisiteSectionBuilder requireCapabilities(CapabilitiesCheck capabilitiesCheck) { + requiredCapabilities.add(capabilitiesCheck); + return this; + } + public PrerequisiteSectionBuilder skipIfOs(String osName) { this.skipOperatingSystems.add(osName); return this; @@ -128,13 +147,15 @@ public PrerequisiteSectionBuilder skipIfOs(String osName) { void validate(XContentLocation contentLocation) { if ((Strings.isEmpty(skipVersionRange)) - && requiredYamlRunnerFeatures.isEmpty() && skipOperatingSystems.isEmpty() - && xpackRequired == 
XPackRequired.NOT_SPECIFIED - && requiredClusterFeatures.isEmpty() && skipClusterFeatures.isEmpty() + && skipCapabilities.isEmpty() && skipKnownIssues.isEmpty() - && Strings.isEmpty(skipAwaitsFix)) { + && Strings.isEmpty(skipAwaitsFix) + && xpackRequired == XPackRequired.NOT_SPECIFIED + && requiredYamlRunnerFeatures.isEmpty() + && requiredCapabilities.isEmpty() + && requiredClusterFeatures.isEmpty()) { // TODO separate the validation for requires / skip when dropping parsing of legacy fields, e.g. features in skip throw new ParsingException(contentLocation, "at least one predicate is mandatory within a skip or requires section"); } @@ -143,11 +164,12 @@ void validate(XContentLocation contentLocation) { && (Strings.isEmpty(skipVersionRange) && skipOperatingSystems.isEmpty() && skipClusterFeatures.isEmpty() + && skipCapabilities.isEmpty() && skipKnownIssues.isEmpty()) == false) { throw new ParsingException(contentLocation, "reason is mandatory within this skip section"); } - if (Strings.isEmpty(requiresReason) && (requiredClusterFeatures.isEmpty() == false)) { + if (Strings.isEmpty(requiresReason) && ((requiredClusterFeatures.isEmpty() && requiredCapabilities.isEmpty()) == false)) { throw new ParsingException(contentLocation, "reason is mandatory within this requires section"); } @@ -190,6 +212,13 @@ public PrerequisiteSection build() { if (xpackRequired == XPackRequired.YES) { requiresCriteriaList.add(Prerequisites.hasXPack()); } + if (requiredClusterFeatures.isEmpty() == false) { + requiresCriteriaList.add(Prerequisites.requireClusterFeatures(requiredClusterFeatures)); + } + if (requiredCapabilities.isEmpty() == false) { + requiresCriteriaList.add(Prerequisites.requireCapabilities(requiredCapabilities)); + } + if (xpackRequired == XPackRequired.NO) { skipCriteriaList.add(Prerequisites.hasXPack()); } @@ -199,12 +228,12 @@ public PrerequisiteSection build() { if (skipOperatingSystems.isEmpty() == false) { 
skipCriteriaList.add(Prerequisites.skipOnOsList(skipOperatingSystems)); } - if (requiredClusterFeatures.isEmpty() == false) { - requiresCriteriaList.add(Prerequisites.requireClusterFeatures(requiredClusterFeatures)); - } if (skipClusterFeatures.isEmpty() == false) { skipCriteriaList.add(Prerequisites.skipOnClusterFeatures(skipClusterFeatures)); } + if (skipCapabilities.isEmpty() == false) { + skipCriteriaList.add(Prerequisites.skipCapabilities(skipCapabilities)); + } if (skipKnownIssues.isEmpty() == false) { skipCriteriaList.add(Prerequisites.skipOnKnownIssue(skipKnownIssues)); } @@ -287,6 +316,7 @@ static void parseSkipSection(XContentParser parser, PrerequisiteSectionBuilder b case "os" -> parseStrings(parser, builder::skipIfOs); case "cluster_features" -> parseStrings(parser, builder::skipIfClusterFeature); case "known_issues" -> parseArray(parser, PrerequisiteSection::parseKnownIssue, builder::skipKnownIssue); + case "capabilities" -> parseArray(parser, PrerequisiteSection::parseCapabilities, builder::skipIfCapabilities); default -> false; }; } @@ -337,12 +367,47 @@ private static KnownIssue parseKnownIssue(XContentParser parser) throws IOExcept if (fields.keySet().equals(KnownIssue.FIELD_NAMES) == false) { throw new ParsingException( parser.getTokenLocation(), - Strings.format("Expected fields %s, but got %s", KnownIssue.FIELD_NAMES, fields.keySet()) + Strings.format("Expected all of %s, but got %s", KnownIssue.FIELD_NAMES, fields.keySet()) ); } return new KnownIssue(fields.get("cluster_feature"), fields.get("fixed_by")); } + private static CapabilitiesCheck parseCapabilities(XContentParser parser) throws IOException { + Map fields = parser.map(); + if (CapabilitiesCheck.FIELD_NAMES.containsAll(fields.keySet()) == false) { + throw new ParsingException( + parser.getTokenLocation(), + Strings.format("Expected some of %s, but got %s", CapabilitiesCheck.FIELD_NAMES, fields.keySet()) + ); + } + Object path = fields.get("path"); + if (path == null) { + throw new 
ParsingException(parser.getTokenLocation(), "path is required"); + } + + return new CapabilitiesCheck( + ensureString(ensureString(fields.getOrDefault("method", "GET"))), + ensureString(path), + stringArrayAsParamString("parameters", fields), + stringArrayAsParamString("capabilities", fields) + ); + } + + private static String ensureString(Object obj) { + if (obj instanceof String str) return str; + throw new IllegalArgumentException("Expected STRING, but got: " + obj); + } + + private static String stringArrayAsParamString(String name, Map fields) { + Object value = fields.get(name); + if (value == null) return null; + if (value instanceof Collection values) { + return values.stream().map(PrerequisiteSection::ensureString).collect(joining(",")); + } + return ensureString(value); + } + static void parseRequiresSection(XContentParser parser, PrerequisiteSectionBuilder builder) throws IOException { requireStartObject("requires", parser.nextToken()); @@ -361,6 +426,7 @@ static void parseRequiresSection(XContentParser parser, PrerequisiteSectionBuild valid = switch (parser.currentName()) { case "test_runner_features" -> parseStrings(parser, f -> parseFeatureField(f, builder)); case "cluster_features" -> parseStrings(parser, builder::requireClusterFeature); + case "capabilities" -> parseArray(parser, PrerequisiteSection::parseCapabilities, builder::requireCapabilities); default -> false; }; } diff --git a/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/section/Prerequisites.java b/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/section/Prerequisites.java index ca10101a4612c..86c035ebad62f 100644 --- a/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/section/Prerequisites.java +++ b/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/section/Prerequisites.java @@ -10,8 +10,11 @@ import org.elasticsearch.test.rest.ESRestTestCase; import 
org.elasticsearch.test.rest.yaml.ClientYamlTestExecutionContext; +import org.elasticsearch.test.rest.yaml.section.PrerequisiteSection.CapabilitiesCheck; +import org.elasticsearch.test.rest.yaml.section.PrerequisiteSection.KnownIssue; import java.util.List; +import java.util.Optional; import java.util.Set; import java.util.function.Predicate; @@ -45,8 +48,23 @@ static Predicate skipOnClusterFeatures(Set clusterFeatures.stream().anyMatch(context::clusterHasFeature); } - static Predicate skipOnKnownIssue(List knownIssues) { + static Predicate skipOnKnownIssue(List knownIssues) { return context -> knownIssues.stream() .anyMatch(i -> context.clusterHasFeature(i.clusterFeature()) && context.clusterHasFeature(i.fixedBy()) == false); } + + static Predicate requireCapabilities(List checks) { + // requirement not fulfilled if unknown / capabilities API not supported + return context -> checks.stream().allMatch(check -> checkCapabilities(context, check).orElse(false)); + } + + static Predicate skipCapabilities(List checks) { + // skip if unknown / capabilities API not supported + return context -> checks.stream().anyMatch(check -> checkCapabilities(context, check).orElse(true)); + } + + private static Optional checkCapabilities(ClientYamlTestExecutionContext context, CapabilitiesCheck check) { + Optional b = context.clusterHasCapabilities(check.method(), check.path(), check.parameters(), check.capabilities()); + return b; + } } diff --git a/test/yaml-rest-runner/src/test/java/org/elasticsearch/test/rest/yaml/section/PrerequisiteSectionTests.java b/test/yaml-rest-runner/src/test/java/org/elasticsearch/test/rest/yaml/section/PrerequisiteSectionTests.java index a77b2cc5b40f1..0bb31ae2c574a 100644 --- a/test/yaml-rest-runner/src/test/java/org/elasticsearch/test/rest/yaml/section/PrerequisiteSectionTests.java +++ b/test/yaml-rest-runner/src/test/java/org/elasticsearch/test/rest/yaml/section/PrerequisiteSectionTests.java @@ -13,6 +13,7 @@ import org.elasticsearch.core.Strings; 
import org.elasticsearch.test.VersionUtils; import org.elasticsearch.test.rest.yaml.ClientYamlTestExecutionContext; +import org.elasticsearch.test.rest.yaml.section.PrerequisiteSection.CapabilitiesCheck; import org.elasticsearch.test.rest.yaml.section.PrerequisiteSection.KnownIssue; import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xcontent.yaml.YamlXContent; @@ -20,8 +21,11 @@ import java.io.IOException; import java.util.List; +import java.util.Optional; import java.util.Set; +import static java.lang.Boolean.FALSE; +import static java.lang.Boolean.TRUE; import static java.util.Collections.emptyList; import static java.util.Collections.singletonList; import static org.hamcrest.Matchers.contains; @@ -36,6 +40,8 @@ import static org.hamcrest.Matchers.notNullValue; import static org.hamcrest.Matchers.nullValue; import static org.hamcrest.Matchers.oneOf; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.anyString; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; @@ -357,8 +363,8 @@ public void testParseSkipSectionIncompleteKnownIssues() throws Exception { e.getMessage(), is( oneOf( - ("Expected fields [cluster_feature, fixed_by], but got [cluster_feature]"), - ("Expected fields [fixed_by, cluster_feature], but got [cluster_feature]") + ("Expected all of [cluster_feature, fixed_by], but got [cluster_feature]"), + ("Expected all of [fixed_by, cluster_feature], but got [cluster_feature]") ) ) ); @@ -498,6 +504,42 @@ public void testParseRequireAndSkipSectionsClusterFeatures() throws Exception { assertThat(parser.nextToken(), nullValue()); } + public void testParseRequireAndSkipSectionsCapabilities() throws Exception { + parser = createParser(YamlXContent.yamlXContent, """ + - requires: + capabilities: + - path: /a + - method: POST + path: /b + parameters: [param1, param2] + - method: PUT + path: /c + capabilities: [a, b, c] + reason: required to run test + - skip: + 
capabilities: + - path: /d + parameters: param1 + capabilities: a + reason: undesired if supported + """); + + var skipSectionBuilder = PrerequisiteSection.parseInternal(parser); + assertThat(skipSectionBuilder, notNullValue()); + assertThat( + skipSectionBuilder.requiredCapabilities, + contains( + new CapabilitiesCheck("GET", "/a", null, null), + new CapabilitiesCheck("POST", "/b", "param1,param2", null), + new CapabilitiesCheck("PUT", "/c", null, "a,b,c") + ) + ); + assertThat(skipSectionBuilder.skipCapabilities, contains(new CapabilitiesCheck("GET", "/d", "param1", "a"))); + + assertThat(parser.currentToken(), equalTo(XContentParser.Token.END_ARRAY)); + assertThat(parser.nextToken(), nullValue()); + } + public void testParseRequireAndSkipSectionMultipleClusterFeatures() throws Exception { parser = createParser(YamlXContent.yamlXContent, """ - requires: @@ -659,6 +701,43 @@ public void testSkipKnownIssue() { assertFalse(section.skipCriteriaMet(mockContext)); } + public void testEvaluateCapabilities() { + List skipCapabilities = List.of( + new CapabilitiesCheck("GET", "/s", null, "c1,c2"), + new CapabilitiesCheck("GET", "/s", "p1,p2", "c1") + ); + List requiredCapabilities = List.of( + new CapabilitiesCheck("GET", "/r", null, null), + new CapabilitiesCheck("GET", "/r", "p1", null) + ); + PrerequisiteSection section = new PrerequisiteSection( + List.of(Prerequisites.skipCapabilities(skipCapabilities)), + "skip", + List.of(Prerequisites.requireCapabilities(requiredCapabilities)), + "required", + emptyList() + ); + + var context = mock(ClientYamlTestExecutionContext.class); + + // when the capabilities API is unavailable: + assertTrue(section.skipCriteriaMet(context)); // always skip if unavailable + assertFalse(section.requiresCriteriaMet(context)); // always fail requirements / skip if unavailable + + when(context.clusterHasCapabilities(anyString(), anyString(), any(), any())).thenReturn(Optional.of(FALSE)); + assertFalse(section.skipCriteriaMet(context)); + 
assertFalse(section.requiresCriteriaMet(context)); + + when(context.clusterHasCapabilities("GET", "/s", null, "c1,c2")).thenReturn(Optional.of(TRUE)); + assertTrue(section.skipCriteriaMet(context)); + + when(context.clusterHasCapabilities("GET", "/r", null, null)).thenReturn(Optional.of(TRUE)); + assertFalse(section.requiresCriteriaMet(context)); + + when(context.clusterHasCapabilities("GET", "/r", "p1", null)).thenReturn(Optional.of(TRUE)); + assertTrue(section.requiresCriteriaMet(context)); + } + public void evaluateEmpty() { var section = new PrerequisiteSection(List.of(), "unsupported", List.of(), "required", List.of()); From 7ed58e75dab2c36c99aabd78157be166c4ec322f Mon Sep 17 00:00:00 2001 From: Ignacio Vera Date: Fri, 10 May 2024 13:35:00 +0200 Subject: [PATCH 104/117] Do not filter source if exclude contains `*` (#108501) This commit prevents the serialization of source if not needed. --- .../fetch/subphase/FetchSourcePhase.java | 11 +++++++--- .../search/lookup/SourceFilter.java | 4 ++++ .../fetch/subphase/FetchSourcePhaseTests.java | 21 +++++++++++++++++++ 3 files changed, 33 insertions(+), 3 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/search/fetch/subphase/FetchSourcePhase.java b/server/src/main/java/org/elasticsearch/search/fetch/subphase/FetchSourcePhase.java index 3b8e4e69d9318..68e46186e4505 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/subphase/FetchSourcePhase.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/subphase/FetchSourcePhase.java @@ -28,7 +28,7 @@ public FetchSubPhaseProcessor getProcessor(FetchContext fetchContext) { } assert fetchSourceContext.fetchSource(); SourceFilter sourceFilter = fetchSourceContext.filter(); - + final boolean filterExcludesAll = sourceFilter.excludesAll(); return new FetchSubPhaseProcessor() { private int fastPath; @@ -67,8 +67,13 @@ private void hitExecute(FetchSourceContext fetchSourceContext, HitContext hitCon return; } - // Otherwise, filter the source and 
add it to the hit. - source = source.filter(sourceFilter); + if (filterExcludesAll) { + // we can just add an empty map + source = Source.empty(source.sourceContentType()); + } else { + // Otherwise, filter the source and add it to the hit. + source = source.filter(sourceFilter); + } if (nestedHit) { source = extractNested(source, hitContext.hit().getNestedIdentity()); } diff --git a/server/src/main/java/org/elasticsearch/search/lookup/SourceFilter.java b/server/src/main/java/org/elasticsearch/search/lookup/SourceFilter.java index 3bf32159c1676..ceffb32c08b48 100644 --- a/server/src/main/java/org/elasticsearch/search/lookup/SourceFilter.java +++ b/server/src/main/java/org/elasticsearch/search/lookup/SourceFilter.java @@ -109,4 +109,8 @@ private Function buildBytesFilter() { } }; } + + public boolean excludesAll() { + return Arrays.asList(excludes).contains("*"); + } } diff --git a/server/src/test/java/org/elasticsearch/search/fetch/subphase/FetchSourcePhaseTests.java b/server/src/test/java/org/elasticsearch/search/fetch/subphase/FetchSourcePhaseTests.java index 3a4d67ae281f2..2b8bf0dad65fe 100644 --- a/server/src/test/java/org/elasticsearch/search/fetch/subphase/FetchSourcePhaseTests.java +++ b/server/src/test/java/org/elasticsearch/search/fetch/subphase/FetchSourcePhaseTests.java @@ -52,6 +52,27 @@ public void testBasicFiltering() throws IOException { assertEquals(Collections.singletonMap("field1", "value"), hitContext.hit().getSourceAsMap()); } + public void testExcludesAll() throws IOException { + XContentBuilder source = XContentFactory.jsonBuilder().startObject().field("field1", "value").field("field2", "value2").endObject(); + HitContext hitContext = hitExecute(source, false, null, null); + assertNull(hitContext.hit().getSourceAsMap()); + + hitContext = hitExecute(source, true, "field1", "*"); + assertEquals(Collections.emptyMap(), hitContext.hit().getSourceAsMap()); + + hitContext = hitExecute(source, true, null, "*"); + assertEquals(Collections.emptyMap(), 
hitContext.hit().getSourceAsMap()); + + hitContext = hitExecute(source, true, "*", "*"); + assertEquals(Collections.emptyMap(), hitContext.hit().getSourceAsMap()); + + hitContext = hitExecuteMultiple(source, true, new String[] { "field1", "field2" }, new String[] { "*", "field1" }); + assertEquals(Collections.emptyMap(), hitContext.hit().getSourceAsMap()); + + hitContext = hitExecuteMultiple(source, true, null, new String[] { "field2", "*", "field1" }); + assertEquals(Collections.emptyMap(), hitContext.hit().getSourceAsMap()); + } + public void testMultipleFiltering() throws IOException { XContentBuilder source = XContentFactory.jsonBuilder().startObject().field("field", "value").field("field2", "value2").endObject(); HitContext hitContext = hitExecuteMultiple(source, true, new String[] { "*.notexisting", "field" }, null); From fed808850d708ba4be5190ac2abc3c47d8d2d379 Mon Sep 17 00:00:00 2001 From: Luigi Dell'Aquila Date: Fri, 10 May 2024 14:28:19 +0200 Subject: [PATCH 105/117] ES|QL: Add unit tests for now() function (#108498) --- .../functions/date-time-functions.asciidoc | 2 +- .../esql/functions/description/now.asciidoc | 5 ++ .../esql/functions/examples/now.asciidoc | 22 ++++++ .../esql/functions/kibana/definition/now.json | 16 +++++ .../esql/functions/kibana/docs/now.md | 10 +++ .../esql/functions/layout/now.asciidoc | 15 ++++ docs/reference/esql/functions/now.asciidoc | 28 -------- .../esql/functions/parameters/now.asciidoc | 3 + .../esql/functions/signature/now.svg | 1 + .../esql/functions/types/now.asciidoc | 9 +++ .../function/scalar/math/NowTests.java | 68 +++++++++++++++++++ 11 files changed, 150 insertions(+), 29 deletions(-) create mode 100644 docs/reference/esql/functions/description/now.asciidoc create mode 100644 docs/reference/esql/functions/examples/now.asciidoc create mode 100644 docs/reference/esql/functions/kibana/definition/now.json create mode 100644 docs/reference/esql/functions/kibana/docs/now.md create mode 100644 
docs/reference/esql/functions/layout/now.asciidoc delete mode 100644 docs/reference/esql/functions/now.asciidoc create mode 100644 docs/reference/esql/functions/parameters/now.asciidoc create mode 100644 docs/reference/esql/functions/signature/now.svg create mode 100644 docs/reference/esql/functions/types/now.asciidoc create mode 100644 x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/NowTests.java diff --git a/docs/reference/esql/functions/date-time-functions.asciidoc b/docs/reference/esql/functions/date-time-functions.asciidoc index 8ce26eaabe381..eceb6378426a2 100644 --- a/docs/reference/esql/functions/date-time-functions.asciidoc +++ b/docs/reference/esql/functions/date-time-functions.asciidoc @@ -21,4 +21,4 @@ include::layout/date_extract.asciidoc[] include::layout/date_format.asciidoc[] include::layout/date_parse.asciidoc[] include::layout/date_trunc.asciidoc[] -include::now.asciidoc[] +include::layout/now.asciidoc[] diff --git a/docs/reference/esql/functions/description/now.asciidoc b/docs/reference/esql/functions/description/now.asciidoc new file mode 100644 index 0000000000000..4852c98b4980a --- /dev/null +++ b/docs/reference/esql/functions/description/now.asciidoc @@ -0,0 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Description* + +Returns current date and time. diff --git a/docs/reference/esql/functions/examples/now.asciidoc b/docs/reference/esql/functions/examples/now.asciidoc new file mode 100644 index 0000000000000..b8953de93724c --- /dev/null +++ b/docs/reference/esql/functions/examples/now.asciidoc @@ -0,0 +1,22 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. 
+ +*Examples* + +[source.merge.styled,esql] +---- +include::{esql-specs}/date.csv-spec[tag=docsNow] +---- +[%header.monospaced.styled,format=dsv,separator=|] +|=== +include::{esql-specs}/date.csv-spec[tag=docsNow-result] +|=== +To retrieve logs from the last hour: +[source.merge.styled,esql] +---- +include::{esql-specs}/date.csv-spec[tag=docsNowWhere] +---- +[%header.monospaced.styled,format=dsv,separator=|] +|=== +include::{esql-specs}/date.csv-spec[tag=docsNowWhere-result] +|=== + diff --git a/docs/reference/esql/functions/kibana/definition/now.json b/docs/reference/esql/functions/kibana/definition/now.json new file mode 100644 index 0000000000000..9cdb4945afa2e --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/now.json @@ -0,0 +1,16 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "now", + "description" : "Returns current date and time.", + "signatures" : [ + { + "params" : [ ], + "returnType" : "datetime" + } + ], + "examples" : [ + "ROW current_date = NOW()", + "FROM sample_data\n| WHERE @timestamp > NOW() - 1 hour" + ] +} diff --git a/docs/reference/esql/functions/kibana/docs/now.md b/docs/reference/esql/functions/kibana/docs/now.md new file mode 100644 index 0000000000000..5143dc843ebd8 --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/now.md @@ -0,0 +1,10 @@ + + +### NOW +Returns current date and time. + +``` +ROW current_date = NOW() +``` diff --git a/docs/reference/esql/functions/layout/now.asciidoc b/docs/reference/esql/functions/layout/now.asciidoc new file mode 100644 index 0000000000000..52341c1665619 --- /dev/null +++ b/docs/reference/esql/functions/layout/now.asciidoc @@ -0,0 +1,15 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. 
+ +[discrete] +[[esql-now]] +=== `NOW` + +*Syntax* + +[.text-center] +image::esql/functions/signature/now.svg[Embedded,opts=inline] + +include::../parameters/now.asciidoc[] +include::../description/now.asciidoc[] +include::../types/now.asciidoc[] +include::../examples/now.asciidoc[] diff --git a/docs/reference/esql/functions/now.asciidoc b/docs/reference/esql/functions/now.asciidoc deleted file mode 100644 index 3c46f557acd1f..0000000000000 --- a/docs/reference/esql/functions/now.asciidoc +++ /dev/null @@ -1,28 +0,0 @@ -[discrete] -[[esql-now]] -=== `NOW` - -*Syntax* - -[source,esql] ----- -NOW() ----- - -*Description* - -Returns current date and time. - -*Example* - -[source,esql] ----- -include::{esql-specs}/date.csv-spec[tag=docsNow] ----- - -To retrieve logs from the last hour: - -[source,esql] ----- -include::{esql-specs}/date.csv-spec[tag=docsNowWhere] ----- \ No newline at end of file diff --git a/docs/reference/esql/functions/parameters/now.asciidoc b/docs/reference/esql/functions/parameters/now.asciidoc new file mode 100644 index 0000000000000..25b3c973f1a26 --- /dev/null +++ b/docs/reference/esql/functions/parameters/now.asciidoc @@ -0,0 +1,3 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Parameters* diff --git a/docs/reference/esql/functions/signature/now.svg b/docs/reference/esql/functions/signature/now.svg new file mode 100644 index 0000000000000..2cd48ac561408 --- /dev/null +++ b/docs/reference/esql/functions/signature/now.svg @@ -0,0 +1 @@ +NOW() \ No newline at end of file diff --git a/docs/reference/esql/functions/types/now.asciidoc b/docs/reference/esql/functions/types/now.asciidoc new file mode 100644 index 0000000000000..5737d98f2f7db --- /dev/null +++ b/docs/reference/esql/functions/types/now.asciidoc @@ -0,0 +1,9 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. 
+ +*Supported types* + +[%header.monospaced.styled,format=dsv,separator=|] +|=== +result +datetime +|=== diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/NowTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/NowTests.java new file mode 100644 index 0000000000000..b4f195c5929e3 --- /dev/null +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/NowTests.java @@ -0,0 +1,68 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.expression.function.scalar.math; + +import com.carrotsearch.randomizedtesting.annotations.Name; +import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; + +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.LongBlock; +import org.elasticsearch.xpack.esql.EsqlTestUtils; +import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; +import org.elasticsearch.xpack.esql.expression.function.scalar.AbstractConfigurationFunctionTestCase; +import org.elasticsearch.xpack.esql.expression.function.scalar.date.Now; +import org.elasticsearch.xpack.esql.session.EsqlConfiguration; +import org.elasticsearch.xpack.ql.expression.Expression; +import org.elasticsearch.xpack.ql.tree.Source; +import org.elasticsearch.xpack.ql.type.DataTypes; +import org.hamcrest.Matcher; + +import java.util.List; +import java.util.function.Supplier; + +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.matchesPattern; + +public class NowTests extends AbstractConfigurationFunctionTestCase { + public NowTests(@Name("TestCase") Supplier testCaseSupplier) { + this.testCase = 
testCaseSupplier.get(); + } + + @ParametersFactory + public static Iterable parameters() { + return parameterSuppliersFromTypedData( + List.of( + new TestCaseSupplier( + "Now Test", + () -> new TestCaseSupplier.TestCase( + List.of(), + matchesPattern("LiteralsEvaluator\\[lit=.*\\]"), + DataTypes.DATETIME, + equalTo(EsqlTestUtils.TEST_CFG.now().toInstant().toEpochMilli()) + ) + ) + ) + ); + } + + @Override + protected Expression buildWithConfiguration(Source source, List args, EsqlConfiguration configuration) { + return new Now(Source.EMPTY, configuration); + } + + @Override + protected void assertSimpleWithNulls(List data, Block value, int nullBlock) { + assertThat(((LongBlock) value).asVector().getLong(0), equalTo(EsqlTestUtils.TEST_CFG.now().toInstant().toEpochMilli())); + } + + @Override + protected Matcher allNullsMatcher() { + return equalTo(EsqlTestUtils.TEST_CFG.now().toInstant().toEpochMilli()); + } + +} From ac102e53f3d5eb318e682101b2060cba7ae90936 Mon Sep 17 00:00:00 2001 From: Dianna Hohensee Date: Fri, 10 May 2024 08:33:28 -0400 Subject: [PATCH 106/117] Improve join NotMasterException response, and add class documentation (#108107) The NotMasterException response to a join request is difficult to use to diagnose a failed join attempt. Enhancing the NotMasterException to include what node is thought to be master and the current term. This additional information will help readers locate the real master, to go look at those logs. The additional class documentation on JoinHelper and ClusterFormationFailureHelper should improve comprehension of the circumstances of error message logs. 
--- .../ClusterFormationFailureHelper.java | 23 ++++++++++++++- .../cluster/coordination/JoinHelper.java | 29 +++++++++++++++++-- .../coordination/NodeJoinExecutor.java | 10 ++++++- 3 files changed, 58 insertions(+), 4 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/cluster/coordination/ClusterFormationFailureHelper.java b/server/src/main/java/org/elasticsearch/cluster/coordination/ClusterFormationFailureHelper.java index c2cd403836593..e81d8d73af9a2 100644 --- a/server/src/main/java/org/elasticsearch/cluster/coordination/ClusterFormationFailureHelper.java +++ b/server/src/main/java/org/elasticsearch/cluster/coordination/ClusterFormationFailureHelper.java @@ -43,9 +43,16 @@ import static org.elasticsearch.cluster.coordination.ClusterBootstrapService.INITIAL_MASTER_NODES_SETTING; import static org.elasticsearch.monitor.StatusInfo.Status.UNHEALTHY; +/** + * Handles periodic debug logging of information regarding why the cluster has failed to form. + * Periodic logging begins once {@link #start()} is called, and ceases on {@link #stop()}. + */ public class ClusterFormationFailureHelper { private static final Logger logger = LogManager.getLogger(ClusterFormationFailureHelper.class); + /** + * This time period controls how often warning log messages will be written if this node fails to join or form a cluster. + */ public static final Setting DISCOVERY_CLUSTER_FORMATION_WARNING_TIMEOUT_SETTING = Setting.timeSetting( "discovery.cluster_formation_warning_timeout", TimeValue.timeValueMillis(10000), @@ -61,6 +68,16 @@ public class ClusterFormationFailureHelper { @Nullable // if no warning is scheduled private volatile WarningScheduler warningScheduler; + /** + * Works with the {@link JoinHelper} to log the latest node-join attempt failure and cluster state debug information. Must call + * {@link ClusterFormationState#start()} to begin. + * + * @param settings provides the period in which to log cluster formation errors. 
+ * @param clusterFormationStateSupplier information about the current believed cluster state (See {@link ClusterFormationState}) + * @param threadPool the thread pool on which to run debug logging + * @param logLastFailedJoinAttempt invokes an instance of the JoinHelper to log the last encountered join failure + * (See {@link JoinHelper#logLastFailedJoinAttempt()}) + */ public ClusterFormationFailureHelper( Settings settings, Supplier clusterFormationStateSupplier, @@ -78,6 +95,10 @@ public boolean isRunning() { return warningScheduler != null; } + /** + * Schedules a warning debug message to be logged in 'clusterFormationWarningTimeout' time, and periodically thereafter, until + * {@link ClusterFormationState#stop()} has been called. + */ public void start() { assert warningScheduler == null; warningScheduler = new WarningScheduler(); @@ -129,7 +150,7 @@ public String toString() { } /** - * If this node believes that cluster formation has failed, this record provides information that can be used to determine why that is. + * This record provides node state information that can be used to determine why cluster formation has failed. */ public record ClusterFormationState( List initialMasterNodesSetting, diff --git a/server/src/main/java/org/elasticsearch/cluster/coordination/JoinHelper.java b/server/src/main/java/org/elasticsearch/cluster/coordination/JoinHelper.java index b960bb02ceb7f..059400ad81cfb 100644 --- a/server/src/main/java/org/elasticsearch/cluster/coordination/JoinHelper.java +++ b/server/src/main/java/org/elasticsearch/cluster/coordination/JoinHelper.java @@ -194,13 +194,23 @@ private void unregisterAndReleaseConnection(DiscoveryNode destination, Releasabl Releasables.close(connectionReference); } - // package-private for testing + /** + * Saves information about a join failure. The failure information may be logged later via either {@link FailedJoinAttempt#logNow} + * or {@link FailedJoinAttempt#lastFailedJoinAttempt}. 
+ * + * Package-private for testing. + */ static class FailedJoinAttempt { private final DiscoveryNode destination; private final JoinRequest joinRequest; private final ElasticsearchException exception; private final long timestamp; + /** + * @param destination the master node targeted by the join request. + * @param joinRequest the join request that was sent to the perceived master node. + * @param exception the error response received in reply to the join request attempt. + */ FailedJoinAttempt(DiscoveryNode destination, JoinRequest joinRequest, ElasticsearchException exception) { this.destination = destination; this.joinRequest = joinRequest; @@ -208,10 +218,18 @@ static class FailedJoinAttempt { this.timestamp = System.nanoTime(); } + /** + * Logs the failed join attempt exception. + * {@link FailedJoinAttempt#getLogLevel(ElasticsearchException)} determines at what log-level the log is written. + */ void logNow() { logger.log(getLogLevel(exception), () -> format("failed to join %s with %s", destination, joinRequest), exception); } + /** + * Returns the appropriate log level based on the given exception. Every error is at least DEBUG, but unexpected errors are INFO. + * For example, NotMasterException and CircuitBreakingExceptions are DEBUG logs. + */ static Level getLogLevel(ElasticsearchException e) { Throwable cause = e.unwrapCause(); if (cause instanceof CoordinationStateRejectedException @@ -226,6 +244,10 @@ void logWarnWithTimestamp() { logger.warn( () -> format( "last failed join attempt was %s ago, failed to join %s with %s", + // 'timestamp' is when this error exception was received by the local node. 
If the time that has passed since the error + // was originally received is quite large, it could indicate that this is a stale error exception from some prior + // out-of-order request response (where a later sent request but earlier received response was successful); or + // alternatively an old error could indicate that this node did not retry the join request for a very long time. TimeValue.timeValueMillis(TimeValue.nsecToMSec(System.nanoTime() - timestamp)), destination, joinRequest @@ -235,6 +257,9 @@ void logWarnWithTimestamp() { } } + /** + * Logs a warning message if {@link #lastFailedJoinAttempt} has been set with a failure. + */ void logLastFailedJoinAttempt() { FailedJoinAttempt attempt = lastFailedJoinAttempt.get(); if (attempt != null) { @@ -247,7 +272,7 @@ public void sendJoinRequest(DiscoveryNode destination, long term, Optional assert destination.isMasterNode() : "trying to join master-ineligible " + destination; final StatusInfo statusInfo = nodeHealthService.getHealth(); if (statusInfo.getStatus() == UNHEALTHY) { - logger.debug("dropping join request to [{}]: [{}]", destination, statusInfo.getInfo()); + logger.debug("dropping join request to [{}], unhealthy status: [{}]", destination, statusInfo.getInfo()); return; } final JoinRequest joinRequest = new JoinRequest( diff --git a/server/src/main/java/org/elasticsearch/cluster/coordination/NodeJoinExecutor.java b/server/src/main/java/org/elasticsearch/cluster/coordination/NodeJoinExecutor.java index 2c024063e2399..9223e02fc946c 100644 --- a/server/src/main/java/org/elasticsearch/cluster/coordination/NodeJoinExecutor.java +++ b/server/src/main/java/org/elasticsearch/cluster/coordination/NodeJoinExecutor.java @@ -26,6 +26,7 @@ import org.elasticsearch.cluster.routing.allocation.AllocationService; import org.elasticsearch.cluster.version.CompatibilityVersions; import org.elasticsearch.common.Priority; +import org.elasticsearch.common.Strings; import org.elasticsearch.features.FeatureService; import 
org.elasticsearch.index.IndexVersion; import org.elasticsearch.index.IndexVersions; @@ -123,7 +124,14 @@ public ClusterState execute(BatchExecutionContext batchExecutionContex newState = ClusterState.builder(initialState); } else { logger.trace("processing node joins, but we are not the master. current master: {}", currentNodes.getMasterNode()); - throw new NotMasterException("Node [" + currentNodes.getLocalNode() + "] not master for join request"); + throw new NotMasterException( + Strings.format( + "Node [%s] not master for join request. Current known master [%s], current term [%d]", + currentNodes.getLocalNode(), + currentNodes.getMasterNode(), + term + ) + ); } DiscoveryNodes.Builder nodesBuilder = DiscoveryNodes.builder(newState.nodes()); From 79032ec77eb5227bdb6eef37df7a4a6d35d98912 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Przemys=C5=82aw=20Witek?= Date: Fri, 10 May 2024 14:46:33 +0200 Subject: [PATCH 107/117] Do not use global ordinals strategy if the leaf reader context cannot be obtained (#108459) --- docs/changelog/108459.yaml | 6 ++++++ .../FrequentItemSetCollector.java | 12 +++++++----- .../mr/ItemSetMapReduceAggregator.java | 18 ++++++++---------- .../mr/ItemSetMapReduceValueSource.java | 12 +++++++----- 4 files changed, 28 insertions(+), 20 deletions(-) create mode 100644 docs/changelog/108459.yaml diff --git a/docs/changelog/108459.yaml b/docs/changelog/108459.yaml new file mode 100644 index 0000000000000..5e05797f284be --- /dev/null +++ b/docs/changelog/108459.yaml @@ -0,0 +1,6 @@ +pr: 108459 +summary: Do not use global ordinals strategy if the leaf reader context cannot be + obtained +area: Machine Learning +type: bug +issues: [] diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/frequentitemsets/FrequentItemSetCollector.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/frequentitemsets/FrequentItemSetCollector.java index 18086748d6fe0..bd80e362f2f71 100644 --- 
a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/frequentitemsets/FrequentItemSetCollector.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/frequentitemsets/FrequentItemSetCollector.java @@ -177,7 +177,8 @@ FrequentItemSet toFrequentItemSet(List fields) throws IOException { int pos = items.nextSetBit(0); while (pos > 0) { Tuple item = transactionStore.getItem(topItemIds.getItemIdAt(pos - 1)); - assert item.v1() < fields.size() : "item id exceed number of given items, did you configure eclat correctly?"; + assert item.v1() < fields.size() + : "eclat error: item id (" + item.v1() + ") exceeds the number of given items (" + fields.size() + ")"; final Field field = fields.get(item.v1()); Object formattedValue = field.formatValue(item.v2()); String fieldName = fields.get(item.v1()).getName(); @@ -252,19 +253,20 @@ public FrequentItemSetCollector(TransactionStore transactionStore, TopItemIds to this.topItemIds = topItemIds; this.size = size; this.min = min; - queue = new FrequentItemSetPriorityQueue(size); - frequentItemsByCount = Maps.newMapWithExpectedSize(size / 10); + this.queue = new FrequentItemSetPriorityQueue(size); + this.frequentItemsByCount = Maps.newMapWithExpectedSize(size / 10); } public FrequentItemSet[] finalizeAndGetResults(List fields) throws IOException { - FrequentItemSet[] topFrequentItems = new FrequentItemSet[size()]; + FrequentItemSet[] topFrequentItems = new FrequentItemSet[queue.size()]; for (int i = topFrequentItems.length - 1; i >= 0; i--) { topFrequentItems[i] = queue.pop().toFrequentItemSet(fields); } return topFrequentItems; } - public int size() { + // Visible for testing + int size() { return queue.size(); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/frequentitemsets/mr/ItemSetMapReduceAggregator.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/frequentitemsets/mr/ItemSetMapReduceAggregator.java index 72bfb6f1f0394..0f9555c77341f 100644 --- 
a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/frequentitemsets/mr/ItemSetMapReduceAggregator.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/frequentitemsets/mr/ItemSetMapReduceAggregator.java @@ -86,17 +86,15 @@ protected ItemSetMapReduceAggregator( boolean rewriteBasedOnOrdinals = false; - if (ctx.isPresent()) { - for (var c : configsAndValueFilters) { - ItemSetMapReduceValueSource e = context.getValuesSourceRegistry() - .getAggregator(registryKey, c.v1()) - .build(c.v1(), id++, c.v2(), ordinalOptimization, ctx.get()); - if (e.getField().getName() != null) { - fields.add(e.getField()); - valueSources.add(e); - } - rewriteBasedOnOrdinals |= e.usesOrdinals(); + for (var c : configsAndValueFilters) { + ItemSetMapReduceValueSource e = context.getValuesSourceRegistry() + .getAggregator(registryKey, c.v1()) + .build(c.v1(), id++, c.v2(), ordinalOptimization, ctx); + if (e.getField().getName() != null) { + fields.add(e.getField()); + valueSources.add(e); } + rewriteBasedOnOrdinals |= e.usesOrdinals(); } this.rewriteBasedOnOrdinals = rewriteBasedOnOrdinals; diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/frequentitemsets/mr/ItemSetMapReduceValueSource.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/frequentitemsets/mr/ItemSetMapReduceValueSource.java index c9ec772eb3321..08adecd3fbce5 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/frequentitemsets/mr/ItemSetMapReduceValueSource.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/frequentitemsets/mr/ItemSetMapReduceValueSource.java @@ -37,6 +37,7 @@ import java.util.Collections; import java.util.List; import java.util.Objects; +import java.util.Optional; /** * Interface to extract values from Lucene in order to feed it into the MapReducer. 
@@ -53,7 +54,7 @@ ItemSetMapReduceValueSource build( int id, IncludeExclude includeExclude, AbstractItemSetMapReducer.OrdinalOptimization ordinalOptimization, - LeafReaderContext ctx + Optional ctx ) throws IOException; } @@ -345,20 +346,21 @@ public KeywordValueSource( int id, IncludeExclude includeExclude, AbstractItemSetMapReducer.OrdinalOptimization ordinalOptimization, - LeafReaderContext ctx + Optional ctx ) throws IOException { super(config, id, ValueFormatter.BYTES_REF); if (AbstractItemSetMapReducer.OrdinalOptimization.GLOBAL_ORDINALS.equals(ordinalOptimization) && config.getValuesSource() instanceof Bytes.WithOrdinals - && ((Bytes.WithOrdinals) config.getValuesSource()).supportsGlobalOrdinalsMapping()) { + && ((Bytes.WithOrdinals) config.getValuesSource()).supportsGlobalOrdinalsMapping() + && ctx.isPresent()) { logger.debug("Use ordinals for field [{}]", config.fieldContext().field()); this.executionStrategy = new GlobalOrdinalsStrategy( getField(), (Bytes.WithOrdinals) config.getValuesSource(), includeExclude == null ? null : includeExclude.convertToOrdinalsFilter(config.format()), - ctx + ctx.get() ); } else { this.executionStrategy = new MapStrategy( @@ -394,7 +396,7 @@ public NumericValueSource( int id, IncludeExclude includeExclude, AbstractItemSetMapReducer.OrdinalOptimization unusedOrdinalOptimization, - LeafReaderContext unusedCtx + Optional unusedCtx ) { super(config, id, ValueFormatter.LONG); this.source = (Numeric) config.getValuesSource(); From 8d19849dc10b28244b506c131cfe9db6e6c4372d Mon Sep 17 00:00:00 2001 From: Armin Braun Date: Fri, 10 May 2024 15:10:07 +0200 Subject: [PATCH 108/117] Fix potential leaks in search execution (#108391) Cleaning up some potentially leaky spots or at the very least making them easier to read. 
--- .../action/search/TransportSearchAction.java | 58 +++++++++++-------- .../search/internal/SearchContext.java | 1 + 2 files changed, 34 insertions(+), 25 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java b/server/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java index 51a8c6ddb3d76..a12d149bbe342 100644 --- a/server/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java +++ b/server/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java @@ -1303,8 +1303,8 @@ public SearchPhase newSearchPhase( task, true, searchService.getCoordinatorRewriteContextProvider(timeProvider::absoluteStartMillis), - listener.delegateFailureAndWrap((l, iters) -> { - SearchPhase action = newSearchPhase( + listener.delegateFailureAndWrap( + (l, iters) -> newSearchPhase( task, searchRequest, executor, @@ -1317,30 +1317,32 @@ public SearchPhase newSearchPhase( false, threadPool, clusters - ); - action.start(); - }) - ); - } else { - // for synchronous CCS minimize_roundtrips=false, use the CCSSingleCoordinatorSearchProgressListener - // (AsyncSearchTask will not return SearchProgressListener.NOOP, since it uses its own progress listener - // which delegates to CCSSingleCoordinatorSearchProgressListener when minimizing roundtrips) - if (clusters.isCcsMinimizeRoundtrips() == false - && clusters.hasRemoteClusters() - && task.getProgressListener() == SearchProgressListener.NOOP) { - task.setProgressListener(new CCSSingleCoordinatorSearchProgressListener()); - } - final SearchPhaseResults queryResultConsumer = searchPhaseController.newSearchPhaseResults( - executor, - circuitBreaker, - task::isCancelled, - task.getProgressListener(), - searchRequest, - shardIterators.size(), - exc -> searchTransportService.cancelSearchTask(task, "failed to merge result [" + exc.getMessage() + "]") + ).start() + ) ); + } + // for synchronous CCS minimize_roundtrips=false, use the 
CCSSingleCoordinatorSearchProgressListener + // (AsyncSearchTask will not return SearchProgressListener.NOOP, since it uses its own progress listener + // which delegates to CCSSingleCoordinatorSearchProgressListener when minimizing roundtrips) + if (clusters.isCcsMinimizeRoundtrips() == false + && clusters.hasRemoteClusters() + && task.getProgressListener() == SearchProgressListener.NOOP) { + task.setProgressListener(new CCSSingleCoordinatorSearchProgressListener()); + } + final SearchPhaseResults queryResultConsumer = searchPhaseController.newSearchPhaseResults( + executor, + circuitBreaker, + task::isCancelled, + task.getProgressListener(), + searchRequest, + shardIterators.size(), + exc -> searchTransportService.cancelSearchTask(task, "failed to merge result [" + exc.getMessage() + "]") + ); + boolean success = false; + try { + final SearchPhase searchPhase; if (searchRequest.searchType() == DFS_QUERY_THEN_FETCH) { - return new SearchDfsQueryThenFetchAsyncAction( + searchPhase = new SearchDfsQueryThenFetchAsyncAction( logger, namedWriteableRegistry, searchTransportService, @@ -1359,7 +1361,7 @@ public SearchPhase newSearchPhase( ); } else { assert searchRequest.searchType() == QUERY_THEN_FETCH : searchRequest.searchType(); - return new SearchQueryThenFetchAsyncAction( + searchPhase = new SearchQueryThenFetchAsyncAction( logger, namedWriteableRegistry, searchTransportService, @@ -1377,6 +1379,12 @@ public SearchPhase newSearchPhase( clusters ); } + success = true; + return searchPhase; + } finally { + if (success == false) { + queryResultConsumer.close(); + } } } } diff --git a/server/src/main/java/org/elasticsearch/search/internal/SearchContext.java b/server/src/main/java/org/elasticsearch/search/internal/SearchContext.java index 232c12e944a96..35f96ee2dc102 100644 --- a/server/src/main/java/org/elasticsearch/search/internal/SearchContext.java +++ b/server/src/main/java/org/elasticsearch/search/internal/SearchContext.java @@ -351,6 +351,7 @@ public Query 
rewrittenQuery() { * Adds a releasable that will be freed when this context is closed. */ public void addReleasable(Releasable releasable) { // TODO most Releasables are managed by their callers. We probably don't need this. + assert closed.get() == false; releasables.add(releasable); } From 7e38ee13d593984406c9192c5ee31b6b351e99e7 Mon Sep 17 00:00:00 2001 From: Simon Cooper Date: Fri, 10 May 2024 15:07:01 +0100 Subject: [PATCH 109/117] Mute capabilities yaml test (#108511) --- .../resources/rest-api-spec/test/capabilities/10_basic.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/capabilities/10_basic.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/capabilities/10_basic.yml index 715e696bd1032..04d60e31149e9 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/capabilities/10_basic.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/capabilities/10_basic.yml @@ -1,6 +1,9 @@ --- "Capabilities API": + - skip: + awaits_fix: "https://github.com/elastic/elasticsearch/issues/108509" + - requires: capabilities: - method: GET From b6874a516076044c7937ed807270e320fcfc925a Mon Sep 17 00:00:00 2001 From: Pat Whelan Date: Fri, 10 May 2024 10:36:16 -0400 Subject: [PATCH 110/117] [Transform] Use unpooled SearchHits in tests (#108508) Fix #108507 --- .../transforms/TransformIndexerFailureHandlingTests.java | 2 +- .../xpack/transform/transforms/TransformIndexerStateTests.java | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformIndexerFailureHandlingTests.java b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformIndexerFailureHandlingTests.java index f39a4329f2bb1..ceb2507ad33ab 100644 --- 
a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformIndexerFailureHandlingTests.java +++ b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformIndexerFailureHandlingTests.java @@ -992,7 +992,7 @@ private MockedTransformIndexer createMockIndexer( private static Function returnHit() { return request -> new SearchResponse( - new SearchHits(new SearchHit[] { new SearchHit(1) }, new TotalHits(1L, TotalHits.Relation.EQUAL_TO), 1.0f), + SearchHits.unpooled(new SearchHit[] { SearchHit.unpooled(1) }, new TotalHits(1L, TotalHits.Relation.EQUAL_TO), 1.0f), // Simulate completely null aggs null, new Suggest(Collections.emptyList()), diff --git a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformIndexerStateTests.java b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformIndexerStateTests.java index a474976cf9dfa..01a2db839b7d8 100644 --- a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformIndexerStateTests.java +++ b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformIndexerStateTests.java @@ -88,7 +88,7 @@ public class TransformIndexerStateTests extends ESTestCase { private static final SearchResponse ONE_HIT_SEARCH_RESPONSE = new SearchResponse( - new SearchHits(new SearchHit[] { new SearchHit(1) }, new TotalHits(1L, TotalHits.Relation.EQUAL_TO), 1.0f), + SearchHits.unpooled(new SearchHit[] { SearchHit.unpooled(1) }, new TotalHits(1L, TotalHits.Relation.EQUAL_TO), 1.0f), // Simulate completely null aggs null, new Suggest(Collections.emptyList()), From d6cb12ec8fc377b5ed50681304bf2cd33f43e62f Mon Sep 17 00:00:00 2001 From: Simon Cooper Date: Fri, 10 May 2024 16:29:01 +0100 Subject: [PATCH 111/117] Properly support capabilities checks with mixed version clusters (#108513) When a capabilities check is done on a cluster with some nodes 
that do not support capabilities, always return false --- .../upgrades/NodesCapabilitiesUpgradeIT.java | 68 +++++++++++++++++++ .../test/capabilities/10_basic.yml | 3 - .../NodesCapabilitiesResponse.java | 9 ++- .../TransportNodesCapabilitiesAction.java | 25 ++++++- .../org/elasticsearch/rest/RestFeatures.java | 7 ++ .../cluster/RestNodesCapabilitiesAction.java | 3 + .../test/rest/ESRestTestCase.java | 38 +++++++++++ 7 files changed, 147 insertions(+), 6 deletions(-) create mode 100644 qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/NodesCapabilitiesUpgradeIT.java diff --git a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/NodesCapabilitiesUpgradeIT.java b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/NodesCapabilitiesUpgradeIT.java new file mode 100644 index 0000000000000..2acaf33c2130c --- /dev/null +++ b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/NodesCapabilitiesUpgradeIT.java @@ -0,0 +1,68 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. 
+ */ + +package org.elasticsearch.upgrades; + +import com.carrotsearch.randomizedtesting.annotations.Name; + +import org.elasticsearch.client.ResponseException; +import org.elasticsearch.core.UpdateForV9; +import org.junit.Before; + +import java.io.IOException; +import java.util.List; + +import static org.elasticsearch.test.hamcrest.OptionalMatchers.isPresentWith; + +@UpdateForV9 +public class NodesCapabilitiesUpgradeIT extends AbstractRollingUpgradeTestCase { + + private static Boolean upgradingBeforeCapabilities; + + public NodesCapabilitiesUpgradeIT(@Name("upgradedNodes") int upgradedNodes) { + super(upgradedNodes); + } + + @Before + public void checkBeforeHasNoCapabilities() throws IOException { + if (upgradingBeforeCapabilities == null) { + // try to do a _capabilities query on a node before we upgrade + try { + clusterHasCapability("GET", "_capabilities", List.of(), List.of()); + upgradingBeforeCapabilities = false; + } catch (ResponseException e) { + if (e.getResponse().getStatusLine().getStatusCode() == 400) { + upgradingBeforeCapabilities = true; + } else { + throw e; + } + } + } + + assumeTrue("Only valid when upgrading from versions without capabilities API", upgradingBeforeCapabilities); + } + + public void testCapabilitiesReturnsFalsePartiallyUpgraded() throws IOException { + if (isMixedCluster()) { + // capabilities checks should either fail (if talking to an old node), + // or return false as not all nodes have the API (if talking to a new node) + try { + assertThat( + "Upgraded node should report no capabilities supported", + clusterHasCapability("GET", "_capabilities", List.of(), List.of()), + isPresentWith(false) + ); + } catch (ResponseException e) { + if (e.getResponse().getStatusLine().getStatusCode() != 400) { + // throw explicitly to capture exception too + throw new AssertionError("Old node should not have the capabilities API", e); + } + } + } + } +} diff --git 
a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/capabilities/10_basic.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/capabilities/10_basic.yml index 04d60e31149e9..715e696bd1032 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/capabilities/10_basic.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/capabilities/10_basic.yml @@ -1,9 +1,6 @@ --- "Capabilities API": - - skip: - awaits_fix: "https://github.com/elastic/elasticsearch/issues/108509" - - requires: capabilities: - method: GET diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/capabilities/NodesCapabilitiesResponse.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/capabilities/NodesCapabilitiesResponse.java index c2acbf65f6e57..3527b8cc46840 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/capabilities/NodesCapabilitiesResponse.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/capabilities/NodesCapabilitiesResponse.java @@ -37,8 +37,13 @@ protected void writeNodesTo(StreamOutput out, List nodes) throws } public Optional isSupported() { - // if there are any failures, we don't know if it is fully supported by all nodes in the cluster - if (hasFailures() || getNodes().isEmpty()) return Optional.empty(); + if (hasFailures() || getNodes().isEmpty()) { + // there's no nodes in the response (uh? what about ourselves?) + // or there's a problem (hopefully transient) talking to one or more nodes. 
+ // We don't have enough information to decide if it's supported or not, so return unknown + return Optional.empty(); + } + return Optional.of(getNodes().stream().allMatch(NodeCapability::isSupported)); } diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/capabilities/TransportNodesCapabilitiesAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/capabilities/TransportNodesCapabilitiesAction.java index 7e392775bf42e..71aa95908d3b7 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/capabilities/TransportNodesCapabilitiesAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/capabilities/TransportNodesCapabilitiesAction.java @@ -8,6 +8,7 @@ package org.elasticsearch.action.admin.cluster.node.capabilities; +import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionType; import org.elasticsearch.action.FailedNodeException; import org.elasticsearch.action.support.ActionFilters; @@ -18,8 +19,10 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.core.RestApiVersion; +import org.elasticsearch.features.FeatureService; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.action.admin.cluster.RestNodesCapabilitiesAction; import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportRequest; @@ -27,6 +30,7 @@ import java.io.IOException; import java.util.List; +import java.util.Optional; import java.util.Set; public class TransportNodesCapabilitiesAction extends TransportNodesAction< @@ -38,6 +42,7 @@ public class TransportNodesCapabilitiesAction extends TransportNodesAction< public static final ActionType TYPE = new ActionType<>("cluster:monitor/nodes/capabilities"); private final RestController restController; + private 
final FeatureService featureService; @Inject public TransportNodesCapabilitiesAction( @@ -45,7 +50,8 @@ public TransportNodesCapabilitiesAction( ClusterService clusterService, TransportService transportService, ActionFilters actionFilters, - RestController restController + RestController restController, + FeatureService featureService ) { super( TYPE.name(), @@ -56,6 +62,23 @@ public TransportNodesCapabilitiesAction( threadPool.executor(ThreadPool.Names.MANAGEMENT) ); this.restController = restController; + this.featureService = featureService; + } + + @Override + protected void doExecute(Task task, NodesCapabilitiesRequest request, ActionListener listener) { + if (featureService.clusterHasFeature(clusterService.state(), RestNodesCapabilitiesAction.CAPABILITIES_ACTION) == false) { + // not everything in the cluster supports capabilities. + // Therefore we don't support whatever it is we're being asked for + listener.onResponse(new NodesCapabilitiesResponse(clusterService.getClusterName(), List.of(), List.of()) { + @Override + public Optional isSupported() { + return Optional.of(false); + } + }); + } else { + super.doExecute(task, request, listener); + } } @Override diff --git a/server/src/main/java/org/elasticsearch/rest/RestFeatures.java b/server/src/main/java/org/elasticsearch/rest/RestFeatures.java index 73b788d63b2ab..111204fbe7fb8 100644 --- a/server/src/main/java/org/elasticsearch/rest/RestFeatures.java +++ b/server/src/main/java/org/elasticsearch/rest/RestFeatures.java @@ -12,10 +12,17 @@ import org.elasticsearch.features.FeatureSpecification; import org.elasticsearch.features.NodeFeature; import org.elasticsearch.rest.action.admin.cluster.RestClusterGetSettingsAction; +import org.elasticsearch.rest.action.admin.cluster.RestNodesCapabilitiesAction; import java.util.Map; +import java.util.Set; public class RestFeatures implements FeatureSpecification { + @Override + public Set getFeatures() { + return Set.of(RestNodesCapabilitiesAction.CAPABILITIES_ACTION); + 
} + @Override public Map getHistoricalFeatures() { return Map.of(RestClusterGetSettingsAction.SUPPORTS_GET_SETTINGS_ACTION, Version.V_8_3_0); diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestNodesCapabilitiesAction.java b/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestNodesCapabilitiesAction.java index 9b89a6a932dd3..fae7903d02b82 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestNodesCapabilitiesAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestNodesCapabilitiesAction.java @@ -11,6 +11,7 @@ import org.elasticsearch.action.admin.cluster.node.capabilities.NodesCapabilitiesRequest; import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.common.Strings; +import org.elasticsearch.features.NodeFeature; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.Scope; @@ -26,6 +27,8 @@ @ServerlessScope(Scope.INTERNAL) public class RestNodesCapabilitiesAction extends BaseRestHandler { + public static final NodeFeature CAPABILITIES_ACTION = new NodeFeature("rest.capabilities_action"); + @Override public List routes() { return List.of(new Route(RestRequest.Method.GET, "/_capabilities")); diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java index fd3ba7d864f99..6dfd51c0bee5e 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java @@ -105,6 +105,7 @@ import java.util.ArrayList; import java.util.Arrays; import java.util.Base64; +import java.util.Collection; import java.util.Collections; import java.util.EnumSet; import java.util.HashMap; @@ -261,6 +262,43 @@ protected static Set readVersionsFromNodesInfo(RestClient adminClient) t 
.collect(Collectors.toUnmodifiableMap(entry -> entry.getKey().toString(), entry -> (Map) entry.getValue())); } + /** + * Does the cluster being tested support the set of capabilities + * for specified path and method. + */ + protected static Optional clusterHasCapability( + String method, + String path, + Collection parameters, + Collection capabilities + ) throws IOException { + return clusterHasCapability(adminClient, method, path, parameters, capabilities); + } + + /** + * Does the cluster on the other side of {@code client} support the set + * of capabilities for specified path and method. + */ + protected static Optional clusterHasCapability( + RestClient client, + String method, + String path, + Collection parameters, + Collection capabilities + ) throws IOException { + Request request = new Request("GET", "_capabilities"); + request.addParameter("method", method); + request.addParameter("path", path); + if (parameters.isEmpty() == false) { + request.addParameter("parameters", String.join(",", parameters)); + } + if (capabilities.isEmpty() == false) { + request.addParameter("capabilities", String.join(",", capabilities)); + } + Map response = entityAsMap(client.performRequest(request).getEntity()); + return Optional.ofNullable((Boolean) response.get("supported")); + } + protected static boolean clusterHasFeature(String featureId) { return testFeatureService.clusterHasFeature(featureId); } From 5a824c5a38e51590064e410a292fa50cb0bb0d86 Mon Sep 17 00:00:00 2001 From: Tim Grein Date: Fri, 10 May 2024 17:39:04 +0200 Subject: [PATCH 112/117] [Inference API] Improve completion response entity tests (#108512) --- .../AzureOpenAiCompletionResponseEntityTests.java | 9 +++------ .../openai/OpenAiChatCompletionResponseEntityTests.java | 6 +++--- 2 files changed, 6 insertions(+), 9 deletions(-) diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/azureopenai/AzureOpenAiCompletionResponseEntityTests.java 
b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/azureopenai/AzureOpenAiCompletionResponseEntityTests.java index 3afe4bd439e0f..ec76f43a6d52f 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/azureopenai/AzureOpenAiCompletionResponseEntityTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/azureopenai/AzureOpenAiCompletionResponseEntityTests.java @@ -17,7 +17,6 @@ import java.io.IOException; import java.nio.charset.StandardCharsets; -import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; import static org.mockito.Mockito.mock; @@ -50,7 +49,7 @@ public void testFromResponse_CreatesResultsForASingleItem() throws IOException { "index": 0, "logprobs": null, "message": { - "content": "response", + "content": "result", "role": "assistant" } } @@ -92,10 +91,8 @@ public void testFromResponse_CreatesResultsForASingleItem() throws IOException { new HttpResult(mock(HttpResponse.class), responseJson.getBytes(StandardCharsets.UTF_8)) ); - assertThat(chatCompletionResults.getResults().size(), equalTo(1)); - - ChatCompletionResults.Result result = chatCompletionResults.getResults().get(0); - assertThat(result.asMap().get(result.getResultsField()), is("response")); + assertThat(chatCompletionResults.getResults().size(), is(1)); + assertThat(chatCompletionResults.getResults().get(0).content(), is("result")); } public void testFromResponse_FailsWhenChoicesFieldIsNotPresent() { diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/openai/OpenAiChatCompletionResponseEntityTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/openai/OpenAiChatCompletionResponseEntityTests.java index 080602e8fd245..5604d6573144e 100644 --- 
a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/openai/OpenAiChatCompletionResponseEntityTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/openai/OpenAiChatCompletionResponseEntityTests.java @@ -17,7 +17,6 @@ import java.io.IOException; import java.nio.charset.StandardCharsets; -import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; import static org.mockito.Mockito.mock; @@ -35,7 +34,7 @@ public void testFromResponse_CreatesResultsForASingleItem() throws IOException { "index": 0, "message": { "role": "assistant", - "content": "some content" + "content": "result" }, "logprobs": null, "finish_reason": "stop" @@ -55,7 +54,8 @@ public void testFromResponse_CreatesResultsForASingleItem() throws IOException { new HttpResult(mock(HttpResponse.class), responseJson.getBytes(StandardCharsets.UTF_8)) ); - assertThat(chatCompletionResults.getResults().size(), equalTo(1)); + assertThat(chatCompletionResults.getResults().size(), is(1)); + assertThat(chatCompletionResults.getResults().get(0).content(), is("result")); } public void testFromResponse_FailsWhenChoicesFieldIsNotPresent() { From ef12b99284785b5877bf62193bdb6f40a0bde66f Mon Sep 17 00:00:00 2001 From: Benjamin Trent Date: Fri, 10 May 2024 12:04:58 -0400 Subject: [PATCH 113/117] Fix random sampler consistency test (#107957) Random sampler consistency requires a restricted number of segments, to ensure we always hit the same number of segments and that no merging is occurring, this merges the segment count to 1 for this particular test. In practice, this isn't needed as the approximate nature of the aggregation already means you could get different statistics per call, but they are within an error bound set by the users configured sampling probability. 
closes: https://github.com/elastic/elasticsearch/issues/105839 --- .../search/aggregations/bucket/RandomSamplerIT.java | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/RandomSamplerIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/RandomSamplerIT.java index c9a6cfaf754c6..71402d3e9c1d8 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/RandomSamplerIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/RandomSamplerIT.java @@ -22,6 +22,7 @@ import static org.elasticsearch.search.aggregations.AggregationBuilders.avg; import static org.elasticsearch.search.aggregations.AggregationBuilders.histogram; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertResponse; import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; import static org.hamcrest.Matchers.equalTo; @@ -83,9 +84,11 @@ public void setupSuiteScopeCluster() throws Exception { } indexRandom(true, builders); ensureSearchable(); + // Force merge to ensure segment consistency as any segment merging can change which particular documents + // are sampled + assertNoFailures(indicesAdmin().prepareForceMerge("idx").setMaxNumSegments(1).get()); } - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/105839") public void testRandomSamplerConsistentSeed() { double[] sampleMonotonicValue = new double[1]; double[] sampleNumericValue = new double[1]; From 2d14095ebf66bf04881f04b7e7f92ba2834187f3 Mon Sep 17 00:00:00 2001 From: Ryan Ernst Date: Fri, 10 May 2024 09:51:56 -0700 Subject: [PATCH 114/117] Handle blocked cluster state in file settings (#108481) When file settings is first loaded on a master node starting up, the cluster state will likely be in a blocked 
state before it recovers. In that case the file settings will not be processable since the metadata will be missing in cluster state. This commit makes watching for file settings not start until the cluster state is in a recovered state. It also updates the the reserved state update task to handle a similar case where a task may be queued and then run at time when the node is no longer master, but before the watcher is stopped. --- .../service/FileSettingsServiceIT.java | 2 +- .../file/MasterNodeFileWatchingService.java | 4 +- .../service/ReservedStateUpdateTask.java | 8 ++ .../MasterNodeFileWatchingServiceTests.java | 119 ++++++++++++++++++ .../service/ReservedStateUpdateTaskTests.java | 31 +++++ 5 files changed, 162 insertions(+), 2 deletions(-) create mode 100644 server/src/test/java/org/elasticsearch/common/file/MasterNodeFileWatchingServiceTests.java create mode 100644 server/src/test/java/org/elasticsearch/reservedstate/service/ReservedStateUpdateTaskTests.java diff --git a/server/src/internalClusterTest/java/org/elasticsearch/reservedstate/service/FileSettingsServiceIT.java b/server/src/internalClusterTest/java/org/elasticsearch/reservedstate/service/FileSettingsServiceIT.java index 58dcfdaec5147..38bc372868df0 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/reservedstate/service/FileSettingsServiceIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/reservedstate/service/FileSettingsServiceIT.java @@ -243,7 +243,7 @@ public void testReservedStatePersistsOnRestart() throws Exception { FileSettingsService masterFileSettingsService = internalCluster().getInstance(FileSettingsService.class, masterNode); - assertTrue(masterFileSettingsService.watching()); + assertBusy(() -> assertTrue(masterFileSettingsService.watching())); logger.info("--> write some settings"); writeJSONFile(masterNode, testJSON); diff --git a/server/src/main/java/org/elasticsearch/common/file/MasterNodeFileWatchingService.java 
b/server/src/main/java/org/elasticsearch/common/file/MasterNodeFileWatchingService.java index 6da0845a7c7ba..65bfa804cec2f 100644 --- a/server/src/main/java/org/elasticsearch/common/file/MasterNodeFileWatchingService.java +++ b/server/src/main/java/org/elasticsearch/common/file/MasterNodeFileWatchingService.java @@ -15,6 +15,7 @@ import org.elasticsearch.cluster.ClusterStateListener; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.gateway.GatewayService; import java.io.IOException; import java.nio.file.Files; @@ -58,7 +59,8 @@ protected void doStop() { @Override public final void clusterChanged(ClusterChangedEvent event) { ClusterState clusterState = event.state(); - if (clusterState.nodes().isLocalNodeElectedMaster()) { + if (clusterState.nodes().isLocalNodeElectedMaster() + && clusterState.blocks().hasGlobalBlock(GatewayService.STATE_NOT_RECOVERED_BLOCK) == false) { synchronized (this) { if (watching() || active == false) { refreshExistingFileStateIfNeeded(clusterState); diff --git a/server/src/main/java/org/elasticsearch/reservedstate/service/ReservedStateUpdateTask.java b/server/src/main/java/org/elasticsearch/reservedstate/service/ReservedStateUpdateTask.java index 535758ed71eac..1ac42a91736c3 100644 --- a/server/src/main/java/org/elasticsearch/reservedstate/service/ReservedStateUpdateTask.java +++ b/server/src/main/java/org/elasticsearch/reservedstate/service/ReservedStateUpdateTask.java @@ -19,6 +19,7 @@ import org.elasticsearch.cluster.metadata.ReservedStateErrorMetadata; import org.elasticsearch.cluster.metadata.ReservedStateHandlerMetadata; import org.elasticsearch.cluster.metadata.ReservedStateMetadata; +import org.elasticsearch.gateway.GatewayService; import org.elasticsearch.reservedstate.NonStateTransformResult; import org.elasticsearch.reservedstate.ReservedClusterStateHandler; import org.elasticsearch.reservedstate.TransformState; @@ -80,6 +81,13 @@ 
ActionListener listener() { } protected ClusterState execute(final ClusterState currentState) { + if (currentState.blocks().hasGlobalBlock(GatewayService.STATE_NOT_RECOVERED_BLOCK)) { + // If cluster state has become blocked, this task was submitted while the node was master but is now not master. + // The new master will re-read file settings, so whatever update was to be written here will be handled + // by the new master. + return currentState; + } + ReservedStateMetadata existingMetadata = currentState.metadata().reservedStateMetadata().get(namespace); Map reservedState = stateChunk.state(); ReservedStateVersion reservedStateVersion = stateChunk.metadata(); diff --git a/server/src/test/java/org/elasticsearch/common/file/MasterNodeFileWatchingServiceTests.java b/server/src/test/java/org/elasticsearch/common/file/MasterNodeFileWatchingServiceTests.java new file mode 100644 index 0000000000000..f92097f53bb81 --- /dev/null +++ b/server/src/test/java/org/elasticsearch/common/file/MasterNodeFileWatchingServiceTests.java @@ -0,0 +1,119 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. 
+ */ + +package org.elasticsearch.common.file; + +import org.elasticsearch.cluster.ClusterChangedEvent; +import org.elasticsearch.cluster.ClusterName; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.block.ClusterBlocks; +import org.elasticsearch.cluster.node.DiscoveryNode; +import org.elasticsearch.cluster.node.DiscoveryNodeRole; +import org.elasticsearch.cluster.node.DiscoveryNodeUtils; +import org.elasticsearch.cluster.node.DiscoveryNodes; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.gateway.GatewayService; +import org.elasticsearch.node.NodeRoleSettings; +import org.elasticsearch.test.ESTestCase; +import org.junit.After; +import org.junit.Before; + +import java.io.IOException; +import java.nio.file.Path; +import java.util.concurrent.ExecutionException; + +import static org.hamcrest.Matchers.is; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +public class MasterNodeFileWatchingServiceTests extends ESTestCase { + + static final DiscoveryNode localNode = DiscoveryNodeUtils.create("local-node"); + MasterNodeFileWatchingService testService; + Path watchedFile; + Runnable fileChangedCallback; + + @Before + public void setupTestService() throws IOException { + watchedFile = createTempFile(); + ClusterService clusterService = mock(ClusterService.class); + Settings settings = Settings.builder() + .put(NodeRoleSettings.NODE_ROLES_SETTING.getKey(), DiscoveryNodeRole.MASTER_ROLE.roleName()) + .build(); + when(clusterService.getSettings()).thenReturn(settings); + fileChangedCallback = () -> {}; + testService = new MasterNodeFileWatchingService(clusterService, watchedFile) { + + @Override + protected void processFileChanges() throws InterruptedException, ExecutionException, IOException { + fileChangedCallback.run(); + } + + @Override + protected void processInitialFileMissing() throws InterruptedException, 
ExecutionException, IOException { + // file always exists, but we don't care about the missing case for master node behavior + } + }; + testService.start(); + } + + @After + public void stopTestService() { + testService.stop(); + } + + public void testBecomingMasterNodeStartsWatcher() { + ClusterState notRecoveredClusterState = ClusterState.builder(ClusterName.DEFAULT) + .nodes(DiscoveryNodes.builder().add(localNode).localNodeId(localNode.getId()).masterNodeId(localNode.getId())) + .blocks(ClusterBlocks.builder().addGlobalBlock(GatewayService.STATE_NOT_RECOVERED_BLOCK)) + .build(); + testService.clusterChanged(new ClusterChangedEvent("test", notRecoveredClusterState, ClusterState.EMPTY_STATE)); + // just a master node isn't sufficient, cluster state also must be recovered + assertThat(testService.watching(), is(false)); + + ClusterState recoveredClusterState = ClusterState.builder(notRecoveredClusterState) + .blocks(ClusterBlocks.EMPTY_CLUSTER_BLOCK) + .build(); + testService.clusterChanged(new ClusterChangedEvent("test", recoveredClusterState, notRecoveredClusterState)); + // just a master node isn't sufficient, cluster state also must be recovered + assertThat(testService.watching(), is(true)); + } + + public void testChangingMasterStopsWatcher() { + ClusterState clusterState = ClusterState.builder(ClusterName.DEFAULT) + .nodes(DiscoveryNodes.builder().add(localNode).localNodeId(localNode.getId()).masterNodeId(localNode.getId())) + .build(); + testService.clusterChanged(new ClusterChangedEvent("test", clusterState, ClusterState.EMPTY_STATE)); + assertThat(testService.watching(), is(true)); + + final DiscoveryNode anotherNode = DiscoveryNodeUtils.create("another-node"); + ClusterState differentMasterClusterState = ClusterState.builder(ClusterName.DEFAULT) + .nodes( + DiscoveryNodes.builder().add(localNode).add(anotherNode).localNodeId(localNode.getId()).masterNodeId(anotherNode.getId()) + ) + .build(); + testService.clusterChanged(new ClusterChangedEvent("test", 
differentMasterClusterState, clusterState)); + assertThat(testService.watching(), is(false)); + } + + public void testBlockingClusterStateStopsWatcher() { + ClusterState clusterState = ClusterState.builder(ClusterName.DEFAULT) + .nodes(DiscoveryNodes.builder().add(localNode).localNodeId(localNode.getId()).masterNodeId(localNode.getId())) + .build(); + testService.clusterChanged(new ClusterChangedEvent("test", clusterState, ClusterState.EMPTY_STATE)); + assertThat(testService.watching(), is(true)); + + ClusterState blockedClusterState = ClusterState.builder(ClusterName.DEFAULT) + .nodes(DiscoveryNodes.builder().add(localNode).localNodeId(localNode.getId()).masterNodeId(localNode.getId())) + .blocks(ClusterBlocks.builder().addGlobalBlock(GatewayService.STATE_NOT_RECOVERED_BLOCK)) + .build(); + testService.clusterChanged(new ClusterChangedEvent("test", blockedClusterState, clusterState)); + assertThat(testService.watching(), is(false)); + } +} diff --git a/server/src/test/java/org/elasticsearch/reservedstate/service/ReservedStateUpdateTaskTests.java b/server/src/test/java/org/elasticsearch/reservedstate/service/ReservedStateUpdateTaskTests.java new file mode 100644 index 0000000000000..d887d7edb19f2 --- /dev/null +++ b/server/src/test/java/org/elasticsearch/reservedstate/service/ReservedStateUpdateTaskTests.java @@ -0,0 +1,31 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. 
+ */ + +package org.elasticsearch.reservedstate.service; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.cluster.ClusterName; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.block.ClusterBlocks; +import org.elasticsearch.gateway.GatewayService; +import org.elasticsearch.test.ESTestCase; + +import java.util.List; +import java.util.Map; + +import static org.hamcrest.Matchers.sameInstance; + +public class ReservedStateUpdateTaskTests extends ESTestCase { + public void testBlockedClusterState() { + var task = new ReservedStateUpdateTask("dummy", null, List.of(), Map.of(), List.of(), e -> {}, ActionListener.noop()); + ClusterState notRecoveredClusterState = ClusterState.builder(ClusterName.DEFAULT) + .blocks(ClusterBlocks.builder().addGlobalBlock(GatewayService.STATE_NOT_RECOVERED_BLOCK)) + .build(); + assertThat(task.execute(notRecoveredClusterState), sameInstance(notRecoveredClusterState)); + } +} From 04d3b9989fe03d3591d9eb637dc54f7813e174ea Mon Sep 17 00:00:00 2001 From: Nik Everett Date: Fri, 10 May 2024 13:45:42 -0400 Subject: [PATCH 115/117] ESQL: Begin optimizing `Block#lookup` (#108482) This creates the infrastructure to allow optimizing the `lookup` method when applied to `Vector`s and then implements that optimization for constant vectors. Constant vectors now take one of six paths: 1. An empty positions `Block` yields an empty result set. 2. If `positions` is a `Block`, perform the un-optimized lookup. 3. If the `min` of the `positions` *Vector* is less that 0 then throw an exception. 4. If the `min` of the positions Vector is greater than the number of positions in the lookup block then return a single `ConstantNullBlock` because you are looking up outside the range. 5. If the `max` of the positions Vector is less than the number of positions in the lookup block then return a `Constant$Type$Block` with the same value as the lookup block. This is a lookup that's entirely within range. 6. 
Otherwise return the unoptimized lookup. This is *fairly* simple but demonstrates how we can plug in more complex optimizations later. --- .../core/ReleasableIterator.java | 26 ++++++ .../compute/data/BooleanArrayVector.java | 7 ++ .../compute/data/BooleanBigArrayVector.java | 7 ++ .../compute/data/BooleanVector.java | 5 ++ .../compute/data/BooleanVectorBlock.java | 5 +- .../compute/data/BytesRefArrayVector.java | 7 ++ .../compute/data/BytesRefVector.java | 5 ++ .../compute/data/BytesRefVectorBlock.java | 5 +- .../compute/data/ConstantBooleanVector.java | 24 +++++ .../compute/data/ConstantBytesRefVector.java | 24 +++++ .../compute/data/ConstantDoubleVector.java | 24 +++++ .../compute/data/ConstantIntVector.java | 24 +++++ .../compute/data/ConstantLongVector.java | 24 +++++ .../compute/data/DoubleArrayVector.java | 7 ++ .../compute/data/DoubleBigArrayVector.java | 7 ++ .../compute/data/DoubleVector.java | 5 ++ .../compute/data/DoubleVectorBlock.java | 5 +- .../compute/data/IntArrayVector.java | 7 ++ .../compute/data/IntBigArrayVector.java | 7 ++ .../elasticsearch/compute/data/IntVector.java | 5 ++ .../compute/data/IntVectorBlock.java | 5 +- .../compute/data/LongArrayVector.java | 7 ++ .../compute/data/LongBigArrayVector.java | 7 ++ .../compute/data/LongVector.java | 5 ++ .../compute/data/LongVectorBlock.java | 5 +- .../org/elasticsearch/compute/data/Block.java | 8 +- .../compute/data/ConstantNullVector.java | 8 ++ .../elasticsearch/compute/data/DocBlock.java | 2 +- .../elasticsearch/compute/data/DocVector.java | 7 ++ .../compute/data/OrdinalBytesRefVector.java | 7 ++ .../elasticsearch/compute/data/Vector.java | 29 ++++++ .../compute/data/X-ArrayVector.java.st | 9 ++ .../compute/data/X-BigArrayVector.java.st | 7 ++ .../compute/data/X-ConstantVector.java.st | 24 +++++ .../compute/data/X-Vector.java.st | 5 ++ .../compute/data/X-VectorBlock.java.st | 5 +- .../compute/data/BasicBlockTests.java | 89 +++++++++++++++++-- 37 files changed, 431 insertions(+), 28 deletions(-) 
diff --git a/libs/core/src/main/java/org/elasticsearch/core/ReleasableIterator.java b/libs/core/src/main/java/org/elasticsearch/core/ReleasableIterator.java index 68a4a136c5308..83a68c984a684 100644 --- a/libs/core/src/main/java/org/elasticsearch/core/ReleasableIterator.java +++ b/libs/core/src/main/java/org/elasticsearch/core/ReleasableIterator.java @@ -46,4 +46,30 @@ public String toString() { }; } + + /** + * Returns an empty iterator over the supplied value. + */ + static ReleasableIterator empty() { + return new ReleasableIterator<>() { + @Override + public boolean hasNext() { + return false; + } + + @Override + public T next() { + assert false : "hasNext is always false so next should never be called"; + return null; + } + + @Override + public void close() {} + + @Override + public String toString() { + return "ReleasableIterator[]"; + } + }; + } } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanArrayVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanArrayVector.java index e195bda3a6dbb..a91999a49c16b 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanArrayVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanArrayVector.java @@ -10,6 +10,8 @@ import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.unit.ByteSizeValue; +import org.elasticsearch.core.ReleasableIterator; import java.io.IOException; import java.util.stream.Collectors; @@ -91,6 +93,11 @@ public BooleanVector filter(int... 
positions) { } } + @Override + public ReleasableIterator lookup(IntBlock positions, ByteSizeValue targetBlockSize) { + return new BooleanLookup(asBlock(), positions, targetBlockSize); + } + public static long ramBytesEstimated(boolean[] values) { return BASE_RAM_BYTES_USED + RamUsageEstimator.sizeOf(values); } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanBigArrayVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanBigArrayVector.java index 5f6db129e73d3..9215cd0d9bbda 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanBigArrayVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanBigArrayVector.java @@ -10,8 +10,10 @@ import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.util.BitArray; import org.elasticsearch.core.Releasable; +import org.elasticsearch.core.ReleasableIterator; import java.io.IOException; @@ -87,6 +89,11 @@ public BooleanVector filter(int... 
positions) { return new BooleanBigArrayVector(filtered, positions.length, blockFactory); } + @Override + public ReleasableIterator lookup(IntBlock positions, ByteSizeValue targetBlockSize) { + return new BooleanLookup(asBlock(), positions, targetBlockSize); + } + @Override public void closeInternal() { // The circuit breaker that tracks the values {@link BitArray} is adjusted outside diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanVector.java index 7218f3d2771c8..c8921a7c9f02e 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanVector.java @@ -10,6 +10,8 @@ import org.elasticsearch.TransportVersions; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.unit.ByteSizeValue; +import org.elasticsearch.core.ReleasableIterator; import java.io.IOException; @@ -27,6 +29,9 @@ public sealed interface BooleanVector extends Vector permits ConstantBooleanVect @Override BooleanVector filter(int... positions); + @Override + ReleasableIterator lookup(IntBlock positions, ByteSizeValue targetBlockSize); + /** * Compares the given object with this vector for equality. Returns {@code true} if and only if the * given object is a BooleanVector, and both vectors are {@link #equals(BooleanVector, BooleanVector) equal}. 
diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanVectorBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanVectorBlock.java index 013718bb42a7d..193e6ea5d8965 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanVectorBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanVectorBlock.java @@ -52,9 +52,8 @@ public BooleanBlock filter(int... positions) { } @Override - public ReleasableIterator lookup(IntBlock positions, ByteSizeValue targetBlockSize) { - // TODO optimizations - return new BooleanLookup(this, positions, targetBlockSize); + public ReleasableIterator lookup(IntBlock positions, ByteSizeValue targetBlockSize) { + return vector.lookup(positions, targetBlockSize); } @Override diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefArrayVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefArrayVector.java index 75cf4a2e1fe5a..61bbfb5ebbd02 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefArrayVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefArrayVector.java @@ -11,7 +11,9 @@ import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.util.BytesRefArray; +import org.elasticsearch.core.ReleasableIterator; import org.elasticsearch.core.Releasables; import java.io.IOException; @@ -91,6 +93,11 @@ public BytesRefVector filter(int... 
positions) { } } + @Override + public ReleasableIterator lookup(IntBlock positions, ByteSizeValue targetBlockSize) { + return new BytesRefLookup(asBlock(), positions, targetBlockSize); + } + public static long ramBytesEstimated(BytesRefArray values) { return BASE_RAM_BYTES_USED + RamUsageEstimator.sizeOf(values); } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefVector.java index 4f07ca2d61049..3739dccb0f956 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefVector.java @@ -11,6 +11,8 @@ import org.elasticsearch.TransportVersions; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.unit.ByteSizeValue; +import org.elasticsearch.core.ReleasableIterator; import java.io.IOException; @@ -34,6 +36,9 @@ public sealed interface BytesRefVector extends Vector permits ConstantBytesRefVe @Override BytesRefVector filter(int... positions); + @Override + ReleasableIterator lookup(IntBlock positions, ByteSizeValue targetBlockSize); + /** * Compares the given object with this vector for equality. Returns {@code true} if and only if the * given object is a BytesRefVector, and both vectors are {@link #equals(BytesRefVector, BytesRefVector) equal}. 
diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefVectorBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefVectorBlock.java index 39bd37ea9bc34..16a8fc0888096 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefVectorBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefVectorBlock.java @@ -63,9 +63,8 @@ public BytesRefBlock filter(int... positions) { } @Override - public ReleasableIterator lookup(IntBlock positions, ByteSizeValue targetBlockSize) { - // TODO optimizations - return new BytesRefLookup(this, positions, targetBlockSize); + public ReleasableIterator lookup(IntBlock positions, ByteSizeValue targetBlockSize) { + return vector.lookup(positions, targetBlockSize); } @Override diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantBooleanVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantBooleanVector.java index 16d70d1a0e800..1f6786f64e0a9 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantBooleanVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantBooleanVector.java @@ -8,6 +8,8 @@ package org.elasticsearch.compute.data; import org.apache.lucene.util.RamUsageEstimator; +import org.elasticsearch.common.unit.ByteSizeValue; +import org.elasticsearch.core.ReleasableIterator; /** * Vector implementation that stores a constant boolean value. @@ -39,6 +41,28 @@ public BooleanVector filter(int... 
positions) { return blockFactory().newConstantBooleanVector(value, positions.length); } + @Override + public ReleasableIterator lookup(IntBlock positions, ByteSizeValue targetBlockSize) { + if (positions.getPositionCount() == 0) { + return ReleasableIterator.empty(); + } + IntVector positionsVector = positions.asVector(); + if (positionsVector == null) { + return new BooleanLookup(asBlock(), positions, targetBlockSize); + } + int min = positionsVector.min(); + if (min < 0) { + throw new IllegalArgumentException("invalid position [" + min + "]"); + } + if (min > getPositionCount()) { + return ReleasableIterator.single((BooleanBlock) positions.blockFactory().newConstantNullBlock(positions.getPositionCount())); + } + if (positionsVector.max() < getPositionCount()) { + return ReleasableIterator.single(positions.blockFactory().newConstantBooleanBlockWith(value, positions.getPositionCount())); + } + return new BooleanLookup(asBlock(), positions, targetBlockSize); + } + @Override public ElementType elementType() { return ElementType.BOOLEAN; diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantBytesRefVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantBytesRefVector.java index eed780a42f7ba..33967d66374c1 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantBytesRefVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantBytesRefVector.java @@ -9,6 +9,8 @@ import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.RamUsageEstimator; +import org.elasticsearch.common.unit.ByteSizeValue; +import org.elasticsearch.core.ReleasableIterator; /** * Vector implementation that stores a constant BytesRef value. @@ -45,6 +47,28 @@ public BytesRefVector filter(int... 
positions) { return blockFactory().newConstantBytesRefVector(value, positions.length); } + @Override + public ReleasableIterator lookup(IntBlock positions, ByteSizeValue targetBlockSize) { + if (positions.getPositionCount() == 0) { + return ReleasableIterator.empty(); + } + IntVector positionsVector = positions.asVector(); + if (positionsVector == null) { + return new BytesRefLookup(asBlock(), positions, targetBlockSize); + } + int min = positionsVector.min(); + if (min < 0) { + throw new IllegalArgumentException("invalid position [" + min + "]"); + } + if (min > getPositionCount()) { + return ReleasableIterator.single((BytesRefBlock) positions.blockFactory().newConstantNullBlock(positions.getPositionCount())); + } + if (positionsVector.max() < getPositionCount()) { + return ReleasableIterator.single(positions.blockFactory().newConstantBytesRefBlockWith(value, positions.getPositionCount())); + } + return new BytesRefLookup(asBlock(), positions, targetBlockSize); + } + @Override public ElementType elementType() { return ElementType.BYTES_REF; diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantDoubleVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantDoubleVector.java index a783f0243313e..1ddf31d753d43 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantDoubleVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantDoubleVector.java @@ -8,6 +8,8 @@ package org.elasticsearch.compute.data; import org.apache.lucene.util.RamUsageEstimator; +import org.elasticsearch.common.unit.ByteSizeValue; +import org.elasticsearch.core.ReleasableIterator; /** * Vector implementation that stores a constant double value. @@ -39,6 +41,28 @@ public DoubleVector filter(int... 
positions) { return blockFactory().newConstantDoubleVector(value, positions.length); } + @Override + public ReleasableIterator lookup(IntBlock positions, ByteSizeValue targetBlockSize) { + if (positions.getPositionCount() == 0) { + return ReleasableIterator.empty(); + } + IntVector positionsVector = positions.asVector(); + if (positionsVector == null) { + return new DoubleLookup(asBlock(), positions, targetBlockSize); + } + int min = positionsVector.min(); + if (min < 0) { + throw new IllegalArgumentException("invalid position [" + min + "]"); + } + if (min > getPositionCount()) { + return ReleasableIterator.single((DoubleBlock) positions.blockFactory().newConstantNullBlock(positions.getPositionCount())); + } + if (positionsVector.max() < getPositionCount()) { + return ReleasableIterator.single(positions.blockFactory().newConstantDoubleBlockWith(value, positions.getPositionCount())); + } + return new DoubleLookup(asBlock(), positions, targetBlockSize); + } + @Override public ElementType elementType() { return ElementType.DOUBLE; diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantIntVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantIntVector.java index c6d463af7cfad..e8fb8cb39ceb4 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantIntVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantIntVector.java @@ -8,6 +8,8 @@ package org.elasticsearch.compute.data; import org.apache.lucene.util.RamUsageEstimator; +import org.elasticsearch.common.unit.ByteSizeValue; +import org.elasticsearch.core.ReleasableIterator; /** * Vector implementation that stores a constant int value. @@ -39,6 +41,28 @@ public IntVector filter(int... 
positions) { return blockFactory().newConstantIntVector(value, positions.length); } + @Override + public ReleasableIterator lookup(IntBlock positions, ByteSizeValue targetBlockSize) { + if (positions.getPositionCount() == 0) { + return ReleasableIterator.empty(); + } + IntVector positionsVector = positions.asVector(); + if (positionsVector == null) { + return new IntLookup(asBlock(), positions, targetBlockSize); + } + int min = positionsVector.min(); + if (min < 0) { + throw new IllegalArgumentException("invalid position [" + min + "]"); + } + if (min > getPositionCount()) { + return ReleasableIterator.single((IntBlock) positions.blockFactory().newConstantNullBlock(positions.getPositionCount())); + } + if (positionsVector.max() < getPositionCount()) { + return ReleasableIterator.single(positions.blockFactory().newConstantIntBlockWith(value, positions.getPositionCount())); + } + return new IntLookup(asBlock(), positions, targetBlockSize); + } + /** * The minimum value in the block. */ diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantLongVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantLongVector.java index 0173f1c1d4d7a..b997cbbe22849 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantLongVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantLongVector.java @@ -8,6 +8,8 @@ package org.elasticsearch.compute.data; import org.apache.lucene.util.RamUsageEstimator; +import org.elasticsearch.common.unit.ByteSizeValue; +import org.elasticsearch.core.ReleasableIterator; /** * Vector implementation that stores a constant long value. @@ -39,6 +41,28 @@ public LongVector filter(int... 
positions) { return blockFactory().newConstantLongVector(value, positions.length); } + @Override + public ReleasableIterator lookup(IntBlock positions, ByteSizeValue targetBlockSize) { + if (positions.getPositionCount() == 0) { + return ReleasableIterator.empty(); + } + IntVector positionsVector = positions.asVector(); + if (positionsVector == null) { + return new LongLookup(asBlock(), positions, targetBlockSize); + } + int min = positionsVector.min(); + if (min < 0) { + throw new IllegalArgumentException("invalid position [" + min + "]"); + } + if (min > getPositionCount()) { + return ReleasableIterator.single((LongBlock) positions.blockFactory().newConstantNullBlock(positions.getPositionCount())); + } + if (positionsVector.max() < getPositionCount()) { + return ReleasableIterator.single(positions.blockFactory().newConstantLongBlockWith(value, positions.getPositionCount())); + } + return new LongLookup(asBlock(), positions, targetBlockSize); + } + @Override public ElementType elementType() { return ElementType.LONG; diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleArrayVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleArrayVector.java index 476d5e55c55a0..e7c1d342133d5 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleArrayVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleArrayVector.java @@ -10,6 +10,8 @@ import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.unit.ByteSizeValue; +import org.elasticsearch.core.ReleasableIterator; import java.io.IOException; import java.util.stream.Collectors; @@ -90,6 +92,11 @@ public DoubleVector filter(int... 
positions) { } } + @Override + public ReleasableIterator lookup(IntBlock positions, ByteSizeValue targetBlockSize) { + return new DoubleLookup(asBlock(), positions, targetBlockSize); + } + public static long ramBytesEstimated(double[] values) { return BASE_RAM_BYTES_USED + RamUsageEstimator.sizeOf(values); } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleBigArrayVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleBigArrayVector.java index 8f6aedf31b50e..d558eabd2dd4c 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleBigArrayVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleBigArrayVector.java @@ -10,8 +10,10 @@ import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.util.DoubleArray; import org.elasticsearch.core.Releasable; +import org.elasticsearch.core.ReleasableIterator; import java.io.IOException; @@ -86,6 +88,11 @@ public DoubleVector filter(int... 
positions) { return new DoubleBigArrayVector(filtered, positions.length, blockFactory); } + @Override + public ReleasableIterator lookup(IntBlock positions, ByteSizeValue targetBlockSize) { + return new DoubleLookup(asBlock(), positions, targetBlockSize); + } + @Override public void closeInternal() { // The circuit breaker that tracks the values {@link DoubleArray} is adjusted outside diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleVector.java index 1d71575b33316..3d93043f93d8f 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleVector.java @@ -10,6 +10,8 @@ import org.elasticsearch.TransportVersions; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.unit.ByteSizeValue; +import org.elasticsearch.core.ReleasableIterator; import java.io.IOException; @@ -27,6 +29,9 @@ public sealed interface DoubleVector extends Vector permits ConstantDoubleVector @Override DoubleVector filter(int... positions); + @Override + ReleasableIterator lookup(IntBlock positions, ByteSizeValue targetBlockSize); + /** * Compares the given object with this vector for equality. Returns {@code true} if and only if the * given object is a DoubleVector, and both vectors are {@link #equals(DoubleVector, DoubleVector) equal}. 
diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleVectorBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleVectorBlock.java index e76a4e0c5fdee..24887bebcd838 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleVectorBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleVectorBlock.java @@ -52,9 +52,8 @@ public DoubleBlock filter(int... positions) { } @Override - public ReleasableIterator lookup(IntBlock positions, ByteSizeValue targetBlockSize) { - // TODO optimizations - return new DoubleLookup(this, positions, targetBlockSize); + public ReleasableIterator lookup(IntBlock positions, ByteSizeValue targetBlockSize) { + return vector.lookup(positions, targetBlockSize); } @Override diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntArrayVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntArrayVector.java index 97bf1675a9a37..e9d9a6b3fb958 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntArrayVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntArrayVector.java @@ -10,6 +10,8 @@ import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.unit.ByteSizeValue; +import org.elasticsearch.core.ReleasableIterator; import java.io.IOException; import java.util.stream.Collectors; @@ -100,6 +102,11 @@ public IntVector filter(int... 
positions) { } } + @Override + public ReleasableIterator lookup(IntBlock positions, ByteSizeValue targetBlockSize) { + return new IntLookup(asBlock(), positions, targetBlockSize); + } + public static long ramBytesEstimated(int[] values) { return BASE_RAM_BYTES_USED + RamUsageEstimator.sizeOf(values); } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntBigArrayVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntBigArrayVector.java index fe89782bad0ec..df8298b87237e 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntBigArrayVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntBigArrayVector.java @@ -10,8 +10,10 @@ import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.util.IntArray; import org.elasticsearch.core.Releasable; +import org.elasticsearch.core.ReleasableIterator; import java.io.IOException; @@ -126,6 +128,11 @@ public IntVector filter(int... 
positions) { return new IntBigArrayVector(filtered, positions.length, blockFactory); } + @Override + public ReleasableIterator lookup(IntBlock positions, ByteSizeValue targetBlockSize) { + return new IntLookup(asBlock(), positions, targetBlockSize); + } + @Override public void closeInternal() { // The circuit breaker that tracks the values {@link IntArray} is adjusted outside diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntVector.java index 8f6f42b66fbe6..b1a2d1b80a410 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntVector.java @@ -10,6 +10,8 @@ import org.elasticsearch.TransportVersions; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.unit.ByteSizeValue; +import org.elasticsearch.core.ReleasableIterator; import java.io.IOException; @@ -27,6 +29,9 @@ public sealed interface IntVector extends Vector permits ConstantIntVector, IntA @Override IntVector filter(int... positions); + @Override + ReleasableIterator lookup(IntBlock positions, ByteSizeValue targetBlockSize); + /** * The minimum value in the Vector. An empty Vector will return {@link Integer#MAX_VALUE}. */ diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntVectorBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntVectorBlock.java index 70bcf6919bea6..ae28fb9f6ffa6 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntVectorBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntVectorBlock.java @@ -52,9 +52,8 @@ public IntBlock filter(int... 
positions) { } @Override - public ReleasableIterator lookup(IntBlock positions, ByteSizeValue targetBlockSize) { - // TODO optimizations - return new IntLookup(this, positions, targetBlockSize); + public ReleasableIterator lookup(IntBlock positions, ByteSizeValue targetBlockSize) { + return vector.lookup(positions, targetBlockSize); } @Override diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongArrayVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongArrayVector.java index 4b504943b760a..5fa904dcf1acc 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongArrayVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongArrayVector.java @@ -10,6 +10,8 @@ import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.unit.ByteSizeValue; +import org.elasticsearch.core.ReleasableIterator; import java.io.IOException; import java.util.stream.Collectors; @@ -90,6 +92,11 @@ public LongVector filter(int... 
positions) { } } + @Override + public ReleasableIterator lookup(IntBlock positions, ByteSizeValue targetBlockSize) { + return new LongLookup(asBlock(), positions, targetBlockSize); + } + public static long ramBytesEstimated(long[] values) { return BASE_RAM_BYTES_USED + RamUsageEstimator.sizeOf(values); } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongBigArrayVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongBigArrayVector.java index d30dedd4cce16..a7828788169ca 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongBigArrayVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongBigArrayVector.java @@ -10,8 +10,10 @@ import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.util.LongArray; import org.elasticsearch.core.Releasable; +import org.elasticsearch.core.ReleasableIterator; import java.io.IOException; @@ -86,6 +88,11 @@ public LongVector filter(int... 
positions) { return new LongBigArrayVector(filtered, positions.length, blockFactory); } + @Override + public ReleasableIterator lookup(IntBlock positions, ByteSizeValue targetBlockSize) { + return new LongLookup(asBlock(), positions, targetBlockSize); + } + @Override public void closeInternal() { // The circuit breaker that tracks the values {@link LongArray} is adjusted outside diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongVector.java index 2ebdb89a31262..e2f53d1ee07f4 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongVector.java @@ -10,6 +10,8 @@ import org.elasticsearch.TransportVersions; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.unit.ByteSizeValue; +import org.elasticsearch.core.ReleasableIterator; import java.io.IOException; @@ -27,6 +29,9 @@ public sealed interface LongVector extends Vector permits ConstantLongVector, Lo @Override LongVector filter(int... positions); + @Override + ReleasableIterator lookup(IntBlock positions, ByteSizeValue targetBlockSize); + /** * Compares the given object with this vector for equality. Returns {@code true} if and only if the * given object is a LongVector, and both vectors are {@link #equals(LongVector, LongVector) equal}. 
diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongVectorBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongVectorBlock.java index b6f1e8e77505d..01921e1195f4a 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongVectorBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongVectorBlock.java @@ -52,9 +52,8 @@ public LongBlock filter(int... positions) { } @Override - public ReleasableIterator lookup(IntBlock positions, ByteSizeValue targetBlockSize) { - // TODO optimizations - return new LongLookup(this, positions, targetBlockSize); + public ReleasableIterator lookup(IntBlock positions, ByteSizeValue targetBlockSize) { + return vector.lookup(positions, targetBlockSize); } @Override diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/Block.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/Block.java index cfa1d3656ba3a..9a6b701a2e4ea 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/Block.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/Block.java @@ -139,19 +139,19 @@ public interface Block extends Accountable, BlockLoader.Block, NamedWriteable, R * same number of {@link #getPositionCount() positions} as the {@code positions} * parameter. *

-     *     For example, this this block contained {@code [a, b, [b, c]]}
+     *     For example, if this block contained {@code [a, b, [b, c]]}
     *     and were called with the block {@code [0, 1, 1, [1, 2]]} then the
     *     result would be {@code [a, b, b, [b, b, c]]}.
     * </p>
     * <p>
     *     This process produces {@code count(this) * count(positions)} values per
-     *     positions which could be quite quite large. Instead of returning a single
+     *     positions which could be quite large. Instead of returning a single
     *     Block, this returns an Iterator of Blocks containing all of the promised
     *     values.
     * </p>
     * <p>
-     * The returned {@link ReleasableIterator} may retain a reference to {@link Block}s
-     * inside the {@link Page}. Close it to release those references.
+     * The returned {@link ReleasableIterator} may retain a reference to the
+     * {@code positions} parameter. Close it to release those references.
     * </p>
     * <p>
* This block is built using the same {@link BlockFactory} as was used to diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/ConstantNullVector.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/ConstantNullVector.java index 4deededdf41c5..a8a6dbaf382f9 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/ConstantNullVector.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/ConstantNullVector.java @@ -9,6 +9,8 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.unit.ByteSizeValue; +import org.elasticsearch.core.ReleasableIterator; import java.io.IOException; @@ -45,6 +47,12 @@ public ConstantNullVector filter(int... positions) { throw new UnsupportedOperationException("null vector"); } + @Override + public ReleasableIterator lookup(IntBlock positions, ByteSizeValue targetBlockSize) { + assert false : "null vector"; + throw new UnsupportedOperationException("null vector"); + } + @Override public boolean getBoolean(int position) { assert false : "null vector"; diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/DocBlock.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/DocBlock.java index e5a0d934aa01a..da9ca2bbae270 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/DocBlock.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/DocBlock.java @@ -52,7 +52,7 @@ public Block filter(int... 
positions) { @Override public ReleasableIterator lookup(IntBlock positions, ByteSizeValue targetBlockSize) { - throw new UnsupportedOperationException(); + throw new UnsupportedOperationException("can't lookup values from DocBlock"); } @Override diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/DocVector.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/DocVector.java index 067fddd311cc7..33f5797f60df8 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/DocVector.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/DocVector.java @@ -9,6 +9,8 @@ import org.apache.lucene.util.IntroSorter; import org.apache.lucene.util.RamUsageEstimator; +import org.elasticsearch.common.unit.ByteSizeValue; +import org.elasticsearch.core.ReleasableIterator; import org.elasticsearch.core.Releasables; import java.util.Objects; @@ -235,6 +237,11 @@ public DocVector filter(int... positions) { } } + @Override + public ReleasableIterator lookup(IntBlock positions, ByteSizeValue targetBlockSize) { + throw new UnsupportedOperationException("can't lookup values from DocVector"); + } + @Override public ElementType elementType() { return ElementType.DOC; diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/OrdinalBytesRefVector.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/OrdinalBytesRefVector.java index a67db54b68ec9..ec0c7efa715ad 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/OrdinalBytesRefVector.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/OrdinalBytesRefVector.java @@ -10,6 +10,8 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.unit.ByteSizeValue; +import 
org.elasticsearch.core.ReleasableIterator; import org.elasticsearch.core.Releasables; import java.io.IOException; @@ -120,6 +122,11 @@ public BytesRefVector filter(int... positions) { } } + @Override + public ReleasableIterator lookup(IntBlock positions, ByteSizeValue targetBlockSize) { + return new BytesRefLookup(asBlock(), positions, targetBlockSize); + } + @Override public ElementType elementType() { return bytes.elementType(); diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/Vector.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/Vector.java index 89b39569be454..9a5688685374d 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/Vector.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/Vector.java @@ -8,8 +8,10 @@ package org.elasticsearch.compute.data; import org.apache.lucene.util.Accountable; +import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.core.RefCounted; import org.elasticsearch.core.Releasable; +import org.elasticsearch.core.ReleasableIterator; /** * A dense Vector of single values. @@ -35,6 +37,33 @@ public interface Vector extends Accountable, RefCounted, Releasable { */ Vector filter(int... positions); + /** + * Builds an Iterator of new {@link Block}s with the same {@link #elementType} + * as this {@link Vector} whose values are copied from positions in this Vector. + * It has the same number of {@link #getPositionCount() positions} as the + * {@code positions} parameter. + *

+     *     For example, if this vector contained {@code [a, b, c]}
+     *     and were called with the block {@code [0, 1, 1, [1, 2]]} then the
+     *     result would be {@code [a, b, b, [b, c]]}.
+     * </p>
+     * <p>
+     *     This process produces {@code count(positions)} values per
+     *     positions which could be quite large. Instead of returning a single
+     *     Block, this returns an Iterator of Blocks containing all of the promised
+     *     values.
+     * </p>
+     * <p>
+     * The returned {@link ReleasableIterator} may retain a reference to the
+     * {@code positions} parameter. Close it to release those references.
+     * </p>
+     * <p>
+     *     This block is built using the same {@link BlockFactory} as was used to
+     *     build the {@code positions} parameter.
+     * </p>
+ */ + ReleasableIterator lookup(IntBlock positions, ByteSizeValue targetBlockSize); + /** * {@return the element type of this vector} */ diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-ArrayVector.java.st b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-ArrayVector.java.st index 4afd8db62f848..d594d32898d36 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-ArrayVector.java.st +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-ArrayVector.java.st @@ -12,7 +12,9 @@ import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.util.BytesRefArray; +import org.elasticsearch.core.ReleasableIterator; import org.elasticsearch.core.Releasables; import java.io.IOException; @@ -21,6 +23,8 @@ $else$ import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.unit.ByteSizeValue; +import org.elasticsearch.core.ReleasableIterator; import java.io.IOException; import java.util.stream.Collectors; @@ -168,6 +172,11 @@ $endif$ } } + @Override + public ReleasableIterator<$Type$Block> lookup(IntBlock positions, ByteSizeValue targetBlockSize) { + return new $Type$Lookup(asBlock(), positions, targetBlockSize); + } + public static long ramBytesEstimated($if(BytesRef)$BytesRefArray$else$$type$[]$endif$ values) { return BASE_RAM_BYTES_USED + RamUsageEstimator.sizeOf(values); } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-BigArrayVector.java.st b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-BigArrayVector.java.st index 
d6a8723748c1f..30ef9e799cf11 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-BigArrayVector.java.st +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-BigArrayVector.java.st @@ -10,8 +10,10 @@ package org.elasticsearch.compute.data; import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.util.$Array$; import org.elasticsearch.core.Releasable; +import org.elasticsearch.core.ReleasableIterator; import java.io.IOException; @@ -148,6 +150,11 @@ $endif$ return new $Type$BigArrayVector(filtered, positions.length, blockFactory); } + @Override + public ReleasableIterator<$Type$Block> lookup(IntBlock positions, ByteSizeValue targetBlockSize) { + return new $Type$Lookup(asBlock(), positions, targetBlockSize); + } + @Override public void closeInternal() { // The circuit breaker that tracks the values {@link $if(boolean)$Bit$else$$Type$$endif$Array} is adjusted outside diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-ConstantVector.java.st b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-ConstantVector.java.st index 37cb2d2412522..42c34128121a8 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-ConstantVector.java.st +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-ConstantVector.java.st @@ -11,6 +11,8 @@ $if(BytesRef)$ import org.apache.lucene.util.BytesRef; $endif$ import org.apache.lucene.util.RamUsageEstimator; +import org.elasticsearch.common.unit.ByteSizeValue; +import org.elasticsearch.core.ReleasableIterator; /** * Vector implementation that stores a constant $type$ value. 
@@ -58,6 +60,28 @@ $endif$ return blockFactory().newConstant$Type$Vector(value, positions.length); } + @Override + public ReleasableIterator<$Type$Block> lookup(IntBlock positions, ByteSizeValue targetBlockSize) { + if (positions.getPositionCount() == 0) { + return ReleasableIterator.empty(); + } + IntVector positionsVector = positions.asVector(); + if (positionsVector == null) { + return new $Type$Lookup(asBlock(), positions, targetBlockSize); + } + int min = positionsVector.min(); + if (min < 0) { + throw new IllegalArgumentException("invalid position [" + min + "]"); + } + if (min > getPositionCount()) { + return ReleasableIterator.single(($Type$Block) positions.blockFactory().newConstantNullBlock(positions.getPositionCount())); + } + if (positionsVector.max() < getPositionCount()) { + return ReleasableIterator.single(positions.blockFactory().newConstant$Type$BlockWith(value, positions.getPositionCount())); + } + return new $Type$Lookup(asBlock(), positions, targetBlockSize); + } + $if(int)$ /** * The minimum value in the block. diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-Vector.java.st b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-Vector.java.st index 746ccc97a2819..628ee93ed757d 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-Vector.java.st +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-Vector.java.st @@ -13,6 +13,8 @@ $endif$ import org.elasticsearch.TransportVersions; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.unit.ByteSizeValue; +import org.elasticsearch.core.ReleasableIterator; import java.io.IOException; @@ -54,6 +56,9 @@ $endif$ @Override $Type$Vector filter(int... 
positions); + @Override + ReleasableIterator lookup(IntBlock positions, ByteSizeValue targetBlockSize); + $if(int)$ /** * The minimum value in the Vector. An empty Vector will return {@link Integer#MAX_VALUE}. diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-VectorBlock.java.st b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-VectorBlock.java.st index f011d6f2a4b48..8f4390e8782c5 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-VectorBlock.java.st +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-VectorBlock.java.st @@ -72,9 +72,8 @@ $endif$ } @Override - public ReleasableIterator<$Type$Block> lookup(IntBlock positions, ByteSizeValue targetBlockSize) { - // TODO optimizations - return new $Type$Lookup(this, positions, targetBlockSize); + public ReleasableIterator lookup(IntBlock positions, ByteSizeValue targetBlockSize) { + return vector.lookup(positions, targetBlockSize); } @Override diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BasicBlockTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BasicBlockTests.java index 3d80e560cc4d2..017d4c7065bed 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BasicBlockTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BasicBlockTests.java @@ -35,6 +35,7 @@ import java.util.BitSet; import java.util.List; import java.util.function.BiConsumer; +import java.util.function.Consumer; import java.util.function.Supplier; import java.util.stream.IntStream; import java.util.stream.LongStream; @@ -283,8 +284,19 @@ public void testConstantIntBlock() { positions(blockFactory, 1, 2, new int[] { 1, 2 }), List.of(List.of(value), List.of(value), List.of(value, value)) ); + assertLookup( + block, + positions(blockFactory, 1, 2), + List.of(List.of(value), List.of(value)), + b 
-> assertThat(b.asVector(), instanceOf(ConstantIntVector.class)) + ); } - assertLookup(block, positions(blockFactory, positionCount + 1000), singletonList(null)); + assertLookup( + block, + positions(blockFactory, positionCount + 1000), + singletonList(null), + b -> assertThat(b, instanceOf(ConstantNullBlock.class)) + ); assertEmptyLookup(blockFactory, block); assertThat(block.asVector().min(), equalTo(value)); assertThat(block.asVector().max(), equalTo(value)); @@ -365,8 +377,19 @@ public void testConstantLongBlock() { positions(blockFactory, 1, 2, new int[] { 1, 2 }), List.of(List.of(value), List.of(value), List.of(value, value)) ); + assertLookup( + block, + positions(blockFactory, 1, 2), + List.of(List.of(value), List.of(value)), + b -> assertThat(b.asVector(), instanceOf(ConstantLongVector.class)) + ); } - assertLookup(block, positions(blockFactory, positionCount + 1000), singletonList(null)); + assertLookup( + block, + positions(blockFactory, positionCount + 1000), + singletonList(null), + b -> assertThat(b, instanceOf(ConstantNullBlock.class)) + ); assertEmptyLookup(blockFactory, block); releaseAndAssertBreaker(block); } @@ -447,8 +470,19 @@ public void testConstantDoubleBlock() { positions(blockFactory, 1, 2, new int[] { 1, 2 }), List.of(List.of(value), List.of(value), List.of(value, value)) ); + assertLookup( + block, + positions(blockFactory, 1, 2), + List.of(List.of(value), List.of(value)), + b -> assertThat(b.asVector(), instanceOf(ConstantDoubleVector.class)) + ); } - assertLookup(block, positions(blockFactory, positionCount + 1000), singletonList(null)); + assertLookup( + block, + positions(blockFactory, positionCount + 1000), + singletonList(null), + b -> assertThat(b, instanceOf(ConstantNullBlock.class)) + ); assertEmptyLookup(blockFactory, block); releaseAndAssertBreaker(block); } @@ -605,8 +639,19 @@ public void testConstantBytesRefBlock() { positions(blockFactory, 1, 2, new int[] { 1, 2 }), List.of(List.of(value), List.of(value), List.of(value, 
value)) ); + assertLookup( + block, + positions(blockFactory, 1, 2), + List.of(List.of(value), List.of(value)), + b -> assertThat(b.asVector(), instanceOf(ConstantBytesRefVector.class)) + ); } - assertLookup(block, positions(blockFactory, positionCount + 1000), singletonList(null)); + assertLookup( + block, + positions(blockFactory, positionCount + 1000), + singletonList(null), + b -> assertThat(b, instanceOf(ConstantNullBlock.class)) + ); assertEmptyLookup(blockFactory, block); releaseAndAssertBreaker(block); } @@ -689,8 +734,19 @@ public void testConstantBooleanBlock() { positions(blockFactory, 1, 2, new int[] { 1, 2 }), List.of(List.of(value), List.of(value), List.of(value, value)) ); + assertLookup( + block, + positions(blockFactory, 1, 2), + List.of(List.of(value), List.of(value)), + b -> assertThat(b.asVector(), instanceOf(ConstantBooleanVector.class)) + ); } - assertLookup(block, positions(blockFactory, positionCount + 1000), singletonList(null)); + assertLookup( + block, + positions(blockFactory, positionCount + 1000), + singletonList(null), + b -> assertThat(b, instanceOf(ConstantNullBlock.class)) + ); assertEmptyLookup(blockFactory, block); releaseAndAssertBreaker(block); } @@ -716,6 +772,24 @@ public void testConstantNullBlock() { assertThat(positionCount, is(block.getPositionCount())); assertThat(block.getPositionCount(), is(positionCount)); assertThat(block.isNull(randomPosition(positionCount)), is(true)); + if (positionCount > 2) { + List> expected = new ArrayList<>(); + expected.add(null); + expected.add(null); + expected.add(null); + assertLookup( + block, + positions(blockFactory, 1, 2, new int[] { 1, 2 }), + expected, + b -> assertThat(b, instanceOf(ConstantNullBlock.class)) + ); + } + assertLookup( + block, + positions(blockFactory, positionCount + 1000), + singletonList(null), + b -> assertThat(b, instanceOf(ConstantNullBlock.class)) + ); releaseAndAssertBreaker(block); } } @@ -1544,11 +1618,16 @@ static void assertEmptyLookup(BlockFactory 
blockFactory, Block block) { } static void assertLookup(Block block, IntBlock positions, List> expected) { + assertLookup(block, positions, expected, l -> {}); + } + + static void assertLookup(Block block, IntBlock positions, List> expected, Consumer extra) { try (positions; ReleasableIterator lookup = block.lookup(positions, ByteSizeValue.ofKb(100))) { assertThat(lookup.hasNext(), equalTo(true)); try (Block b = lookup.next()) { assertThat(valuesAtPositions(b, 0, b.getPositionCount()), equalTo(expected)); assertThat(b.blockFactory(), sameInstance(positions.blockFactory())); + extra.accept(b); } assertThat(lookup.hasNext(), equalTo(false)); } From 11de886346b02df8f23848bb31d29ad4b70f02f3 Mon Sep 17 00:00:00 2001 From: Fang Xing <155562079+fang-xing-esql@users.noreply.github.com> Date: Fri, 10 May 2024 14:49:25 -0400 Subject: [PATCH 116/117] [ES|QL] Add/Modify annotations for spatial and conditional functions for better doc generation (#107722) * annotation for spatial functions and conditional functions --- .../esql/functions/coalesce.asciidoc | 13 -- ...itional-functions-and-expressions.asciidoc | 8 +- .../esql/functions/description/case.asciidoc | 2 +- .../functions/description/greatest.asciidoc | 4 +- .../esql/functions/description/least.asciidoc | 2 +- .../description/st_contains.asciidoc | 2 +- .../description/st_disjoint.asciidoc | 2 +- .../description/st_intersects.asciidoc | 2 +- .../functions/description/st_within.asciidoc | 2 +- .../esql/functions/description/st_x.asciidoc | 2 +- .../esql/functions/description/st_y.asciidoc | 2 +- .../esql/functions/examples/case.asciidoc | 32 +++ .../esql/functions/examples/greatest.asciidoc | 13 ++ .../esql/functions/examples/least.asciidoc | 13 ++ .../esql/functions/examples/st_x.asciidoc | 13 ++ .../esql/functions/examples/st_y.asciidoc | 13 ++ .../esql/functions/greatest.asciidoc | 38 ---- .../functions/kibana/definition/case.json | 211 +++++++++++++++++- .../functions/kibana/definition/coalesce.json | 30 +-- 
.../functions/kibana/definition/greatest.json | 48 ++-- .../functions/kibana/definition/least.json | 47 ++-- .../kibana/definition/st_contains.json | 34 +-- .../kibana/definition/st_disjoint.json | 34 +-- .../kibana/definition/st_intersects.json | 34 +-- .../kibana/definition/st_within.json | 34 +-- .../functions/kibana/definition/st_x.json | 9 +- .../functions/kibana/definition/st_y.json | 9 +- .../esql/functions/kibana/docs/case.md | 16 +- .../esql/functions/kibana/docs/greatest.md | 8 +- .../esql/functions/kibana/docs/least.md | 6 +- .../esql/functions/kibana/docs/st_contains.md | 1 + .../esql/functions/kibana/docs/st_disjoint.md | 2 + .../functions/kibana/docs/st_intersects.md | 6 +- .../esql/functions/kibana/docs/st_within.md | 1 + .../esql/functions/kibana/docs/st_x.md | 7 +- .../esql/functions/kibana/docs/st_y.md | 7 +- .../esql/functions/layout/case.asciidoc | 1 + .../esql/functions/layout/greatest.asciidoc | 1 + .../esql/functions/layout/least.asciidoc | 1 + .../esql/functions/layout/st_x.asciidoc | 1 + .../esql/functions/layout/st_y.asciidoc | 1 + docs/reference/esql/functions/least.asciidoc | 38 ---- .../esql/functions/parameters/case.asciidoc | 4 +- .../functions/parameters/coalesce.asciidoc | 4 +- .../functions/parameters/greatest.asciidoc | 4 +- .../esql/functions/parameters/least.asciidoc | 4 +- .../functions/parameters/st_contains.asciidoc | 4 +- .../functions/parameters/st_disjoint.asciidoc | 4 +- .../parameters/st_intersects.asciidoc | 4 +- .../functions/parameters/st_within.asciidoc | 4 +- .../esql/functions/parameters/st_x.asciidoc | 2 +- .../esql/functions/parameters/st_y.asciidoc | 2 +- .../esql/functions/spatial-functions.asciidoc | 12 +- .../esql/functions/st_contains.asciidoc | 26 --- .../esql/functions/st_disjoint.asciidoc | 27 --- .../esql/functions/st_intersects.asciidoc | 31 --- .../esql/functions/st_within.asciidoc | 26 --- docs/reference/esql/functions/st_x.asciidoc | 33 --- docs/reference/esql/functions/st_y.asciidoc | 33 --- 
.../esql/functions/types/case.asciidoc | 12 +- .../src/main/resources/meta.csv-spec | 46 ++-- .../function/scalar/conditional/Case.java | 27 ++- .../function/scalar/conditional/Greatest.java | 23 +- .../function/scalar/conditional/Least.java | 21 +- .../function/scalar/nulls/Coalesce.java | 4 +- .../scalar/spatial/SpatialContains.java | 12 +- .../scalar/spatial/SpatialDisjoint.java | 13 +- .../scalar/spatial/SpatialIntersects.java | 19 +- .../scalar/spatial/SpatialWithin.java | 12 +- .../function/scalar/spatial/StX.java | 17 +- .../function/scalar/spatial/StY.java | 17 +- .../scalar/conditional/CaseTests.java | 185 +++++++++++++-- 72 files changed, 860 insertions(+), 522 deletions(-) delete mode 100644 docs/reference/esql/functions/coalesce.asciidoc create mode 100644 docs/reference/esql/functions/examples/case.asciidoc create mode 100644 docs/reference/esql/functions/examples/greatest.asciidoc create mode 100644 docs/reference/esql/functions/examples/least.asciidoc create mode 100644 docs/reference/esql/functions/examples/st_x.asciidoc create mode 100644 docs/reference/esql/functions/examples/st_y.asciidoc delete mode 100644 docs/reference/esql/functions/greatest.asciidoc delete mode 100644 docs/reference/esql/functions/least.asciidoc delete mode 100644 docs/reference/esql/functions/st_contains.asciidoc delete mode 100644 docs/reference/esql/functions/st_disjoint.asciidoc delete mode 100644 docs/reference/esql/functions/st_intersects.asciidoc delete mode 100644 docs/reference/esql/functions/st_within.asciidoc delete mode 100644 docs/reference/esql/functions/st_x.asciidoc delete mode 100644 docs/reference/esql/functions/st_y.asciidoc diff --git a/docs/reference/esql/functions/coalesce.asciidoc b/docs/reference/esql/functions/coalesce.asciidoc deleted file mode 100644 index 2d8c0f379c82e..0000000000000 --- a/docs/reference/esql/functions/coalesce.asciidoc +++ /dev/null @@ -1,13 +0,0 @@ -[discrete] -[[esql-coalesce]] -=== `COALESCE` - -*Syntax* - -[source,esql] ----- 
-COALESCE(expression1 [, ..., expressionN]) ----- -include::parameters/coalesce.asciidoc[] -include::description/coalesce.asciidoc[] -include::examples/coalesce.asciidoc[] diff --git a/docs/reference/esql/functions/conditional-functions-and-expressions.asciidoc b/docs/reference/esql/functions/conditional-functions-and-expressions.asciidoc index d835a14856c03..081e3b8589dba 100644 --- a/docs/reference/esql/functions/conditional-functions-and-expressions.asciidoc +++ b/docs/reference/esql/functions/conditional-functions-and-expressions.asciidoc @@ -15,7 +15,7 @@ manner. {esql} supports these conditional functions: * <> // end::cond_list[] -include::case.asciidoc[] -include::coalesce.asciidoc[] -include::greatest.asciidoc[] -include::least.asciidoc[] +include::layout/case.asciidoc[] +include::layout/coalesce.asciidoc[] +include::layout/greatest.asciidoc[] +include::layout/least.asciidoc[] diff --git a/docs/reference/esql/functions/description/case.asciidoc b/docs/reference/esql/functions/description/case.asciidoc index 5c98a7a2620d0..c3e80301fbc31 100644 --- a/docs/reference/esql/functions/description/case.asciidoc +++ b/docs/reference/esql/functions/description/case.asciidoc @@ -2,4 +2,4 @@ *Description* -Accepts pairs of conditions and values. The function returns the value that belongs to the first condition that evaluates to true. +Accepts pairs of conditions and values. The function returns the value that belongs to the first condition that evaluates to `true`. If the number of arguments is odd, the last argument is the default value which is returned when no condition matches. If the number of arguments is even, and no condition matches, the function returns `null`. 
diff --git a/docs/reference/esql/functions/description/greatest.asciidoc b/docs/reference/esql/functions/description/greatest.asciidoc index 3c7cfd3bfb14c..ed705d0bbb59e 100644 --- a/docs/reference/esql/functions/description/greatest.asciidoc +++ b/docs/reference/esql/functions/description/greatest.asciidoc @@ -2,4 +2,6 @@ *Description* -Returns the maximum value from many columns. +Returns the maximum value from multiple columns. This is similar to <> except it is intended to run on multiple columns at once. + +NOTE: When run on `keyword` or `text` fields, this returns the last string in alphabetical order. When run on `boolean` columns this will return `true` if any values are `true`. diff --git a/docs/reference/esql/functions/description/least.asciidoc b/docs/reference/esql/functions/description/least.asciidoc index 2aeb1f85aa51a..c5daf0bc79ae0 100644 --- a/docs/reference/esql/functions/description/least.asciidoc +++ b/docs/reference/esql/functions/description/least.asciidoc @@ -2,4 +2,4 @@ *Description* -Returns the minimum value from many columns. +Returns the minimum value from multiple columns. This is similar to <> except it is intended to run on multiple columns at once. diff --git a/docs/reference/esql/functions/description/st_contains.asciidoc b/docs/reference/esql/functions/description/st_contains.asciidoc index 678fde7f5d98b..a2c81b9d24a10 100644 --- a/docs/reference/esql/functions/description/st_contains.asciidoc +++ b/docs/reference/esql/functions/description/st_contains.asciidoc @@ -2,4 +2,4 @@ *Description* -Returns whether the first geometry contains the second geometry. +Returns whether the first geometry contains the second geometry. This is the inverse of the <> function. 
diff --git a/docs/reference/esql/functions/description/st_disjoint.asciidoc b/docs/reference/esql/functions/description/st_disjoint.asciidoc index 95ab02a39614a..461dd61daef7a 100644 --- a/docs/reference/esql/functions/description/st_disjoint.asciidoc +++ b/docs/reference/esql/functions/description/st_disjoint.asciidoc @@ -2,4 +2,4 @@ *Description* -Returns whether the two geometries or geometry columns are disjoint. +Returns whether the two geometries or geometry columns are disjoint. This is the inverse of the <> function. In mathematical terms: ST_Disjoint(A, B) ⇔ A ⋂ B = ∅ diff --git a/docs/reference/esql/functions/description/st_intersects.asciidoc b/docs/reference/esql/functions/description/st_intersects.asciidoc index b736ba29a6c8b..48fd7bdb2f338 100644 --- a/docs/reference/esql/functions/description/st_intersects.asciidoc +++ b/docs/reference/esql/functions/description/st_intersects.asciidoc @@ -2,4 +2,4 @@ *Description* -Returns whether the two geometries or geometry columns intersect. +Returns true if two geometries intersect. They intersect if they have any point in common, including their interior points (points along lines or within polygons). This is the inverse of the <> function. In mathematical terms: ST_Intersects(A, B) ⇔ A ⋂ B ≠ ∅ diff --git a/docs/reference/esql/functions/description/st_within.asciidoc b/docs/reference/esql/functions/description/st_within.asciidoc index 890f28cb769b0..38a34f518234a 100644 --- a/docs/reference/esql/functions/description/st_within.asciidoc +++ b/docs/reference/esql/functions/description/st_within.asciidoc @@ -2,4 +2,4 @@ *Description* -Returns whether the first geometry is within the second geometry. +Returns whether the first geometry is within the second geometry. This is the inverse of the <> function. 
diff --git a/docs/reference/esql/functions/description/st_x.asciidoc b/docs/reference/esql/functions/description/st_x.asciidoc index beb077bea332c..33d867f862429 100644 --- a/docs/reference/esql/functions/description/st_x.asciidoc +++ b/docs/reference/esql/functions/description/st_x.asciidoc @@ -2,4 +2,4 @@ *Description* -Extracts the x-coordinate from a point geometry. +Extracts the `x` coordinate from the supplied point. If the points is of type `geo_point` this is equivalent to extracting the `longitude` value. diff --git a/docs/reference/esql/functions/description/st_y.asciidoc b/docs/reference/esql/functions/description/st_y.asciidoc index 19c371d2ef931..b03956a51e1a6 100644 --- a/docs/reference/esql/functions/description/st_y.asciidoc +++ b/docs/reference/esql/functions/description/st_y.asciidoc @@ -2,4 +2,4 @@ *Description* -Extracts the y-coordinate from a point geometry. +Extracts the `y` coordinate from the supplied point. If the points is of type `geo_point` this is equivalent to extracting the `latitude` value. diff --git a/docs/reference/esql/functions/examples/case.asciidoc b/docs/reference/esql/functions/examples/case.asciidoc new file mode 100644 index 0000000000000..c5c766512ce0b --- /dev/null +++ b/docs/reference/esql/functions/examples/case.asciidoc @@ -0,0 +1,32 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. 
+ +*Examples* + +Determine whether employees are monolingual, bilingual, or polyglot: +[source.merge.styled,esql] +---- +include::{esql-specs}/docs.csv-spec[tag=case] +---- +[%header.monospaced.styled,format=dsv,separator=|] +|=== +include::{esql-specs}/docs.csv-spec[tag=case-result] +|=== +Calculate the total connection success rate based on log messages: +[source.merge.styled,esql] +---- +include::{esql-specs}/conditional.csv-spec[tag=docsCaseSuccessRate] +---- +[%header.monospaced.styled,format=dsv,separator=|] +|=== +include::{esql-specs}/conditional.csv-spec[tag=docsCaseSuccessRate-result] +|=== +Calculate an hourly error rate as a percentage of the total number of log messages: +[source.merge.styled,esql] +---- +include::{esql-specs}/conditional.csv-spec[tag=docsCaseHourlyErrorRate] +---- +[%header.monospaced.styled,format=dsv,separator=|] +|=== +include::{esql-specs}/conditional.csv-spec[tag=docsCaseHourlyErrorRate-result] +|=== + diff --git a/docs/reference/esql/functions/examples/greatest.asciidoc b/docs/reference/esql/functions/examples/greatest.asciidoc new file mode 100644 index 0000000000000..bd89ad1b3cdd1 --- /dev/null +++ b/docs/reference/esql/functions/examples/greatest.asciidoc @@ -0,0 +1,13 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Example* + +[source.merge.styled,esql] +---- +include::{esql-specs}/math.csv-spec[tag=greatest] +---- +[%header.monospaced.styled,format=dsv,separator=|] +|=== +include::{esql-specs}/math.csv-spec[tag=greatest-result] +|=== + diff --git a/docs/reference/esql/functions/examples/least.asciidoc b/docs/reference/esql/functions/examples/least.asciidoc new file mode 100644 index 0000000000000..67fc5260f6391 --- /dev/null +++ b/docs/reference/esql/functions/examples/least.asciidoc @@ -0,0 +1,13 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. 
+ +*Example* + +[source.merge.styled,esql] +---- +include::{esql-specs}/math.csv-spec[tag=least] +---- +[%header.monospaced.styled,format=dsv,separator=|] +|=== +include::{esql-specs}/math.csv-spec[tag=least-result] +|=== + diff --git a/docs/reference/esql/functions/examples/st_x.asciidoc b/docs/reference/esql/functions/examples/st_x.asciidoc new file mode 100644 index 0000000000000..895e76c6c04e2 --- /dev/null +++ b/docs/reference/esql/functions/examples/st_x.asciidoc @@ -0,0 +1,13 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Example* + +[source.merge.styled,esql] +---- +include::{esql-specs}/spatial.csv-spec[tag=st_x_y] +---- +[%header.monospaced.styled,format=dsv,separator=|] +|=== +include::{esql-specs}/spatial.csv-spec[tag=st_x_y-result] +|=== + diff --git a/docs/reference/esql/functions/examples/st_y.asciidoc b/docs/reference/esql/functions/examples/st_y.asciidoc new file mode 100644 index 0000000000000..895e76c6c04e2 --- /dev/null +++ b/docs/reference/esql/functions/examples/st_y.asciidoc @@ -0,0 +1,13 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Example* + +[source.merge.styled,esql] +---- +include::{esql-specs}/spatial.csv-spec[tag=st_x_y] +---- +[%header.monospaced.styled,format=dsv,separator=|] +|=== +include::{esql-specs}/spatial.csv-spec[tag=st_x_y-result] +|=== + diff --git a/docs/reference/esql/functions/greatest.asciidoc b/docs/reference/esql/functions/greatest.asciidoc deleted file mode 100644 index 003f1f46e6db5..0000000000000 --- a/docs/reference/esql/functions/greatest.asciidoc +++ /dev/null @@ -1,38 +0,0 @@ -[discrete] -[[esql-greatest]] -=== `GREATEST` - -*Syntax* - -[.text-center] -image::esql/functions/signature/greatest.svg[Embedded,opts=inline] - -*Parameters* - -`first`:: -First of the columns to evaluate. - -`rest`:: -The rest of the columns to evaluate. 
- -*Description* - -Returns the maximum value from multiple columns. This is similar to <> -except it is intended to run on multiple columns at once. - -NOTE: When run on `keyword` or `text` fields, this returns the last string - in alphabetical order. When run on `boolean` columns this will return - `true` if any values are `true`. - -include::types/greatest.asciidoc[] - -*Example* - -[source.merge.styled,esql] ----- -include::{esql-specs}/math.csv-spec[tag=greatest] ----- -[%header.monospaced.styled,format=dsv,separator=|] -|=== -include::{esql-specs}/math.csv-spec[tag=greatest-result] -|=== diff --git a/docs/reference/esql/functions/kibana/definition/case.json b/docs/reference/esql/functions/kibana/definition/case.json index 73bc215ac6ade..5959eed62d37b 100644 --- a/docs/reference/esql/functions/kibana/definition/case.json +++ b/docs/reference/esql/functions/kibana/definition/case.json @@ -2,7 +2,7 @@ "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", "type" : "eval", "name" : "case", - "description" : "Accepts pairs of conditions and values.\nThe function returns the value that belongs to the first condition that evaluates to true.", + "description" : "Accepts pairs of conditions and values. The function returns the value that\nbelongs to the first condition that evaluates to `true`.\n\nIf the number of arguments is odd, the last argument is the default value which\nis returned when no condition matches. If the number of arguments is even, and\nno condition matches, the function returns `null`.", "signatures" : [ { "params" : [ @@ -10,23 +10,226 @@ "name" : "condition", "type" : "boolean", "optional" : false, - "description" : "" + "description" : "A condition." + }, + { + "name" : "trueValue", + "type" : "boolean", + "optional" : false, + "description" : "The value that's returned when the corresponding condition is the first to evaluate to `true`. 
The default value is returned when no condition matches." + } + ], + "variadic" : true, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "condition", + "type" : "boolean", + "optional" : false, + "description" : "A condition." + }, + { + "name" : "trueValue", + "type" : "cartesian_point", + "optional" : false, + "description" : "The value that's returned when the corresponding condition is the first to evaluate to `true`. The default value is returned when no condition matches." + } + ], + "variadic" : true, + "returnType" : "cartesian_point" + }, + { + "params" : [ + { + "name" : "condition", + "type" : "boolean", + "optional" : false, + "description" : "A condition." + }, + { + "name" : "trueValue", + "type" : "datetime", + "optional" : false, + "description" : "The value that's returned when the corresponding condition is the first to evaluate to `true`. The default value is returned when no condition matches." + } + ], + "variadic" : true, + "returnType" : "datetime" + }, + { + "params" : [ + { + "name" : "condition", + "type" : "boolean", + "optional" : false, + "description" : "A condition." + }, + { + "name" : "trueValue", + "type" : "double", + "optional" : false, + "description" : "The value that's returned when the corresponding condition is the first to evaluate to `true`. The default value is returned when no condition matches." + } + ], + "variadic" : true, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "condition", + "type" : "boolean", + "optional" : false, + "description" : "A condition." + }, + { + "name" : "trueValue", + "type" : "geo_point", + "optional" : false, + "description" : "The value that's returned when the corresponding condition is the first to evaluate to `true`. The default value is returned when no condition matches." 
+ } + ], + "variadic" : true, + "returnType" : "geo_point" + }, + { + "params" : [ + { + "name" : "condition", + "type" : "boolean", + "optional" : false, + "description" : "A condition." + }, + { + "name" : "trueValue", + "type" : "integer", + "optional" : false, + "description" : "The value that's returned when the corresponding condition is the first to evaluate to `true`. The default value is returned when no condition matches." + } + ], + "variadic" : true, + "returnType" : "integer" + }, + { + "params" : [ + { + "name" : "condition", + "type" : "boolean", + "optional" : false, + "description" : "A condition." + }, + { + "name" : "trueValue", + "type" : "ip", + "optional" : false, + "description" : "The value that's returned when the corresponding condition is the first to evaluate to `true`. The default value is returned when no condition matches." + } + ], + "variadic" : true, + "returnType" : "ip" + }, + { + "params" : [ + { + "name" : "condition", + "type" : "boolean", + "optional" : false, + "description" : "A condition." }, { "name" : "trueValue", "type" : "keyword", "optional" : false, - "description" : "" + "description" : "The value that's returned when the corresponding condition is the first to evaluate to `true`. The default value is returned when no condition matches." }, { "name" : "falseValue", "type" : "keyword", "optional" : true, - "description" : "" + "description" : "The value that's returned when the corresponding condition is the first to evaluate to `true`. The default value is returned when no condition matches." } ], "variadic" : true, "returnType" : "keyword" + }, + { + "params" : [ + { + "name" : "condition", + "type" : "boolean", + "optional" : false, + "description" : "A condition." + }, + { + "name" : "trueValue", + "type" : "long", + "optional" : false, + "description" : "The value that's returned when the corresponding condition is the first to evaluate to `true`. The default value is returned when no condition matches." 
+ } + ], + "variadic" : true, + "returnType" : "long" + }, + { + "params" : [ + { + "name" : "condition", + "type" : "boolean", + "optional" : false, + "description" : "A condition." + }, + { + "name" : "trueValue", + "type" : "text", + "optional" : false, + "description" : "The value that's returned when the corresponding condition is the first to evaluate to `true`. The default value is returned when no condition matches." + } + ], + "variadic" : true, + "returnType" : "text" + }, + { + "params" : [ + { + "name" : "condition", + "type" : "boolean", + "optional" : false, + "description" : "A condition." + }, + { + "name" : "trueValue", + "type" : "unsigned_long", + "optional" : false, + "description" : "The value that's returned when the corresponding condition is the first to evaluate to `true`. The default value is returned when no condition matches." + } + ], + "variadic" : true, + "returnType" : "unsigned_long" + }, + { + "params" : [ + { + "name" : "condition", + "type" : "boolean", + "optional" : false, + "description" : "A condition." + }, + { + "name" : "trueValue", + "type" : "version", + "optional" : false, + "description" : "The value that's returned when the corresponding condition is the first to evaluate to `true`. The default value is returned when no condition matches." 
+ } + ], + "variadic" : true, + "returnType" : "version" } + ], + "examples" : [ + "FROM employees\n| EVAL type = CASE(\n languages <= 1, \"monolingual\",\n languages <= 2, \"bilingual\",\n \"polyglot\")\n| KEEP emp_no, languages, type", + "FROM sample_data\n| EVAL successful = CASE(\n STARTS_WITH(message, \"Connected to\"), 1,\n message == \"Connection error\", 0\n )\n| STATS success_rate = AVG(successful)", + "FROM sample_data\n| EVAL error = CASE(message LIKE \"*error*\", 1, 0)\n| EVAL hour = DATE_TRUNC(1 hour, @timestamp)\n| STATS error_rate = AVG(error) by hour\n| SORT hour" ] } diff --git a/docs/reference/esql/functions/kibana/definition/coalesce.json b/docs/reference/esql/functions/kibana/definition/coalesce.json index 87feead06d091..1081b42839577 100644 --- a/docs/reference/esql/functions/kibana/definition/coalesce.json +++ b/docs/reference/esql/functions/kibana/definition/coalesce.json @@ -10,7 +10,7 @@ "name" : "first", "type" : "boolean", "optional" : false, - "description" : "Expression to evaluate" + "description" : "Expression to evaluate." } ], "variadic" : true, @@ -22,13 +22,13 @@ "name" : "first", "type" : "boolean", "optional" : false, - "description" : "Expression to evaluate" + "description" : "Expression to evaluate." }, { "name" : "rest", "type" : "boolean", "optional" : true, - "description" : "Other expression to evaluate" + "description" : "Other expression to evaluate." } ], "variadic" : true, @@ -40,7 +40,7 @@ "name" : "first", "type" : "integer", "optional" : false, - "description" : "Expression to evaluate" + "description" : "Expression to evaluate." } ], "variadic" : true, @@ -52,13 +52,13 @@ "name" : "first", "type" : "integer", "optional" : false, - "description" : "Expression to evaluate" + "description" : "Expression to evaluate." }, { "name" : "rest", "type" : "integer", "optional" : true, - "description" : "Other expression to evaluate" + "description" : "Other expression to evaluate." 
} ], "variadic" : true, @@ -70,7 +70,7 @@ "name" : "first", "type" : "keyword", "optional" : false, - "description" : "Expression to evaluate" + "description" : "Expression to evaluate." } ], "variadic" : true, @@ -82,13 +82,13 @@ "name" : "first", "type" : "keyword", "optional" : false, - "description" : "Expression to evaluate" + "description" : "Expression to evaluate." }, { "name" : "rest", "type" : "keyword", "optional" : true, - "description" : "Other expression to evaluate" + "description" : "Other expression to evaluate." } ], "variadic" : true, @@ -100,7 +100,7 @@ "name" : "first", "type" : "long", "optional" : false, - "description" : "Expression to evaluate" + "description" : "Expression to evaluate." } ], "variadic" : true, @@ -112,13 +112,13 @@ "name" : "first", "type" : "long", "optional" : false, - "description" : "Expression to evaluate" + "description" : "Expression to evaluate." }, { "name" : "rest", "type" : "long", "optional" : true, - "description" : "Other expression to evaluate" + "description" : "Other expression to evaluate." } ], "variadic" : true, @@ -130,7 +130,7 @@ "name" : "first", "type" : "text", "optional" : false, - "description" : "Expression to evaluate" + "description" : "Expression to evaluate." } ], "variadic" : true, @@ -142,13 +142,13 @@ "name" : "first", "type" : "text", "optional" : false, - "description" : "Expression to evaluate" + "description" : "Expression to evaluate." }, { "name" : "rest", "type" : "text", "optional" : true, - "description" : "Other expression to evaluate" + "description" : "Other expression to evaluate." 
} ], "variadic" : true, diff --git a/docs/reference/esql/functions/kibana/definition/greatest.json b/docs/reference/esql/functions/kibana/definition/greatest.json index f72f54708c6b1..15c9f58d32d3e 100644 --- a/docs/reference/esql/functions/kibana/definition/greatest.json +++ b/docs/reference/esql/functions/kibana/definition/greatest.json @@ -2,7 +2,8 @@ "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", "type" : "eval", "name" : "greatest", - "description" : "Returns the maximum value from many columns.", + "description" : "Returns the maximum value from multiple columns. This is similar to <>\nexcept it is intended to run on multiple columns at once.", + "note" : "When run on `keyword` or `text` fields, this returns the last string in alphabetical order. When run on `boolean` columns this will return `true` if any values are `true`.", "signatures" : [ { "params" : [ @@ -10,7 +11,7 @@ "name" : "first", "type" : "boolean", "optional" : false, - "description" : "" + "description" : "First of the columns to evaluate." } ], "variadic" : true, @@ -22,13 +23,13 @@ "name" : "first", "type" : "boolean", "optional" : false, - "description" : "" + "description" : "First of the columns to evaluate." }, { "name" : "rest", "type" : "boolean", "optional" : true, - "description" : "" + "description" : "The rest of the columns to evaluate." } ], "variadic" : true, @@ -40,13 +41,13 @@ "name" : "first", "type" : "double", "optional" : false, - "description" : "" + "description" : "First of the columns to evaluate." }, { "name" : "rest", "type" : "double", "optional" : true, - "description" : "" + "description" : "The rest of the columns to evaluate." } ], "variadic" : true, @@ -58,7 +59,7 @@ "name" : "first", "type" : "integer", "optional" : false, - "description" : "" + "description" : "First of the columns to evaluate." 
} ], "variadic" : true, @@ -70,13 +71,13 @@ "name" : "first", "type" : "integer", "optional" : false, - "description" : "" + "description" : "First of the columns to evaluate." }, { "name" : "rest", "type" : "integer", "optional" : true, - "description" : "" + "description" : "The rest of the columns to evaluate." } ], "variadic" : true, @@ -88,13 +89,13 @@ "name" : "first", "type" : "ip", "optional" : false, - "description" : "" + "description" : "First of the columns to evaluate." }, { "name" : "rest", "type" : "ip", "optional" : true, - "description" : "" + "description" : "The rest of the columns to evaluate." } ], "variadic" : true, @@ -106,7 +107,7 @@ "name" : "first", "type" : "keyword", "optional" : false, - "description" : "" + "description" : "First of the columns to evaluate." } ], "variadic" : true, @@ -118,13 +119,13 @@ "name" : "first", "type" : "keyword", "optional" : false, - "description" : "" + "description" : "First of the columns to evaluate." }, { "name" : "rest", "type" : "keyword", "optional" : true, - "description" : "" + "description" : "The rest of the columns to evaluate." } ], "variadic" : true, @@ -136,7 +137,7 @@ "name" : "first", "type" : "long", "optional" : false, - "description" : "" + "description" : "First of the columns to evaluate." } ], "variadic" : true, @@ -148,13 +149,13 @@ "name" : "first", "type" : "long", "optional" : false, - "description" : "" + "description" : "First of the columns to evaluate." }, { "name" : "rest", "type" : "long", "optional" : true, - "description" : "" + "description" : "The rest of the columns to evaluate." } ], "variadic" : true, @@ -166,7 +167,7 @@ "name" : "first", "type" : "text", "optional" : false, - "description" : "" + "description" : "First of the columns to evaluate." } ], "variadic" : true, @@ -178,13 +179,13 @@ "name" : "first", "type" : "text", "optional" : false, - "description" : "" + "description" : "First of the columns to evaluate." 
}, { "name" : "rest", "type" : "text", "optional" : true, - "description" : "" + "description" : "The rest of the columns to evaluate." } ], "variadic" : true, @@ -196,17 +197,20 @@ "name" : "first", "type" : "version", "optional" : false, - "description" : "" + "description" : "First of the columns to evaluate." }, { "name" : "rest", "type" : "version", "optional" : true, - "description" : "" + "description" : "The rest of the columns to evaluate." } ], "variadic" : true, "returnType" : "version" } + ], + "examples" : [ + "ROW a = 10, b = 20\n| EVAL g = GREATEST(a, b)" ] } diff --git a/docs/reference/esql/functions/kibana/definition/least.json b/docs/reference/esql/functions/kibana/definition/least.json index 66efedc0c9fe5..0b922ad6ad3c2 100644 --- a/docs/reference/esql/functions/kibana/definition/least.json +++ b/docs/reference/esql/functions/kibana/definition/least.json @@ -2,7 +2,7 @@ "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", "type" : "eval", "name" : "least", - "description" : "Returns the minimum value from many columns.", + "description" : "Returns the minimum value from multiple columns. This is similar to <> except it is intended to run on multiple columns at once.", "signatures" : [ { "params" : [ @@ -10,7 +10,7 @@ "name" : "first", "type" : "boolean", "optional" : false, - "description" : "" + "description" : "First of the columns to evaluate." } ], "variadic" : true, @@ -22,13 +22,13 @@ "name" : "first", "type" : "boolean", "optional" : false, - "description" : "" + "description" : "First of the columns to evaluate." }, { "name" : "rest", "type" : "boolean", "optional" : true, - "description" : "" + "description" : "The rest of the columns to evaluate." } ], "variadic" : true, @@ -40,13 +40,13 @@ "name" : "first", "type" : "double", "optional" : false, - "description" : "" + "description" : "First of the columns to evaluate." 
}, { "name" : "rest", "type" : "double", "optional" : true, - "description" : "" + "description" : "The rest of the columns to evaluate." } ], "variadic" : true, @@ -58,7 +58,7 @@ "name" : "first", "type" : "integer", "optional" : false, - "description" : "" + "description" : "First of the columns to evaluate." } ], "variadic" : true, @@ -70,13 +70,13 @@ "name" : "first", "type" : "integer", "optional" : false, - "description" : "" + "description" : "First of the columns to evaluate." }, { "name" : "rest", "type" : "integer", "optional" : true, - "description" : "" + "description" : "The rest of the columns to evaluate." } ], "variadic" : true, @@ -88,13 +88,13 @@ "name" : "first", "type" : "ip", "optional" : false, - "description" : "" + "description" : "First of the columns to evaluate." }, { "name" : "rest", "type" : "ip", "optional" : true, - "description" : "" + "description" : "The rest of the columns to evaluate." } ], "variadic" : true, @@ -106,7 +106,7 @@ "name" : "first", "type" : "keyword", "optional" : false, - "description" : "" + "description" : "First of the columns to evaluate." } ], "variadic" : true, @@ -118,13 +118,13 @@ "name" : "first", "type" : "keyword", "optional" : false, - "description" : "" + "description" : "First of the columns to evaluate." }, { "name" : "rest", "type" : "keyword", "optional" : true, - "description" : "" + "description" : "The rest of the columns to evaluate." } ], "variadic" : true, @@ -136,7 +136,7 @@ "name" : "first", "type" : "long", "optional" : false, - "description" : "" + "description" : "First of the columns to evaluate." } ], "variadic" : true, @@ -148,13 +148,13 @@ "name" : "first", "type" : "long", "optional" : false, - "description" : "" + "description" : "First of the columns to evaluate." }, { "name" : "rest", "type" : "long", "optional" : true, - "description" : "" + "description" : "The rest of the columns to evaluate." 
} ], "variadic" : true, @@ -166,7 +166,7 @@ "name" : "first", "type" : "text", "optional" : false, - "description" : "" + "description" : "First of the columns to evaluate." } ], "variadic" : true, @@ -178,13 +178,13 @@ "name" : "first", "type" : "text", "optional" : false, - "description" : "" + "description" : "First of the columns to evaluate." }, { "name" : "rest", "type" : "text", "optional" : true, - "description" : "" + "description" : "The rest of the columns to evaluate." } ], "variadic" : true, @@ -196,17 +196,20 @@ "name" : "first", "type" : "version", "optional" : false, - "description" : "" + "description" : "First of the columns to evaluate." }, { "name" : "rest", "type" : "version", "optional" : true, - "description" : "" + "description" : "The rest of the columns to evaluate." } ], "variadic" : true, "returnType" : "version" } + ], + "examples" : [ + "ROW a = 10, b = 20\n| EVAL l = LEAST(a, b)" ] } diff --git a/docs/reference/esql/functions/kibana/definition/st_contains.json b/docs/reference/esql/functions/kibana/definition/st_contains.json index f4f8003917908..1ef76e46f371a 100644 --- a/docs/reference/esql/functions/kibana/definition/st_contains.json +++ b/docs/reference/esql/functions/kibana/definition/st_contains.json @@ -2,7 +2,7 @@ "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", "type" : "eval", "name" : "st_contains", - "description" : "Returns whether the first geometry contains the second geometry.", + "description" : "Returns whether the first geometry contains the second geometry.\nThis is the inverse of the <> function.", "signatures" : [ { "params" : [ @@ -10,13 +10,13 @@ "name" : "geomA", "type" : "cartesian_point", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`." 
}, { "name" : "geomB", "type" : "cartesian_point", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`. The second parameter must also have the same coordinate system as the first. This means it is not possible to combine `geo_*` and `cartesian_*` parameters." } ], "variadic" : false, @@ -28,13 +28,13 @@ "name" : "geomA", "type" : "cartesian_point", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`." }, { "name" : "geomB", "type" : "cartesian_shape", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`. The second parameter must also have the same coordinate system as the first. This means it is not possible to combine `geo_*` and `cartesian_*` parameters." } ], "variadic" : false, @@ -46,13 +46,13 @@ "name" : "geomA", "type" : "cartesian_shape", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`." }, { "name" : "geomB", "type" : "cartesian_point", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`. The second parameter must also have the same coordinate system as the first. This means it is not possible to combine `geo_*` and `cartesian_*` parameters." 
} ], "variadic" : false, @@ -64,13 +64,13 @@ "name" : "geomA", "type" : "cartesian_shape", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`." }, { "name" : "geomB", "type" : "cartesian_shape", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`. The second parameter must also have the same coordinate system as the first. This means it is not possible to combine `geo_*` and `cartesian_*` parameters." } ], "variadic" : false, @@ -82,13 +82,13 @@ "name" : "geomA", "type" : "geo_point", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`." }, { "name" : "geomB", "type" : "geo_point", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`. The second parameter must also have the same coordinate system as the first. This means it is not possible to combine `geo_*` and `cartesian_*` parameters." } ], "variadic" : false, @@ -100,13 +100,13 @@ "name" : "geomA", "type" : "geo_point", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`." 
}, { "name" : "geomB", "type" : "geo_shape", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`. The second parameter must also have the same coordinate system as the first. This means it is not possible to combine `geo_*` and `cartesian_*` parameters." } ], "variadic" : false, @@ -118,13 +118,13 @@ "name" : "geomA", "type" : "geo_shape", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`." }, { "name" : "geomB", "type" : "geo_point", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`. The second parameter must also have the same coordinate system as the first. This means it is not possible to combine `geo_*` and `cartesian_*` parameters." } ], "variadic" : false, @@ -136,13 +136,13 @@ "name" : "geomA", "type" : "geo_shape", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`." }, { "name" : "geomB", "type" : "geo_shape", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`. The second parameter must also have the same coordinate system as the first. This means it is not possible to combine `geo_*` and `cartesian_*` parameters." 
} ], "variadic" : false, diff --git a/docs/reference/esql/functions/kibana/definition/st_disjoint.json b/docs/reference/esql/functions/kibana/definition/st_disjoint.json index 98647b63ff18f..e408a0f98fe6c 100644 --- a/docs/reference/esql/functions/kibana/definition/st_disjoint.json +++ b/docs/reference/esql/functions/kibana/definition/st_disjoint.json @@ -2,7 +2,7 @@ "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", "type" : "eval", "name" : "st_disjoint", - "description" : "Returns whether the two geometries or geometry columns are disjoint.", + "description" : "Returns whether the two geometries or geometry columns are disjoint.\nThis is the inverse of the <> function.\nIn mathematical terms: ST_Disjoint(A, B) ⇔ A ⋂ B = ∅", "signatures" : [ { "params" : [ @@ -10,13 +10,13 @@ "name" : "geomA", "type" : "cartesian_point", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`." }, { "name" : "geomB", "type" : "cartesian_point", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`. The second parameter must also have the same coordinate system as the first. This means it is not possible to combine `geo_*` and `cartesian_*` parameters." } ], "variadic" : false, @@ -28,13 +28,13 @@ "name" : "geomA", "type" : "cartesian_point", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`." 
}, { "name" : "geomB", "type" : "cartesian_shape", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`. The second parameter must also have the same coordinate system as the first. This means it is not possible to combine `geo_*` and `cartesian_*` parameters." } ], "variadic" : false, @@ -46,13 +46,13 @@ "name" : "geomA", "type" : "cartesian_shape", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`." }, { "name" : "geomB", "type" : "cartesian_point", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`. The second parameter must also have the same coordinate system as the first. This means it is not possible to combine `geo_*` and `cartesian_*` parameters." } ], "variadic" : false, @@ -64,13 +64,13 @@ "name" : "geomA", "type" : "cartesian_shape", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`." }, { "name" : "geomB", "type" : "cartesian_shape", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`. The second parameter must also have the same coordinate system as the first. This means it is not possible to combine `geo_*` and `cartesian_*` parameters." 
} ], "variadic" : false, @@ -82,13 +82,13 @@ "name" : "geomA", "type" : "geo_point", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`." }, { "name" : "geomB", "type" : "geo_point", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`. The second parameter must also have the same coordinate system as the first. This means it is not possible to combine `geo_*` and `cartesian_*` parameters." } ], "variadic" : false, @@ -100,13 +100,13 @@ "name" : "geomA", "type" : "geo_point", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`." }, { "name" : "geomB", "type" : "geo_shape", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`. The second parameter must also have the same coordinate system as the first. This means it is not possible to combine `geo_*` and `cartesian_*` parameters." } ], "variadic" : false, @@ -118,13 +118,13 @@ "name" : "geomA", "type" : "geo_shape", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`." 
}, { "name" : "geomB", "type" : "geo_point", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`. The second parameter must also have the same coordinate system as the first. This means it is not possible to combine `geo_*` and `cartesian_*` parameters." } ], "variadic" : false, @@ -136,13 +136,13 @@ "name" : "geomA", "type" : "geo_shape", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`." }, { "name" : "geomB", "type" : "geo_shape", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`. The second parameter must also have the same coordinate system as the first. This means it is not possible to combine `geo_*` and `cartesian_*` parameters." } ], "variadic" : false, diff --git a/docs/reference/esql/functions/kibana/definition/st_intersects.json b/docs/reference/esql/functions/kibana/definition/st_intersects.json index ba619fe57ecf5..2f9f255ab1870 100644 --- a/docs/reference/esql/functions/kibana/definition/st_intersects.json +++ b/docs/reference/esql/functions/kibana/definition/st_intersects.json @@ -2,7 +2,7 @@ "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
See ../README.md for how to regenerate it.", "type" : "eval", "name" : "st_intersects", - "description" : "Returns whether the two geometries or geometry columns intersect.", + "description" : "Returns true if two geometries intersect.\nThey intersect if they have any point in common, including their interior points\n(points along lines or within polygons).\nThis is the inverse of the <> function.\nIn mathematical terms: ST_Intersects(A, B) ⇔ A ⋂ B ≠ ∅", "signatures" : [ { "params" : [ @@ -10,13 +10,13 @@ "name" : "geomA", "type" : "cartesian_point", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`." }, { "name" : "geomB", "type" : "cartesian_point", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`. The second parameter must also have the same coordinate system as the first. This means it is not possible to combine `geo_*` and `cartesian_*` parameters." } ], "variadic" : false, @@ -28,13 +28,13 @@ "name" : "geomA", "type" : "cartesian_point", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`." }, { "name" : "geomB", "type" : "cartesian_shape", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`. The second parameter must also have the same coordinate system as the first. 
This means it is not possible to combine `geo_*` and `cartesian_*` parameters." } ], "variadic" : false, @@ -46,13 +46,13 @@ "name" : "geomA", "type" : "cartesian_shape", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`." }, { "name" : "geomB", "type" : "cartesian_point", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`. The second parameter must also have the same coordinate system as the first. This means it is not possible to combine `geo_*` and `cartesian_*` parameters." } ], "variadic" : false, @@ -64,13 +64,13 @@ "name" : "geomA", "type" : "cartesian_shape", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`." }, { "name" : "geomB", "type" : "cartesian_shape", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`. The second parameter must also have the same coordinate system as the first. This means it is not possible to combine `geo_*` and `cartesian_*` parameters." } ], "variadic" : false, @@ -82,13 +82,13 @@ "name" : "geomA", "type" : "geo_point", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`." 
}, { "name" : "geomB", "type" : "geo_point", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`. The second parameter must also have the same coordinate system as the first. This means it is not possible to combine `geo_*` and `cartesian_*` parameters." } ], "variadic" : false, @@ -100,13 +100,13 @@ "name" : "geomA", "type" : "geo_point", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`." }, { "name" : "geomB", "type" : "geo_shape", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`. The second parameter must also have the same coordinate system as the first. This means it is not possible to combine `geo_*` and `cartesian_*` parameters." } ], "variadic" : false, @@ -118,13 +118,13 @@ "name" : "geomA", "type" : "geo_shape", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`." }, { "name" : "geomB", "type" : "geo_point", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`. The second parameter must also have the same coordinate system as the first. This means it is not possible to combine `geo_*` and `cartesian_*` parameters." 
} ], "variadic" : false, @@ -136,13 +136,13 @@ "name" : "geomA", "type" : "geo_shape", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`." }, { "name" : "geomB", "type" : "geo_shape", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`. The second parameter must also have the same coordinate system as the first. This means it is not possible to combine `geo_*` and `cartesian_*` parameters." } ], "variadic" : false, diff --git a/docs/reference/esql/functions/kibana/definition/st_within.json b/docs/reference/esql/functions/kibana/definition/st_within.json index ee98337441ab7..e0cdf62fe0f98 100644 --- a/docs/reference/esql/functions/kibana/definition/st_within.json +++ b/docs/reference/esql/functions/kibana/definition/st_within.json @@ -2,7 +2,7 @@ "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", "type" : "eval", "name" : "st_within", - "description" : "Returns whether the first geometry is within the second geometry.", + "description" : "Returns whether the first geometry is within the second geometry.\nThis is the inverse of the <> function.", "signatures" : [ { "params" : [ @@ -10,13 +10,13 @@ "name" : "geomA", "type" : "cartesian_point", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`." 
}, { "name" : "geomB", "type" : "cartesian_point", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`. The second parameter must also have the same coordinate system as the first. This means it is not possible to combine `geo_*` and `cartesian_*` parameters." } ], "variadic" : false, @@ -28,13 +28,13 @@ "name" : "geomA", "type" : "cartesian_point", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`." }, { "name" : "geomB", "type" : "cartesian_shape", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`. The second parameter must also have the same coordinate system as the first. This means it is not possible to combine `geo_*` and `cartesian_*` parameters." } ], "variadic" : false, @@ -46,13 +46,13 @@ "name" : "geomA", "type" : "cartesian_shape", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`." }, { "name" : "geomB", "type" : "cartesian_point", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`. The second parameter must also have the same coordinate system as the first. This means it is not possible to combine `geo_*` and `cartesian_*` parameters." 
} ], "variadic" : false, @@ -64,13 +64,13 @@ "name" : "geomA", "type" : "cartesian_shape", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`." }, { "name" : "geomB", "type" : "cartesian_shape", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`. The second parameter must also have the same coordinate system as the first. This means it is not possible to combine `geo_*` and `cartesian_*` parameters." } ], "variadic" : false, @@ -82,13 +82,13 @@ "name" : "geomA", "type" : "geo_point", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`." }, { "name" : "geomB", "type" : "geo_point", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`. The second parameter must also have the same coordinate system as the first. This means it is not possible to combine `geo_*` and `cartesian_*` parameters." } ], "variadic" : false, @@ -100,13 +100,13 @@ "name" : "geomA", "type" : "geo_point", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`." 
}, { "name" : "geomB", "type" : "geo_shape", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`. The second parameter must also have the same coordinate system as the first. This means it is not possible to combine `geo_*` and `cartesian_*` parameters." } ], "variadic" : false, @@ -118,13 +118,13 @@ "name" : "geomA", "type" : "geo_shape", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`." }, { "name" : "geomB", "type" : "geo_point", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`. The second parameter must also have the same coordinate system as the first. This means it is not possible to combine `geo_*` and `cartesian_*` parameters." } ], "variadic" : false, @@ -136,13 +136,13 @@ "name" : "geomA", "type" : "geo_shape", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`." }, { "name" : "geomB", "type" : "geo_shape", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`. The second parameter must also have the same coordinate system as the first. This means it is not possible to combine `geo_*` and `cartesian_*` parameters." 
} ], "variadic" : false, diff --git a/docs/reference/esql/functions/kibana/definition/st_x.json b/docs/reference/esql/functions/kibana/definition/st_x.json index 57598b3470e11..c3554a2ee808b 100644 --- a/docs/reference/esql/functions/kibana/definition/st_x.json +++ b/docs/reference/esql/functions/kibana/definition/st_x.json @@ -2,7 +2,7 @@ "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", "type" : "eval", "name" : "st_x", - "description" : "Extracts the x-coordinate from a point geometry.", + "description" : "Extracts the `x` coordinate from the supplied point.\nIf the points is of type `geo_point` this is equivalent to extracting the `longitude` value.", "signatures" : [ { "params" : [ @@ -10,7 +10,7 @@ "name" : "point", "type" : "cartesian_point", "optional" : false, - "description" : "" + "description" : "Expression of type `geo_point` or `cartesian_point`. If `null`, the function returns `null`." } ], "variadic" : false, @@ -22,11 +22,14 @@ "name" : "point", "type" : "geo_point", "optional" : false, - "description" : "" + "description" : "Expression of type `geo_point` or `cartesian_point`. If `null`, the function returns `null`." } ], "variadic" : false, "returnType" : "double" } + ], + "examples" : [ + "ROW point = TO_GEOPOINT(\"POINT(42.97109629958868 14.7552534006536)\")\n| EVAL x = ST_X(point), y = ST_Y(point)" ] } diff --git a/docs/reference/esql/functions/kibana/definition/st_y.json b/docs/reference/esql/functions/kibana/definition/st_y.json index 0dacaa56bb8de..2966ae04f75e4 100644 --- a/docs/reference/esql/functions/kibana/definition/st_y.json +++ b/docs/reference/esql/functions/kibana/definition/st_y.json @@ -2,7 +2,7 @@ "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
See ../README.md for how to regenerate it.", "type" : "eval", "name" : "st_y", - "description" : "Extracts the y-coordinate from a point geometry.", + "description" : "Extracts the `y` coordinate from the supplied point.\nIf the points is of type `geo_point` this is equivalent to extracting the `latitude` value.", "signatures" : [ { "params" : [ @@ -10,7 +10,7 @@ "name" : "point", "type" : "cartesian_point", "optional" : false, - "description" : "" + "description" : "Expression of type `geo_point` or `cartesian_point`. If `null`, the function returns `null`." } ], "variadic" : false, @@ -22,11 +22,14 @@ "name" : "point", "type" : "geo_point", "optional" : false, - "description" : "" + "description" : "Expression of type `geo_point` or `cartesian_point`. If `null`, the function returns `null`." } ], "variadic" : false, "returnType" : "double" } + ], + "examples" : [ + "ROW point = TO_GEOPOINT(\"POINT(42.97109629958868 14.7552534006536)\")\n| EVAL x = ST_X(point), y = ST_Y(point)" ] } diff --git a/docs/reference/esql/functions/kibana/docs/case.md b/docs/reference/esql/functions/kibana/docs/case.md index e1494a5c2af8c..8bb31ee972759 100644 --- a/docs/reference/esql/functions/kibana/docs/case.md +++ b/docs/reference/esql/functions/kibana/docs/case.md @@ -3,6 +3,18 @@ This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../READ --> ### CASE -Accepts pairs of conditions and values. -The function returns the value that belongs to the first condition that evaluates to true. +Accepts pairs of conditions and values. The function returns the value that +belongs to the first condition that evaluates to `true`. +If the number of arguments is odd, the last argument is the default value which +is returned when no condition matches. If the number of arguments is even, and +no condition matches, the function returns `null`. 
+ +``` +FROM employees +| EVAL type = CASE( + languages <= 1, "monolingual", + languages <= 2, "bilingual", + "polyglot") +| KEEP emp_no, languages, type +``` diff --git a/docs/reference/esql/functions/kibana/docs/greatest.md b/docs/reference/esql/functions/kibana/docs/greatest.md index 3db0c9ed87aa5..4b3b4027381f8 100644 --- a/docs/reference/esql/functions/kibana/docs/greatest.md +++ b/docs/reference/esql/functions/kibana/docs/greatest.md @@ -3,5 +3,11 @@ This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../READ --> ### GREATEST -Returns the maximum value from many columns. +Returns the maximum value from multiple columns. This is similar to <> +except it is intended to run on multiple columns at once. +``` +ROW a = 10, b = 20 +| EVAL g = GREATEST(a, b) +``` +Note: When run on `keyword` or `text` fields, this returns the last string in alphabetical order. When run on `boolean` columns this will return `true` if any values are `true`. diff --git a/docs/reference/esql/functions/kibana/docs/least.md b/docs/reference/esql/functions/kibana/docs/least.md index ff2c19592c8e1..7bbbcf79bc374 100644 --- a/docs/reference/esql/functions/kibana/docs/least.md +++ b/docs/reference/esql/functions/kibana/docs/least.md @@ -3,5 +3,9 @@ This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../READ --> ### LEAST -Returns the minimum value from many columns. +Returns the minimum value from multiple columns. This is similar to <> except it is intended to run on multiple columns at once. +``` +ROW a = 10, b = 20 +| EVAL l = LEAST(a, b) +``` diff --git a/docs/reference/esql/functions/kibana/docs/st_contains.md b/docs/reference/esql/functions/kibana/docs/st_contains.md index 6e23bb9b0f116..99f3a19f9df41 100644 --- a/docs/reference/esql/functions/kibana/docs/st_contains.md +++ b/docs/reference/esql/functions/kibana/docs/st_contains.md @@ -4,6 +4,7 @@ This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
See ../READ ### ST_CONTAINS Returns whether the first geometry contains the second geometry. +This is the inverse of the <> function. ``` FROM airport_city_boundaries diff --git a/docs/reference/esql/functions/kibana/docs/st_disjoint.md b/docs/reference/esql/functions/kibana/docs/st_disjoint.md index 7cf66b168bd70..4b42954efa5c1 100644 --- a/docs/reference/esql/functions/kibana/docs/st_disjoint.md +++ b/docs/reference/esql/functions/kibana/docs/st_disjoint.md @@ -4,6 +4,8 @@ This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../READ ### ST_DISJOINT Returns whether the two geometries or geometry columns are disjoint. +This is the inverse of the <> function. +In mathematical terms: ST_Disjoint(A, B) ⇔ A ⋂ B = ∅ ``` FROM airport_city_boundaries diff --git a/docs/reference/esql/functions/kibana/docs/st_intersects.md b/docs/reference/esql/functions/kibana/docs/st_intersects.md index e4db33429dbe3..b0a58b3ab2357 100644 --- a/docs/reference/esql/functions/kibana/docs/st_intersects.md +++ b/docs/reference/esql/functions/kibana/docs/st_intersects.md @@ -3,7 +3,11 @@ This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../READ --> ### ST_INTERSECTS -Returns whether the two geometries or geometry columns intersect. +Returns true if two geometries intersect. +They intersect if they have any point in common, including their interior points +(points along lines or within polygons). +This is the inverse of the <> function. +In mathematical terms: ST_Intersects(A, B) ⇔ A ⋂ B ≠ ∅ ``` FROM airports diff --git a/docs/reference/esql/functions/kibana/docs/st_within.md b/docs/reference/esql/functions/kibana/docs/st_within.md index cbb3ae5ee9aca..9ef046e5006f6 100644 --- a/docs/reference/esql/functions/kibana/docs/st_within.md +++ b/docs/reference/esql/functions/kibana/docs/st_within.md @@ -4,6 +4,7 @@ This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
See ../READ ### ST_WITHIN Returns whether the first geometry is within the second geometry. +This is the inverse of the <> function. ``` FROM airport_city_boundaries diff --git a/docs/reference/esql/functions/kibana/docs/st_x.md b/docs/reference/esql/functions/kibana/docs/st_x.md index af2f4de1487cd..b113f19e1c76c 100644 --- a/docs/reference/esql/functions/kibana/docs/st_x.md +++ b/docs/reference/esql/functions/kibana/docs/st_x.md @@ -3,5 +3,10 @@ This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../READ --> ### ST_X -Extracts the x-coordinate from a point geometry. +Extracts the `x` coordinate from the supplied point. +If the points is of type `geo_point` this is equivalent to extracting the `longitude` value. +``` +ROW point = TO_GEOPOINT("POINT(42.97109629958868 14.7552534006536)") +| EVAL x = ST_X(point), y = ST_Y(point) +``` diff --git a/docs/reference/esql/functions/kibana/docs/st_y.md b/docs/reference/esql/functions/kibana/docs/st_y.md index 575a5bd3c7d33..db88c3ada63bb 100644 --- a/docs/reference/esql/functions/kibana/docs/st_y.md +++ b/docs/reference/esql/functions/kibana/docs/st_y.md @@ -3,5 +3,10 @@ This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../READ --> ### ST_Y -Extracts the y-coordinate from a point geometry. +Extracts the `y` coordinate from the supplied point. +If the points is of type `geo_point` this is equivalent to extracting the `latitude` value. 
+``` +ROW point = TO_GEOPOINT("POINT(42.97109629958868 14.7552534006536)") +| EVAL x = ST_X(point), y = ST_Y(point) +``` diff --git a/docs/reference/esql/functions/layout/case.asciidoc b/docs/reference/esql/functions/layout/case.asciidoc index 192e74522b8d3..edfc768dc7055 100644 --- a/docs/reference/esql/functions/layout/case.asciidoc +++ b/docs/reference/esql/functions/layout/case.asciidoc @@ -12,3 +12,4 @@ image::esql/functions/signature/case.svg[Embedded,opts=inline] include::../parameters/case.asciidoc[] include::../description/case.asciidoc[] include::../types/case.asciidoc[] +include::../examples/case.asciidoc[] diff --git a/docs/reference/esql/functions/layout/greatest.asciidoc b/docs/reference/esql/functions/layout/greatest.asciidoc index 1ff17f3c3adfe..fff9a32412947 100644 --- a/docs/reference/esql/functions/layout/greatest.asciidoc +++ b/docs/reference/esql/functions/layout/greatest.asciidoc @@ -12,3 +12,4 @@ image::esql/functions/signature/greatest.svg[Embedded,opts=inline] include::../parameters/greatest.asciidoc[] include::../description/greatest.asciidoc[] include::../types/greatest.asciidoc[] +include::../examples/greatest.asciidoc[] diff --git a/docs/reference/esql/functions/layout/least.asciidoc b/docs/reference/esql/functions/layout/least.asciidoc index a14a166c8bfe4..0daee9c181a65 100644 --- a/docs/reference/esql/functions/layout/least.asciidoc +++ b/docs/reference/esql/functions/layout/least.asciidoc @@ -12,3 +12,4 @@ image::esql/functions/signature/least.svg[Embedded,opts=inline] include::../parameters/least.asciidoc[] include::../description/least.asciidoc[] include::../types/least.asciidoc[] +include::../examples/least.asciidoc[] diff --git a/docs/reference/esql/functions/layout/st_x.asciidoc b/docs/reference/esql/functions/layout/st_x.asciidoc index ce3824aa157b1..2c2dc191a31a4 100644 --- a/docs/reference/esql/functions/layout/st_x.asciidoc +++ b/docs/reference/esql/functions/layout/st_x.asciidoc @@ -12,3 +12,4 @@ 
image::esql/functions/signature/st_x.svg[Embedded,opts=inline] include::../parameters/st_x.asciidoc[] include::../description/st_x.asciidoc[] include::../types/st_x.asciidoc[] +include::../examples/st_x.asciidoc[] diff --git a/docs/reference/esql/functions/layout/st_y.asciidoc b/docs/reference/esql/functions/layout/st_y.asciidoc index 702e9097ae689..0708465760bb3 100644 --- a/docs/reference/esql/functions/layout/st_y.asciidoc +++ b/docs/reference/esql/functions/layout/st_y.asciidoc @@ -12,3 +12,4 @@ image::esql/functions/signature/st_y.svg[Embedded,opts=inline] include::../parameters/st_y.asciidoc[] include::../description/st_y.asciidoc[] include::../types/st_y.asciidoc[] +include::../examples/st_y.asciidoc[] diff --git a/docs/reference/esql/functions/least.asciidoc b/docs/reference/esql/functions/least.asciidoc deleted file mode 100644 index 2860eb31090c4..0000000000000 --- a/docs/reference/esql/functions/least.asciidoc +++ /dev/null @@ -1,38 +0,0 @@ -[discrete] -[[esql-least]] -=== `LEAST` - -*Syntax* - -[.text-center] -image::esql/functions/signature/least.svg[Embedded,opts=inline] - -*Parameters* - -`first`:: -First of the columns to evaluate. - -`rest`:: -The rest of the columns to evaluate. - -*Description* - -Returns the minimum value from multiple columns. This is similar to -<> except it is intended to run on multiple columns at once. - -NOTE: When run on `keyword` or `text` fields, this returns the first string - in alphabetical order. When run on `boolean` columns this will return - `false` if any values are `false`. 
- -include::types/least.asciidoc[] - -*Example* - -[source.merge.styled,esql] ----- -include::{esql-specs}/math.csv-spec[tag=least] ----- -[%header.monospaced.styled,format=dsv,separator=|] -|=== -include::{esql-specs}/math.csv-spec[tag=least-result] -|=== diff --git a/docs/reference/esql/functions/parameters/case.asciidoc b/docs/reference/esql/functions/parameters/case.asciidoc index c3617b7c0e32c..ee6f7e499b3b3 100644 --- a/docs/reference/esql/functions/parameters/case.asciidoc +++ b/docs/reference/esql/functions/parameters/case.asciidoc @@ -3,7 +3,7 @@ *Parameters* `condition`:: - +A condition. `trueValue`:: - +The value that's returned when the corresponding condition is the first to evaluate to `true`. The default value is returned when no condition matches. diff --git a/docs/reference/esql/functions/parameters/coalesce.asciidoc b/docs/reference/esql/functions/parameters/coalesce.asciidoc index 9b62a2e7e0d87..e0860c5bc3030 100644 --- a/docs/reference/esql/functions/parameters/coalesce.asciidoc +++ b/docs/reference/esql/functions/parameters/coalesce.asciidoc @@ -3,7 +3,7 @@ *Parameters* `first`:: -Expression to evaluate +Expression to evaluate. `rest`:: -Other expression to evaluate +Other expression to evaluate. diff --git a/docs/reference/esql/functions/parameters/greatest.asciidoc b/docs/reference/esql/functions/parameters/greatest.asciidoc index 83ac29d0bf7c9..8d23101aba7f3 100644 --- a/docs/reference/esql/functions/parameters/greatest.asciidoc +++ b/docs/reference/esql/functions/parameters/greatest.asciidoc @@ -3,7 +3,7 @@ *Parameters* `first`:: - +First of the columns to evaluate. `rest`:: - +The rest of the columns to evaluate. 
diff --git a/docs/reference/esql/functions/parameters/least.asciidoc b/docs/reference/esql/functions/parameters/least.asciidoc index 83ac29d0bf7c9..8d23101aba7f3 100644 --- a/docs/reference/esql/functions/parameters/least.asciidoc +++ b/docs/reference/esql/functions/parameters/least.asciidoc @@ -3,7 +3,7 @@ *Parameters* `first`:: - +First of the columns to evaluate. `rest`:: - +The rest of the columns to evaluate. diff --git a/docs/reference/esql/functions/parameters/st_contains.asciidoc b/docs/reference/esql/functions/parameters/st_contains.asciidoc index e87a0d0eb94f0..2f969f0f3cf05 100644 --- a/docs/reference/esql/functions/parameters/st_contains.asciidoc +++ b/docs/reference/esql/functions/parameters/st_contains.asciidoc @@ -3,7 +3,7 @@ *Parameters* `geomA`:: -Geometry column name or variable of geometry type +Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`. `geomB`:: -Geometry column name or variable of geometry type +Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`. The second parameter must also have the same coordinate system as the first. This means it is not possible to combine `geo_*` and `cartesian_*` parameters. diff --git a/docs/reference/esql/functions/parameters/st_disjoint.asciidoc b/docs/reference/esql/functions/parameters/st_disjoint.asciidoc index e87a0d0eb94f0..2f969f0f3cf05 100644 --- a/docs/reference/esql/functions/parameters/st_disjoint.asciidoc +++ b/docs/reference/esql/functions/parameters/st_disjoint.asciidoc @@ -3,7 +3,7 @@ *Parameters* `geomA`:: -Geometry column name or variable of geometry type +Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`. `geomB`:: -Geometry column name or variable of geometry type +Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. 
If `null`, the function returns `null`. The second parameter must also have the same coordinate system as the first. This means it is not possible to combine `geo_*` and `cartesian_*` parameters. diff --git a/docs/reference/esql/functions/parameters/st_intersects.asciidoc b/docs/reference/esql/functions/parameters/st_intersects.asciidoc index e87a0d0eb94f0..2f969f0f3cf05 100644 --- a/docs/reference/esql/functions/parameters/st_intersects.asciidoc +++ b/docs/reference/esql/functions/parameters/st_intersects.asciidoc @@ -3,7 +3,7 @@ *Parameters* `geomA`:: -Geometry column name or variable of geometry type +Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`. `geomB`:: -Geometry column name or variable of geometry type +Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`. The second parameter must also have the same coordinate system as the first. This means it is not possible to combine `geo_*` and `cartesian_*` parameters. diff --git a/docs/reference/esql/functions/parameters/st_within.asciidoc b/docs/reference/esql/functions/parameters/st_within.asciidoc index e87a0d0eb94f0..2f969f0f3cf05 100644 --- a/docs/reference/esql/functions/parameters/st_within.asciidoc +++ b/docs/reference/esql/functions/parameters/st_within.asciidoc @@ -3,7 +3,7 @@ *Parameters* `geomA`:: -Geometry column name or variable of geometry type +Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`. `geomB`:: -Geometry column name or variable of geometry type +Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`. The second parameter must also have the same coordinate system as the first. This means it is not possible to combine `geo_*` and `cartesian_*` parameters. 
diff --git a/docs/reference/esql/functions/parameters/st_x.asciidoc b/docs/reference/esql/functions/parameters/st_x.asciidoc index 4e8e77dea1f86..b66bfc286a443 100644 --- a/docs/reference/esql/functions/parameters/st_x.asciidoc +++ b/docs/reference/esql/functions/parameters/st_x.asciidoc @@ -3,4 +3,4 @@ *Parameters* `point`:: - +Expression of type `geo_point` or `cartesian_point`. If `null`, the function returns `null`. diff --git a/docs/reference/esql/functions/parameters/st_y.asciidoc b/docs/reference/esql/functions/parameters/st_y.asciidoc index 4e8e77dea1f86..b66bfc286a443 100644 --- a/docs/reference/esql/functions/parameters/st_y.asciidoc +++ b/docs/reference/esql/functions/parameters/st_y.asciidoc @@ -3,4 +3,4 @@ *Parameters* `point`:: - +Expression of type `geo_point` or `cartesian_point`. If `null`, the function returns `null`. diff --git a/docs/reference/esql/functions/spatial-functions.asciidoc b/docs/reference/esql/functions/spatial-functions.asciidoc index b6d178ddd624d..d143681fcf2f2 100644 --- a/docs/reference/esql/functions/spatial-functions.asciidoc +++ b/docs/reference/esql/functions/spatial-functions.asciidoc @@ -16,9 +16,9 @@ * experimental:[] <> // end::spatial_list[] -include::st_intersects.asciidoc[] -include::st_disjoint.asciidoc[] -include::st_contains.asciidoc[] -include::st_within.asciidoc[] -include::st_x.asciidoc[] -include::st_y.asciidoc[] +include::layout/st_intersects.asciidoc[] +include::layout/st_disjoint.asciidoc[] +include::layout/st_contains.asciidoc[] +include::layout/st_within.asciidoc[] +include::layout/st_x.asciidoc[] +include::layout/st_y.asciidoc[] diff --git a/docs/reference/esql/functions/st_contains.asciidoc b/docs/reference/esql/functions/st_contains.asciidoc deleted file mode 100644 index 110c4fe4ca9ec..0000000000000 --- a/docs/reference/esql/functions/st_contains.asciidoc +++ /dev/null @@ -1,26 +0,0 @@ -[discrete] -[[esql-st_contains]] -=== `ST_CONTAINS` - -experimental::[] - -*Syntax* - -[.text-center] 
-image::esql/functions/signature/st_contains.svg[Embedded,opts=inline] - -*Parameters* - -`geomA`:: -Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`. - -`geomB`:: -Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`. -The second parameter must also have the same coordinate system as the first. -This means it is not possible to combine `geo_*` and `cartesian_*` parameters. - -include::description/st_contains.asciidoc[] -This is the inverse of the <> function. - -include::types/st_contains.asciidoc[] -include::examples/st_contains.asciidoc[] diff --git a/docs/reference/esql/functions/st_disjoint.asciidoc b/docs/reference/esql/functions/st_disjoint.asciidoc deleted file mode 100644 index db89ca186a0ff..0000000000000 --- a/docs/reference/esql/functions/st_disjoint.asciidoc +++ /dev/null @@ -1,27 +0,0 @@ -[discrete] -[[esql-st_disjoint]] -=== `ST_DISJOINT` - -experimental::[] - -*Syntax* - -[.text-center] -image::esql/functions/signature/st_disjoint.svg[Embedded,opts=inline] - -*Parameters* - -`geomA`:: -Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`. - -`geomB`:: -Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`. -The second parameter must also have the same coordinate system as the first. -This means it is not possible to combine `geo_*` and `cartesian_*` parameters. - -include::description/st_disjoint.asciidoc[] -This is the inverse of the <> function. 
-In mathematical terms: ST_Disjoint(A, B) ⇔ A ⋂ B = ∅ - -include::types/st_disjoint.asciidoc[] -include::examples/st_disjoint.asciidoc[] diff --git a/docs/reference/esql/functions/st_intersects.asciidoc b/docs/reference/esql/functions/st_intersects.asciidoc deleted file mode 100644 index d75a7f3a50e0f..0000000000000 --- a/docs/reference/esql/functions/st_intersects.asciidoc +++ /dev/null @@ -1,31 +0,0 @@ -[discrete] -[[esql-st_intersects]] -=== `ST_INTERSECTS` - -experimental::[] - -*Syntax* - -[.text-center] -image::esql/functions/signature/st_intersects.svg[Embedded,opts=inline] - -*Parameters* - -`geomA`:: -Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`. - -`geomB`:: -Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`. -The second parameter must also have the same coordinate system as the first. -This means it is not possible to combine `geo_*` and `cartesian_*` parameters. - -*Description* - -Returns true if two geometries intersect. -They intersect if they have any point in common, including their interior points -(points along lines or within polygons). -This is the inverse of the <> function. -In mathematical terms: ST_Intersects(A, B) ⇔ A ⋂ B ≠ ∅ - -include::types/st_intersects.asciidoc[] -include::examples/st_intersects.asciidoc[] diff --git a/docs/reference/esql/functions/st_within.asciidoc b/docs/reference/esql/functions/st_within.asciidoc deleted file mode 100644 index 0f0190a9de638..0000000000000 --- a/docs/reference/esql/functions/st_within.asciidoc +++ /dev/null @@ -1,26 +0,0 @@ -[discrete] -[[esql-st_within]] -=== `ST_WITHIN` - -experimental::[] - -*Syntax* - -[.text-center] -image::esql/functions/signature/st_within.svg[Embedded,opts=inline] - -*Parameters* - -`geomA`:: -Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`. 
- -`geomB`:: -Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`. -The second parameter must also have the same coordinate system as the first. -This means it is not possible to combine `geo_*` and `cartesian_*` parameters. - -include::description/st_within.asciidoc[] -This is the inverse of the <> function. - -include::types/st_within.asciidoc[] -include::examples/st_within.asciidoc[] diff --git a/docs/reference/esql/functions/st_x.asciidoc b/docs/reference/esql/functions/st_x.asciidoc deleted file mode 100644 index eec48894b5150..0000000000000 --- a/docs/reference/esql/functions/st_x.asciidoc +++ /dev/null @@ -1,33 +0,0 @@ -[discrete] -[[esql-st_x]] -=== `ST_X` - -experimental::[] - -*Syntax* - -[.text-center] -image::esql/functions/signature/st_x.svg[Embedded,opts=inline] - -*Parameters* - -`point`:: -Expression of type `geo_point` or `cartesian_point`. If `null`, the function returns `null`. - -*Description* - -Extracts the `x` coordinate from the supplied point. -If the points is of type `geo_point` this is equivalent to extracting the `longitude` value. - -include::types/st_x.asciidoc[] - -*Example* - -[source.merge.styled,esql] ----- -include::{esql-specs}/spatial.csv-spec[tag=st_x_y] ----- -[%header.monospaced.styled,format=dsv,separator=|] -|=== -include::{esql-specs}/spatial.csv-spec[tag=st_x_y-result] -|=== diff --git a/docs/reference/esql/functions/st_y.asciidoc b/docs/reference/esql/functions/st_y.asciidoc deleted file mode 100644 index 8fc7281e395d2..0000000000000 --- a/docs/reference/esql/functions/st_y.asciidoc +++ /dev/null @@ -1,33 +0,0 @@ -[discrete] -[[esql-st_y]] -=== `ST_Y` - -experimental::[] - -*Syntax* - -[.text-center] -image::esql/functions/signature/st_y.svg[Embedded,opts=inline] - -*Parameters* - -`point`:: -Expression of type `geo_point` or `cartesian_point`. If `null`, the function returns `null`. 
- -*Description* - -Extracts the `y` coordinate from the supplied point. -If the points is of type `geo_point` this is equivalent to extracting the `latitude` value. - -include::types/st_y.asciidoc[] - -*Example* - -[source.merge.styled,esql] ----- -include::{esql-specs}/spatial.csv-spec[tag=st_x_y] ----- -[%header.monospaced.styled,format=dsv,separator=|] -|=== -include::{esql-specs}/spatial.csv-spec[tag=st_x_y-result] -|=== diff --git a/docs/reference/esql/functions/types/case.asciidoc b/docs/reference/esql/functions/types/case.asciidoc index e7d627ab915a1..85e4193b5bf2f 100644 --- a/docs/reference/esql/functions/types/case.asciidoc +++ b/docs/reference/esql/functions/types/case.asciidoc @@ -5,5 +5,15 @@ [%header.monospaced.styled,format=dsv,separator=|] |=== condition | trueValue | result -keyword +boolean | boolean | boolean +boolean | cartesian_point | cartesian_point +boolean | datetime | datetime +boolean | double | double +boolean | geo_point | geo_point +boolean | integer | integer +boolean | ip | ip +boolean | long | long +boolean | text | text +boolean | unsigned_long | unsigned_long +boolean | version | version |=== diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/meta.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/meta.csv-spec index 1d523640731d7..bd52d3b26b336 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/meta.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/meta.csv-spec @@ -28,8 +28,8 @@ double e() "boolean ends_with(str:keyword|text, suffix:keyword|text)" "double|integer|long|unsigned_long floor(number:double|integer|long|unsigned_long)" "keyword from_base64(string:keyword|text)" -"integer|long|double|boolean|keyword|text|ip|version greatest(first:integer|long|double|boolean|keyword|text|ip|version, ?rest...:integer|long|double|boolean|keyword|text|ip|version)" -"integer|long|double|boolean|keyword|text|ip|version 
least(first:integer|long|double|boolean|keyword|text|ip|version, ?rest...:integer|long|double|boolean|keyword|text|ip|version)" +"boolean|double|integer|ip|keyword|long|text|version greatest(first:boolean|double|integer|ip|keyword|long|text|version, ?rest...:boolean|double|integer|ip|keyword|long|text|version)" +"boolean|double|integer|ip|keyword|long|text|version least(first:boolean|double|integer|ip|keyword|long|text|version, ?rest...:boolean|double|integer|ip|keyword|long|text|version)" "keyword left(string:keyword|text, length:integer)" "integer length(string:keyword|text)" "integer locate(string:keyword|text, substring:keyword|text, ?start:integer)" @@ -123,10 +123,10 @@ atan2 |[y_coordinate, x_coordinate] |["double|integer|long|unsign avg |number |"double|integer|long" |[""] bin |[field, buckets, from, to] |["integer|long|double|date", "integer|double|date_period|time_duration", "integer|long|double|date", "integer|long|double|date"] |[Numeric or date expression from which to derive buckets., Target number of buckets., Start of the range. Can be a number or a date expressed as a string., End of the range. Can be a number or a date expressed as a string.] bucket |[field, buckets, from, to] |["integer|long|double|date", "integer|double|date_period|time_duration", "integer|long|double|date", "integer|long|double|date"] |[Numeric or date expression from which to derive buckets., Target number of buckets., Start of the range. Can be a number or a date expressed as a string., End of the range. Can be a number or a date expressed as a string.] -case |[condition, trueValue] |[boolean, "boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|unsigned_long|version"] |["", ""] +case |[condition, trueValue] |[boolean, "boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|unsigned_long|version"] |[A condition., The value that's returned when the corresponding condition is the first to evaluate to `true`. 
The default value is returned when no condition matches.] ceil |number |"double|integer|long|unsigned_long" |Numeric expression. If `null`, the function returns `null`. cidr_match |[ip, blockX] |[ip, "keyword|text"] |[IP address of type `ip` (both IPv4 and IPv6 are supported)., CIDR block to test the IP against.] -coalesce |first |"boolean|text|integer|keyword|long" |Expression to evaluate +coalesce |first |"boolean|text|integer|keyword|long" |Expression to evaluate. concat |[string1, string2] |["keyword|text", "keyword|text"] |[Strings to concatenate., Strings to concatenate.] cos |angle |"double|integer|long|unsigned_long" |An angle, in radians. If `null`, the function returns `null`. cosh |angle |"double|integer|long|unsigned_long" |An angle, in radians. If `null`, the function returns `null`. @@ -141,8 +141,8 @@ e |null |null ends_with |[str, suffix] |["keyword|text", "keyword|text"] |[String expression. If `null`\, the function returns `null`., String expression. If `null`\, the function returns `null`.] floor |number |"double|integer|long|unsigned_long" |Numeric expression. If `null`, the function returns `null`. from_base64 |string |"keyword|text" |A base64 string. -greatest |first |"integer|long|double|boolean|keyword|text|ip|version" |[""] -least |first |"integer|long|double|boolean|keyword|text|ip|version" |[""] +greatest |first |"boolean|double|integer|ip|keyword|long|text|version" |First of the columns to evaluate. +least |first |"boolean|double|integer|ip|keyword|long|text|version" |First of the columns to evaluate. left |[string, length] |["keyword|text", integer] |[The string from which to return a substring., The number of characters to return.] length |string |"keyword|text" |String expression. If `null`, the function returns `null`. 
locate |[string, substring, start] |["keyword|text", "keyword|text", "integer"] |[An input string, A substring to locate in the input string, The start index] @@ -180,12 +180,12 @@ sinh |angle |"double|integer|long|unsigne split |[string, delim] |["keyword|text", "keyword|text"] |[String expression. If `null`\, the function returns `null`., Delimiter. Only single byte delimiters are currently supported.] sqrt |number |"double|integer|long|unsigned_long" |"Numeric expression. If `null`, the function returns `null`." st_centroid_ag|field |"geo_point|cartesian_point" |[""] -st_contains |[geomA, geomB] |["geo_point|cartesian_point|geo_shape|cartesian_shape", "geo_point|cartesian_point|geo_shape|cartesian_shape"] |[Geometry column name or variable of geometry type, Geometry column name or variable of geometry type] -st_disjoint |[geomA, geomB] |["geo_point|cartesian_point|geo_shape|cartesian_shape", "geo_point|cartesian_point|geo_shape|cartesian_shape"] |[Geometry column name or variable of geometry type, Geometry column name or variable of geometry type] -st_intersects |[geomA, geomB] |["geo_point|cartesian_point|geo_shape|cartesian_shape", "geo_point|cartesian_point|geo_shape|cartesian_shape"] |[Geometry column name or variable of geometry type, Geometry column name or variable of geometry type] -st_within |[geomA, geomB] |["geo_point|cartesian_point|geo_shape|cartesian_shape", "geo_point|cartesian_point|geo_shape|cartesian_shape"] |[Geometry column name or variable of geometry type, Geometry column name or variable of geometry type] -st_x |point |"geo_point|cartesian_point" |[""] -st_y |point |"geo_point|cartesian_point" |[""] +st_contains |[geomA, geomB] |["geo_point|cartesian_point|geo_shape|cartesian_shape", "geo_point|cartesian_point|geo_shape|cartesian_shape"] |[Expression of type `geo_point`\, `cartesian_point`\, `geo_shape` or `cartesian_shape`. 
If `null`\, the function returns `null`., Expression of type `geo_point`\, `cartesian_point`\, `geo_shape` or `cartesian_shape`. If `null`\, the function returns `null`. The second parameter must also have the same coordinate system as the first. This means it is not possible to combine `geo_*` and `cartesian_*` parameters.] +st_disjoint |[geomA, geomB] |["geo_point|cartesian_point|geo_shape|cartesian_shape", "geo_point|cartesian_point|geo_shape|cartesian_shape"] |[Expression of type `geo_point`\, `cartesian_point`\, `geo_shape` or `cartesian_shape`. If `null`\, the function returns `null`., Expression of type `geo_point`\, `cartesian_point`\, `geo_shape` or `cartesian_shape`. If `null`\, the function returns `null`. The second parameter must also have the same coordinate system as the first. This means it is not possible to combine `geo_*` and `cartesian_*` parameters.] +st_intersects |[geomA, geomB] |["geo_point|cartesian_point|geo_shape|cartesian_shape", "geo_point|cartesian_point|geo_shape|cartesian_shape"] |[Expression of type `geo_point`\, `cartesian_point`\, `geo_shape` or `cartesian_shape`. If `null`\, the function returns `null`., Expression of type `geo_point`\, `cartesian_point`\, `geo_shape` or `cartesian_shape`. If `null`\, the function returns `null`. The second parameter must also have the same coordinate system as the first. This means it is not possible to combine `geo_*` and `cartesian_*` parameters.] +st_within |[geomA, geomB] |["geo_point|cartesian_point|geo_shape|cartesian_shape", "geo_point|cartesian_point|geo_shape|cartesian_shape"] |[Expression of type `geo_point`\, `cartesian_point`\, `geo_shape` or `cartesian_shape`. If `null`\, the function returns `null`., Expression of type `geo_point`\, `cartesian_point`\, `geo_shape` or `cartesian_shape`. If `null`\, the function returns `null`. The second parameter must also have the same coordinate system as the first. This means it is not possible to combine `geo_*` and `cartesian_*` parameters.] 
+st_x |point |"geo_point|cartesian_point" |Expression of type `geo_point` or `cartesian_point`. If `null`, the function returns `null`. +st_y |point |"geo_point|cartesian_point" |Expression of type `geo_point` or `cartesian_point`. If `null`, the function returns `null`. starts_with |[str, prefix] |["keyword|text", "keyword|text"] |[String expression. If `null`\, the function returns `null`., String expression. If `null`\, the function returns `null`.] substring |[string, start, length] |["keyword|text", integer, integer] |[String expression. If `null`\, the function returns `null`., Start position., Length of the substring from the start position. Optional; if omitted\, all positions after `start` are returned.] sum |number |"double|integer|long" |[""] @@ -237,7 +237,7 @@ atan2 |The {wikipedia}/Atan2[angle] between the positive x-axis and the avg |The average of a numeric field. bin |Creates groups of values - buckets - out of a datetime or numeric input. The size of the buckets can either be provided directly, or chosen based on a recommended count and values range. bucket |Creates groups of values - buckets - out of a datetime or numeric input. The size of the buckets can either be provided directly, or chosen based on a recommended count and values range. -case |Accepts pairs of conditions and values. The function returns the value that belongs to the first condition that evaluates to true. +case |Accepts pairs of conditions and values. The function returns the value that belongs to the first condition that evaluates to `true`. If the number of arguments is odd, the last argument is the default value which is returned when no condition matches. If the number of arguments is even, and no condition matches, the function returns `null`. ceil |Round a number up to the nearest integer. cidr_match |Returns true if the provided IP is contained in one of the provided CIDR blocks. coalesce |Returns the first of its arguments that is not null. 
If all arguments are null, it returns `null`. @@ -255,8 +255,8 @@ e |Returns {wikipedia}/E_(mathematical_constant)[Euler's number]. ends_with |Returns a boolean that indicates whether a keyword string ends with another string. floor |Round a number down to the nearest integer. from_base64 |Decode a base64 string. -greatest |Returns the maximum value from many columns. -least |Returns the minimum value from many columns. +greatest |Returns the maximum value from multiple columns. This is similar to <> except it is intended to run on multiple columns at once. +least |Returns the minimum value from multiple columns. This is similar to <> except it is intended to run on multiple columns at once. left |Returns the substring that extracts 'length' chars from 'string' starting from the left. length |Returns the character length of a string. locate |Returns an integer that indicates the position of a keyword substring within another string @@ -294,12 +294,12 @@ sinh |Returns the {wikipedia}/Hyperbolic_functions[hyperbolic sine] of split |Split a single valued string into multiple strings. sqrt |Returns the square root of a number. The input can be any numeric value, the return value is always a double. Square roots of negative numbers and infinites are null. st_centroid_ag|The centroid of a spatial field. -st_contains |Returns whether the first geometry contains the second geometry. -st_disjoint |Returns whether the two geometries or geometry columns are disjoint. -st_intersects |Returns whether the two geometries or geometry columns intersect. -st_within |Returns whether the first geometry is within the second geometry. -st_x |Extracts the x-coordinate from a point geometry. -st_y |Extracts the y-coordinate from a point geometry. +st_contains |Returns whether the first geometry contains the second geometry. This is the inverse of the <> function. +st_disjoint |Returns whether the two geometries or geometry columns are disjoint. This is the inverse of the <> function. 
In mathematical terms: ST_Disjoint(A, B) ⇔ A ⋂ B = ∅ +st_intersects |Returns true if two geometries intersect. They intersect if they have any point in common, including their interior points (points along lines or within polygons). This is the inverse of the <> function. In mathematical terms: ST_Intersects(A, B) ⇔ A ⋂ B ≠ ∅ +st_within |Returns whether the first geometry is within the second geometry. This is the inverse of the <> function. +st_x |Extracts the `x` coordinate from the supplied point. If the points is of type `geo_point` this is equivalent to extracting the `longitude` value. +st_y |Extracts the `y` coordinate from the supplied point. If the points is of type `geo_point` this is equivalent to extracting the `latitude` value. starts_with |Returns a boolean that indicates whether a keyword string starts with another string. substring |Returns a substring of a string, specified by a start position and an optional length sum |The sum of a numeric field. @@ -370,8 +370,8 @@ e |double ends_with |boolean |[false, false] |false |false floor |"double|integer|long|unsigned_long" |false |false |false from_base64 |keyword |false |false |false -greatest |"integer|long|double|boolean|keyword|text|ip|version" |false |true |false -least |"integer|long|double|boolean|keyword|text|ip|version" |false |true |false +greatest |"boolean|double|integer|ip|keyword|long|text|version" |false |true |false +least |"boolean|double|integer|ip|keyword|long|text|version" |false |true |false left |keyword |[false, false] |false |false length |integer |false |false |false locate |integer |[false, false, true] |false |false diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Case.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Case.java index f00e69ddaabe4..1018a03762cce 100644 --- 
a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Case.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Case.java @@ -16,6 +16,7 @@ import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator; import org.elasticsearch.core.Releasable; import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Example; import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.esql.expression.function.scalar.EsqlScalarFunction; @@ -59,12 +60,28 @@ record Condition(Expression condition, Expression value) {} "unsigned_long", "version" }, description = """ - Accepts pairs of conditions and values. - The function returns the value that belongs to the first condition that evaluates to true.""" + Accepts pairs of conditions and values. The function returns the value that + belongs to the first condition that evaluates to `true`. + + If the number of arguments is odd, the last argument is the default value which + is returned when no condition matches. 
If the number of arguments is even, and + no condition matches, the function returns `null`.""", + examples = { + @Example(description = "Determine whether employees are monolingual, bilingual, or polyglot:", file = "docs", tag = "case"), + @Example( + description = "Calculate the total connection success rate based on log messages:", + file = "conditional", + tag = "docsCaseSuccessRate" + ), + @Example( + description = "Calculate an hourly error rate as a percentage of the total number of log messages:", + file = "conditional", + tag = "docsCaseHourlyErrorRate" + ) } ) public Case( Source source, - @Param(name = "condition", type = { "boolean" }) Expression first, + @Param(name = "condition", type = { "boolean" }, description = "A condition.") Expression first, @Param( name = "trueValue", type = { @@ -79,7 +96,9 @@ public Case( "long", "text", "unsigned_long", - "version" } + "version" }, + description = "The value that's returned when the corresponding condition is the first to evaluate to `true`. " + + "The default value is returned when no condition matches." 
) List rest ) { super(source, Stream.concat(Stream.of(first), rest.stream()).toList()); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Greatest.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Greatest.java index 1794258402aed..b1c761a50d8be 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Greatest.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Greatest.java @@ -11,6 +11,7 @@ import org.elasticsearch.compute.ann.Evaluator; import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator; import org.elasticsearch.xpack.esql.EsqlIllegalArgumentException; +import org.elasticsearch.xpack.esql.expression.function.Example; import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.esql.expression.function.scalar.EsqlScalarFunction; @@ -37,14 +38,26 @@ public class Greatest extends EsqlScalarFunction implements OptionalArgument { private DataType dataType; @FunctionInfo( - returnType = { "integer", "long", "double", "boolean", "keyword", "text", "ip", "version" }, - description = "Returns the maximum value from many columns." + returnType = { "boolean", "double", "integer", "ip", "keyword", "long", "text", "version" }, + description = "Returns the maximum value from multiple columns. This is similar to <>\n" + + "except it is intended to run on multiple columns at once.", + note = "When run on `keyword` or `text` fields, this returns the last string in alphabetical order. 
" + + "When run on `boolean` columns this will return `true` if any values are `true`.", + examples = @Example(file = "math", tag = "greatest") ) public Greatest( Source source, - @Param(name = "first", type = { "integer", "long", "double", "boolean", "keyword", "text", "ip", "version" }) Expression first, - @Param(name = "rest", type = { "integer", "long", "double", "boolean", "keyword", "text", "ip", "version" }, optional = true) List< - Expression> rest + @Param( + name = "first", + type = { "boolean", "double", "integer", "ip", "keyword", "long", "text", "version" }, + description = "First of the columns to evaluate." + ) Expression first, + @Param( + name = "rest", + type = { "boolean", "double", "integer", "ip", "keyword", "long", "text", "version" }, + description = "The rest of the columns to evaluate.", + optional = true + ) List rest ) { super(source, Stream.concat(Stream.of(first), rest.stream()).toList()); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Least.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Least.java index 6b4208f7b3d85..8b68196af68a5 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Least.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Least.java @@ -11,6 +11,7 @@ import org.elasticsearch.compute.ann.Evaluator; import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator; import org.elasticsearch.xpack.esql.EsqlIllegalArgumentException; +import org.elasticsearch.xpack.esql.expression.function.Example; import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.esql.expression.function.scalar.EsqlScalarFunction; @@ -37,14 +38,24 @@ public class Least extends 
EsqlScalarFunction implements OptionalArgument { private DataType dataType; @FunctionInfo( - returnType = { "integer", "long", "double", "boolean", "keyword", "text", "ip", "version" }, - description = "Returns the minimum value from many columns." + returnType = { "boolean", "double", "integer", "ip", "keyword", "long", "text", "version" }, + description = "Returns the minimum value from multiple columns. " + + "This is similar to <> except it is intended to run on multiple columns at once.", + examples = @Example(file = "math", tag = "least") ) public Least( Source source, - @Param(name = "first", type = { "integer", "long", "double", "boolean", "keyword", "text", "ip", "version" }) Expression first, - @Param(name = "rest", type = { "integer", "long", "double", "boolean", "keyword", "text", "ip", "version" }, optional = true) List< - Expression> rest + @Param( + name = "first", + type = { "boolean", "double", "integer", "ip", "keyword", "long", "text", "version" }, + description = "First of the columns to evaluate." 
+ ) Expression first, + @Param( + name = "rest", + type = { "boolean", "double", "integer", "ip", "keyword", "long", "text", "version" }, + description = "The rest of the columns to evaluate.", + optional = true + ) List rest ) { super(source, Stream.concat(Stream.of(first), rest.stream()).toList()); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/Coalesce.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/Coalesce.java index 98dc0c7e83d93..8c39a29f67f95 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/Coalesce.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/Coalesce.java @@ -52,12 +52,12 @@ public Coalesce( @Param( name = "first", type = { "boolean", "text", "integer", "keyword", "long" }, - description = "Expression to evaluate" + description = "Expression to evaluate." 
) Expression first, @Param( name = "rest", type = { "boolean", "text", "integer", "keyword", "long" }, - description = "Other expression to evaluate", + description = "Other expression to evaluate.", optional = true ) List rest ) { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContains.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContains.java index 279f31e34ac95..31e0a86a1e3ec 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContains.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContains.java @@ -111,7 +111,9 @@ private boolean pointRelatesGeometries(long encoded, Component2D[] rightComponen @FunctionInfo( returnType = { "boolean" }, - description = "Returns whether the first geometry contains the second geometry.", + description = """ + Returns whether the first geometry contains the second geometry. + This is the inverse of the <> function.""", examples = @Example(file = "spatial_shapes", tag = "st_contains-airport_city_boundaries") ) public SpatialContains( @@ -119,12 +121,16 @@ public SpatialContains( @Param( name = "geomA", type = { "geo_point", "cartesian_point", "geo_shape", "cartesian_shape" }, - description = "Geometry column name or variable of geometry type" + description = "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. " + + "If `null`, the function returns `null`." ) Expression left, @Param( name = "geomB", type = { "geo_point", "cartesian_point", "geo_shape", "cartesian_shape" }, - description = "Geometry column name or variable of geometry type" + description = "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. 
" + + "If `null`, the function returns `null`.\n" + + "The second parameter must also have the same coordinate system as the first.\n" + + "This means it is not possible to combine `geo_*` and `cartesian_*` parameters." ) Expression right ) { this(source, left, right, false, false); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjoint.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjoint.java index 7833f93b6270f..7b85ebfea5ee2 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjoint.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjoint.java @@ -65,7 +65,10 @@ public class SpatialDisjoint extends SpatialRelatesFunction { @FunctionInfo( returnType = { "boolean" }, - description = "Returns whether the two geometries or geometry columns are disjoint.", + description = """ + Returns whether the two geometries or geometry columns are disjoint. + This is the inverse of the <> function. + In mathematical terms: ST_Disjoint(A, B) ⇔ A ⋂ B = ∅""", examples = @Example(file = "spatial_shapes", tag = "st_disjoint-airport_city_boundaries") ) public SpatialDisjoint( @@ -73,12 +76,16 @@ public SpatialDisjoint( @Param( name = "geomA", type = { "geo_point", "cartesian_point", "geo_shape", "cartesian_shape" }, - description = "Geometry column name or variable of geometry type" + description = "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. " + + "If `null`, the function returns `null`." ) Expression left, @Param( name = "geomB", type = { "geo_point", "cartesian_point", "geo_shape", "cartesian_shape" }, - description = "Geometry column name or variable of geometry type" + description = "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. 
" + + "If `null`, the function returns `null`.\n" + + "The second parameter must also have the same coordinate system as the first.\n" + + "This means it is not possible to combine `geo_*` and `cartesian_*` parameters." ) Expression right ) { this(source, left, right, false, false); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersects.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersects.java index 810e3206ada73..462f3bce1aeea 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersects.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersects.java @@ -63,22 +63,27 @@ public class SpatialIntersects extends SpatialRelatesFunction { new CartesianShapeIndexer("ST_Intersects") ); - @FunctionInfo( - returnType = { "boolean" }, - description = "Returns whether the two geometries or geometry columns intersect.", - examples = @Example(file = "spatial", tag = "st_intersects-airports") - ) + @FunctionInfo(returnType = { "boolean" }, description = """ + Returns true if two geometries intersect. + They intersect if they have any point in common, including their interior points + (points along lines or within polygons). + This is the inverse of the <> function. + In mathematical terms: ST_Intersects(A, B) ⇔ A ⋂ B ≠ ∅""", examples = @Example(file = "spatial", tag = "st_intersects-airports")) public SpatialIntersects( Source source, @Param( name = "geomA", type = { "geo_point", "cartesian_point", "geo_shape", "cartesian_shape" }, - description = "Geometry column name or variable of geometry type" + description = "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. " + + "If `null`, the function returns `null`." 
) Expression left, @Param( name = "geomB", type = { "geo_point", "cartesian_point", "geo_shape", "cartesian_shape" }, - description = "Geometry column name or variable of geometry type" + description = "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. " + + "If `null`, the function returns `null`.\n" + + "The second parameter must also have the same coordinate system as the first.\n" + + "This means it is not possible to combine `geo_*` and `cartesian_*` parameters." ) Expression right ) { this(source, left, right, false, false); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithin.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithin.java index ca285ca07e27b..1eaf1e31e5430 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithin.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithin.java @@ -66,7 +66,9 @@ public class SpatialWithin extends SpatialRelatesFunction implements SurrogateEx @FunctionInfo( returnType = { "boolean" }, - description = "Returns whether the first geometry is within the second geometry.", + description = """ + Returns whether the first geometry is within the second geometry. + This is the inverse of the <> function.""", examples = @Example(file = "spatial_shapes", tag = "st_within-airport_city_boundaries") ) public SpatialWithin( @@ -74,12 +76,16 @@ public SpatialWithin( @Param( name = "geomA", type = { "geo_point", "cartesian_point", "geo_shape", "cartesian_shape" }, - description = "Geometry column name or variable of geometry type" + description = "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. " + + "If `null`, the function returns `null`." 
) Expression left, @Param( name = "geomB", type = { "geo_point", "cartesian_point", "geo_shape", "cartesian_shape" }, - description = "Geometry column name or variable of geometry type" + description = "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. " + + "If `null`, the function returns `null`.\n" + + "The second parameter must also have the same coordinate system as the first.\n" + + "This means it is not possible to combine `geo_*` and `cartesian_*` parameters." ) Expression right ) { this(source, left, right, false, false); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StX.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StX.java index f86be9290fed1..f5ff933babc9a 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StX.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StX.java @@ -10,6 +10,7 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.compute.ann.ConvertEvaluator; import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.xpack.esql.expression.function.Example; import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.esql.expression.function.scalar.UnaryScalarFunction; @@ -34,8 +35,20 @@ * Alternatively it is well described in PostGIS documentation at PostGIS:ST_X. 
*/ public class StX extends UnaryScalarFunction { - @FunctionInfo(returnType = "double", description = "Extracts the x-coordinate from a point geometry.") - public StX(Source source, @Param(name = "point", type = { "geo_point", "cartesian_point" }) Expression field) { + @FunctionInfo( + returnType = "double", + description = "Extracts the `x` coordinate from the supplied point.\n" + + "If the points is of type `geo_point` this is equivalent to extracting the `longitude` value.", + examples = @Example(file = "spatial", tag = "st_x_y") + ) + public StX( + Source source, + @Param( + name = "point", + type = { "geo_point", "cartesian_point" }, + description = "Expression of type `geo_point` or `cartesian_point`. If `null`, the function returns `null`." + ) Expression field + ) { super(source, field); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StY.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StY.java index 759c23c73374a..48de97da4befb 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StY.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StY.java @@ -10,6 +10,7 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.compute.ann.ConvertEvaluator; import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.xpack.esql.expression.function.Example; import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.esql.expression.function.scalar.UnaryScalarFunction; @@ -34,8 +35,20 @@ * Alternatively it is well described in PostGIS documentation at PostGIS:ST_Y. 
*/ public class StY extends UnaryScalarFunction { - @FunctionInfo(returnType = "double", description = "Extracts the y-coordinate from a point geometry.") - public StY(Source source, @Param(name = "point", type = { "geo_point", "cartesian_point" }) Expression field) { + @FunctionInfo( + returnType = "double", + description = "Extracts the `y` coordinate from the supplied point.\n" + + "If the points is of type `geo_point` this is equivalent to extracting the `latitude` value.", + examples = @Example(file = "spatial", tag = "st_x_y") + ) + public StY( + Source source, + @Param( + name = "point", + type = { "geo_point", "cartesian_point" }, + description = "Expression of type `geo_point` or `cartesian_point`. If `null`, the function returns `null`." + ) Expression field + ) { super(source, field); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/CaseTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/CaseTests.java index 90692d5b19df1..ee23cf00a37a0 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/CaseTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/CaseTests.java @@ -25,6 +25,7 @@ import org.elasticsearch.xpack.ql.type.DataType; import org.elasticsearch.xpack.ql.type.DataTypes; +import java.math.BigInteger; import java.util.List; import java.util.function.Function; import java.util.function.Supplier; @@ -32,6 +33,7 @@ import static org.elasticsearch.compute.data.BlockUtils.toJavaObject; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.nullValue; public class CaseTests extends AbstractFunctionTestCase { @@ -44,26 +46,173 @@ public CaseTests(@Name("TestCase") Supplier testCaseS */ @ParametersFactory public static Iterable parameters() { - return 
parameterSuppliersFromTypedData(List.of(new TestCaseSupplier("basics", () -> { - List typedData = List.of( - new TestCaseSupplier.TypedData(true, DataTypes.BOOLEAN, "cond"), - new TestCaseSupplier.TypedData(new BytesRef("a"), DataTypes.KEYWORD, "a"), - new TestCaseSupplier.TypedData(new BytesRef("b"), DataTypes.KEYWORD, "b") - ); - return new TestCaseSupplier.TestCase( - typedData, - "CaseEvaluator[resultType=BYTES_REF, conditions=[ConditionEvaluator[condition=Attribute[channel=0], " - + "value=Attribute[channel=1]]], elseVal=Attribute[channel=2]]", - DataTypes.KEYWORD, - equalTo(new BytesRef("a")) - ); - }))); + return parameterSuppliersFromTypedData( + List.of(new TestCaseSupplier("keyword", List.of(DataTypes.BOOLEAN, DataTypes.KEYWORD, DataTypes.KEYWORD), () -> { + List typedData = List.of( + new TestCaseSupplier.TypedData(true, DataTypes.BOOLEAN, "cond"), + new TestCaseSupplier.TypedData(new BytesRef("a"), DataTypes.KEYWORD, "a"), + new TestCaseSupplier.TypedData(new BytesRef("b"), DataTypes.KEYWORD, "b") + ); + return new TestCaseSupplier.TestCase( + typedData, + "CaseEvaluator[resultType=BYTES_REF, conditions=[ConditionEvaluator[condition=Attribute[channel=0], " + + "value=Attribute[channel=1]]], elseVal=Attribute[channel=2]]", + DataTypes.KEYWORD, + equalTo(new BytesRef("a")) + ); + }), new TestCaseSupplier("text", List.of(DataTypes.BOOLEAN, DataTypes.TEXT), () -> { + List typedData = List.of( + new TestCaseSupplier.TypedData(false, DataTypes.BOOLEAN, "cond"), + new TestCaseSupplier.TypedData(new BytesRef("a"), DataTypes.TEXT, "trueValue") + ); + return new TestCaseSupplier.TestCase( + typedData, + "CaseEvaluator[resultType=BYTES_REF, conditions=[ConditionEvaluator[condition=Attribute[channel=0], " + + "value=Attribute[channel=1]]], elseVal=LiteralsEvaluator[lit=null]]", + DataTypes.TEXT, + nullValue() + ); + }), new TestCaseSupplier("boolean", List.of(DataTypes.BOOLEAN, DataTypes.BOOLEAN), () -> { + List typedData = List.of( + new 
TestCaseSupplier.TypedData(false, DataTypes.BOOLEAN, "cond"), + new TestCaseSupplier.TypedData(false, DataTypes.BOOLEAN, "trueValue") + ); + return new TestCaseSupplier.TestCase( + typedData, + "CaseEvaluator[resultType=BOOLEAN, conditions=[ConditionEvaluator[condition=Attribute[channel=0], " + + "value=Attribute[channel=1]]], elseVal=LiteralsEvaluator[lit=null]]", + DataTypes.BOOLEAN, + nullValue() + ); + }), new TestCaseSupplier("date", List.of(DataTypes.BOOLEAN, DataTypes.DATETIME), () -> { + long value = randomNonNegativeLong(); + List typedData = List.of( + new TestCaseSupplier.TypedData(true, DataTypes.BOOLEAN, "cond"), + new TestCaseSupplier.TypedData(value, DataTypes.DATETIME, "trueValue") + ); + return new TestCaseSupplier.TestCase( + typedData, + "CaseEvaluator[resultType=LONG, conditions=[ConditionEvaluator[condition=Attribute[channel=0], " + + "value=Attribute[channel=1]]], elseVal=LiteralsEvaluator[lit=null]]", + DataTypes.DATETIME, + equalTo(value) + ); + }), new TestCaseSupplier("double", List.of(DataTypes.BOOLEAN, DataTypes.DOUBLE), () -> { + double value = randomDouble(); + List typedData = List.of( + new TestCaseSupplier.TypedData(true, DataTypes.BOOLEAN, "cond"), + new TestCaseSupplier.TypedData(value, DataTypes.DOUBLE, "trueValue") + ); + return new TestCaseSupplier.TestCase( + typedData, + "CaseEvaluator[resultType=DOUBLE, conditions=[ConditionEvaluator[condition=Attribute[channel=0], " + + "value=Attribute[channel=1]]], elseVal=LiteralsEvaluator[lit=null]]", + DataTypes.DOUBLE, + equalTo(value) + ); + }), new TestCaseSupplier("integer", List.of(DataTypes.BOOLEAN, DataTypes.INTEGER), () -> { + int value = randomInt(); + List typedData = List.of( + new TestCaseSupplier.TypedData(false, DataTypes.BOOLEAN, "cond"), + new TestCaseSupplier.TypedData(value, DataTypes.INTEGER, "trueValue") + ); + return new TestCaseSupplier.TestCase( + typedData, + "CaseEvaluator[resultType=INT, conditions=[ConditionEvaluator[condition=Attribute[channel=0], " + + 
"value=Attribute[channel=1]]], elseVal=LiteralsEvaluator[lit=null]]", + DataTypes.INTEGER, + nullValue() + ); + }), new TestCaseSupplier("long", List.of(DataTypes.BOOLEAN, DataTypes.LONG), () -> { + long value = randomLong(); + List typedData = List.of( + new TestCaseSupplier.TypedData(false, DataTypes.BOOLEAN, "cond"), + new TestCaseSupplier.TypedData(value, DataTypes.LONG, "trueValue") + ); + return new TestCaseSupplier.TestCase( + typedData, + "CaseEvaluator[resultType=LONG, conditions=[ConditionEvaluator[condition=Attribute[channel=0], " + + "value=Attribute[channel=1]]], elseVal=LiteralsEvaluator[lit=null]]", + DataTypes.LONG, + nullValue() + ); + }), new TestCaseSupplier("unsigned_long", List.of(DataTypes.BOOLEAN, DataTypes.UNSIGNED_LONG), () -> { + BigInteger value = randomUnsignedLongBetween(BigInteger.ZERO, UNSIGNED_LONG_MAX); + List typedData = List.of( + new TestCaseSupplier.TypedData(true, DataTypes.BOOLEAN, "cond"), + new TestCaseSupplier.TypedData(value, DataTypes.UNSIGNED_LONG, "trueValue") + ); + return new TestCaseSupplier.TestCase( + typedData, + "CaseEvaluator[resultType=LONG, conditions=[ConditionEvaluator[condition=Attribute[channel=0], " + + "value=Attribute[channel=1]]], elseVal=LiteralsEvaluator[lit=null]]", + DataTypes.UNSIGNED_LONG, + equalTo(value) + ); + }), new TestCaseSupplier("ip", List.of(DataTypes.BOOLEAN, DataTypes.IP), () -> { + BytesRef value = (BytesRef) randomLiteral(DataTypes.IP).value(); + List typedData = List.of( + new TestCaseSupplier.TypedData(true, DataTypes.BOOLEAN, "cond"), + new TestCaseSupplier.TypedData(value, DataTypes.IP, "trueValue") + ); + return new TestCaseSupplier.TestCase( + typedData, + "CaseEvaluator[resultType=BYTES_REF, conditions=[ConditionEvaluator[condition=Attribute[channel=0], " + + "value=Attribute[channel=1]]], elseVal=LiteralsEvaluator[lit=null]]", + DataTypes.IP, + equalTo(value) + ); + }), new TestCaseSupplier("version", List.of(DataTypes.BOOLEAN, DataTypes.VERSION), () -> { + BytesRef value = 
(BytesRef) randomLiteral(DataTypes.VERSION).value(); + List typedData = List.of( + new TestCaseSupplier.TypedData(false, DataTypes.BOOLEAN, "cond"), + new TestCaseSupplier.TypedData(value, DataTypes.VERSION, "trueValue") + ); + return new TestCaseSupplier.TestCase( + typedData, + "CaseEvaluator[resultType=BYTES_REF, conditions=[ConditionEvaluator[condition=Attribute[channel=0], " + + "value=Attribute[channel=1]]], elseVal=LiteralsEvaluator[lit=null]]", + DataTypes.VERSION, + nullValue() + ); + }), new TestCaseSupplier("cartesian_point", List.of(DataTypes.BOOLEAN, EsqlDataTypes.CARTESIAN_POINT), () -> { + BytesRef value = (BytesRef) randomLiteral(EsqlDataTypes.CARTESIAN_POINT).value(); + List typedData = List.of( + new TestCaseSupplier.TypedData(false, DataTypes.BOOLEAN, "cond"), + new TestCaseSupplier.TypedData(value, EsqlDataTypes.CARTESIAN_POINT, "trueValue") + ); + return new TestCaseSupplier.TestCase( + typedData, + "CaseEvaluator[resultType=BYTES_REF, conditions=[ConditionEvaluator[condition=Attribute[channel=0], " + + "value=Attribute[channel=1]]], elseVal=LiteralsEvaluator[lit=null]]", + EsqlDataTypes.CARTESIAN_POINT, + nullValue() + ); + }), new TestCaseSupplier("geo_point", List.of(DataTypes.BOOLEAN, EsqlDataTypes.GEO_POINT), () -> { + BytesRef value = (BytesRef) randomLiteral(EsqlDataTypes.GEO_POINT).value(); + List typedData = List.of( + new TestCaseSupplier.TypedData(true, DataTypes.BOOLEAN, "cond"), + new TestCaseSupplier.TypedData(value, EsqlDataTypes.GEO_POINT, "trueValue") + ); + return new TestCaseSupplier.TestCase( + typedData, + "CaseEvaluator[resultType=BYTES_REF, conditions=[ConditionEvaluator[condition=Attribute[channel=0], " + + "value=Attribute[channel=1]]], elseVal=LiteralsEvaluator[lit=null]]", + EsqlDataTypes.GEO_POINT, + equalTo(value) + ); + })) + ); } @Override protected void assertSimpleWithNulls(List data, Block value, int nullBlock) { if (nullBlock == 0) { - assertThat(toJavaObject(value, 0), equalTo(data.get(2))); + if (data.size() 
== 2) { + assertThat(value.isNull(0), equalTo(true)); + } else if (data.size() > 2) { + assertThat(toJavaObject(value, 0), equalTo(data.get(2))); + } return; } if (((Boolean) data.get(0)).booleanValue()) { @@ -77,7 +226,11 @@ protected void assertSimpleWithNulls(List data, Block value, int nullBlo if (nullBlock == 2) { super.assertSimpleWithNulls(data, value, nullBlock); } else { - assertThat(toJavaObject(value, 0), equalTo(data.get(2))); + if (data.size() > 2) { + assertThat(toJavaObject(value, 0), equalTo(data.get(2))); + } else { + super.assertSimpleWithNulls(data, value, nullBlock); + } } } From cb435733a0b1655e2cb47748c02a89c86314f0e9 Mon Sep 17 00:00:00 2001 From: Nik Everett Date: Fri, 10 May 2024 14:53:56 -0400 Subject: [PATCH 117/117] ESQL: Reduce number of ignored tests (#108471) This reduces the number of skipped tests for functions, dropping the number from 130754 to 3226. This doesn't buy us much more coverage, but it doesn't really take any more time so it's probably ok. It'd be nice to have some understanding of each of the skipped tests, 130k is way too many to put in your head. The actual test change is: when you are need to build an evaluator but can't because you'll get a type error, just assert that you get a type error and let the test finish. This is nearly as fast as just bailing, and it gets us to the point where we can start reasoning about the skipped tests. 
--- .../function/AbstractFunctionTestCase.java | 61 ++++++++++++------- 1 file changed, 40 insertions(+), 21 deletions(-) diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java index 1fd7cfe368068..4867b0c62a18c 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java @@ -166,14 +166,17 @@ protected static Iterable parameterSuppliersFromTypedData(List allNullsMatcher() { } private void testEvaluateBlock(BlockFactory inputBlockFactory, DriverContext context, boolean insertNulls) { + Expression expression = randomBoolean() ? buildDeepCopyOfFieldExpression(testCase) : buildFieldExpression(testCase); + if (testCase.getExpectedTypeError() != null) { + assertTypeResolutionFailure(expression); + return; + } assumeTrue("Can't build evaluator", testCase.canBuildEvaluator()); assumeTrue("Expected type must be representable to build an evaluator", EsqlDataTypes.isRepresentable(testCase.expectedType())); - assumeTrue("Must build evaluator to test sending it blocks", testCase.getExpectedTypeError() == null); - boolean readFloating = randomBoolean(); int positions = between(1, 1024); List data = testCase.getData(); Page onePositionPage = row(testCase.getDataValues()); @@ -401,7 +403,6 @@ private void testEvaluateBlock(BlockFactory inputBlockFactory, DriverContext con } b++; } - Expression expression = readFloating ? 
buildDeepCopyOfFieldExpression(testCase) : buildFieldExpression(testCase); try ( ExpressionEvaluator eval = evaluator(expression).get(context); Block block = eval.eval(new Page(positions, manyPositionsBlocks)) @@ -427,13 +428,15 @@ private void testEvaluateBlock(BlockFactory inputBlockFactory, DriverContext con } } - // TODO cranky time - public void testSimpleWithNulls() { // TODO replace this with nulls inserted into the test case like anyNullIsNull + Expression expression = buildFieldExpression(testCase); + if (testCase.getExpectedTypeError() != null) { + assertTypeResolutionFailure(expression); + return; + } assumeTrue("Can't build evaluator", testCase.canBuildEvaluator()); - assumeTrue("Nothing to do if a type error", testCase.getExpectedTypeError() == null); List simpleData = testCase.getDataValues(); - try (EvalOperator.ExpressionEvaluator eval = evaluator(buildFieldExpression(testCase)).get(driverContext())) { + try (EvalOperator.ExpressionEvaluator eval = evaluator(expression).get(driverContext())) { BlockFactory blockFactory = TestBlockFactory.getNonBreakingInstance(); Block[] orig = BlockUtils.fromListRow(blockFactory, simpleData); for (int i = 0; i < orig.length; i++) { @@ -472,12 +475,16 @@ protected void assertSimpleWithNulls(List data, Block value, int nullBlo } public final void testEvaluateInManyThreads() throws ExecutionException, InterruptedException { + Expression expression = buildFieldExpression(testCase); + if (testCase.getExpectedTypeError() != null) { + assertTypeResolutionFailure(expression); + return; + } assumeTrue("Can't build evaluator", testCase.canBuildEvaluator()); assumeTrue("Expected type must be representable to build an evaluator", EsqlDataTypes.isRepresentable(testCase.expectedType())); - assumeTrue("Nothing to do if a type error", testCase.getExpectedTypeError() == null); int count = 10_000; int threads = 5; - var evalSupplier = evaluator(buildFieldExpression(testCase)); + var evalSupplier = evaluator(expression); 
ExecutorService exec = Executors.newFixedThreadPool(threads); try { List> futures = new ArrayList<>(); @@ -504,17 +511,25 @@ public final void testEvaluateInManyThreads() throws ExecutionException, Interru } public final void testEvaluatorToString() { + Expression expression = buildFieldExpression(testCase); + if (testCase.getExpectedTypeError() != null) { + assertTypeResolutionFailure(expression); + return; + } assumeTrue("Can't build evaluator", testCase.canBuildEvaluator()); - assumeTrue("Nothing to do if a type error", testCase.getExpectedTypeError() == null); - var factory = evaluator(buildFieldExpression(testCase)); + var factory = evaluator(expression); try (ExpressionEvaluator ev = factory.get(driverContext())) { assertThat(ev.toString(), testCase.evaluatorToString()); } } public final void testFactoryToString() { + Expression expression = buildFieldExpression(testCase); + if (testCase.getExpectedTypeError() != null) { + assertTypeResolutionFailure(expression); + return; + } assumeTrue("Can't build evaluator", testCase.canBuildEvaluator()); - assumeTrue("Nothing to do if a type error", testCase.getExpectedTypeError() == null); var factory = evaluator(buildFieldExpression(testCase)); assertThat(factory.toString(), testCase.evaluatorToString()); } @@ -522,8 +537,7 @@ public final void testFactoryToString() { public final void testFold() { Expression expression = buildLiteralExpression(testCase); if (testCase.getExpectedTypeError() != null) { - assertTrue(expression.typeResolved().unresolved()); - assertThat(expression.typeResolved().message(), equalTo(testCase.getExpectedTypeError())); + assertTypeResolutionFailure(expression); return; } assertFalse(expression.typeResolved().unresolved()); @@ -1115,6 +1129,11 @@ protected static DataType[] representableNonSpatialTypes() { return representableNonSpatial().toArray(DataType[]::new); } + protected final void assertTypeResolutionFailure(Expression expression) { + assertTrue("expected unresolved", 
expression.typeResolved().unresolved()); + assertThat(expression.typeResolved().message(), equalTo(testCase.getExpectedTypeError())); + } + @AfterClass public static void renderSignature() throws IOException { if (System.getProperty("generateDocs") == null) {