From 4286d645c9016eb8f42c4e5e7874e6a586c740c0 Mon Sep 17 00:00:00 2001 From: Max Hniebergall Date: Tue, 7 May 2024 12:53:32 -0400 Subject: [PATCH 01/15] mixed cluster tests are executable --- .../internal/RestrictedBuildApiService.java | 1 + x-pack/plugin/inference/build.gradle | 1 + .../inference/qa/mixed-cluster/build.gradle | 46 +++ .../xpack/inference/qa/mixed/Clusters.java | 28 ++ .../qa/mixed/InferenceBaseRestTest.java | 262 ++++++++++++++++++ .../qa/mixed/MixedClusterInferenceSpecIT.java | 93 +++++++ .../inference/qa/mixed/EsqlClientYamlIT.java | 119 ++++++++ 7 files changed, 550 insertions(+) create mode 100644 x-pack/plugin/inference/qa/mixed-cluster/build.gradle create mode 100644 x-pack/plugin/inference/qa/mixed-cluster/src/javaRestTest/java/org/elasticsearch/xpack/inference/qa/mixed/Clusters.java create mode 100644 x-pack/plugin/inference/qa/mixed-cluster/src/javaRestTest/java/org/elasticsearch/xpack/inference/qa/mixed/InferenceBaseRestTest.java create mode 100644 x-pack/plugin/inference/qa/mixed-cluster/src/javaRestTest/java/org/elasticsearch/xpack/inference/qa/mixed/MixedClusterInferenceSpecIT.java create mode 100644 x-pack/plugin/inference/qa/mixed-cluster/src/yamlRestTest/java/org/elasticsearch/xpack/inference/qa/mixed/EsqlClientYamlIT.java diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/RestrictedBuildApiService.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/RestrictedBuildApiService.java index 23afcab7bec7c..9d9f76c9ba185 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/RestrictedBuildApiService.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/RestrictedBuildApiService.java @@ -128,6 +128,7 @@ private static ListMultimap, String> createLegacyRestTestBasePluginUsag map.put(LegacyRestTestBasePlugin.class, ":x-pack:plugin:ilm:qa:multi-cluster"); map.put(LegacyRestTestBasePlugin.class, ":x-pack:plugin:ilm:qa:multi-node"); 
map.put(LegacyRestTestBasePlugin.class, ":x-pack:plugin:ilm:qa:rest"); + map.put(LegacyRestTestBasePlugin.class, ":x-pack:plugin:inference:qa:mixed-cluster"); map.put(LegacyRestTestBasePlugin.class, ":x-pack:plugin:ml:qa:basic-multi-node"); map.put(LegacyRestTestBasePlugin.class, ":x-pack:plugin:ml:qa:disabled"); map.put(LegacyRestTestBasePlugin.class, ":x-pack:plugin:ml:qa:ml-with-security"); diff --git a/x-pack/plugin/inference/build.gradle b/x-pack/plugin/inference/build.gradle index 0aef8601ffcc6..804ac2d591d0c 100644 --- a/x-pack/plugin/inference/build.gradle +++ b/x-pack/plugin/inference/build.gradle @@ -27,6 +27,7 @@ base { dependencies { implementation project(path: ':libs:elasticsearch-logging') + testImplementation project(path: ':test:framework') compileOnly project(":server") compileOnly project(path: xpackModule('core')) testImplementation(testArtifact(project(xpackModule('core')))) diff --git a/x-pack/plugin/inference/qa/mixed-cluster/build.gradle b/x-pack/plugin/inference/qa/mixed-cluster/build.gradle new file mode 100644 index 0000000000000..216b147b94ea9 --- /dev/null +++ b/x-pack/plugin/inference/qa/mixed-cluster/build.gradle @@ -0,0 +1,46 @@ +import org.elasticsearch.gradle.Version +import org.elasticsearch.gradle.VersionProperties +import org.elasticsearch.gradle.util.GradleUtils +import org.elasticsearch.gradle.internal.info.BuildParams +import org.elasticsearch.gradle.testclusters.StandaloneRestIntegTestTask + +apply plugin: 'elasticsearch.internal-java-rest-test' +apply plugin: 'elasticsearch.internal-test-artifact-base' +apply plugin: 'elasticsearch.bwc-test' + +restResources { + restApi { + include '_common', 'bulk', 'get', 'indices', 'inference', 'xpack', 'cluster' + } + restTests { + includeXpack 'inference' + } +} + +dependencies { + testImplementation project(path: ':x-pack:plugin:inference:qa:inference-service-tests') + compileOnly project(':x-pack:plugin:core') + javaRestTestImplementation(testArtifact(project(xpackModule('core')))) + 
javaRestTestImplementation project(path: xpackModule('inference')) + clusterPlugins project( + ':x-pack:plugin:inference:qa:test-service-plugin' + ) +} + +// inference is available in 8.11 or later +def supportedVersion = bwcVersion -> { + return bwcVersion.onOrAfter(Version.fromString("8.11.0")); +} + +BuildParams.bwcVersions.withWireCompatible(supportedVersion) { bwcVersion, baseName -> + def javaRestTest = tasks.register("v${bwcVersion}#javaRestTest", StandaloneRestIntegTestTask) { + usesBwcDistribution(bwcVersion) + systemProperty("tests.old_cluster_version", bwcVersion) + maxParallelForks = 1 + } + + tasks.register(bwcTaskName(bwcVersion)) { + dependsOn javaRestTest + } +} + diff --git a/x-pack/plugin/inference/qa/mixed-cluster/src/javaRestTest/java/org/elasticsearch/xpack/inference/qa/mixed/Clusters.java b/x-pack/plugin/inference/qa/mixed-cluster/src/javaRestTest/java/org/elasticsearch/xpack/inference/qa/mixed/Clusters.java new file mode 100644 index 0000000000000..13e9ea164103f --- /dev/null +++ b/x-pack/plugin/inference/qa/mixed-cluster/src/javaRestTest/java/org/elasticsearch/xpack/inference/qa/mixed/Clusters.java @@ -0,0 +1,28 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.qa.mixed; + +import org.elasticsearch.test.cluster.ElasticsearchCluster; +import org.elasticsearch.test.cluster.local.distribution.DistributionType; +import org.elasticsearch.test.cluster.util.Version; + +public class Clusters { + public static ElasticsearchCluster mixedVersionCluster() { + Version oldVersion = Version.fromString(System.getProperty("tests.old_cluster_version")); + return ElasticsearchCluster.local() + .distribution(DistributionType.DEFAULT) + .withNode(node -> node.version(oldVersion)) + .withNode(node -> node.version(Version.CURRENT)) + .withNode(node -> node.version(oldVersion)) + .withNode(node -> node.version(Version.CURRENT)) + .setting("xpack.security.enabled", "false") + .setting("xpack.license.self_generated.type", "trial") + .setting("cluster.routing.rebalance.enable", "none") // disable relocation until we have retry in ESQL + .build(); + } +} diff --git a/x-pack/plugin/inference/qa/mixed-cluster/src/javaRestTest/java/org/elasticsearch/xpack/inference/qa/mixed/InferenceBaseRestTest.java b/x-pack/plugin/inference/qa/mixed-cluster/src/javaRestTest/java/org/elasticsearch/xpack/inference/qa/mixed/InferenceBaseRestTest.java new file mode 100644 index 0000000000000..733d1134be45a --- /dev/null +++ b/x-pack/plugin/inference/qa/mixed-cluster/src/javaRestTest/java/org/elasticsearch/xpack/inference/qa/mixed/InferenceBaseRestTest.java @@ -0,0 +1,262 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.qa.mixed; + +import org.apache.http.util.EntityUtils; +import org.elasticsearch.client.Request; +import org.elasticsearch.client.Response; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.settings.SecureString; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.util.concurrent.ThreadContext; +import org.elasticsearch.core.Nullable; +import org.elasticsearch.inference.TaskType; +import org.elasticsearch.test.cluster.ElasticsearchCluster; +import org.elasticsearch.test.cluster.local.distribution.DistributionType; +import org.elasticsearch.test.rest.ESRestTestCase; +import org.junit.ClassRule; + +import java.io.IOException; +import java.util.List; +import java.util.Map; + +import static org.hamcrest.Matchers.anyOf; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.hasSize; + +public class InferenceBaseRestTest extends ESRestTestCase { + @ClassRule + public static ElasticsearchCluster cluster = ElasticsearchCluster.local() + .distribution(DistributionType.DEFAULT) + .setting("xpack.license.self_generated.type", "trial") + .setting("xpack.security.enabled", "true") + .plugin("inference-service-test") + .user("x_pack_rest_user", "x-pack-test-password") + .build(); + + @Override + protected String getTestRestCluster() { + return cluster.getHttpAddresses(); + } + + @Override + protected Settings restClientSettings() { + String token = basicAuthHeaderValue("x_pack_rest_user", new SecureString("x-pack-test-password".toCharArray())); + return Settings.builder().put(ThreadContext.PREFIX + ".Authorization", token).build(); + } + + static String mockSparseServiceModelConfig() { + return mockSparseServiceModelConfig(null); + } + + static String mockSparseServiceModelConfig(@Nullable TaskType taskTypeInBody) { + var taskType = taskTypeInBody == null ? 
"" : "\"task_type\": \"" + taskTypeInBody + "\","; + return Strings.format(""" + { + %s + "service": "test_service", + "service_settings": { + "model": "my_model", + "hidden_field": "my_hidden_value", + "api_key": "abc64" + }, + "task_settings": { + "temperature": 3 + } + } + """, taskType); + } + + static String mockSparseServiceModelConfig(@Nullable TaskType taskTypeInBody, boolean shouldReturnHiddenField) { + var taskType = taskTypeInBody == null ? "" : "\"task_type\": \"" + taskTypeInBody + "\","; + return Strings.format(""" + { + %s + "service": "test_service", + "service_settings": { + "model": "my_model", + "hidden_field": "my_hidden_value", + "should_return_hidden_field": %s, + "api_key": "abc64" + }, + "task_settings": { + "temperature": 3 + } + } + """, taskType, shouldReturnHiddenField); + } + + static String mockDenseServiceModelConfig() { + return """ + { + "task_type": "text_embedding", + "service": "text_embedding_test_service", + "service_settings": { + "model": "my_dense_vector_model", + "api_key": "abc64", + "dimensions": 246 + }, + "task_settings": { + } + } + """; + } + + protected void deleteModel(String modelId) throws IOException { + var request = new Request("DELETE", "_inference/" + modelId); + var response = client().performRequest(request); + assertOkOrCreated(response); + } + + protected void deleteModel(String modelId, TaskType taskType) throws IOException { + var request = new Request("DELETE", Strings.format("_inference/%s/%s", taskType, modelId)); + var response = client().performRequest(request); + assertOkOrCreated(response); + } + + protected Map putModel(String modelId, String modelConfig, TaskType taskType) throws IOException { + String endpoint = Strings.format("_inference/%s/%s", taskType, modelId); + return putRequest(endpoint, modelConfig); + } + + /** + * Task type should be in modelConfig + */ + protected Map putModel(String modelId, String modelConfig) throws IOException { + String endpoint = 
Strings.format("_inference/%s", modelId); + return putRequest(endpoint, modelConfig); + } + + Map putRequest(String endpoint, String body) throws IOException { + var request = new Request("PUT", endpoint); + request.setJsonEntity(body); + var response = client().performRequest(request); + assertOkOrCreated(response); + return entityAsMap(response); + } + + Map postRequest(String endpoint, String body) throws IOException { + var request = new Request("POST", endpoint); + request.setJsonEntity(body); + var response = client().performRequest(request); + assertOkOrCreated(response); + return entityAsMap(response); + } + + protected Map putE5TrainedModels() throws IOException { + var request = new Request("PUT", "_ml/trained_models/.multilingual-e5-small?wait_for_completion=true"); + + String body = """ + { + "input": { + "field_names": ["text_field"] + } + } + """; + + request.setJsonEntity(body); + var response = client().performRequest(request); + assertOkOrCreated(response); + return entityAsMap(response); + } + + protected Map deployE5TrainedModels() throws IOException { + var request = new Request("POST", "_ml/trained_models/.multilingual-e5-small/deployment/_start?wait_for=fully_allocated"); + + var response = client().performRequest(request); + assertOkOrCreated(response); + return entityAsMap(response); + } + + @SuppressWarnings("unchecked") + protected Map getModel(String modelId) throws IOException { + var endpoint = Strings.format("_inference/%s", modelId); + return ((List>) getInternal(endpoint).get("endpoints")).get(0); + } + + @SuppressWarnings("unchecked") + protected List> getModels(String modelId, TaskType taskType) throws IOException { + var endpoint = Strings.format("_inference/%s/%s", taskType, modelId); + return (List>) getInternal(endpoint).get("endpoints"); + } + + @SuppressWarnings("unchecked") + protected List> getAllModels() throws IOException { + var endpoint = Strings.format("_inference/_all"); + return (List>) 
getInternal("_inference/_all").get("endpoints"); + } + + private Map getInternal(String endpoint) throws IOException { + var request = new Request("GET", endpoint); + var response = client().performRequest(request); + assertOkOrCreated(response); + return entityAsMap(response); + } + + protected Map inferOnMockService(String modelId, List input) throws IOException { + var endpoint = Strings.format("_inference/%s", modelId); + return inferOnMockServiceInternal(endpoint, input); + } + + protected Map inferOnMockService(String modelId, TaskType taskType, List input) throws IOException { + var endpoint = Strings.format("_inference/%s/%s", taskType, modelId); + return inferOnMockServiceInternal(endpoint, input); + } + + private Map inferOnMockServiceInternal(String endpoint, List input) throws IOException { + var request = new Request("POST", endpoint); + + var bodyBuilder = new StringBuilder("{\"input\": ["); + for (var in : input) { + bodyBuilder.append('"').append(in).append('"').append(','); + } + // remove last comma + bodyBuilder.deleteCharAt(bodyBuilder.length() - 1); + bodyBuilder.append("]}"); + + request.setJsonEntity(bodyBuilder.toString()); + var response = client().performRequest(request); + assertOkOrCreated(response); + return entityAsMap(response); + } + + @SuppressWarnings("unchecked") + protected void assertNonEmptyInferenceResults(Map resultMap, int expectedNumberOfResults, TaskType taskType) { + switch (taskType) { + case SPARSE_EMBEDDING -> { + var results = (List>) resultMap.get(TaskType.SPARSE_EMBEDDING.toString()); + assertThat(results, hasSize(expectedNumberOfResults)); + } + case TEXT_EMBEDDING -> { + var results = (List>) resultMap.get(TaskType.TEXT_EMBEDDING.toString()); + assertThat(results, hasSize(expectedNumberOfResults)); + } + default -> fail("test with task type [" + taskType + "] are not supported yet"); + } + } + + protected static void assertOkOrCreated(Response response) throws IOException { + int statusCode = 
response.getStatusLine().getStatusCode(); + // Once EntityUtils.toString(entity) is called the entity cannot be reused. + // Avoid that call with check here. + if (statusCode == 200 || statusCode == 201) { + return; + } + + String responseStr = EntityUtils.toString(response.getEntity()); + assertThat(responseStr, response.getStatusLine().getStatusCode(), anyOf(equalTo(200), equalTo(201))); + } + + protected Map getTrainedModel(String inferenceEntityId) throws IOException { + var endpoint = Strings.format("_ml/trained_models/%s/_stats", inferenceEntityId); + var request = new Request("GET", endpoint); + var response = client().performRequest(request); + assertOkOrCreated(response); + return entityAsMap(response); + } +} diff --git a/x-pack/plugin/inference/qa/mixed-cluster/src/javaRestTest/java/org/elasticsearch/xpack/inference/qa/mixed/MixedClusterInferenceSpecIT.java b/x-pack/plugin/inference/qa/mixed-cluster/src/javaRestTest/java/org/elasticsearch/xpack/inference/qa/mixed/MixedClusterInferenceSpecIT.java new file mode 100644 index 0000000000000..0f25f77aa660e --- /dev/null +++ b/x-pack/plugin/inference/qa/mixed-cluster/src/javaRestTest/java/org/elasticsearch/xpack/inference/qa/mixed/MixedClusterInferenceSpecIT.java @@ -0,0 +1,93 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.qa.mixed; + +import org.elasticsearch.Version; +import org.elasticsearch.features.NodeFeature; +import org.elasticsearch.inference.TaskType; +import org.elasticsearch.test.cluster.ElasticsearchCluster; +import org.elasticsearch.test.rest.TestFeatureService; +import org.junit.AfterClass; +import org.junit.Before; +import org.junit.ClassRule; + +import java.io.IOException; + +import static org.hamcrest.Matchers.hasSize; + +public class MixedClusterInferenceSpecIT extends InferenceBaseRestTest { + @ClassRule + public static ElasticsearchCluster cluster = Clusters.mixedVersionCluster(); + + @Override + protected String getTestRestCluster() { + return cluster.getHttpAddresses(); + } + + static final Version bwcVersion = Version.fromString(System.getProperty("tests.old_cluster_version")); + + private static TestFeatureService oldClusterTestFeatureService = null; + + @Before + public void extractOldClusterFeatures() { + if (oldClusterTestFeatureService == null) { + oldClusterTestFeatureService = testFeatureService; + } + } + + protected static boolean oldClusterHasFeature(String featureId) { + assert oldClusterTestFeatureService != null; + return oldClusterTestFeatureService.clusterHasFeature(featureId); + } + + protected static boolean oldClusterHasFeature(NodeFeature feature) { + return oldClusterHasFeature(feature.id()); + } + + @AfterClass + public static void cleanUp() { + oldClusterTestFeatureService = null; + } + + @SuppressWarnings("unchecked") + public void testGet() throws IOException { + for (int i = 0; i < 5; i++) { + putModel("se_model_" + i, mockSparseServiceModelConfig(), TaskType.SPARSE_EMBEDDING); + } + for (int i = 0; i < 4; i++) { + putModel("te_model_" + i, mockSparseServiceModelConfig(), TaskType.TEXT_EMBEDDING); + } + + var getAllModels = getAllModels(); + assertThat(getAllModels, hasSize(9)); + + var getSparseModels = getModels("_all", TaskType.SPARSE_EMBEDDING); + assertThat(getSparseModels, 
hasSize(5)); + for (var sparseModel : getSparseModels) { + assertEquals("sparse_embedding", sparseModel.get("task_type")); + } + + var getDenseModels = getModels("_all", TaskType.TEXT_EMBEDDING); + assertThat(getDenseModels, hasSize(4)); + for (var denseModel : getDenseModels) { + assertEquals("text_embedding", denseModel.get("task_type")); + } + + var singleModel = getModels("se_model_1", TaskType.SPARSE_EMBEDDING); + assertThat(singleModel, hasSize(1)); + assertEquals("se_model_1", singleModel.get(0).get("model_id")); + + for (int i = 0; i < 5; i++) { + deleteModel("se_model_" + i, TaskType.SPARSE_EMBEDDING); + } + for (int i = 0; i < 4; i++) { + deleteModel("te_model_" + i, TaskType.TEXT_EMBEDDING); + } + } + +} diff --git a/x-pack/plugin/inference/qa/mixed-cluster/src/yamlRestTest/java/org/elasticsearch/xpack/inference/qa/mixed/EsqlClientYamlIT.java b/x-pack/plugin/inference/qa/mixed-cluster/src/yamlRestTest/java/org/elasticsearch/xpack/inference/qa/mixed/EsqlClientYamlIT.java new file mode 100644 index 0000000000000..dcbf64335cc82 --- /dev/null +++ b/x-pack/plugin/inference/qa/mixed-cluster/src/yamlRestTest/java/org/elasticsearch/xpack/inference/qa/mixed/EsqlClientYamlIT.java @@ -0,0 +1,119 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.qa.mixed; + +import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; + +import org.apache.http.HttpHost; +import org.elasticsearch.client.RestClient; +import org.elasticsearch.test.cluster.ElasticsearchCluster; +import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate; +import org.elasticsearch.test.rest.yaml.ClientYamlTestClient; +import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase; +import org.elasticsearch.test.rest.yaml.ImpersonateOfficialClientTestClient; +import org.elasticsearch.test.rest.yaml.restspec.ClientYamlSuiteRestSpec; +import org.elasticsearch.test.rest.yaml.section.ApiCallSection; +import org.elasticsearch.test.rest.yaml.section.ClientYamlTestSection; +import org.elasticsearch.test.rest.yaml.section.DoSection; +import org.elasticsearch.test.rest.yaml.section.ExecutableSection; +import org.elasticsearch.xpack.esql.qa.rest.EsqlSpecTestCase; +import org.junit.After; +import org.junit.Before; +import org.junit.ClassRule; + +import java.util.ArrayList; +import java.util.List; +import java.util.Map; +import java.util.function.Function; + +public class EsqlClientYamlIT extends ESClientYamlSuiteTestCase { + @ClassRule + public static ElasticsearchCluster cluster = Clusters.mixedVersionCluster(); + + @Override + protected String getTestRestCluster() { + return cluster.getHttpAddresses(); + } + + public EsqlClientYamlIT(final ClientYamlTestCandidate testCandidate) { + super(testCandidate); + } + + @ParametersFactory + public static Iterable parameters() throws Exception { + if (EsqlSpecTestCase.availableVersions().isEmpty()) { + return updateEsqlQueryDoSections(createParameters(), EsqlClientYamlIT::stripVersion); + } + return createParameters(); + } + + @Before + @After + public void assertRequestBreakerEmpty() throws Exception { + EsqlSpecTestCase.assertRequestBreakerEmpty(); + } + + @Override + protected ClientYamlTestClient initClientYamlTestClient( + final 
ClientYamlSuiteRestSpec restSpec, + final RestClient restClient, + final List hosts + ) { + if (EsqlSpecTestCase.availableVersions().isEmpty()) { + return new ImpersonateOfficialClientTestClient(restSpec, restClient, hosts, this::getClientBuilderWithSniffedHosts, "es=8.13"); + } + return super.initClientYamlTestClient(restSpec, restClient, hosts); + } + + static DoSection stripVersion(DoSection doSection) { + ApiCallSection copy = doSection.getApiCallSection().copyWithNewApi(doSection.getApiCallSection().getApi()); + for (Map body : copy.getBodies()) { + body.remove("version"); + } + doSection.setApiCallSection(copy); + return doSection; + } + + // TODO: refactor, copied from single-node's AbstractEsqlClientYamlIt + public static Iterable updateEsqlQueryDoSections(Iterable parameters, Function modify) + throws Exception { + List result = new ArrayList<>(); + for (Object[] orig : parameters) { + assert orig.length == 1; + ClientYamlTestCandidate candidate = (ClientYamlTestCandidate) orig[0]; + try { + ClientYamlTestSection modified = new ClientYamlTestSection( + candidate.getTestSection().getLocation(), + candidate.getTestSection().getName(), + candidate.getTestSection().getPrerequisiteSection(), + candidate.getTestSection().getExecutableSections().stream().map(e -> modifyExecutableSection(e, modify)).toList() + ); + result.add(new Object[] { new ClientYamlTestCandidate(candidate.getRestTestSuite(), modified) }); + } catch (IllegalArgumentException e) { + throw new IllegalArgumentException("error modifying " + candidate + ": " + e.getMessage(), e); + } + } + return result; + } + + // TODO: refactor, copied from single-node's AbstractEsqlClientYamlIt + private static ExecutableSection modifyExecutableSection(ExecutableSection e, Function modify) { + if (false == (e instanceof DoSection)) { + return e; + } + DoSection doSection = (DoSection) e; + String api = doSection.getApiCallSection().getApi(); + return switch (api) { + case "esql.query" -> 
modify.apply(doSection); + // case "esql.async_query", "esql.async_query_get" -> throw new IllegalArgumentException( + // "The esql yaml tests can't contain async_query or async_query_get because we modify them on the fly and *add* those." + // ); + default -> e; + }; + } +} From 4cfa0a66ce9b3821e778bdcd3b88fd8dd13a820e Mon Sep 17 00:00:00 2001 From: Max Hniebergall Date: Tue, 7 May 2024 15:22:50 -0400 Subject: [PATCH 02/15] add tests from upgrade tests --- .../xpack/inference/qa/mixed/BaseMixedIT.java | 144 ++++++++++ .../qa/mixed/InferenceBaseRestTest.java | 262 ------------------ .../qa/mixed/MixedClusterInferenceSpecIT.java | 93 ------- .../qa/mixed/MixedClusterSpecIT.java | 52 ++++ .../qa/mixed/OpenAIServiceMixedIT.java | 214 ++++++++++++++ .../application/OpenAiServiceUpgradeIT.java | 1 + 6 files changed, 411 insertions(+), 355 deletions(-) create mode 100644 x-pack/plugin/inference/qa/mixed-cluster/src/javaRestTest/java/org/elasticsearch/xpack/inference/qa/mixed/BaseMixedIT.java delete mode 100644 x-pack/plugin/inference/qa/mixed-cluster/src/javaRestTest/java/org/elasticsearch/xpack/inference/qa/mixed/InferenceBaseRestTest.java delete mode 100644 x-pack/plugin/inference/qa/mixed-cluster/src/javaRestTest/java/org/elasticsearch/xpack/inference/qa/mixed/MixedClusterInferenceSpecIT.java create mode 100644 x-pack/plugin/inference/qa/mixed-cluster/src/javaRestTest/java/org/elasticsearch/xpack/inference/qa/mixed/MixedClusterSpecIT.java create mode 100644 x-pack/plugin/inference/qa/mixed-cluster/src/javaRestTest/java/org/elasticsearch/xpack/inference/qa/mixed/OpenAIServiceMixedIT.java diff --git a/x-pack/plugin/inference/qa/mixed-cluster/src/javaRestTest/java/org/elasticsearch/xpack/inference/qa/mixed/BaseMixedIT.java b/x-pack/plugin/inference/qa/mixed-cluster/src/javaRestTest/java/org/elasticsearch/xpack/inference/qa/mixed/BaseMixedIT.java new file mode 100644 index 0000000000000..2b5b9f3d1066a --- /dev/null +++ 
b/x-pack/plugin/inference/qa/mixed-cluster/src/javaRestTest/java/org/elasticsearch/xpack/inference/qa/mixed/BaseMixedIT.java @@ -0,0 +1,144 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.qa.mixed; + +import org.apache.http.util.EntityUtils; +import org.elasticsearch.client.Request; +import org.elasticsearch.client.Response; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.settings.SecureString; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.util.concurrent.ThreadContext; +import org.elasticsearch.inference.TaskType; +import org.elasticsearch.test.cluster.ElasticsearchCluster; +import org.elasticsearch.test.cluster.local.distribution.DistributionType; +import org.elasticsearch.test.http.MockWebServer; +import org.elasticsearch.test.rest.ESRestTestCase; +import org.junit.ClassRule; + +import java.io.IOException; +import java.util.List; +import java.util.Map; + +import static org.elasticsearch.core.Strings.format; +import static org.hamcrest.Matchers.anyOf; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.hasSize; + +public class BaseMixedIT extends ESRestTestCase { + @ClassRule + public static ElasticsearchCluster cluster = ElasticsearchCluster.local() + .distribution(DistributionType.DEFAULT) + .setting("xpack.license.self_generated.type", "trial") + .setting("xpack.security.enabled", "true") + .plugin("inference-service-test") + .user("x_pack_rest_user", "x-pack-test-password") + .build(); + + @Override + protected String getTestRestCluster() { + return cluster.getHttpAddresses(); + } + + protected static String getUrl(MockWebServer webServer) { + return format("http://%s:%s", webServer.getHostName(), 
webServer.getPort()); + } + + @Override + protected Settings restClientSettings() { + String token = basicAuthHeaderValue("x_pack_rest_user", new SecureString("x-pack-test-password".toCharArray())); + return Settings.builder().put(ThreadContext.PREFIX + ".Authorization", token).build(); + } + + protected void delete(String inferenceId, TaskType taskType) throws IOException { + var request = new Request("DELETE", Strings.format("_inference/%s/%s", taskType, inferenceId)); + var response = client().performRequest(request); + assertOK(response); + } + + protected void delete(String inferenceId) throws IOException { + var request = new Request("DELETE", Strings.format("_inference/%s", inferenceId)); + var response = client().performRequest(request); + assertOK(response); + } + + protected Map getAll() throws IOException { + var request = new Request("GET", "_inference/_all"); + var response = client().performRequest(request); + assertOK(response); + return entityAsMap(response); + } + + protected Map get(String inferenceId) throws IOException { + var endpoint = Strings.format("_inference/%s", inferenceId); + var request = new Request("GET", endpoint); + var response = client().performRequest(request); + assertOK(response); + return entityAsMap(response); + } + + protected Map get(TaskType taskType, String inferenceId) throws IOException { + var endpoint = Strings.format("_inference/%s/%s", taskType, inferenceId); + var request = new Request("GET", endpoint); + var response = client().performRequest(request); + assertOK(response); + return entityAsMap(response); + } + + protected Map inference(String inferenceId, TaskType taskType, String input) throws IOException { + var endpoint = Strings.format("_inference/%s/%s", taskType, inferenceId); + var request = new Request("POST", endpoint); + request.setJsonEntity("{\"input\": [" + '"' + input + '"' + "]}"); + + var response = client().performRequest(request); + assertOK(response); + return entityAsMap(response); + } + + 
protected Map rerank(String inferenceId, List inputs, String query) throws IOException { + var endpoint = Strings.format("_inference/rerank/%s", inferenceId); + var request = new Request("POST", endpoint); + + StringBuilder body = new StringBuilder("{").append("\"query\":\"").append(query).append("\",").append("\"input\":["); + + for (int i = 0; i < inputs.size(); i++) { + body.append("\"").append(inputs.get(i)).append("\""); + if (i < inputs.size() - 1) { + body.append(","); + } + } + + body.append("]}"); + request.setJsonEntity(body.toString()); + + var response = client().performRequest(request); + assertOK(response); + return entityAsMap(response); + } + + protected void put(String inferenceId, String modelConfig, TaskType taskType) throws IOException { + String endpoint = Strings.format("_inference/%s/%s?error_trace", taskType, inferenceId); + var request = new Request("PUT", endpoint); + request.setJsonEntity(modelConfig); + var response = client().performRequest(request); + assertOKAndConsume(response); + logger.warn("PUT response: {}", response.toString()); + } + + protected static void assertOkOrCreated(Response response) throws IOException { + int statusCode = response.getStatusLine().getStatusCode(); + // Once EntityUtils.toString(entity) is called the entity cannot be reused. + // Avoid that call with check here. 
+ if (statusCode == 200 || statusCode == 201) { + return; + } + + String responseStr = EntityUtils.toString(response.getEntity()); + assertThat(responseStr, response.getStatusLine().getStatusCode(), anyOf(equalTo(200), equalTo(201))); + } +} diff --git a/x-pack/plugin/inference/qa/mixed-cluster/src/javaRestTest/java/org/elasticsearch/xpack/inference/qa/mixed/InferenceBaseRestTest.java b/x-pack/plugin/inference/qa/mixed-cluster/src/javaRestTest/java/org/elasticsearch/xpack/inference/qa/mixed/InferenceBaseRestTest.java deleted file mode 100644 index 733d1134be45a..0000000000000 --- a/x-pack/plugin/inference/qa/mixed-cluster/src/javaRestTest/java/org/elasticsearch/xpack/inference/qa/mixed/InferenceBaseRestTest.java +++ /dev/null @@ -1,262 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. 
- */ - -package org.elasticsearch.xpack.inference.qa.mixed; - -import org.apache.http.util.EntityUtils; -import org.elasticsearch.client.Request; -import org.elasticsearch.client.Response; -import org.elasticsearch.common.Strings; -import org.elasticsearch.common.settings.SecureString; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.util.concurrent.ThreadContext; -import org.elasticsearch.core.Nullable; -import org.elasticsearch.inference.TaskType; -import org.elasticsearch.test.cluster.ElasticsearchCluster; -import org.elasticsearch.test.cluster.local.distribution.DistributionType; -import org.elasticsearch.test.rest.ESRestTestCase; -import org.junit.ClassRule; - -import java.io.IOException; -import java.util.List; -import java.util.Map; - -import static org.hamcrest.Matchers.anyOf; -import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.hasSize; - -public class InferenceBaseRestTest extends ESRestTestCase { - @ClassRule - public static ElasticsearchCluster cluster = ElasticsearchCluster.local() - .distribution(DistributionType.DEFAULT) - .setting("xpack.license.self_generated.type", "trial") - .setting("xpack.security.enabled", "true") - .plugin("inference-service-test") - .user("x_pack_rest_user", "x-pack-test-password") - .build(); - - @Override - protected String getTestRestCluster() { - return cluster.getHttpAddresses(); - } - - @Override - protected Settings restClientSettings() { - String token = basicAuthHeaderValue("x_pack_rest_user", new SecureString("x-pack-test-password".toCharArray())); - return Settings.builder().put(ThreadContext.PREFIX + ".Authorization", token).build(); - } - - static String mockSparseServiceModelConfig() { - return mockSparseServiceModelConfig(null); - } - - static String mockSparseServiceModelConfig(@Nullable TaskType taskTypeInBody) { - var taskType = taskTypeInBody == null ? 
"" : "\"task_type\": \"" + taskTypeInBody + "\","; - return Strings.format(""" - { - %s - "service": "test_service", - "service_settings": { - "model": "my_model", - "hidden_field": "my_hidden_value", - "api_key": "abc64" - }, - "task_settings": { - "temperature": 3 - } - } - """, taskType); - } - - static String mockSparseServiceModelConfig(@Nullable TaskType taskTypeInBody, boolean shouldReturnHiddenField) { - var taskType = taskTypeInBody == null ? "" : "\"task_type\": \"" + taskTypeInBody + "\","; - return Strings.format(""" - { - %s - "service": "test_service", - "service_settings": { - "model": "my_model", - "hidden_field": "my_hidden_value", - "should_return_hidden_field": %s, - "api_key": "abc64" - }, - "task_settings": { - "temperature": 3 - } - } - """, taskType, shouldReturnHiddenField); - } - - static String mockDenseServiceModelConfig() { - return """ - { - "task_type": "text_embedding", - "service": "text_embedding_test_service", - "service_settings": { - "model": "my_dense_vector_model", - "api_key": "abc64", - "dimensions": 246 - }, - "task_settings": { - } - } - """; - } - - protected void deleteModel(String modelId) throws IOException { - var request = new Request("DELETE", "_inference/" + modelId); - var response = client().performRequest(request); - assertOkOrCreated(response); - } - - protected void deleteModel(String modelId, TaskType taskType) throws IOException { - var request = new Request("DELETE", Strings.format("_inference/%s/%s", taskType, modelId)); - var response = client().performRequest(request); - assertOkOrCreated(response); - } - - protected Map putModel(String modelId, String modelConfig, TaskType taskType) throws IOException { - String endpoint = Strings.format("_inference/%s/%s", taskType, modelId); - return putRequest(endpoint, modelConfig); - } - - /** - * Task type should be in modelConfig - */ - protected Map putModel(String modelId, String modelConfig) throws IOException { - String endpoint = 
Strings.format("_inference/%s", modelId); - return putRequest(endpoint, modelConfig); - } - - Map putRequest(String endpoint, String body) throws IOException { - var request = new Request("PUT", endpoint); - request.setJsonEntity(body); - var response = client().performRequest(request); - assertOkOrCreated(response); - return entityAsMap(response); - } - - Map postRequest(String endpoint, String body) throws IOException { - var request = new Request("POST", endpoint); - request.setJsonEntity(body); - var response = client().performRequest(request); - assertOkOrCreated(response); - return entityAsMap(response); - } - - protected Map putE5TrainedModels() throws IOException { - var request = new Request("PUT", "_ml/trained_models/.multilingual-e5-small?wait_for_completion=true"); - - String body = """ - { - "input": { - "field_names": ["text_field"] - } - } - """; - - request.setJsonEntity(body); - var response = client().performRequest(request); - assertOkOrCreated(response); - return entityAsMap(response); - } - - protected Map deployE5TrainedModels() throws IOException { - var request = new Request("POST", "_ml/trained_models/.multilingual-e5-small/deployment/_start?wait_for=fully_allocated"); - - var response = client().performRequest(request); - assertOkOrCreated(response); - return entityAsMap(response); - } - - @SuppressWarnings("unchecked") - protected Map getModel(String modelId) throws IOException { - var endpoint = Strings.format("_inference/%s", modelId); - return ((List>) getInternal(endpoint).get("endpoints")).get(0); - } - - @SuppressWarnings("unchecked") - protected List> getModels(String modelId, TaskType taskType) throws IOException { - var endpoint = Strings.format("_inference/%s/%s", taskType, modelId); - return (List>) getInternal(endpoint).get("endpoints"); - } - - @SuppressWarnings("unchecked") - protected List> getAllModels() throws IOException { - var endpoint = Strings.format("_inference/_all"); - return (List>) 
getInternal("_inference/_all").get("endpoints"); - } - - private Map getInternal(String endpoint) throws IOException { - var request = new Request("GET", endpoint); - var response = client().performRequest(request); - assertOkOrCreated(response); - return entityAsMap(response); - } - - protected Map inferOnMockService(String modelId, List input) throws IOException { - var endpoint = Strings.format("_inference/%s", modelId); - return inferOnMockServiceInternal(endpoint, input); - } - - protected Map inferOnMockService(String modelId, TaskType taskType, List input) throws IOException { - var endpoint = Strings.format("_inference/%s/%s", taskType, modelId); - return inferOnMockServiceInternal(endpoint, input); - } - - private Map inferOnMockServiceInternal(String endpoint, List input) throws IOException { - var request = new Request("POST", endpoint); - - var bodyBuilder = new StringBuilder("{\"input\": ["); - for (var in : input) { - bodyBuilder.append('"').append(in).append('"').append(','); - } - // remove last comma - bodyBuilder.deleteCharAt(bodyBuilder.length() - 1); - bodyBuilder.append("]}"); - - request.setJsonEntity(bodyBuilder.toString()); - var response = client().performRequest(request); - assertOkOrCreated(response); - return entityAsMap(response); - } - - @SuppressWarnings("unchecked") - protected void assertNonEmptyInferenceResults(Map resultMap, int expectedNumberOfResults, TaskType taskType) { - switch (taskType) { - case SPARSE_EMBEDDING -> { - var results = (List>) resultMap.get(TaskType.SPARSE_EMBEDDING.toString()); - assertThat(results, hasSize(expectedNumberOfResults)); - } - case TEXT_EMBEDDING -> { - var results = (List>) resultMap.get(TaskType.TEXT_EMBEDDING.toString()); - assertThat(results, hasSize(expectedNumberOfResults)); - } - default -> fail("test with task type [" + taskType + "] are not supported yet"); - } - } - - protected static void assertOkOrCreated(Response response) throws IOException { - int statusCode = 
response.getStatusLine().getStatusCode(); - // Once EntityUtils.toString(entity) is called the entity cannot be reused. - // Avoid that call with check here. - if (statusCode == 200 || statusCode == 201) { - return; - } - - String responseStr = EntityUtils.toString(response.getEntity()); - assertThat(responseStr, response.getStatusLine().getStatusCode(), anyOf(equalTo(200), equalTo(201))); - } - - protected Map getTrainedModel(String inferenceEntityId) throws IOException { - var endpoint = Strings.format("_ml/trained_models/%s/_stats", inferenceEntityId); - var request = new Request("GET", endpoint); - var response = client().performRequest(request); - assertOkOrCreated(response); - return entityAsMap(response); - } -} diff --git a/x-pack/plugin/inference/qa/mixed-cluster/src/javaRestTest/java/org/elasticsearch/xpack/inference/qa/mixed/MixedClusterInferenceSpecIT.java b/x-pack/plugin/inference/qa/mixed-cluster/src/javaRestTest/java/org/elasticsearch/xpack/inference/qa/mixed/MixedClusterInferenceSpecIT.java deleted file mode 100644 index 0f25f77aa660e..0000000000000 --- a/x-pack/plugin/inference/qa/mixed-cluster/src/javaRestTest/java/org/elasticsearch/xpack/inference/qa/mixed/MixedClusterInferenceSpecIT.java +++ /dev/null @@ -1,93 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. 
- */ - -package org.elasticsearch.xpack.inference.qa.mixed; - -import org.elasticsearch.Version; -import org.elasticsearch.features.NodeFeature; -import org.elasticsearch.inference.TaskType; -import org.elasticsearch.test.cluster.ElasticsearchCluster; -import org.elasticsearch.test.rest.TestFeatureService; -import org.junit.AfterClass; -import org.junit.Before; -import org.junit.ClassRule; - -import java.io.IOException; - -import static org.hamcrest.Matchers.hasSize; - -public class MixedClusterInferenceSpecIT extends InferenceBaseRestTest { - @ClassRule - public static ElasticsearchCluster cluster = Clusters.mixedVersionCluster(); - - @Override - protected String getTestRestCluster() { - return cluster.getHttpAddresses(); - } - - static final Version bwcVersion = Version.fromString(System.getProperty("tests.old_cluster_version")); - - private static TestFeatureService oldClusterTestFeatureService = null; - - @Before - public void extractOldClusterFeatures() { - if (oldClusterTestFeatureService == null) { - oldClusterTestFeatureService = testFeatureService; - } - } - - protected static boolean oldClusterHasFeature(String featureId) { - assert oldClusterTestFeatureService != null; - return oldClusterTestFeatureService.clusterHasFeature(featureId); - } - - protected static boolean oldClusterHasFeature(NodeFeature feature) { - return oldClusterHasFeature(feature.id()); - } - - @AfterClass - public static void cleanUp() { - oldClusterTestFeatureService = null; - } - - @SuppressWarnings("unchecked") - public void testGet() throws IOException { - for (int i = 0; i < 5; i++) { - putModel("se_model_" + i, mockSparseServiceModelConfig(), TaskType.SPARSE_EMBEDDING); - } - for (int i = 0; i < 4; i++) { - putModel("te_model_" + i, mockSparseServiceModelConfig(), TaskType.TEXT_EMBEDDING); - } - - var getAllModels = getAllModels(); - assertThat(getAllModels, hasSize(9)); - - var getSparseModels = getModels("_all", TaskType.SPARSE_EMBEDDING); - assertThat(getSparseModels, 
hasSize(5)); - for (var sparseModel : getSparseModels) { - assertEquals("sparse_embedding", sparseModel.get("task_type")); - } - - var getDenseModels = getModels("_all", TaskType.TEXT_EMBEDDING); - assertThat(getDenseModels, hasSize(4)); - for (var denseModel : getDenseModels) { - assertEquals("text_embedding", denseModel.get("task_type")); - } - - var singleModel = getModels("se_model_1", TaskType.SPARSE_EMBEDDING); - assertThat(singleModel, hasSize(1)); - assertEquals("se_model_1", singleModel.get(0).get("model_id")); - - for (int i = 0; i < 5; i++) { - deleteModel("se_model_" + i, TaskType.SPARSE_EMBEDDING); - } - for (int i = 0; i < 4; i++) { - deleteModel("te_model_" + i, TaskType.TEXT_EMBEDDING); - } - } - -} diff --git a/x-pack/plugin/inference/qa/mixed-cluster/src/javaRestTest/java/org/elasticsearch/xpack/inference/qa/mixed/MixedClusterSpecIT.java b/x-pack/plugin/inference/qa/mixed-cluster/src/javaRestTest/java/org/elasticsearch/xpack/inference/qa/mixed/MixedClusterSpecIT.java new file mode 100644 index 0000000000000..f2dcf80d5390e --- /dev/null +++ b/x-pack/plugin/inference/qa/mixed-cluster/src/javaRestTest/java/org/elasticsearch/xpack/inference/qa/mixed/MixedClusterSpecIT.java @@ -0,0 +1,52 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.qa.mixed; + +import org.elasticsearch.Version; +import org.elasticsearch.features.NodeFeature; +import org.elasticsearch.test.cluster.ElasticsearchCluster; +import org.elasticsearch.test.rest.TestFeatureService; +import org.junit.AfterClass; +import org.junit.Before; +import org.junit.ClassRule; + +public class MixedClusterSpecIT extends BaseMixedIT { + @ClassRule + public static ElasticsearchCluster cluster = Clusters.mixedVersionCluster(); + + @Override + protected String getTestRestCluster() { + return cluster.getHttpAddresses(); + } + + static final Version bwcVersion = Version.fromString(System.getProperty("tests.old_cluster_version")); + + private static TestFeatureService oldClusterTestFeatureService = null; + + @Before + public void extractOldClusterFeatures() { + if (oldClusterTestFeatureService == null) { + oldClusterTestFeatureService = testFeatureService; + } + } + + protected static boolean oldClusterHasFeature(String featureId) { + assert oldClusterTestFeatureService != null; + return oldClusterTestFeatureService.clusterHasFeature(featureId); + } + + protected static boolean oldClusterHasFeature(NodeFeature feature) { + return oldClusterHasFeature(feature.id()); + } + + @AfterClass + public static void cleanUp() { + oldClusterTestFeatureService = null; + } + +} diff --git a/x-pack/plugin/inference/qa/mixed-cluster/src/javaRestTest/java/org/elasticsearch/xpack/inference/qa/mixed/OpenAIServiceMixedIT.java b/x-pack/plugin/inference/qa/mixed-cluster/src/javaRestTest/java/org/elasticsearch/xpack/inference/qa/mixed/OpenAIServiceMixedIT.java new file mode 100644 index 0000000000000..013a2bf0d4784 --- /dev/null +++ b/x-pack/plugin/inference/qa/mixed-cluster/src/javaRestTest/java/org/elasticsearch/xpack/inference/qa/mixed/OpenAIServiceMixedIT.java @@ -0,0 +1,214 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.qa.mixed; + +import org.elasticsearch.Version; +import org.elasticsearch.common.Strings; +import org.elasticsearch.inference.TaskType; +import org.elasticsearch.test.http.MockResponse; +import org.elasticsearch.test.http.MockWebServer; +import org.junit.AfterClass; +import org.junit.BeforeClass; + +import java.io.IOException; +import java.util.List; +import java.util.Map; + +import static org.elasticsearch.xpack.inference.qa.mixed.MixedClusterSpecIT.bwcVersion; +import static org.hamcrest.Matchers.empty; +import static org.hamcrest.Matchers.hasEntry; +import static org.hamcrest.Matchers.hasSize; +import static org.hamcrest.Matchers.not; + +public class OpenAIServiceMixedIT extends BaseMixedIT { + + private static final String OPEN_AI_EMBEDDINGS_ADDED = "8.12.0"; + private static final String OPEN_AI_EMBEDDINGS_MODEL_SETTING_MOVED = "8.13.0"; + private static final String OPEN_AI_COMPLETIONS_ADDED = "8.14.0"; + + private static MockWebServer openAiEmbeddingsServer; + private static MockWebServer openAiChatCompletionsServer; + + @BeforeClass + public static void startWebServer() throws IOException { + openAiEmbeddingsServer = new MockWebServer(); + openAiEmbeddingsServer.start(); + + openAiChatCompletionsServer = new MockWebServer(); + openAiChatCompletionsServer.start(); + } + + @AfterClass + public static void shutdown() { + openAiEmbeddingsServer.close(); + openAiChatCompletionsServer.close(); + } + + @SuppressWarnings("unchecked") + public void testOpenAiEmbeddings() throws IOException { + var openAiEmbeddingsSupported = bwcVersion.onOrAfter(Version.fromString(OPEN_AI_EMBEDDINGS_ADDED)); + assumeTrue("OpenAI embedding service added in " + OPEN_AI_EMBEDDINGS_ADDED, openAiEmbeddingsSupported); + + final String oldClusterId = "old-cluster-embeddings"; + + String inferenceConfig = 
oldClusterVersionCompatibleEmbeddingConfig(); + // queue a response as PUT will call the service + openAiEmbeddingsServer.enqueue(new MockResponse().setResponseCode(200).setBody(embeddingResponse())); + put(oldClusterId, inferenceConfig, TaskType.TEXT_EMBEDDING); + + var configs = (List>) get(TaskType.TEXT_EMBEDDING, oldClusterId).get("endpoints"); + assertThat(configs, hasSize(1)); + assertEquals("openai", configs.get(0).get("service")); + var serviceSettings = (Map) configs.get(0).get("service_settings"); + var taskSettings = (Map) configs.get(0).get("task_settings"); + var modelIdFound = serviceSettings.containsKey("model_id") || taskSettings.containsKey("model_id"); + assertTrue("model_id not found in config: " + configs.toString(), modelIdFound); + + assertEmbeddingInference(oldClusterId); + } + + void assertEmbeddingInference(String inferenceId) throws IOException { + openAiEmbeddingsServer.enqueue(new MockResponse().setResponseCode(200).setBody(embeddingResponse())); + var inferenceMap = inference(inferenceId, TaskType.TEXT_EMBEDDING, "some text"); + assertThat(inferenceMap.entrySet(), not(empty())); + } + + @SuppressWarnings("unchecked") + public void testOpenAiCompletions() throws IOException { + var openAiEmbeddingsSupported = bwcVersion.onOrAfter(Version.fromString(OPEN_AI_EMBEDDINGS_ADDED)); + assumeTrue("OpenAI completions service added in " + OPEN_AI_COMPLETIONS_ADDED, openAiEmbeddingsSupported); + + final String oldClusterId = "old-cluster-completions"; + final String upgradedClusterId = "upgraded-cluster-completions"; + + put(oldClusterId, chatCompletionsConfig(getUrl(openAiChatCompletionsServer)), TaskType.COMPLETION); + + var configsMap = get(TaskType.COMPLETION, oldClusterId); + logger.warn("Configs: {}", configsMap); + var configs = (List>) configsMap.get("endpoints"); + assertThat(configs, hasSize(1)); + assertEquals("openai", configs.get(0).get("service")); + var serviceSettings = (Map) configs.get(0).get("service_settings"); + 
assertThat(serviceSettings, hasEntry("model_id", "gpt-4")); + var taskSettings = (Map) configs.get(0).get("task_settings"); + assertThat(taskSettings.keySet(), empty()); + + assertCompletionInference(oldClusterId); + } + + void assertCompletionInference(String inferenceId) throws IOException { + openAiChatCompletionsServer.enqueue(new MockResponse().setResponseCode(200).setBody(chatCompletionsResponse())); + var inferenceMap = inference(inferenceId, TaskType.COMPLETION, "some text"); + assertThat(inferenceMap.entrySet(), not(empty())); + } + + private String oldClusterVersionCompatibleEmbeddingConfig() { + if (getOldClusterTestVersion().before(OPEN_AI_EMBEDDINGS_MODEL_SETTING_MOVED)) { + return embeddingConfigWithModelInTaskSettings(getUrl(openAiEmbeddingsServer)); + } else { + return embeddingConfigWithModelInServiceSettings(getUrl(openAiEmbeddingsServer)); + } + } + + protected static org.elasticsearch.test.cluster.util.Version getOldClusterTestVersion() { + return org.elasticsearch.test.cluster.util.Version.fromString(bwcVersion.toString()); + } + + private String embeddingConfigWithModelInTaskSettings(String url) { + return Strings.format(""" + { + "service": "openai", + "service_settings": { + "api_key": "XXXX", + "url": "%s" + }, + "task_settings": { + "model": "text-embedding-ada-002" + } + } + """, url); + } + + static String embeddingConfigWithModelInServiceSettings(String url) { + return Strings.format(""" + { + "service": "openai", + "service_settings": { + "api_key": "XXXX", + "url": "%s", + "model_id": "text-embedding-ada-002" + } + } + """, url); + } + + private String chatCompletionsConfig(String url) { + return Strings.format(""" + { + "service": "openai", + "service_settings": { + "api_key": "XXXX", + "url": "%s", + "model_id": "gpt-4" + } + } + """, url); + } + + static String embeddingResponse() { + return """ + { + "object": "list", + "data": [ + { + "object": "embedding", + "index": 0, + "embedding": [ + 0.0123, + -0.0123 + ] + } + ], + 
"model": "text-embedding-ada-002", + "usage": { + "prompt_tokens": 8, + "total_tokens": 8 + } + } + """; + } + + private String chatCompletionsResponse() { + return """ + { + "id": "some-id", + "object": "chat.completion", + "created": 1705397787, + "model": "gpt-3.5-turbo-0613", + "choices": [ + { + "index": 0, + "message": { + "role": "assistant", + "content": "some content" + }, + "logprobs": null, + "finish_reason": "stop" + } + ], + "usage": { + "prompt_tokens": 46, + "completion_tokens": 39, + "total_tokens": 85 + }, + "system_fingerprint": null + } + """; + } + +} diff --git a/x-pack/plugin/inference/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/xpack/application/OpenAiServiceUpgradeIT.java b/x-pack/plugin/inference/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/xpack/application/OpenAiServiceUpgradeIT.java index 4e8e1c845b070..82bba94cc2607 100644 --- a/x-pack/plugin/inference/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/xpack/application/OpenAiServiceUpgradeIT.java +++ b/x-pack/plugin/inference/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/xpack/application/OpenAiServiceUpgradeIT.java @@ -120,6 +120,7 @@ public void testOpenAiCompletions() throws IOException { final String upgradedClusterId = "upgraded-cluster-completions"; if (isOldCluster()) { + // TODO why is put only in old cluster? 
put(oldClusterId, chatCompletionsConfig(getUrl(openAiChatCompletionsServer)), TaskType.COMPLETION); var configs = (List>) get(TaskType.COMPLETION, oldClusterId).get("models"); From 16aab6b4af99dda4fa0cccc96d289c61d1d1e7ba Mon Sep 17 00:00:00 2001 From: Max Hniebergall Date: Tue, 7 May 2024 16:25:32 -0400 Subject: [PATCH 03/15] [ML] Add mixed cluster tests for existing services --- .../qa/mixed/AzureOpenAiServiceMixedIT.java | 105 +++++++ .../qa/mixed/CohereServiceMixedIT.java | 287 ++++++++++++++++++ .../qa/mixed/HuggingFaceServiceMixedIT.java | 137 +++++++++ 3 files changed, 529 insertions(+) create mode 100644 x-pack/plugin/inference/qa/mixed-cluster/src/javaRestTest/java/org/elasticsearch/xpack/inference/qa/mixed/AzureOpenAiServiceMixedIT.java create mode 100644 x-pack/plugin/inference/qa/mixed-cluster/src/javaRestTest/java/org/elasticsearch/xpack/inference/qa/mixed/CohereServiceMixedIT.java create mode 100644 x-pack/plugin/inference/qa/mixed-cluster/src/javaRestTest/java/org/elasticsearch/xpack/inference/qa/mixed/HuggingFaceServiceMixedIT.java diff --git a/x-pack/plugin/inference/qa/mixed-cluster/src/javaRestTest/java/org/elasticsearch/xpack/inference/qa/mixed/AzureOpenAiServiceMixedIT.java b/x-pack/plugin/inference/qa/mixed-cluster/src/javaRestTest/java/org/elasticsearch/xpack/inference/qa/mixed/AzureOpenAiServiceMixedIT.java new file mode 100644 index 0000000000000..bcf1ede6b2d87 --- /dev/null +++ b/x-pack/plugin/inference/qa/mixed-cluster/src/javaRestTest/java/org/elasticsearch/xpack/inference/qa/mixed/AzureOpenAiServiceMixedIT.java @@ -0,0 +1,105 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.qa.mixed; + +import org.apache.lucene.tests.util.LuceneTestCase; +import org.elasticsearch.Version; +import org.elasticsearch.common.Strings; +import org.elasticsearch.inference.TaskType; +import org.elasticsearch.test.http.MockResponse; +import org.elasticsearch.test.http.MockWebServer; +import org.junit.AfterClass; +import org.junit.BeforeClass; + +import java.io.IOException; +import java.util.List; +import java.util.Map; + +import static org.elasticsearch.xpack.inference.qa.mixed.MixedClusterSpecIT.bwcVersion; +import static org.hamcrest.Matchers.empty; +import static org.hamcrest.Matchers.hasSize; +import static org.hamcrest.Matchers.not; + +public class AzureOpenAiServiceMixedIT extends BaseMixedIT { + + private static final String OPEN_AI_AZURE_EMBEDDINGS_ADDED = "8.14.0"; + + private static MockWebServer openAiEmbeddingsServer; + + // @BeforeClass test mute + public static void startWebServer() throws IOException { + openAiEmbeddingsServer = new MockWebServer(); + openAiEmbeddingsServer.start(); + } + + // @AfterClass test mute + public static void shutdown() { + openAiEmbeddingsServer.close(); + } + + @SuppressWarnings("unchecked") + @LuceneTestCase.AwaitsFix(bugUrl = "AzureOpenAI doesn't support webserver URL in tests") + public void testOpenAiEmbeddings() throws IOException { + var openAiEmbeddingsSupported = bwcVersion.onOrAfter(Version.fromString(OPEN_AI_AZURE_EMBEDDINGS_ADDED)); + assumeTrue("Azure OpenAI embedding service added in " + OPEN_AI_AZURE_EMBEDDINGS_ADDED, openAiEmbeddingsSupported); + + final String oldClusterId = "old-cluster-embeddings"; + + openAiEmbeddingsServer.enqueue(new MockResponse().setResponseCode(200).setBody(embeddingResponse())); + put(oldClusterId, embeddingConfig(getUrl(openAiEmbeddingsServer)), TaskType.TEXT_EMBEDDING); + + var configs = (List>) get(TaskType.TEXT_EMBEDDING, oldClusterId).get("endpoints"); + assertThat(configs, hasSize(1)); + assertEquals("azureopenai", 
configs.get(0).get("service")); + assertEmbeddingInference(oldClusterId); + } + + void assertEmbeddingInference(String inferenceId) throws IOException { + openAiEmbeddingsServer.enqueue(new MockResponse().setResponseCode(200).setBody(embeddingResponse())); + var inferenceMap = inference(inferenceId, TaskType.TEXT_EMBEDDING, "some text"); + assertThat(inferenceMap.entrySet(), not(empty())); + } + + private String embeddingConfig(String url) { + return Strings.format(""" + { + "service": "azureopenai", + "service_settings": { + "api_key": "XXXX", + "url": "%s", + "resource_name": "resource_name", + "deployment_id": "deployment_id", + "api_version": "2024-02-01" + } + } + """, url); + } + + static String embeddingResponse() { + return """ + { + "object": "list", + "data": [ + { + "object": "embedding", + "index": 0, + "embedding": [ + 0.0123, + -0.0123 + ] + } + ], + "model": "text-embedding-ada-002", + "usage": { + "prompt_tokens": 8, + "total_tokens": 8 + } + } + """; + } +} diff --git a/x-pack/plugin/inference/qa/mixed-cluster/src/javaRestTest/java/org/elasticsearch/xpack/inference/qa/mixed/CohereServiceMixedIT.java b/x-pack/plugin/inference/qa/mixed-cluster/src/javaRestTest/java/org/elasticsearch/xpack/inference/qa/mixed/CohereServiceMixedIT.java new file mode 100644 index 0000000000000..c157c8135ad7c --- /dev/null +++ b/x-pack/plugin/inference/qa/mixed-cluster/src/javaRestTest/java/org/elasticsearch/xpack/inference/qa/mixed/CohereServiceMixedIT.java @@ -0,0 +1,287 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.qa.mixed; + +import org.elasticsearch.Version; +import org.elasticsearch.common.Strings; +import org.elasticsearch.inference.TaskType; +import org.elasticsearch.test.http.MockResponse; +import org.elasticsearch.test.http.MockWebServer; +import org.elasticsearch.xpack.inference.services.cohere.embeddings.CohereEmbeddingType; +import org.hamcrest.Matchers; +import org.junit.AfterClass; +import org.junit.BeforeClass; + +import java.io.IOException; +import java.util.List; +import java.util.Map; + +import static org.elasticsearch.xpack.inference.qa.mixed.MixedClusterSpecIT.bwcVersion; +import static org.hamcrest.Matchers.empty; +import static org.hamcrest.Matchers.hasEntry; +import static org.hamcrest.Matchers.hasSize; +import static org.hamcrest.Matchers.not; +import static org.hamcrest.Matchers.oneOf; + +public class CohereServiceMixedIT extends BaseMixedIT { + + private static final String COHERE_EMBEDDINGS_ADDED = "8.13.0"; + private static final String COHERE_RERANK_ADDED = "8.14.0"; + private static final String BYTE_ALIAS_FOR_INT8_ADDED = "8.14.0"; + + private static MockWebServer cohereEmbeddingsServer; + private static MockWebServer cohereRerankServer; + + @BeforeClass + public static void startWebServer() throws IOException { + cohereEmbeddingsServer = new MockWebServer(); + cohereEmbeddingsServer.start(); + + cohereRerankServer = new MockWebServer(); + cohereRerankServer.start(); + } + + @AfterClass + public static void shutdown() { + cohereEmbeddingsServer.close(); + cohereRerankServer.close(); + } + + @SuppressWarnings("unchecked") + public void testCohereEmbeddings() throws IOException { + var embeddingsSupported = bwcVersion.onOrAfter(Version.fromString(COHERE_EMBEDDINGS_ADDED)); + assumeTrue("Cohere embedding service added in " + COHERE_EMBEDDINGS_ADDED, embeddingsSupported); + + final String oldClusterIdInt8 = "old-cluster-embeddings-int8"; + final String oldClusterIdFloat = 
"old-cluster-embeddings-float"; + + // queue a response as PUT will call the service + cohereEmbeddingsServer.enqueue(new MockResponse().setResponseCode(200).setBody(embeddingResponseByte())); + put(oldClusterIdInt8, embeddingConfigInt8(getUrl(cohereEmbeddingsServer)), TaskType.TEXT_EMBEDDING); + // float model + cohereEmbeddingsServer.enqueue(new MockResponse().setResponseCode(200).setBody(embeddingResponseFloat())); + put(oldClusterIdFloat, embeddingConfigFloat(getUrl(cohereEmbeddingsServer)), TaskType.TEXT_EMBEDDING); + + var configs = (List>) get(TaskType.TEXT_EMBEDDING, oldClusterIdInt8).get("endpoints"); + assertEquals("cohere", configs.get(0).get("service")); + var serviceSettings = (Map) configs.get(0).get("service_settings"); + assertThat(serviceSettings, hasEntry("model_id", "embed-english-light-v3.0")); + var embeddingType = serviceSettings.get("embedding_type"); + // An upgraded node will report the embedding type as byte, an old node int8 + assertThat(embeddingType, Matchers.is(oneOf("int8", "byte"))); + + configs = (List>) get(TaskType.TEXT_EMBEDDING, oldClusterIdFloat).get("endpoints"); + serviceSettings = (Map) configs.get(0).get("service_settings"); + assertThat(serviceSettings, hasEntry("embedding_type", "float")); + + assertEmbeddingInference(oldClusterIdInt8, CohereEmbeddingType.BYTE); + assertEmbeddingInference(oldClusterIdFloat, CohereEmbeddingType.FLOAT); + + { + final String upgradedClusterIdInt8 = "upgraded-cluster-embeddings-int8"; + + cohereEmbeddingsServer.enqueue(new MockResponse().setResponseCode(200).setBody(embeddingResponseByte())); + put(upgradedClusterIdInt8, embeddingConfigInt8(getUrl(cohereEmbeddingsServer)), TaskType.TEXT_EMBEDDING); + + configs = (List>) get(TaskType.TEXT_EMBEDDING, upgradedClusterIdInt8).get("endpoints"); + serviceSettings = (Map) configs.get(0).get("service_settings"); + assertThat(serviceSettings, hasEntry("embedding_type", "byte")); // int8 rewritten to byte + + 
assertEmbeddingInference(upgradedClusterIdInt8, CohereEmbeddingType.INT8); + delete(upgradedClusterIdInt8); + } + { + final String upgradedClusterIdFloat = "upgraded-cluster-embeddings-float"; + cohereEmbeddingsServer.enqueue(new MockResponse().setResponseCode(200).setBody(embeddingResponseFloat())); + put(upgradedClusterIdFloat, embeddingConfigFloat(getUrl(cohereEmbeddingsServer)), TaskType.TEXT_EMBEDDING); + + configs = (List>) get(TaskType.TEXT_EMBEDDING, upgradedClusterIdFloat).get("endpoints"); + serviceSettings = (Map) configs.get(0).get("service_settings"); + assertThat(serviceSettings, hasEntry("embedding_type", "float")); + + assertEmbeddingInference(upgradedClusterIdFloat, CohereEmbeddingType.FLOAT); + delete(upgradedClusterIdFloat); + } + + delete(oldClusterIdFloat); + delete(oldClusterIdInt8); + + } + + void assertEmbeddingInference(String inferenceId, CohereEmbeddingType type) throws IOException { + switch (type) { + case INT8: + case BYTE: + cohereEmbeddingsServer.enqueue(new MockResponse().setResponseCode(200).setBody(embeddingResponseByte())); + break; + case FLOAT: + cohereEmbeddingsServer.enqueue(new MockResponse().setResponseCode(200).setBody(embeddingResponseFloat())); + } + + var inferenceMap = inference(inferenceId, TaskType.TEXT_EMBEDDING, "some text"); + assertThat(inferenceMap.entrySet(), not(empty())); + } + + @SuppressWarnings("unchecked") + public void testRerank() throws IOException { + var rerankSupported = bwcVersion.onOrAfter(Version.fromString(COHERE_RERANK_ADDED)); + assumeTrue("Cohere rerank service added in " + COHERE_RERANK_ADDED, rerankSupported); + + final String oldClusterId = "old-cluster-rerank"; + + put(oldClusterId, rerankConfig(getUrl(cohereRerankServer)), TaskType.RERANK); + assertRerank(oldClusterId); + + var configs = (List>) get(TaskType.RERANK, oldClusterId).get("endpoints"); + assertThat(configs, hasSize(1)); + assertEquals("cohere", configs.get(0).get("service")); + var serviceSettings = (Map) 
configs.get(0).get("service_settings"); + assertThat(serviceSettings, hasEntry("model_id", "rerank-english-v3.0")); + var taskSettings = (Map) configs.get(0).get("task_settings"); + assertThat(taskSettings, hasEntry("top_n", 3)); + + assertRerank(oldClusterId); + + } + + private void assertRerank(String inferenceId) throws IOException { + cohereRerankServer.enqueue(new MockResponse().setResponseCode(200).setBody(rerankResponse())); + var inferenceMap = rerank( + inferenceId, + List.of("luke", "like", "leia", "chewy", "r2d2", "star", "wars"), + "star wars main character" + ); + assertThat(inferenceMap.entrySet(), not(empty())); + } + + private String embeddingConfigByte(String url) { + return embeddingConfigTemplate(url, "byte"); + } + + private String embeddingConfigInt8(String url) { + return embeddingConfigTemplate(url, "int8"); + } + + private String embeddingConfigFloat(String url) { + return embeddingConfigTemplate(url, "float"); + } + + private String embeddingConfigTemplate(String url, String embeddingType) { + return Strings.format(""" + { + "service": "cohere", + "service_settings": { + "url": "%s", + "api_key": "XXXX", + "model_id": "embed-english-light-v3.0", + "embedding_type": "%s" + } + } + """, url, embeddingType); + } + + private String embeddingResponseByte() { + return """ + { + "id": "3198467e-399f-4d4a-aa2c-58af93bd6dc4", + "texts": [ + "hello" + ], + "embeddings": [ + [ + 12, + 56 + ] + ], + "meta": { + "api_version": { + "version": "1" + }, + "billed_units": { + "input_tokens": 1 + } + }, + "response_type": "embeddings_bytes" + } + """; + } + + private String embeddingResponseFloat() { + return """ + { + "id": "3198467e-399f-4d4a-aa2c-58af93bd6dc4", + "texts": [ + "hello" + ], + "embeddings": [ + [ + -0.0018434525, + 0.01777649 + ] + ], + "meta": { + "api_version": { + "version": "1" + }, + "billed_units": { + "input_tokens": 1 + } + }, + "response_type": "embeddings_floats" + } + """; + } + + private String rerankConfig(String url) { + return 
Strings.format(""" + { + "service": "cohere", + "service_settings": { + "api_key": "XXXX", + "model_id": "rerank-english-v3.0", + "url": "%s" + }, + "task_settings": { + "return_documents": false, + "top_n": 3 + } + } + """, url); + } + + private String rerankResponse() { + return """ + { + "index": "d0760819-5a73-4d58-b163-3956d3648b62", + "results": [ + { + "index": 2, + "relevance_score": 0.98005307 + }, + { + "index": 3, + "relevance_score": 0.27904198 + }, + { + "index": 0, + "relevance_score": 0.10194652 + } + ], + "meta": { + "api_version": { + "version": "1" + }, + "billed_units": { + "search_units": 1 + } + } + } + """; + } + +} diff --git a/x-pack/plugin/inference/qa/mixed-cluster/src/javaRestTest/java/org/elasticsearch/xpack/inference/qa/mixed/HuggingFaceServiceMixedIT.java b/x-pack/plugin/inference/qa/mixed-cluster/src/javaRestTest/java/org/elasticsearch/xpack/inference/qa/mixed/HuggingFaceServiceMixedIT.java new file mode 100644 index 0000000000000..e2de60d7393dd --- /dev/null +++ b/x-pack/plugin/inference/qa/mixed-cluster/src/javaRestTest/java/org/elasticsearch/xpack/inference/qa/mixed/HuggingFaceServiceMixedIT.java @@ -0,0 +1,137 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.qa.mixed; + +import org.elasticsearch.Version; +import org.elasticsearch.common.Strings; +import org.elasticsearch.inference.TaskType; +import org.elasticsearch.test.http.MockResponse; +import org.elasticsearch.test.http.MockWebServer; +import org.junit.AfterClass; +import org.junit.BeforeClass; + +import java.io.IOException; +import java.util.List; +import java.util.Map; + +import static org.elasticsearch.xpack.inference.qa.mixed.MixedClusterSpecIT.bwcVersion; +import static org.hamcrest.Matchers.*; + +public class HuggingFaceServiceMixedIT extends BaseMixedIT { + + private static final String HF_EMBEDDINGS_ADDED = "8.12.0"; + private static final String HF_ELSER_ADDED = "8.12.0"; + + private static MockWebServer embeddingsServer; + private static MockWebServer elserServer; + + @BeforeClass + public static void startWebServer() throws IOException { + embeddingsServer = new MockWebServer(); + embeddingsServer.start(); + + elserServer = new MockWebServer(); + elserServer.start(); + } + + @AfterClass + public static void shutdown() { + embeddingsServer.close(); + elserServer.close(); + } + + @SuppressWarnings("unchecked") + public void testHFEmbeddings() throws IOException { + var embeddingsSupported = bwcVersion.onOrAfter(Version.fromString(HF_EMBEDDINGS_ADDED)); + assumeTrue("Hugging Face embedding service added in " + HF_EMBEDDINGS_ADDED, embeddingsSupported); + + final String oldClusterId = "old-cluster-embeddings"; + + embeddingsServer.enqueue(new MockResponse().setResponseCode(200).setBody(embeddingResponse())); + put(oldClusterId, embeddingConfig(getUrl(embeddingsServer)), TaskType.TEXT_EMBEDDING); + var configs = (List>) get(TaskType.TEXT_EMBEDDING, oldClusterId).get("endpoints"); + assertThat(configs, hasSize(1)); + assertEquals("hugging_face", configs.get(0).get("service")); + assertEmbeddingInference(oldClusterId); + } + + void assertEmbeddingInference(String inferenceId) throws IOException { + 
embeddingsServer.enqueue(new MockResponse().setResponseCode(200).setBody(embeddingResponse())); + var inferenceMap = inference(inferenceId, TaskType.TEXT_EMBEDDING, "some text"); + assertThat(inferenceMap.entrySet(), not(empty())); + } + + @SuppressWarnings("unchecked") + public void testElser() throws IOException { + var supported = bwcVersion.onOrAfter(Version.fromString(HF_ELSER_ADDED)); + assumeTrue("HF elser service added in " + HF_ELSER_ADDED, supported); + + final String oldClusterId = "old-cluster-elser"; + final String upgradedClusterId = "upgraded-cluster-elser"; + + put(oldClusterId, elserConfig(getUrl(elserServer)), TaskType.SPARSE_EMBEDDING); + + var configs = (List>) get(TaskType.SPARSE_EMBEDDING, oldClusterId).get("endpoints"); + assertThat(configs, hasSize(1)); + assertEquals("hugging_face", configs.get(0).get("service")); + assertElser(oldClusterId); + } + + private void assertElser(String inferenceId) throws IOException { + elserServer.enqueue(new MockResponse().setResponseCode(200).setBody(elserResponse())); + var inferenceMap = inference(inferenceId, TaskType.SPARSE_EMBEDDING, "some text"); + assertThat(inferenceMap.entrySet(), not(empty())); + } + + private String embeddingConfig(String url) { + return Strings.format(""" + { + "service": "hugging_face", + "service_settings": { + "url": "%s", + "api_key": "XXXX" + } + } + """, url); + } + + private String embeddingResponse() { + return """ + [ + [ + 0.014539449, + -0.015288644 + ] + ] + """; + } + + private String elserConfig(String url) { + return Strings.format(""" + { + "service": "hugging_face", + "service_settings": { + "api_key": "XXXX", + "url": "%s" + } + } + """, url); + } + + private String elserResponse() { + return """ + [ + { + ".": 0.133155956864357, + "the": 0.6747211217880249 + } + ] + """; + } + +} From 95b4c6202e3f6bac287dcdbbf7a11b7a0c2a38c4 Mon Sep 17 00:00:00 2001 From: Max Hniebergall Date: Tue, 7 May 2024 16:31:40 -0400 Subject: [PATCH 04/15] clean up --- 
.../inference/qa/mixed/EsqlClientYamlIT.java | 119 ------------------ 1 file changed, 119 deletions(-) delete mode 100644 x-pack/plugin/inference/qa/mixed-cluster/src/yamlRestTest/java/org/elasticsearch/xpack/inference/qa/mixed/EsqlClientYamlIT.java diff --git a/x-pack/plugin/inference/qa/mixed-cluster/src/yamlRestTest/java/org/elasticsearch/xpack/inference/qa/mixed/EsqlClientYamlIT.java b/x-pack/plugin/inference/qa/mixed-cluster/src/yamlRestTest/java/org/elasticsearch/xpack/inference/qa/mixed/EsqlClientYamlIT.java deleted file mode 100644 index dcbf64335cc82..0000000000000 --- a/x-pack/plugin/inference/qa/mixed-cluster/src/yamlRestTest/java/org/elasticsearch/xpack/inference/qa/mixed/EsqlClientYamlIT.java +++ /dev/null @@ -1,119 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ - -package org.elasticsearch.xpack.inference.qa.mixed; - -import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; - -import org.apache.http.HttpHost; -import org.elasticsearch.client.RestClient; -import org.elasticsearch.test.cluster.ElasticsearchCluster; -import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate; -import org.elasticsearch.test.rest.yaml.ClientYamlTestClient; -import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase; -import org.elasticsearch.test.rest.yaml.ImpersonateOfficialClientTestClient; -import org.elasticsearch.test.rest.yaml.restspec.ClientYamlSuiteRestSpec; -import org.elasticsearch.test.rest.yaml.section.ApiCallSection; -import org.elasticsearch.test.rest.yaml.section.ClientYamlTestSection; -import org.elasticsearch.test.rest.yaml.section.DoSection; -import org.elasticsearch.test.rest.yaml.section.ExecutableSection; -import org.elasticsearch.xpack.esql.qa.rest.EsqlSpecTestCase; -import org.junit.After; -import 
org.junit.Before; -import org.junit.ClassRule; - -import java.util.ArrayList; -import java.util.List; -import java.util.Map; -import java.util.function.Function; - -public class EsqlClientYamlIT extends ESClientYamlSuiteTestCase { - @ClassRule - public static ElasticsearchCluster cluster = Clusters.mixedVersionCluster(); - - @Override - protected String getTestRestCluster() { - return cluster.getHttpAddresses(); - } - - public EsqlClientYamlIT(final ClientYamlTestCandidate testCandidate) { - super(testCandidate); - } - - @ParametersFactory - public static Iterable parameters() throws Exception { - if (EsqlSpecTestCase.availableVersions().isEmpty()) { - return updateEsqlQueryDoSections(createParameters(), EsqlClientYamlIT::stripVersion); - } - return createParameters(); - } - - @Before - @After - public void assertRequestBreakerEmpty() throws Exception { - EsqlSpecTestCase.assertRequestBreakerEmpty(); - } - - @Override - protected ClientYamlTestClient initClientYamlTestClient( - final ClientYamlSuiteRestSpec restSpec, - final RestClient restClient, - final List hosts - ) { - if (EsqlSpecTestCase.availableVersions().isEmpty()) { - return new ImpersonateOfficialClientTestClient(restSpec, restClient, hosts, this::getClientBuilderWithSniffedHosts, "es=8.13"); - } - return super.initClientYamlTestClient(restSpec, restClient, hosts); - } - - static DoSection stripVersion(DoSection doSection) { - ApiCallSection copy = doSection.getApiCallSection().copyWithNewApi(doSection.getApiCallSection().getApi()); - for (Map body : copy.getBodies()) { - body.remove("version"); - } - doSection.setApiCallSection(copy); - return doSection; - } - - // TODO: refactor, copied from single-node's AbstractEsqlClientYamlIt - public static Iterable updateEsqlQueryDoSections(Iterable parameters, Function modify) - throws Exception { - List result = new ArrayList<>(); - for (Object[] orig : parameters) { - assert orig.length == 1; - ClientYamlTestCandidate candidate = (ClientYamlTestCandidate) 
orig[0]; - try { - ClientYamlTestSection modified = new ClientYamlTestSection( - candidate.getTestSection().getLocation(), - candidate.getTestSection().getName(), - candidate.getTestSection().getPrerequisiteSection(), - candidate.getTestSection().getExecutableSections().stream().map(e -> modifyExecutableSection(e, modify)).toList() - ); - result.add(new Object[] { new ClientYamlTestCandidate(candidate.getRestTestSuite(), modified) }); - } catch (IllegalArgumentException e) { - throw new IllegalArgumentException("error modifying " + candidate + ": " + e.getMessage(), e); - } - } - return result; - } - - // TODO: refactor, copied from single-node's AbstractEsqlClientYamlIt - private static ExecutableSection modifyExecutableSection(ExecutableSection e, Function modify) { - if (false == (e instanceof DoSection)) { - return e; - } - DoSection doSection = (DoSection) e; - String api = doSection.getApiCallSection().getApi(); - return switch (api) { - case "esql.query" -> modify.apply(doSection); - // case "esql.async_query", "esql.async_query_get" -> throw new IllegalArgumentException( - // "The esql yaml tests can't contain async_query or async_query_get because we modify them on the fly and *add* those." 
- // ); - default -> e; - }; - } -} From 1e586fe481edb5536b828eb66a8dac36260eda41 Mon Sep 17 00:00:00 2001 From: Max Hniebergall Date: Wed, 8 May 2024 15:49:17 -0400 Subject: [PATCH 05/15] review improvements --- .../internal/RestrictedBuildApiService.java | 1 - x-pack/plugin/inference/build.gradle | 1 - .../inference/qa/mixed-cluster/build.gradle | 9 --- .../qa/mixed/AzureOpenAiServiceMixedIT.java | 2 - .../xpack/inference/qa/mixed/BaseMixedIT.java | 57 ++++++++++--------- .../xpack/inference/qa/mixed/Clusters.java | 2 - 6 files changed, 31 insertions(+), 41 deletions(-) diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/RestrictedBuildApiService.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/RestrictedBuildApiService.java index 9d9f76c9ba185..23afcab7bec7c 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/RestrictedBuildApiService.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/RestrictedBuildApiService.java @@ -128,7 +128,6 @@ private static ListMultimap, String> createLegacyRestTestBasePluginUsag map.put(LegacyRestTestBasePlugin.class, ":x-pack:plugin:ilm:qa:multi-cluster"); map.put(LegacyRestTestBasePlugin.class, ":x-pack:plugin:ilm:qa:multi-node"); map.put(LegacyRestTestBasePlugin.class, ":x-pack:plugin:ilm:qa:rest"); - map.put(LegacyRestTestBasePlugin.class, ":x-pack:plugin:inference:qa:mixed-cluster"); map.put(LegacyRestTestBasePlugin.class, ":x-pack:plugin:ml:qa:basic-multi-node"); map.put(LegacyRestTestBasePlugin.class, ":x-pack:plugin:ml:qa:disabled"); map.put(LegacyRestTestBasePlugin.class, ":x-pack:plugin:ml:qa:ml-with-security"); diff --git a/x-pack/plugin/inference/build.gradle b/x-pack/plugin/inference/build.gradle index 804ac2d591d0c..0aef8601ffcc6 100644 --- a/x-pack/plugin/inference/build.gradle +++ b/x-pack/plugin/inference/build.gradle @@ -27,7 +27,6 @@ base { dependencies { implementation project(path: 
':libs:elasticsearch-logging') - testImplementation project(path: ':test:framework') compileOnly project(":server") compileOnly project(path: xpackModule('core')) testImplementation(testArtifact(project(xpackModule('core')))) diff --git a/x-pack/plugin/inference/qa/mixed-cluster/build.gradle b/x-pack/plugin/inference/qa/mixed-cluster/build.gradle index 216b147b94ea9..1d5369468b054 100644 --- a/x-pack/plugin/inference/qa/mixed-cluster/build.gradle +++ b/x-pack/plugin/inference/qa/mixed-cluster/build.gradle @@ -8,15 +8,6 @@ apply plugin: 'elasticsearch.internal-java-rest-test' apply plugin: 'elasticsearch.internal-test-artifact-base' apply plugin: 'elasticsearch.bwc-test' -restResources { - restApi { - include '_common', 'bulk', 'get', 'indices', 'inference', 'xpack', 'cluster' - } - restTests { - includeXpack 'inference' - } -} - dependencies { testImplementation project(path: ':x-pack:plugin:inference:qa:inference-service-tests') compileOnly project(':x-pack:plugin:core') diff --git a/x-pack/plugin/inference/qa/mixed-cluster/src/javaRestTest/java/org/elasticsearch/xpack/inference/qa/mixed/AzureOpenAiServiceMixedIT.java b/x-pack/plugin/inference/qa/mixed-cluster/src/javaRestTest/java/org/elasticsearch/xpack/inference/qa/mixed/AzureOpenAiServiceMixedIT.java index bcf1ede6b2d87..1ee1c63c11308 100644 --- a/x-pack/plugin/inference/qa/mixed-cluster/src/javaRestTest/java/org/elasticsearch/xpack/inference/qa/mixed/AzureOpenAiServiceMixedIT.java +++ b/x-pack/plugin/inference/qa/mixed-cluster/src/javaRestTest/java/org/elasticsearch/xpack/inference/qa/mixed/AzureOpenAiServiceMixedIT.java @@ -13,8 +13,6 @@ import org.elasticsearch.inference.TaskType; import org.elasticsearch.test.http.MockResponse; import org.elasticsearch.test.http.MockWebServer; -import org.junit.AfterClass; -import org.junit.BeforeClass; import java.io.IOException; import java.util.List; diff --git 
a/x-pack/plugin/inference/qa/mixed-cluster/src/javaRestTest/java/org/elasticsearch/xpack/inference/qa/mixed/BaseMixedIT.java b/x-pack/plugin/inference/qa/mixed-cluster/src/javaRestTest/java/org/elasticsearch/xpack/inference/qa/mixed/BaseMixedIT.java index 2b5b9f3d1066a..3aa7457e16e0d 100644 --- a/x-pack/plugin/inference/qa/mixed-cluster/src/javaRestTest/java/org/elasticsearch/xpack/inference/qa/mixed/BaseMixedIT.java +++ b/x-pack/plugin/inference/qa/mixed-cluster/src/javaRestTest/java/org/elasticsearch/xpack/inference/qa/mixed/BaseMixedIT.java @@ -15,10 +15,12 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.inference.TaskType; +import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.cluster.ElasticsearchCluster; import org.elasticsearch.test.cluster.local.distribution.DistributionType; import org.elasticsearch.test.http.MockWebServer; import org.elasticsearch.test.rest.ESRestTestCase; +import org.hamcrest.Matchers; import org.junit.ClassRule; import java.io.IOException; @@ -28,9 +30,8 @@ import static org.elasticsearch.core.Strings.format; import static org.hamcrest.Matchers.anyOf; import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.hasSize; -public class BaseMixedIT extends ESRestTestCase { +public abstract class BaseMixedIT extends ESRestTestCase { @ClassRule public static ElasticsearchCluster cluster = ElasticsearchCluster.local() .distribution(DistributionType.DEFAULT) @@ -46,48 +47,48 @@ protected String getTestRestCluster() { } protected static String getUrl(MockWebServer webServer) { - return format("http://%s:%s", webServer.getHostName(), webServer.getPort()); + return Strings.format("http://%s:%s", webServer.getHostName(), webServer.getPort()); } @Override protected Settings restClientSettings() { - String token = basicAuthHeaderValue("x_pack_rest_user", new SecureString("x-pack-test-password".toCharArray())); + String 
token = ESRestTestCase.basicAuthHeaderValue("x_pack_rest_user", new SecureString("x-pack-test-password".toCharArray())); return Settings.builder().put(ThreadContext.PREFIX + ".Authorization", token).build(); } protected void delete(String inferenceId, TaskType taskType) throws IOException { var request = new Request("DELETE", Strings.format("_inference/%s/%s", taskType, inferenceId)); - var response = client().performRequest(request); - assertOK(response); + var response = ESRestTestCase.client().performRequest(request); + ESRestTestCase.assertOK(response); } protected void delete(String inferenceId) throws IOException { var request = new Request("DELETE", Strings.format("_inference/%s", inferenceId)); - var response = client().performRequest(request); - assertOK(response); + var response = ESRestTestCase.client().performRequest(request); + ESRestTestCase.assertOK(response); } protected Map getAll() throws IOException { var request = new Request("GET", "_inference/_all"); - var response = client().performRequest(request); - assertOK(response); - return entityAsMap(response); + var response = ESRestTestCase.client().performRequest(request); + ESRestTestCase.assertOK(response); + return ESRestTestCase.entityAsMap(response); } protected Map get(String inferenceId) throws IOException { var endpoint = Strings.format("_inference/%s", inferenceId); var request = new Request("GET", endpoint); - var response = client().performRequest(request); - assertOK(response); - return entityAsMap(response); + var response = ESRestTestCase.client().performRequest(request); + ESRestTestCase.assertOK(response); + return ESRestTestCase.entityAsMap(response); } protected Map get(TaskType taskType, String inferenceId) throws IOException { var endpoint = Strings.format("_inference/%s/%s", taskType, inferenceId); var request = new Request("GET", endpoint); - var response = client().performRequest(request); - assertOK(response); - return entityAsMap(response); + var response = 
ESRestTestCase.client().performRequest(request); + ESRestTestCase.assertOK(response); + return ESRestTestCase.entityAsMap(response); } protected Map inference(String inferenceId, TaskType taskType, String input) throws IOException { @@ -95,9 +96,9 @@ protected Map inference(String inferenceId, TaskType taskType, S var request = new Request("POST", endpoint); request.setJsonEntity("{\"input\": [" + '"' + input + '"' + "]}"); - var response = client().performRequest(request); - assertOK(response); - return entityAsMap(response); + var response = ESRestTestCase.client().performRequest(request); + ESRestTestCase.assertOK(response); + return ESRestTestCase.entityAsMap(response); } protected Map rerank(String inferenceId, List inputs, String query) throws IOException { @@ -116,17 +117,17 @@ protected Map rerank(String inferenceId, List inputs, St body.append("]}"); request.setJsonEntity(body.toString()); - var response = client().performRequest(request); - assertOK(response); - return entityAsMap(response); + var response = ESRestTestCase.client().performRequest(request); + ESRestTestCase.assertOK(response); + return ESRestTestCase.entityAsMap(response); } protected void put(String inferenceId, String modelConfig, TaskType taskType) throws IOException { String endpoint = Strings.format("_inference/%s/%s?error_trace", taskType, inferenceId); var request = new Request("PUT", endpoint); request.setJsonEntity(modelConfig); - var response = client().performRequest(request); - assertOKAndConsume(response); + var response = ESRestTestCase.client().performRequest(request); + ESRestTestCase.assertOKAndConsume(response); logger.warn("PUT response: {}", response.toString()); } @@ -139,6 +140,10 @@ protected static void assertOkOrCreated(Response response) throws IOException { } String responseStr = EntityUtils.toString(response.getEntity()); - assertThat(responseStr, response.getStatusLine().getStatusCode(), anyOf(equalTo(200), equalTo(201))); + ESTestCase.assertThat( + 
responseStr, + response.getStatusLine().getStatusCode(), + Matchers.anyOf(Matchers.equalTo(200), Matchers.equalTo(201)) + ); } } diff --git a/x-pack/plugin/inference/qa/mixed-cluster/src/javaRestTest/java/org/elasticsearch/xpack/inference/qa/mixed/Clusters.java b/x-pack/plugin/inference/qa/mixed-cluster/src/javaRestTest/java/org/elasticsearch/xpack/inference/qa/mixed/Clusters.java index 13e9ea164103f..870d64d7603e5 100644 --- a/x-pack/plugin/inference/qa/mixed-cluster/src/javaRestTest/java/org/elasticsearch/xpack/inference/qa/mixed/Clusters.java +++ b/x-pack/plugin/inference/qa/mixed-cluster/src/javaRestTest/java/org/elasticsearch/xpack/inference/qa/mixed/Clusters.java @@ -18,8 +18,6 @@ public static ElasticsearchCluster mixedVersionCluster() { .distribution(DistributionType.DEFAULT) .withNode(node -> node.version(oldVersion)) .withNode(node -> node.version(Version.CURRENT)) - .withNode(node -> node.version(oldVersion)) - .withNode(node -> node.version(Version.CURRENT)) .setting("xpack.security.enabled", "false") .setting("xpack.license.self_generated.type", "trial") .setting("cluster.routing.rebalance.enable", "none") // disable relocation until we have retry in ESQL From c6759aa59e04da79c62e6d82a5c053fa4a30f55c Mon Sep 17 00:00:00 2001 From: Max Hniebergall Date: Wed, 8 May 2024 15:51:05 -0400 Subject: [PATCH 06/15] spotless --- .../elasticsearch/xpack/inference/qa/mixed/BaseMixedIT.java | 4 ---- 1 file changed, 4 deletions(-) diff --git a/x-pack/plugin/inference/qa/mixed-cluster/src/javaRestTest/java/org/elasticsearch/xpack/inference/qa/mixed/BaseMixedIT.java b/x-pack/plugin/inference/qa/mixed-cluster/src/javaRestTest/java/org/elasticsearch/xpack/inference/qa/mixed/BaseMixedIT.java index 3aa7457e16e0d..6b1b5d38b4ced 100644 --- a/x-pack/plugin/inference/qa/mixed-cluster/src/javaRestTest/java/org/elasticsearch/xpack/inference/qa/mixed/BaseMixedIT.java +++ 
b/x-pack/plugin/inference/qa/mixed-cluster/src/javaRestTest/java/org/elasticsearch/xpack/inference/qa/mixed/BaseMixedIT.java @@ -27,10 +27,6 @@ import java.util.List; import java.util.Map; -import static org.elasticsearch.core.Strings.format; -import static org.hamcrest.Matchers.anyOf; -import static org.hamcrest.Matchers.equalTo; - public abstract class BaseMixedIT extends ESRestTestCase { @ClassRule public static ElasticsearchCluster cluster = ElasticsearchCluster.local() From 2e9d555d73ab410e56e9e8208fd48ad1f989cfc5 Mon Sep 17 00:00:00 2001 From: Max Hniebergall Date: Wed, 8 May 2024 16:28:49 -0400 Subject: [PATCH 07/15] remove blocked AzureOpenAI mixed IT --- .../qa/mixed/AzureOpenAiServiceMixedIT.java | 103 ------------------ 1 file changed, 103 deletions(-) delete mode 100644 x-pack/plugin/inference/qa/mixed-cluster/src/javaRestTest/java/org/elasticsearch/xpack/inference/qa/mixed/AzureOpenAiServiceMixedIT.java diff --git a/x-pack/plugin/inference/qa/mixed-cluster/src/javaRestTest/java/org/elasticsearch/xpack/inference/qa/mixed/AzureOpenAiServiceMixedIT.java b/x-pack/plugin/inference/qa/mixed-cluster/src/javaRestTest/java/org/elasticsearch/xpack/inference/qa/mixed/AzureOpenAiServiceMixedIT.java deleted file mode 100644 index 1ee1c63c11308..0000000000000 --- a/x-pack/plugin/inference/qa/mixed-cluster/src/javaRestTest/java/org/elasticsearch/xpack/inference/qa/mixed/AzureOpenAiServiceMixedIT.java +++ /dev/null @@ -1,103 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. 
- */ - -package org.elasticsearch.xpack.inference.qa.mixed; - -import org.apache.lucene.tests.util.LuceneTestCase; -import org.elasticsearch.Version; -import org.elasticsearch.common.Strings; -import org.elasticsearch.inference.TaskType; -import org.elasticsearch.test.http.MockResponse; -import org.elasticsearch.test.http.MockWebServer; - -import java.io.IOException; -import java.util.List; -import java.util.Map; - -import static org.elasticsearch.xpack.inference.qa.mixed.MixedClusterSpecIT.bwcVersion; -import static org.hamcrest.Matchers.empty; -import static org.hamcrest.Matchers.hasSize; -import static org.hamcrest.Matchers.not; - -public class AzureOpenAiServiceMixedIT extends BaseMixedIT { - - private static final String OPEN_AI_AZURE_EMBEDDINGS_ADDED = "8.14.0"; - - private static MockWebServer openAiEmbeddingsServer; - - // @BeforeClass test mute - public static void startWebServer() throws IOException { - openAiEmbeddingsServer = new MockWebServer(); - openAiEmbeddingsServer.start(); - } - - // @AfterClass test mute - public static void shutdown() { - openAiEmbeddingsServer.close(); - } - - @SuppressWarnings("unchecked") - @LuceneTestCase.AwaitsFix(bugUrl = "AzureOpenAI doesn't support webserver URL in tests") - public void testOpenAiEmbeddings() throws IOException { - var openAiEmbeddingsSupported = bwcVersion.onOrAfter(Version.fromString(OPEN_AI_AZURE_EMBEDDINGS_ADDED)); - assumeTrue("Azure OpenAI embedding service added in " + OPEN_AI_AZURE_EMBEDDINGS_ADDED, openAiEmbeddingsSupported); - - final String oldClusterId = "old-cluster-embeddings"; - - openAiEmbeddingsServer.enqueue(new MockResponse().setResponseCode(200).setBody(embeddingResponse())); - put(oldClusterId, embeddingConfig(getUrl(openAiEmbeddingsServer)), TaskType.TEXT_EMBEDDING); - - var configs = (List>) get(TaskType.TEXT_EMBEDDING, oldClusterId).get("endpoints"); - assertThat(configs, hasSize(1)); - assertEquals("azureopenai", configs.get(0).get("service")); - 
assertEmbeddingInference(oldClusterId); - } - - void assertEmbeddingInference(String inferenceId) throws IOException { - openAiEmbeddingsServer.enqueue(new MockResponse().setResponseCode(200).setBody(embeddingResponse())); - var inferenceMap = inference(inferenceId, TaskType.TEXT_EMBEDDING, "some text"); - assertThat(inferenceMap.entrySet(), not(empty())); - } - - private String embeddingConfig(String url) { - return Strings.format(""" - { - "service": "azureopenai", - "service_settings": { - "api_key": "XXXX", - "url": "%s", - "resource_name": "resource_name", - "deployment_id": "deployment_id", - "api_version": "2024-02-01" - } - } - """, url); - } - - static String embeddingResponse() { - return """ - { - "object": "list", - "data": [ - { - "object": "embedding", - "index": 0, - "embedding": [ - 0.0123, - -0.0123 - ] - } - ], - "model": "text-embedding-ada-002", - "usage": { - "prompt_tokens": 8, - "total_tokens": 8 - } - } - """; - } -} From 5fda71ba1c14a5a6966ac6a2986ce309e24fc140 Mon Sep 17 00:00:00 2001 From: Max Hniebergall Date: Thu, 9 May 2024 09:51:45 -0400 Subject: [PATCH 08/15] improvements from DK review --- .../xpack/inference/qa/mixed/Clusters.java | 1 - .../qa/mixed/CohereServiceMixedIT.java | 56 +++++-------------- .../qa/mixed/HuggingFaceServiceMixedIT.java | 16 +++--- .../qa/mixed/OpenAIServiceMixedIT.java | 16 +++--- .../application/OpenAiServiceUpgradeIT.java | 1 - 5 files changed, 31 insertions(+), 59 deletions(-) diff --git a/x-pack/plugin/inference/qa/mixed-cluster/src/javaRestTest/java/org/elasticsearch/xpack/inference/qa/mixed/Clusters.java b/x-pack/plugin/inference/qa/mixed-cluster/src/javaRestTest/java/org/elasticsearch/xpack/inference/qa/mixed/Clusters.java index 870d64d7603e5..d7c0a73c9de4e 100644 --- a/x-pack/plugin/inference/qa/mixed-cluster/src/javaRestTest/java/org/elasticsearch/xpack/inference/qa/mixed/Clusters.java +++ 
b/x-pack/plugin/inference/qa/mixed-cluster/src/javaRestTest/java/org/elasticsearch/xpack/inference/qa/mixed/Clusters.java @@ -20,7 +20,6 @@ public static ElasticsearchCluster mixedVersionCluster() { .withNode(node -> node.version(Version.CURRENT)) .setting("xpack.security.enabled", "false") .setting("xpack.license.self_generated.type", "trial") - .setting("cluster.routing.rebalance.enable", "none") // disable relocation until we have retry in ESQL .build(); } } diff --git a/x-pack/plugin/inference/qa/mixed-cluster/src/javaRestTest/java/org/elasticsearch/xpack/inference/qa/mixed/CohereServiceMixedIT.java b/x-pack/plugin/inference/qa/mixed-cluster/src/javaRestTest/java/org/elasticsearch/xpack/inference/qa/mixed/CohereServiceMixedIT.java index c157c8135ad7c..5412339586b51 100644 --- a/x-pack/plugin/inference/qa/mixed-cluster/src/javaRestTest/java/org/elasticsearch/xpack/inference/qa/mixed/CohereServiceMixedIT.java +++ b/x-pack/plugin/inference/qa/mixed-cluster/src/javaRestTest/java/org/elasticsearch/xpack/inference/qa/mixed/CohereServiceMixedIT.java @@ -57,17 +57,17 @@ public void testCohereEmbeddings() throws IOException { var embeddingsSupported = bwcVersion.onOrAfter(Version.fromString(COHERE_EMBEDDINGS_ADDED)); assumeTrue("Cohere embedding service added in " + COHERE_EMBEDDINGS_ADDED, embeddingsSupported); - final String oldClusterIdInt8 = "old-cluster-embeddings-int8"; - final String oldClusterIdFloat = "old-cluster-embeddings-float"; + final String inferenceIdInt8 = "mixed-cluster-cohere-embeddings-int8"; + final String inferenceIdFloat = "mixed-cluster-cohere-embeddings-float"; // queue a response as PUT will call the service cohereEmbeddingsServer.enqueue(new MockResponse().setResponseCode(200).setBody(embeddingResponseByte())); - put(oldClusterIdInt8, embeddingConfigInt8(getUrl(cohereEmbeddingsServer)), TaskType.TEXT_EMBEDDING); + put(inferenceIdInt8, embeddingConfigInt8(getUrl(cohereEmbeddingsServer)), TaskType.TEXT_EMBEDDING); // float model 
cohereEmbeddingsServer.enqueue(new MockResponse().setResponseCode(200).setBody(embeddingResponseFloat())); - put(oldClusterIdFloat, embeddingConfigFloat(getUrl(cohereEmbeddingsServer)), TaskType.TEXT_EMBEDDING); + put(inferenceIdFloat, embeddingConfigFloat(getUrl(cohereEmbeddingsServer)), TaskType.TEXT_EMBEDDING); - var configs = (List>) get(TaskType.TEXT_EMBEDDING, oldClusterIdInt8).get("endpoints"); + var configs = (List>) get(TaskType.TEXT_EMBEDDING, inferenceIdInt8).get("endpoints"); assertEquals("cohere", configs.get(0).get("service")); var serviceSettings = (Map) configs.get(0).get("service_settings"); assertThat(serviceSettings, hasEntry("model_id", "embed-english-light-v3.0")); @@ -75,41 +75,15 @@ public void testCohereEmbeddings() throws IOException { // An upgraded node will report the embedding type as byte, an old node int8 assertThat(embeddingType, Matchers.is(oneOf("int8", "byte"))); - configs = (List>) get(TaskType.TEXT_EMBEDDING, oldClusterIdFloat).get("endpoints"); + configs = (List>) get(TaskType.TEXT_EMBEDDING, inferenceIdFloat).get("endpoints"); serviceSettings = (Map) configs.get(0).get("service_settings"); assertThat(serviceSettings, hasEntry("embedding_type", "float")); - assertEmbeddingInference(oldClusterIdInt8, CohereEmbeddingType.BYTE); - assertEmbeddingInference(oldClusterIdFloat, CohereEmbeddingType.FLOAT); + assertEmbeddingInference(inferenceIdInt8, CohereEmbeddingType.BYTE); + assertEmbeddingInference(inferenceIdFloat, CohereEmbeddingType.FLOAT); - { - final String upgradedClusterIdInt8 = "upgraded-cluster-embeddings-int8"; - - cohereEmbeddingsServer.enqueue(new MockResponse().setResponseCode(200).setBody(embeddingResponseByte())); - put(upgradedClusterIdInt8, embeddingConfigInt8(getUrl(cohereEmbeddingsServer)), TaskType.TEXT_EMBEDDING); - - configs = (List>) get(TaskType.TEXT_EMBEDDING, upgradedClusterIdInt8).get("endpoints"); - serviceSettings = (Map) configs.get(0).get("service_settings"); - assertThat(serviceSettings, 
hasEntry("embedding_type", "byte")); // int8 rewritten to byte - - assertEmbeddingInference(upgradedClusterIdInt8, CohereEmbeddingType.INT8); - delete(upgradedClusterIdInt8); - } - { - final String upgradedClusterIdFloat = "upgraded-cluster-embeddings-float"; - cohereEmbeddingsServer.enqueue(new MockResponse().setResponseCode(200).setBody(embeddingResponseFloat())); - put(upgradedClusterIdFloat, embeddingConfigFloat(getUrl(cohereEmbeddingsServer)), TaskType.TEXT_EMBEDDING); - - configs = (List>) get(TaskType.TEXT_EMBEDDING, upgradedClusterIdFloat).get("endpoints"); - serviceSettings = (Map) configs.get(0).get("service_settings"); - assertThat(serviceSettings, hasEntry("embedding_type", "float")); - - assertEmbeddingInference(upgradedClusterIdFloat, CohereEmbeddingType.FLOAT); - delete(upgradedClusterIdFloat); - } - - delete(oldClusterIdFloat); - delete(oldClusterIdInt8); + delete(inferenceIdFloat); + delete(inferenceIdInt8); } @@ -132,12 +106,12 @@ public void testRerank() throws IOException { var rerankSupported = bwcVersion.onOrAfter(Version.fromString(COHERE_RERANK_ADDED)); assumeTrue("Cohere rerank service added in " + COHERE_RERANK_ADDED, rerankSupported); - final String oldClusterId = "old-cluster-rerank"; + final String inferenceId = "mixed-cluster-rerank"; - put(oldClusterId, rerankConfig(getUrl(cohereRerankServer)), TaskType.RERANK); - assertRerank(oldClusterId); + put(inferenceId, rerankConfig(getUrl(cohereRerankServer)), TaskType.RERANK); + assertRerank(inferenceId); - var configs = (List>) get(TaskType.RERANK, oldClusterId).get("endpoints"); + var configs = (List>) get(TaskType.RERANK, inferenceId).get("endpoints"); assertThat(configs, hasSize(1)); assertEquals("cohere", configs.get(0).get("service")); var serviceSettings = (Map) configs.get(0).get("service_settings"); @@ -145,7 +119,7 @@ public void testRerank() throws IOException { var taskSettings = (Map) configs.get(0).get("task_settings"); assertThat(taskSettings, hasEntry("top_n", 3)); - 
assertRerank(oldClusterId); + assertRerank(inferenceId); } diff --git a/x-pack/plugin/inference/qa/mixed-cluster/src/javaRestTest/java/org/elasticsearch/xpack/inference/qa/mixed/HuggingFaceServiceMixedIT.java b/x-pack/plugin/inference/qa/mixed-cluster/src/javaRestTest/java/org/elasticsearch/xpack/inference/qa/mixed/HuggingFaceServiceMixedIT.java index e2de60d7393dd..9eed1b8a7fcd3 100644 --- a/x-pack/plugin/inference/qa/mixed-cluster/src/javaRestTest/java/org/elasticsearch/xpack/inference/qa/mixed/HuggingFaceServiceMixedIT.java +++ b/x-pack/plugin/inference/qa/mixed-cluster/src/javaRestTest/java/org/elasticsearch/xpack/inference/qa/mixed/HuggingFaceServiceMixedIT.java @@ -50,14 +50,14 @@ public void testHFEmbeddings() throws IOException { var embeddingsSupported = bwcVersion.onOrAfter(Version.fromString(HF_EMBEDDINGS_ADDED)); assumeTrue("Hugging Face embedding service added in " + HF_EMBEDDINGS_ADDED, embeddingsSupported); - final String oldClusterId = "old-cluster-embeddings"; + final String inferenceId = "mixed-cluster-embeddings"; embeddingsServer.enqueue(new MockResponse().setResponseCode(200).setBody(embeddingResponse())); - put(oldClusterId, embeddingConfig(getUrl(embeddingsServer)), TaskType.TEXT_EMBEDDING); - var configs = (List>) get(TaskType.TEXT_EMBEDDING, oldClusterId).get("endpoints"); + put(inferenceId, embeddingConfig(getUrl(embeddingsServer)), TaskType.TEXT_EMBEDDING); + var configs = (List>) get(TaskType.TEXT_EMBEDDING, inferenceId).get("endpoints"); assertThat(configs, hasSize(1)); assertEquals("hugging_face", configs.get(0).get("service")); - assertEmbeddingInference(oldClusterId); + assertEmbeddingInference(inferenceId); } void assertEmbeddingInference(String inferenceId) throws IOException { @@ -71,15 +71,15 @@ public void testElser() throws IOException { var supported = bwcVersion.onOrAfter(Version.fromString(HF_ELSER_ADDED)); assumeTrue("HF elser service added in " + HF_ELSER_ADDED, supported); - final String oldClusterId = 
"old-cluster-elser"; + final String inferenceId = "mixed-cluster-elser"; final String upgradedClusterId = "upgraded-cluster-elser"; - put(oldClusterId, elserConfig(getUrl(elserServer)), TaskType.SPARSE_EMBEDDING); + put(inferenceId, elserConfig(getUrl(elserServer)), TaskType.SPARSE_EMBEDDING); - var configs = (List>) get(TaskType.SPARSE_EMBEDDING, oldClusterId).get("endpoints"); + var configs = (List>) get(TaskType.SPARSE_EMBEDDING, inferenceId).get("endpoints"); assertThat(configs, hasSize(1)); assertEquals("hugging_face", configs.get(0).get("service")); - assertElser(oldClusterId); + assertElser(inferenceId); } private void assertElser(String inferenceId) throws IOException { diff --git a/x-pack/plugin/inference/qa/mixed-cluster/src/javaRestTest/java/org/elasticsearch/xpack/inference/qa/mixed/OpenAIServiceMixedIT.java b/x-pack/plugin/inference/qa/mixed-cluster/src/javaRestTest/java/org/elasticsearch/xpack/inference/qa/mixed/OpenAIServiceMixedIT.java index 013a2bf0d4784..edf0b97f40c93 100644 --- a/x-pack/plugin/inference/qa/mixed-cluster/src/javaRestTest/java/org/elasticsearch/xpack/inference/qa/mixed/OpenAIServiceMixedIT.java +++ b/x-pack/plugin/inference/qa/mixed-cluster/src/javaRestTest/java/org/elasticsearch/xpack/inference/qa/mixed/OpenAIServiceMixedIT.java @@ -54,14 +54,14 @@ public void testOpenAiEmbeddings() throws IOException { var openAiEmbeddingsSupported = bwcVersion.onOrAfter(Version.fromString(OPEN_AI_EMBEDDINGS_ADDED)); assumeTrue("OpenAI embedding service added in " + OPEN_AI_EMBEDDINGS_ADDED, openAiEmbeddingsSupported); - final String oldClusterId = "old-cluster-embeddings"; + final String inferenceId = "mixed-cluster-embeddings"; String inferenceConfig = oldClusterVersionCompatibleEmbeddingConfig(); // queue a response as PUT will call the service openAiEmbeddingsServer.enqueue(new MockResponse().setResponseCode(200).setBody(embeddingResponse())); - put(oldClusterId, inferenceConfig, TaskType.TEXT_EMBEDDING); + put(inferenceId, inferenceConfig, 
TaskType.TEXT_EMBEDDING); - var configs = (List>) get(TaskType.TEXT_EMBEDDING, oldClusterId).get("endpoints"); + var configs = (List>) get(TaskType.TEXT_EMBEDDING, inferenceId).get("endpoints"); assertThat(configs, hasSize(1)); assertEquals("openai", configs.get(0).get("service")); var serviceSettings = (Map) configs.get(0).get("service_settings"); @@ -69,7 +69,7 @@ public void testOpenAiEmbeddings() throws IOException { var modelIdFound = serviceSettings.containsKey("model_id") || taskSettings.containsKey("model_id"); assertTrue("model_id not found in config: " + configs.toString(), modelIdFound); - assertEmbeddingInference(oldClusterId); + assertEmbeddingInference(inferenceId); } void assertEmbeddingInference(String inferenceId) throws IOException { @@ -83,12 +83,12 @@ public void testOpenAiCompletions() throws IOException { var openAiEmbeddingsSupported = bwcVersion.onOrAfter(Version.fromString(OPEN_AI_EMBEDDINGS_ADDED)); assumeTrue("OpenAI completions service added in " + OPEN_AI_COMPLETIONS_ADDED, openAiEmbeddingsSupported); - final String oldClusterId = "old-cluster-completions"; + final String inferenceId = "mixed-cluster-completions"; final String upgradedClusterId = "upgraded-cluster-completions"; - put(oldClusterId, chatCompletionsConfig(getUrl(openAiChatCompletionsServer)), TaskType.COMPLETION); + put(inferenceId, chatCompletionsConfig(getUrl(openAiChatCompletionsServer)), TaskType.COMPLETION); - var configsMap = get(TaskType.COMPLETION, oldClusterId); + var configsMap = get(TaskType.COMPLETION, inferenceId); logger.warn("Configs: {}", configsMap); var configs = (List>) configsMap.get("endpoints"); assertThat(configs, hasSize(1)); @@ -98,7 +98,7 @@ public void testOpenAiCompletions() throws IOException { var taskSettings = (Map) configs.get(0).get("task_settings"); assertThat(taskSettings.keySet(), empty()); - assertCompletionInference(oldClusterId); + assertCompletionInference(inferenceId); } void assertCompletionInference(String inferenceId) throws 
IOException { diff --git a/x-pack/plugin/inference/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/xpack/application/OpenAiServiceUpgradeIT.java b/x-pack/plugin/inference/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/xpack/application/OpenAiServiceUpgradeIT.java index 82bba94cc2607..4e8e1c845b070 100644 --- a/x-pack/plugin/inference/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/xpack/application/OpenAiServiceUpgradeIT.java +++ b/x-pack/plugin/inference/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/xpack/application/OpenAiServiceUpgradeIT.java @@ -120,7 +120,6 @@ public void testOpenAiCompletions() throws IOException { final String upgradedClusterId = "upgraded-cluster-completions"; if (isOldCluster()) { - // TODO why is put only in old cluster? put(oldClusterId, chatCompletionsConfig(getUrl(openAiChatCompletionsServer)), TaskType.COMPLETION); var configs = (List>) get(TaskType.COMPLETION, oldClusterId).get("models"); From fca46fd2b6253accc010a2e2a8bf05edfff5ea9b Mon Sep 17 00:00:00 2001 From: Max Hniebergall Date: Fri, 10 May 2024 16:59:42 -0400 Subject: [PATCH 09/15] temp for testing --- build-tools-internal/version.properties | 87 +++++++++---------- .../main/java/org/elasticsearch/Version.java | 3 +- ...aseMixedIT.java => BaseMixedTestCase.java} | 22 +---- .../qa/mixed/CohereServiceMixedIT.java | 5 +- .../qa/mixed/HuggingFaceServiceMixedIT.java | 7 +- ...cIT.java => MixedClusterSpecTestCase.java} | 3 +- .../qa/mixed/OpenAIServiceMixedIT.java | 4 +- x-pack/plugin/ml/build.gradle | 12 +-- 8 files changed, 61 insertions(+), 82 deletions(-) rename x-pack/plugin/inference/qa/mixed-cluster/src/javaRestTest/java/org/elasticsearch/xpack/inference/qa/mixed/{BaseMixedIT.java => BaseMixedTestCase.java} (88%) rename x-pack/plugin/inference/qa/mixed-cluster/src/javaRestTest/java/org/elasticsearch/xpack/inference/qa/mixed/{MixedClusterSpecIT.java => MixedClusterSpecTestCase.java} (92%) diff --git 
a/build-tools-internal/version.properties b/build-tools-internal/version.properties index 044f6c07c756e..4b085cacb44d4 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,60 +1,51 @@ -elasticsearch = 8.15.0 -lucene = 9.10.0 - -bundled_jdk_vendor = openjdk -bundled_jdk = 21.0.2+13@f2283984656d49d69e91c558476027ac +elasticsearch=8.16.0 +lucene=9.10.0 +bundled_jdk_vendor=openjdk +bundled_jdk=21.0.2+13@f2283984656d49d69e91c558476027ac # optional dependencies -spatial4j = 0.7 -jts = 1.15.0 -jackson = 2.15.0 -snakeyaml = 2.0 -icu4j = 68.2 -supercsv = 2.4.0 -log4j = 2.19.0 -slf4j = 2.0.6 -ecsLogging = 1.2.0 -jna = 5.12.1 -netty = 4.1.109.Final -commons_lang3 = 3.9 -google_oauth_client = 1.34.1 - -antlr4 = 4.13.1 +spatial4j=0.7 +jts=1.15.0 +jackson=2.15.0 +snakeyaml=2.0 +icu4j=68.2 +supercsv=2.4.0 +log4j=2.19.0 +slf4j=2.0.6 +ecsLogging=1.2.0 +jna=5.12.1 +netty=4.1.109.Final +commons_lang3=3.9 +google_oauth_client=1.34.1 +antlr4=4.13.1 # bouncy castle version for non-fips. 
fips jars use a different version bouncycastle=1.78.1 # used by security and idp (need to be in sync due to cross-dependency in testing) -opensaml = 4.3.0 - +opensaml=4.3.0 # client dependencies -httpclient = 4.5.14 -httpcore = 4.4.13 -httpasyncclient = 4.1.5 -commonslogging = 1.2 -commonscodec = 1.15 -protobuf = 3.21.9 - +httpclient=4.5.14 +httpcore=4.4.13 +httpasyncclient=4.1.5 +commonslogging=1.2 +commonscodec=1.15 +protobuf=3.21.9 # test dependencies -randomizedrunner = 2.8.0 -junit = 4.13.2 -junit5 = 5.7.1 -hamcrest = 2.1 -mocksocket = 1.2 - +randomizedrunner=2.8.0 +junit=4.13.2 +junit5=5.7.1 +hamcrest=2.1 +mocksocket=1.2 # test container dependencies -testcontainer = 1.19.2 -dockerJava = 3.3.4 -ductTape = 1.0.8 -commonsCompress = 1.24.0 - +testcontainer=1.19.2 +dockerJava=3.3.4 +ductTape=1.0.8 +commonsCompress=1.24.0 # packer caching build logic -reflections = 0.10.2 - +reflections=0.10.2 # benchmark dependencies -jmh = 1.26 - +jmh=1.26 # test dependencies # when updating this version, also update :qa:evil-tests -jimfs = 1.3.0 -jimfs_guava = 32.1.1-jre - +jimfs=1.3.0 +jimfs_guava=32.1.1-jre # test framework -networknt_json_schema_validator = 1.0.48 +networknt_json_schema_validator=1.0.48 diff --git a/server/src/main/java/org/elasticsearch/Version.java b/server/src/main/java/org/elasticsearch/Version.java index a2e04d0bf3d48..9e79baa2b2a87 100644 --- a/server/src/main/java/org/elasticsearch/Version.java +++ b/server/src/main/java/org/elasticsearch/Version.java @@ -176,7 +176,8 @@ public class Version implements VersionId, ToXContentFragment { public static final Version V_8_13_4 = new Version(8_13_04_99); public static final Version V_8_14_0 = new Version(8_14_00_99); public static final Version V_8_15_0 = new Version(8_15_00_99); - public static final Version CURRENT = V_8_15_0; + public static final Version V_8_16_0 = new Version(8_16_00_99); + public static final Version CURRENT = V_8_16_0; private static final NavigableMap VERSION_IDS; private static final 
Map VERSION_STRINGS; diff --git a/x-pack/plugin/inference/qa/mixed-cluster/src/javaRestTest/java/org/elasticsearch/xpack/inference/qa/mixed/BaseMixedIT.java b/x-pack/plugin/inference/qa/mixed-cluster/src/javaRestTest/java/org/elasticsearch/xpack/inference/qa/mixed/BaseMixedTestCase.java similarity index 88% rename from x-pack/plugin/inference/qa/mixed-cluster/src/javaRestTest/java/org/elasticsearch/xpack/inference/qa/mixed/BaseMixedIT.java rename to x-pack/plugin/inference/qa/mixed-cluster/src/javaRestTest/java/org/elasticsearch/xpack/inference/qa/mixed/BaseMixedTestCase.java index 6b1b5d38b4ced..2c47578f466e3 100644 --- a/x-pack/plugin/inference/qa/mixed-cluster/src/javaRestTest/java/org/elasticsearch/xpack/inference/qa/mixed/BaseMixedIT.java +++ b/x-pack/plugin/inference/qa/mixed-cluster/src/javaRestTest/java/org/elasticsearch/xpack/inference/qa/mixed/BaseMixedTestCase.java @@ -16,32 +16,15 @@ import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.inference.TaskType; import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.test.cluster.ElasticsearchCluster; -import org.elasticsearch.test.cluster.local.distribution.DistributionType; import org.elasticsearch.test.http.MockWebServer; import org.elasticsearch.test.rest.ESRestTestCase; import org.hamcrest.Matchers; -import org.junit.ClassRule; import java.io.IOException; import java.util.List; import java.util.Map; -public abstract class BaseMixedIT extends ESRestTestCase { - @ClassRule - public static ElasticsearchCluster cluster = ElasticsearchCluster.local() - .distribution(DistributionType.DEFAULT) - .setting("xpack.license.self_generated.type", "trial") - .setting("xpack.security.enabled", "true") - .plugin("inference-service-test") - .user("x_pack_rest_user", "x-pack-test-password") - .build(); - - @Override - protected String getTestRestCluster() { - return cluster.getHttpAddresses(); - } - +public abstract class BaseMixedTestCase extends MixedClusterSpecTestCase { 
protected static String getUrl(MockWebServer webServer) { return Strings.format("http://%s:%s", webServer.getHostName(), webServer.getPort()); } @@ -123,8 +106,9 @@ protected void put(String inferenceId, String modelConfig, TaskType taskType) th var request = new Request("PUT", endpoint); request.setJsonEntity(modelConfig); var response = ESRestTestCase.client().performRequest(request); - ESRestTestCase.assertOKAndConsume(response); logger.warn("PUT response: {}", response.toString()); + System.out.println("PUT response: " + response.toString()); + ESRestTestCase.assertOKAndConsume(response); } protected static void assertOkOrCreated(Response response) throws IOException { diff --git a/x-pack/plugin/inference/qa/mixed-cluster/src/javaRestTest/java/org/elasticsearch/xpack/inference/qa/mixed/CohereServiceMixedIT.java b/x-pack/plugin/inference/qa/mixed-cluster/src/javaRestTest/java/org/elasticsearch/xpack/inference/qa/mixed/CohereServiceMixedIT.java index 5412339586b51..400f96da02b74 100644 --- a/x-pack/plugin/inference/qa/mixed-cluster/src/javaRestTest/java/org/elasticsearch/xpack/inference/qa/mixed/CohereServiceMixedIT.java +++ b/x-pack/plugin/inference/qa/mixed-cluster/src/javaRestTest/java/org/elasticsearch/xpack/inference/qa/mixed/CohereServiceMixedIT.java @@ -21,14 +21,14 @@ import java.util.List; import java.util.Map; -import static org.elasticsearch.xpack.inference.qa.mixed.MixedClusterSpecIT.bwcVersion; +import static org.elasticsearch.xpack.inference.qa.mixed.MixedClusterSpecTestCase.bwcVersion; import static org.hamcrest.Matchers.empty; import static org.hamcrest.Matchers.hasEntry; import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.not; import static org.hamcrest.Matchers.oneOf; -public class CohereServiceMixedIT extends BaseMixedIT { +public class CohereServiceMixedIT extends BaseMixedTestCase { private static final String COHERE_EMBEDDINGS_ADDED = "8.13.0"; private static final String COHERE_RERANK_ADDED = "8.14.0"; @@ -63,6 
+63,7 @@ public void testCohereEmbeddings() throws IOException { // queue a response as PUT will call the service cohereEmbeddingsServer.enqueue(new MockResponse().setResponseCode(200).setBody(embeddingResponseByte())); put(inferenceIdInt8, embeddingConfigInt8(getUrl(cohereEmbeddingsServer)), TaskType.TEXT_EMBEDDING); + // float model cohereEmbeddingsServer.enqueue(new MockResponse().setResponseCode(200).setBody(embeddingResponseFloat())); put(inferenceIdFloat, embeddingConfigFloat(getUrl(cohereEmbeddingsServer)), TaskType.TEXT_EMBEDDING); diff --git a/x-pack/plugin/inference/qa/mixed-cluster/src/javaRestTest/java/org/elasticsearch/xpack/inference/qa/mixed/HuggingFaceServiceMixedIT.java b/x-pack/plugin/inference/qa/mixed-cluster/src/javaRestTest/java/org/elasticsearch/xpack/inference/qa/mixed/HuggingFaceServiceMixedIT.java index 9eed1b8a7fcd3..ce77481eea8ba 100644 --- a/x-pack/plugin/inference/qa/mixed-cluster/src/javaRestTest/java/org/elasticsearch/xpack/inference/qa/mixed/HuggingFaceServiceMixedIT.java +++ b/x-pack/plugin/inference/qa/mixed-cluster/src/javaRestTest/java/org/elasticsearch/xpack/inference/qa/mixed/HuggingFaceServiceMixedIT.java @@ -19,10 +19,11 @@ import java.util.List; import java.util.Map; -import static org.elasticsearch.xpack.inference.qa.mixed.MixedClusterSpecIT.bwcVersion; -import static org.hamcrest.Matchers.*; +import static org.hamcrest.Matchers.empty; +import static org.hamcrest.Matchers.hasSize; +import static org.hamcrest.Matchers.not; -public class HuggingFaceServiceMixedIT extends BaseMixedIT { +public class HuggingFaceServiceMixedIT extends BaseMixedTestCase { private static final String HF_EMBEDDINGS_ADDED = "8.12.0"; private static final String HF_ELSER_ADDED = "8.12.0"; diff --git a/x-pack/plugin/inference/qa/mixed-cluster/src/javaRestTest/java/org/elasticsearch/xpack/inference/qa/mixed/MixedClusterSpecIT.java 
b/x-pack/plugin/inference/qa/mixed-cluster/src/javaRestTest/java/org/elasticsearch/xpack/inference/qa/mixed/MixedClusterSpecTestCase.java similarity index 92% rename from x-pack/plugin/inference/qa/mixed-cluster/src/javaRestTest/java/org/elasticsearch/xpack/inference/qa/mixed/MixedClusterSpecIT.java rename to x-pack/plugin/inference/qa/mixed-cluster/src/javaRestTest/java/org/elasticsearch/xpack/inference/qa/mixed/MixedClusterSpecTestCase.java index f2dcf80d5390e..35b25eb0bf7ea 100644 --- a/x-pack/plugin/inference/qa/mixed-cluster/src/javaRestTest/java/org/elasticsearch/xpack/inference/qa/mixed/MixedClusterSpecIT.java +++ b/x-pack/plugin/inference/qa/mixed-cluster/src/javaRestTest/java/org/elasticsearch/xpack/inference/qa/mixed/MixedClusterSpecTestCase.java @@ -10,12 +10,13 @@ import org.elasticsearch.Version; import org.elasticsearch.features.NodeFeature; import org.elasticsearch.test.cluster.ElasticsearchCluster; +import org.elasticsearch.test.rest.ESRestTestCase; import org.elasticsearch.test.rest.TestFeatureService; import org.junit.AfterClass; import org.junit.Before; import org.junit.ClassRule; -public class MixedClusterSpecIT extends BaseMixedIT { +public abstract class MixedClusterSpecTestCase extends ESRestTestCase { @ClassRule public static ElasticsearchCluster cluster = Clusters.mixedVersionCluster(); diff --git a/x-pack/plugin/inference/qa/mixed-cluster/src/javaRestTest/java/org/elasticsearch/xpack/inference/qa/mixed/OpenAIServiceMixedIT.java b/x-pack/plugin/inference/qa/mixed-cluster/src/javaRestTest/java/org/elasticsearch/xpack/inference/qa/mixed/OpenAIServiceMixedIT.java index edf0b97f40c93..9f4fd136c33ae 100644 --- a/x-pack/plugin/inference/qa/mixed-cluster/src/javaRestTest/java/org/elasticsearch/xpack/inference/qa/mixed/OpenAIServiceMixedIT.java +++ b/x-pack/plugin/inference/qa/mixed-cluster/src/javaRestTest/java/org/elasticsearch/xpack/inference/qa/mixed/OpenAIServiceMixedIT.java @@ -19,13 +19,13 @@ import java.util.List; import java.util.Map; 
-import static org.elasticsearch.xpack.inference.qa.mixed.MixedClusterSpecIT.bwcVersion; +import static org.elasticsearch.xpack.inference.qa.mixed.MixedClusterSpecTestCase.bwcVersion; import static org.hamcrest.Matchers.empty; import static org.hamcrest.Matchers.hasEntry; import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.not; -public class OpenAIServiceMixedIT extends BaseMixedIT { +public class OpenAIServiceMixedIT extends BaseMixedTestCase { private static final String OPEN_AI_EMBEDDINGS_ADDED = "8.12.0"; private static final String OPEN_AI_EMBEDDINGS_MODEL_SETTING_MOVED = "8.13.0"; diff --git a/x-pack/plugin/ml/build.gradle b/x-pack/plugin/ml/build.gradle index 26f5ea053771c..bfb298380b215 100644 --- a/x-pack/plugin/ml/build.gradle +++ b/x-pack/plugin/ml/build.gradle @@ -68,9 +68,9 @@ esplugin.bundleSpec.from { esplugin.bundleSpec.exclude 'platform/licenses/**' ["bundlePlugin", "explodedBundlePlugin"].each { bundleTaskName -> - tasks.named(bundleTaskName).configure { - dependsOn configurations.nativeBundle - } + tasks.named(bundleTaskName).configure { + dependsOn configurations.nativeBundle + } } dependencies { @@ -100,10 +100,10 @@ dependencies { api "org.apache.lucene:lucene-analysis-icu:${versions.lucene}" api "org.apache.lucene:lucene-analysis-kuromoji:${versions.lucene}" implementation 'org.ojalgo:ojalgo:51.2.0' - nativeBundle("org.elasticsearch.ml:ml-cpp:${project.version}:deps@zip") { + nativeBundle("org.elasticsearch.ml:ml-cpp:8.15.0-SNAPSHOT:deps@zip") { changing = true } - nativeBundle("org.elasticsearch.ml:ml-cpp:${project.version}:nodeps@zip") { + nativeBundle("org.elasticsearch.ml:ml-cpp:8.15.0-SNAPSHOT:nodeps@zip") { changing = true } testImplementation 'org.ini4j:ini4j:0.5.2' @@ -136,7 +136,7 @@ tasks.named('generateNotice').configure { inputs.dir("${project.buildDir}/extractedNativeLicenses/platform/licenses") .withPropertyName('licensingDir') .withPathSensitivity(PathSensitivity.RELATIVE) - 
licenseDirs.add(tasks.named("extractNativeLicenses").map {new File(it.destinationDir, "platform/licenses") }) + licenseDirs.add(tasks.named("extractNativeLicenses").map { new File(it.destinationDir, "platform/licenses") }) } tasks.named("dependencyLicenses").configure { From 57cfdc941962203af307dc0dac4235d198d71feb Mon Sep 17 00:00:00 2001 From: Max Hniebergall Date: Mon, 13 May 2024 12:50:44 -0400 Subject: [PATCH 10/15] refactoring and documentation --- TESTING.asciidoc | 419 ++++++++---------- .../qa/mixed/MixedClusterSpecTestCase.java | 2 +- .../{Clusters.java => MixedClustersSpec.java} | 2 +- 3 files changed, 176 insertions(+), 247 deletions(-) rename x-pack/plugin/inference/qa/mixed-cluster/src/javaRestTest/java/org/elasticsearch/xpack/inference/qa/mixed/{Clusters.java => MixedClustersSpec.java} (96%) diff --git a/TESTING.asciidoc b/TESTING.asciidoc index 96f94755a2758..608901fba75c8 100644 --- a/TESTING.asciidoc +++ b/TESTING.asciidoc @@ -2,11 +2,9 @@ = Testing [partintro] - -Elasticsearch uses JUnit for testing. It also generated random inputs into -tests, either using a random seed, or one that is set via a system -property. The following is a cheatsheet of options for running the -Elasticsearch tests. +Elasticsearch uses JUnit for testing. +It also generates random inputs into tests, either using a random seed, or one that is set via a system property. +The following is a cheatsheet of options for running the Elasticsearch tests. 
== Creating packages @@ -16,8 +14,7 @@ To build a distribution for your local OS and print its output location upon com ./gradlew localDistro ----------------------------- -To create a platform-specific build, use the following depending on your -operating system: +To create a platform-specific build, use the following depending on your operating system: ----------------------------- ./gradlew :distribution:archives:linux-tar:assemble @@ -31,13 +28,11 @@ You can build a Docker image with: ./gradlew build(Aarch64)DockerImage ----------------------------- -Note: you almost certainly don't want to run `./gradlew assemble` as this -will attempt build every single Elasticsearch distribution. +Note: you almost certainly don't want to run `./gradlew assemble` as this will attempt to build every single Elasticsearch distribution. === Running Elasticsearch from a checkout -In order to run Elasticsearch from source without building a package, you can -run it using Gradle: +In order to run Elasticsearch from source without building a package, you can run it using Gradle: ------------------------------------- ./gradlew run ------------------------------------- ==== Launching and debugging from an IDE -If you want to run and debug Elasticsearch from your IDE, the `./gradlew run` task -supports a remote debugging option. Run the following from your terminal: +If you want to run and debug Elasticsearch from your IDE, the `./gradlew run` task supports a remote debugging option. +Run the following from your terminal: --------------------------------------------------------------------------- ./gradlew run --debug-jvm --------------------------------------------------------------------------- -Next start the "Debug Elasticsearch" run configuration in IntelliJ. This will enable the IDE to connect to the process and allow debug functionality. - +Next start the "Debug Elasticsearch" run configuration in IntelliJ. 
+This will enable the IDE to connect to the process and allow debug functionality. As such the IDE needs to be instructed to listen for connections on the debug port. -Since we might run multiple JVMs as part of configuring and starting the cluster it's -recommended to configure the IDE to initiate multiple listening attempts. In case of IntelliJ, this option -is called "Auto restart" and needs to be checked. +Since we might run multiple JVMs as part of configuring and starting the cluster it's recommended to configure the IDE to initiate multiple listening attempts. +In case of IntelliJ, this option is called "Auto restart" and needs to be checked. NOTE: If you have imported the project into IntelliJ according to the instructions in -link:/CONTRIBUTING.md#importing-the-project-into-intellij-idea[CONTRIBUTING.md] then a debug run configuration -named "Debug Elasticsearch" will be created for you and configured appropriately. +link:/CONTRIBUTING.md#importing-the-project-into-intellij-idea[CONTRIBUTING.md] then a debug run configuration named "Debug Elasticsearch" will be created for you and configured appropriately. ===== Debugging the CLI launcher -The gradle task does not start the Elasticsearch server process directly; like in the Elasticsearch distribution, -the job of starting the server process is delegated to a launcher CLI tool. If you need to debug the launcher itself, -add the following option to the `run` task: +The gradle task does not start the Elasticsearch server process directly; like in the Elasticsearch distribution, the job of starting the server process is delegated to a launcher CLI tool. +If you need to debug the launcher itself, add the following option to the `run` task: + --------------------------------------------------------------------------- ./gradlew run --debug-cli-jvm --------------------------------------------------------------------------- -This option can be specified in isolation or combined with `--debug-jvm`. 
Since the CLI launcher lifespan may overlap -with the server process lifespan, the CLI launcher process will be started on a different port (5107 for the first node, -5108 and following for additional cluster nodes). + +This option can be specified in isolation or combined with `--debug-jvm`. +Since the CLI launcher lifespan may overlap with the server process lifespan, the CLI launcher process will be started on a different port (5107 for the first node, 5108 and following for additional cluster nodes). As with the `--debug-jvm` command, the IDE needs to be instructed to listen for connections on the debug port. -You need to configure and start an appropriate Remote JVM Debug configuration, e.g. by cloning and editing -the "Debug Elasticsearch" run configuration to point to the correct debug port. +You need to configure and start an appropriate Remote JVM Debug configuration, e.g. by cloning and editing the "Debug Elasticsearch" run configuration to point to the correct debug port. ==== Disabling assertions -When running Elasticsearch with `./gradlew run`, assertions are enabled by -default. To disable them, add the following command line option: +When running Elasticsearch with `./gradlew run`, assertions are enabled by default. +To disable them, add the following command line option: ------------------------- -Dtests.jvm.argline="-da -dsa" @@ -111,8 +103,7 @@ In order to start a node with a trial license execute the following command: ./gradlew run -Drun.license_type=trial ------------------------------------- -This enables security and other paid features and adds a superuser with the username: `elastic-admin` and -password: `elastic-password`. +This enables security and other paid features and adds a superuser with the username: `elastic-admin` and password: `elastic-password`. ==== Other useful arguments @@ -125,22 +116,21 @@ password: `elastic-password`. 
- In order to set an Elasticsearch setting, provide a setting with the following prefix: `-Dtests.es.` - In order to pass a JVM setting, e.g. to disable assertions: `-Dtests.jvm.argline="-da"` - In order to use HTTPS: ./gradlew run --https -- In order to start a mock logging APM server on port 9999 and configure ES cluster to connect to it, -use `./gradlew run --with-apm-server` +- In order to start a mock logging APM server on port 9999 and configure ES cluster to connect to it, use `./gradlew run --with-apm-server` ==== Customizing the test cluster for ./gradlew run You may need to customize the cluster configuration for the ./gradlew run task. The settings can be set via the command line, but other options require updating the task itself. You can simply find the task in the source code and configure it there. -(The task is currently defined in build-tools-internal/src/main/groovy/elasticsearch.run.gradle) -However, this requires modifying a source controlled file and is subject to accidental commits. +(The task is currently defined in build-tools-internal/src/main/groovy/elasticsearch.run.gradle) However, this requires modifying a source controlled file and is subject to accidental commits. Alternatively, you can use a Gradle init script to inject custom build logic with the -I flag to configure this task locally. For example: To use a custom certificate for HTTPS with `./gradlew run`, you can do the following. 
Create a file (for example ~/custom-run.gradle) with the following contents: + ------------------------------------- rootProject { if(project.name == 'elasticsearch') { @@ -152,7 +142,9 @@ rootProject { } } ------------------------------------- + Now tell Gradle to use this init script: + ------------------------------------- ./gradlew run -I ~/custom-run.gradle \ -Dtests.es.xpack.security.http.ssl.enabled=true \ @@ -160,16 +152,15 @@ Now tell Gradle to use this init script: ------------------------------------- Now the http.p12 file will be placed in the config directory of the running cluster and available for use. -Assuming you have the http.ssl.keystore setup correctly, you can now use HTTPS with ./gradlew run without the risk -of accidentally committing your local configurations. +Assuming you have the http.ssl.keystore setup correctly, you can now use HTTPS with ./gradlew run without the risk of accidentally committing your local configurations. ==== Multiple nodes in the test cluster for ./gradlew run -Another desired customization for ./gradlew run might be to run multiple -nodes with different setting for each node. For example, you may want to debug a coordinating only node that fans out -to one or more data nodes. To do this, increase the numberOfNodes and add specific configuration for each -of the nodes. For example, the following will instruct the first node (:9200) to be a coordinating only node, -and all other nodes to be master, data_hot, data_content nodes. +Another desired customization for ./gradlew run might be to run multiple nodes with different settings for each node. +For example, you may want to debug a coordinating only node that fans out to one or more data nodes. +To do this, increase the numberOfNodes and add specific configuration for each of the nodes. +For example, the following will instruct the first node (:9200) to be a coordinating only node, and all other nodes to be master, data_hot, data_content nodes. 
+ ------------------------------------- testClusters.register("runTask") { ... @@ -185,21 +176,22 @@ testClusters.register("runTask") { ------------------------------------- You can also place this config in custom init script (see above) to avoid accidental commits. -If you are passing in the --debug-jvm flag with multiple nodes, you will need multiple remote debuggers running. One -for each node listening at port 5007, 5008, 5009, and so on. Ensure that each remote debugger has auto restart enabled. +If you are passing in the --debug-jvm flag with multiple nodes, you will need multiple remote debuggers running. +One for each node listening at port 5007, 5008, 5009, and so on. +Ensure that each remote debugger has auto restart enabled. ==== Manually testing cross cluster search Use ./gradlew run-ccs to launch 2 clusters wired together for the purposes of cross cluster search. -For example send a search request "my_remote_cluster:*/_search" to the querying cluster (:9200) to query data -in the fulfilling cluster. +For example send a search request "my_remote_cluster:*/_search" to the querying cluster (:9200) to query data in the fulfilling cluster. -If you are passing in the --debug-jvm flag, you will need two remote debuggers running. One at port 5007 and another -one at port 5008. Ensure that each remote debugger has auto restart enabled. +If you are passing in the --debug-jvm flag, you will need two remote debuggers running. +One at port 5007 and another one at port 5008. Ensure that each remote debugger has auto restart enabled. === Test case filtering. -You can run a single test, provided that you specify the Gradle project. See the documentation on +You can run a single test, provided that you specify the Gradle project. +See the documentation on https://docs.gradle.org/current/userguide/userguide_single.html#simple_name_pattern[simple name pattern filtering]. 
Run a single test case in the `server` project: @@ -230,8 +222,7 @@ Run with a given seed (seed is a hex-encoded long). === Repeats _all_ tests of ClassName N times. -Every test repetition will have a different method seed -(derived from a single random master seed). +Every test repetition will have a different method seed (derived from a single random master seed). -------------------------------------------------- ./gradlew :server:test -Dtests.iters=N --tests org.elasticsearch.package.ClassName @@ -239,8 +230,7 @@ Every test repetition will have a different method seed === Repeats _all_ tests of ClassName N times. -Every test repetition will have exactly the same master (0xdead) and -method-level (0xbeef) seed. +Every test repetition will have exactly the same master (0xdead) and method-level (0xbeef) seed. ------------------------------------------------------------------------ ./gradlew :server:test -Dtests.iters=N -Dtests.seed=DEAD:BEEF --tests org.elasticsearch.package.ClassName @@ -248,9 +238,7 @@ method-level (0xbeef) seed. === Repeats a given test N times -(note the filters - individual test repetitions are given suffixes, -ie: testFoo[0], testFoo[1], etc... so using testmethod or tests.method -ending in a glob is necessary to ensure iterations are run). +(note the filters - individual test repetitions are given suffixes, ie: testFoo[0], testFoo[1], etc... so using testmethod or tests.method ending in a glob is necessary to ensure iterations are run). ------------------------------------------------------------------------- ./gradlew :server:test -Dtests.iters=N --tests org.elasticsearch.package.ClassName.methodName @@ -275,10 +263,9 @@ Default value provided below in [brackets]. === Load balancing and caches. -By default the tests run on multiple processes using all the available cores on all -available CPUs. Not including hyper-threading. 
-If you want to explicitly specify the number of JVMs you can do so on the command -line: +By default the tests run on multiple processes using all the available cores on all available CPUs. +Not including hyper-threading. +If you want to explicitly specify the number of JVMs you can do so on the command line: ---------------------------- ./gradlew test -Dtests.jvms=8 @@ -290,20 +277,18 @@ Or in `~/.gradle/gradle.properties`: systemProp.tests.jvms=8 ---------------------------- -It's difficult to pick the "right" number here. Hypercores don't count for CPU -intensive tests and you should leave some slack for JVM-internal threads like -the garbage collector. And you have to have enough RAM to handle each JVM. +It's difficult to pick the "right" number here. +Hypercores don't count for CPU intensive tests and you should leave some slack for JVM-internal threads like the garbage collector. +And you have to have enough RAM to handle each JVM. === Test compatibility. -It is possible to provide a version that allows to adapt the tests behaviour -to older features or bugs that have been changed or fixed in the meantime. +It is possible to provide a version that allows to adapt the tests behaviour to older features or bugs that have been changed or fixed in the meantime. ----------------------------------------- ./gradlew test -Dtests.compatibility=1.0.0 ----------------------------------------- - === Miscellaneous. Run all tests without stopping on errors (inspect log files). @@ -318,8 +303,7 @@ Run more verbose output (slave JVM parameters, etc.). ./gradlew test -verbose ---------------------- -Change the default suite timeout to 5 seconds for all -tests (note the exclamation mark). +Change the default suite timeout to 5 seconds for all tests (note the exclamation mark). --------------------------------------- ./gradlew test -Dtests.timeoutSuite=5000! ... 
@@ -331,8 +315,7 @@ Change the logging level of ES (not Gradle) ./gradlew test -Dtests.es.logger.level=DEBUG -------------------------------- -Print all the logging output from the test runs to the commandline -even if tests are passing. +Print all the logging output from the test runs to the commandline even if tests are passing. ------------------------------ ./gradlew test -Dtests.output=always @@ -370,8 +353,8 @@ To run all verification tasks, including static checks, unit tests, and integrat ./gradlew check --------------------------------------------------------------------------- -Note that this will also run the unit tests and precommit tasks first. If you want to just -run the in memory cluster integration tests (because you are debugging them): +Note that this will also run the unit tests and precommit tasks first. +If you want to just run the in memory cluster integration tests (because you are debugging them): --------------------------------------------------------------------------- ./gradlew internalClusterTest @@ -383,12 +366,10 @@ If you want to just run the precommit checks: ./gradlew precommit --------------------------------------------------------------------------- -Some of these checks will require `docker-compose` installed for bringing up -test fixtures. If it's not present those checks will be skipped automatically. -The host running Docker (or VM if you're using Docker Desktop) needs 4GB of -memory or some of the containers will fail to start. You can tell that you -are short of memory if containers are exiting quickly after starting with -code 137 (128 + 9, where 9 means SIGKILL). +Some of these checks will require `docker-compose` installed for bringing up test fixtures. +If it's not present those checks will be skipped automatically. +The host running Docker (or VM if you're using Docker Desktop) needs 4GB of memory or some of the containers will fail to start. 
+You can tell that you are short of memory if containers are exiting quickly after starting with code 137 (128 + 9, where 9 means SIGKILL). == Debugging tests @@ -399,20 +380,15 @@ flag to the testing task and connect a debugger on the default port of `5005`. ./gradlew :server:test --debug-jvm --------------------------------------------------------------------------- -For REST tests, if you'd like to debug the Elasticsearch server itself, and -not your test code, use the `--debug-server-jvm` flag and use the -"Debug Elasticsearch" run configuration in IntelliJ to listen on the default -port of `5007`. +For REST tests, if you'd like to debug the Elasticsearch server itself, and not your test code, use the `--debug-server-jvm` flag and use the "Debug Elasticsearch" run configuration in IntelliJ to listen on the default port of `5007`. --------------------------------------------------------------------------- ./gradlew :rest-api-spec:yamlRestTest --debug-server-jvm --------------------------------------------------------------------------- -NOTE: In the case of test clusters using multiple nodes, multiple debuggers -will need to be attached on incrementing ports. For example, for a 3 node -cluster ports `5007`, `5008`, and `5009` will attempt to attach to a listening -debugger. You can use the "Debug Elasticsearch (node 2)" and "(node 3)" run -configurations should you need to debug a multi-node cluster. +NOTE: In the case of test clusters using multiple nodes, multiple debuggers will need to be attached on incrementing ports. +For example, for a 3 node cluster ports `5007`, `5008`, and `5009` will attempt to attach to a listening debugger. +You can use the "Debug Elasticsearch (node 2)" and "(node 3)" run configurations should you need to debug a multi-node cluster. You can also use a combination of both flags to debug both tests and server. This is only applicable to Java REST tests. @@ -423,22 +399,19 @@ This is only applicable to Java REST tests. 
== Testing the REST layer -The REST layer is tested through specific tests that are executed against -a cluster that is configured and initialized via Gradle. The tests -themselves can be written in either Java or with a YAML based DSL. +The REST layer is tested through specific tests that are executed against a cluster that is configured and initialized via Gradle. +The tests themselves can be written in either Java or with a YAML based DSL. -YAML based REST tests should be preferred since these are shared between all -the elasticsearch official clients. The YAML based tests describe the -operations to be executed and the obtained results that need to be tested. +YAML based REST tests should be preferred since these are shared between all the elasticsearch official clients. +The YAML based tests describe the operations to be executed and the obtained results that need to be tested. The YAML tests support various operators defined in the link:/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/README.asciidoc[rest-api-spec] and adhere to the link:/rest-api-spec/README.markdown[Elasticsearch REST API JSON specification] -In order to run the YAML tests, the relevant API specification needs -to be on the test classpath. Any gradle project that has support for REST -tests will get the primary API on it's class path. However, to better support -Gradle incremental builds, it is recommended to explicitly declare which -parts of the API the tests depend upon. +In order to run the YAML tests, the relevant API specification needs to be on the test classpath. +Any gradle project that has support for REST tests will get the primary API on its class path. +However, to better support Gradle incremental builds, it is recommended to explicitly declare which parts of the API the tests depend upon. 
For example: + --------------------------------------------------------------------------- restResources { restApi { @@ -447,11 +420,10 @@ restResources { } --------------------------------------------------------------------------- -YAML REST tests that include x-pack specific APIs need to explicitly declare -which APIs are required through a similar `includeXpack` configuration. +YAML REST tests that include x-pack specific APIs need to explicitly declare which APIs are required through a similar `includeXpack` configuration. -The REST tests are run automatically when executing the "./gradlew check" command. To run only the -YAML REST tests use the following command (modules and plugins may also include YAML REST tests): +The REST tests are run automatically when executing the "./gradlew check" command. +To run only the YAML REST tests use the following command (modules and plugins may also include YAML REST tests): --------------------------------------------------------------------------- ./gradlew :rest-api-spec:yamlRestTest @@ -472,20 +444,17 @@ You can run a group of YAML test by using wildcards: --tests "org.elasticsearch.test.rest.ClientYamlTestSuiteIT.test {yaml=index/*/*}" --------------------------------------------------------------------------- -Note that if the selected test via the `--tests` filter is not a valid test, i.e., the YAML test -runner is not able to parse and load it, you might get an error message indicating that the test -was not found. In such cases, running the whole suite without using the `--tests` could show more -specific error messages about why the test runner is not able to parse or load a certain test. +Note that if the selected test via the `--tests` filter is not a valid test, i.e., the YAML test runner is not able to parse and load it, you might get an error message indicating that the test was not found. 
+In such cases, running the whole suite without using the `--tests` could show more specific error messages about why the test runner is not able to parse or load a certain test. The YAML REST tests support all the options provided by the randomized runner, plus the following: -* `tests.rest.blacklist`: comma separated globs that identify tests that are -blacklisted and need to be skipped -e.g. -Dtests.rest.blacklist=index/*/Index document,get/10_basic/* +* `tests.rest.blacklist`: comma separated globs that identify tests that are blacklisted and need to be skipped e.g. -Dtests.rest.blacklist=index/*/Index document,get/10_basic/* Java REST tests can be run with the "javaRestTest" task. For example : + --------------------------------------------------------------------------- ./gradlew :modules:mapper-extras:javaRestTest --------------------------------------------------------------------------- @@ -497,21 +466,16 @@ A specific test case can be run with the following syntax (fqn.test {params}): --tests "org.elasticsearch.index.mapper.TokenCountFieldMapperIntegrationIT.testSearchByTokenCount {storeCountedFields=true loadCountedFields=false}" --------------------------------------------------------------------------- -yamlRestTest's and javaRestTest's are easy to identify, since they are found in a -respective source directory. However, there are some more specialized REST tests -that use custom task names. These are usually found in "qa" projects commonly -use the "integTest" task. +yamlRestTest's and javaRestTest's are easy to identify, since they are found in a respective source directory. +However, there are some more specialized REST tests that use custom task names. +These are usually found in "qa" projects and commonly use the "integTest" task. 
If in doubt about which command to use, simply run :check == Testing packaging -The packaging tests are run on different build vm cloud instances to verify -that installing and running Elasticsearch distributions works correctly on -supported operating systems. These tests should really only be run on ephemeral -systems because they're destructive; that is, these tests install and remove -packages and freely modify system settings, so you will probably regret it if -you execute them on your development machine. +The packaging tests are run on different build vm cloud instances to verify that installing and running Elasticsearch distributions works correctly on supported operating systems. +These tests should really only be run on ephemeral systems because they're destructive; that is, these tests install and remove packages and freely modify system settings, so you will probably regret it if you execute them on your development machine. === Reproducing packaging tests @@ -519,15 +483,15 @@ To reproduce or debug packaging tests failures we recommend using using our prov == Testing backwards compatibility -Backwards compatibility tests exist to test upgrading from each supported version -to the current version. To run them all use: +Backwards compatibility tests exist to test upgrading from each supported version to the current version. +To run them all use: ------------------------------------------------- ./gradlew bwcTest ------------------------------------------------- -A specific version can be tested as well. For example, to test bwc with -version 5.3.2 run: +A specific version can be tested as well. +For example, to test bwc with version 5.3.2 run: ------------------------------------------------- ./gradlew v5.3.2#bwcTest @@ -535,6 +499,7 @@ version 5.3.2 run: Use -Dtests.class and -Dtests.method to run a specific bwcTest test. 
For example to run a specific tests from the x-pack rolling upgrade from 7.7.0: + ------------------------------------------------- ./gradlew :x-pack:qa:rolling-upgrade:v7.7.0#bwcTest \ -Dtests.class=org.elasticsearch.upgrades.UpgradeClusterClientYamlTestSuiteIT \ @@ -551,49 +516,48 @@ When running `./gradlew check`, minimal bwc checks are also run against compatib ==== BWC Testing against a specific remote/branch -Sometimes a backward compatibility change spans two versions. A common case is a new functionality -that needs a BWC bridge in an unreleased versioned of a release branch (for example, 5.x). -To test the changes, you can instruct Gradle to build the BWC version from another remote/branch combination instead of -pulling the release branch from GitHub. You do so using the `bwc.remote` and `bwc.refspec.BRANCH` system properties: +Sometimes a backward compatibility change spans two versions. +A common case is a new functionality that needs a BWC bridge in an unreleased version of a release branch (for example, 5.x). +Another use case, since the introduction of serverless, is to test BWC against main in addition to the other released branches. +To do so, specify the `bwc.refspec` remote and branch to use for the BWC build as `origin/main`. +To test against main, you will also need to create a new version in link:./server/src/main/java/org/elasticsearch/Version.java[Version.java], increment `elasticsearch` in link:./build-tools-internal/version.properties[version.properties], and hard-code the `project.version` for ml-cpp in link:./x-pack/plugin/ml/build.gradle[ml/build.gradle]. + +In general, to test the changes, you can instruct Gradle to build the BWC version from another remote/branch combination instead of pulling the release branch from GitHub. 
+You do so using the `bwc.refspec.{VERSION}` system property: ------------------------------------------------- -./gradlew check -Dbwc.remote=${remote} -Dbwc.refspec.5.x=index_req_bwc_5.x +./gradlew check -Dtests.bwc.refspec.8.15=origin/main ------------------------------------------------- -The branch needs to be available on the remote that the BWC makes of the -repository you run the tests from. Using the remote is a handy trick to make -sure that a branch is available and is up to date in the case of multiple runs. +The branch needs to be available on the remote that the BWC makes of the repository you run the tests from. +Using the remote is a handy trick to make sure that a branch is available and is up to date in the case of multiple runs. Example: -Say you need to make a change to `main` and have a BWC layer in `5.x`. You -will need to: -. Create a branch called `index_req_change` off your remote `${remote}`. This -will contain your change. -. Create a branch called `index_req_bwc_5.x` off `5.x`. This will contain your bwc layer. +Say you need to make a change to `main` and have a BWC layer in `5.x`. +You will need to: +. Create a branch called `index_req_change` off your remote `${remote}`. +This will contain your change. +. Create a branch called `index_req_bwc_5.x` off `5.x`. +This will contain your bwc layer. . Push both branches to your remote repository. . Run the tests with `./gradlew check -Dbwc.remote=${remote} -Dbwc.refspec.5.x=index_req_bwc_5.x`. ==== Skip fetching latest -For some BWC testing scenarios, you want to use the local clone of the -repository without fetching latest. For these use cases, you can set the system -property `tests.bwc.git_fetch_latest` to `false` and the BWC builds will skip -fetching the latest from the remote. +For some BWC testing scenarios, you want to use the local clone of the repository without fetching latest. 
+For these use cases, you can set the system property `tests.bwc.git_fetch_latest` to `false` and the BWC builds will skip fetching the latest from the remote. == Testing in FIPS 140-2 mode -We have a CI matrix job that periodically runs all our tests with the JVM configured -to be FIPS 140-2 compliant with the use of the BouncyCastle FIPS approved Security Provider. -FIPS 140-2 imposes certain requirements that affect how our tests should be set up or what -can be tested. This section summarizes what one needs to take into consideration so that -tests won't fail when run in fips mode. +We have a CI matrix job that periodically runs all our tests with the JVM configured to be FIPS 140-2 compliant with the use of the BouncyCastle FIPS approved Security Provider. +FIPS 140-2 imposes certain requirements that affect how our tests should be set up or what can be tested. +This section summarizes what one needs to take into consideration so that tests won't fail when run in fips mode. === Muting tests in FIPS 140-2 mode -If the following limitations cannot be observed, or there is a need to actually test some use -case that is not available/allowed in fips mode, the test can be muted. For unit tests or Java -rest tests one can use +If the following limitations cannot be observed, or there is a need to actually test some use case that is not available/allowed in fips mode, the test can be muted. +For unit tests or Java rest tests one can use ------------------------------------------------ assumeFalse("Justification why this cannot be run in FIPS mode", inFipsJvm()); @@ -624,9 +588,9 @@ The following should be taken into consideration when writing new tests or adjus ==== TLS -`JKS` and `PKCS#12` keystores cannot be used in FIPS mode. If the test depends on being able to use -a keystore, it can be muted when needed ( see `ESTestCase#inFipsJvm` ). Alternatively, one can use -PEM encoded files for keys and certificates for the tests or for setting up TLS in a test cluster. 
+`JKS` and `PKCS#12` keystores cannot be used in FIPS mode. +If the test depends on being able to use a keystore, it can be muted when needed ( see `ESTestCase#inFipsJvm` ). +Alternatively, one can use PEM encoded files for keys and certificates for the tests or for setting up TLS in a test cluster. Also, when in FIPS 140 mode, hostname verification for TLS cannot be turned off so if you are using `*.verification_mode: none` , you'd need to mute the test in fips mode. @@ -634,26 +598,21 @@ When using TLS, ensure that private keys used are longer than 2048 bits, or mute ==== Password hashing algorithm -Test clusters are configured with `xpack.security.fips_mode.enabled` set to true. This means that -FIPS 140-2 related bootstrap checks are enabled and the test cluster will fail to form if the -password hashing algorithm is set to something else than a PBKDF2 based one. You can delegate the choice -of algorithm to i.e. `SecurityIntegTestCase#getFastStoredHashAlgoForTests` if you don't mind the -actual algorithm used, or depend on default values for the test cluster nodes. +Test clusters are configured with `xpack.security.fips_mode.enabled` set to true. +This means that FIPS 140-2 related bootstrap checks are enabled and the test cluster will fail to form if the password hashing algorithm is set to something else than a PBKDF2 based one. +You can delegate the choice of algorithm to i.e. `SecurityIntegTestCase#getFastStoredHashAlgoForTests` if you don't mind the actual algorithm used, or depend on default values for the test cluster nodes. ==== Password length -While using `pbkdf2` as the password hashing algorithm, FIPS 140-2 imposes a requirement that -passwords are longer than 14 characters. You can either ensure that all test user passwords in -your test are longer than 14 characters and use i.e. `SecurityIntegTestCase#getFastStoredHashAlgoForTests` -to randomly select a hashing algorithm, or use `pbkdf2_stretch` that doesn't have the same -limitation. 
+While using `pbkdf2` as the password hashing algorithm, FIPS 140-2 imposes a requirement that passwords are longer than 14 characters. +You can either ensure that all test user passwords in your test are longer than 14 characters and use i.e. `SecurityIntegTestCase#getFastStoredHashAlgoForTests` +to randomly select a hashing algorithm, or use `pbkdf2_stretch` that doesn't have the same limitation. ==== Keystore Password -In FIPS 140-2 mode, the elasticsearch keystore needs to be password protected with a password -of appropriate length. This is handled automatically in `fips.gradle` and the keystore is unlocked -on startup by the test clusters tooling in order to have secure settings available. However, you -might need to take into consideration that the keystore is password-protected with `keystore-password` +In FIPS 140-2 mode, the elasticsearch keystore needs to be password protected with a password of appropriate length. +This is handled automatically in `fips.gradle` and the keystore is unlocked on startup by the test clusters tooling in order to have secure settings available. +However, you might need to take into consideration that the keystore is password-protected with `keystore-password` if you need to interact with it in a test. == How to write good tests? @@ -662,100 +621,72 @@ if you need to interact with it in a test. There are multiple base classes for tests: -* **`ESTestCase`**: The base class of all tests. It is typically extended - directly by unit tests. -* **`ESSingleNodeTestCase`**: This test case sets up a cluster that has a - single node. -* **`ESIntegTestCase`**: An integration test case that creates a cluster that - might have multiple nodes. -* **`ESRestTestCase`**: An integration tests that interacts with an external - cluster via the REST API. This is used for Java based REST tests. -* **`ESClientYamlSuiteTestCase` **: A subclass of `ESRestTestCase` used to run - YAML based REST tests. +* **`ESTestCase`**: The base class of all tests. 
+It is typically extended directly by unit tests. +* **`ESSingleNodeTestCase`**: This test case sets up a cluster that has a single node. +* **`ESIntegTestCase`**: An integration test case that creates a cluster that might have multiple nodes. +* **`ESRestTestCase`**: An integration test that interacts with an external cluster via the REST API. +This is used for Java based REST tests. +* **`ESClientYamlSuiteTestCase`**: A subclass of `ESRestTestCase` used to run YAML based REST tests. === Good practices ==== What kind of tests should I write? -Unit tests are the preferred way to test some functionality: most of the time -they are simpler to understand, more likely to reproduce, and unlikely to be -affected by changes that are unrelated to the piece of functionality that is -being tested. +Unit tests are the preferred way to test some functionality: most of the time they are simpler to understand, more likely to reproduce, and unlikely to be affected by changes that are unrelated to the piece of functionality that is being tested. -The reason why `ESSingleNodeTestCase` exists is that all our components used to -be very hard to set up in isolation, which had led us to having a number of -integration tests but close to no unit tests. `ESSingleNodeTestCase` is a -workaround for this issue which provides an easy way to spin up a node and get -access to components that are hard to instantiate like `IndicesService`. +The reason why `ESSingleNodeTestCase` exists is that all our components used to be very hard to set up in isolation, which had led us to having a number of integration tests but close to no unit tests. `ESSingleNodeTestCase` is a workaround for this issue which provides an easy way to spin up a node and get access to components that are hard to instantiate like `IndicesService`. Whenever practical, you should prefer unit tests. -Many tests extend `ESIntegTestCase`, mostly because this is how most tests used -to work in the early days of Elasticsearch. 
However the complexity of these -tests tends to make them hard to debug. Whenever the functionality that is -being tested isn't intimately dependent on how Elasticsearch behaves as a -cluster, it is recommended to write unit tests or REST tests instead. +Many tests extend `ESIntegTestCase`, mostly because this is how most tests used to work in the early days of Elasticsearch. +However the complexity of these tests tends to make them hard to debug. +Whenever the functionality that is being tested isn't intimately dependent on how Elasticsearch behaves as a cluster, it is recommended to write unit tests or REST tests instead. -In short, most new functionality should come with unit tests, and optionally -REST tests to test integration. +In short, most new functionality should come with unit tests, and optionally REST tests to test integration. ==== Refactor code to make it easier to test Unfortunately, a large part of our code base is still hard to unit test. -Sometimes because some classes have lots of dependencies that make them hard to -instantiate. Sometimes because API contracts make tests hard to write. Code -refactors that make functionality easier to unit test are encouraged. If this -sounds very abstract to you, you can have a look at -https://github.com/elastic/elasticsearch/pull/16610[this pull request] for -instance, which is a good example. It refactors `IndicesRequestCache` in such -a way that: - - it no longer depends on objects that are hard to instantiate such as - `IndexShard` or `SearchContext`, - - time-based eviction is applied on top of the cache rather than internally, - which makes it easier to assert on what the cache is expected to contain at - a given time. +Sometimes because some classes have lots of dependencies that make them hard to instantiate. +Sometimes because API contracts make tests hard to write. +Code refactors that make functionality easier to unit test are encouraged. 
+If this sounds very abstract to you, you can have a look at +https://github.com/elastic/elasticsearch/pull/16610[this pull request] for instance, which is a good example. +It refactors `IndicesRequestCache` in such a way that: +- it no longer depends on objects that are hard to instantiate such as +`IndexShard` or `SearchContext`, - time-based eviction is applied on top of the cache rather than internally, which makes it easier to assert on what the cache is expected to contain at a given time. === Bad practices ==== Use randomized-testing for coverage -In general, randomization should be used for parameters that are not expected -to affect the behavior of the functionality that is being tested. For instance -the number of shards should not impact `date_histogram` aggregations, and the -choice of the `store` type (`niofs` vs `mmapfs`) does not affect the results of -a query. Such randomization helps improve confidence that we are not relying on -implementation details of one component or specifics of some setup. +In general, randomization should be used for parameters that are not expected to affect the behavior of the functionality that is being tested. +For instance the number of shards should not impact `date_histogram` aggregations, and the choice of the `store` type (`niofs` vs `mmapfs`) does not affect the results of a query. +Such randomization helps improve confidence that we are not relying on implementation details of one component or specifics of some setup. -However it should not be used for coverage. For instance if you are testing a -piece of functionality that enters different code paths depending on whether -the index has 1 shards or 2+ shards, then we shouldn't just test against an -index with a random number of shards: there should be one test for the 1-shard -case, and another test for the 2+ shards case. +However it should not be used for coverage. 
+For instance if you are testing a piece of functionality that enters different code paths depending on whether the index has 1 shards or 2+ shards, then we shouldn't just test against an index with a random number of shards: there should be one test for the 1-shard case, and another test for the 2+ shards case. ==== Abuse randomization in multi-threaded tests -Multi-threaded tests are often not reproducible due to the fact that there is -no guarantee on the order in which operations occur across threads. Adding -randomization to the mix usually makes things worse and should be done with -care. +Multi-threaded tests are often not reproducible due to the fact that there is no guarantee on the order in which operations occur across threads. +Adding randomization to the mix usually makes things worse and should be done with care. == Test coverage analysis Generating test coverage reports for Elasticsearch is currently not possible through Gradle. -However, it _is_ possible to gain insight in code coverage using IntelliJ's built-in coverage -analysis tool that can measure coverage upon executing specific tests. +However, it _is_ possible to gain insight in code coverage using IntelliJ's built-in coverage analysis tool that can measure coverage upon executing specific tests. -Test coverage reporting used to be possible with JaCoCo when Elasticsearch was using Maven -as its build system. Since the switch to Gradle though, this is no longer possible, seeing as -the code currently used to build Elasticsearch does not allow JaCoCo to recognize its tests. +Test coverage reporting used to be possible with JaCoCo when Elasticsearch was using Maven as its build system. +Since the switch to Gradle though, this is no longer possible, seeing as the code currently used to build Elasticsearch does not allow JaCoCo to recognize its tests. For more information on this, see the discussion in https://github.com/elastic/elasticsearch/issues/28867[issue #28867]. 
== Building with extra plugins -Additional plugins may be built alongside elasticsearch, where their -dependency on elasticsearch will be substituted with the local elasticsearch -build. To add your plugin, create a directory called elasticsearch-extra as -a sibling of elasticsearch. Checkout your plugin underneath elasticsearch-extra -and the build will automatically pick it up. You can verify the plugin is -included as part of the build by checking the projects of the build. + +Additional plugins may be built alongside elasticsearch, where their dependency on elasticsearch will be substituted with the local elasticsearch build. +To add your plugin, create a directory called elasticsearch-extra as a sibling of elasticsearch. +Checkout your plugin underneath elasticsearch-extra and the build will automatically pick it up. +You can verify the plugin is included as part of the build by checking the projects of the build. --------------------------------------------------------------------------- ./gradlew projects @@ -763,12 +694,10 @@ included as part of the build by checking the projects of the build. == Environment misc -There is a known issue with macOS localhost resolve strategy that can cause -some integration tests to fail. This is because integration tests have timings -for cluster formation, discovery, etc. that can be exceeded if name resolution -takes a long time. -To fix this, make sure you have your computer name (as returned by `hostname`) -inside `/etc/hosts`, e.g.: +There is a known issue with macOS localhost resolve strategy that can cause some integration tests to fail. +This is because integration tests have timings for cluster formation, discovery, etc. that can be exceeded if name resolution takes a long time. +To fix this, make sure you have your computer name (as returned by `hostname`) inside `/etc/hosts`, e.g.: + .... 
127.0.0.1 localhost ElasticMBP.local 255.255.255.255 broadcasthost @@ -777,16 +706,16 @@ inside `/etc/hosts`, e.g.: == Benchmarking -For changes that might affect the performance characteristics of Elasticsearch -you should also run macrobenchmarks. We maintain a macrobenchmarking tool -called https://github.com/elastic/rally[Rally] -which you can use to measure the performance impact. It comes with a set of -default benchmarks that we also -https://elasticsearch-benchmarks.elastic.co/[run every night]. To get started, -please see https://esrally.readthedocs.io/en/stable/[Rally's documentation]. +For changes that might affect the performance characteristics of Elasticsearch you should also run macrobenchmarks. +We maintain a macrobenchmarking tool called https://github.com/elastic/rally[Rally] +which you can use to measure the performance impact. +It comes with a set of default benchmarks that we also +https://elasticsearch-benchmarks.elastic.co/[run every night]. +To get started, please see https://esrally.readthedocs.io/en/stable/[Rally's documentation]. == Test doc builds -The Elasticsearch docs are in AsciiDoc format. You can test and build the docs -locally using the Elasticsearch documentation build process. See +The Elasticsearch docs are in AsciiDoc format. +You can test and build the docs locally using the Elasticsearch documentation build process. +See https://github.com/elastic/docs. 
diff --git a/x-pack/plugin/inference/qa/mixed-cluster/src/javaRestTest/java/org/elasticsearch/xpack/inference/qa/mixed/MixedClusterSpecTestCase.java b/x-pack/plugin/inference/qa/mixed-cluster/src/javaRestTest/java/org/elasticsearch/xpack/inference/qa/mixed/MixedClusterSpecTestCase.java index 35b25eb0bf7ea..45cd3716f21df 100644 --- a/x-pack/plugin/inference/qa/mixed-cluster/src/javaRestTest/java/org/elasticsearch/xpack/inference/qa/mixed/MixedClusterSpecTestCase.java +++ b/x-pack/plugin/inference/qa/mixed-cluster/src/javaRestTest/java/org/elasticsearch/xpack/inference/qa/mixed/MixedClusterSpecTestCase.java @@ -18,7 +18,7 @@ public abstract class MixedClusterSpecTestCase extends ESRestTestCase { @ClassRule - public static ElasticsearchCluster cluster = Clusters.mixedVersionCluster(); + public static ElasticsearchCluster cluster = MixedClustersSpec.mixedVersionCluster(); @Override protected String getTestRestCluster() { diff --git a/x-pack/plugin/inference/qa/mixed-cluster/src/javaRestTest/java/org/elasticsearch/xpack/inference/qa/mixed/Clusters.java b/x-pack/plugin/inference/qa/mixed-cluster/src/javaRestTest/java/org/elasticsearch/xpack/inference/qa/mixed/MixedClustersSpec.java similarity index 96% rename from x-pack/plugin/inference/qa/mixed-cluster/src/javaRestTest/java/org/elasticsearch/xpack/inference/qa/mixed/Clusters.java rename to x-pack/plugin/inference/qa/mixed-cluster/src/javaRestTest/java/org/elasticsearch/xpack/inference/qa/mixed/MixedClustersSpec.java index d7c0a73c9de4e..7802c2e966e01 100644 --- a/x-pack/plugin/inference/qa/mixed-cluster/src/javaRestTest/java/org/elasticsearch/xpack/inference/qa/mixed/Clusters.java +++ b/x-pack/plugin/inference/qa/mixed-cluster/src/javaRestTest/java/org/elasticsearch/xpack/inference/qa/mixed/MixedClustersSpec.java @@ -11,7 +11,7 @@ import org.elasticsearch.test.cluster.local.distribution.DistributionType; import org.elasticsearch.test.cluster.util.Version; -public class Clusters { +public class MixedClustersSpec { 
public static ElasticsearchCluster mixedVersionCluster() { Version oldVersion = Version.fromString(System.getProperty("tests.old_cluster_version")); return ElasticsearchCluster.local() From a3bbfce4291c51bdd4bae16ef567f5d84fa02974 Mon Sep 17 00:00:00 2001 From: Max Hniebergall Date: Mon, 13 May 2024 12:52:39 -0400 Subject: [PATCH 11/15] Revert manual testing configs of "temp for testing" This reverts parts of commit fca46fd2b6253accc010a2e2a8bf05edfff5ea9b. --- build-tools-internal/version.properties | 87 ++++++++++--------- .../main/java/org/elasticsearch/Version.java | 3 +- x-pack/plugin/ml/build.gradle | 12 +-- 3 files changed, 55 insertions(+), 47 deletions(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index 4b085cacb44d4..044f6c07c756e 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,51 +1,60 @@ -elasticsearch=8.16.0 -lucene=9.10.0 -bundled_jdk_vendor=openjdk -bundled_jdk=21.0.2+13@f2283984656d49d69e91c558476027ac +elasticsearch = 8.15.0 +lucene = 9.10.0 + +bundled_jdk_vendor = openjdk +bundled_jdk = 21.0.2+13@f2283984656d49d69e91c558476027ac # optional dependencies -spatial4j=0.7 -jts=1.15.0 -jackson=2.15.0 -snakeyaml=2.0 -icu4j=68.2 -supercsv=2.4.0 -log4j=2.19.0 -slf4j=2.0.6 -ecsLogging=1.2.0 -jna=5.12.1 -netty=4.1.109.Final -commons_lang3=3.9 -google_oauth_client=1.34.1 -antlr4=4.13.1 +spatial4j = 0.7 +jts = 1.15.0 +jackson = 2.15.0 +snakeyaml = 2.0 +icu4j = 68.2 +supercsv = 2.4.0 +log4j = 2.19.0 +slf4j = 2.0.6 +ecsLogging = 1.2.0 +jna = 5.12.1 +netty = 4.1.109.Final +commons_lang3 = 3.9 +google_oauth_client = 1.34.1 + +antlr4 = 4.13.1 # bouncy castle version for non-fips. 
fips jars use a different version bouncycastle=1.78.1 # used by security and idp (need to be in sync due to cross-dependency in testing) -opensaml=4.3.0 +opensaml = 4.3.0 + # client dependencies -httpclient=4.5.14 -httpcore=4.4.13 -httpasyncclient=4.1.5 -commonslogging=1.2 -commonscodec=1.15 -protobuf=3.21.9 +httpclient = 4.5.14 +httpcore = 4.4.13 +httpasyncclient = 4.1.5 +commonslogging = 1.2 +commonscodec = 1.15 +protobuf = 3.21.9 + # test dependencies -randomizedrunner=2.8.0 -junit=4.13.2 -junit5=5.7.1 -hamcrest=2.1 -mocksocket=1.2 +randomizedrunner = 2.8.0 +junit = 4.13.2 +junit5 = 5.7.1 +hamcrest = 2.1 +mocksocket = 1.2 + # test container dependencies -testcontainer=1.19.2 -dockerJava=3.3.4 -ductTape=1.0.8 -commonsCompress=1.24.0 +testcontainer = 1.19.2 +dockerJava = 3.3.4 +ductTape = 1.0.8 +commonsCompress = 1.24.0 + # packer caching build logic -reflections=0.10.2 +reflections = 0.10.2 + # benchmark dependencies -jmh=1.26 +jmh = 1.26 + # test dependencies # when updating this version, also update :qa:evil-tests -jimfs=1.3.0 -jimfs_guava=32.1.1-jre +jimfs = 1.3.0 +jimfs_guava = 32.1.1-jre + # test framework -networknt_json_schema_validator=1.0.48 +networknt_json_schema_validator = 1.0.48 diff --git a/server/src/main/java/org/elasticsearch/Version.java b/server/src/main/java/org/elasticsearch/Version.java index 9e79baa2b2a87..a2e04d0bf3d48 100644 --- a/server/src/main/java/org/elasticsearch/Version.java +++ b/server/src/main/java/org/elasticsearch/Version.java @@ -176,8 +176,7 @@ public class Version implements VersionId, ToXContentFragment { public static final Version V_8_13_4 = new Version(8_13_04_99); public static final Version V_8_14_0 = new Version(8_14_00_99); public static final Version V_8_15_0 = new Version(8_15_00_99); - public static final Version V_8_16_0 = new Version(8_16_00_99); - public static final Version CURRENT = V_8_16_0; + public static final Version CURRENT = V_8_15_0; private static final NavigableMap VERSION_IDS; private static final 
Map VERSION_STRINGS; diff --git a/x-pack/plugin/ml/build.gradle b/x-pack/plugin/ml/build.gradle index bfb298380b215..26f5ea053771c 100644 --- a/x-pack/plugin/ml/build.gradle +++ b/x-pack/plugin/ml/build.gradle @@ -68,9 +68,9 @@ esplugin.bundleSpec.from { esplugin.bundleSpec.exclude 'platform/licenses/**' ["bundlePlugin", "explodedBundlePlugin"].each { bundleTaskName -> - tasks.named(bundleTaskName).configure { - dependsOn configurations.nativeBundle - } + tasks.named(bundleTaskName).configure { + dependsOn configurations.nativeBundle + } } dependencies { @@ -100,10 +100,10 @@ dependencies { api "org.apache.lucene:lucene-analysis-icu:${versions.lucene}" api "org.apache.lucene:lucene-analysis-kuromoji:${versions.lucene}" implementation 'org.ojalgo:ojalgo:51.2.0' - nativeBundle("org.elasticsearch.ml:ml-cpp:8.15.0-SNAPSHOT:deps@zip") { + nativeBundle("org.elasticsearch.ml:ml-cpp:${project.version}:deps@zip") { changing = true } - nativeBundle("org.elasticsearch.ml:ml-cpp:8.15.0-SNAPSHOT:nodeps@zip") { + nativeBundle("org.elasticsearch.ml:ml-cpp:${project.version}:nodeps@zip") { changing = true } testImplementation 'org.ini4j:ini4j:0.5.2' @@ -136,7 +136,7 @@ tasks.named('generateNotice').configure { inputs.dir("${project.buildDir}/extractedNativeLicenses/platform/licenses") .withPropertyName('licensingDir') .withPathSensitivity(PathSensitivity.RELATIVE) - licenseDirs.add(tasks.named("extractNativeLicenses").map { new File(it.destinationDir, "platform/licenses") }) + licenseDirs.add(tasks.named("extractNativeLicenses").map {new File(it.destinationDir, "platform/licenses") }) } tasks.named("dependencyLicenses").configure { From 9d487c4573a3a545f8ef611e521e96ae78a3c638 Mon Sep 17 00:00:00 2001 From: Max Hniebergall Date: Mon, 13 May 2024 14:14:22 -0400 Subject: [PATCH 12/15] revert TESTING.asciidoc formatting --- TESTING.asciidoc | 1 - 1 file changed, 1 deletion(-) diff --git a/TESTING.asciidoc b/TESTING.asciidoc index 608901fba75c8..4113ba33a059e 100644 --- 
a/TESTING.asciidoc +++ b/TESTING.asciidoc @@ -528,7 +528,6 @@ You do so using the `bwc.refspec.{VERSION}` system property: ------------------------------------------------- ./gradlew check -Dtests.bwc.refspec.8.15=origin/main ------------------------------------------------- - The branch needs to be available on the remote that the BWC makes of the repository you run the tests from. Using the remote is a handy trick to make sure that a branch is available and is up to date in the case of multiple runs. From 8a591b922f9124a6ba0f44d7f7d8f9de38539b1b Mon Sep 17 00:00:00 2001 From: Max Hniebergall <137079448+maxhniebergall@users.noreply.github.com> Date: Mon, 13 May 2024 14:18:07 -0400 Subject: [PATCH 13/15] Update TESTING.asciidoc to avoid reformatting --- TESTING.asciidoc | 410 ++++++++++++++++++++++++++++------------------- 1 file changed, 244 insertions(+), 166 deletions(-) diff --git a/TESTING.asciidoc b/TESTING.asciidoc index 4113ba33a059e..2c205f9090ba8 100644 --- a/TESTING.asciidoc +++ b/TESTING.asciidoc @@ -2,9 +2,11 @@ = Testing [partintro] -Elasticsearch uses JUnit for testing. -It also generated random inputs into tests, either using a random seed, or one that is set via a system property. -The following is a cheatsheet of options for running the Elasticsearch tests. + +Elasticsearch uses JUnit for testing. It also generated random inputs into +tests, either using a random seed, or one that is set via a system +property. The following is a cheatsheet of options for running the +Elasticsearch tests. 
== Creating packages @@ -14,7 +16,8 @@ To build a distribution for your local OS and print its output location upon com ./gradlew localDistro ----------------------------- -To create a platform-specific build, use the following depending on your operating system: +To create a platform-specific build, use the following depending on your +operating system: ----------------------------- ./gradlew :distribution:archives:linux-tar:assemble @@ -28,11 +31,13 @@ You can build a Docker image with: ./gradlew build(Aarch64)DockerImage ----------------------------- -Note: you almost certainly don't want to run `./gradlew assemble` as this will attempt build every single Elasticsearch distribution. +Note: you almost certainly don't want to run `./gradlew assemble` as this +will attempt build every single Elasticsearch distribution. === Running Elasticsearch from a checkout -In order to run Elasticsearch from source without building a package, you can run it using Gradle: +In order to run Elasticsearch from source without building a package, you can +run it using Gradle: ------------------------------------- ./gradlew run @@ -40,42 +45,45 @@ In order to run Elasticsearch from source without building a package, you can ru ==== Launching and debugging from an IDE -If you want to run and debug Elasticsearch from your IDE, the `./gradlew run` task supports a remote debugging option. -Run the following from your terminal: +If you want to run and debug Elasticsearch from your IDE, the `./gradlew run` task +supports a remote debugging option. Run the following from your terminal: --------------------------------------------------------------------------- ./gradlew run --debug-jvm --------------------------------------------------------------------------- -Next start the "Debug Elasticsearch" run configuration in IntelliJ. -This will enable the IDE to connect to the process and allow debug functionality. +Next start the "Debug Elasticsearch" run configuration in IntelliJ. 
This will enable the IDE to connect to the process and allow debug functionality. + As such the IDE needs to be instructed to listen for connections on the debug port. -Since we might run multiple JVMs as part of configuring and starting the cluster it's recommended to configure the IDE to initiate multiple listening attempts. -In case of IntelliJ, this option is called "Auto restart" and needs to be checked. +Since we might run multiple JVMs as part of configuring and starting the cluster it's +recommended to configure the IDE to initiate multiple listening attempts. In case of IntelliJ, this option +is called "Auto restart" and needs to be checked. NOTE: If you have imported the project into IntelliJ according to the instructions in -link:/CONTRIBUTING.md#importing-the-project-into-intellij-idea[CONTRIBUTING.md] then a debug run configuration named "Debug Elasticsearch" will be created for you and configured appropriately. +link:/CONTRIBUTING.md#importing-the-project-into-intellij-idea[CONTRIBUTING.md] then a debug run configuration +named "Debug Elasticsearch" will be created for you and configured appropriately. ===== Debugging the CLI launcher -The gradle task does not start the Elasticsearch server process directly; like in the Elasticsearch distribution, the job of starting the server process is delegated to a launcher CLI tool. -If you need to debug the launcher itself, add the following option to the `run` task: - +The gradle task does not start the Elasticsearch server process directly; like in the Elasticsearch distribution, +the job of starting the server process is delegated to a launcher CLI tool. If you need to debug the launcher itself, +add the following option to the `run` task: --------------------------------------------------------------------------- ./gradlew run --debug-cli-jvm --------------------------------------------------------------------------- - -This option can be specified in isolation or combined with `--debug-jvm`. 
-Since the CLI launcher lifespan may overlap with the server process lifespan, the CLI launcher process will be started on a different port (5107 for the first node, 5108 and following for additional cluster nodes). +This option can be specified in isolation or combined with `--debug-jvm`. Since the CLI launcher lifespan may overlap +with the server process lifespan, the CLI launcher process will be started on a different port (5107 for the first node, +5108 and following for additional cluster nodes). As with the `--debug-jvm` command, the IDE needs to be instructed to listen for connections on the debug port. -You need to configure and start an appropriate Remote JVM Debug configuration, e.g. by cloning and editing the "Debug Elasticsearch" run configuration to point to the correct debug port. +You need to configure and start an appropriate Remote JVM Debug configuration, e.g. by cloning and editing +the "Debug Elasticsearch" run configuration to point to the correct debug port. ==== Disabling assertions -When running Elasticsearch with `./gradlew run`, assertions are enabled by default. -To disable them, add the following command line option: +When running Elasticsearch with `./gradlew run`, assertions are enabled by +default. To disable them, add the following command line option: ------------------------- -Dtests.jvm.argline="-da -dsa" @@ -103,7 +111,8 @@ In order to start a node with a trial license execute the following command: ./gradlew run -Drun.license_type=trial ------------------------------------- -This enables security and other paid features and adds a superuser with the username: `elastic-admin` and password: `elastic-password`. +This enables security and other paid features and adds a superuser with the username: `elastic-admin` and +password: `elastic-password`. 
==== Other useful arguments @@ -116,21 +125,22 @@ This enables security and other paid features and adds a superuser with the user - In order to set an Elasticsearch setting, provide a setting with the following prefix: `-Dtests.es.` - In order to pass a JVM setting, e.g. to disable assertions: `-Dtests.jvm.argline="-da"` - In order to use HTTPS: ./gradlew run --https -- In order to start a mock logging APM server on port 9999 and configure ES cluster to connect to it, use `./gradlew run --with-apm-server` +- In order to start a mock logging APM server on port 9999 and configure ES cluster to connect to it, +use `./gradlew run --with-apm-server` ==== Customizing the test cluster for ./gradlew run You may need to customize the cluster configuration for the ./gradlew run task. The settings can be set via the command line, but other options require updating the task itself. You can simply find the task in the source code and configure it there. -(The task is currently defined in build-tools-internal/src/main/groovy/elasticsearch.run.gradle) However, this requires modifying a source controlled file and is subject to accidental commits. +(The task is currently defined in build-tools-internal/src/main/groovy/elasticsearch.run.gradle) +However, this requires modifying a source controlled file and is subject to accidental commits. Alternatively, you can use a Gradle init script to inject custom build logic with the -I flag to configure this task locally. For example: To use a custom certificate for HTTPS with `./gradlew run`, you can do the following. 
Create a file (for example ~/custom-run.gradle) with the following contents: - ------------------------------------- rootProject { if(project.name == 'elasticsearch') { @@ -142,9 +152,7 @@ rootProject { } } ------------------------------------- - Now tell Gradle to use this init script: - ------------------------------------- ./gradlew run -I ~/custom-run.gradle \ -Dtests.es.xpack.security.http.ssl.enabled=true \ @@ -152,15 +160,16 @@ Now tell Gradle to use this init script: ------------------------------------- Now the http.p12 file will be placed in the config directory of the running cluster and available for use. -Assuming you have the http.ssl.keystore setup correctly, you can now use HTTPS with ./gradlew run without the risk of accidentally committing your local configurations. +Assuming you have the http.ssl.keystore setup correctly, you can now use HTTPS with ./gradlew run without the risk +of accidentally committing your local configurations. ==== Multiple nodes in the test cluster for ./gradlew run -Another desired customization for ./gradlew run might be to run multiple nodes with different setting for each node. -For example, you may want to debug a coordinating only node that fans out to one or more data nodes. -To do this, increase the numberOfNodes and add specific configuration for each of the nodes. -For example, the following will instruct the first node (:9200) to be a coordinating only node, and all other nodes to be master, data_hot, data_content nodes. - +Another desired customization for ./gradlew run might be to run multiple +nodes with different setting for each node. For example, you may want to debug a coordinating only node that fans out +to one or more data nodes. To do this, increase the numberOfNodes and add specific configuration for each +of the nodes. For example, the following will instruct the first node (:9200) to be a coordinating only node, +and all other nodes to be master, data_hot, data_content nodes. 
------------------------------------- testClusters.register("runTask") { ... @@ -176,22 +185,21 @@ testClusters.register("runTask") { ------------------------------------- You can also place this config in custom init script (see above) to avoid accidental commits. -If you are passing in the --debug-jvm flag with multiple nodes, you will need multiple remote debuggers running. -One for each node listening at port 5007, 5008, 5009, and so on. -Ensure that each remote debugger has auto restart enabled. +If you are passing in the --debug-jvm flag with multiple nodes, you will need multiple remote debuggers running. One +for each node listening at port 5007, 5008, 5009, and so on. Ensure that each remote debugger has auto restart enabled. ==== Manually testing cross cluster search Use ./gradlew run-ccs to launch 2 clusters wired together for the purposes of cross cluster search. -For example send a search request "my_remote_cluster:*/_search" to the querying cluster (:9200) to query data in the fulfilling cluster. +For example send a search request "my_remote_cluster:*/_search" to the querying cluster (:9200) to query data +in the fulfilling cluster. -If you are passing in the --debug-jvm flag, you will need two remote debuggers running. -One at port 5007 and another one at port 5008. Ensure that each remote debugger has auto restart enabled. +If you are passing in the --debug-jvm flag, you will need two remote debuggers running. One at port 5007 and another +one at port 5008. Ensure that each remote debugger has auto restart enabled. === Test case filtering. -You can run a single test, provided that you specify the Gradle project. -See the documentation on +You can run a single test, provided that you specify the Gradle project. See the documentation on https://docs.gradle.org/current/userguide/userguide_single.html#simple_name_pattern[simple name pattern filtering]. 
Run a single test case in the `server` project: @@ -222,7 +230,8 @@ Run with a given seed (seed is a hex-encoded long). === Repeats _all_ tests of ClassName N times. -Every test repetition will have a different method seed (derived from a single random master seed). +Every test repetition will have a different method seed +(derived from a single random master seed). -------------------------------------------------- ./gradlew :server:test -Dtests.iters=N --tests org.elasticsearch.package.ClassName @@ -230,7 +239,8 @@ Every test repetition will have a different method seed (derived from a single r === Repeats _all_ tests of ClassName N times. -Every test repetition will have exactly the same master (0xdead) and method-level (0xbeef) seed. +Every test repetition will have exactly the same master (0xdead) and +method-level (0xbeef) seed. ------------------------------------------------------------------------ ./gradlew :server:test -Dtests.iters=N -Dtests.seed=DEAD:BEEF --tests org.elasticsearch.package.ClassName @@ -238,7 +248,9 @@ Every test repetition will have exactly the same master (0xdead) and method-leve === Repeats a given test N times -(note the filters - individual test repetitions are given suffixes, ie: testFoo[0], testFoo[1], etc... so using testmethod or tests.method ending in a glob is necessary to ensure iterations are run). +(note the filters - individual test repetitions are given suffixes, +ie: testFoo[0], testFoo[1], etc... so using testmethod or tests.method +ending in a glob is necessary to ensure iterations are run). ------------------------------------------------------------------------- ./gradlew :server:test -Dtests.iters=N --tests org.elasticsearch.package.ClassName.methodName @@ -263,9 +275,10 @@ Default value provided below in [brackets]. === Load balancing and caches. -By default the tests run on multiple processes using all the available cores on all available CPUs. -Not including hyper-threading. 
-If you want to explicitly specify the number of JVMs you can do so on the command line: +By default the tests run on multiple processes using all the available cores on all +available CPUs. Not including hyper-threading. +If you want to explicitly specify the number of JVMs you can do so on the command +line: ---------------------------- ./gradlew test -Dtests.jvms=8 @@ -277,18 +290,20 @@ Or in `~/.gradle/gradle.properties`: systemProp.tests.jvms=8 ---------------------------- -It's difficult to pick the "right" number here. -Hypercores don't count for CPU intensive tests and you should leave some slack for JVM-internal threads like the garbage collector. -And you have to have enough RAM to handle each JVM. +It's difficult to pick the "right" number here. Hypercores don't count for CPU +intensive tests and you should leave some slack for JVM-internal threads like +the garbage collector. And you have to have enough RAM to handle each JVM. === Test compatibility. -It is possible to provide a version that allows to adapt the tests behaviour to older features or bugs that have been changed or fixed in the meantime. +It is possible to provide a version that allows to adapt the tests behaviour +to older features or bugs that have been changed or fixed in the meantime. ----------------------------------------- ./gradlew test -Dtests.compatibility=1.0.0 ----------------------------------------- + === Miscellaneous. Run all tests without stopping on errors (inspect log files). @@ -303,7 +318,8 @@ Run more verbose output (slave JVM parameters, etc.). ./gradlew test -verbose ---------------------- -Change the default suite timeout to 5 seconds for all tests (note the exclamation mark). +Change the default suite timeout to 5 seconds for all +tests (note the exclamation mark). --------------------------------------- ./gradlew test -Dtests.timeoutSuite=5000! ... 
@@ -315,7 +331,8 @@ Change the logging level of ES (not Gradle) ./gradlew test -Dtests.es.logger.level=DEBUG -------------------------------- -Print all the logging output from the test runs to the commandline even if tests are passing. +Print all the logging output from the test runs to the commandline +even if tests are passing. ------------------------------ ./gradlew test -Dtests.output=always @@ -353,8 +370,8 @@ To run all verification tasks, including static checks, unit tests, and integrat ./gradlew check --------------------------------------------------------------------------- -Note that this will also run the unit tests and precommit tasks first. -If you want to just run the in memory cluster integration tests (because you are debugging them): +Note that this will also run the unit tests and precommit tasks first. If you want to just +run the in memory cluster integration tests (because you are debugging them): --------------------------------------------------------------------------- ./gradlew internalClusterTest @@ -366,10 +383,12 @@ If you want to just run the precommit checks: ./gradlew precommit --------------------------------------------------------------------------- -Some of these checks will require `docker-compose` installed for bringing up test fixtures. -If it's not present those checks will be skipped automatically. -The host running Docker (or VM if you're using Docker Desktop) needs 4GB of memory or some of the containers will fail to start. -You can tell that you are short of memory if containers are exiting quickly after starting with code 137 (128 + 9, where 9 means SIGKILL). +Some of these checks will require `docker-compose` installed for bringing up +test fixtures. If it's not present those checks will be skipped automatically. +The host running Docker (or VM if you're using Docker Desktop) needs 4GB of +memory or some of the containers will fail to start. 
You can tell that you +are short of memory if containers are exiting quickly after starting with +code 137 (128 + 9, where 9 means SIGKILL). == Debugging tests @@ -380,15 +399,20 @@ flag to the testing task and connect a debugger on the default port of `5005`. ./gradlew :server:test --debug-jvm --------------------------------------------------------------------------- -For REST tests, if you'd like to debug the Elasticsearch server itself, and not your test code, use the `--debug-server-jvm` flag and use the "Debug Elasticsearch" run configuration in IntelliJ to listen on the default port of `5007`. +For REST tests, if you'd like to debug the Elasticsearch server itself, and +not your test code, use the `--debug-server-jvm` flag and use the +"Debug Elasticsearch" run configuration in IntelliJ to listen on the default +port of `5007`. --------------------------------------------------------------------------- ./gradlew :rest-api-spec:yamlRestTest --debug-server-jvm --------------------------------------------------------------------------- -NOTE: In the case of test clusters using multiple nodes, multiple debuggers will need to be attached on incrementing ports. -For example, for a 3 node cluster ports `5007`, `5008`, and `5009` will attempt to attach to a listening debugger. -You can use the "Debug Elasticsearch (node 2)" and "(node 3)" run configurations should you need to debug a multi-node cluster. +NOTE: In the case of test clusters using multiple nodes, multiple debuggers +will need to be attached on incrementing ports. For example, for a 3 node +cluster ports `5007`, `5008`, and `5009` will attempt to attach to a listening +debugger. You can use the "Debug Elasticsearch (node 2)" and "(node 3)" run +configurations should you need to debug a multi-node cluster. You can also use a combination of both flags to debug both tests and server. This is only applicable to Java REST tests. @@ -399,19 +423,22 @@ This is only applicable to Java REST tests. 
== Testing the REST layer -The REST layer is tested through specific tests that are executed against a cluster that is configured and initialized via Gradle. -The tests themselves can be written in either Java or with a YAML based DSL. +The REST layer is tested through specific tests that are executed against +a cluster that is configured and initialized via Gradle. The tests +themselves can be written in either Java or with a YAML based DSL. -YAML based REST tests should be preferred since these are shared between all the elasticsearch official clients. -The YAML based tests describe the operations to be executed and the obtained results that need to be tested. +YAML based REST tests should be preferred since these are shared between all +the elasticsearch official clients. The YAML based tests describe the +operations to be executed and the obtained results that need to be tested. The YAML tests support various operators defined in the link:/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/README.asciidoc[rest-api-spec] and adhere to the link:/rest-api-spec/README.markdown[Elasticsearch REST API JSON specification] -In order to run the YAML tests, the relevant API specification needs to be on the test classpath. -Any gradle project that has support for REST tests will get the primary API on it's class path. -However, to better support Gradle incremental builds, it is recommended to explicitly declare which parts of the API the tests depend upon. +In order to run the YAML tests, the relevant API specification needs +to be on the test classpath. Any gradle project that has support for REST +tests will get the primary API on it's class path. However, to better support +Gradle incremental builds, it is recommended to explicitly declare which +parts of the API the tests depend upon. 
For example: - --------------------------------------------------------------------------- restResources { restApi { @@ -420,10 +447,11 @@ restResources { } --------------------------------------------------------------------------- -YAML REST tests that include x-pack specific APIs need to explicitly declare which APIs are required through a similar `includeXpack` configuration. +YAML REST tests that include x-pack specific APIs need to explicitly declare +which APIs are required through a similar `includeXpack` configuration. -The REST tests are run automatically when executing the "./gradlew check" command. -To run only the YAML REST tests use the following command (modules and plugins may also include YAML REST tests): +The REST tests are run automatically when executing the "./gradlew check" command. To run only the +YAML REST tests use the following command (modules and plugins may also include YAML REST tests): --------------------------------------------------------------------------- ./gradlew :rest-api-spec:yamlRestTest @@ -444,17 +472,20 @@ You can run a group of YAML test by using wildcards: --tests "org.elasticsearch.test.rest.ClientYamlTestSuiteIT.test {yaml=index/*/*}" --------------------------------------------------------------------------- -Note that if the selected test via the `--tests` filter is not a valid test, i.e., the YAML test runner is not able to parse and load it, you might get an error message indicating that the test was not found. -In such cases, running the whole suite without using the `--tests` could show more specific error messages about why the test runner is not able to parse or load a certain test. +Note that if the selected test via the `--tests` filter is not a valid test, i.e., the YAML test +runner is not able to parse and load it, you might get an error message indicating that the test +was not found. 
In such cases, running the whole suite without using the `--tests` could show more +specific error messages about why the test runner is not able to parse or load a certain test. The YAML REST tests support all the options provided by the randomized runner, plus the following: -* `tests.rest.blacklist`: comma separated globs that identify tests that are blacklisted and need to be skipped e.g. -Dtests.rest.blacklist=index/*/Index document,get/10_basic/* +* `tests.rest.blacklist`: comma separated globs that identify tests that are +blacklisted and need to be skipped +e.g. -Dtests.rest.blacklist=index/*/Index document,get/10_basic/* Java REST tests can be run with the "javaRestTest" task. For example : - --------------------------------------------------------------------------- ./gradlew :modules:mapper-extras:javaRestTest --------------------------------------------------------------------------- @@ -466,16 +497,21 @@ A specific test case can be run with the following syntax (fqn.test {params}): --tests "org.elasticsearch.index.mapper.TokenCountFieldMapperIntegrationIT.testSearchByTokenCount {storeCountedFields=true loadCountedFields=false}" --------------------------------------------------------------------------- -yamlRestTest's and javaRestTest's are easy to identify, since they are found in a respective source directory. -However, there are some more specialized REST tests that use custom task names. -These are usually found in "qa" projects commonly use the "integTest" task. +yamlRestTest's and javaRestTest's are easy to identify, since they are found in a +respective source directory. However, there are some more specialized REST tests +that use custom task names. These are usually found in "qa" projects commonly +use the "integTest" task. 
If in doubt about which command to use, simply run :check == Testing packaging -The packaging tests are run on different build vm cloud instances to verify that installing and running Elasticsearch distributions works correctly on supported operating systems. -These tests should really only be run on ephemeral systems because they're destructive; that is, these tests install and remove packages and freely modify system settings, so you will probably regret it if you execute them on your development machine. +The packaging tests are run on different build vm cloud instances to verify +that installing and running Elasticsearch distributions works correctly on +supported operating systems. These tests should really only be run on ephemeral +systems because they're destructive; that is, these tests install and remove +packages and freely modify system settings, so you will probably regret it if +you execute them on your development machine. === Reproducing packaging tests @@ -483,15 +519,15 @@ To reproduce or debug packaging tests failures we recommend using using our prov == Testing backwards compatibility -Backwards compatibility tests exist to test upgrading from each supported version to the current version. -To run them all use: +Backwards compatibility tests exist to test upgrading from each supported version +to the current version. To run them all use: ------------------------------------------------- ./gradlew bwcTest ------------------------------------------------- -A specific version can be tested as well. -For example, to test bwc with version 5.3.2 run: +A specific version can be tested as well. For example, to test bwc with +version 5.3.2 run: ------------------------------------------------- ./gradlew v5.3.2#bwcTest @@ -499,7 +535,6 @@ For example, to test bwc with version 5.3.2 run: Use -Dtests.class and -Dtests.method to run a specific bwcTest test. 
For example to run a specific tests from the x-pack rolling upgrade from 7.7.0: - ------------------------------------------------- ./gradlew :x-pack:qa:rolling-upgrade:v7.7.0#bwcTest \ -Dtests.class=org.elasticsearch.upgrades.UpgradeClusterClientYamlTestSuiteIT \ @@ -520,7 +555,9 @@ Sometimes a backward compatibility change spans two versions. A common case is a new functionality that needs a BWC bridge in an unreleased versioned of a release branch (for example, 5.x). Another use case, since the introduction of serverless, is to test BWC against main in addition to the other released branches. To do so, specify the `bwc.refspec` remote and branch to use for the BWC build as `origin/main`. -To test against main, you will also need to create a new version in link:./server/src/main/java/org/elasticsearch/Version.java[Version.java], increment `elasticsearch` in link:./build-tools-internal/version.properties[version.properties], and hard-code the `project.version` for ml-cpp in link:./x-pack/plugin/ml/build.gradle[ml/build.gradle]. +To test against main, you will also need to create a new version in link:./server/src/main/java/org/elasticsearch/Version.java[Version.java], +increment `elasticsearch` in link:./build-tools-internal/version.properties[version.properties], and hard-code the `project.version` for ml-cpp +in link:./x-pack/plugin/ml/build.gradle[ml/build.gradle]. In general, to test the changes, you can instruct Gradle to build the BWC version from another remote/branch combination instead of pulling the release branch from GitHub. You do so using the `bwc.refspec.{VERSION}` system property: @@ -528,35 +565,41 @@ You do so using the `bwc.refspec.{VERSION}` system property: ------------------------------------------------- ./gradlew check -Dtests.bwc.refspec.8.15=origin/main ------------------------------------------------- -The branch needs to be available on the remote that the BWC makes of the repository you run the tests from. 
-Using the remote is a handy trick to make sure that a branch is available and is up to date in the case of multiple runs. + +The branch needs to be available on the remote that the BWC makes of the +repository you run the tests from. Using the remote is a handy trick to make +sure that a branch is available and is up to date in the case of multiple runs. Example: -Say you need to make a change to `main` and have a BWC layer in `5.x`. -You will need to: -. Create a branch called `index_req_change` off your remote `${remote}`. -This will contain your change. -. Create a branch called `index_req_bwc_5.x` off `5.x`. -This will contain your bwc layer. +Say you need to make a change to `main` and have a BWC layer in `5.x`. You +will need to: +. Create a branch called `index_req_change` off your remote `${remote}`. This +will contain your change. +. Create a branch called `index_req_bwc_5.x` off `5.x`. This will contain your bwc layer. . Push both branches to your remote repository. . Run the tests with `./gradlew check -Dbwc.remote=${remote} -Dbwc.refspec.5.x=index_req_bwc_5.x`. ==== Skip fetching latest -For some BWC testing scenarios, you want to use the local clone of the repository without fetching latest. -For these use cases, you can set the system property `tests.bwc.git_fetch_latest` to `false` and the BWC builds will skip fetching the latest from the remote. +For some BWC testing scenarios, you want to use the local clone of the +repository without fetching latest. For these use cases, you can set the system +property `tests.bwc.git_fetch_latest` to `false` and the BWC builds will skip +fetching the latest from the remote. == Testing in FIPS 140-2 mode -We have a CI matrix job that periodically runs all our tests with the JVM configured to be FIPS 140-2 compliant with the use of the BouncyCastle FIPS approved Security Provider. -FIPS 140-2 imposes certain requirements that affect how our tests should be set up or what can be tested. 
-This section summarizes what one needs to take into consideration so that tests won't fail when run in fips mode. +We have a CI matrix job that periodically runs all our tests with the JVM configured +to be FIPS 140-2 compliant with the use of the BouncyCastle FIPS approved Security Provider. +FIPS 140-2 imposes certain requirements that affect how our tests should be set up or what +can be tested. This section summarizes what one needs to take into consideration so that +tests won't fail when run in fips mode. === Muting tests in FIPS 140-2 mode -If the following limitations cannot be observed, or there is a need to actually test some use case that is not available/allowed in fips mode, the test can be muted. -For unit tests or Java rest tests one can use +If the following limitations cannot be observed, or there is a need to actually test some use +case that is not available/allowed in fips mode, the test can be muted. For unit tests or Java +rest tests one can use ------------------------------------------------ assumeFalse("Justification why this cannot be run in FIPS mode", inFipsJvm()); @@ -587,9 +630,9 @@ The following should be taken into consideration when writing new tests or adjus ==== TLS -`JKS` and `PKCS#12` keystores cannot be used in FIPS mode. -If the test depends on being able to use a keystore, it can be muted when needed ( see `ESTestCase#inFipsJvm` ). -Alternatively, one can use PEM encoded files for keys and certificates for the tests or for setting up TLS in a test cluster. +`JKS` and `PKCS#12` keystores cannot be used in FIPS mode. If the test depends on being able to use +a keystore, it can be muted when needed ( see `ESTestCase#inFipsJvm` ). Alternatively, one can use +PEM encoded files for keys and certificates for the tests or for setting up TLS in a test cluster. Also, when in FIPS 140 mode, hostname verification for TLS cannot be turned off so if you are using `*.verification_mode: none` , you'd need to mute the test in fips mode. 
@@ -597,21 +640,26 @@ When using TLS, ensure that private keys used are longer than 2048 bits, or mute ==== Password hashing algorithm -Test clusters are configured with `xpack.security.fips_mode.enabled` set to true. -This means that FIPS 140-2 related bootstrap checks are enabled and the test cluster will fail to form if the password hashing algorithm is set to something else than a PBKDF2 based one. -You can delegate the choice of algorithm to i.e. `SecurityIntegTestCase#getFastStoredHashAlgoForTests` if you don't mind the actual algorithm used, or depend on default values for the test cluster nodes. +Test clusters are configured with `xpack.security.fips_mode.enabled` set to true. This means that +FIPS 140-2 related bootstrap checks are enabled and the test cluster will fail to form if the +password hashing algorithm is set to something else than a PBKDF2 based one. You can delegate the choice +of algorithm to i.e. `SecurityIntegTestCase#getFastStoredHashAlgoForTests` if you don't mind the +actual algorithm used, or depend on default values for the test cluster nodes. ==== Password length -While using `pbkdf2` as the password hashing algorithm, FIPS 140-2 imposes a requirement that passwords are longer than 14 characters. -You can either ensure that all test user passwords in your test are longer than 14 characters and use i.e. `SecurityIntegTestCase#getFastStoredHashAlgoForTests` -to randomly select a hashing algorithm, or use `pbkdf2_stretch` that doesn't have the same limitation. +While using `pbkdf2` as the password hashing algorithm, FIPS 140-2 imposes a requirement that +passwords are longer than 14 characters. You can either ensure that all test user passwords in +your test are longer than 14 characters and use i.e. `SecurityIntegTestCase#getFastStoredHashAlgoForTests` +to randomly select a hashing algorithm, or use `pbkdf2_stretch` that doesn't have the same +limitation. 
==== Keystore Password -In FIPS 140-2 mode, the elasticsearch keystore needs to be password protected with a password of appropriate length. -This is handled automatically in `fips.gradle` and the keystore is unlocked on startup by the test clusters tooling in order to have secure settings available. -However, you might need to take into consideration that the keystore is password-protected with `keystore-password` +In FIPS 140-2 mode, the elasticsearch keystore needs to be password protected with a password +of appropriate length. This is handled automatically in `fips.gradle` and the keystore is unlocked +on startup by the test clusters tooling in order to have secure settings available. However, you +might need to take into consideration that the keystore is password-protected with `keystore-password` if you need to interact with it in a test. == How to write good tests? @@ -620,72 +668,100 @@ if you need to interact with it in a test. There are multiple base classes for tests: -* **`ESTestCase`**: The base class of all tests. -It is typically extended directly by unit tests. -* **`ESSingleNodeTestCase`**: This test case sets up a cluster that has a single node. -* **`ESIntegTestCase`**: An integration test case that creates a cluster that might have multiple nodes. -* **`ESRestTestCase`**: An integration tests that interacts with an external cluster via the REST API. -This is used for Java based REST tests. -* **`ESClientYamlSuiteTestCase` **: A subclass of `ESRestTestCase` used to run YAML based REST tests. +* **`ESTestCase`**: The base class of all tests. It is typically extended + directly by unit tests. +* **`ESSingleNodeTestCase`**: This test case sets up a cluster that has a + single node. +* **`ESIntegTestCase`**: An integration test case that creates a cluster that + might have multiple nodes. +* **`ESRestTestCase`**: An integration tests that interacts with an external + cluster via the REST API. This is used for Java based REST tests. 
+* **`ESClientYamlSuiteTestCase` **: A subclass of `ESRestTestCase` used to run + YAML based REST tests. === Good practices ==== What kind of tests should I write? -Unit tests are the preferred way to test some functionality: most of the time they are simpler to understand, more likely to reproduce, and unlikely to be affected by changes that are unrelated to the piece of functionality that is being tested. +Unit tests are the preferred way to test some functionality: most of the time +they are simpler to understand, more likely to reproduce, and unlikely to be +affected by changes that are unrelated to the piece of functionality that is +being tested. -The reason why `ESSingleNodeTestCase` exists is that all our components used to be very hard to set up in isolation, which had led us to having a number of integration tests but close to no unit tests. `ESSingleNodeTestCase` is a workaround for this issue which provides an easy way to spin up a node and get access to components that are hard to instantiate like `IndicesService`. +The reason why `ESSingleNodeTestCase` exists is that all our components used to +be very hard to set up in isolation, which had led us to having a number of +integration tests but close to no unit tests. `ESSingleNodeTestCase` is a +workaround for this issue which provides an easy way to spin up a node and get +access to components that are hard to instantiate like `IndicesService`. Whenever practical, you should prefer unit tests. -Many tests extend `ESIntegTestCase`, mostly because this is how most tests used to work in the early days of Elasticsearch. -However the complexity of these tests tends to make them hard to debug. -Whenever the functionality that is being tested isn't intimately dependent on how Elasticsearch behaves as a cluster, it is recommended to write unit tests or REST tests instead. +Many tests extend `ESIntegTestCase`, mostly because this is how most tests used +to work in the early days of Elasticsearch. 
However the complexity of these +tests tends to make them hard to debug. Whenever the functionality that is +being tested isn't intimately dependent on how Elasticsearch behaves as a +cluster, it is recommended to write unit tests or REST tests instead. -In short, most new functionality should come with unit tests, and optionally REST tests to test integration. +In short, most new functionality should come with unit tests, and optionally +REST tests to test integration. ==== Refactor code to make it easier to test Unfortunately, a large part of our code base is still hard to unit test. -Sometimes because some classes have lots of dependencies that make them hard to instantiate. -Sometimes because API contracts make tests hard to write. -Code refactors that make functionality easier to unit test are encouraged. -If this sounds very abstract to you, you can have a look at -https://github.com/elastic/elasticsearch/pull/16610[this pull request] for instance, which is a good example. -It refactors `IndicesRequestCache` in such a way that: -- it no longer depends on objects that are hard to instantiate such as -`IndexShard` or `SearchContext`, - time-based eviction is applied on top of the cache rather than internally, which makes it easier to assert on what the cache is expected to contain at a given time. +Sometimes because some classes have lots of dependencies that make them hard to +instantiate. Sometimes because API contracts make tests hard to write. Code +refactors that make functionality easier to unit test are encouraged. If this +sounds very abstract to you, you can have a look at +https://github.com/elastic/elasticsearch/pull/16610[this pull request] for +instance, which is a good example. 
It refactors `IndicesRequestCache` in such +a way that: + - it no longer depends on objects that are hard to instantiate such as + `IndexShard` or `SearchContext`, + - time-based eviction is applied on top of the cache rather than internally, + which makes it easier to assert on what the cache is expected to contain at + a given time. === Bad practices ==== Use randomized-testing for coverage -In general, randomization should be used for parameters that are not expected to affect the behavior of the functionality that is being tested. -For instance the number of shards should not impact `date_histogram` aggregations, and the choice of the `store` type (`niofs` vs `mmapfs`) does not affect the results of a query. -Such randomization helps improve confidence that we are not relying on implementation details of one component or specifics of some setup. +In general, randomization should be used for parameters that are not expected +to affect the behavior of the functionality that is being tested. For instance +the number of shards should not impact `date_histogram` aggregations, and the +choice of the `store` type (`niofs` vs `mmapfs`) does not affect the results of +a query. Such randomization helps improve confidence that we are not relying on +implementation details of one component or specifics of some setup. -However it should not be used for coverage. -For instance if you are testing a piece of functionality that enters different code paths depending on whether the index has 1 shards or 2+ shards, then we shouldn't just test against an index with a random number of shards: there should be one test for the 1-shard case, and another test for the 2+ shards case. +However it should not be used for coverage. 
For instance if you are testing a +piece of functionality that enters different code paths depending on whether +the index has 1 shards or 2+ shards, then we shouldn't just test against an +index with a random number of shards: there should be one test for the 1-shard +case, and another test for the 2+ shards case. ==== Abuse randomization in multi-threaded tests -Multi-threaded tests are often not reproducible due to the fact that there is no guarantee on the order in which operations occur across threads. -Adding randomization to the mix usually makes things worse and should be done with care. +Multi-threaded tests are often not reproducible due to the fact that there is +no guarantee on the order in which operations occur across threads. Adding +randomization to the mix usually makes things worse and should be done with +care. == Test coverage analysis Generating test coverage reports for Elasticsearch is currently not possible through Gradle. -However, it _is_ possible to gain insight in code coverage using IntelliJ's built-in coverage analysis tool that can measure coverage upon executing specific tests. +However, it _is_ possible to gain insight in code coverage using IntelliJ's built-in coverage +analysis tool that can measure coverage upon executing specific tests. -Test coverage reporting used to be possible with JaCoCo when Elasticsearch was using Maven as its build system. -Since the switch to Gradle though, this is no longer possible, seeing as the code currently used to build Elasticsearch does not allow JaCoCo to recognize its tests. +Test coverage reporting used to be possible with JaCoCo when Elasticsearch was using Maven +as its build system. Since the switch to Gradle though, this is no longer possible, seeing as +the code currently used to build Elasticsearch does not allow JaCoCo to recognize its tests. For more information on this, see the discussion in https://github.com/elastic/elasticsearch/issues/28867[issue #28867]. 
== Building with extra plugins - -Additional plugins may be built alongside elasticsearch, where their dependency on elasticsearch will be substituted with the local elasticsearch build. -To add your plugin, create a directory called elasticsearch-extra as a sibling of elasticsearch. -Checkout your plugin underneath elasticsearch-extra and the build will automatically pick it up. -You can verify the plugin is included as part of the build by checking the projects of the build. +Additional plugins may be built alongside elasticsearch, where their +dependency on elasticsearch will be substituted with the local elasticsearch +build. To add your plugin, create a directory called elasticsearch-extra as +a sibling of elasticsearch. Checkout your plugin underneath elasticsearch-extra +and the build will automatically pick it up. You can verify the plugin is +included as part of the build by checking the projects of the build. --------------------------------------------------------------------------- ./gradlew projects @@ -693,10 +769,12 @@ You can verify the plugin is included as part of the build by checking the proje == Environment misc -There is a known issue with macOS localhost resolve strategy that can cause some integration tests to fail. -This is because integration tests have timings for cluster formation, discovery, etc. that can be exceeded if name resolution takes a long time. -To fix this, make sure you have your computer name (as returned by `hostname`) inside `/etc/hosts`, e.g.: - +There is a known issue with macOS localhost resolve strategy that can cause +some integration tests to fail. This is because integration tests have timings +for cluster formation, discovery, etc. that can be exceeded if name resolution +takes a long time. +To fix this, make sure you have your computer name (as returned by `hostname`) +inside `/etc/hosts`, e.g.: .... 
127.0.0.1 localhost ElasticMBP.local 255.255.255.255 broadcasthost @@ -705,16 +783,16 @@ To fix this, make sure you have your computer name (as returned by `hostname`) i == Benchmarking -For changes that might affect the performance characteristics of Elasticsearch you should also run macrobenchmarks. -We maintain a macrobenchmarking tool called https://github.com/elastic/rally[Rally] -which you can use to measure the performance impact. -It comes with a set of default benchmarks that we also -https://elasticsearch-benchmarks.elastic.co/[run every night]. -To get started, please see https://esrally.readthedocs.io/en/stable/[Rally's documentation]. +For changes that might affect the performance characteristics of Elasticsearch +you should also run macrobenchmarks. We maintain a macrobenchmarking tool +called https://github.com/elastic/rally[Rally] +which you can use to measure the performance impact. It comes with a set of +default benchmarks that we also +https://elasticsearch-benchmarks.elastic.co/[run every night]. To get started, +please see https://esrally.readthedocs.io/en/stable/[Rally's documentation]. == Test doc builds -The Elasticsearch docs are in AsciiDoc format. -You can test and build the docs locally using the Elasticsearch documentation build process. -See +The Elasticsearch docs are in AsciiDoc format. You can test and build the docs +locally using the Elasticsearch documentation build process. See https://github.com/elastic/docs. 
From c44899b3a711aa511027b13c9fea1dc9b7c79d1c Mon Sep 17 00:00:00 2001 From: Max Hniebergall Date: Mon, 13 May 2024 15:00:32 -0400 Subject: [PATCH 14/15] add minimum version for tests to match minimum version in services --- .../inference/qa/mixed/CohereServiceMixedIT.java | 12 ++++++++++++ .../qa/mixed/HuggingFaceServiceMixedIT.java | 12 ++++++++++++ .../inference/qa/mixed/OpenAIServiceMixedIT.java | 12 ++++++++++++ 3 files changed, 36 insertions(+) diff --git a/x-pack/plugin/inference/qa/mixed-cluster/src/javaRestTest/java/org/elasticsearch/xpack/inference/qa/mixed/CohereServiceMixedIT.java b/x-pack/plugin/inference/qa/mixed-cluster/src/javaRestTest/java/org/elasticsearch/xpack/inference/qa/mixed/CohereServiceMixedIT.java index 400f96da02b74..4f7e836c12637 100644 --- a/x-pack/plugin/inference/qa/mixed-cluster/src/javaRestTest/java/org/elasticsearch/xpack/inference/qa/mixed/CohereServiceMixedIT.java +++ b/x-pack/plugin/inference/qa/mixed-cluster/src/javaRestTest/java/org/elasticsearch/xpack/inference/qa/mixed/CohereServiceMixedIT.java @@ -7,12 +7,15 @@ package org.elasticsearch.xpack.inference.qa.mixed; +import org.elasticsearch.TransportVersion; import org.elasticsearch.Version; import org.elasticsearch.common.Strings; import org.elasticsearch.inference.TaskType; import org.elasticsearch.test.http.MockResponse; import org.elasticsearch.test.http.MockWebServer; +import org.elasticsearch.xpack.inference.services.cohere.CohereService; import org.elasticsearch.xpack.inference.services.cohere.embeddings.CohereEmbeddingType; +import org.elasticsearch.xpack.inference.services.openai.OpenAiService; import org.hamcrest.Matchers; import org.junit.AfterClass; import org.junit.BeforeClass; @@ -33,6 +36,7 @@ public class CohereServiceMixedIT extends BaseMixedTestCase { private static final String COHERE_EMBEDDINGS_ADDED = "8.13.0"; private static final String COHERE_RERANK_ADDED = "8.14.0"; private static final String BYTE_ALIAS_FOR_INT8_ADDED = "8.14.0"; + private static 
final String MINIMUM_SUPPORTED_VERSION = "8.15.0"; private static MockWebServer cohereEmbeddingsServer; private static MockWebServer cohereRerankServer; @@ -56,6 +60,10 @@ public static void shutdown() { public void testCohereEmbeddings() throws IOException { var embeddingsSupported = bwcVersion.onOrAfter(Version.fromString(COHERE_EMBEDDINGS_ADDED)); assumeTrue("Cohere embedding service added in " + COHERE_EMBEDDINGS_ADDED, embeddingsSupported); + assumeTrue( + "Cohere service requires at least " + MINIMUM_SUPPORTED_VERSION, + bwcVersion.onOrAfter(Version.fromString(MINIMUM_SUPPORTED_VERSION)) + ); final String inferenceIdInt8 = "mixed-cluster-cohere-embeddings-int8"; final String inferenceIdFloat = "mixed-cluster-cohere-embeddings-float"; @@ -106,6 +114,10 @@ void assertEmbeddingInference(String inferenceId, CohereEmbeddingType type) thro public void testRerank() throws IOException { var rerankSupported = bwcVersion.onOrAfter(Version.fromString(COHERE_RERANK_ADDED)); assumeTrue("Cohere rerank service added in " + COHERE_RERANK_ADDED, rerankSupported); + assumeTrue( + "Cohere service requires at least " + MINIMUM_SUPPORTED_VERSION, + bwcVersion.onOrAfter(Version.fromString(MINIMUM_SUPPORTED_VERSION)) + ); final String inferenceId = "mixed-cluster-rerank"; diff --git a/x-pack/plugin/inference/qa/mixed-cluster/src/javaRestTest/java/org/elasticsearch/xpack/inference/qa/mixed/HuggingFaceServiceMixedIT.java b/x-pack/plugin/inference/qa/mixed-cluster/src/javaRestTest/java/org/elasticsearch/xpack/inference/qa/mixed/HuggingFaceServiceMixedIT.java index ce77481eea8ba..3676d18421a0d 100644 --- a/x-pack/plugin/inference/qa/mixed-cluster/src/javaRestTest/java/org/elasticsearch/xpack/inference/qa/mixed/HuggingFaceServiceMixedIT.java +++ b/x-pack/plugin/inference/qa/mixed-cluster/src/javaRestTest/java/org/elasticsearch/xpack/inference/qa/mixed/HuggingFaceServiceMixedIT.java @@ -7,11 +7,14 @@ package org.elasticsearch.xpack.inference.qa.mixed; +import 
org.elasticsearch.TransportVersion; import org.elasticsearch.Version; import org.elasticsearch.common.Strings; import org.elasticsearch.inference.TaskType; import org.elasticsearch.test.http.MockResponse; import org.elasticsearch.test.http.MockWebServer; +import org.elasticsearch.xpack.inference.services.cohere.CohereService; +import org.elasticsearch.xpack.inference.services.huggingface.HuggingFaceService; import org.junit.AfterClass; import org.junit.BeforeClass; @@ -27,6 +30,7 @@ public class HuggingFaceServiceMixedIT extends BaseMixedTestCase { private static final String HF_EMBEDDINGS_ADDED = "8.12.0"; private static final String HF_ELSER_ADDED = "8.12.0"; + private static final String MINIMUM_SUPPORTED_VERSION = "8.15.0"; private static MockWebServer embeddingsServer; private static MockWebServer elserServer; @@ -50,6 +54,10 @@ public static void shutdown() { public void testHFEmbeddings() throws IOException { var embeddingsSupported = bwcVersion.onOrAfter(Version.fromString(HF_EMBEDDINGS_ADDED)); assumeTrue("Hugging Face embedding service added in " + HF_EMBEDDINGS_ADDED, embeddingsSupported); + assumeTrue( + "HuggingFace service requires at least " + MINIMUM_SUPPORTED_VERSION, + bwcVersion.onOrAfter(Version.fromString(MINIMUM_SUPPORTED_VERSION)) + ); final String inferenceId = "mixed-cluster-embeddings"; @@ -71,6 +79,10 @@ void assertEmbeddingInference(String inferenceId) throws IOException { public void testElser() throws IOException { var supported = bwcVersion.onOrAfter(Version.fromString(HF_ELSER_ADDED)); assumeTrue("HF elser service added in " + HF_ELSER_ADDED, supported); + assumeTrue( + "HuggingFace service requires at least " + MINIMUM_SUPPORTED_VERSION, + bwcVersion.onOrAfter(Version.fromString(MINIMUM_SUPPORTED_VERSION)) + ); final String inferenceId = "mixed-cluster-elser"; final String upgradedClusterId = "upgraded-cluster-elser"; diff --git 
a/x-pack/plugin/inference/qa/mixed-cluster/src/javaRestTest/java/org/elasticsearch/xpack/inference/qa/mixed/OpenAIServiceMixedIT.java b/x-pack/plugin/inference/qa/mixed-cluster/src/javaRestTest/java/org/elasticsearch/xpack/inference/qa/mixed/OpenAIServiceMixedIT.java index 9f4fd136c33ae..ad3f576c90927 100644 --- a/x-pack/plugin/inference/qa/mixed-cluster/src/javaRestTest/java/org/elasticsearch/xpack/inference/qa/mixed/OpenAIServiceMixedIT.java +++ b/x-pack/plugin/inference/qa/mixed-cluster/src/javaRestTest/java/org/elasticsearch/xpack/inference/qa/mixed/OpenAIServiceMixedIT.java @@ -7,11 +7,14 @@ package org.elasticsearch.xpack.inference.qa.mixed; +import org.elasticsearch.TransportVersion; +import org.elasticsearch.TransportVersions; import org.elasticsearch.Version; import org.elasticsearch.common.Strings; import org.elasticsearch.inference.TaskType; import org.elasticsearch.test.http.MockResponse; import org.elasticsearch.test.http.MockWebServer; +import org.elasticsearch.xpack.inference.services.openai.OpenAiService; import org.junit.AfterClass; import org.junit.BeforeClass; @@ -30,6 +33,7 @@ public class OpenAIServiceMixedIT extends BaseMixedTestCase { private static final String OPEN_AI_EMBEDDINGS_ADDED = "8.12.0"; private static final String OPEN_AI_EMBEDDINGS_MODEL_SETTING_MOVED = "8.13.0"; private static final String OPEN_AI_COMPLETIONS_ADDED = "8.14.0"; + private static final String MINIMUM_SUPPORTED_VERSION = "8.15.0"; private static MockWebServer openAiEmbeddingsServer; private static MockWebServer openAiChatCompletionsServer; @@ -53,6 +57,10 @@ public static void shutdown() { public void testOpenAiEmbeddings() throws IOException { var openAiEmbeddingsSupported = bwcVersion.onOrAfter(Version.fromString(OPEN_AI_EMBEDDINGS_ADDED)); assumeTrue("OpenAI embedding service added in " + OPEN_AI_EMBEDDINGS_ADDED, openAiEmbeddingsSupported); + assumeTrue( + "OpenAI service requires at least " + MINIMUM_SUPPORTED_VERSION, + 
bwcVersion.onOrAfter(Version.fromString(MINIMUM_SUPPORTED_VERSION)) + ); final String inferenceId = "mixed-cluster-embeddings"; @@ -82,6 +90,10 @@ void assertEmbeddingInference(String inferenceId) throws IOException { public void testOpenAiCompletions() throws IOException { var openAiEmbeddingsSupported = bwcVersion.onOrAfter(Version.fromString(OPEN_AI_EMBEDDINGS_ADDED)); assumeTrue("OpenAI completions service added in " + OPEN_AI_COMPLETIONS_ADDED, openAiEmbeddingsSupported); + assumeTrue( + "OpenAI service requires at least " + MINIMUM_SUPPORTED_VERSION, + bwcVersion.onOrAfter(Version.fromString(MINIMUM_SUPPORTED_VERSION)) + ); final String inferenceId = "mixed-cluster-completions"; final String upgradedClusterId = "upgraded-cluster-completions"; From 539d59e00985caf1c12940eace95f88356d79c4b Mon Sep 17 00:00:00 2001 From: Max Hniebergall Date: Mon, 13 May 2024 15:09:24 -0400 Subject: [PATCH 15/15] spotless --- .../xpack/inference/qa/mixed/CohereServiceMixedIT.java | 3 --- .../xpack/inference/qa/mixed/HuggingFaceServiceMixedIT.java | 3 --- .../xpack/inference/qa/mixed/OpenAIServiceMixedIT.java | 3 --- 3 files changed, 9 deletions(-) diff --git a/x-pack/plugin/inference/qa/mixed-cluster/src/javaRestTest/java/org/elasticsearch/xpack/inference/qa/mixed/CohereServiceMixedIT.java b/x-pack/plugin/inference/qa/mixed-cluster/src/javaRestTest/java/org/elasticsearch/xpack/inference/qa/mixed/CohereServiceMixedIT.java index 4f7e836c12637..69274b46d75c1 100644 --- a/x-pack/plugin/inference/qa/mixed-cluster/src/javaRestTest/java/org/elasticsearch/xpack/inference/qa/mixed/CohereServiceMixedIT.java +++ b/x-pack/plugin/inference/qa/mixed-cluster/src/javaRestTest/java/org/elasticsearch/xpack/inference/qa/mixed/CohereServiceMixedIT.java @@ -7,15 +7,12 @@ package org.elasticsearch.xpack.inference.qa.mixed; -import org.elasticsearch.TransportVersion; import org.elasticsearch.Version; import org.elasticsearch.common.Strings; import org.elasticsearch.inference.TaskType; import 
org.elasticsearch.test.http.MockResponse; import org.elasticsearch.test.http.MockWebServer; -import org.elasticsearch.xpack.inference.services.cohere.CohereService; import org.elasticsearch.xpack.inference.services.cohere.embeddings.CohereEmbeddingType; -import org.elasticsearch.xpack.inference.services.openai.OpenAiService; import org.hamcrest.Matchers; import org.junit.AfterClass; import org.junit.BeforeClass; diff --git a/x-pack/plugin/inference/qa/mixed-cluster/src/javaRestTest/java/org/elasticsearch/xpack/inference/qa/mixed/HuggingFaceServiceMixedIT.java b/x-pack/plugin/inference/qa/mixed-cluster/src/javaRestTest/java/org/elasticsearch/xpack/inference/qa/mixed/HuggingFaceServiceMixedIT.java index 3676d18421a0d..a2793f9060d8a 100644 --- a/x-pack/plugin/inference/qa/mixed-cluster/src/javaRestTest/java/org/elasticsearch/xpack/inference/qa/mixed/HuggingFaceServiceMixedIT.java +++ b/x-pack/plugin/inference/qa/mixed-cluster/src/javaRestTest/java/org/elasticsearch/xpack/inference/qa/mixed/HuggingFaceServiceMixedIT.java @@ -7,14 +7,11 @@ package org.elasticsearch.xpack.inference.qa.mixed; -import org.elasticsearch.TransportVersion; import org.elasticsearch.Version; import org.elasticsearch.common.Strings; import org.elasticsearch.inference.TaskType; import org.elasticsearch.test.http.MockResponse; import org.elasticsearch.test.http.MockWebServer; -import org.elasticsearch.xpack.inference.services.cohere.CohereService; -import org.elasticsearch.xpack.inference.services.huggingface.HuggingFaceService; import org.junit.AfterClass; import org.junit.BeforeClass; diff --git a/x-pack/plugin/inference/qa/mixed-cluster/src/javaRestTest/java/org/elasticsearch/xpack/inference/qa/mixed/OpenAIServiceMixedIT.java b/x-pack/plugin/inference/qa/mixed-cluster/src/javaRestTest/java/org/elasticsearch/xpack/inference/qa/mixed/OpenAIServiceMixedIT.java index ad3f576c90927..33cad6a179281 100644 --- 
a/x-pack/plugin/inference/qa/mixed-cluster/src/javaRestTest/java/org/elasticsearch/xpack/inference/qa/mixed/OpenAIServiceMixedIT.java +++ b/x-pack/plugin/inference/qa/mixed-cluster/src/javaRestTest/java/org/elasticsearch/xpack/inference/qa/mixed/OpenAIServiceMixedIT.java @@ -7,14 +7,11 @@ package org.elasticsearch.xpack.inference.qa.mixed; -import org.elasticsearch.TransportVersion; -import org.elasticsearch.TransportVersions; import org.elasticsearch.Version; import org.elasticsearch.common.Strings; import org.elasticsearch.inference.TaskType; import org.elasticsearch.test.http.MockResponse; import org.elasticsearch.test.http.MockWebServer; -import org.elasticsearch.xpack.inference.services.openai.OpenAiService; import org.junit.AfterClass; import org.junit.BeforeClass;