From d431d616168d336c4bfe74f1470dd89e0a673cfd Mon Sep 17 00:00:00 2001 From: David Kyle Date: Wed, 3 May 2023 13:41:26 +0100 Subject: [PATCH] [ML] Re-enable upgrade tests with workaround for invalid pipeline config (#95778) In #95766 the ML trained model deployment upgrade tests fail due to an invalid ingest processor configuration. ML stopped parsing the full ingest pipeline in version 8.3.1 so the tests can be re-enabled when upgrading from 8.3.1 or later. --- .../upgrades/MLModelDeploymentsUpgradeIT.java | 20 ++++++++++++++----- .../upgrades/MlTrainedModelsUpgradeIT.java | 16 ++++++++++++--- 2 files changed, 28 insertions(+), 8 deletions(-) diff --git a/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/MLModelDeploymentsUpgradeIT.java b/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/MLModelDeploymentsUpgradeIT.java index ad3d3254138db..53cae2a0a91b0 100644 --- a/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/MLModelDeploymentsUpgradeIT.java +++ b/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/MLModelDeploymentsUpgradeIT.java @@ -96,7 +96,6 @@ public void removeLogging() throws IOException { client().performRequest(request); } - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/95360") public void testTrainedModelDeployment() throws Exception { assumeTrue("NLP model deployments added in 8.0", UPGRADE_FROM_VERSION.onOrAfter(Version.V_8_0_0)); @@ -112,10 +111,21 @@ public void testTrainedModelDeployment() throws Exception { request.addParameter("wait_for_status", "yellow"); request.addParameter("timeout", "70s"); })); - waitForDeploymentStarted(modelId); - // attempt inference on new and old nodes multiple times - for (int i = 0; i < 10; i++) { - assertInfer(modelId); + + // Workaround for an upgrade test failure where an ingest + // pipeline config cannot be parsed by older nodes: + // https://github.com/elastic/elasticsearch/issues/95766 + // + // In version 8.3.1 ml
stopped parsing the full ingest + pipeline configuration so it will avoid this problem. + // TODO remove this check once https://github.com/elastic/elasticsearch/issues/95766 + // is resolved + if (UPGRADE_FROM_VERSION.onOrAfter(Version.V_8_3_1)) { + waitForDeploymentStarted(modelId); + // attempt inference on new and old nodes multiple times + for (int i = 0; i < 10; i++) { + assertInfer(modelId); + } } } case UPGRADED -> { diff --git a/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/MlTrainedModelsUpgradeIT.java b/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/MlTrainedModelsUpgradeIT.java index 0667c22212dff..7742b3fe28286 100644 --- a/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/MlTrainedModelsUpgradeIT.java +++ b/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/MlTrainedModelsUpgradeIT.java @@ -56,7 +56,6 @@ protected Collection templatesToWaitFor() { .collect(Collectors.toSet()); } - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/95360") public void testTrainedModelInference() throws Exception { assumeTrue("We should only test if old cluster is after trained models we GA", UPGRADE_FROM_VERSION.after(Version.V_7_13_0)); switch (CLUSTER_TYPE) { @@ -75,8 +74,19 @@ public void testTrainedModelInference() throws Exception { request.addParameter("timeout", "70s"); })); List modelIds = getTrainedModels(); + + // Workaround for an upgrade test failure where an ingest + // pipeline config cannot be parsed by older nodes: + // https://github.com/elastic/elasticsearch/issues/95766 + // + // In version 8.3.1 ml stopped parsing the full ingest + // pipeline configuration so it will avoid this problem.
+ // TODO remove this check once https://github.com/elastic/elasticsearch/issues/95766 + // is resolved + if (UPGRADE_FROM_VERSION.onOrAfter(Version.V_8_3_1)) { + // Test that stats are serializable and can be gathered + getTrainedModelStats(); + } // Verify that the pipelines still work and inference is possible testInfer(modelIds); }