diff --git a/.github/workflows/CI-workflow.yml b/.github/workflows/CI-workflow.yml
index 7c38b23eab..a316e7a5c1 100644
--- a/.github/workflows/CI-workflow.yml
+++ b/.github/workflows/CI-workflow.yml
@@ -38,7 +38,6 @@ jobs:
       # using the same image which is used by opensearch-build team to build the OpenSearch Distribution
       # this image tag is subject to change as more dependencies and updates will arrive over time
       image: ${{ needs.Get-CI-Image-Tag.outputs.ci-image-version-linux }}
-      # need to switch to root so that github actions can install runner binary on container without permission issues.
       options: ${{ needs.Get-CI-Image-Tag.outputs.ci-image-start-options }}

     steps:
diff --git a/gradle/wrapper/gradle-wrapper.jar b/gradle/wrapper/gradle-wrapper.jar
index d64cd49177..a4b76b9530 100644
Binary files a/gradle/wrapper/gradle-wrapper.jar and b/gradle/wrapper/gradle-wrapper.jar differ
diff --git a/gradle/wrapper/gradle-wrapper.properties b/gradle/wrapper/gradle-wrapper.properties
index d296387b57..f19a9a4690 100644
--- a/gradle/wrapper/gradle-wrapper.properties
+++ b/gradle/wrapper/gradle-wrapper.properties
@@ -1,7 +1,7 @@
 distributionBase=GRADLE_USER_HOME
 distributionPath=wrapper/dists
-distributionSha256Sum=a4b4158601f8636cdeeab09bd76afb640030bb5b144aafe261a5e8af027dc612
-distributionUrl=https\://services.gradle.org/distributions/gradle-8.8-bin.zip
+distributionSha256Sum=f397b287023acdba1e9f6fc5ea72d22dd63669d59ed4a289a29b1a76eee151c6
+distributionUrl=https\://services.gradle.org/distributions/gradle-8.11.1-bin.zip
 networkTimeout=10000
 zipStoreBase=GRADLE_USER_HOME
 zipStorePath=wrapper/dists
diff --git a/gradlew b/gradlew
index 1aa94a4269..f5feea6d6b 100755
--- a/gradlew
+++ b/gradlew
@@ -15,6 +15,8 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 #
+# SPDX-License-Identifier: Apache-2.0
+#

 ##############################################################################
 #
@@ -55,7 +57,7 @@
 #       Darwin, MinGW, and NonStop.
 #
 #   (3) This script is generated from the Groovy template
-#       https://github.com/gradle/gradle/blob/HEAD/subprojects/plugins/src/main/resources/org/gradle/api/internal/plugins/unixStartScript.txt
+#       https://github.com/gradle/gradle/blob/HEAD/platforms/jvm/plugins-application/src/main/resources/org/gradle/api/internal/plugins/unixStartScript.txt
 #       within the Gradle project.
 #
 #       You can find Gradle at https://github.com/gradle/gradle/.
@@ -84,7 +86,8 @@ done
 # shellcheck disable=SC2034
 APP_BASE_NAME=${0##*/}
 # Discard cd standard output in case $CDPATH is set (https://github.com/gradle/gradle/issues/25036)
-APP_HOME=$( cd "${APP_HOME:-./}" > /dev/null && pwd -P ) || exit
+APP_HOME=$( cd -P "${APP_HOME:-./}" > /dev/null && printf '%s
+' "$PWD" ) || exit

 # Use the maximum available, or set MAX_FD != -1 to use that value.
 MAX_FD=maximum
diff --git a/gradlew.bat b/gradlew.bat
index 7101f8e467..9b42019c79 100644
--- a/gradlew.bat
+++ b/gradlew.bat
@@ -13,6 +13,8 @@
 @rem See the License for the specific language governing permissions and
 @rem limitations under the License.
 @rem
+@rem SPDX-License-Identifier: Apache-2.0
+@rem

 @if "%DEBUG%"=="" @echo off
 @rem ##########################################################################
diff --git a/ml-algorithms/src/main/java/org/opensearch/ml/engine/utils/FileUtils.java b/ml-algorithms/src/main/java/org/opensearch/ml/engine/utils/FileUtils.java
index 677ca1aa9d..82551042e1 100644
--- a/ml-algorithms/src/main/java/org/opensearch/ml/engine/utils/FileUtils.java
+++ b/ml-algorithms/src/main/java/org/opensearch/ml/engine/utils/FileUtils.java
@@ -45,16 +45,16 @@ public class FileUtils {
      * @throws IOException
      */
     public static List<String> splitFileIntoChunks(File file, Path outputPath, int chunkSize) throws IOException {
-        int fileSize = (int) file.length();
+        long fileSize = file.length();
         ArrayList<String> nameList = new ArrayList<>();
         try (InputStream inStream = new BufferedInputStream(new FileInputStream(file))) {
             int numberOfChunk = 0;
-            int totalBytesRead = 0;
+            long totalBytesRead = 0;
             while (totalBytesRead < fileSize) {
                 String partName = numberOfChunk + "";
-                int bytesRemaining = fileSize - totalBytesRead;
+                long bytesRemaining = fileSize - totalBytesRead;
                 if (bytesRemaining < chunkSize) {
-                    chunkSize = bytesRemaining;
+                    chunkSize = (int) bytesRemaining;
                 }
                 byte[] temporary = new byte[chunkSize];
                 int bytesRead = inStream.read(temporary, 0, chunkSize);
diff --git a/ml-algorithms/src/test/java/org/opensearch/ml/engine/utils/FileUtilsTest.java b/ml-algorithms/src/test/java/org/opensearch/ml/engine/utils/FileUtilsTest.java
new file mode 100644
index 0000000000..390286aadd
--- /dev/null
+++ b/ml-algorithms/src/test/java/org/opensearch/ml/engine/utils/FileUtilsTest.java
@@ -0,0 +1,63 @@
+/*
+ * Copyright OpenSearch Contributors
+ * SPDX-License-Identifier: Apache-2.0
+ */
+
+package org.opensearch.ml.engine.utils;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+
+import java.io.File;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.util.Arrays;
+import java.util.List;
+import java.util.Random;
+
+import org.junit.After;
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.Test;
+import org.junit.rules.TemporaryFolder;
+
+public class FileUtilsTest {
+    private TemporaryFolder tempDir;
+
+    @Before
+    public void setUp() throws Exception {
+        tempDir = new TemporaryFolder();
+        tempDir.create();
+    }
+
+    @After
+    public void tearDown() {
+        if (tempDir != null) {
+            tempDir.delete();
+        }
+    }
+
+    @Test
+    public void testSplitFileIntoChunks() throws Exception {
+        // Write file.
+        Random random = new Random();
+        File file = tempDir.newFile("large_file");
+        byte[] data = new byte[1017];
+        random.nextBytes(data);
+        Files.write(file.toPath(), data);
+
+        // Split file into chunks.
+        int chunkSize = 325;
+        List<String> chunkPaths = FileUtils.splitFileIntoChunks(file, tempDir.newFolder().toPath(), chunkSize);
+
+        // Verify.
+        int currentPosition = 0;
+        for (String chunkPath : chunkPaths) {
+            byte[] chunk = Files.readAllBytes(Path.of(chunkPath));
+            assertTrue("Chunk size", currentPosition + chunk.length <= data.length);
+            Assert.assertArrayEquals(Arrays.copyOfRange(data, currentPosition, currentPosition + chunk.length), chunk);
+            currentPosition += chunk.length;
+        }
+        assertEquals(currentPosition, data.length);
+    }
+}
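A minimal sketch (not part of the patch) of why the `(int) file.length()` cast removed in FileUtils.java was unsafe: for a model file of 2 GB or more the cast overflows to a negative value, so the old `while (totalBytesRead < fileSize)` loop would exit immediately and write no chunks. The class below is hypothetical and only demonstrates the overflow.

// Sketch: int overflow on large file sizes (hypothetical values, for illustration only).
public class ChunkSizeOverflowSketch {
    public static void main(String[] args) {
        long fileSize = 3L * 1024 * 1024 * 1024;   // a 3 GB model file
        int truncated = (int) fileSize;            // the old cast
        System.out.println(fileSize);              // 3221225472
        System.out.println(truncated);             // -1073741824
        // With fileSize truncated to a negative int, the old loop
        // "while (totalBytesRead < fileSize)" terminated at once and produced no chunks.
    }
}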
diff --git a/plugin/src/main/java/org/opensearch/ml/autoredeploy/MLModelAutoReDeployer.java b/plugin/src/main/java/org/opensearch/ml/autoredeploy/MLModelAutoReDeployer.java
index ac38ce24c6..0b885ac9ab 100644
--- a/plugin/src/main/java/org/opensearch/ml/autoredeploy/MLModelAutoReDeployer.java
+++ b/plugin/src/main/java/org/opensearch/ml/autoredeploy/MLModelAutoReDeployer.java
@@ -186,6 +186,10 @@ private void triggerAutoDeployModels(List<String> addedNodes) {
                     modelAutoRedeployArrangements.add(modelAutoRedeployArrangement);
                 });
                 redeployAModel();
+            } else {
+                log.info("Could not find any models in the index, not performing auto reloading!");
+                startCronjobAndClearListener();
+                return;
             }
         }, e -> {
             if (e instanceof IndexNotFoundException) {
@@ -261,6 +265,7 @@ private void queryRunningModels(ActionListener<SearchResponse> listener) {
     private void triggerModelRedeploy(ModelAutoRedeployArrangement modelAutoRedeployArrangement) {
         if (modelAutoRedeployArrangement == null) {
             log.info("No more models in arrangement, skipping the redeployment");
+            startCronjobAndClearListener();
             return;
         }
         String modelId = modelAutoRedeployArrangement.getSearchResponse().getId();
@@ -275,10 +280,12 @@ private void triggerModelRedeploy(ModelAutoRedeployArrangement modelAutoRedeploy
                 "Model function_name or algorithm is null, model is not in correct status, please check the model, model id is: {}",
                 modelId
             );
+            redeployAModel();
             return;
         }
         if (FunctionName.REMOTE == FunctionName.from(functionName)) {
             log.info("Skipping redeploying remote model {} as remote model deployment can be done at prediction time.", modelId);
+            redeployAModel();
             return;
         }
         List<String> planningWorkerNodes = (List<String>) sourceAsMap.get(MLModel.PLANNING_WORKER_NODES_FIELD);
@@ -302,6 +309,7 @@ private void triggerModelRedeploy(ModelAutoRedeployArrangement modelAutoRedeploy
             .info(
                 "Allow custom deployment plan is true and deploy to all nodes is false and added nodes are not in planning worker nodes list, not to auto redeploy the model to the new nodes!"
             );
+            redeployAModel();
             return;
         }
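The calls added in MLModelAutoReDeployer.java all serve one purpose: every early-return branch in the asynchronous redeploy flow must either advance to the next model (redeployAModel()) or close out the run (startCronjobAndClearListener()), otherwise the chain stalls after the first skipped model. A simplified sketch of that pattern follows; the names pendingModels, deployAsync and finishRun are hypothetical and are not the plugin's real API.

import java.util.ArrayDeque;
import java.util.Queue;

// Simplified sketch of a self-continuing redeploy chain (hypothetical API, for illustration only).
public class RedeployChainSketch {
    private final Queue<String> pendingModels = new ArrayDeque<>();

    void redeployNext() {
        String modelId = pendingModels.poll();
        if (modelId == null) {
            finishRun();                              // analogous to startCronjobAndClearListener()
            return;
        }
        if (shouldSkip(modelId)) {
            redeployNext();                           // analogous to calling redeployAModel() before returning
            return;
        }
        deployAsync(modelId, this::redeployNext);     // keep the chain moving once this model is handled
    }

    private boolean shouldSkip(String modelId) {
        return modelId.startsWith("remote-");         // e.g. remote models deploy at prediction time
    }

    private void deployAsync(String modelId, Runnable onDone) {
        onDone.run();                                 // stand-in for the real asynchronous deploy call
    }

    private void finishRun() {
        // restart the sync-up cron job and clear the cluster-change listener
    }
}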