Add test for different variants of ONNX flavor #4469

Merged · 3 commits · Jun 7, 2022 (diff below shows changes from 2 commits)
@@ -1,5 +1,5 @@
-# Copyright 2021, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
+# Copyright 2021-2022, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
 #
 # Redistribution and use in source and binary forms, with or without
 # modification, are permitted provided that the following conditions
@@ -24,7 +24,6 @@
 # OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-name: "onnx_float32_int32_int32"
 platform: "onnxruntime_onnx"
 max_batch_size: 8
 version_policy: { latest { num_versions: 1 }}
qa/L0_mlflow/plugin_test.py (63 changes: 44 additions & 19 deletions)

@@ -42,31 +42,20 @@ class PluginTest(tu.TestResultCollector):
     def setUp(self):
         self.client_ = get_deploy_client('triton')

-    def test_onnx_flavor(self):
-        # Log the ONNX model to MLFlow
-        import mlflow.onnx
-        import onnx
-        model = onnx.load(
-            "./mlflow-triton-plugin/examples/onnx_float32_int32_int32/1/model.onnx"
-        )
-        # Use a different name to ensure the plugin operates on correct model
-        mlflow.onnx.log_model(model,
-                              "triton",
-                              registered_model_name="onnx_model")
-
+    def _validate_deployment(self, model_name):
         # create
-        self.client_.create_deployment("onnx_model",
-                                       "models:/onnx_model/1",
+        self.client_.create_deployment(model_name,
+                                       "models:/{}/1".format(model_name),
                                        flavor="onnx")

         # list
         deployment_list = self.client_.list_deployments()
         self.assertEqual(len(deployment_list), 1)
-        self.assertEqual(deployment_list[0]['name'], "onnx_model")
+        self.assertEqual(deployment_list[0]['name'], model_name)

         # get
-        deployment = self.client_.get_deployment("onnx_model")
-        self.assertEqual(deployment['name'], "onnx_model")
+        deployment = self.client_.get_deployment(model_name)
+        self.assertEqual(deployment['name'], model_name)

         # predict
         inputs = {}
@@ -75,7 +64,7 @@ def test_onnx_flavor(self):
         for key, value in input_json['inputs'].items():
             inputs[key] = np.array(value, dtype=np.float32)

-        output = self.client_.predict("onnx_model", inputs)
+        output = self.client_.predict(model_name, inputs)
         with open("./mlflow-triton-plugin/examples/expected_output.json",
                   "r") as f:
             output_json = json.load(f)
@@ -86,7 +75,43 @@
             err_msg='Inference result is not correct')

         # delete
-        self.client_.delete_deployment("onnx_model")
+        self.client_.delete_deployment(model_name)

+    def test_onnx_flavor(self):
+        # Log the ONNX model to MLFlow
+        import mlflow.onnx
+        import onnx
+        model = onnx.load(
+            "./mlflow-triton-plugin/examples/onnx_float32_int32_int32/1/model.onnx"
+        )
+        # Use a different name to ensure the plugin operates on correct model
+        mlflow.onnx.log_model(model,
+                              "triton",
+                              registered_model_name="onnx_model")
+
+        self._validate_deployment("onnx_model")
+
+    def test_onnx_flavor_with_files(self):
+        # Log the ONNX model to MLFlow
+        import mlflow.onnx
+        import onnx
+        model = onnx.load(
+            "./mlflow-triton-plugin/examples/onnx_float32_int32_int32/1/model.onnx"
+        )
+        config_path = "./mlflow-triton-plugin/examples/onnx_float32_int32_int32/config.pbtxt"
+        # Use a different name to ensure the plugin operates on correct model
+        mlflow.onnx.log_model(model,
+                              "triton",
+                              registered_model_name="onnx_model_with_files")
+        mlflow.log_artifact(config_path, "triton")
+
+        self._validate_deployment("onnx_model_with_files")
+
+        # Check if the additional files are properly copied
+        import filecmp
+        self.assertTrue(
+            filecmp.cmp(config_path,
+                        "./models/onnx_model_with_files/config.pbtxt"))


 if __name__ == '__main__':
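
For context, a minimal sketch of the user-facing flow that the new test_onnx_flavor_with_files case exercises, assuming the MLflow Triton plugin is installed and a tracking server is configured. The paths and the "my_onnx_model" name are illustrative, not taken from this PR:

    import onnx
    import mlflow.onnx
    from mlflow.deployments import get_deploy_client

    # Log an ONNX model and a Triton config.pbtxt under the same artifact path.
    model = onnx.load("path/to/model.onnx")
    mlflow.onnx.log_model(model, "triton", registered_model_name="my_onnx_model")
    mlflow.log_artifact("path/to/config.pbtxt", "triton")

    # Deploy through the plugin; extra artifacts logged alongside the model
    # (such as config.pbtxt) are copied into the Triton model repository,
    # which is what the filecmp assertion above verifies.
    client = get_deploy_client("triton")
    client.create_deployment("my_onnx_model", "models:/my_onnx_model/1",
                             flavor="onnx")
    client.delete_deployment("my_onnx_model")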
qa/L0_mlflow/test.sh (11 changes: 10 additions & 1 deletion)

@@ -45,6 +45,14 @@ mkdir -p ./mlflow/artifacts

 pip install ./mlflow-triton-plugin/

+# Clear mlflow registered models if any
+python - << EOF
+from mlflow.tracking import MlflowClient
+c = MlflowClient()
+for m in c.list_registered_models():
+    c.delete_registered_model(m.name)
+EOF
+
 rm -rf ./models
 mkdir -p ./models
 SERVER=/opt/tritonserver/bin/tritonserver
@@ -137,6 +145,7 @@ if [ $CLI_RET -ne 0 ]; then
 fi
 set -e

+# ONNX flavor with Python package
 set +e
 PY_LOG=plugin_py.log
 PY_TEST=plugin_test.py
@@ -147,7 +156,7 @@ if [ $? -ne 0 ]; then
     echo -e "\n***\n*** Python Test Failed\n***"
     RET=1
 else
-    check_test_results $TEST_RESULT_FILE 1
+    check_test_results $TEST_RESULT_FILE 2
     if [ $? -ne 0 ]; then
         cat $PY_LOG
         echo -e "\n***\n*** Test Result Verification Failed\n***"
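
One caveat for anyone rerunning this cleanup step today: MlflowClient.list_registered_models() was removed in MLflow 2.x. A sketch of an equivalent cleanup using search_registered_models(), which is available in recent MLflow releases (this iterates only the first page of results, which suffices for a test-setup wipe):

    from mlflow.tracking import MlflowClient

    client = MlflowClient()
    # search_registered_models() returns a PagedList of RegisteredModel objects.
    for rm in client.search_registered_models():
        client.delete_registered_model(rm.name)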