Skip to content

Commit

Permalink
Include tracebacks in errors returned from failing requests (#343)
Browse files Browse the repository at this point in the history
* Include tracebacks in errors returned from failing requests

* Address a linter error

* Supply the full schema when processing requests and responses
  • Loading branch information
karlhigley authored May 4, 2023
1 parent b150e23 commit 2b1b90b
Show file tree
Hide file tree
Showing 2 changed files with 37 additions and 19 deletions.
9 changes: 8 additions & 1 deletion merlin/systems/triton/models/executor_model.py
Original file line number Diff line number Diff line change
Expand Up @@ -95,10 +95,17 @@ def execute(self, request):
be the same as `requests`
"""
# Convert the raw Triton request into a TensorTable, using the ensemble's
# full input schema (per the commit message, the full schema is now supplied
# when processing requests and responses).
inputs = triton_request_to_tensor_table(request, self.ensemble.input_schema)

try:
    outputs = self.ensemble.transform(inputs, runtime=TritonExecutorRuntime())
except Exception as exc:
    raise pb_utils.TritonModelException(str(exc)) from exc
    # NOTE(review): the line above is the PRE-change side of this diff (the
    # "1 deletion"); in the committed file only the traceback-bearing raise
    # below remains. As flattened on this page, everything after that first
    # raise would be unreachable — confirm against the actual file.
    import traceback

    # Wrap any failure as a TritonModelException so the Triton client sees
    # the exception type, message, and formatted traceback instead of an
    # opaque server-side error. traceback.format_tb returns a *list* of
    # strings, so the message embeds its list repr.
    raise pb_utils.TritonModelException(
        f"Error: {type(exc)} - {str(exc)}, "
        f"Traceback: {traceback.format_tb(exc.__traceback__)}"
    ) from exc

# Success path: map the transformed outputs back to a Triton response using
# the ensemble's output schema.
return tensor_table_to_triton_response(outputs, self.ensemble.output_schema)


Expand Down
47 changes: 29 additions & 18 deletions merlin/systems/triton/models/workflow_model.py
Original file line number Diff line number Diff line change
Expand Up @@ -97,24 +97,35 @@ def execute(self, request):
"""Transforms the input batches by running through a NVTabular workflow.transform
function.
"""
# transform the triton tensors to a dict of name:numpy tensor
input_tensors = {
name: _convert_tensor(pb_utils.get_input_tensor_by_name(request, name))
for name in self.input_dtypes
}

# multihots are represented as a tuple of (values, offsets)
for name, dtype in self.input_multihots.items():
values = _convert_tensor(pb_utils.get_input_tensor_by_name(request, name + "__values"))
offsets = _convert_tensor(
pb_utils.get_input_tensor_by_name(request, name + "__offsets")
)
input_tensors[name] = (values, offsets)

transformed = self.runner.run_workflow(input_tensors)
result = [pb_utils.Tensor(name, data) for name, data in transformed.items()]

return pb_utils.InferenceResponse(result)
# The whole request-handling path is wrapped so that any failure is
# surfaced to the Triton client as a TritonModelException (with traceback)
# rather than an unexplained server error.
try:
    # transform the triton tensors to a dict of name:numpy tensor
    input_tensors = {
        name: _convert_tensor(pb_utils.get_input_tensor_by_name(request, name))
        for name in self.input_dtypes
    }

    # multihots are represented as a tuple of (values, offsets)
    # NOTE(review): `dtype` is unused in this loop body — presumably kept
    # for dict-iteration symmetry; confirm before removing.
    for name, dtype in self.input_multihots.items():
        values = _convert_tensor(
            pb_utils.get_input_tensor_by_name(request, name + "__values")
        )
        offsets = _convert_tensor(
            pb_utils.get_input_tensor_by_name(request, name + "__offsets")
        )
        input_tensors[name] = (values, offsets)

    # Run the NVTabular workflow over the assembled inputs and re-wrap each
    # output array as a Triton tensor for the response.
    transformed = self.runner.run_workflow(input_tensors)
    result = [pb_utils.Tensor(name, data) for name, data in transformed.items()]

    return pb_utils.InferenceResponse(result)

except Exception as exc:
    # Local import keeps `traceback` out of module scope; the commit notes
    # this revision also addressed a linter error — presumably related.
    import traceback

    # traceback.format_tb returns a list of strings, so the client-visible
    # message embeds its list repr alongside the exception type and message.
    raise pb_utils.TritonModelException(
        f"Error: {type(exc)} - {str(exc)}, "
        f"Traceback: {traceback.format_tb(exc.__traceback__)}"
    ) from exc

def _is_list_dtype(self, column: str) -> bool:
"""Check if a column of a Workflow contains list elements"""
Expand Down

0 comments on commit 2b1b90b

Please sign in to comment.