Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Enabling profiler with empty Optional causes segmentation fault #22890

Open
amancini-N opened this issue Nov 19, 2024 · 0 comments · May be fixed by #22891
Open

Enabling profiler with empty Optional causes segmentation fault #22890

amancini-N opened this issue Nov 19, 2024 · 0 comments · May be fixed by #22891
Labels
core runtime issues related to core runtime

Comments

@amancini-N
Copy link

Describe the issue

When profiling is enabled for a model that includes an empty Optional, a segmentation fault occurs.

To reproduce

The following Python script creates a model that reproduces the error:

"""Following module creates a relu model using Optional, OptionalHasElement, OptionalGetElement, If operators.
The model is saved as 'relu_with_optional.onnx' in the current directory.
It is then loaded and run using onnxruntime. The input is -1 and the expected output is 0.
Enabling profiling causes a segfault"""

import onnx
import onnxruntime as ort
import numpy as np

def make_optional_tensor_value_info(name, data_type, shape):
    """Build a ValueInfoProto whose type is optional(tensor(data_type, shape)).

    onnx.helper has no direct helper for optional-typed value infos, so this
    wraps a tensor type proto in an optional type proto by hand.
    """
    tensor_type = onnx.helper.make_tensor_type_proto(data_type, shape, None)
    optional_type = onnx.helper.make_optional_type_proto(tensor_type)

    info = onnx.ValueInfoProto()
    info.name = name
    info.type.CopyFrom(optional_type)
    return info

def create_model():
    """Build and save 'relu_with_optional.onnx', a ReLU implemented with
    Optional / OptionalHasElement / OptionalGetElement / If operators.

    Structure: the first If wraps the input in an Optional (then branch) or
    produces an *empty* Optional (else branch, taken for non-positive input);
    the second If unwraps the Optional when present, else outputs 0. The empty
    Optional path is what triggers the profiler segfault.
    """
    input_info_proto = onnx.helper.make_tensor_value_info("input", onnx.TensorProto.INT32, shape=[1])
    output_info_proto = onnx.helper.make_tensor_value_info("output", onnx.TensorProto.INT32, shape=[1])

    # input > 0 decides which branch of the first If runs.
    zero = onnx.helper.make_node("Constant", [], ["zero"], value=onnx.helper.make_tensor(name="zero", data_type=onnx.TensorProto.INT32, dims=[1], vals=[0]))
    is_input_positive = onnx.helper.make_node("Greater", ["input", "zero"], ["is_input_positive"])

    # Make a graph, return optional of input
    opt_wrap = onnx.helper.make_node("Optional", ["input"], ["opt_wrap"])
    then_graph = onnx.helper.make_graph(
        [opt_wrap],
        "then",
        [],
        [make_optional_tensor_value_info("opt_wrap", onnx.TensorProto.INT32, shape=[1])],
        [],
    )

    # Make a graph, return empty optional
    # Optional with no inputs needs an explicit `type` attribute to know the
    # element type it would have contained.
    empty_opt_wrap = onnx.helper.make_node("Optional", [], ["empty_opt"], type=onnx.helper.make_tensor_type_proto( onnx.TensorProto.INT32, [1]))
    else_graph = onnx.helper.make_graph(
        [empty_opt_wrap],
        "else",
        [],
        [make_optional_tensor_value_info("empty_opt", onnx.TensorProto.INT32, shape=[1])],
        [],
    )

    # Make If node
    if_node = onnx.helper.make_node("If", ["is_input_positive"], ["intermediate"], then_branch=then_graph, else_branch=else_graph)

    # If condition: OptionalGet
    opt_has = onnx.helper.make_node("OptionalHasElement", ["intermediate"], ["has"])

    # If then branch: OptionalGet
    # NOTE: then_graph/else_graph are deliberately rebound here for the
    # second If node's branches.
    opt_get = onnx.helper.make_node("OptionalGetElement", ["intermediate"], ["get"])
    then_graph = onnx.helper.make_graph(
        [opt_get],
        "then",
        [],
        [onnx.helper.make_tensor_value_info("get", onnx.TensorProto.INT32, shape=[1])],
        [],
    )

    # If else branch: Constant
    const = onnx.helper.make_node("Constant", [], ["const"], value=onnx.helper.make_tensor(name="const", data_type=onnx.TensorProto.INT32, dims=[1], vals=[0]))
    else_graph = onnx.helper.make_graph(
        [const],
        "else",
        [],
        [onnx.helper.make_tensor_value_info("const", onnx.TensorProto.INT32, shape=[1])],
        [],
    )

    # Make If node
    if_node_2 = onnx.helper.make_node("If", ["has"], ["output"], then_branch=then_graph, else_branch=else_graph)

    # Make main graph
    graph = onnx.helper.make_graph(
        [zero, is_input_positive, if_node, opt_has, if_node_2],
        "main",
        [input_info_proto],
        [output_info_proto],
        [],
    )

    # Make model
    model = onnx.helper.make_model(
        graph,
        opset_imports=[onnx.helper.make_opsetid("", 17)],
        doc_string="Relu implementation using Optional, OptionalHasElement, OptionalGetElement, If operators",
    )

    # Save model
    onnx.save(model, "relu_with_optional.onnx")

def run_model():
    """Load 'relu_with_optional.onnx' with profiling enabled and run once.

    The input -1 forces the empty-Optional branch; with profiling enabled
    this is the path that reproduces the reported segmentation fault.
    """
    sess_options = ort.SessionOptions()
    sess_options.enable_profiling = True  # profiling is the trigger for the crash
    sess = ort.InferenceSession("relu_with_optional.onnx", sess_options)

    # Renamed from `input` to avoid shadowing the builtin of the same name;
    # the feed key "input" (the graph input name) is unchanged.
    input_array = np.array([-1], dtype=np.int32)
    output = sess.run(["output"], {"input": input_array})

    print(output)

# Reproduce the issue: build the model file, then run it with profiling
# enabled (the run is expected to segfault on the empty-Optional path).
create_model()
run_model()

Urgency

No response

Platform

Linux

OS Version

Ubuntu 20.04

ONNX Runtime Installation

Built from Source

ONNX Runtime Version or Commit ID

09c9843

ONNX Runtime API

Python

Architecture

X64

Execution Provider

Default CPU

Execution Provider Library Version

No response

@amancini-N amancini-N linked a pull request Nov 19, 2024 that will close this issue
@amarin16 amarin16 added the core runtime issues related to core runtime label Nov 19, 2024
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment
Labels
core runtime issues related to core runtime
Projects
None yet
Development

Successfully merging a pull request may close this issue.

2 participants