Remove ops from decompositions where converters exist
HolyWu committed May 3, 2024
Commit a6e3e4b (1 parent: 4dc9acf)
Showing 1 changed file with 0 additions and 5 deletions.
py/torch_tensorrt/dynamo/lowering/_decomposition_groups.py: 0 additions & 5 deletions
@@ -48,7 +48,6 @@
     aten.glu_backward,
     aten.hardshrink,
     aten.hardshrink_backward,
-    aten.hardsigmoid,
     aten.hardsigmoid_backward,
     aten.hardswish,
     aten.hardswish_,
@@ -66,11 +65,9 @@
     aten.index_copy_,
     aten.index_fill,
     aten.index_fill_,
-    aten.index_select,
     aten.isneginf,
     aten.isposinf,
     aten.l1_loss,
-    aten.leaky_relu,
     aten.leaky_relu_,
     aten.leaky_relu_backward,
     aten.lerp,
@@ -134,7 +131,6 @@
     aten.soft_margin_loss_backward,
     aten._softmax.out,
     aten._softmax_backward_data,
-    aten.softplus,
     aten.softplus_backward,
     aten.softshrink,
     aten.softshrink_backward,
@@ -172,7 +168,6 @@
     aten.linalg_vector_norm,
     aten.full,
     aten.repeat,
-    aten.var_mean,
 }
 torch_disabled_decompositions: Set[Union[OpOverload, OpOverloadPacket]] = {
     aten._softmax.default,
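Per the commit title, the ops dropped here (aten.hardsigmoid, aten.index_select, aten.leaky_relu, aten.softplus, aten.var_mean) now have dedicated converters, so they no longer need to be decomposed into simpler aten ops during lowering. The sketch below is only a rough illustration of that mechanism using PyTorch's torch._decomp.get_decompositions, not the actual Torch-TensorRT lowering code: an op left out of the decomposition set is kept intact in the exported graph so the backend's own converter can handle it.

    # Minimal sketch (assumption: this mirrors the intent of the decomposition
    # groups, not the real Torch-TensorRT code path). Ops passed to
    # get_decompositions are broken down before conversion; ops omitted from
    # the list, such as aten.hardsigmoid, stay as-is in the graph.
    import torch
    from torch._decomp import get_decompositions

    aten = torch.ops.aten

    # Request decompositions only for ops we still want lowered.
    still_decomposed = get_decompositions([aten.hardswish, aten.leaky_relu_backward])

    # aten.hardsigmoid was not requested, so no decomposition is registered for it
    # and a dedicated converter would see the original op.
    assert aten.hardsigmoid.default not in still_decomposed
    print(f"{len(still_decomposed)} decompositions registered")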
