Revert D10412639: [nomnigraph] Add new NeuralNetOps for fusion
Differential Revision:
D10412639

Original commit changeset: a4c523fda96b

fbshipit-source-id: 973b6dd30b63b9a08069275278b0780b65067635
ezyang authored and facebook-github-bot committed Oct 24, 2018
1 parent 17c6d16 commit 97d4c05
Showing 3 changed files with 8 additions and 25 deletions.
12 changes: 1 addition & 11 deletions caffe2/core/nomnigraph/include/nomnigraph/Generated/OpClasses.h
@@ -639,23 +639,13 @@ class ChannelShuffle : public NeuralNetOperator {
 
 class Add : public NeuralNetOperator {
  public:
-  Add(int broadcast = 0)
-      : NeuralNetOperator(NNKind::Add), broadcast_(broadcast) {}
+  Add() : NeuralNetOperator(NNKind::Add) {}
 
   ~Add() {}
 
   NOMNIGRAPH_DEFINE_NN_RTTI(Add);
 
-  int getBroadcast() const {
-    return broadcast_;
-  }
-
-  void setBroadcast(int broadcast) {
-    broadcast_ = broadcast;
-  }
-
  private:
-  int broadcast_;
 };
 
 class Reshape : public NeuralNetOperator {
1 change: 0 additions & 1 deletion caffe2/core/nomnigraph/ops.def
@@ -63,7 +63,6 @@
 Concat
 Softmax
 ChannelShuffle
 Add
-- Broadcast : int : 0
 Reshape
 Flatten
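The OpClasses.h and ops.def hunks above are two views of the same declaration: each "Name : type : default" attribute line in ops.def is expanded by the nomnigraph code generator into a constructor parameter, a getter/setter pair, and a private member on the generated NeuralNetOperator subclass, which is exactly what this revert strips from Add. As a minimal sketch of that pattern (the op Foo and its Axis attribute are hypothetical, used only to illustrate what a line like "- Broadcast : int : 0" produces), a declaration such as

Foo
- Axis : int : 1

would generate roughly:

// Hypothetical sketch of what the nomnigraph generator would emit for the Foo
// declaration above; it mirrors the Add/Broadcast code removed by this revert.
class Foo : public NeuralNetOperator {
 public:
  Foo(int axis = 1)
      : NeuralNetOperator(NNKind::Foo), axis_(axis) {}

  ~Foo() {}

  NOMNIGRAPH_DEFINE_NN_RTTI(Foo);

  int getAxis() const {
    return axis_;
  }

  void setAxis(int axis) {
    axis_ = axis;
  }

 private:
  int axis_;
};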
20 changes: 7 additions & 13 deletions caffe2/opt/converter.cc
@@ -84,13 +84,10 @@ OperatorDef Converter::convertToOperatorDef(
     const nom::repr::NeuralNetOperator* nnOp) {
   auto* annotation = nnOp->getAnnotation();
   // Default to using the stored operator.
-  if (annotation && isa<Caffe2Annotation>(annotation)) {
+  if (isa<Caffe2Annotation>(annotation)) {
     return dyn_cast<Caffe2Annotation>(annotation)->getOperatorDef();
   }
-  LOG(WARNING) << "Cannot instantiate this OperatorDef from nomnigraph, falling back";
-  caffe2::OperatorDef op;
-  op.set_type(nnOp->getName());
-  return op;
+  CAFFE_THROW("TODO: Cannot yet instantiate OperatorDef from nomnigraph");
 }
 
 std::vector<int> getKernelShape(std::map<std::string, caffe2::Argument> argMap) {
@@ -159,13 +156,11 @@ class ClipConverter : public Converter {
     float max = std::numeric_limits<float>::max();
 
     if (argMap.count("min")) {
-      CAFFE_ENFORCE(argMap["min"].has_f(), "Invalid 'min' argument");
-      min = static_cast<float>(argMap["min"].f());
+      min = static_cast<float>(argMap["min"].i());
     }
 
     if (argMap.count("max")) {
-      CAFFE_ENFORCE(argMap["max"].has_f(), "Invalid 'max' argument");
-      max = static_cast<float>(argMap["max"].f());
+      max = static_cast<float>(argMap["max"].i());
     }
 
     return util::make_unique<repr::Clip>(min, max);
@@ -368,14 +363,13 @@ repr::NNModule convertToNNModule(
 caffe2::OperatorDef convertToOperatorDef(
     const repr::NNGraph::NodeRef& instrNode) {
   auto *nnOp = repr::nn::get<repr::NeuralNetOperator>(instrNode);
-  auto op_type = nnOp->getName();
   auto *annotation = nnOp->getAnnotation();
   caffe2::OperatorDef op;
 
-  if (ConverterRegistry()->Has(op_type)) {
-    op = ConverterRegistry()->Create(op_type)->convertToOperatorDef(nnOp);
+  if (ConverterRegistry()->Has(op.type())) {
+    op = ConverterRegistry()->Create(op.type())->convertToOperatorDef(nnOp);
   } else if (!annotation) {
-    op.set_type(op_type);
+    op.set_type(nnOp->getName());
   } else {
     if (isa<Caffe2Annotation>(annotation)) {
       auto c2_annotation = dyn_cast<Caffe2Annotation>(annotation);
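Taken together, the converter.cc hunks restore the stricter pre-D10412639 behavior: Converter::convertToOperatorDef throws via CAFFE_THROW when the op carries no stored Caffe2Annotation instead of logging a warning and falling back to a bare OperatorDef built from the op name, the free convertToOperatorDef keys its registry lookup on op.type() rather than a cached op_type, and ClipConverter again reads min/max through .i() without the has_f() checks. A minimal caller-side sketch, assuming one wants to avoid the restored throwing path (the helper name hasStoredOperatorDef is hypothetical, not part of this diff):

// Checks that the op carries a non-null Caffe2Annotation so callers can skip
// ops that would hit the restored CAFFE_THROW. Hypothetical helper, not part
// of this commit.
bool hasStoredOperatorDef(const nom::repr::NeuralNetOperator* nnOp) {
  auto* annotation = nnOp->getAnnotation();
  return annotation && isa<Caffe2Annotation>(annotation);
}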
