refactor: burn-import unary operators (#548)
nathanielsimard authored Jul 27, 2023
1 parent 74c41bd · commit f0a7135
Showing 8 changed files with 200 additions and 283 deletions.
24 changes: 7 additions & 17 deletions burn-import/src/burn/node/base.rs
@@ -1,8 +1,7 @@
 use super::{
     add::AddNode, batch_norm::BatchNormNode, constant::ConstantNode, conv2d::Conv2dNode,
-    equal::EqualNode, flatten::FlattenNode, linear::LinearNode, log_softmax::LogSoftmaxNode,
-    matmul::MatmulNode, max_pool2d::MaxPool2dNode, relu::ReLUNode, reshape::ReshapeNode,
-    sigmoid::SigmoidNode,
+    equal::EqualNode, linear::LinearNode, matmul::MatmulNode, max_pool2d::MaxPool2dNode,
+    reshape::ReshapeNode, unary::UnaryNode,
 };
 use crate::burn::{BurnImports, Scope, Type};
 use burn::record::PrecisionSettings;
@@ -78,13 +77,10 @@ pub enum Node<PS: PrecisionSettings> {
     MaxPool2d(MaxPool2dNode),
     Linear(LinearNode<PS>),
     BatchNorm(BatchNormNode<PS>),
-    ReLU(ReLUNode),
-    Flatten(FlattenNode),
-    LogSoftmax(LogSoftmaxNode),
     Constant(ConstantNode),
     Equal(EqualNode),
+    Unary(UnaryNode),
     Reshape(ReshapeNode),
-    Sigmoid(SigmoidNode),
 }

 macro_rules! match_all {
@@ -96,13 +92,10 @@ macro_rules! match_all {
             Node::MaxPool2d(node) => $func(node),
             Node::Linear(node) => $func(node),
             Node::BatchNorm(node) => $func(node),
-            Node::ReLU(node) => $func(node),
-            Node::Flatten(node) => $func(node),
-            Node::LogSoftmax(node) => $func(node),
             Node::Constant(node) => $func(node),
             Node::Equal(node) => $func(node),
             Node::Reshape(node) => $func(node),
-            Node::Sigmoid(node) => $func(node),
+            Node::Unary(node) => $func(node),
         }
     }};
 }
@@ -126,12 +119,9 @@ impl<PS: PrecisionSettings> Node<PS> {
             Node::MaxPool2d(_) => "max_pool2d",
             Node::Linear(_) => "linear",
             Node::BatchNorm(_) => "batch_norm",
-            Node::ReLU(_) => "relu",
-            Node::Flatten(_) => "flatten",
-            Node::LogSoftmax(_) => "log_softmax",
             Node::Equal(_) => "equal",
             Node::Reshape(_) => "reshape",
-            Node::Sigmoid(_) => "sigmoid",
+            Node::Unary(unary) => unary.kind.as_str(),
         }
     }
 }
@@ -264,9 +254,9 @@ pub(crate) mod tests {
         module::Module,
         tensor::{backend::Backend, Tensor},
     };
-    use burn::nn::PaddingConfig2d;
-    use burn::nn::conv::Conv2d;
     use burn::nn::conv::Conv2dConfig;
+    use burn::nn::conv::Conv2d;
+    use burn::nn::PaddingConfig2d;

     #[derive(Module, Debug)]
     pub struct Model <B: Backend> {
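The separate ReLU, Flatten, LogSoftmax, and Sigmoid node types above are folded into a single `UnaryNode`, and the node name now comes from `unary.kind.as_str()`. Below is a minimal sketch of what that consolidated type could look like; only the `Unary` variant, the `kind` field, and `as_str()` are visible in this diff, so the `UnaryNodeKind` enum and everything else here are illustrative assumptions rather than the crate's actual code.

```rust
// Hypothetical sketch, not the actual burn-import implementation.
// Only `UnaryNode`, the `kind` field, and `as_str()` are confirmed by the
// diff above; the enum and its variants are assumed for illustration.
#[derive(Debug, Clone)]
pub enum UnaryNodeKind {
    Flatten,
    LogSoftmax,
    Relu,
    Sigmoid,
}

impl UnaryNodeKind {
    pub fn as_str(&self) -> &str {
        match self {
            Self::Flatten => "flatten",
            Self::LogSoftmax => "log_softmax",
            Self::Relu => "relu",
            Self::Sigmoid => "sigmoid",
        }
    }
}

#[derive(Debug, Clone)]
pub struct UnaryNode {
    pub kind: UnaryNodeKind,
    // The real node also tracks its input/output tensor types and how to
    // emit the forward expression; those details are omitted here.
}
```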
64 changes: 0 additions & 64 deletions burn-import/src/burn/node/flatten.rs

This file was deleted.

61 changes: 0 additions & 61 deletions burn-import/src/burn/node/log_softmax.rs

This file was deleted.

5 changes: 1 addition & 4 deletions burn-import/src/burn/node/mod.rs
@@ -5,14 +5,11 @@ pub(crate) mod batch_norm;
 pub(crate) mod constant;
 pub(crate) mod conv2d;
 pub(crate) mod equal;
-pub(crate) mod flatten;
 pub(crate) mod linear;
-pub(crate) mod log_softmax;
 pub(crate) mod matmul;
 pub(crate) mod max_pool2d;
-pub(crate) mod relu;
 pub(crate) mod reshape;
-pub(crate) mod sigmoid;
+pub(crate) mod unary;

 pub(crate) use base::*;

58 changes: 0 additions & 58 deletions burn-import/src/burn/node/relu.rs

This file was deleted.

61 changes: 0 additions & 61 deletions burn-import/src/burn/node/sigmoid.rs

This file was deleted.
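With flatten.rs, log_softmax.rs, relu.rs, and sigmoid.rs deleted, those operators would presumably be built as `Unary` nodes instead. A hedged usage sketch follows, building on the hypothetical `UnaryNodeKind` sketched earlier; the constructor helpers are assumptions for illustration, not the crate's actual API, and the real constructors almost certainly also carry input/output tensor information.

```rust
// Hypothetical convenience constructors; signatures are assumed, not taken
// from the burn-import source.
impl UnaryNode {
    pub fn relu() -> Self {
        Self { kind: UnaryNodeKind::Relu }
    }

    pub fn sigmoid() -> Self {
        Self { kind: UnaryNodeKind::Sigmoid }
    }
}

fn demo() {
    // Where base.rs previously matched Node::ReLU(_) => "relu", the new
    // match arm reads the name from the kind instead.
    let node = UnaryNode::relu();
    assert_eq!(node.kind.as_str(), "relu");
}
```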

