Merge pull request BVLC#8 from Russell91/cppfeat
Merging minor edits / bug fixes for C++ interface
Nicolas D. Jimenez committed Aug 23, 2015
2 parents 1f62afa + abd3405 commit 23a647a
Showing 4 changed files with 81 additions and 6 deletions.
54 changes: 54 additions & 0 deletions examples/apollocaffe/cpp/train.cpp
@@ -0,0 +1,54 @@
#include <opencv2/core/core.hpp>
#include <opencv2/imgproc/imgproc.hpp>

#include <vector>

#include "caffe/apollonet.hpp"
#include "caffe/caffe.hpp"
#include "caffe/data_layers.hpp"
#include "caffe/layer.hpp"
#include "caffe/util/io.hpp"

using namespace caffe; // NOLINT(build/namespaces)
using namespace cv; // NOLINT(build/namespaces)

using std::vector;

int main() {
  ApolloNet<float> net;
  shared_ptr<Layer<float> > dataLayer, labelLayer;
  shared_ptr<MatDataLayer<float> > dataMemLayer, labelMemLayer;
  typedef LayerRegistry<float> LR;
  dataLayer = LR::CreateLayer("name: 'data' type: 'MatData'"
      "top: 'data' phase: TEST");
  labelLayer = LR::CreateLayer("name: 'label' type: 'MatData'"
      "top: 'label' phase: TEST");
  dataMemLayer = boost::dynamic_pointer_cast<MatDataLayer<float> > (dataLayer);
  labelMemLayer =
      boost::dynamic_pointer_cast<MatDataLayer<float> > (labelLayer);
  Mat example = Mat(1, 1, CV_8UC1);
  vector<Mat> matVec(1);
  vector<Mat> labelMatVec(1);
  double loss;
  for (int i = 0; i < 200; ++i) {
    randu(example, Scalar::all(0), Scalar::all(50));
    matVec[0] = example;
    labelMatVec[0] = example * 3;
    dataMemLayer->AddMatVector(matVec);
    labelMemLayer->AddMatVector(labelMatVec);
    net.f(dataLayer);
    net.f(labelLayer);
    net.f("name: 'conv' type: 'Convolution' bottom: 'data' top: 'conv'"
        "convolution_param { num_output: 1 weight_filler { type: 'xavier' } "
        "bias_filler { type: 'constant' value: 0.0 } kernel_h: 1 kernel_w: 1}");
    loss = net.f("name: 'loss' type: 'EuclideanLoss' bottom: 'conv' "
        "bottom: 'label' top: 'loss' ");
    net.Backward();
    net.Update(0.0001, 0, -1, 0);
    net.ResetForward();
    if (i % 10 == 0) {
      std::cout << loss << std::endl;
    }
  }
  return 0;
}
2 changes: 2 additions & 0 deletions include/caffe/apollonet.hpp
@@ -27,6 +27,8 @@ class ApolloNet {

  Dtype ForwardLayer(const string& layer_param_string);

  Dtype ForwardLayer(const LayerParameter& layer_param);

  Dtype f(shared_ptr<Layer<Dtype> > layer);

  Dtype f(const string& layer_prototxt);
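The new ForwardLayer(const LayerParameter&) overload sits alongside the existing prototxt-string entry point f(const string&), letting callers parse a layer definition once and reuse it across iterations. A minimal sketch of that pattern (the helper names MakeConvParam and ForwardConv are hypothetical, and it assumes the net's 'data' blob has already been fed, as in train.cpp above):

#include <google/protobuf/text_format.h>

#include <stdexcept>

#include "caffe/apollonet.hpp"
#include "caffe/proto/caffe.pb.h"

using caffe::ApolloNet;
using caffe::LayerParameter;

// Parse the layer definition once, up front.
LayerParameter MakeConvParam() {
  LayerParameter p;
  bool ok = google::protobuf::TextFormat::ParseFromString(
      "name: 'conv' type: 'Convolution' bottom: 'data' top: 'conv' "
      "convolution_param { num_output: 1 kernel_h: 1 kernel_w: 1 }", &p);
  if (!ok) { throw std::runtime_error("Invalid prototxt string"); }
  return p;
}

// Reuse the parsed message on every forward pass instead of handing
// net.f() a prototxt string that has to be re-parsed each iteration.
float ForwardConv(ApolloNet<float>& net, const LayerParameter& conv_param) {
  return net.ForwardLayer(conv_param);
}
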
2 changes: 2 additions & 0 deletions python/apollocaffe/cpp/_apollocaffe.pyx
@@ -228,6 +228,8 @@ cdef class ApolloNet:
            p = caffe_pb2.LayerParameter()
            Merge(layer, p)
            loss = self.thisptr.ForwardLayer(p.SerializeToString())
        elif not hasattr(layer, 'p'):
            loss = self.thisptr.ForwardLayer(layer.SerializeToString())
        elif layer.p.type != 'Py':
            loss = self.thisptr.ForwardLayer(layer.p.SerializeToString())
        else:
29 changes: 23 additions & 6 deletions src/caffe/apollonet.cpp
@@ -1,3 +1,5 @@
#include <google/protobuf/text_format.h>

#include <map>
#include <set>
#include <string>
@@ -21,9 +23,26 @@ ApolloNet<Dtype>::ApolloNet() {

template <typename Dtype>
Dtype ApolloNet<Dtype>::f(const string& layer_prototxt) {
  shared_ptr<Layer<Dtype> > layer =
      LayerRegistry<Dtype>::CreateLayer(layer_prototxt);
  return f(layer);
  LayerParameter p;
  bool success =
      google::protobuf::TextFormat::ParseFromString(layer_prototxt, &p);
  ASSERT(success, "Invalid prototxt string");
  return ForwardLayer(p);
}

template <typename Dtype>
Dtype ApolloNet<Dtype>::ForwardLayer(const string& layer_param_string) {
  /* This function will
   * 1) Check if the layer name is in the cache
   * 2) Create the layer if it is new
   * 3) Set up the top blobs
   * 4) Set up the bottom blobs
   * 5) Set up the parameters
   * 6) Call the Forward function */

  LayerParameter active_layer_param;
  ASSERT(active_layer_param.ParseFromString(layer_param_string), "");
  return ForwardLayer(active_layer_param);
}

template <typename Dtype>
@@ -144,7 +163,7 @@ Dtype ApolloNet<Dtype>::f(shared_ptr<Layer<Dtype> > layer) {
}

template <typename Dtype>
Dtype ApolloNet<Dtype>::ForwardLayer(const string& layer_param_string) {
Dtype ApolloNet<Dtype>::ForwardLayer(const LayerParameter& active_layer_param) {
  /* This function will
   * 1) Check if the layer name is in the cache
   * 2) Create the layer if it is new
@@ -153,8 +172,6 @@ Dtype ApolloNet<Dtype>::ForwardLayer(const string& layer_param_string) {
   * 5) Set up the parameters
   * 6) Call the Forward function */

  LayerParameter active_layer_param;
  ASSERT(active_layer_param.ParseFromString(layer_param_string), "");
  RuntimeParameter runtime_param = active_layer_param.rp();
  ASSERT(active_layer_param.has_name(), "");
  const string& layer_name = active_layer_param.name();
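Note that the two string-taking entry points now expect different encodings: f(const string&) parses human-readable prototxt via TextFormat, while ForwardLayer(const string&) parses the binary wire format produced by SerializeToString(), which is exactly what the Cython wrapper above sends. A short sketch of the distinction (the function ForwardBothWays is hypothetical, and it assumes the net's 'data' blob has been fed beforehand):

#include <string>

#include "caffe/apollonet.hpp"
#include "caffe/proto/caffe.pb.h"

using caffe::ApolloNet;
using caffe::LayerParameter;

void ForwardBothWays(ApolloNet<float>& net) {
  // Human-readable prototxt: what f(const string&) expects.
  net.f("name: 'conv' type: 'Convolution' bottom: 'data' top: 'conv' "
      "convolution_param { num_output: 1 kernel_h: 1 kernel_w: 1 }");

  // Binary wire format: what ForwardLayer(const string&) expects,
  // matching layer.SerializeToString() on the Python side.
  LayerParameter p;
  p.set_name("conv2");
  p.set_type("Convolution");
  p.add_bottom("data");
  p.add_top("conv2");
  p.mutable_convolution_param()->set_num_output(1);
  p.mutable_convolution_param()->set_kernel_h(1);
  p.mutable_convolution_param()->set_kernel_w(1);
  std::string wire;
  p.SerializeToString(&wire);
  net.ForwardLayer(wire);
}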
