Skip to content

Commit

Permalink
Added initModule() procedure to explicitly init builtin layers
Browse files Browse the repository at this point in the history
  • Loading branch information
Vitaliy Lyudvichenko committed Aug 13, 2015
1 parent 160d864 commit 06f949a
Show file tree
Hide file tree
Showing 26 changed files with 454 additions and 290 deletions.
50 changes: 3 additions & 47 deletions modules/dnn/include/opencv2/dnn/dnn.hpp
Original file line number Diff line number Diff line change
Expand Up @@ -10,6 +10,8 @@ namespace cv
{
namespace dnn
{
CV_EXPORTS void initModule();

class CV_EXPORTS LayerParams : public Dict
{
public:
Expand Down Expand Up @@ -90,56 +92,10 @@ namespace dnn
CV_EXPORTS Ptr<Importer> createTorchImporter(const String &filename, bool isBinary = true);

CV_EXPORTS Blob readTorchMat(const String &filename, bool isBinary = true);


//Layer factory allows to create instances of registered layers.
// Static-only registry mapping layer type names to factory functions.
class CV_EXPORTS LayerRegister
{
public:

    // Factory function type: builds a Layer from its parsed parameters.
    // (Note: "Constuctor" spelling is part of the public interface.)
    typedef Ptr<Layer>(*Constuctor)(LayerParams &params);

    // Associates a layer type name with its factory function.
    // NOTE(review): behavior on duplicate registration is not visible here — confirm in the implementation.
    static void registerLayer(const String &type, Constuctor constructor);

    // Removes a previously registered layer type from the registry.
    static void unregisterLayer(const String &type);

    // Instantiates a layer by its registered type name using the stored factory.
    static Ptr<Layer> createLayerInstance(const String &type, LayerParams& params);

private:
    // Static-only interface: instances cannot be created.
    LayerRegister();

    // Hidden implementation (pimpl) holding the type -> constructor map.
    struct Impl;
    static Ptr<Impl> impl;
};

//allows automatically register created layer on module load time
struct _LayerRegisterer
{
String type;

_LayerRegisterer(const String &type, LayerRegister::Constuctor constuctor)
{
this->type = type;
LayerRegister::registerLayer(type, constuctor);
}

~_LayerRegisterer()
{
LayerRegister::unregisterLayer(type);
}
};

//registers layer on module load time
// Declares a file-scope _LayerRegisterer whose constructor registers
// `constuctorFunc` under the name `type` during static initialization.
#define REGISTER_LAYER_FUNC(type, constuctorFunc) \
static _LayerRegisterer __layerRegisterer_##type(#type, constuctorFunc);

// Emits a factory function returning `new class(params)` and a file-scope
// registerer that installs it under the name `type` during static initialization.
#define REGISTER_LAYER_CLASS(type, class) \
Ptr<Layer> __layerRegisterer_func_##type(LayerParams &params) \
{ return Ptr<Layer>(new class(params)); } \
static _LayerRegisterer __layerRegisterer_##type(#type, __layerRegisterer_func_##type);
}
}

#include <opencv2/dnn/layer.hpp>
#include <opencv2/dnn/dnn.inl.hpp>

#endif /* __OPENCV_DNN_DNN_HPP__ */
71 changes: 71 additions & 0 deletions modules/dnn/include/opencv2/dnn/layer.hpp
Original file line number Diff line number Diff line change
@@ -0,0 +1,71 @@
#ifndef __OPENCV_DNN_LAYER_HPP__
#define __OPENCV_DNN_LAYER_HPP__
#include <opencv2/dnn.hpp>

namespace cv
{
namespace dnn
{

//Layer factory allows to create instances of registered layers.
// Static-only registry mapping layer type names to factory functions.
class CV_EXPORTS LayerRegister
{
public:

    // Factory function type: builds a Layer from its parsed parameters.
    // (Note: "Constuctor" spelling is part of the public interface.)
    typedef Ptr<Layer>(*Constuctor)(LayerParams &params);

    // Associates a layer type name with its factory function.
    // NOTE(review): behavior on duplicate registration is not visible here — confirm in the implementation.
    static void registerLayer(const String &type, Constuctor constructor);

    // Removes a previously registered layer type from the registry.
    static void unregisterLayer(const String &type);

    // Instantiates a layer by its registered type name using the stored factory.
    static Ptr<Layer> createLayerInstance(const String &type, LayerParams& params);

private:
    // Static-only interface: instances cannot be created.
    LayerRegister();

    // Hidden implementation (pimpl) holding the type -> constructor map.
    struct Impl;
    static Ptr<Impl> impl;
};

// Generic factory used by REG_RUNTIME_LAYER_CLASS: constructs a LayerClass
// from the given parameters and hands ownership to a Ptr<Layer>.
template<typename LayerClass>
Ptr<Layer> _layerDynamicRegisterer(LayerParams &params)
{
    Ptr<Layer> instance(new LayerClass(params));
    return instance;
}

// Registers `constuctorFunc` as the factory for layer name `type` at the point
// of the call (runtime registration; the statement semicolon is supplied by the macro).
#define REG_RUNTIME_LAYER_FUNC(type, constuctorFunc) \
LayerRegister::registerLayer(#type, constuctorFunc);

// Registers `class` at runtime via the generic _layerDynamicRegisterer factory.
#define REG_RUNTIME_LAYER_CLASS(type, class) \
LayerRegister::registerLayer(#type, _layerDynamicRegisterer<class>);

//allows automatically register created layer on module load time
struct _LayerStaticRegisterer
{
String type;

_LayerStaticRegisterer(const String &type, LayerRegister::Constuctor constuctor)
{
this->type = type;
LayerRegister::registerLayer(type, constuctor);
}

~_LayerStaticRegisterer()
{
LayerRegister::unregisterLayer(type);
}
};

//registers layer constructor on module load time
// Declares a static _LayerStaticRegisterer (file- or function-local, depending
// on where the macro is expanded) that registers `constuctorFunc` during its
// static initialization and unregisters it on destruction.
#define REG_STATIC_LAYER_FUNC(type, constuctorFunc) \
static _LayerStaticRegisterer __LayerStaticRegisterer_##type(#type, constuctorFunc);

//registers layer class on module load time
// Emits a factory function returning `new class(params)` plus a static
// registerer that installs it under the name `type`.
#define REG_STATIC_LAYER_CLASS(type, class) \
Ptr<Layer> __LayerStaticRegisterer_func_##type(LayerParams &params) \
{ return Ptr<Layer>(new class(params)); } \
static _LayerStaticRegisterer __LayerStaticRegisterer_##type(#type, __LayerStaticRegisterer_func_##type);

}
}
#endif
64 changes: 64 additions & 0 deletions modules/dnn/src/init.cpp
Original file line number Diff line number Diff line change
@@ -0,0 +1,64 @@
#include "precomp.hpp"

#include "layers/concat_layer.hpp"
#include "layers/convolution_layer.hpp"
#include "layers/blank_layer.hpp"
#include "layers/elementwise_layers.hpp"
#include "layers/fully_connected_layer.hpp"
#include "layers/lrn_layer.hpp"
#include "layers/mvn_layer.hpp"
#include "layers/pooling_layer.hpp"
#include "layers/reshape_layer.hpp"
#include "layers/slice_layer.hpp"
#include "layers/softmax_layer.hpp"
#include "layers/split_layer.hpp"

namespace cv
{
namespace dnn
{

// One-shot guard that triggers builtin-layer registration during static
// initialization of this translation unit, so users normally never need to
// call initModule() themselves.
struct AutoInitializer
{
    // Set to true by initModule() once all builtin layers are registered.
    // For the static `init` object below, this member is zero-initialized
    // (false) before any dynamic initialization runs.
    bool status;

    // Deliberately does NOT reset `status`: if the exported initModule() was
    // already called before this object's constructor runs (static init order
    // across translation units), resetting the flag here would make the call
    // below re-register every builtin layer. Relying on zero-initialization of
    // static storage keeps the guard intact in both orderings.
    AutoInitializer()
    {
        cv::dnn::initModule();
    }
};

static AutoInitializer init;

// Registers every builtin layer type in the LayerRegister factory.
// Called automatically at module load time via the AutoInitializer static;
// explicit calls after that are no-ops.
void initModule()
{
    // Already initialized (by the AutoInitializer static or by a previous
    // explicit call) — nothing to do.
    if (init.status)
        return;

    // Note: the REG_*_LAYER_* macros supply their own trailing semicolons.
    REG_RUNTIME_LAYER_CLASS(Slice, SliceLayer)
    REG_RUNTIME_LAYER_CLASS(Softmax, SoftMaxLayer)
    REG_RUNTIME_LAYER_CLASS(Split, SplitLayer)
    REG_RUNTIME_LAYER_CLASS(Reshape, ReshapeLayer)
    // Flatten is registered through a factory function; this macro declares a
    // function-local static registerer, so it runs only on the first pass.
    REG_STATIC_LAYER_FUNC(Flatten, createFlattenLayer)
    REG_RUNTIME_LAYER_CLASS(Pooling, PoolingLayer)
    REG_RUNTIME_LAYER_CLASS(MVN, MVNLayer)
    REG_RUNTIME_LAYER_CLASS(LRN, LRNLayer)
    REG_RUNTIME_LAYER_CLASS(InnerProduct, FullyConnectedLayer)

    // Elementwise activations share one layer template parameterized by functor.
    REG_RUNTIME_LAYER_CLASS(ReLU, ElementWiseLayer<ReLUFunctor>)
    REG_RUNTIME_LAYER_CLASS(TanH, ElementWiseLayer<TanHFunctor>)
    REG_RUNTIME_LAYER_CLASS(BNLL, ElementWiseLayer<BNLLFunctor>)
    REG_RUNTIME_LAYER_CLASS(Power, ElementWiseLayer<PowerFunctor>)
    REG_RUNTIME_LAYER_CLASS(AbsVal, ElementWiseLayer<AbsValFunctor>)
    REG_RUNTIME_LAYER_CLASS(Sigmoid, ElementWiseLayer<SigmoidFunctor>)
    // Dropout is identity at inference time, hence the pass-through BlankLayer.
    REG_RUNTIME_LAYER_CLASS(Dropout, BlankLayer)

    REG_RUNTIME_LAYER_CLASS(Convolution, ConvolutionLayer)
    REG_RUNTIME_LAYER_CLASS(Deconvolution, DeConvolutionLayer)
    REG_RUNTIME_LAYER_CLASS(Concat, ConcatLayer)

    // Mark initialization complete so later calls return immediately.
    init.status = true;
}

}
}
Original file line number Diff line number Diff line change
@@ -1,5 +1,6 @@
#ifndef __OPENCV_DNN_LAYERS_BLANK_LAYER_HPP__
#define __OPENCV_DNN_LAYERS_BLANK_LAYER_HPP__
#include "../precomp.hpp"
#include "layers_common.hpp"

namespace cv
{
Expand Down Expand Up @@ -27,7 +28,6 @@ namespace dnn
outputs[i] = *inputs[i];
}
};

REGISTER_LAYER_CLASS(Dropout, BlankLayer)
}
}
}
#endif
15 changes: 1 addition & 14 deletions modules/dnn/src/layers/concat_layer.cpp
Original file line number Diff line number Diff line change
@@ -1,24 +1,11 @@
#include "../precomp.hpp"
#include "layers_common.hpp"
#include "concat_layer.hpp"

namespace cv
{
namespace dnn
{
// Concatenates its input blobs along a single axis into one output blob.
class ConcatLayer : public Layer
{
    int axis; // concatenation axis; read from params ("axis", default 1)

public:
    ConcatLayer(LayerParams& params);
    // Layer interface: shape computation / output allocation pass.
    void allocate(const std::vector<Blob*> &inputs, std::vector<Blob> &outputs);
    // Layer interface: forward pass.
    void forward(std::vector<Blob*> &inputs, std::vector<Blob> &outputs);
};


REGISTER_LAYER_CLASS(Concat, ConcatLayer)


ConcatLayer::ConcatLayer(LayerParams &params)
{
axis = params.get<int>("axis", 1);
Expand Down
20 changes: 20 additions & 0 deletions modules/dnn/src/layers/concat_layer.hpp
Original file line number Diff line number Diff line change
@@ -0,0 +1,20 @@
#ifndef __OPENCV_DNN_LAYERS_CONCAT_LAYER_HPP__
#define __OPENCV_DNN_LAYERS_CONCAT_LAYER_HPP__
#include "../precomp.hpp"

namespace cv
{
namespace dnn
{
// Concatenates its input blobs along a single axis into one output blob.
class ConcatLayer : public Layer
{
    int axis; // concatenation axis; read from params ("axis", default 1)

public:
    ConcatLayer(LayerParams& params);
    // Layer interface: shape computation / output allocation pass.
    void allocate(const std::vector<Blob*> &inputs, std::vector<Blob> &outputs);
    // Layer interface: forward pass.
    void forward(std::vector<Blob*> &inputs, std::vector<Blob> &outputs);
};
}
}
#endif
46 changes: 1 addition & 45 deletions modules/dnn/src/layers/convolution_layer.cpp
Original file line number Diff line number Diff line change
@@ -1,56 +1,12 @@
#include "../precomp.hpp"
#include "layers_common.hpp"
#include "convolution_layer.hpp"
#include "im2col.hpp"

namespace cv
{
namespace dnn
{
//TODO: simultaneously convolution and bias addition for cache optimization
// 2D convolution layer. Kernel/stride/pad fields are filled from LayerParams
// in the constructor; blob geometry is derived at allocation time.
class ConvolutionLayer : public Layer
{
protected:
    bool bias;                  // whether a bias term is added
    int numOutput, group;       // output channel count and group count
    int padH, padW;             // zero padding (height, width)
    int kerH, kerW;             // kernel size (height, width)
    int strideH, strideW;       // stride (height, width)

    int inpH, inpW, inpCn;      // input geometry: height, width, channels
    int outH, outW, outCn;      // output geometry: height, width, channels
    int topH, topW, topCn; //switched between inp/out on deconv/conv
    int inpGroupCn, outGroupCn; // channels per group on the input/output side
    int ksize;                  // NOTE(review): presumably kernel elements per group — confirm in .cpp

    Mat colMat, biasOnesMat;    // im2col scratch and ones vector — presumably used to broadcast bias via GEMM; verify in forward()

    inline bool is1x1() const;  // NOTE(review): presumably true when im2col can be skipped (1x1 kernel) — confirm
    virtual void computeInpOutShape(const Blob &inpBlob); // overridden by DeConvolutionLayer
    void im2col(Blob &inpBlob, int imNum, int cnGroup);

public:
    ConvolutionLayer() {}
    ConvolutionLayer(LayerParams &params);
    // Layer interface: shape computation / output allocation pass.
    void allocate(const std::vector<Blob*> &inputs, std::vector<Blob> &outputs);
    // Layer interface: forward pass.
    void forward(std::vector<Blob*> &inputs, std::vector<Blob> &outputs);
};

// Transposed ("de-")convolution. Reuses ConvolutionLayer's parameters and
// state; the top* fields switch the roles of input/output shapes (see base).
class DeConvolutionLayer : public ConvolutionLayer
{
protected:
    // Computes shapes with input/output roles swapped relative to the base class.
    void computeInpOutShape(const Blob &inpBlob);
    // Inverse of im2col: scatters column-buffer data back into an image matrix.
    void col2im(Mat &dstMat);

public:
    DeConvolutionLayer(LayerParams &params) : ConvolutionLayer(params) {}
    // Layer interface: forward pass.
    void forward(std::vector<Blob*> &inputs, std::vector<Blob> &outputs);
};


REGISTER_LAYER_CLASS(Convolution, ConvolutionLayer)
REGISTER_LAYER_CLASS(Deconvolution, DeConvolutionLayer)


ConvolutionLayer::ConvolutionLayer(LayerParams &params)
{
getKernelParams(params, kerH, kerW, padH, padW, strideH, strideW);
Expand Down
50 changes: 50 additions & 0 deletions modules/dnn/src/layers/convolution_layer.hpp
Original file line number Diff line number Diff line change
@@ -0,0 +1,50 @@
#ifndef __OPENCV_DNN_LAYERS_CONVOLUTION_LAYER_HPP__
#define __OPENCV_DNN_LAYERS_CONVOLUTION_LAYER_HPP__
#include "../precomp.hpp"

namespace cv
{
namespace dnn
{
//TODO: simultaneously convolution and bias addition for cache optimization
// 2D convolution layer. Kernel/stride/pad fields are filled from LayerParams
// in the constructor; blob geometry is derived at allocation time.
class ConvolutionLayer : public Layer
{
protected:
    bool bias;                  // whether a bias term is added
    int numOutput, group;       // output channel count and group count
    int padH, padW;             // zero padding (height, width)
    int kerH, kerW;             // kernel size (height, width)
    int strideH, strideW;       // stride (height, width)

    int inpH, inpW, inpCn;      // input geometry: height, width, channels
    int outH, outW, outCn;      // output geometry: height, width, channels
    int topH, topW, topCn; //switched between inp/out on deconv/conv
    int inpGroupCn, outGroupCn; // channels per group on the input/output side
    int ksize;                  // NOTE(review): presumably kernel elements per group — confirm in .cpp

    Mat colMat, biasOnesMat;    // im2col scratch and ones vector — presumably used to broadcast bias via GEMM; verify in forward()

    inline bool is1x1() const;  // NOTE(review): presumably true when im2col can be skipped (1x1 kernel) — confirm
    virtual void computeInpOutShape(const Blob &inpBlob); // overridden by DeConvolutionLayer
    void im2col(Blob &inpBlob, int imNum, int cnGroup);

public:
    ConvolutionLayer() {}
    ConvolutionLayer(LayerParams &params);
    // Layer interface: shape computation / output allocation pass.
    void allocate(const std::vector<Blob*> &inputs, std::vector<Blob> &outputs);
    // Layer interface: forward pass.
    void forward(std::vector<Blob*> &inputs, std::vector<Blob> &outputs);
};

// Transposed ("de-")convolution. Reuses ConvolutionLayer's parameters and
// state; the top* fields switch the roles of input/output shapes (see base).
class DeConvolutionLayer : public ConvolutionLayer
{
protected:
    // Computes shapes with input/output roles swapped relative to the base class.
    void computeInpOutShape(const Blob &inpBlob);
    // Inverse of im2col: scatters column-buffer data back into an image matrix.
    void col2im(Mat &dstMat);

public:
    DeConvolutionLayer(LayerParams &params) : ConvolutionLayer(params) {}
    // Layer interface: forward pass.
    void forward(std::vector<Blob*> &inputs, std::vector<Blob> &outputs);
};
}
}
#endif
Empty file.
Loading

0 comments on commit 06f949a

Please sign in to comment.