add_merge.hpp
#ifndef MLPACK_METHODS_ANN_LAYER_ADD_MERGE_HPP
#define MLPACK_METHODS_ANN_LAYER_ADD_MERGE_HPP

#include <mlpack/prereqs.hpp>

#include "../visitor/delete_visitor.hpp"
#include "../visitor/delta_visitor.hpp"
#include "../visitor/output_parameter_visitor.hpp"

#include "layer_types.hpp"

namespace mlpack {
namespace ann {

/**
 * Implementation of the AddMerge module class. The AddMerge module
 * accumulates the outputs of the modules it holds by element-wise addition.
 */
template<
    typename InputDataType = arma::mat,
    typename OutputDataType = arma::mat,
    typename... CustomLayers
>
class AddMerge
{
 public:
  /**
   * Create the AddMerge object using the specified parameters.
   */
  AddMerge(const bool model = false, const bool run = true);

  //! Destructor to release allocated memory.
  ~AddMerge();

  /**
   * Ordinary feed forward pass of a neural network, evaluating the function
   * f(x) by propagating the activity forward through f.
   */
  template<typename InputType, typename OutputType>
  void Forward(InputType&& /* input */, OutputType&& output);

  /**
   * Ordinary feed backward pass of a neural network, calculating the function
   * f(x) by propagating x backwards through f, using the results from the
   * feed forward pass.
   */
  template<typename eT>
  void Backward(const arma::Mat<eT>&& /* input */,
                arma::Mat<eT>&& gy,
                arma::Mat<eT>&& g);

  /**
   * This is the overload of Backward() that runs a specific layer with the
   * given input.
   *
   * @param index The index of the layer to run.
   */
  template<typename eT>
  void Backward(const arma::Mat<eT>&& /* input */,
                arma::Mat<eT>&& gy,
                arma::Mat<eT>&& g,
                const size_t index);

  /*
   * Calculate the gradient using the output delta and the input activation.
   *
   * @param input The input parameter used for calculating the gradient.
   * @param error The calculated error.
   * @param gradient The calculated gradient.
   */
  template<typename eT>
  void Gradient(arma::Mat<eT>&& input,
                arma::Mat<eT>&& error,
                arma::Mat<eT>&& gradient);

  /*
   * This is the overload of Gradient() that runs a specific layer with the
   * given input.
   *
   * @param input The input parameter used for calculating the gradient.
   * @param error The calculated error.
   * @param gradient The calculated gradient.
   * @param index The index of the layer to run.
   */
  template<typename eT>
  void Gradient(arma::Mat<eT>&& input,
                arma::Mat<eT>&& error,
                arma::Mat<eT>&& gradient,
                const size_t index);

  /*
   * Add a new module to the model.
   *
   * @param args The parameters used to construct the layer.
   */
  template <class LayerType, class... Args>
  void Add(Args... args) { network.push_back(new LayerType(args...)); }

  /*
   * Add a new module to the model.
   *
   * @param layer The Layer to be added to the model.
   */
  void Add(LayerTypes<CustomLayers...> layer) { network.push_back(layer); }

  //! Get the input parameter.
  InputDataType const& InputParameter() const { return inputParameter; }
  //! Modify the input parameter.
  InputDataType& InputParameter() { return inputParameter; }

  //! Get the output parameter.
  OutputDataType const& OutputParameter() const { return outputParameter; }
  //! Modify the output parameter.
  OutputDataType& OutputParameter() { return outputParameter; }

  //! Get the delta.
  OutputDataType const& Delta() const { return delta; }
  //! Modify the delta.
  OutputDataType& Delta() { return delta; }

  //! Return the model modules.
  std::vector<LayerTypes<CustomLayers...> >& Model()
  {
    if (model)
    {
      return network;
    }

    return empty;
  }

  //! Get the parameters.
  OutputDataType const& Parameters() const { return weights; }
  //! Modify the parameters.
  OutputDataType& Parameters() { return weights; }

  //! Get the value of the run parameter.
  bool Run() const { return run; }
  //! Modify the value of the run parameter.
  bool& Run() { return run; }

  /**
   * Serialize the layer.
   */
  template<typename Archive>
  void serialize(Archive& ar, const unsigned int /* version */);

 private:
  //! Parameter which indicates whether the child modules should be exposed.
  bool model;

  //! Parameter which indicates whether Forward/Backward should be called on
  //! the child modules before merging the output.
  bool run;

  //! Indicates whether this object is responsible for deleting the layers.
  bool ownsLayer;

  //! Locally-stored network modules.
  std::vector<LayerTypes<CustomLayers...> > network;

  //! Locally-stored empty list of modules.
  std::vector<LayerTypes<CustomLayers...> > empty;

  //! Visitor that executes the destructor of the instantiated object.
  DeleteVisitor deleteVisitor;

  //! Visitor that exposes the output parameter of the given module.
  OutputParameterVisitor outputParameterVisitor;

  //! Visitor that exposes the delta parameter of the given module.
  DeltaVisitor deltaVisitor;

  //! Locally-stored delta object.
  OutputDataType delta;

  //! Locally-stored gradient object.
  OutputDataType gradient;

  //! Locally-stored input parameter object.
  InputDataType inputParameter;

  //! Locally-stored output parameter object.
  OutputDataType outputParameter;

  //! Locally-stored weights object.
  OutputDataType weights;
}; // class AddMerge

} // namespace ann
} // namespace mlpack

// Include implementation.
#include "add_merge_impl.hpp"

#endif
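For orientation, below is a minimal usage sketch of this class, not part of add_merge.hpp itself. It assumes the boost::variant-based ann API shown above, where Forward() takes rvalue references, and it assumes that <mlpack/methods/ann/layer/layer.hpp> provides the layer definitions and the IdentityLayer<> alias; those headers and the exact call style are assumptions about the surrounding library rather than something stated in this file.

// Sketch: build an AddMerge module holding two identity layers, so the merged
// output is the element-wise sum of two copies of the input, i.e. 2 * input.
#include <mlpack/core.hpp>
#include <mlpack/methods/ann/layer/layer.hpp>  // assumed to provide AddMerge and IdentityLayer

using namespace mlpack::ann;

int main()
{
  // model = false (do not expose child modules), run = true (call Forward on
  // each child before merging) -- these are the declared defaults.
  AddMerge<> module;

  // Add children either by constructing them in place...
  module.Add<IdentityLayer<> >();
  // ...or by handing over an already constructed layer pointer.
  module.Add(new IdentityLayer<>());

  arma::mat input = arma::randu<arma::mat>(4, 1);
  arma::mat output;

  // With run == true, Forward() evaluates every child on the input and sums
  // the child outputs into `output`; here that yields 2 * input.
  module.Forward(std::move(input), std::move(output));

  output.print("merged output");
  return 0;
}

When AddMerge is used inside a larger network rather than standalone, the run flag can be set to false so that the enclosing model drives the child layers itself and the module only performs the final summation.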