layer_types.hpp
Go to the documentation of this file.
1 
12 #ifndef MLPACK_METHODS_ANN_LAYER_LAYER_TYPES_HPP
13 #define MLPACK_METHODS_ANN_LAYER_LAYER_TYPES_HPP
14 
15 #include <boost/variant.hpp>
16 
17 // Layer modules.
60 
61 // Convolution modules.
65 
66 // Regularizers.
68 
69 // Loss function modules.
71 
72 namespace mlpack {
73 namespace ann {
74 
75 template<typename InputDataType, typename OutputDataType> class BatchNorm;
76 template<typename InputDataType, typename OutputDataType> class DropConnect;
77 template<typename InputDataType, typename OutputDataType> class Glimpse;
78 template<typename InputDataType, typename OutputDataType> class LayerNorm;
79 template<typename InputDataType, typename OutputDataType> class LSTM;
80 template<typename InputDataType, typename OutputDataType> class GRU;
81 template<typename InputDataType, typename OutputDataType> class FastLSTM;
82 template<typename InputDataType, typename OutputDataType> class VRClassReward;
83 template<typename InputDataType, typename OutputDataType> class Concatenate;
84 template<typename InputDataType, typename OutputDataType> class Padding;
85 
86 template<typename InputDataType,
87  typename OutputDataType,
88  typename RegularizerType>
89 class Linear;
90 
91 template<typename InputDataType,
92  typename OutputDataType,
93  typename Activation>
94 class RBF;
95 
96 template<typename InputDataType,
97  typename OutputDataType,
98  typename RegularizerType>
99 class LinearNoBias;
100 
101 template<typename InputDataType,
102  typename OutputDataType>
103 class NoisyLinear;
104 
105 template<typename InputDataType,
106  typename OutputDataType,
107  typename RegularizerType>
108 class Linear3D;
109 
110 template<typename InputDataType,
111  typename OutputDataType
112 >
113 class VirtualBatchNorm;
114 
115 template<typename InputDataType,
116  typename OutputDataType
117 >
118 class MiniBatchDiscrimination;
119 
120 template <typename InputDataType,
121  typename OutputDataType,
122  typename RegularizerType>
123 class MultiheadAttention;
124 
125 template<typename InputDataType,
126  typename OutputDataType
127 >
128 class Reparametrization;
129 
130 template<typename InputDataType,
131  typename OutputDataType,
132  typename... CustomLayers
133 >
134 class AddMerge;
135 
136 template<typename InputDataType,
137  typename OutputDataType,
138  bool residual,
139  typename... CustomLayers
140 >
141 class Sequential;
142 
143 template<typename InputDataType,
144  typename OutputDataType,
145  typename... CustomLayers
146 >
147 class Highway;
148 
149 template<typename InputDataType,
150  typename OutputDataType,
151  typename... CustomLayers
152 >
153 class Recurrent;
154 
155 template<typename InputDataType,
156  typename OutputDataType,
157  typename... CustomLayers
158 >
159 class Concat;
160 
161 template<
162  typename OutputLayerType,
163  typename InputDataType,
164  typename OutputDataType
165 >
166 class ConcatPerformance;
167 
168 template<
169  typename ForwardConvolutionRule,
170  typename BackwardConvolutionRule,
171  typename GradientConvolutionRule,
172  typename InputDataType,
173  typename OutputDataType
174 >
175 class Convolution;
176 
177 template<
178  typename ForwardConvolutionRule,
179  typename BackwardConvolutionRule,
180  typename GradientConvolutionRule,
181  typename InputDataType,
182  typename OutputDataType
183 >
184 class TransposedConvolution;
185 
186 template<
187  typename ForwardConvolutionRule,
188  typename BackwardConvolutionRule,
189  typename GradientConvolutionRule,
190  typename InputDataType,
191  typename OutputDataType
192 >
193 class AtrousConvolution;
194 
195 template<
196  typename InputDataType,
197  typename OutputDataType
198 >
199 class SpatialDropout;
200 
201 template<typename InputDataType,
202  typename OutputDataType,
203  typename... CustomLayers
204 >
205 class MultiplyMerge;
206 
207 template <typename InputDataType,
208  typename OutputDataType,
209  typename... CustomLayers
210 >
211 class WeightNorm;
212 
213 template <typename InputDataType,
214  typename OutputDataType
215 >
216 class AdaptiveMaxPooling;
217 
218 template <typename InputDataType,
219  typename OutputDataType
220 >
221 class AdaptiveMeanPooling;
222 
223 using MoreTypes = boost::variant<
224  Linear3D<arma::mat, arma::mat, NoRegularizer>*,
225  LpPooling<arma::mat, arma::mat>*,
226  PixelShuffle<arma::mat, arma::mat>*,
227  Glimpse<arma::mat, arma::mat>*,
228  Highway<arma::mat, arma::mat>*,
229  MultiheadAttention<arma::mat, arma::mat, NoRegularizer>*,
230  Recurrent<arma::mat, arma::mat>*,
231  RecurrentAttention<arma::mat, arma::mat>*,
232  ReinforceNormal<arma::mat, arma::mat>*,
233  Reparametrization<arma::mat, arma::mat>*,
234  Select<arma::mat, arma::mat>*,
235  Sequential<arma::mat, arma::mat, false>*,
236  Sequential<arma::mat, arma::mat, true>*,
237  Subview<arma::mat, arma::mat>*,
238  VRClassReward<arma::mat, arma::mat>*,
239  VirtualBatchNorm<arma::mat, arma::mat>*,
240  RBF<arma::mat, arma::mat, GaussianFunction>*,
241  BaseLayer<GaussianFunction, arma::mat, arma::mat>*,
242  PositionalEncoding<arma::mat, arma::mat>*,
243  ISRLU<arma::mat, arma::mat>*
244 >;
245 
246 template <typename... CustomLayers>
247 using LayerTypes = boost::variant<
248  AdaptiveMaxPooling<arma::mat, arma::mat>*,
249  AdaptiveMeanPooling<arma::mat, arma::mat>*,
250  Add<arma::mat, arma::mat>*,
251  AddMerge<arma::mat, arma::mat>*,
252  AlphaDropout<arma::mat, arma::mat>*,
253  AtrousConvolution<NaiveConvolution<ValidConvolution>,
254  NaiveConvolution<FullConvolution>,
255  NaiveConvolution<ValidConvolution>,
256  arma::mat, arma::mat>*,
257  BaseLayer<LogisticFunction, arma::mat, arma::mat>*,
258  BaseLayer<IdentityFunction, arma::mat, arma::mat>*,
259  BaseLayer<TanhFunction, arma::mat, arma::mat>*,
260  BaseLayer<SoftplusFunction, arma::mat, arma::mat>*,
261  BaseLayer<RectifierFunction, arma::mat, arma::mat>*,
262  BatchNorm<arma::mat, arma::mat>*,
263  BilinearInterpolation<arma::mat, arma::mat>*,
264  CELU<arma::mat, arma::mat>*,
265  Concat<arma::mat, arma::mat>*,
266  Concatenate<arma::mat, arma::mat>*,
267  ConcatPerformance<NegativeLogLikelihood<arma::mat, arma::mat>,
268  arma::mat, arma::mat>*,
269  Constant<arma::mat, arma::mat>*,
270  Convolution<NaiveConvolution<ValidConvolution>,
271  NaiveConvolution<FullConvolution>,
272  NaiveConvolution<ValidConvolution>, arma::mat, arma::mat>*,
273  CReLU<arma::mat, arma::mat>*,
274  DropConnect<arma::mat, arma::mat>*,
275  Dropout<arma::mat, arma::mat>*,
276  ELU<arma::mat, arma::mat>*,
277  FastLSTM<arma::mat, arma::mat>*,
278  FlexibleReLU<arma::mat, arma::mat>*,
279  GRU<arma::mat, arma::mat>*,
280  HardTanH<arma::mat, arma::mat>*,
281  Join<arma::mat, arma::mat>*,
282  LayerNorm<arma::mat, arma::mat>*,
283  LeakyReLU<arma::mat, arma::mat>*,
284  Linear<arma::mat, arma::mat, NoRegularizer>*,
285  LinearNoBias<arma::mat, arma::mat, NoRegularizer>*,
286  LogSoftMax<arma::mat, arma::mat>*,
287  Lookup<arma::mat, arma::mat>*,
288  LSTM<arma::mat, arma::mat>*,
289  MaxPooling<arma::mat, arma::mat>*,
290  MeanPooling<arma::mat, arma::mat>*,
291  MiniBatchDiscrimination<arma::mat, arma::mat>*,
292  MultiplyConstant<arma::mat, arma::mat>*,
293  MultiplyMerge<arma::mat, arma::mat>*,
294  NegativeLogLikelihood<arma::mat, arma::mat>*,
295  NoisyLinear<arma::mat, arma::mat>*,
296  Padding<arma::mat, arma::mat>*,
297  PReLU<arma::mat, arma::mat>*,
298  Softmax<arma::mat, arma::mat>*,
299  SpatialDropout<arma::mat, arma::mat>*,
300  TransposedConvolution<NaiveConvolution<ValidConvolution>,
301  NaiveConvolution<ValidConvolution>,
302  NaiveConvolution<ValidConvolution>, arma::mat, arma::mat>*,
303  WeightNorm<arma::mat, arma::mat>*,
304  MoreTypes,
305  CustomLayers*...
306 >;
307 
308 } // namespace ann
309 } // namespace mlpack
310 
311 #endif
Implementation of the variance reduced classification reinforcement layer.
Definition: layer_types.hpp:82
Implementation of the Add module class.
Definition: add.hpp:34
Implementation of the AdaptiveMaxPooling layer.
boost::variant< Linear3D< arma::mat, arma::mat, NoRegularizer > *, LpPooling< arma::mat, arma::mat > *, PixelShuffle< arma::mat, arma::mat > *, Glimpse< arma::mat, arma::mat > *, Highway< arma::mat, arma::mat > *, MultiheadAttention< arma::mat, arma::mat, NoRegularizer > *, Recurrent< arma::mat, arma::mat > *, RecurrentAttention< arma::mat, arma::mat > *, ReinforceNormal< arma::mat, arma::mat > *, Reparametrization< arma::mat, arma::mat > *, Select< arma::mat, arma::mat > *, Sequential< arma::mat, arma::mat, false > *, Sequential< arma::mat, arma::mat, true > *, Subview< arma::mat, arma::mat > *, VRClassReward< arma::mat, arma::mat > *, VirtualBatchNorm< arma::mat, arma::mat > *, RBF< arma::mat, arma::mat, GaussianFunction > *, BaseLayer< GaussianFunction, arma::mat, arma::mat > *, PositionalEncoding< arma::mat, arma::mat > *, ISRLU< arma::mat, arma::mat > *> MoreTypes
Implementation of the Concatenate module class.
Definition: concatenate.hpp:36
The ISRLU activation function, defined by.
Definition: isrlu.hpp:60
Implementation of the log softmax layer.
Definition: log_softmax.hpp:36
Implementation of the AddMerge module class.
Definition: add_merge.hpp:42
Linear algebra utility functions, generally performed on matrices or vectors.
Implementation of the Padding module class.
Definition: layer_types.hpp:84
Declaration of the VirtualBatchNorm layer class.
The FlexibleReLU activation function, defined by.
Implementation of the Transposed Convolution class.
Implementation of the reinforce normal layer.
Implementation of the LPPooling.
Definition: lp_pooling.hpp:32
boost::variant< AdaptiveMaxPooling< arma::mat, arma::mat > *, AdaptiveMeanPooling< arma::mat, arma::mat > *, Add< arma::mat, arma::mat > *, AddMerge< arma::mat, arma::mat > *, AlphaDropout< arma::mat, arma::mat > *, AtrousConvolution< NaiveConvolution< ValidConvolution >, NaiveConvolution< FullConvolution >, NaiveConvolution< ValidConvolution >, arma::mat, arma::mat > *, BaseLayer< LogisticFunction, arma::mat, arma::mat > *, BaseLayer< IdentityFunction, arma::mat, arma::mat > *, BaseLayer< TanhFunction, arma::mat, arma::mat > *, BaseLayer< SoftplusFunction, arma::mat, arma::mat > *, BaseLayer< RectifierFunction, arma::mat, arma::mat > *, BatchNorm< arma::mat, arma::mat > *, BilinearInterpolation< arma::mat, arma::mat > *, CELU< arma::mat, arma::mat > *, Concat< arma::mat, arma::mat > *, Concatenate< arma::mat, arma::mat > *, ConcatPerformance< NegativeLogLikelihood< arma::mat, arma::mat >, arma::mat, arma::mat > *, Constant< arma::mat, arma::mat > *, Convolution< NaiveConvolution< ValidConvolution >, NaiveConvolution< FullConvolution >, NaiveConvolution< ValidConvolution >, arma::mat, arma::mat > *, CReLU< arma::mat, arma::mat > *, DropConnect< arma::mat, arma::mat > *, Dropout< arma::mat, arma::mat > *, ELU< arma::mat, arma::mat > *, FastLSTM< arma::mat, arma::mat > *, FlexibleReLU< arma::mat, arma::mat > *, GRU< arma::mat, arma::mat > *, HardTanH< arma::mat, arma::mat > *, Join< arma::mat, arma::mat > *, LayerNorm< arma::mat, arma::mat > *, LeakyReLU< arma::mat, arma::mat > *, Linear< arma::mat, arma::mat, NoRegularizer > *, LinearNoBias< arma::mat, arma::mat, NoRegularizer > *, LogSoftMax< arma::mat, arma::mat > *, Lookup< arma::mat, arma::mat > *, LSTM< arma::mat, arma::mat > *, MaxPooling< arma::mat, arma::mat > *, MeanPooling< arma::mat, arma::mat > *, MiniBatchDiscrimination< arma::mat, arma::mat > *, MultiplyConstant< arma::mat, arma::mat > *, MultiplyMerge< arma::mat, arma::mat > *, NegativeLogLikelihood< arma::mat, arma::mat > *, NoisyLinear< arma::mat, 
arma::mat > *, Padding< arma::mat, arma::mat > *, PReLU< arma::mat, arma::mat > *, Softmax< arma::mat, arma::mat > *, SpatialDropout< arma::mat, arma::mat > *, TransposedConvolution< NaiveConvolution< ValidConvolution >, NaiveConvolution< ValidConvolution >, NaiveConvolution< ValidConvolution >, arma::mat, arma::mat > *, WeightNorm< arma::mat, arma::mat > *, MoreTypes, CustomLayers *... > LayerTypes
Implementation of the Linear layer class.
Definition: layer_types.hpp:89
The LeakyReLU activation function, defined by.
Definition: leaky_relu.hpp:44
This class implements the Recurrent Model for Visual Attention, using a variety of possible layer imp...
Implementation of the Convolution class.
Definition: convolution.hpp:77
Positional Encoding injects some information about the relative or absolute position of the tokens in...
Implementation of the MeanPooling.
Implementation of the Reparametrization layer class.
Implementation of the Join module class.
Definition: join.hpp:33
Implementation of the concat performance class.
Declaration of the WeightNorm layer class.
The Hard Tanh activation function, defined by.
Definition: hard_tanh.hpp:49
The select module selects the specified column from a given input matrix.
Definition: select.hpp:32
Implementation of the negative log likelihood layer.
Implementation of the Softmax layer.
Definition: softmax.hpp:38
Multihead Attention allows the model to jointly attend to information from different representation s...
The PReLU activation function, defined by (where alpha is trainable)
Implementation of the AdaptiveMeanPooling.
Implementation of the base layer.
Definition: base_layer.hpp:71
Implementation of the PixelShuffle layer.
Implementation of the Concat class.
Definition: concat.hpp:45
Implementation of the Highway layer.
Definition: highway.hpp:60
Implementation of the LSTM module class.
Definition: layer_types.hpp:79
Implementation of the Linear3D layer class.
Declaration of the Layer Normalization class.
Definition: layer_norm.hpp:65
The Lookup class stores word embeddings and retrieves them using tokens.
Definition: lookup.hpp:41
Implementation of the NoisyLinear layer class.
Implementation of the subview layer.
Definition: subview.hpp:34
Implementation of the MiniBatchDiscrimination layer.
Implementation of the MultiplyMerge module class.
Implementation of the LinearNoBias class.
Definition: layer_types.hpp:99
A concatenated ReLU has two outputs, one ReLU and one negative ReLU, concatenated together...
Definition: c_relu.hpp:50
Computes the two-dimensional convolution.
An implementation of a gru network layer.
Definition: gru.hpp:58
The dropout layer is a regularizer that randomly with probability 'ratio' sets input values to zero a...
Definition: dropout.hpp:53
The glimpse layer returns a retina-like representation (down-scaled cropped images) of increasing sca...
Definition: glimpse.hpp:88
The DropConnect layer is a regularizer that randomly with probability ratio sets the connection value...
Definition: dropconnect.hpp:63
Implementation of the multiply constant layer.
The alpha - dropout layer is a regularizer that randomly with probability 'ratio' sets input values t...
The CELU activation function, defined by.
Definition: celu.hpp:60
Declaration of the Batch Normalization layer class.
Definition: batch_norm.hpp:56
Implementation of the RecurrentLayer class.
Implementation of the Sequential class.
Implementation of the constant layer.
Definition: constant.hpp:34
Implementation of the MaxPooling layer.
Definition: max_pooling.hpp:52
The ELU activation function, defined by.
Definition: elu.hpp:111
Implementation of the Radial Basis Function layer.
Definition: layer_types.hpp:94
Implementation of the SpatialDropout layer.
Definition and Implementation of the Bilinear Interpolation Layer.
An implementation of a faster version of the Fast LSTM network layer.
Definition: fast_lstm.hpp:66
Implementation of the Atrous Convolution class.