12 #ifndef MLPACK_METHODS_ANN_LAYER_WEIGHTNORM_HPP 13 #define MLPACK_METHODS_ANN_LAYER_WEIGHTNORM_HPP 49 typename InputType = arma::mat,
50 typename OutputType = arma::mat
85 void SetWeights(
typename OutputType::elem_type* weightsPtr);
96 void Forward(
const InputType& input, OutputType& output);
106 void Backward(
const InputType& input,
107 const OutputType& gy,
118 void Gradient(
const InputType& input,
119 const OutputType& error,
120 OutputType& gradient);
141 template<
typename Archive>
142 void serialize(Archive& ar,
const uint32_t );
146 size_t biasWeightSize;
152 size_t layerWeightSize;
155 void ResetGradients(OutputType& gradient);
158 OutputType scalarParameter;
161 OutputType vectorParameter;
167 OutputType layerGradients;
170 OutputType layerWeights;
180 #include "weight_norm_impl.hpp" WeightNormType()
Create an empty WeightNorm layer.
std::vector< size_t > inputDimensions
Logical input dimensions of each point.
~WeightNormType()
Destructor to release allocated memory.
WeightNormType & operator=(const WeightNormType &other)
Copy the given layer.
Linear algebra utility functions, generally performed on matrices or vectors.
void Backward(const InputType &input, const OutputType &gy, OutputType &g)
Backward pass through the layer.
const size_t WeightSize() const
Get the total number of trainable weights in the layer.
The core includes that mlpack expects; standard C++ includes and Armadillo.
const std::vector< size_t > & OutputDimensions()
Get the output dimensions.
void SetWeights(typename OutputType::elem_type *weightsPtr)
Reset the layer parameters.
void Gradient(const InputType &input, const OutputType &error, OutputType &gradient)
Calculate the gradient using the output delta, the input activations, and the weights of the wrapped layer.
void Forward(const InputType &input, OutputType &output)
Forward pass of the WeightNorm layer.
void serialize(Archive &ar, const uint32_t)
Serialize the layer.
const std::vector< size_t > & InputDimensions() const
Get the input dimensions.
const std::vector< size_t > OutputDimensions() const
OutputType const & Parameters() const
Get the parameters.
Declaration of the WeightNorm layer class.
OutputType & Parameters()
Modify the parameters.
WeightNormType< arma::mat, arma::mat > WeightNorm
A layer is an abstract class implementing common neural network operations, such as convolution, batch normalization, etc.
Layer< InputType, OutputType > *const & WrappedLayer()
Get the wrapped layer.
WeightNormType * Clone() const
Clone the WeightNormType object. This handles polymorphism correctly.
virtual size_t WeightSize() const
Get the total number of trainable weights in the layer.