layer_types.hpp
Go to the documentation of this file.
12 #ifndef MLPACK_METHODS_ANN_LAYER_LAYER_TYPES_HPP
13 #define MLPACK_METHODS_ANN_LAYER_LAYER_TYPES_HPP
14 
15 #include
16 
17 // Layer modules.
57 
58 // Convolution modules.
62 
63 // Regularizers.
65 
66 // Loss function modules.
68 
69 namespace mlpack {
70 namespace ann {
71 
72 template<typename InputDataType, typename OutputDataType> class BatchNorm;
73 template<typename InputDataType, typename OutputDataType> class DropConnect;
74 template<typename InputDataType, typename OutputDataType> class Glimpse;
75 template<typename InputDataType, typename OutputDataType> class LayerNorm;
76 template<typename InputDataType, typename OutputDataType> class LSTM;
77 template<typename InputDataType, typename OutputDataType> class GRU;
78 template<typename InputDataType, typename OutputDataType> class FastLSTM;
79 template<typename InputDataType, typename OutputDataType> class VRClassReward;
80 template<typename InputDataType, typename OutputDataType> class Concatenate;
81 template<typename InputDataType, typename OutputDataType> class Padding;
82 
83 template<typename InputDataType,
84  typename OutputDataType,
85  typename RegularizerType>
86 class Linear;
87 
88 template<typename InputDataType,
89  typename OutputDataType,
90  typename Activation>
91 class RBF;
92 
93 template<typename InputDataType,
94  typename OutputDataType,
95  typename RegularizerType>
97 
98 template<typename InputDataType,
99  typename OutputDataType>
101 
102 template<typename InputDataType,
103  typename OutputDataType,
104  typename RegularizerType>
105 class Linear3D;
106 
107 template<typename InputDataType,
108  typename OutputDataType
109 >
111 
112 template<typename InputDataType,
113  typename OutputDataType
114 >
116 
117 template <typename InputDataType,
118  typename OutputDataType,
119  typename RegularizerType>
121 
122 template<typename InputDataType,
123  typename OutputDataType
124 >
126 
127 template<typename InputDataType,
128  typename OutputDataType,
129  typename... CustomLayers
130 >
131 class AddMerge;
132 
133 template<typename InputDataType,
134  typename OutputDataType,
135  bool residual,
136  typename... CustomLayers
137 >
139 
140 template<typename InputDataType,
141  typename OutputDataType,
142  typename... CustomLayers
143 >
144 class Highway;
145 
146 template<typename InputDataType,
147  typename OutputDataType,
148  typename... CustomLayers
149 >
150 class Recurrent;
151 
152 template<typename InputDataType,
153  typename OutputDataType,
154  typename... CustomLayers
155 >
156 class Concat;
157 
158 template<
159  typename OutputLayerType,
160  typename InputDataType,
161  typename OutputDataType
162 >
163 class ConcatPerformance;
164 
165 template<
166  typename ForwardConvolutionRule,
167  typename BackwardConvolutionRule,
168  typename GradientConvolutionRule,
169  typename InputDataType,
170  typename OutputDataType
171 >
172 class Convolution;
173 
174 template<
175  typename ForwardConvolutionRule,
176  typename BackwardConvolutionRule,
177  typename GradientConvolutionRule,
178  typename InputDataType,
179  typename OutputDataType
180 >
182 
183 template<
184  typename ForwardConvolutionRule,
185  typename BackwardConvolutionRule,
186  typename GradientConvolutionRule,
187  typename InputDataType,
188  typename OutputDataType
189 >
190 class AtrousConvolution;
191 
192 template<
193  typename InputDataType,
194  typename OutputDataType
195 >
197 
198 template<typename InputDataType,
199  typename OutputDataType,
200  typename... CustomLayers
201 >
203 
204 template <typename InputDataType,
205  typename OutputDataType,
206  typename... CustomLayers
207 >
209 
210 template <typename InputDataType,
211  typename OutputDataType
212 >
213 class AdaptiveMaxPooling;
214 
215 template <typename InputDataType,
216  typename OutputDataType
217 >
218 class AdaptiveMeanPooling;
219 
220 using MoreTypes = boost::variant<
221  Linear3D*,
222  Glimpse*,
223  Highway*,
225  Recurrent*,
229  Select*,
230  Sequential*,
231  Sequential*,
232  Subview*,
233  VRClassReward*,
235  RBF*,
236  BaseLayer*,
238 >;
239 
240 template <typename... CustomLayers>
241 using LayerTypes = boost::variant<
244  Add*,
245  AddMerge*,
246  AlphaDropout*,
250  arma::mat, arma::mat>*,
251  BaseLayer*,
252  BaseLayer*,
253  BaseLayer*,
254  BaseLayer*,
255  BaseLayer*,
256  BatchNorm*,
258  CELU*,
259  Concat*,
260  Concatenate*,
262  arma::mat, arma::mat>*,
263  Constant*,
264  Convolution,
265  NaiveConvolution,
266  NaiveConvolution, arma::mat, arma::mat>*,
267  CReLU*,
268  DropConnect*,
269  Dropout*,
270  ELU*,
271  FastLSTM*,
272  FlexibleReLU*,
273  GRU*,
274  HardTanH*,
275  Join*,
276  LayerNorm*,
277  LeakyReLU*,
278  Linear*,
279  LinearNoBias*,
280  LogSoftMax*,
281  Lookup*,
282  LSTM*,
283  MaxPooling*,
284  MeanPooling*,
287  MultiplyMerge*,
289  NoisyLinear*,
290  Padding*,
291  PReLU*,
292  Softmax*,
295  NaiveConvolution,
296  NaiveConvolution, arma::mat, arma::mat>*,
297  WeightNorm*,
298  MoreTypes,
299  CustomLayers*...
300 >;
301 
302 } // namespace ann
303 } // namespace mlpack
304 
305 #endif
Implementation of the variance reduced classification reinforcement layer.
Definition: layer_types.hpp:79
Implementation of the Add module class.
Definition: add.hpp:34
Implementation of the AdaptiveMaxPooling layer.
Implementation of the Concatenate module class.
Definition: concatenate.hpp:36
Implementation of the log softmax layer.
Definition: log_softmax.hpp:36
Implementation of the AddMerge module class.
Definition: add_merge.hpp:42
Linear algebra utility functions, generally performed on matrices or vectors.
Implementation of the Padding module class.
Definition: layer_types.hpp:81
Declaration of the VirtualBatchNorm layer class.
The FlexibleReLU activation function, defined by.
Implementation of the Transposed Convolution class.
Implementation of the reinforce normal layer.
boost::variant< AdaptiveMaxPooling< arma::mat, arma::mat > *, AdaptiveMeanPooling< arma::mat, arma::mat > *, Add< arma::mat, arma::mat > *, AddMerge< arma::mat, arma::mat > *, AlphaDropout< arma::mat, arma::mat > *, AtrousConvolution< NaiveConvolution< ValidConvolution >, NaiveConvolution< FullConvolution >, NaiveConvolution< ValidConvolution >, arma::mat, arma::mat > *, BaseLayer< LogisticFunction, arma::mat, arma::mat > *, BaseLayer< IdentityFunction, arma::mat, arma::mat > *, BaseLayer< TanhFunction, arma::mat, arma::mat > *, BaseLayer< SoftplusFunction, arma::mat, arma::mat > *, BaseLayer< RectifierFunction, arma::mat, arma::mat > *, BatchNorm< arma::mat, arma::mat > *, BilinearInterpolation< arma::mat, arma::mat > *, CELU< arma::mat, arma::mat > *, Concat< arma::mat, arma::mat > *, Concatenate< arma::mat, arma::mat > *, ConcatPerformance< NegativeLogLikelihood< arma::mat, arma::mat >, arma::mat, arma::mat > *, Constant< arma::mat, arma::mat > *, Convolution< NaiveConvolution< ValidConvolution >, NaiveConvolution< FullConvolution >, NaiveConvolution< ValidConvolution >, arma::mat, arma::mat > *, CReLU< arma::mat, arma::mat > *, DropConnect< arma::mat, arma::mat > *, Dropout< arma::mat, arma::mat > *, ELU< arma::mat, arma::mat > *, FastLSTM< arma::mat, arma::mat > *, FlexibleReLU< arma::mat, arma::mat > *, GRU< arma::mat, arma::mat > *, HardTanH< arma::mat, arma::mat > *, Join< arma::mat, arma::mat > *, LayerNorm< arma::mat, arma::mat > *, LeakyReLU< arma::mat, arma::mat > *, Linear< arma::mat, arma::mat, NoRegularizer > *, LinearNoBias< arma::mat, arma::mat, NoRegularizer > *, LogSoftMax< arma::mat, arma::mat > *, Lookup< arma::mat, arma::mat > *, LSTM< arma::mat, arma::mat > *, MaxPooling< arma::mat, arma::mat > *, MeanPooling< arma::mat, arma::mat > *, MiniBatchDiscrimination< arma::mat, arma::mat > *, MultiplyConstant< arma::mat, arma::mat > *, MultiplyMerge< arma::mat, arma::mat > *, NegativeLogLikelihood< arma::mat, arma::mat > *, NoisyLinear< arma::mat, 
arma::mat > *, Padding< arma::mat, arma::mat > *, PReLU< arma::mat, arma::mat > *, Softmax< arma::mat, arma::mat > *, SpatialDropout< arma::mat, arma::mat > *, TransposedConvolution< NaiveConvolution< ValidConvolution >, NaiveConvolution< ValidConvolution >, NaiveConvolution< ValidConvolution >, arma::mat, arma::mat > *, WeightNorm< arma::mat, arma::mat > *, MoreTypes, CustomLayers *... > LayerTypes
Implementation of the Linear layer class.
Definition: layer_types.hpp:86
The LeakyReLU activation function, defined by.
Definition: leaky_relu.hpp:44
This class implements the Recurrent Model for Visual Attention, using a variety of possible layer imp...
Implementation of the Convolution class.
Definition: convolution.hpp:48
Positional Encoding injects some information about the relative or absolute position of the tokens in...
Implementation of the MeanPooling.
Implementation of the Reparametrization layer class.
Implementation of the Join module class.
Definition: join.hpp:33
Implementation of the concat performance class.
Declaration of the WeightNorm layer class.
The Hard Tanh activation function, defined by.
Definition: hard_tanh.hpp:49
The select module selects the specified column from a given input matrix.
Definition: select.hpp:32
Implementation of the negative log likelihood layer.
Implementation of the Softmax layer.
Definition: softmax.hpp:38
Multihead Attention allows the model to jointly attend to information from different representation s...
The PReLU activation function, defined by (where alpha is trainable)
Implementation of the AdaptiveMeanPooling.
Implementation of the base layer.
Definition: base_layer.hpp:65
Implementation of the Concat class.
Definition: concat.hpp:45
Implementation of the Highway layer.
Definition: highway.hpp:60
Implementation of the LSTM module class.
Definition: layer_types.hpp:76
Implementation of the Linear3D layer class.
Declaration of the Layer Normalization class.
Definition: layer_norm.hpp:65
The Lookup class stores word embeddings and retrieves them using tokens.
Definition: lookup.hpp:41
Implementation of the NoisyLinear layer class.
Implementation of the subview layer.
Definition: subview.hpp:34
Implementation of the MiniBatchDiscrimination layer.
Implementation of the MultiplyMerge module class.
Implementation of the LinearNoBias class.
Definition: layer_types.hpp:96
A concatenated ReLU has two outputs, one ReLU and one negative ReLU, concatenated together...
Definition: c_relu.hpp:50
Computes the two-dimensional convolution.
An implementation of a gru network layer.
Definition: gru.hpp:58
The dropout layer is a regularizer that randomly with probability 'ratio' sets input values to zero a...
Definition: dropout.hpp:53
The glimpse layer returns a retina-like representation (down-scaled cropped images) of increasing sca...
Definition: glimpse.hpp:88
The DropConnect layer is a regularizer that randomly with probability ratio sets the connection value...
Definition: dropconnect.hpp:63
boost::variant< Linear3D< arma::mat, arma::mat, NoRegularizer > *, Glimpse< arma::mat, arma::mat > *, Highway< arma::mat, arma::mat > *, MultiheadAttention< arma::mat, arma::mat, NoRegularizer > *, Recurrent< arma::mat, arma::mat > *, RecurrentAttention< arma::mat, arma::mat > *, ReinforceNormal< arma::mat, arma::mat > *, Reparametrization< arma::mat, arma::mat > *, Select< arma::mat, arma::mat > *, Sequential< arma::mat, arma::mat, false > *, Sequential< arma::mat, arma::mat, true > *, Subview< arma::mat, arma::mat > *, VRClassReward< arma::mat, arma::mat > *, VirtualBatchNorm< arma::mat, arma::mat > *, RBF< arma::mat, arma::mat, GaussianFunction > *, BaseLayer< GaussianFunction, arma::mat, arma::mat > *, PositionalEncoding< arma::mat, arma::mat > *> MoreTypes
Implementation of the multiply constant layer.
The alpha - dropout layer is a regularizer that randomly with probability 'ratio' sets input values t...
The CELU activation function, defined by.
Definition: celu.hpp:60
Declaration of the Batch Normalization layer class.
Definition: batch_norm.hpp:56
Implementation of the RecurrentLayer class.
Implementation of the Sequential class.
Implementation of the constant layer.
Definition: constant.hpp:34
Implementation of the MaxPooling layer.
Definition: max_pooling.hpp:52
The ELU activation function, defined by.
Definition: elu.hpp:111
Implementation of the Radial Basis Function layer.
Definition: layer_types.hpp:91
Implementation of the SpatialDropout layer.
Definition and Implementation of the Bilinear Interpolation Layer.
An implementation of a faster version of the Fast LSTM network layer.
Definition: fast_lstm.hpp:66
Implementation of the Atrous Convolution class.