mlpack 3.3.2
layer_types.hpp
#ifndef MLPACK_METHODS_ANN_LAYER_LAYER_TYPES_HPP
#define MLPACK_METHODS_ANN_LAYER_LAYER_TYPES_HPP

#include <boost/variant.hpp>

// Layer modules.

// Convolution modules.

// Regularizers.

// Loss function modules.

namespace mlpack {
namespace ann {

template<typename InputDataType, typename OutputDataType> class BatchNorm;
template<typename InputDataType, typename OutputDataType> class DropConnect;
template<typename InputDataType, typename OutputDataType> class Glimpse;
template<typename InputDataType, typename OutputDataType> class LayerNorm;
template<typename InputDataType, typename OutputDataType> class LSTM;
template<typename InputDataType, typename OutputDataType> class GRU;
template<typename InputDataType, typename OutputDataType> class FastLSTM;
template<typename InputDataType, typename OutputDataType> class VRClassReward;
template<typename InputDataType, typename OutputDataType> class Concatenate;
template<typename InputDataType, typename OutputDataType> class Padding;

template<typename InputDataType,
         typename OutputDataType,
         typename RegularizerType>
class Linear;

template<typename InputDataType,
         typename OutputDataType,
         typename Activation>
class RBF;

template<typename InputDataType,
         typename OutputDataType,
         typename RegularizerType>
class LinearNoBias;

template<typename InputDataType,
         typename OutputDataType>
class NoisyLinear;

template<typename InputDataType,
         typename OutputDataType
>

template<typename InputDataType,
         typename OutputDataType
>

template<typename InputDataType,
         typename OutputDataType
>

template<typename InputDataType,
         typename OutputDataType,
         typename... CustomLayers
>
class AddMerge;

template<typename InputDataType,
         typename OutputDataType,
         bool residual,
         typename... CustomLayers
>
class Sequential;

template<typename InputDataType,
         typename OutputDataType,
         typename... CustomLayers
>
class Highway;

template<typename InputDataType,
         typename OutputDataType,
         typename... CustomLayers
>
class Recurrent;

template<typename InputDataType,
         typename OutputDataType,
         typename... CustomLayers
>
class Concat;

template<
    typename OutputLayerType,
    typename InputDataType,
    typename OutputDataType
>
class ConcatPerformance;

template<
    typename ForwardConvolutionRule,
    typename BackwardConvolutionRule,
    typename GradientConvolutionRule,
    typename InputDataType,
    typename OutputDataType
>
class Convolution;

template<
    typename ForwardConvolutionRule,
    typename BackwardConvolutionRule,
    typename GradientConvolutionRule,
    typename InputDataType,
    typename OutputDataType
>
class TransposedConvolution;

template<
    typename ForwardConvolutionRule,
    typename BackwardConvolutionRule,
    typename GradientConvolutionRule,
    typename InputDataType,
    typename OutputDataType
>
class AtrousConvolution;

template<
    typename InputDataType,
    typename OutputDataType
>

template<typename InputDataType,
         typename OutputDataType,
         typename... CustomLayers
>
class MultiplyMerge;

template <typename InputDataType,
          typename OutputDataType,
          typename... CustomLayers
>
class WeightNorm;

template <typename InputDataType,
          typename OutputDataType
>
class AdaptiveMaxPooling;

template <typename InputDataType,
          typename OutputDataType
>
class AdaptiveMeanPooling;

using MoreTypes = boost::variant<
    Glimpse<arma::mat, arma::mat>*,
    Highway<arma::mat, arma::mat>*,
    Recurrent<arma::mat, arma::mat>*,
    RecurrentAttention<arma::mat, arma::mat>*,
    ReinforceNormal<arma::mat, arma::mat>*,
    Reparametrization<arma::mat, arma::mat>*,
    Select<arma::mat, arma::mat>*,
    Sequential<arma::mat, arma::mat, false>*,
    Sequential<arma::mat, arma::mat, true>*,
    Subview<arma::mat, arma::mat>*,
    VRClassReward<arma::mat, arma::mat>*,
    VirtualBatchNorm<arma::mat, arma::mat>*,
    RBF<arma::mat, arma::mat, GaussianFunction>*,
    BaseLayer<GaussianFunction, arma::mat, arma::mat>*
>;

template <typename... CustomLayers>
using LayerTypes = boost::variant<
    AdaptiveMaxPooling<arma::mat, arma::mat>*,
    AdaptiveMeanPooling<arma::mat, arma::mat>*,
    Add<arma::mat, arma::mat>*,
    AddMerge<arma::mat, arma::mat>*,
    AlphaDropout<arma::mat, arma::mat>*,
    AtrousConvolution<NaiveConvolution<ValidConvolution>,
                      NaiveConvolution<FullConvolution>,
                      NaiveConvolution<ValidConvolution>,
                      arma::mat, arma::mat>*,
    BaseLayer<LogisticFunction, arma::mat, arma::mat>*,
    BaseLayer<IdentityFunction, arma::mat, arma::mat>*,
    BaseLayer<TanhFunction, arma::mat, arma::mat>*,
    BaseLayer<SoftplusFunction, arma::mat, arma::mat>*,
    BaseLayer<RectifierFunction, arma::mat, arma::mat>*,
    BatchNorm<arma::mat, arma::mat>*,
    BilinearInterpolation<arma::mat, arma::mat>*,
    CELU<arma::mat, arma::mat>*,
    Concat<arma::mat, arma::mat>*,
    Concatenate<arma::mat, arma::mat>*,
    ConcatPerformance<NegativeLogLikelihood<arma::mat, arma::mat>,
                      arma::mat, arma::mat>*,
    Constant<arma::mat, arma::mat>*,
    Convolution<NaiveConvolution<ValidConvolution>,
                NaiveConvolution<FullConvolution>,
                NaiveConvolution<ValidConvolution>,
                arma::mat, arma::mat>*,
    CReLU<arma::mat, arma::mat>*,
    DropConnect<arma::mat, arma::mat>*,
    Dropout<arma::mat, arma::mat>*,
    ELU<arma::mat, arma::mat>*,
    FastLSTM<arma::mat, arma::mat>*,
    FlexibleReLU<arma::mat, arma::mat>*,
    GRU<arma::mat, arma::mat>*,
    HardTanH<arma::mat, arma::mat>*,
    Join<arma::mat, arma::mat>*,
    LayerNorm<arma::mat, arma::mat>*,
    LeakyReLU<arma::mat, arma::mat>*,
    Linear<arma::mat, arma::mat, NoRegularizer>*,
    LinearNoBias<arma::mat, arma::mat, NoRegularizer>*,
    LogSoftMax<arma::mat, arma::mat>*,
    Lookup<arma::mat, arma::mat>*,
    LSTM<arma::mat, arma::mat>*,
    MaxPooling<arma::mat, arma::mat>*,
    MeanPooling<arma::mat, arma::mat>*,
    MiniBatchDiscrimination<arma::mat, arma::mat>*,
    MultiplyConstant<arma::mat, arma::mat>*,
    MultiplyMerge<arma::mat, arma::mat>*,
    NegativeLogLikelihood<arma::mat, arma::mat>*,
    NoisyLinear<arma::mat, arma::mat>*,
    Padding<arma::mat, arma::mat>*,
    PReLU<arma::mat, arma::mat>*,
    Softmax<arma::mat, arma::mat>*,
    TransposedConvolution<NaiveConvolution<ValidConvolution>,
                          NaiveConvolution<ValidConvolution>,
                          NaiveConvolution<ValidConvolution>,
                          arma::mat, arma::mat>*,
    WeightNorm<arma::mat, arma::mat>*,
    MoreTypes,
    CustomLayers*...
>;

} // namespace ann
} // namespace mlpack

#endif
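
Usage note (not part of the header above): LayerTypes<> is the boost::variant of layer pointers that mlpack's FFN and RNN classes use to hold a network as a heterogeneous list, and every generic operation on a stored layer is dispatched through boost::apply_visitor. The following is a minimal sketch, assuming mlpack 3.3.2 with its Boost and Armadillo dependencies installed; the DeleteVisitor and OutputParameterVisitor classes come from mlpack's mlpack/methods/ann/visitor/ directory, and none of this code is part of layer_types.hpp itself.

#include <mlpack/core.hpp>
#include <mlpack/methods/ann/layer/layer.hpp>
#include <mlpack/methods/ann/visitor/delete_visitor.hpp>
#include <mlpack/methods/ann/visitor/output_parameter_visitor.hpp>

#include <iostream>

using namespace mlpack::ann;

int main()
{
  // Any pointer type listed in the variant can be stored directly;
  // Linear<> defaults to Linear<arma::mat, arma::mat, NoRegularizer>.
  LayerTypes<> layer = new Linear<>(10, 5);

  // Generic access goes through boost::apply_visitor; this mirrors how the
  // network classes reach each element without knowing its concrete type.
  OutputParameterVisitor outputParameterVisitor;
  arma::mat& output = boost::apply_visitor(outputParameterVisitor, layer);
  std::cout << "output currently holds " << output.n_elem << " elements\n";

  // The variant stores a raw pointer, so the layer is freed explicitly.
  DeleteVisitor deleteVisitor;
  boost::apply_visitor(deleteVisitor, layer);

  return 0;
}

The trailing CustomLayers*... pack is the extension point for user-defined layers: a hypothetical layer MyLayer<arma::mat, arma::mat> (not part of mlpack) would become storable as LayerTypes<MyLayer<arma::mat, arma::mat>*>, which is also how the CustomLayers template parameter of FFN and RNN is forwarded.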