Diffstat (limited to 'contrib/torch/nn/SpatialBatchNormalization.lua')
 contrib/torch/nn/SpatialBatchNormalization.lua | 35 +++++++++++++++++++++++++++++++++++
 1 file changed, 35 insertions(+), 0 deletions(-)
diff --git a/contrib/torch/nn/SpatialBatchNormalization.lua b/contrib/torch/nn/SpatialBatchNormalization.lua
new file mode 100644
index 000000000..c5004ce3a
--- /dev/null
+++ b/contrib/torch/nn/SpatialBatchNormalization.lua
@@ -0,0 +1,35 @@
+--[[
+ This file implements Batch Normalization as described in the paper:
+ "Batch Normalization: Accelerating Deep Network Training
+ by Reducing Internal Covariate Shift"
+ by Sergey Ioffe, Christian Szegedy
+
+ This implementation is useful for inputs coming from convolution layers.
+ For non-convolutional layers, see BatchNormalization.lua
+
+ The operation implemented is:
+ y =     ( x - mean(x) )
+      -------------------- * gamma + beta
+      standard-deviation(x)
+ where gamma and beta are learnable parameters.
+
+ The learning of gamma and beta is optional.
+
+ Usage:
+ with learnable parameters: nn.SpatialBatchNormalization(N [,eps] [,momentum])
+ where N = number of input feature maps
+ without learnable parameters: nn.SpatialBatchNormalization(N [,eps] [,momentum], false)
+
+ eps is a small value added to the variance to avoid division by zero.
+ Defaults to 1e-5.
+
+ During training, this layer keeps a running estimate of its computed mean and std.
+ The running estimate is kept with a default momentum of 0.1 (unless overridden).
+ At test time, this running mean/std is used to normalize.
+]]--
+local BN, parent = torch.class('nn.SpatialBatchNormalization', 'nn.BatchNormalization')
+
+BN.__version = 2
+
+-- expected number of input dimensions (batch x nFeature x height x width)
+BN.nDim = 4
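
To make the normalization formula in the header comment concrete, here is a minimal sketch of the per-feature computation in plain Torch tensor operations. The names gamma, beta, and eps are illustrative stand-ins (the module stores the learnable parameters and epsilon internally), and this is a sketch of the math, not a bit-exact reproduction of the module.

-- One feature map across a batch of 8; batch normalization averages
-- over the batch, height, and width dimensions for each feature.
local x = torch.randn(8, 4, 4)
local eps = 1e-5                 -- matches the documented default
local gamma, beta = 1.5, 0.25    -- illustrative learnable parameters

local mean = x:mean()                    -- mean over batch x height x width
local std  = math.sqrt(x:var() + eps)    -- std with eps added to the variance
local y = (x - mean):div(std):mul(gamma):add(beta)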
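And a minimal usage sketch, assuming the torch and nn packages are installed; the tensor sizes are illustrative. The module expects 4D input of shape batch x nFeature x height x width (consistent with BN.nDim = 4 above), and switching between :training() and :evaluate() selects batch statistics versus the stored running estimates.

require 'nn'

local nFeature = 16
local bn = nn.SpatialBatchNormalization(nFeature)  -- with learnable gamma/beta

local input = torch.randn(8, nFeature, 32, 32)

bn:training()                  -- use batch statistics, update running mean/std
local out = bn:forward(input)

bn:evaluate()                  -- use the stored running estimates instead
local testOut = bn:forward(input)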