From fda3535414840898ce367094afb65c652a1728a5 Mon Sep 17 00:00:00 2001 From: Amir Arsalan Soltani Date: Wed, 27 Sep 2017 17:12:22 -0400 Subject: [PATCH] Update MapTable.lua Share Batch Normalization parameters as well --- MapTable.lua | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/MapTable.lua b/MapTable.lua index c79f1ea1d..0eda8cb3a 100644 --- a/MapTable.lua +++ b/MapTable.lua @@ -3,14 +3,14 @@ local MapTable, parent = torch.class('nn.MapTable', 'nn.Container') function MapTable:__init(module, shared) parent.__init(self) self.shared = (shared == nil) and true or shared - self.sharedparams = {'weight', 'bias', 'gradWeight', 'gradBias'} + self.sharedparams = {'weight', 'bias', 'gradWeight', 'gradBias', 'running_mean', 'running_var', 'save_mean', 'save_std'} self.output = {} self.gradInput = {} self:add(module) end function MapTable:_extend(n) - self.sharedparams = self.sharedparams or {'weight', 'bias', 'gradWeight', 'gradBias'} + self.sharedparams = self.sharedparams or {'weight', 'bias', 'gradWeight', 'gradBias', 'running_mean', 'running_var', 'save_mean', 'save_std'} self.modules[1] = self.module for i = 2, n do if not self.modules[i] then