Python source code examples: chainer.links.BatchNormalization()
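`chainer.links.BatchNormalization` is a link with learnable scale (`gamma`) and shift (`beta`) parameters that normalizes its input over the batch dimension, while keeping running estimates `avg_mean` and `avg_var` for use at test time. A minimal usage sketch (the sizes are illustrative, not taken from the examples below):

import numpy as np
import chainer
import chainer.links as L

bn = L.BatchNormalization(16)          # one gamma/beta pair per channel
x = np.random.randn(8, 16).astype(np.float32)

y = bn(x)                              # train mode: normalizes with batch statistics and
                                       # updates avg_mean/avg_var (EMA, default decay 0.9)
with chainer.using_config('train', False):
    y = bn(x)                          # test mode: normalizes with avg_mean/avg_var

The examples below show common ways this link is constructed and used.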
Example 1
def __init__(self, in_channels, out_channels, ksize=3, pad=1, activation=F.leaky_relu, mode='none', bn=False, dr=None):
    super(ResBlock, self).__init__()
    initializer = chainer.initializers.GlorotUniform()
    initializer_sc = chainer.initializers.GlorotUniform()
    self.activation = activation
    self.mode = _downsample if mode == 'down' else _upsample if mode == 'up' else None
    self.learnable_sc = in_channels != out_channels
    self.dr = dr
    self.bn = bn
    with self.init_scope():
        self.c1 = L.Convolution2D(in_channels, out_channels, ksize=ksize, pad=pad, initialW=initializer, nobias=bn)
        self.c2 = L.Convolution2D(out_channels, out_channels, ksize=ksize, pad=pad, initialW=initializer, nobias=bn)
        if bn:
            self.b1 = L.BatchNormalization(out_channels)
            self.b2 = L.BatchNormalization(out_channels)
        if self.learnable_sc:
            self.c_sc = L.Convolution2D(in_channels, out_channels, ksize=1, pad=0, initialW=initializer_sc)
Example 2
def __init__(self, in_channels, out_channels, ksize=3, pad=1, activation=F.relu, mode='none', bn=True, dr=None):
    super(ResBlock, self).__init__()
    initializer = chainer.initializers.GlorotUniform()
    initializer_sc = chainer.initializers.GlorotUniform()
    self.activation = activation
    self.mode = _downsample if mode == 'down' else _upsample if mode == 'up' else None
    self.learnable_sc = in_channels != out_channels
    self.dr = dr
    self.bn = bn
    with self.init_scope():
        self.c1 = L.Convolution1D(in_channels, out_channels, ksize=ksize, pad=pad, initialW=initializer, nobias=bn)
        self.c2 = L.Convolution1D(out_channels, out_channels, ksize=ksize, pad=pad, initialW=initializer, nobias=bn)
        if bn:
            self.b1 = L.BatchNormalization(out_channels)
            self.b2 = L.BatchNormalization(out_channels)
        if self.learnable_sc:
            # 1D to match c1/c2; a 2D convolution here would fail on (N, C, L) input
            self.c_sc = L.Convolution1D(in_channels, out_channels, ksize=1, pad=0, initialW=initializer_sc)
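Examples 1 and 2 define only `__init__`; the forward pass is not shown. A plausible sketch of what such a residual block computes, assuming the usual sum of a residual and a shortcut path (the placement of `self.mode` and the dropout handling are guesses):

def residual(self, x):
    h = self.c1(x)
    if self.bn:
        h = self.b1(h)
    h = self.activation(h)
    if self.dr is not None:
        h = F.dropout(h, self.dr)
    h = self.c2(h)
    if self.bn:
        h = self.b2(h)
    if self.mode is not None:
        h = self.mode(h)
    return h

def shortcut(self, x):
    if self.mode is not None:
        x = self.mode(x)
    if self.learnable_sc:
        x = self.c_sc(x)
    return x

def __call__(self, x):
    return self.residual(x) + self.shortcut(x)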
Example 3
def copy_param(target_link, source_link):
    """Copy parameters of a link to another link."""
    target_params = dict(target_link.namedparams())
    for param_name, param in source_link.namedparams():
        if target_params[param_name].array is None:
            raise TypeError(
                'target_link parameter {} is None. Maybe the model params are '
                'not initialized.\nPlease try to forward dummy input '
                'beforehand to determine parameter shape of the model.'.format(
                    param_name))
        target_params[param_name].array[...] = param.array

    # Copy Batch Normalization's statistics
    target_links = dict(target_link.namedlinks())
    for link_name, link in source_link.namedlinks():
        if isinstance(link, L.BatchNormalization):
            target_bn = target_links[link_name]
            target_bn.avg_mean[...] = link.avg_mean
            target_bn.avg_var[...] = link.avg_var
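Because `avg_mean` and `avg_var` are plain arrays rather than `Parameter`s, `namedparams()` does not return them, hence the separate pass over `namedlinks()`. A usage sketch (the two-layer model is illustrative):

import numpy as np
import chainer
import chainer.links as L

def make_model():
    return chainer.Sequential(L.Linear(4, 8), L.BatchNormalization(8))

source = make_model()
target = make_model()
x = np.zeros((1, 4), dtype=np.float32)
source(x)
target(x)                   # forward dummy input so all parameters are initialized
copy_param(target, source)  # target now holds source's weights and BN statistics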
Example 4
def soft_copy_param(target_link, source_link, tau):
    """Soft-copy parameters of a link to another link."""
    target_params = dict(target_link.namedparams())
    for param_name, param in source_link.namedparams():
        if target_params[param_name].array is None:
            raise TypeError(
                'target_link parameter {} is None. Maybe the model params are '
                'not initialized.\nPlease try to forward dummy input '
                'beforehand to determine parameter shape of the model.'.format(
                    param_name))
        target_params[param_name].array[...] *= (1 - tau)
        target_params[param_name].array[...] += tau * param.array

    # Soft-copy Batch Normalization's statistics
    target_links = dict(target_link.namedlinks())
    for link_name, link in source_link.namedlinks():
        if isinstance(link, L.BatchNormalization):
            target_bn = target_links[link_name]
            target_bn.avg_mean[...] *= (1 - tau)
            target_bn.avg_mean[...] += tau * link.avg_mean
            target_bn.avg_var[...] *= (1 - tau)
            target_bn.avg_var[...] += tau * link.avg_var
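This is the soft target-network update (Polyak averaging) used in deep RL algorithms such as DDPG: each parameter moves toward the online network by a fraction tau. A sketch of how it is typically invoked in a training loop (the names and tau value are illustrative):

# after each optimizer step, nudge the target network toward the online one
soft_copy_param(target_q_func, q_func, tau=0.005)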
Example 5
def soft_copy_param(target_link, source_link, tau, layers_in_use=None):
    """Soft-copy parameters of a link to another link."""
    target_params = dict(target_link.namedparams())
    for param_name, param in source_link.namedparams():
        if layers_in_use is not None:
            skip = True
            for name in layers_in_use:
                if param_name.startswith(name):
                    skip = False
                    break
            if skip:
                continue
        target_params[param_name].data[:] *= (1 - tau)
        target_params[param_name].data[:] += tau * param.data

    # Soft-copy Batch Normalization's statistics
    target_links = dict(target_link.namedlinks())
    for link_name, link in source_link.namedlinks():
        if isinstance(link, L.BatchNormalization):
            target_bn = target_links[link_name]
            target_bn.avg_mean[:] *= (1 - tau)
            target_bn.avg_mean[:] += tau * link.avg_mean
            target_bn.avg_var[:] *= (1 - tau)
            target_bn.avg_var[:] += tau * link.avg_var
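This variant limits the update to parameters whose names begin with one of the given prefixes, for example (the prefix is illustrative):

soft_copy_param(target_model, model, tau=0.01, layers_in_use=['/l0'])

Note that the BatchNormalization statistics are still always soft-copied, regardless of `layers_in_use`.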
Example 6
def __init__(self, obs_size, n_actions, n_hidden_channels=[1024, 256]):
    super(QFunction, self).__init__()
    net = []
    inpdim = obs_size
    for i, n_hid in enumerate(n_hidden_channels):
        net += [('l{}'.format(i), L.Linear(inpdim, n_hid))]
        net += [('norm{}'.format(i), L.BatchNormalization(n_hid))]
        net += [('_act{}'.format(i), F.relu)]
        inpdim = n_hid
    net += [('output', L.Linear(inpdim, n_actions))]
    with self.init_scope():
        for n in net:
            if not n[0].startswith('_'):
                setattr(self, n[0], n[1])
    self.forward = net
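Entries whose names start with `_` (the activations) are plain functions, so they are excluded from `init_scope` registration; only true links carry parameters. The stored `net` list is presumably consumed by a forward pass along these lines (a sketch, not part of the original snippet):

def __call__(self, x):
    h = x
    for name, f in self.forward:
        h = f(h)
    return h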
Example 7
def __init__(self):
    super(Mix, self).__init__()
    enc_ch = [3, 64, 256, 512, 1024, 2048]
    ins_ch = [6, 128, 384, 640, 2176, 3072]
    self.conv = [None] * 6
    self.bn = [None] * 6
    for i in range(1, 6):
        c = L.Convolution2D(enc_ch[i] + ins_ch[i], enc_ch[i], 1, nobias=True)
        b = L.BatchNormalization(enc_ch[i])
        self.conv[i] = c
        self.bn[i] = b
        self.add_link('c{}'.format(i), c)
        self.add_link('b{}'.format(i), b)
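Links held in plain Python lists are invisible to Chainer's parameter tracking, so each one is also registered explicitly via `add_link` under the names `c1`…`c5` and `b1`…`b5`; a `chainer.ChainList` would be an alternative way to register a variable number of child links.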
Example 8
def __init__(self, out_ch):
    super(Decoder, self).__init__()
    with self.init_scope():
        self.mix = Mix()
        self.bot1 = BottleNeckB(2048, 1024)
        self.bot2 = BottleNeckB(2048, 1024)
        self.bot3 = BottleNeckB(2048, 1024)
        self.b5 = UpBlock(2048, 1024, 1024)
        self.b4 = UpBlock(1024, 512, 512)
        self.b3 = UpBlock(512, 256, 256)
        self.b2 = UpBlock(256, 64, 128)
        self.b1 = UpBlock(128, 3 + (6 + 3 * 13), 64)
        self.last_b = L.BatchNormalization(64)
        self.last_c = L.Convolution2D(64, out_ch * 2, 1, nobias=True)
Example 9
def __init__(self, ch):
    super(Link_BatchNormalization, self).__init__(
        L.BatchNormalization(1))
    self.n_out = ch.beta.shape[0]
    self.scale = helper.make_tensor_value_info(
        '/gamma', TensorProto.FLOAT, [self.n_out])
    self.B = helper.make_tensor_value_info(
        '/beta', TensorProto.FLOAT, [self.n_out])
    self.mean = helper.make_tensor_value_info(
        '/avg_mean', TensorProto.FLOAT, [self.n_out])
    self.var = helper.make_tensor_value_info(
        '/avg_var', TensorProto.FLOAT, [self.n_out])
    self.eps = ch.eps
    self.momentum = ch.decay
Example 10
def collect_inits(lk, pathname):
    res = []
    for na, pa in lk.namedparams():
        if pa.data is None:
            continue
        if na.count('/') == 1:
            res.append((pathname + na, pa))

    if isinstance(lk, L.BatchNormalization):
        res.append((pathname + '/avg_mean', lk.avg_mean))
        # TODO(satos) As is, the node tests pass, but the ResNet test becomes painful
        # lk.avg_var = np.ones(lk.avg_var.shape).astype(np.float32) * 4.0
        res.append((pathname + '/avg_var', lk.avg_var))
    elif isinstance(lk, L.NStepLSTM) or isinstance(lk, L.NStepBiLSTM):
        # Collect these here first
        for i, clk in enumerate(lk.children()):
            for param in clk.params():
                res.append((pathname + '/%d/%s' % (i, param.name), param))
        return res

    for clk in lk.children():
        res += collect_inits(clk, pathname + '/' + clk.name)
    return res
Example 11
def __init__(self, in_size, ch, out_size, stride=2, groups=1):
    super(BottleNeckA, self).__init__()
    initialW = initializers.HeNormal()
    with self.init_scope():
        self.conv1 = L.Convolution2D(
            in_size, ch, 1, stride, 0, initialW=initialW, nobias=True)
        self.bn1 = L.BatchNormalization(ch)
        self.conv2 = L.Convolution2D(
            ch, ch, 3, 1, 1, initialW=initialW, nobias=True,
            groups=groups)
        self.bn2 = L.BatchNormalization(ch)
        self.conv3 = L.Convolution2D(
            ch, out_size, 1, 1, 0, initialW=initialW, nobias=True)
        self.bn3 = L.BatchNormalization(out_size)
        self.conv4 = L.Convolution2D(
            in_size, out_size, 1, stride, 0,
            initialW=initialW, nobias=True)
        self.bn4 = L.BatchNormalization(out_size)
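Only the constructor is shown; with conv4/bn4 as the projection shortcut, the usual ResNet bottleneck forward for this layout would look roughly like this (a sketch, not the project's own code):

def __call__(self, x):
    h = F.relu(self.bn1(self.conv1(x)))
    h = F.relu(self.bn2(self.conv2(h)))
    h = self.bn3(self.conv3(h))
    return F.relu(h + self.bn4(self.conv4(x)))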
Example 12
def __init__(self, in_channels, out_channels, ksize=None,
             stride=1, pad=0, dilate=1, groups=1, nobias=True,
             initialW=None, initial_bias=None, activ=relu, bn_kwargs={}):
    if ksize is None:
        out_channels, ksize, in_channels = in_channels, out_channels, None
    self.activ = activ
    super(Conv2DBNActiv, self).__init__()
    with self.init_scope():
        self.conv = Convolution2D(
            in_channels, out_channels, ksize, stride, pad,
            nobias, initialW, initial_bias, dilate=dilate, groups=groups)
        if 'comm' in bn_kwargs:
            with flags.ignore_branch():
                self.bn = MultiNodeBatchNormalization(
                    out_channels, **bn_kwargs)
        else:
            self.bn = BatchNormalization(out_channels, **bn_kwargs)
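The block chains convolution, normalization, and activation; in ChainerCV's Conv2DBNActiv the forward pass is essentially the following (sketched here for completeness):

def forward(self, x):
    h = self.conv(x)
    h = self.bn(h)
    if self.activ is None:
        return h
    return self.activ(h)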
Example 13
def __init__(self,
             in_channels,
             out_channels,
             ksize,
             stride,
             pad,
             num_blocks):
    super(PolyConv, self).__init__()
    with self.init_scope():
        self.conv = L.Convolution2D(
            in_channels=in_channels,
            out_channels=out_channels,
            ksize=ksize,
            stride=stride,
            pad=pad,
            nobias=True)
        for i in range(num_blocks):
            setattr(self, "bn{}".format(i + 1), L.BatchNormalization(
                size=out_channels,
                eps=1e-5))
        self.activ = F.relu
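PolyConv shares a single convolution across several blocks while keeping one BatchNormalization per block; the forward pass presumably selects the normalization by block index, along these lines (a sketch):

def __call__(self, x, index):
    x = self.conv(x)
    bn = getattr(self, 'bn{}'.format(index + 1))
    x = bn(x)
    return self.activ(x)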
Example 14
def __init__(self,
             in_channels,
             out_channels,
             ksize,
             stride,
             pad,
             groups):
    super(CondenseSimpleConv, self).__init__()
    with self.init_scope():
        self.bn = L.BatchNormalization(size=in_channels)
        self.activ = F.relu
        self.conv = L.Convolution2D(
            in_channels=in_channels,
            out_channels=out_channels,
            ksize=ksize,
            stride=stride,
            pad=pad,
            nobias=True,
            groups=groups)
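Unlike the conv-BN-ReLU blocks above, this one uses pre-activation ordering (BN, then ReLU, then convolution), so the forward pass would simply mirror the attribute order (a sketch):

def __call__(self, x):
    x = self.bn(x)
    x = self.activ(x)
    return self.conv(x)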
Example 15
def __init__(self,
             in_channels,
             out_channels,
             ksize,
             stride,
             pad):
    super(InceptConv, self).__init__()
    with self.init_scope():
        self.conv = L.Convolution2D(
            in_channels=in_channels,
            out_channels=out_channels,
            ksize=ksize,
            stride=stride,
            pad=pad,
            nobias=True)
        self.bn = L.BatchNormalization(
            size=out_channels,
            decay=0.1,
            eps=1e-3)
        self.activ = F.relu
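Note the non-default `decay=0.1`: in Chainer the running statistics are updated as `running = decay * running + (1 - decay) * batch`, so a small decay makes `avg_mean`/`avg_var` track recent batches almost entirely (the default decay is 0.9).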
Example 16
def __init__(self,
             in_channels,
             out_channels,
             reduction=16):
    super(PreSEAttBlock, self).__init__()
    mid_channels = out_channels // reduction
    with self.init_scope():
        self.bn = L.BatchNormalization(
            size=in_channels,
            eps=1e-5)
        self.conv1 = conv1x1(
            in_channels=in_channels,
            out_channels=mid_channels,
            use_bias=True)
        self.conv2 = conv1x1(
            in_channels=mid_channels,
            out_channels=out_channels,
            use_bias=True)
Example 17
def __init__(self,
             in_channels,
             out_channels,
             stride):
    super(ShakeShakeShortcut, self).__init__()
    assert (out_channels % 2 == 0)
    mid_channels = out_channels // 2
    with self.init_scope():
        self.pool = partial(
            F.average_pooling_2d,
            ksize=1,
            stride=stride)
        self.conv1 = conv1x1(
            in_channels=in_channels,
            out_channels=mid_channels)
        self.conv2 = conv1x1(
            in_channels=in_channels,
            out_channels=mid_channels)
        self.bn = L.BatchNormalization(
            size=out_channels,
            eps=1e-5)
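The shortcut halves the channel count in each of two branches and concatenates the results; in the Shake-Shake architecture the second branch typically sees an input shifted by one pixel before pooling. A sketch under that assumption:

def __call__(self, x):
    x1 = self.conv1(self.pool(x))
    x2 = x[:, :, 1:, 1:]                                    # shift by one pixel
    x2 = F.pad(x2, ((0, 0), (0, 0), (0, 1), (0, 1)), mode='constant')
    x2 = self.conv2(self.pool(x2))
    return self.bn(F.concat((x1, x2), axis=1))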
Example 18
def __init__(self,
             in_channels,
             out_channels):
    super(ShuffleInitBlock, self).__init__()
    with self.init_scope():
        self.conv = conv3x3(
            in_channels=in_channels,
            out_channels=out_channels,
            stride=2)
        self.bn = L.BatchNormalization(size=out_channels)
        self.activ = F.relu
        self.pool = partial(
            F.max_pooling_2d,
            ksize=3,
            stride=2,
            pad=1,
            cover_all=False)
Example 19
def __init__(self,
             in_channels,
             out_channels,
             ksize,
             stride,
             pad,
             activate=True):
    super(DartsConv, self).__init__()
    self.activate = activate
    with self.init_scope():
        if self.activate:
            self.activ = F.relu
        self.conv = L.Convolution2D(
            in_channels=in_channels,
            out_channels=out_channels,
            ksize=ksize,
            stride=stride,
            pad=pad,
            nobias=True)
        self.bn = L.BatchNormalization(
            size=out_channels,
            eps=1e-5)
Example 20
def __init__(self,
             in_channels,
             out_channels,
             ksize,
             stride,
             pad,
             dilate):
    super(DartsDwsConv, self).__init__()
    with self.init_scope():
        self.activ = F.relu
        self.conv = DwsConv(
            in_channels=in_channels,
            out_channels=out_channels,
            ksize=ksize,
            stride=stride,
            pad=pad,
            dilate=dilate,
            use_bias=False)
        self.bn = L.BatchNormalization(
            size=out_channels,
            eps=1e-5)
Example 21
def __init__(self,
             in_channels,
             out_channels,
             stride=2):
    super(DartsReduceBranch, self).__init__()
    assert (out_channels % 2 == 0)
    mid_channels = out_channels // 2
    with self.init_scope():
        self.activ = F.relu
        self.conv1 = conv1x1(
            in_channels=in_channels,
            out_channels=mid_channels,
            stride=stride)
        self.conv2 = conv1x1(
            in_channels=in_channels,
            out_channels=mid_channels,
            stride=stride)
        self.bn = L.BatchNormalization(
            size=out_channels,
            eps=1e-5)
Example 22
def __init__(self,
             in_channels,
             out_channels,
             kernel_size,
             stride,
             pad,
             activate):
    super(DwsConvBlock, self).__init__()
    self.activate = activate
    with self.init_scope():
        if self.activate:
            self.activ = F.relu
        self.conv = DwsConv(
            in_channels=in_channels,
            out_channels=out_channels,
            ksize=kernel_size,
            stride=stride,
            pad=pad)
        self.bn = L.BatchNormalization(
            size=out_channels,
            eps=1e-5)
Example 23
def __init__(self,
             in_channels,
             out_channels):
    super(PyrInitBlock, self).__init__()
    with self.init_scope():
        self.conv = L.Convolution2D(
            in_channels=in_channels,
            out_channels=out_channels,
            ksize=7,
            stride=2,
            pad=3,
            nobias=True)
        self.bn = L.BatchNormalization(
            size=out_channels,
            eps=1e-5)
        self.activ = F.relu
        self.pool = partial(
            F.max_pooling_2d,
            ksize=3,
            stride=2,
            pad=1,
            cover_all=False)
Example 24
def __init__(self,
             in_channels,
             out_channels,
             ksize,
             stride,
             pad):
    super(InceptConv, self).__init__()
    with self.init_scope():
        self.conv = L.Convolution2D(
            in_channels=in_channels,
            out_channels=out_channels,
            ksize=ksize,
            stride=stride,
            pad=pad,
            nobias=True)
        self.bn = L.BatchNormalization(
            size=out_channels,
            eps=1e-3)
        self.activ = F.relu
Example 25
def __init__(self, ksize, n_out, initializer):
    super(ConvBlock, self).__init__()
    pad_size = ksize // 2
    links = [('conv1', L.Convolution2D(None, n_out, ksize, pad=pad_size, initialW=initializer))]
    links += [('bn1', L.BatchNormalization(n_out))]
    for link in links:
        self.add_link(*link)
    self.forward = links
Example 26
def __init__(self, ksize, n_out, initializer):
    super(ResBlock, self).__init__()
    pad_size = ksize // 2
    links = [('conv1', L.Convolution2D(None, n_out, ksize, pad=pad_size, initialW=initializer))]
    links += [('bn1', L.BatchNormalization(n_out))]
    links += [('_act1', F.relu)]
    links += [('conv2', L.Convolution2D(n_out, n_out, ksize, pad=pad_size, initialW=initializer))]
    links += [('bn2', L.BatchNormalization(n_out))]
    for link in links:
        if not link[0].startswith('_'):
            self.add_link(*link)
    self.forward = links
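As in Examples 6 and 25, the stored `self.forward` list drives the computation; for this residual block the implied loop plus skip connection would look roughly like the following (a sketch, assuming the input already has n_out channels so the shapes match):

def __call__(self, x):
    h = x
    for name, f in self.forward:
        h = f(h)
    return h + x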