Python source code examples: chainer.links.DilatedConvolution2D()
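chainer.links.DilatedConvolution2D is a two-dimensional convolution link whose filter taps are spaced dilate pixels apart, enlarging the receptive field without adding parameters. Before the collected examples, here is a minimal usage sketch; the shapes and parameter values are chosen purely for illustration and are not taken from the examples below:

import numpy as np
import chainer.links as L

# 3 input channels -> 8 output channels, 3x3 kernel, dilation rate 2.
# With stride 1, pad = dilate keeps the spatial size for a 3x3 kernel.
conv = L.DilatedConvolution2D(
    in_channels=3, out_channels=8, ksize=3, stride=1, pad=2, dilate=2)

x = np.random.rand(1, 3, 32, 32).astype(np.float32)  # NCHW input batch
y = conv(x)
print(y.shape)  # -> (1, 8, 32, 32)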

Example 1
def __init__(self, in_channels, out_channels, ksize=None,
             stride=1, pad=0, dilate=1, nobias=False, initialW=None,
             initial_bias=None, activ=relu):
    # When ksize is omitted, the positional arguments were actually
    # (out_channels, ksize): shift them and let in_channels be inferred
    # from the input at the first forward pass.
    if ksize is None:
        out_channels, ksize, in_channels = in_channels, out_channels, None

    self.activ = activ
    super(Conv2DActiv, self).__init__()
    with self.init_scope():
        # A DilatedConvolution2D is only needed when dilate > 1; otherwise
        # a plain Convolution2D is equivalent.
        if dilate > 1:
            self.conv = DilatedConvolution2D(
                in_channels, out_channels, ksize, stride, pad, dilate,
                nobias, initialW, initial_bias)
        else:
            self.conv = Convolution2D(
                in_channels, out_channels, ksize, stride, pad,
                nobias, initialW, initial_bias)
Example 2
def __init__(self):
    super(VGG16, self).__init__()
    with self.init_scope():
        # VGG-style 3x3 convolutions; in_channels is omitted, so it is
        # inferred from the input at the first forward pass.
        self.conv1_1 = L.Convolution2D(64, 3, pad=1)
        self.conv1_2 = L.Convolution2D(64, 3, pad=1)

        self.conv2_1 = L.Convolution2D(128, 3, pad=1)
        self.conv2_2 = L.Convolution2D(128, 3, pad=1)

        self.conv3_1 = L.Convolution2D(256, 3, pad=1)
        self.conv3_2 = L.Convolution2D(256, 3, pad=1)
        self.conv3_3 = L.Convolution2D(256, 3, pad=1)

        self.conv4_1 = L.Convolution2D(512, 3, pad=1)
        self.conv4_2 = L.Convolution2D(512, 3, pad=1)
        self.conv4_3 = L.Convolution2D(512, 3, pad=1)
        # Normalization layer with its learnable scale initialized to 20.
        self.norm4 = Normalize(512, initial=initializers.Constant(20))

        self.conv5_1 = L.DilatedConvolution2D(512, 3, pad=1)
        self.conv5_2 = L.DilatedConvolution2D(512, 3, pad=1)
        self.conv5_3 = L.DilatedConvolution2D(512, 3, pad=1)

        # conv6 dilates its 3x3 kernel by 6; the matching pad=6 keeps the
        # feature map size unchanged at stride 1.
        self.conv6 = L.DilatedConvolution2D(1024, 3, pad=6, dilate=6)
        self.conv7 = L.Convolution2D(1024, 1)
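For reference, the output spatial size of a dilated convolution uses the effective kernel extent dilate*(ksize-1)+1, i.e. out = (in + 2*pad - dilate*(ksize-1) - 1) // stride + 1. This is why the 3x3 layers above pair pad=1 with the default dilate=1, while conv6 pairs pad=6 with dilate=6: both keep the feature map size at stride 1. A quick check with chainer.utils.get_conv_outsize; the input sizes 38 and 19 are illustrative values, not taken from the code above:

from chainer.utils import get_conv_outsize

print(get_conv_outsize(38, 3, 1, 1, d=1))  # -> 38 (plain 3x3, pad=1)
print(get_conv_outsize(19, 3, 1, 6, d=6))  # -> 19 (3x3, pad=6, dilate=6)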
Example 3
def __init__(self, in_size, out_size, ch, stride=1):
    super(DilatedBottleNeckA, self).__init__()
    initialW = chainer.initializers.HeNormal()

    with self.init_scope():
        # Main path: 1x1 reduce -> dilated 3x3 -> 1x1 expand to out_size.
        self.conv1 = L.Convolution2D(
            in_size, ch, 1, stride, 0, initialW=initialW, nobias=True)
        self.bn1 = L.BatchNormalization(ch, eps=self.eps)
        # pad=2 with dilate=2 keeps the spatial size of the 3x3 convolution.
        self.conv2 = L.DilatedConvolution2D(
            ch, ch, 3, 1, 2, dilate=2,
            initialW=initialW, nobias=True)
        self.bn2 = L.BatchNormalization(ch, eps=self.eps)
        self.conv3 = L.Convolution2D(
            ch, out_size, 1, 1, 0, initialW=initialW, nobias=True)
        self.bn3 = L.BatchNormalization(out_size, eps=self.eps)

        # Projection shortcut matching the main path's output shape.
        self.conv4 = L.Convolution2D(
            in_size, out_size, 1, stride, 0,
            initialW=initialW, nobias=True)
        self.bn4 = L.BatchNormalization(out_size)
Example 4
def __init__(self, nb_in, nb_out, ksize=3, dilate=1, no_bn=False):
    super(DConv_BN, self).__init__()
    self.no_bn = no_bn
    with self.init_scope():
        # Dilate and pad only along the first spatial axis; the (ksize, 1)
        # kernel leaves the second axis untouched.
        self.conv = L.DilatedConvolution2D(
            nb_in, nb_out, ksize=(ksize, 1), pad=(dilate, 0), dilate=(dilate, 1))
        if not no_bn:
            self.bn = L.BatchNormalization(nb_out)
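The tuple arguments above restrict the dilation to the first spatial axis, which suits temporal convolutions over inputs of shape (batch, channels, time, 1). For the default ksize=3, pad=(dilate, 0) exactly offsets the dilated kernel extent, so the time dimension is preserved. A small shape sketch, with channel counts and sequence length assumed for illustration:

import numpy as np
import chainer.links as L

dconv = L.DilatedConvolution2D(16, 32, ksize=(3, 1), pad=(2, 0), dilate=(2, 1))
x = np.zeros((4, 16, 100, 1), dtype=np.float32)  # (batch, channels, time, 1)
print(dconv(x).shape)  # -> (4, 32, 100, 1)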
Example 5
def setUp(self):
    self.link = links.DilatedConvolution2D(
        3, 2, 3, stride=2, pad=2, dilate=2)
    # Re-initialize the bias randomly and clear any accumulated gradients.
    b = self.link.b.data
    b[...] = numpy.random.uniform(-1, 1, b.shape)
    self.link.cleargrads()

    self.x = numpy.random.uniform(-1, 1,
                                  (2, 3, 4, 3)).astype(numpy.float32)
    # gy matches the link's output shape for this input.
    self.gy = numpy.random.uniform(-1, 1,
                                   (2, 2, 2, 2)).astype(numpy.float32)
Example 6
def setUp(self):
    self.link = links.DilatedConvolution2D(*self.args, **self.kwargs)
    self.x = numpy.random.uniform(-1, 1,
                                  (2, 3, 4, 3)).astype(numpy.float32)
    # One forward pass creates any lazily initialized parameters
    # (e.g. W when in_channels is None).
    self.link(chainer.Variable(self.x))
    b = self.link.b.data
    b[...] = numpy.random.uniform(-1, 1, b.shape)
    self.link.cleargrads()
    self.gy = numpy.random.uniform(-1, 1,
                                   (2, 2, 2, 2)).astype(numpy.float32)
Example 7
def __init__(self, in_size, ch):
    super(DilatedBottleNeckB, self).__init__()
    initialW = chainer.initializers.HeNormal()

    with self.init_scope():
        # 1x1 reduce -> dilated 3x3 -> 1x1 expand back to in_size channels,
        # so the block's output shape matches its input.
        self.conv1 = L.Convolution2D(
            in_size, ch, 1, 1, 0, initialW=initialW, nobias=True)
        self.bn1 = L.BatchNormalization(ch, eps=self.eps)
        self.conv2 = L.DilatedConvolution2D(
            ch, ch, 3, 1, 2, dilate=2,
            initialW=initialW, nobias=True)
        self.bn2 = L.BatchNormalization(ch, eps=self.eps)
        self.conv3 = L.Convolution2D(
            ch, in_size, 1, 1, 0, initialW=initialW, nobias=True)
        self.bn3 = L.BatchNormalization(in_size, eps=self.eps)