Python chainer.links.NStepBiLSTM() Examples

The following are 10 code examples of chainer.links.NStepBiLSTM(), collected from open-source projects. Each example notes its source file, the project it comes from, and its license. You may also want to check out the other functions and classes available in the chainer.links module.
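
Before the examples, here is a minimal usage sketch (not taken from any of the projects below) showing the calling convention they all share: NStepBiLSTM consumes a list of variable-length sequences and returns the final hidden states, the final cell states, and the per-sequence outputs. The output feature dimension is out_size * 2 because the forward and backward directions are concatenated.

import numpy as np
import chainer.links as L

n_layers, in_size, out_size, dropout = 1, 4, 3, 0.0
rnn = L.NStepBiLSTM(n_layers, in_size, out_size, dropout)

# A batch of three sequences with different lengths.
xs = [np.random.uniform(-1, 1, (length, in_size)).astype(np.float32)
      for length in (5, 3, 2)]

# Passing None initializes the hidden and cell states to zeros.
hy, cy, ys = rnn(None, None, xs)

print(hy.shape)               # (n_layers * 2, 3, out_size) == (2, 3, 3)
print(cy.shape)               # (2, 3, 3)
print([y.shape for y in ys])  # [(5, 6), (3, 6), (2, 6)], i.e. out_size * 2
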
Example #1
Source File: initializer.py    From chainer-compiler with MIT License
import chainer.links as L


def collect_inits(lk, pathname):
    res = []
    for na, pa in lk.namedparams():
        if pa.data is None:
            continue
        if na.count('/') == 1:
            res.append((pathname + na, pa))

    if isinstance(lk, L.BatchNormalization):
        res.append((pathname + '/avg_mean', lk.avg_mean))
        # TODO(satos): As is, the node tests pass, but the ResNet tests suffer.
        # lk.avg_var = np.ones(lk.avg_var.shape).astype(np.float32) * 4.0
        res.append((pathname + '/avg_var', lk.avg_var))

    elif isinstance(lk, (L.NStepLSTM, L.NStepBiLSTM)):
        # Collect the per-layer parameters here first and return early.
        for i, clk in enumerate(lk.children()):
            for param in clk.params():
                res.append((pathname + '/%d/%s' % (i, param.name), param))
        return res

    for clk in lk.children():
        res += collect_inits(clk, pathname + '/' + clk.name)
    return res
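
A hypothetical driver for collect_inits (the Net chain below is an illustration, not part of the original file): it shows that NStepBiLSTM parameters come back flattened under per-layer paths such as '/rnn/0/w0', while ordinary links contribute paths like '/fc/W'.

import chainer
import chainer.links as L

class Net(chainer.Chain):
    def __init__(self):
        super(Net, self).__init__()
        with self.init_scope():
            self.rnn = L.NStepBiLSTM(1, 4, 3, 0.0)
            self.fc = L.Linear(6, 2)

# One (path, parameter) pair per initialized parameter.
for path, param in collect_inits(Net(), ''):
    print(path, param.shape)
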
Example #2
Source File: test_link_n_step_lstm.py    From chainer with MIT License
def setUp(self):
        shape = (self.n_layers * 2, len(self.lengths), self.out_size)
        if self.hidden_none:
            self.h = self.c = numpy.zeros(shape, 'f')
        else:
            self.h = numpy.random.uniform(-1, 1, shape).astype('f')
            self.c = numpy.random.uniform(-1, 1, shape).astype('f')
        self.xs = [
            numpy.random.uniform(-1, 1, (l, self.in_size)).astype('f')
            for l in self.lengths]

        self.gh = numpy.random.uniform(-1, 1, shape).astype('f')
        self.gc = numpy.random.uniform(-1, 1, shape).astype('f')
        self.gys = [
            numpy.random.uniform(-1, 1, (l, self.out_size * 2)).astype('f')
            for l in self.lengths]
        self.rnn = links.NStepBiLSTM(
            self.n_layers, self.in_size, self.out_size, self.dropout)

        for layer in self.rnn:
            for p in layer.params():
                p.array[...] = numpy.random.uniform(-1, 1, p.shape)
        self.rnn.cleargrads() 
Example #3
Source File: test_link_n_step_lstm.py    From chainer with MIT License
def check_multi_gpu_forward(self, train=True):
        # See chainer/chainer#6262
        # NStepBiLSTM with cuDNN and dropout should work on a device other
        # than the current one.
        msg = None
        rnn = self.rnn.copy('copy')
        rnn.dropout = .5
        with cuda.get_device_from_id(1):
            if self.hidden_none:
                h = None
            else:
                h = cuda.to_gpu(self.h)
            c = cuda.to_gpu(self.c)
            xs = [cuda.to_gpu(x) for x in self.xs]
            with testing.assert_warns(DeprecationWarning):
                rnn = rnn.to_gpu()
        with cuda.get_device_from_id(0),\
                chainer.using_config('train', train),\
                chainer.using_config('use_cudnn', 'always'):
            try:
                rnn(h, c, xs)
            except Exception as e:
                msg = e
        assert msg is None 
Example #4
Source File: model.py    From TSNetVocoder with BSD 3-Clause "New" or "Revised" License
def __init__(self, indim, outdim, normfac, fl=400, fs=80, fftl=512, fbsize=400):
        self.indim = indim
        self.outdim = outdim
        self.fl = fl
        self.fs = fs
        self.fftl = fftl
        self.fbsize = fbsize
        self.normfac = {'input'  : {'mean' : cuda.to_gpu(normfac['input']['mean']),
                                    'std' : cupy.fmax(cuda.to_gpu(normfac['input']['std']), 1.0E-6)},
                        'output' : {'mean' : cuda.to_gpu(normfac['output']['mean']),
                                    'std' : cupy.fmax(cuda.to_gpu(normfac['output']['std']), 1.0E-6)}}
        super(Model, self).__init__()
        with self.init_scope():
            self.lx1 = L.NStepBiLSTM(1, self.indim, self.indim//2, 0.0)
            self.lx2 = L.Convolution2D(1, self.indim, (5, self.indim), (1, 1), (2, 0))
            self.ly1 = L.NStepLSTM(3, self.fbsize+self.indim, 256, 0.0)
            self.ly2 = L.Linear(256, self.outdim) 
Example #5
Source File: links.py    From chainer-compiler with MIT License
def __init__(self, ch):
        super(Link_NStepBiLSTM, self).__init__(L.NStepBiLSTM(1, 1, 1, 0))
        # code.InteractiveConsole({'ch': ch}).interact()

        hd = next(ch.children())
        if hd.w0 is not None:
            self.n_in = hd.w0.shape[1]
        else:
            self.n_in = None

        self.out_size = ch.out_size
        self.n_layers = ch.n_layers
        self.dropout = ch.dropout

        self.ws = []
        self.bs = []
        for i in range(self.n_layers * 2):
            ws = []
            bs = []
            for j in range(8):
                ws.append(helper.make_tensor_value_info(
                    ('/%d/w%d' % (i, j)), TensorProto.FLOAT, ["TODO"]))
                bs.append(helper.make_tensor_value_info(
                    ('/%d/b%d' % (i, j)), TensorProto.FLOAT, ["TODO"]))
            self.ws.append(ws)
            self.bs.append(bs) 
Example #6
Source File: EspNet_BLSTM.py    From chainer-compiler with MIT License
def __init__(self, idim, elayers, cdim, hdim, dropout):
        super(BLSTM, self).__init__()
        with self.init_scope():
            self.nblstm = L.NStepBiLSTM(elayers, idim, cdim, dropout)
            self.l_last = L.Linear(cdim * 2, hdim) 
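
A sketch of how this encoder's links are typically applied (assumed usage inferred from the constructor, not copied from the project's forward code; it relies on BLSTM being a chainer.Chain, as the init_scope() usage implies):

import numpy as np

blstm = BLSTM(idim=40, elayers=2, cdim=128, hdim=64, dropout=0.2)
xs = [np.random.randn(length, 40).astype(np.float32) for length in (7, 5)]
_, _, ys = blstm.nblstm(None, None, xs)    # each ys[i]: (length, cdim * 2)
projected = [blstm.l_last(y) for y in ys]  # each: (length, hdim)
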
Example #7
Source File: NStepBiLSTM.py    From chainer-compiler with MIT License
def __init__(self, n_layer, n_in, n_out):
        super(A, self).__init__()
        with self.init_scope():
            self.l1 = L.NStepBiLSTM(n_layer, n_in, n_out, 0.1) 
Example #8
Source File: models.py    From EEND with MIT License
def __init__(self,
                 n_speakers=4,
                 dropout=0.25,
                 in_size=513,
                 hidden_size=256,
                 n_layers=1,
                 embedding_layers=1,
                 embedding_size=20,
                 dc_loss_ratio=0.5,
                 ):
        """ BLSTM-based diarization model.

        Args:
          n_speakers (int): Number of speakers in recording
          dropout (float): dropout ratio
          in_size (int): Dimension of input feature vector
          hidden_size (int): Number of hidden units in LSTM
          n_layers (int): Number of LSTM layers after embedding
          embedding_layers (int): Number of LSTM layers for embedding
          embedding_size (int): Dimension of embedding vector
          dc_loss_ratio (float): mixing parameter for DPCL loss
        """
        super(BLSTMDiarization, self).__init__()
        with self.init_scope():
            self.bi_lstm1 = L.NStepBiLSTM(
                n_layers, hidden_size * 2, hidden_size, dropout)
            self.bi_lstm_emb = L.NStepBiLSTM(
                embedding_layers, in_size, hidden_size, dropout)
            self.linear1 = L.Linear(hidden_size * 2, n_speakers)
            self.linear2 = L.Linear(hidden_size * 2, embedding_size)
        self.dc_loss_ratio = dc_loss_ratio
        self.n_speakers = n_speakers 
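
The constructor stacks two bidirectional LSTMs: bi_lstm_emb encodes the raw features, and bi_lstm1 consumes its concatenated forward/backward output, which is why bi_lstm1's input dimension is hidden_size * 2. A shape walk-through under assumed usage (illustrative only, not the project's actual forward pass):

import numpy as np

model = BLSTMDiarization()
xs = [np.random.randn(100, 513).astype(np.float32)]  # 100 frames, 513-dim features
_, _, emb = model.bi_lstm_emb(None, None, xs)  # emb[0]: (100, 512)
_, _, ys = model.bi_lstm1(None, None, emb)     # ys[0]: (100, 512)
activity = model.linear1(ys[0])                # (100, n_speakers)
embedding = model.linear2(ys[0])               # (100, embedding_size)
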
Example #9
Source File: encoders.py    From espnet with Apache License 2.0
def __init__(self, idim, elayers, cdim, hdim, subsample, dropout, typ="blstm"):
        super(RNNP, self).__init__()
        bidir = typ[0] == "b"
        if bidir:
            rnn = L.NStepBiLSTM if "lstm" in typ else L.NStepBiGRU
        else:
            rnn = L.NStepLSTM if "lstm" in typ else L.NStepGRU
        rnn_label = "birnn" if bidir else "rnn"
        with self.init_scope():
            for i in six.moves.range(elayers):
                if i == 0:
                    inputdim = idim
                else:
                    inputdim = hdim
                _cdim = 2 * cdim if bidir else cdim
                # bottleneck layer to merge
                setattr(
                    self, "{}{:d}".format(rnn_label, i), rnn(1, inputdim, cdim, dropout)
                )
                setattr(self, "bt%d" % i, L.Linear(_cdim, hdim))

        self.elayers = elayers
        self.rnn_label = rnn_label
        self.cdim = cdim
        self.subsample = subsample
        self.typ = typ
        self.bidir = bidir 
Example #10
Source File: encoders.py    From espnet with Apache License 2.0
def __init__(self, idim, elayers, cdim, hdim, dropout, typ="lstm"):
        super(RNN, self).__init__()
        bidir = typ[0] == "b"
        if bidir:
            rnn = L.NStepBiLSTM if "lstm" in typ else L.NStepBiGRU
        else:
            rnn = L.NStepLSTM if "lstm" in typ else L.NStepGRU
        _cdim = 2 * cdim if bidir else cdim
        with self.init_scope():
            self.nbrnn = rnn(elayers, idim, cdim, dropout)
            self.l_last = L.Linear(_cdim, hdim)
        self.typ = typ
        self.bidir = bidir
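
As a quick check of the typ dispatch above (assumed usage, not from the project): "bgru" starts with "b" and does not contain "lstm", so the encoder is built around NStepBiGRU and the projection input width doubles.

enc = RNN(idim=40, elayers=2, cdim=128, hdim=64, dropout=0.0, typ="bgru")
print(type(enc.nbrnn).__name__)  # NStepBiGRU; l_last is Linear(2 * cdim, hdim)
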