Python chainer.ChainList() Examples
The following are 30 code examples of chainer.ChainList().
The original project and source file are noted above each example.
You may also want to check out all available functions and classes of the module chainer.
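Before the examples, here is a minimal, self-contained sketch of the typical ChainList pattern (the class and names are illustrative, not taken from any project below): a ChainList registers an ordered, index-addressable list of child links, which is convenient when the number of layers is a constructor argument rather than fixed.

import chainer
import chainer.functions as F
import chainer.links as L
import numpy as np


class StackedMLP(chainer.ChainList):
    """Illustrative variable-depth MLP built on ChainList."""

    def __init__(self, n_units, n_layers, n_out):
        layers = [L.Linear(None, n_units) for _ in range(n_layers)]
        layers.append(L.Linear(None, n_out))
        # ChainList(*links) registers each link under its index: '0', '1', ...
        super().__init__(*layers)

    def __call__(self, x):
        # A ChainList is iterable and supports len() and integer indexing.
        for i, layer in enumerate(self):
            x = layer(x)
            if i < len(self) - 1:
                x = F.relu(x)
        return x


model = StackedMLP(n_units=64, n_layers=2, n_out=10)
y = model(np.zeros((1, 784), dtype=np.float32))  # y.shape == (1, 10)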
Example #1
Source File: behavioral_cloning.py From baselines with MIT License
def __init__(self, n_actions, n_input_channels=4,
             activation=F.relu, bias=0.1, hiddens=None,
             action_wrapper=None):
    self.n_actions = n_actions
    self.n_input_channels = n_input_channels
    self.activation = activation
    self.hiddens = [512] if hiddens is None else hiddens
    assert action_wrapper in ['discrete', 'continuous']
    self.action_wrapper = action_wrapper

    super().__init__()
    with self.init_scope():
        self.conv_layers = chainer.ChainList(
            L.Convolution2D(n_input_channels, 32, 8, stride=4,
                            initial_bias=bias),
            L.Convolution2D(32, 64, 4, stride=2, initial_bias=bias),
            L.Convolution2D(64, 64, 3, stride=1, initial_bias=bias))
        self.a_stream = chainerrl.links.mlp.MLP(None, n_actions, self.hiddens)
Example #2
Source File: fpn.py From chainer-compiler with MIT License
def __init__(self, base, n_base_output, scales):
    super(FPN, self).__init__()
    with self.init_scope():
        self.base = base
        self.inner = chainer.ChainList()
        self.outer = chainer.ChainList()

    init = {'initialW': initializers.GlorotNormal()}
    for _ in range(n_base_output):
        self.inner.append(L.Convolution2D(256, 1, **init))
        self.outer.append(L.Convolution2D(256, 3, pad=1, **init))

    self.scales = scales

    # hacks
    self.n_base_output = n_base_output
    self.n_base_output_minus1 = n_base_output - 1
    self.scales_minus_n_base_output = len(scales) - n_base_output
Example #3
Source File: dueling_dqn.py From chainerrl with MIT License
def __init__(self, n_actions, n_input_channels=4,
             activation=F.relu, bias=0.1):
    self.n_actions = n_actions
    self.n_input_channels = n_input_channels
    self.activation = activation

    super().__init__()
    with self.init_scope():
        self.conv_layers = chainer.ChainList(
            L.Convolution2D(n_input_channels, 32, 8, stride=4,
                            initial_bias=bias),
            L.Convolution2D(32, 64, 4, stride=2, initial_bias=bias),
            L.Convolution2D(64, 64, 3, stride=1, initial_bias=bias))
        self.a_stream = MLP(3136, n_actions, [512])
        self.v_stream = MLP(3136, 1, [512])
Example #4
Source File: net.py From chainer-stylegan with MIT License
def __init__(self, ch=512):
    super().__init__()
    self.ch = ch
    with self.init_scope():
        self.l = chainer.ChainList(
            EqualizedLinear(ch, ch), LinkLeakyRelu(),
            EqualizedLinear(ch, ch), LinkLeakyRelu(),
            EqualizedLinear(ch, ch), LinkLeakyRelu(),
            EqualizedLinear(ch, ch), LinkLeakyRelu(),
            EqualizedLinear(ch, ch), LinkLeakyRelu(),
            EqualizedLinear(ch, ch), LinkLeakyRelu(),
            EqualizedLinear(ch, ch), LinkLeakyRelu(),
            EqualizedLinear(ch, ch), LinkLeakyRelu(),
        )
    self.ln = len(self.l)
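A mapping network assembled this way is usually applied by folding the latent vector through every registered link in order. A hypothetical forward pass consistent with the constructor above (not taken from the chainer-stylegan source):

def __call__(self, z):
    # Each child is either an EqualizedLinear or a LinkLeakyRelu,
    # so applying them in registration order alternates the two.
    h = z
    for link in self.l:
        h = link(h)
    return h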
Example #5
Source File: test_link.py From chainer with MIT License
def setUp(self):
    self.l1 = chainer.Link()
    with self.l1.init_scope():
        self.l1.x = chainer.Parameter(shape=(2, 3))
        self.l1.y = chainer.Parameter()
    self.l2 = chainer.Link()
    with self.l2.init_scope():
        self.l2.x = chainer.Parameter(shape=2)
    self.l3 = chainer.Link()
    with self.l3.init_scope():
        self.l3.x = chainer.Parameter(shape=3)
    self.l4 = chainer.Link()
    self.l5 = chainer.Link()
    self.l6 = chainer.Link()
    self.c1 = chainer.ChainList(self.l1)
    self.c1.add_link(self.l2)
    self.c2 = chainer.ChainList(self.c1)
    self.c2.append(self.l3)
    self.c3 = chainer.ChainList(self.l4)
Example #6
Source File: test_link.py From chainer with MIT License
def test_copyparams(self):
    l1 = chainer.Link()
    with l1.init_scope():
        l1.x = chainer.Parameter(shape=(2, 3))
        l1.y = chainer.Parameter()
    l2 = chainer.Link()
    with l2.init_scope():
        l2.x = chainer.Parameter(shape=2)
    l3 = chainer.Link()
    with l3.init_scope():
        l3.x = chainer.Parameter(shape=3)
    c1 = chainer.ChainList(l1, l2)
    c2 = chainer.ChainList(c1, l3)
    l1.x.data.fill(0)
    l2.x.data.fill(1)
    l3.x.data.fill(2)

    self.c2.copyparams(c2)

    numpy.testing.assert_array_equal(self.l1.x.data, l1.x.data)
    numpy.testing.assert_array_equal(self.l2.x.data, l2.x.data)
    numpy.testing.assert_array_equal(self.l3.x.data, l3.x.data)
Example #7
Source File: test_link.py From chainer with MIT License
def test_serialize(self):
    l1 = chainer.Link()
    with l1.init_scope():
        l1.y = chainer.Parameter(shape=(1, 1))

    l2 = chainer.Link()
    with l2.init_scope():
        l2.x = chainer.Parameter(0, 2)

    c1 = chainer.ChainList(l1, l2)
    mocks = {'0': mock.MagicMock(), '1': mock.MagicMock()}
    serializer = mock.MagicMock()
    serializer.__getitem__.side_effect = lambda k: mocks[k]
    serializer.return_value = None
    mocks['0'].return_value = None
    mocks['1'].return_value = None
    c1.serialize(serializer)

    self.assertEqual(serializer.call_count, 0)
    self.assertEqual(serializer.__getitem__.call_count, 2)
    serializer.__getitem__.assert_any_call('0')
    serializer.__getitem__.assert_any_call('1')
    mocks['0'].assert_called_with('y', l1.y.data)
    mocks['1'].assert_called_with('x', l2.x.data)
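As the assertions above suggest, a ChainList serializes each child under its string index ('0', '1', ...), so parameters round-trip positionally. A small illustrative save/load cycle, assuming two structurally identical ChainLists:

import chainer
import chainer.links as L
from chainer import serializers

ch = chainer.ChainList(L.Linear(3, 4), L.Linear(4, 2))
serializers.save_npz('chainlist.npz', ch)  # stores keys such as '0/W', '0/b', '1/W', '1/b'

fresh = chainer.ChainList(L.Linear(3, 4), L.Linear(4, 2))
serializers.load_npz('chainlist.npz', fresh)  # children are matched by position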
Example #8
Source File: mlp_decoder.py From models with MIT License
def __init__(self, n_in_node, edge_types, msg_hid, msg_out, n_hid,
             do_prob=0., skip_first=False):
    super(MLPDecoder, self).__init__()
    w = chainer.initializers.LeCunUniform(scale=(1. / np.sqrt(3)))
    b = self._bias_initializer
    with self.init_scope():
        self.msg_fc1 = chainer.ChainList(
            *[L.Linear(2 * n_in_node, msg_hid) for _ in range(edge_types)])
        self.msg_fc2 = chainer.ChainList(
            *[L.Linear(msg_hid, msg_out) for _ in range(edge_types)])
        self.out_fc1 = L.Linear(n_in_node + msg_out, n_hid,
                                initialW=w, initial_bias=b)
        self.out_fc2 = L.Linear(n_hid, n_hid, initialW=w, initial_bias=b)
        self.out_fc3 = L.Linear(n_hid, n_in_node, initialW=w, initial_bias=b)
    self.msg_out_shape = msg_out
    self.skip_first_edge_type = skip_first

    logger = logging.getLogger(__name__)
    logger.info('Using learned interaction net decoder.')

    self.dropout_prob = do_prob
Example #9
Source File: rnn_decoder.py From models with MIT License
def __init__(self, n_in_node, edge_types, n_hid,
             do_prob=0., skip_first=False):
    super(RNNDecoder, self).__init__()
    self.msg_fc1 = chainer.ChainList(
        *[L.Linear(2 * n_hid, n_hid) for _ in range(edge_types)])
    self.msg_fc2 = chainer.ChainList(
        *[L.Linear(n_hid, n_hid) for _ in range(edge_types)])
    self.msg_out_shape = n_hid
    self.skip_first_edge_type = skip_first

    self.hidden_r = L.Linear(n_hid, n_hid, nobias=True)
    self.hidden_i = L.Linear(n_hid, n_hid, nobias=True)
    self.hidden_h = L.Linear(n_hid, n_hid, nobias=True)

    self.input_r = L.Linear(n_in_node, n_hid)
    self.input_i = L.Linear(n_in_node, n_hid)
    self.input_n = L.Linear(n_in_node, n_hid)

    self.out_fc1 = L.Linear(n_hid, n_hid)
    self.out_fc2 = L.Linear(n_hid, n_hid)
    self.out_fc3 = L.Linear(n_hid, n_in_node)

    print('Using learned recurrent interaction net decoder.')

    self.dropout_prob = do_prob
Example #10
Source File: highway.py From models with MIT License
def __init__(self, input_dim, num_layers=1, activation=F.relu):
    super(Highway, self).__init__()
    self._input_dim = input_dim
    with self.init_scope():
        self._layers = chainer.ChainList(
            *[L.Linear(input_dim, input_dim * 2)
              for _ in range(num_layers)])
    self._activation = activation
    for layer in self._layers:
        # We should bias the highway layer to just carry its input
        # forward. We do that by setting the bias on `B(x)` to be
        # positive, because that means `g` will be biased to be high,
        # so we will carry the input forward. The bias on `B(x)` is the
        # second half of the bias vector in each Linear layer.
        layer.b.data[input_dim:] = 1.
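For context, the gate bias set above targets a forward pass in which each Linear output is split into a transform half A(x) and a gate half B(x); with the gate biased positive, sigmoid(B(x)) starts near 1 and the layer mostly carries its input. A hypothetical forward pass matching that intent (not quoted from the source file):

def __call__(self, inputs):
    current_input = inputs
    for layer in self._layers:
        projected = layer(current_input)
        # First half is the transform A(x); second half is the gate B(x).
        part, gate = F.split_axis(projected, 2, axis=1)
        part = self._activation(part)
        gate = F.sigmoid(gate)
        current_input = gate * current_input + (1 - gate) * part
    return current_input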
Example #11
Source File: multibox.py From chainercv with MIT License
def __init__(
        self, n_class, aspect_ratios,
        initialW=None, initial_bias=None):
    self.n_class = n_class
    self.aspect_ratios = aspect_ratios

    super(Multibox, self).__init__()
    with self.init_scope():
        self.loc = chainer.ChainList()
        self.conf = chainer.ChainList()

    if initialW is None:
        initialW = initializers.LeCunUniform()
    if initial_bias is None:
        initial_bias = initializers.Zero()
    init = {'initialW': initialW, 'initial_bias': initial_bias}

    for ar in aspect_ratios:
        n = (len(ar) + 1) * 2
        self.loc.add_link(L.Convolution2D(n * 4, 3, pad=1, **init))
        self.conf.add_link(L.Convolution2D(
            n * self.n_class, 3, pad=1, **init))
Example #12
Source File: test_noisy_chain.py From chainerrl with MIT License
def test_chainlist(self):
    ch = chainer.ChainList(
        chainer.links.Linear(3, 4),
        chainer.links.Linear(5),
        chainer.links.PReLU(),
    )
    self.assertEqual(
        names_of_links(ch),
        {'/0', '/1', '/2'})
    to_factorized_noisy(ch)
    self.assertEqual(
        names_of_links(ch),
        {'/0', '/0/mu', '/0/sigma',
         '/1', '/1/mu', '/1/sigma', '/2'})
Example #13
Source File: kdnet_cls.py From chainer-pointnet with MIT License
def __init__(self, out_dim, in_dim=3, max_level=10, dropout_ratio=-1,
             use_bn=True, compute_accuracy=True, cdim=3):
    super(KDNetCls, self).__init__()
    if max_level <= 10:
        # depth 10
        ch_list = [in_dim] + [32, 64, 64, 128, 128, 256, 256, 512, 512,
                              128]
        ch_list = ch_list[:max_level + 1]
    elif max_level <= 15:
        # depth 15
        ch_list = [in_dim] + [16, 16, 32, 32, 64, 64, 128, 128, 256, 256,
                              512, 512, 1024, 1024, 128]
        ch_list = ch_list[:max_level + 1]
    else:
        raise NotImplementedError('depth {} is not implemented yet'
                                  .format(max_level))
    with self.init_scope():
        self.kdconvs = chainer.ChainList(
            *[KDConv(ch_list[i], ch_list[i + 1], use_bn=use_bn, cdim=cdim)
              for i in range(len(ch_list) - 1)])
        self.linear = links.Linear(ch_list[-1], out_dim)
    self.compute_accuracy = compute_accuracy
    self.max_level = max_level
    self.dropout_ratio = dropout_ratio
Example #14
Source File: set_abstraction_all_block.py From chainer-pointnet with MIT License
def __init__(self, mlp, mlp2, in_channels=None, use_bn=True,
             activation=functions.relu, residual=False):
    # k is the number of sampled points (num_region)
    super(SetAbstractionGroupAllModule, self).__init__()
    # Feature Extractor channel list
    assert isinstance(mlp, list)
    fe_ch_list = [in_channels] + mlp
    # Head channel list
    if mlp2 is None:
        mlp2 = []
    assert isinstance(mlp2, list)
    head_ch_list = [mlp[-1]] + mlp2
    with self.init_scope():
        self.sampling_grouping = SamplingGroupingAllModule()
        self.feature_extractor_list = chainer.ChainList(
            *[ConvBlock(fe_ch_list[i], fe_ch_list[i + 1], ksize=1,
                        use_bn=use_bn, activation=activation,
                        residual=residual) for i in range(len(mlp))])
        self.head_list = chainer.ChainList(
            *[ConvBlock(head_ch_list[i], head_ch_list[i + 1], ksize=1,
                        use_bn=use_bn, activation=activation,
                        residual=residual) for i in range(len(mlp2))])
    self.use_bn = use_bn
Example #15
Source File: weavenet.py From chainer-chemistry with MIT License
def __init__(self, weave_channels=None, hidden_dim=16,
             n_atom=WEAVE_DEFAULT_NUM_MAX_ATOMS, n_sub_layer=1,
             n_atom_types=MAX_ATOMIC_NUM, readout_mode='sum'):
    weave_channels = weave_channels or WEAVENET_DEFAULT_WEAVE_CHANNELS
    weave_module = [
        WeaveModule(n_atom, c, n_sub_layer, readout_mode=readout_mode)
        for c in weave_channels
    ]

    super(WeaveNet, self).__init__()
    with self.init_scope():
        self.embed = EmbedAtomID(out_size=hidden_dim,
                                 in_size=n_atom_types)
        self.weave_module = chainer.ChainList(*weave_module)
        self.readout = GeneralReadout(mode=readout_mode)
    self.readout_mode = readout_mode
Example #16
Source File: schnet.py From chainer-chemistry with MIT License
def __init__(self, out_dim=1, hidden_channels=64, n_update_layers=3,
             readout_hidden_dim=32, n_atom_types=MAX_ATOMIC_NUM,
             concat_hidden=False, num_rbf=300, radius_resolution=0.1,
             gamma=10.0):
    super(SchNet, self).__init__()
    with self.init_scope():
        self.embed = EmbedAtomID(out_size=hidden_channels,
                                 in_size=n_atom_types)
        self.update_layers = chainer.ChainList(
            *[SchNetUpdate(
                hidden_channels, num_rbf=num_rbf,
                radius_resolution=radius_resolution,
                gamma=gamma) for _ in range(n_update_layers)])
        self.readout_layer = SchNetReadout(
            out_dim, in_channels=None,
            hidden_channels=readout_hidden_dim)
    self.out_dim = out_dim
    self.hidden_channels = hidden_channels
    self.readout_hidden_dim = readout_hidden_dim
    self.n_update_layers = n_update_layers
    self.concat_hidden = concat_hidden
Example #17
Source File: nfp.py From chainer-chemistry with MIT License
def __init__(self, out_dim, hidden_channels=16, n_update_layers=4,
             max_degree=6, n_atom_types=MAX_ATOMIC_NUM,
             concat_hidden=False):
    super(NFP, self).__init__()
    n_degree_types = max_degree + 1
    with self.init_scope():
        self.embed = EmbedAtomID(in_size=n_atom_types,
                                 out_size=hidden_channels)
        self.layers = chainer.ChainList(
            *[NFPUpdate(hidden_channels, hidden_channels,
                        max_degree=max_degree)
              for _ in range(n_update_layers)])
        self.readout_layers = chainer.ChainList(
            *[NFPReadout(out_dim=out_dim, in_channels=hidden_channels)
              for _ in range(n_update_layers)])
    self.out_dim = out_dim
    self.hidden_channels = hidden_channels
    self.max_degree = max_degree
    self.n_degree_types = n_degree_types
    self.n_update_layers = n_update_layers
    self.concat_hidden = concat_hidden
Example #18
Source File: megnet.py From chainer-chemistry with MIT License
def __init__(self, out_dim=32, n_update_layers=3, dropout_ratio=-1,
             activation=megnet_softplus):
    super(MEGNet, self).__init__()
    if n_update_layers <= 0:
        raise ValueError('n_update_layers must be a positive integer, '
                         'but it was set to {}'.format(n_update_layers))

    self.n_update_layers = n_update_layers
    with self.init_scope():
        self.update_layers = chainer.ChainList(
            *[MEGNetUpdate(
                dim_for_dense=[64, 32], dim_for_update=[64, 64, 32],
                dropout_ratio=dropout_ratio, activation=activation,
                skip_intermediate=(i == 0)
            ) for i in range(n_update_layers)])
        self.readout = MEGNetReadout(out_dim=out_dim, in_channels=32,
                                     n_layers=1, processing_steps=3,
                                     dropout_ratio=dropout_ratio,
                                     activation=activation)
Example #19
Source File: relgcn.py From graph-nvp with MIT License
def __init__(self, out_channels=64, num_edge_type=4, ch_list=None,
             n_atom_types=MAX_ATOMIC_NUM, input_type='int',
             scale_adj=False, activation=F.tanh):
    super(RelGCN, self).__init__()

    ch_list = ch_list or [16, 128, 64]
    # ch_list = [in_channels] + ch_list

    with self.init_scope():
        if input_type == 'int':
            self.embed = EmbedAtomID(out_size=ch_list[0],
                                     in_size=n_atom_types)
        elif input_type == 'float':
            self.embed = GraphLinear(None, ch_list[0])
        else:
            raise ValueError(
                "[ERROR] Unexpected value input_type={}".format(input_type))
        self.rgcn_convs = chainer.ChainList(*[
            RelGCNUpdate(ch_list[i], ch_list[i + 1], num_edge_type)
            for i in range(len(ch_list) - 1)])
        self.rgcn_readout = RelGCNReadout(ch_list[-1], out_channels)
    # self.num_relations = num_edge_type
    self.input_type = input_type
    self.scale_adj = scale_adj
    self.activation = activation
Example #20
Source File: policies.py From baselines with MIT License
def __init__(self, n_actions, n_input_channels=4,
             activation=F.relu, bias=0.1,
             var_param_init=0,
             # var_func=F.softplus,
             hiddens=None):
    self.n_input_channels = n_input_channels
    self.activation = activation
    self.hiddens = [512] if hiddens is None else hiddens
    # self.var_func = var_func

    super(ActorTRPONetForContinuous, self).__init__()
    with self.init_scope():
        self.conv_layers = chainer.ChainList(
            L.Convolution2D(n_input_channels, 32, 8, stride=4,
                            initial_bias=bias),
            L.Convolution2D(32, 64, 4, stride=2, initial_bias=bias),
            L.Convolution2D(64, 64, 3, stride=1, initial_bias=bias))
        self.a_stream = chainerrl.links.mlp.MLP(None, n_actions, self.hiddens)
        self.var_param = chainer.Parameter(
            initializer=var_param_init, shape=(1,))
        # self.var_param = chainer.Parameter(
        #     initializer=var_param_init, shape=(n_actions,))  # independent
Example #21
Source File: nets.py From dynamic_routing_between_capsules with MIT License
def __init__(self, use_reconstruction=False):
    super(CapsNet, self).__init__()
    self.n_iterations = 3  # dynamic routing
    self.n_grids = 6  # grid width of primary capsules layer
    self.n_raw_grids = self.n_grids
    self.use_reconstruction = use_reconstruction
    with self.init_scope():
        self.conv1 = L.Convolution2D(1, 256, ksize=9, stride=1,
                                     initialW=init)
        self.conv2 = L.Convolution2D(256, 32 * 8, ksize=9, stride=2,
                                     initialW=init)
        self.Ws = chainer.ChainList(
            *[L.Convolution2D(8, 16 * 10, ksize=1, stride=1,
                              initialW=init) for i in range(32)])
        self.fc1 = L.Linear(16 * 10, 512, initialW=init)
        self.fc2 = L.Linear(512, 1024, initialW=init)
        self.fc3 = L.Linear(1024, 784, initialW=init)

    _count_params(self, n_grids=self.n_grids)
    self.results = {'N': 0., 'loss': [], 'correct': [],
                    'cls_loss': [], 'rcn_loss': []}
Example #22
Source File: policies.py From baselines with MIT License
def __init__(self, action_space, n_input_channels=4,
             activation=F.relu, bias=0.1, var_param_init=0,
             hiddens=None):
    n_actions = action_space.high + 1
    self.n_input_channels = n_input_channels
    self.activation = activation
    self.hiddens = [512] if hiddens is None else hiddens

    super(ActorTRPONetForMultiDimensionalSoftmax, self).__init__()
    with self.init_scope():
        self.conv_layers = chainer.ChainList(
            L.Convolution2D(n_input_channels, 32, 8, stride=4,
                            initial_bias=bias),
            L.Convolution2D(32, 64, 4, stride=2, initial_bias=bias),
            L.Convolution2D(64, 64, 3, stride=1, initial_bias=bias))
        self.hidden_layers = chainer.ChainList(
            *[L.Linear(None, hidden) for hidden in self.hiddens])
        self.action_layers = chainer.ChainList(
            *[L.Linear(None, n) for n in n_actions])
Example #23
Source File: policies.py From baselines with MIT License
def __init__(self, n_input_channels=4, activation=F.relu, bias=0.1,
             action_wrapper=None, hiddens=None):
    self.n_input_channels = n_input_channels
    self.activation = activation
    self.hiddens = [512, 512] if hiddens is None else hiddens
    assert action_wrapper in ['discrete', 'continuous',
                              'multi-dimensional-softmax']
    self.action_wrapper = action_wrapper

    super(DiscNet, self).__init__()
    with self.init_scope():
        self.conv_layers = chainer.ChainList(
            L.Convolution2D(n_input_channels, 32, 8, stride=4,
                            initial_bias=bias),
            L.Convolution2D(32, 64, 4, stride=2, initial_bias=bias),
            L.Convolution2D(64, 64, 3, stride=1, initial_bias=bias))
        self.a_stream = chainerrl.links.mlp.MLP(
            None, 1, self.hiddens, nonlinearity=self.activation)
Example #24
Source File: lm.py From espnet with Apache License 2.0
def __init__(self, n_vocab, n_layers, n_units, typ="lstm"):
    super(RNNLM, self).__init__()
    with self.init_scope():
        self.embed = DL.EmbedID(n_vocab, n_units)
        self.rnn = (
            chainer.ChainList(
                *[L.StatelessLSTM(n_units, n_units) for _ in range(n_layers)]
            )
            if typ == "lstm"
            else chainer.ChainList(
                *[L.StatelessGRU(n_units, n_units) for _ in range(n_layers)]
            )
        )
        self.lo = L.Linear(n_units, n_vocab)

    for param in self.params():
        param.data[...] = np.random.uniform(-0.1, 0.1, param.data.shape)
    self.n_layers = n_layers
    self.n_units = n_units
    self.typ = typ
Example #25
Source File: weavenet.py From chainer-chemistry with MIT License
def __init__(self, n_channel, n_layer, n_atom, mode='sum'):
    super(PairToAtom, self).__init__()
    with self.init_scope():
        self.linearLayer = chainer.ChainList(
            *[links.Linear(None, n_channel) for _ in range(n_layer)]
        )
        self.readout = GeneralReadout(mode=mode)
    self.n_atom = n_atom
    self.n_channel = n_channel
    self.mode = mode
Example #26
Source File: async_.py From chainerrl with MIT License
def _set_persistent_values_recursively(link, persistent_name, shared_array):
    if persistent_name.startswith('/'):
        persistent_name = persistent_name[1:]
    if hasattr(link, persistent_name):
        attr_name = persistent_name
        attr = getattr(link, attr_name)
        if isinstance(attr, np.ndarray):
            setattr(link, persistent_name, np.frombuffer(
                shared_array, dtype=attr.dtype).reshape(attr.shape))
        else:
            assert np.isscalar(attr)
            # We wrap scalars with np.ndarray because
            # multiprocessing.RawValue cannot be used as a scalar, while
            # np.ndarray can be.
            typecode = np.asarray(attr).dtype.char
            setattr(link, attr_name, np.frombuffer(
                shared_array, dtype=typecode).reshape(()))
    else:
        assert isinstance(link, (chainer.Chain, chainer.ChainList))
        assert '/' in persistent_name
        child_name, remaining = persistent_name.split('/', 1)
        if isinstance(link, chainer.Chain):
            _set_persistent_values_recursively(
                getattr(link, child_name), remaining, shared_array)
        else:
            _set_persistent_values_recursively(
                link[int(child_name)], remaining, shared_array)
Example #27
Source File: namedpersistent.py From chainerrl with MIT License
def _namedchildren(link):
    if isinstance(link, chainer.Chain):
        for name in sorted(link._children):
            yield name, link.__dict__[name]
    elif isinstance(link, chainer.ChainList):
        for idx, child in enumerate(link._children):
            yield str(idx), child
Example #28
Source File: mlp.py From chainer-chemistry with MIT License
def __init__(self, out_dim, hidden_dim=16, n_layers=2, activation=relu):
    super(MLP, self).__init__()
    if n_layers <= 0:
        raise ValueError('n_layers must be a positive integer, but it was '
                         'set to {}'.format(n_layers))
    layers = [links.Linear(None, hidden_dim) for i in range(n_layers - 1)]
    with self.init_scope():
        self.layers = chainer.ChainList(*layers)
        self.l_out = links.Linear(None, out_dim)
    self.activation = activation
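The constructor above only registers the layers; the matching forward pass (not shown in this excerpt) would typically fold the input through the ChainList and finish with the output layer, roughly:

def __call__(self, x):
    # Hypothetical forward pass consistent with the constructor above.
    h = x
    for layer in self.layers:
        h = self.activation(layer(h))
    return self.l_out(h)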
Example #29
Source File: cgcnn.py From chainer-chemistry with MIT License
def __init__(self, out_dim=128, n_update_layers=3, n_atom_features=64):
    super(CGCNN, self).__init__()
    with self.init_scope():
        self.atom_feature_embedding = links.Linear(None, n_atom_features)
        self.crystal_convs = chainer.ChainList(
            *[CGCNNUpdate(n_atom_features)
              for _ in range(n_update_layers)]
        )
        self.readout = CGCNNReadout(out_dim=out_dim)
Example #30
Source File: ggnn.py From chainer-chemistry with MIT License
def __init__(self, out_dim, hidden_channels=16, n_update_layers=4,
             n_atom_types=MAX_ATOMIC_NUM, concat_hidden=False,
             weight_tying=True, activation=functions.identity,
             n_edge_types=4):
    super(GGNN, self).__init__()
    n_readout_layer = n_update_layers if concat_hidden else 1
    n_message_layer = 1 if weight_tying else n_update_layers
    with self.init_scope():
        # Update
        self.embed = EmbedAtomID(
            out_size=hidden_channels, in_size=n_atom_types)
        self.update_layers = chainer.ChainList(*[GGNNUpdate(
            hidden_channels=hidden_channels, n_edge_types=n_edge_types)
            for _ in range(n_message_layer)])
        # Readout
        self.readout_layers = chainer.ChainList(*[GGNNReadout(
            out_dim=out_dim, in_channels=hidden_channels * 2,
            activation=activation, activation_agg=activation)
            for _ in range(n_readout_layer)])
    self.out_dim = out_dim
    self.hidden_channels = hidden_channels
    self.n_update_layers = n_update_layers
    self.n_edge_types = n_edge_types
    self.activation = activation
    self.concat_hidden = concat_hidden
    self.weight_tying = weight_tying
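Note how weight_tying controls the size of the update ChainList: with tying enabled it holds a single GGNNUpdate that is reused at every step. A hypothetical message-passing loop consistent with this constructor (the names atom_array and adj, and the update-layer call signature, are assumptions, not quoted from chainer-chemistry):

# Hypothetical update loop for the GGNN above.
h = self.embed(atom_array)
for step in range(self.n_update_layers):
    message_layer_index = 0 if self.weight_tying else step
    h = self.update_layers[message_layer_index](h, adj)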