Python mxnet.initializer.Uniform() Examples

The following are 30 code examples of mxnet.initializer.Uniform(). Each example notes the original project and source file it was taken from, so you can consult the surrounding code there. You may also want to check out all other available functions and classes of the mxnet.initializer module.
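Before the project examples, here is a minimal, self-contained sketch (not drawn from any of the projects below) of how Uniform is typically used. Uniform(scale) draws initial values uniformly from [-scale, scale]:

import mxnet as mx
from mxnet import gluon
from mxnet.initializer import Uniform

# Uniform(scale) samples initial values uniformly from [-scale, scale].
init = Uniform(0.01)

# Gluon usage: initialize a layer's parameters with it.
net = gluon.nn.Dense(10)
net.initialize(init=init)

# Module-API usage, mirroring the init_params defaults in the examples below:
# mod.init_params(initializer=Uniform(0.01))

Most of the examples below simply accept Uniform(0.01) as the default initializer argument of init_params or fit and forward it to the underlying module.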
Example #1
Source File: parall_module_local_v1.py    From insightface with MIT License
def init_params(self, initializer=Uniform(0.01), arg_params=None, aux_params=None,
                    allow_missing=False, force_init=False, allow_extra=False):
        if self.params_initialized and not force_init:
            return
        assert self.binded, 'call bind before initializing the parameters'
        #TODO init the same weights with all work nodes
        self._curr_module.init_params(initializer=initializer, arg_params=None,
                                      aux_params=None, allow_missing=allow_missing,
                                      force_init=force_init, allow_extra=allow_extra)
        for _module in self._arcface_modules:
          #_initializer = initializer
          _initializer = mx.init.Normal(0.01)
          _module.init_params(initializer=_initializer, arg_params=None,
                                        aux_params=None, allow_missing=allow_missing,
                                        force_init=force_init, allow_extra=allow_extra)
        self.params_initialized = True 
Example #2
Source File: detection_module.py    From simpledet with Apache License 2.0
def fit(self, train_data, eval_data=None, eval_metric='acc',
            epoch_end_callback=None, batch_end_callback=None, kvstore='local',
            optimizer='sgd', optimizer_params=(('learning_rate', 0.01),),
            eval_end_callback=None,
            eval_batch_end_callback=None, initializer=Uniform(0.01),
            arg_params=None, aux_params=None, allow_missing=False,
            force_rebind=False, force_init=False, begin_epoch=0, num_epoch=None,
            validation_metric=None, monitor=None, sparse_row_id_fn=None, profile=False):

        assert num_epoch is not None, 'please specify number of epochs'

        self.bind(data_shapes=train_data.provide_data, 
                  label_shapes=train_data.provide_label + self.teacher_label_shapes,
                  for_training=True, force_rebind=force_rebind)
        super().fit(force_rebind=False, train_data=train_data, eval_data=eval_data, eval_metric=eval_metric,
                    epoch_end_callback=epoch_end_callback, batch_end_callback=batch_end_callback,
                    kvstore=kvstore, optimizer=optimizer, optimizer_params=optimizer_params,
                    eval_end_callback=eval_end_callback,
                    eval_batch_end_callback=eval_batch_end_callback, initializer=initializer,
                    arg_params=arg_params, aux_params=aux_params, allow_missing=allow_missing,
                    force_init=force_init, begin_epoch=begin_epoch,
                    num_epoch=num_epoch, validation_metric=validation_metric, monitor=monitor,
                    sparse_row_id_fn=sparse_row_id_fn, profile=profile) 
Example #3
Source File: oth_alpha_pose.py    From imgclsmob with MIT License
def make_layer(self, block, planes, blocks, stride=1, **kwargs):
        """ Make ResNet stage """
        downsample = None
        if stride != 1 or self.inplanes != planes * block.expansion:
            downsample = nn.HybridSequential()
            downsample.add(nn.Conv2D(planes * block.expansion, in_channels=self.inplanes,
                                     kernel_size=1, strides=stride, use_bias=False,
                                     weight_initializer=initializer.Uniform(
                                         scale=math.sqrt(1 / (self.inplanes * 1 * 1))),
                                     bias_initializer=initializer.Uniform(
                                         scale=math.sqrt(1 / (self.inplanes * 1 * 1)))))
            downsample.add(self.norm_layer(gamma_initializer=ZeroUniform(), **kwargs))

        layers = nn.HybridSequential()
        if downsample is not None:
            layers.add(block(self.inplanes, planes, stride, downsample,
                             reduction=True, norm_layer=self.norm_layer, **kwargs))
        else:
            layers.add(block(self.inplanes, planes, stride, downsample,
                             norm_layer=self.norm_layer, **kwargs))
        self.inplanes = planes * block.expansion
        for _ in range(1, blocks):
            layers.add(block(self.inplanes, planes, norm_layer=self.norm_layer, **kwargs))

        return layers 
Example #4
Source File: module.py    From mx-DeepIM with Apache License 2.0
def init_params(
        self,
        initializer=Uniform(0.01),
        arg_params=None,
        aux_params=None,
        allow_missing=False,
        force_init=False,
        allow_extra=False,
    ):
        if self.params_initialized and not force_init:
            return
        assert self.binded, "call bind before initializing the parameters"
        self._curr_module.init_params(
            initializer=initializer,
            arg_params=arg_params,
            aux_params=aux_params,
            allow_missing=allow_missing,
            force_init=force_init,
        )
        self.params_initialized = True 
Example #5
Source File: fast_pose.py    From gluon-cv with Apache License 2.0
def __init__(self, preact, num_joints,
                 norm_layer=nn.BatchNorm, norm_kwargs=None, **kwargs):
        super(AlphaPose, self).__init__(**kwargs)
        self.preact = preact
        self.num_joints = num_joints

        self.shuffle1 = PixelShuffle(2)
        if norm_kwargs is None:
            norm_kwargs = {}
        self.duc1 = DUC(1024, inplanes=512,
                        upscale_factor=2, norm_layer=norm_layer, **norm_kwargs)
        self.duc2 = DUC(512, inplanes=256,
                        upscale_factor=2, norm_layer=norm_layer, **norm_kwargs)

        self.conv_out = nn.Conv2D(
            channels=num_joints,
            in_channels=128,
            kernel_size=3,
            strides=1,
            padding=1,
            weight_initializer=initializer.Uniform(scale=math.sqrt(1 / (128 * 3 * 3))),
            bias_initializer=initializer.Uniform(scale=math.sqrt(1 / (128 * 3 * 3)))
        ) 
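The scale here follows a fan-in style bound: with in_channels * kernel_h * kernel_w = 128 * 3 * 3 = 1152 inputs feeding each output unit, sqrt(1 / 1152) is roughly 0.0295, so both weights and biases start in approximately [-0.0295, 0.0295]. A small illustrative sketch of that computation (not code from gluon-cv):

import math
import mxnet as mx

fan_in = 128 * 3 * 3                         # in_channels * kernel_h * kernel_w = 1152
bound = math.sqrt(1 / fan_in)                # about 0.0295
init = mx.initializer.Uniform(scale=bound)   # samples from [-bound, bound]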
Example #6
Source File: fast_pose.py    From gluon-cv with Apache License 2.0
def make_layer(self, block, planes, blocks, stride=1, **kwargs):
        """ Make ResNet stage """
        downsample = None
        if stride != 1 or self.inplanes != planes * block.expansion:
            downsample = nn.HybridSequential()
            downsample.add(nn.Conv2D(planes * block.expansion, in_channels=self.inplanes,
                                     kernel_size=1, strides=stride, use_bias=False,
                                     weight_initializer=initializer.Uniform(
                                         scale=math.sqrt(1 / (self.inplanes * 1 * 1))),
                                     bias_initializer=initializer.Uniform(
                                         scale=math.sqrt(1 / (self.inplanes * 1 * 1)))))
            downsample.add(self.norm_layer(gamma_initializer=ZeroUniform(), **kwargs))

        layers = nn.HybridSequential()
        if downsample is not None:
            layers.add(block(self.inplanes, planes, stride, downsample,
                             reduction=True, norm_layer=self.norm_layer, **kwargs))
        else:
            layers.add(block(self.inplanes, planes, stride, downsample,
                             norm_layer=self.norm_layer, **kwargs))
        self.inplanes = planes * block.expansion
        for _ in range(1, blocks):
            layers.add(block(self.inplanes, planes, norm_layer=self.norm_layer, **kwargs))

        return layers 
Example #7
Source File: parall_module_local_v1.py    From 1.FaceRecognition with MIT License
def init_params(self, initializer=Uniform(0.01), arg_params=None, aux_params=None,
                    allow_missing=False, force_init=False, allow_extra=False):
        if self.params_initialized and not force_init:
            return
        assert self.binded, 'call bind before initializing the parameters'
        #TODO init the same weights with all work nodes
        self._curr_module.init_params(initializer=initializer, arg_params=None,
                                      aux_params=None, allow_missing=allow_missing,
                                      force_init=force_init, allow_extra=allow_extra)
        for _module in self._arcface_modules:
          #_initializer = initializer
          _initializer = mx.init.Normal(0.01)
          _module.init_params(initializer=_initializer, arg_params=None,
                                        aux_params=None, allow_missing=allow_missing,
                                        force_init=force_init, allow_extra=allow_extra)
        self.params_initialized = True 
Example #8
Source File: module.py    From Accel with MIT License
def init_params(self, initializer=Uniform(0.01), arg_params=None, aux_params=None,
                    allow_missing=False, force_init=False):
        if self.params_initialized and not force_init:
            return
        assert self.binded, 'call bind before initializing the parameters'
        self._curr_module.init_params(initializer=initializer, arg_params=arg_params,
                                      aux_params=aux_params, allow_missing=allow_missing,
                                      force_init=force_init)
        self.params_initialized = True 
Example #9
Source File: module.py    From Faster_RCNN_for_DOTA with Apache License 2.0
def init_params(self, initializer=Uniform(0.01), arg_params=None, aux_params=None,
                    allow_missing=False, force_init=False, allow_extra=False):
        if self.params_initialized and not force_init:
            return
        assert self.binded, 'call bind before initializing the parameters'
        self._curr_module.init_params(initializer=initializer, arg_params=arg_params,
                                      aux_params=aux_params, allow_missing=allow_missing,
                                      force_init=force_init, allow_extra=allow_extra)
        self.params_initialized = True 
Example #10
Source File: module.py    From Accel with MIT License
def init_params(self, initializer=Uniform(0.01), arg_params=None, aux_params=None,
                    allow_missing=False, force_init=False):
        if self.params_initialized and not force_init:
            return
        assert self.binded, 'call bind before initializing the parameters'
        self._curr_module.init_params(initializer=initializer, arg_params=arg_params,
                                      aux_params=aux_params, allow_missing=allow_missing,
                                      force_init=force_init)
        self.params_initialized = True 
Example #11
Source File: module.py    From Accel with MIT License
def init_params(self, initializer=Uniform(0.01), arg_params=None, aux_params=None,
                    allow_missing=False, force_init=False):
        if self.params_initialized and not force_init:
            return
        assert self.binded, 'call bind before initializing the parameters'
        self._curr_module.init_params(initializer=initializer, arg_params=arg_params,
                                      aux_params=aux_params, allow_missing=allow_missing,
                                      force_init=force_init)
        self.params_initialized = True 
Example #12
Source File: module.py    From Decoupled-Classification-Refinement with MIT License
def init_params(self, initializer=Uniform(0.01), arg_params=None, aux_params=None,
                    allow_missing=False, force_init=False, allow_extra=False):
        if self.params_initialized and not force_init:
            return
        assert self.binded, 'call bind before initializing the parameters'
        self._curr_module.init_params(initializer=initializer, arg_params=arg_params,
                                      aux_params=aux_params, allow_missing=allow_missing,
                                      force_init=force_init)
        self.params_initialized = True 
Example #13
Source File: module.py    From Decoupled-Classification-Refinement with MIT License
def init_params(self, initializer=Uniform(0.01), arg_params=None, aux_params=None,
                    allow_missing=False, force_init=False, allow_extra=False):
        if self.params_initialized and not force_init:
            return
        assert self.binded, 'call bind before initializing the parameters'
        self._curr_module.init_params(initializer=initializer, arg_params=arg_params,
                                      aux_params=aux_params, allow_missing=allow_missing,
                                      force_init=force_init)
        self.params_initialized = True 
Example #14
Source File: module.py    From SNIPER-mxnet with Apache License 2.0
def init_params(self, initializer=Uniform(0.01), arg_params=None, aux_params=None,
                    allow_missing=False, force_init=False, allow_extra=False):
        if self.params_initialized and not force_init:
            return
        assert self.binded, 'call bind before initializing the parameters'
        self._curr_module.init_params(initializer=initializer, arg_params=arg_params,
                                      aux_params=aux_params, allow_missing=allow_missing,
                                      force_init=force_init, allow_extra=allow_extra)
        self.params_initialized = True 
Example #15
Source File: module_bak.py    From RetinaDetector with Apache License 2.0
def init_params(self, initializer=Uniform(0.01), arg_params=None, aux_params=None,
                    allow_missing=False, force_init=False, allow_extra=False):
        if self.params_initialized and not force_init:
            return
        assert self.binded, 'call bind before initializing the parameters'
        self._curr_module.init_params(initializer=initializer, arg_params=arg_params,
                                      aux_params=aux_params, allow_missing=allow_missing,
                                      force_init=force_init, allow_extra=allow_extra)
        self.params_initialized = True 
Example #16
Source File: module_bak.py    From mxnet-SSH with MIT License
def init_params(self, initializer=Uniform(0.01), arg_params=None, aux_params=None,
                    allow_missing=False, force_init=False, allow_extra=False):
        if self.params_initialized and not force_init:
            return
        assert self.binded, 'call bind before initializing the parameters'
        self._curr_module.init_params(initializer=initializer, arg_params=arg_params,
                                      aux_params=aux_params, allow_missing=allow_missing,
                                      force_init=force_init, allow_extra=allow_extra)
        self.params_initialized = True 
Example #17
Source File: module.py    From mx-maskrcnn with Apache License 2.0
def init_params(self, initializer=Uniform(0.01), arg_params=None, aux_params=None,
                    allow_missing=False, force_init=False, allow_extra=False):
        if self.params_initialized and not force_init:
            return
        assert self.binded, 'call bind before initializing the parameters'
        self._curr_module.init_params(initializer=initializer, arg_params=arg_params,
                                      aux_params=aux_params, allow_missing=allow_missing,
                                      force_init=force_init, allow_extra=allow_extra)
        self.params_initialized = True 
Example #18
Source File: module.py    From mxnet-SSH with MIT License
def init_params(self, initializer=Uniform(0.01), arg_params=None, aux_params=None,
                    allow_missing=False, force_init=False, allow_extra=False):
        if self.params_initialized and not force_init:
            return
        assert self.binded, 'call bind before initializing the parameters'
        self._curr_module.init_params(initializer=initializer, arg_params=arg_params,
                                      aux_params=aux_params, allow_missing=allow_missing,
                                      force_init=force_init, allow_extra=allow_extra)
        self.params_initialized = True 
Example #19
Source File: module.py    From Decoupled-Classification-Refinement with MIT License
def init_params(self, initializer=Uniform(0.01), arg_params=None, aux_params=None,
                    allow_missing=False, force_init=False, allow_extra=False):
        if self.params_initialized and not force_init:
            return
        assert self.binded, 'call bind before initializing the parameters'
        self._curr_module.init_params(initializer=initializer, arg_params=arg_params,
                                      aux_params=aux_params, allow_missing=allow_missing,
                                      force_init=force_init)
        self.params_initialized = True 
Example #20
Source File: module.py    From Relation-Networks-for-Object-Detection with MIT License
def init_params(self, initializer=Uniform(0.01), arg_params=None, aux_params=None,
                    allow_missing=False, force_init=False, allow_extra=False):
        if self.params_initialized and not force_init:
            return
        assert self.binded, 'call bind before initializing the parameters'
        self._curr_module.init_params(initializer=initializer, arg_params=arg_params,
                                      aux_params=aux_params, allow_missing=allow_missing,
                                      force_init=force_init)
        self.params_initialized = True 
Example #21
Source File: module.py    From RoITransformer_DOTA with MIT License
def init_params(self, initializer=Uniform(0.01), arg_params=None, aux_params=None,
                    allow_missing=False, force_init=False, allow_extra=False):
        if self.params_initialized and not force_init:
            return
        assert self.binded, 'call bind before initializing the parameters'
        self._curr_module.init_params(initializer=initializer, arg_params=arg_params,
                                      aux_params=aux_params, allow_missing=allow_missing,
                                      force_init=force_init)
        self.params_initialized = True 
Example #22
Source File: module.py    From RoITransformer_DOTA with MIT License
def init_params(self, initializer=Uniform(0.01), arg_params=None, aux_params=None,
                    allow_missing=False, force_init=False, allow_extra=False):
        if self.params_initialized and not force_init:
            return
        assert self.binded, 'call bind before initializing the parameters'
        self._curr_module.init_params(initializer=initializer, arg_params=arg_params,
                                      aux_params=aux_params, allow_missing=allow_missing,
                                      force_init=force_init)
        self.params_initialized = True 
Example #23
Source File: module.py    From Deformable-ConvNets with MIT License
def init_params(self, initializer=Uniform(0.01), arg_params=None, aux_params=None,
                    allow_missing=False, force_init=False, allow_extra=False):
        if self.params_initialized and not force_init:
            return
        assert self.binded, 'call bind before initializing the parameters'
        self._curr_module.init_params(initializer=initializer, arg_params=arg_params,
                                      aux_params=aux_params, allow_missing=allow_missing,
                                      force_init=force_init)
        self.params_initialized = True 
Example #24
Source File: module.py    From Deformable-ConvNets with MIT License
def init_params(self, initializer=Uniform(0.01), arg_params=None, aux_params=None,
                    allow_missing=False, force_init=False, allow_extra=False):
        if self.params_initialized and not force_init:
            return
        assert self.binded, 'call bind before initializing the parameters'
        self._curr_module.init_params(initializer=initializer, arg_params=arg_params,
                                      aux_params=aux_params, allow_missing=allow_missing,
                                      force_init=force_init)
        self.params_initialized = True 
Example #25
Source File: module.py    From Deformable-ConvNets with MIT License
def init_params(self, initializer=Uniform(0.01), arg_params=None, aux_params=None,
                    allow_missing=False, force_init=False, allow_extra=False):
        if self.params_initialized and not force_init:
            return
        assert self.binded, 'call bind before initializing the parameters'
        self._curr_module.init_params(initializer=initializer, arg_params=arg_params,
                                      aux_params=aux_params, allow_missing=allow_missing,
                                      force_init=force_init)
        self.params_initialized = True 
Example #26
Source File: module.py    From Flow-Guided-Feature-Aggregation with MIT License
def init_params(self, initializer=Uniform(0.01), arg_params=None, aux_params=None,
                    allow_missing=False, force_init=False):
        if self.params_initialized and not force_init:
            return
        assert self.binded, 'call bind before initializing the parameters'
        self._curr_module.init_params(initializer=initializer, arg_params=arg_params,
                                      aux_params=aux_params, allow_missing=allow_missing,
                                      force_init=force_init)
        self.params_initialized = True 
Example #27
Source File: module.py    From Deep-Feature-Flow with MIT License
def init_params(self, initializer=Uniform(0.01), arg_params=None, aux_params=None,
                    allow_missing=False, force_init=False):
        if self.params_initialized and not force_init:
            return
        assert self.binded, 'call bind before initializing the parameters'
        self._curr_module.init_params(initializer=initializer, arg_params=arg_params,
                                      aux_params=aux_params, allow_missing=allow_missing,
                                      force_init=force_init)
        self.params_initialized = True 
Example #28
Source File: module.py    From Deep-Feature-Flow with MIT License
def init_params(self, initializer=Uniform(0.01), arg_params=None, aux_params=None,
                    allow_missing=False, force_init=False):
        if self.params_initialized and not force_init:
            return
        assert self.binded, 'call bind before initializing the parameters'
        self._curr_module.init_params(initializer=initializer, arg_params=arg_params,
                                      aux_params=aux_params, allow_missing=allow_missing,
                                      force_init=force_init)
        self.params_initialized = True 
Example #29
Source File: module.py    From RetinaDetector with Apache License 2.0
def init_params(self, initializer=Uniform(0.01), arg_params=None, aux_params=None,
                    allow_missing=False, force_init=False, allow_extra=False):
        if self.params_initialized and not force_init:
            return
        assert self.binded, 'call bind before initializing the parameters'
        self._curr_module.init_params(initializer=initializer, arg_params=arg_params,
                                      aux_params=aux_params, allow_missing=allow_missing,
                                      force_init=force_init, allow_extra=allow_extra)
        self.params_initialized = True 
Example #30
Source File: module_bak.py    From insightface with MIT License
def init_params(self, initializer=Uniform(0.01), arg_params=None, aux_params=None,
                    allow_missing=False, force_init=False, allow_extra=False):
        if self.params_initialized and not force_init:
            return
        assert self.binded, 'call bind before initializing the parameters'
        self._curr_module.init_params(initializer=initializer, arg_params=arg_params,
                                      aux_params=aux_params, allow_missing=allow_missing,
                                      force_init=force_init, allow_extra=allow_extra)
        self.params_initialized = True