Python theano.tensor.shared_randomstreams.RandomStreams() Examples

The following are code examples of theano.tensor.shared_randomstreams.RandomStreams(), collected from open-source projects. You can go to the original project or source file by following the links above each example. You may also want to check out all available functions/classes of the module theano.tensor.shared_randomstreams, or try the search function.
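As a quick orientation before the project examples below, here is a minimal, self-contained sketch of the core RandomStreams pattern (seeding, drawing symbolic random variables, and compiling them into functions). It follows the standard Theano API; the variable names are my own:

import theano
from theano.tensor.shared_randomstreams import RandomStreams

srng = RandomStreams(seed=234)       # seeding makes the stream reproducible
rv_u = srng.uniform((2, 2))          # symbolic 2x2 matrix of uniform draws
rv_n = srng.normal((2, 2))           # symbolic 2x2 matrix of normal draws

f = theano.function([], rv_u)                            # new draw on every call
g = theano.function([], rv_n, no_default_updates=True)   # state frozen: same draw every call

print(f())   # differs between calls
print(g())   # identical between calls

Each random variable carries an update of its underlying shared random state; passing no_default_updates=True suppresses that update, which is why g() repeats the same draw.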
Example #1
Source File: env.py    From deepy with MIT License
def __init__(self, seed=DEFAULT_SEED):
        """
        Initialize seed and global random variables.
        """
        if seed != self.DEFAULT_SEED:
            self._seed = seed
        elif 'DEEPY_SEED' in os.environ:
            self._seed = int(os.environ['DEEPY_SEED'])
        else:
            self._seed = self.DEFAULT_SEED
        if self._seed != self.DEFAULT_SEED:
            logging.info("set global random seed to %d" % self._seed)

        self._numpy_rand = np.random.RandomState(seed=self._seed)
        self._theano_rand = RandomStreams(seed=self._seed)
        self._shared_rand = SharedRandomStreams(seed=self._seed)
        self._default_initializer = None 
Example #2
Source File: mask_generator.py    From Neural-Photo-Editor with MIT License
def reset(self):
        # Set Original ordering
        self.ordering.set_value(np.arange(self._input_size, dtype=theano.config.floatX))

        # Reset RandomStreams
        self._rng.seed(self._random_seed)

        # Initial layer connectivity
        self.layers_connectivity[0].set_value((self.ordering + 1).eval())
        for i in range(1, len(self.layers_connectivity)-1):
            self.layers_connectivity[i].set_value(np.zeros((self._hidden_sizes[i-1]), dtype=theano.config.floatX))
        self.layers_connectivity[-1].set_value(self.ordering.get_value())

        # Reset MRG_RandomStreams (GPU)
        self._mrng.rstate = self._initial_mrng_rstate
        for state, value in zip(self._mrng.state_updates, self._initial_mrng_state_updates):
            state[0].set_value(value)

        self.sample_connectivity() 
Example #3
Source File: layers.py    From eqnet with BSD 3-Clause "New" or "Revised" License
def __init__(self, embeddings, memory_size: int, embeddings_size: int, hyperparameters: dict, rng: RandomStreams,
                 name="SequenceAveragingGRU", use_centroid=False):
        """
        :param embeddings: the embedding matrix
        """
        self.__name = name
        self.__embeddings = embeddings
        self.__memory_size = memory_size
        self.__embeddings_size = embeddings_size
        self.__hyperparameters = hyperparameters
        self.__rng = rng

        if use_centroid:
            self.__gru = GruCentroidsCell(memory_size, embeddings_size, hyperparameters['num_centroids'],
                                          hyperparameters['centroid_use_rate'], self.__rng, self.__name + ":GRUCell",
                                          hyperparameters['log_init_noise'])
        else:
            self.__gru = GruCell(memory_size, embeddings_size, self.__name + ":GRUCell",
                                 hyperparameters['log_init_noise'])

        self.__params = {self.__name + ":" + n: v for n, v in self.__gru.get_params().items()} 
Example #4
Source File: MIDS.py    From MIDS with MIT License
def main(load_id):
        consts = Consts()
        consts.load_from_ids = load_id
        rng = numpy.random.RandomState()
        theano_rng = RandomStreams(rng.randint(2 ** 30))
        user_lines = UserLines(rng=rng, theano_rng=theano_rng, consts=consts)
        rating_info = numpy.zeros(1, dtype=theano.config.floatX)
        wday = 4  # friday
        rating_info[0] = get_aranged(value=wday, min_value=0, max_value=6)
        #user_id = user_lines.rng.randint(low=0,high=user_lines.matrix_ids.users_count)
        #user_ids = user_lines.__find_nearest(user_id,5)
        user_indices = [user_lines.rng.randint(low=0, high=len(user_lines.users_cvs) - 1) for it in numpy.arange(5)]
        user_ids = [user_lines.users_cvs.at[indice, "id"] for indice in user_indices]
        #user_lines.build_line_for_rand_user(rating_info = rating_info, user_ids = user_ids, consts = consts)
        user_lines.build_rate_for_rand_user(rating_info=rating_info, user_ids=user_ids, consts=consts)
        sys.stdout.write("all done\n")
        return 
Example #5
Source File: model.py    From eqnet with BSD 3-Clause "New" or "Revised" License
def __init__(self, memory_size: int, num_node_types: int, max_num_children: int, hyperparameters: dict,
                 rng: RandomStreams, name: str = "single_layer_combination"):
        self.__memory_size = memory_size
        self.__rng = rng
        self.__name = name
        self.__hyperparameters = hyperparameters

        w = np.random.randn(num_node_types, memory_size, max_num_children * memory_size) * \
            10 ** self.__hyperparameters["log_init_scale_embedding"]
        self.__w = theano.shared(w.astype(theano.config.floatX), name=name + ":w")

        bias = np.random.randn(num_node_types, memory_size) * 10 ** self.__hyperparameters["log_init_scale_embedding"]
        self.__bias = theano.shared(bias.astype(theano.config.floatX), name=name + ":b")

        self.__w_with_dropout = \
            dropout(self.__hyperparameters['dropout_rate'], self.__rng, self.__w, True) 
Example #6
Source File: model.py    From eqnet with BSD 3-Clause "New" or "Revised" License
def __init__(self, memory_size: int, num_node_types: int, max_num_children: int, hyperparameters: dict,
                 hidden_layer_size: int, rng: RandomStreams, name: str = "single_layer_combination"):
        self.__memory_size = memory_size
        self.__rng = rng
        self.__name = name
        self.__hyperparameters = hyperparameters

        w_l1 = np.random.randn(num_node_types, hidden_layer_size, max_num_children * memory_size) * \
               10 ** self.__hyperparameters["log_init_scale_embedding"]
        self.__w_l1 = theano.shared(w_l1.astype(theano.config.floatX), name=name + ":w_l1")

        bias_l1 = np.random.randn(num_node_types, hidden_layer_size) * 10 ** self.__hyperparameters[
            "log_init_scale_embedding"]
        self.__bias_l1 = theano.shared(bias_l1.astype(theano.config.floatX), name=name + ":b_l1")

        w_l2 = np.random.randn(num_node_types, memory_size, hidden_layer_size) * \
               10 ** self.__hyperparameters["log_init_scale_embedding"]
        self.__w_l2 = theano.shared(w_l2.astype(theano.config.floatX), name=name + ":w_l2")

        bias_l2 = np.random.randn(num_node_types, memory_size) * 10 ** self.__hyperparameters[
            "log_init_scale_embedding"]
        self.__bias_l2 = theano.shared(bias_l2.astype(theano.config.floatX), name=name + ":b_l2")

        self.__w_l1_with_dropout, self.__w_l2_with_dropout = \
            dropout_multiple(self.__hyperparameters['dropout_rate'], self.__rng, True, self.__w_l1, self.__w_l2) 
Example #7
Source File: layers.py    From eqnet with BSD 3-Clause "New" or "Revised" License
def get_cell_with_dropout(self, rng: RandomStreams, dropout_rate: float):
        raise NotImplementedError() 
Example #8
Source File: first_glimpse_model.py    From deepy with MIT License
def _setup_params(self):
        self.srng = RandomStreams(seed=234)
        self.large_cov = np.array([[0.06,0],[0,0.06]], dtype=FLOATX)
        self.small_cov = np.array([[self.gaussian_std,0],[0,self.gaussian_std]], dtype=FLOATX)
        self.cov = theano.shared(np.array(self.small_cov, dtype=FLOATX))
        self.cov_inv_var = theano.shared(np.array(LA.inv(self.small_cov), dtype=FLOATX))
        self.cov_det_var = theano.shared(np.array(LA.det(self.small_cov), dtype=FLOATX))
        self._sample_gaussian = SampleMultivariateGaussian()

        self.W_g0 = self.create_weight(7 * 7, 128, label="g0")
        self.W_g1 = self.create_weight(2, 128, label="g1")
        self.W_g2_hg = self.create_weight(128, 256, label="g2_hg")
        self.W_g2_hl = self.create_weight(128, 256, label="g2_hl")

        self.W_h_g = self.create_weight(256, 256, label="h_g")
        self.W_h = self.create_weight(256, 256, label="h")
        self.B_h = self.create_bias(256, label="h")
        self.h0 = self.create_vector(256, "h0")
        self.l0 = self.create_vector(2, "l0")
        self.l0.set_value(np.array([-1, -1], dtype=FLOATX))

        self.W_l = self.create_weight(256, 2, label="l")
        self.W_l.set_value(self.W_l.get_value() / 10)
        self.B_l = self.create_bias(2, label="l")
        self.W_a = self.create_weight(256, 10, label="a")
        self.B_a = self.create_bias(10, label="a")

        self.W_f = self.create_weight(7 * 7, 2, label="f")


        self.W = [self.W_g0, self.W_g1, self.W_g2_hg, self.W_g2_hl, self.W_h_g, self.W_h, self.W_a]
        self.B = [self.B_h, self.B_a]
        self.parameters = [self.W_l, self.W_f] 
Example #9
Source File: mask_generator.py    From Neural-Photo-Editor with MIT License
def __init__(self, input_size, hidden_sizes, l, random_seed=1234):
        self._random_seed = random_seed
        self._mrng = MRG_RandomStreams(seed=random_seed)
        self._rng = RandomStreams(seed=random_seed)

        self._hidden_sizes = hidden_sizes
        self._input_size = input_size
        self._l = l

        self.ordering = theano.shared(value=np.arange(input_size, dtype=theano.config.floatX), name='ordering', borrow=False)

        # Initial layer connectivity
        self.layers_connectivity = [theano.shared(value=(self.ordering + 1).eval(), name='layer_connectivity_input', borrow=False)]
        for i in range(len(self._hidden_sizes)):
            self.layers_connectivity += [theano.shared(value=np.zeros((self._hidden_sizes[i]), dtype=theano.config.floatX), name='layer_connectivity_hidden{0}'.format(i), borrow=False)]
        self.layers_connectivity += [self.ordering]

        ## Theano functions
        new_ordering = self._rng.shuffle_row_elements(self.ordering)
        self.shuffle_ordering = theano.function(name='shuffle_ordering',
                                                inputs=[],
                                                updates=[(self.ordering, new_ordering), (self.layers_connectivity[0], new_ordering + 1)])

        self.layers_connectivity_updates = []
        for i in range(len(self._hidden_sizes)):
            self.layers_connectivity_updates += [self._get_hidden_layer_connectivity(i)]
        # self.layers_connectivity_updates = [self._get_hidden_layer_connectivity(i) for i in range(len(self._hidden_sizes))]  # WTF THIS DO NOT WORK
        self.sample_connectivity = theano.function(name='sample_connectivity',
                                                   inputs=[],
                                                   updates=[(self.layers_connectivity[i+1], self.layers_connectivity_updates[i]) for i in range(len(self._hidden_sizes))])

        # Save random initial state
        self._initial_mrng_rstate = copy.deepcopy(self._mrng.rstate)
        self._initial_mrng_state_updates = [state_update[0].get_value() for state_update in self._mrng.state_updates]

        # Ensuring valid initial connectivity
        self.sample_connectivity() 
Example #10
Source File: common_theano.py    From neural_topic_models with Apache License 2.0
def get_rngs(seed):
    assert seed > 0
    # initialize random and np.random with seed
    np.random.seed(seed)
    np_rng = np.random.RandomState(np.random.randint(2 ** 30))
    theano_rng = RandomStreams(np.random.randint(2 ** 30))
    return np_rng, theano_rng 
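A hypothetical usage note (the calls below are mine, not from neural_topic_models): because get_rngs seeds the global np.random before drawing the two sub-seeds, calling it twice with the same seed reproduces the same pair of generators, which is what makes runs deterministic:

np_rng_a, theano_rng_a = get_rngs(seed=42)
np_rng_b, theano_rng_b = get_rngs(seed=42)
assert np_rng_a.randint(100) == np_rng_b.randint(100)  # same sub-seed, same stream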
Example #11
Source File: models.py    From SERT with MIT License
def _negative_sampling(self, num_negative_samples, target_indices):
        assert num_negative_samples > 0

        logging.debug('Stochastically sampling %d negative instances '
                      'out of %d classes (%.2f%%).',
                      num_negative_samples, self.num_entities,
                      100.0 *
                      float(num_negative_samples) / self.num_entities)

        from theano.tensor.shared_randomstreams import RandomStreams

        srng = RandomStreams(
            seed=np.random.randint(low=0, high=(1 << 30)))

        rng_sample_size = (self.batch_size, num_negative_samples,)

        logging.debug(
            'Using %s for random sample generation of %s tensors.',
            RandomStreams, rng_sample_size)

        logging.debug('For every batch %d random integers are sampled.',
                      np.prod(rng_sample_size))

        random_negative_indices = srng.choice(
            rng_sample_size,
            a=self.num_entities,
            p=self.clazz_distribution)

        if self.__DEBUG__:
            random_negative_indices = theano.printing.Print(
                'random_negative_indices')(random_negative_indices)

        return random_negative_indices 
Example #12
Source File: supervisedencoder.py    From eqnet with BSD 3-Clause "New" or "Revised" License
def __init__(self, training_filename: str, hyperparameters: dict, combination_type='eqnet'):
        self.__hyperparameters = hyperparameters

        self.__dataset_extractor = TreeDatasetExtractor(training_filename)
        self.__rng = RandomStreams()

        self.__rnn = RNN(self.__hyperparameters['memory_size'], self.__hyperparameters, self.__rng,
                         self.__dataset_extractor, combination_type=combination_type)
        check_hyperparameters(self.REQUIRED_HYPERPARAMETERS | self.__rnn.required_hyperparameters,
                              self.__hyperparameters)

        target_embeddings = np.random.randn(self.__hyperparameters['memory_size'],
                                            self.__dataset_extractor.num_equivalent_classes) \
                            * 10 ** self.__hyperparameters["log_init_scale_embedding"]
        self.__target_embeddings = theano.shared(target_embeddings.astype(theano.config.floatX),
                                                 name="target_embeddings")
        self.__target_embeddings_dropout = dropout(self.__hyperparameters['dropout_rate'], self.__rng,
                                                   self.__target_embeddings, True)

        self.__target_bias = np.log(self.__dataset_extractor.training_empirical_distribution)

        self.__trainable_params = list(self.__rnn.get_params().values()) + [self.__target_embeddings]

        self.__compiled_methods = None
        self.__trained_parameters = None 
Example #13
Source File: siameseencoder.py    From eqnet with BSD 3-Clause "New" or "Revised" License
def __init__(self, training_filename: str, hyperparameters: dict, combination_type='residual_with_ae'):
        self.__hyperparameters = hyperparameters

        self.__dataset_extractor = TreeDatasetExtractor(training_filename)
        self.__rng = RandomStreams()

        self.__rnn = RNN(self.__hyperparameters['memory_size'], self.__hyperparameters, self.__rng,
                         self.__dataset_extractor, combination_type=combination_type)
        self.__trainable_params = list(self.__rnn.get_params().values())
        check_hyperparameters(self.REQUIRED_HYPERPARAMETERS | self.__rnn.required_hyperparameters,
                              self.__hyperparameters)

        self.__compiled_methods = None
        self.__trained_parameters = None 
Example #14
Source File: grussiameseencoder.py    From eqnet with BSD 3-Clause "New" or "Revised" License
def __init__(self, embedding_size: int, vocabulary_size: int, empirical_distribution, representation_size: int,
                 hyperparameters: dict, encoder_type: str, name: str = "GRUSequenceSiameseEncoder", use_centroid=False):
        self.__hyperparameters = hyperparameters
        self.__name = name
        log_init_noise = self.__hyperparameters["log_init_noise"]

        self.__memory_size = representation_size
        self.__embedding_size = embedding_size
        self.__vocabulary_size = vocabulary_size
        self.__empirical_distribution = empirical_distribution
        self.__encoder_type = encoder_type

        embeddings = np.random.randn(vocabulary_size, embedding_size) * 10 ** log_init_noise
        self.__embeddings = theano.shared(embeddings.astype(theano.config.floatX), name=name + ":embeddings")
        self.__name_bias = theano.shared(np.log(empirical_distribution).astype(theano.config.floatX),
                                         name=name + ":name_bias")

        encoder_init_state = np.random.randn(representation_size) * 10 ** log_init_noise
        self.__encoder_init_state = theano.shared(encoder_init_state.astype(theano.config.floatX),
                                                  name=name + ":encoder_init_state")

        self.__rng = RandomStreams()

        self.__input_sequence = T.ivector(name + ":input_sequence")

        if encoder_type == 'gru':
            self.__encoder = GRU(self.__embeddings, representation_size, embedding_size,
                                 self.__hyperparameters, self.__rng, name=name + ":GRUSequenceEncoder",
                                 use_centroid=use_centroid)
        elif encoder_type == 'averaging_gru':
            self.__encoder = AveragingGRU(self.__embeddings, representation_size, embedding_size,
                                          self.__hyperparameters, self.__rng,
                                          name=name + ":AveragingGRUSequenceEncoder", use_centroid=use_centroid)
        else:
            raise Exception("Unrecognized encoder type `%s`, possible options `gru` and `averaging_gru`" % encoder_type)

        self.__params = {"embeddings": self.__embeddings,
                         "encoder_init_state": self.__encoder_init_state}
        self.__params.update(self.__encoder.get_params()) 
Example #15
Source File: test_builders.py    From D-VAE with MIT License
def test_connection_pattern(self):
        # Basic case
        x, y, z = T.matrices('xyz')
        out1 = x * y
        out2 = y * z

        op1 = OpFromGraph([x, y, z], [out1, out2])
        results = op1.connection_pattern(None)
        expect_result = [[True, False],
                         [True, True],
                         [False, True]]
        assert results == expect_result

        # Graph with ops that don't have a 'full' connection pattern
        # and with ops that have multiple outputs
        m, n, p, q = T.matrices('mnpq')
        o1, o2 = op1(m, n, p)
        out1, out2 = op1(o1, q, o2)
        op2 = OpFromGraph([m, n, p, q], [out1, out2])

        results = op2.connection_pattern(None)
        expect_result = [[True, False],
                         [True, True],
                         [False, True],
                         [True, True]]
        assert results == expect_result

        # Inner graph where some computation doesn't rely on explicit inputs
        srng = RandomStreams(seed=234)
        rv_u = srng.uniform((2, 2))
        x, y = T.matrices('xy')
        out1 = x + rv_u
        out2 = y + 3
        out3 = 3 + rv_u
        op3 = OpFromGraph([x, y], [out1, out2, out3])

        results = op3.connection_pattern(None)
        expect_result = [[True, False, False],
                         [False, True, False],
                         [True, False, True]]
        assert results == expect_result 
Example #16
Source File: layers.py    From eqnet with BSD 3-Clause "New" or "Revised" License
def get_cell_with_dropout(self, rng: RandomStreams, dropout_rate: float):
        with_dropout = SimpleRecurrentCell.__new__(self.__class__)

        with_dropout.__prev_hidden_to_next, with_dropout.__prediction_to_hidden = dropout_multiple(
            dropout_rate, rng, True, self.__prev_hidden_to_next, self.__prediction_to_hidden)
        with_dropout.__bias = self.__bias
        with_dropout.get_cell_with_dropout = None
        with_dropout.__name = self.__name + ":with_dropout"
        return with_dropout 
Example #17
Source File: layers.py    From eqnet with BSD 3-Clause "New" or "Revised" License
def get_cell_with_dropout(self, rng: RandomStreams, dropout_rate: float):
        with_dropout = LinearRecurrentCell.__new__(LinearRecurrentCell)

        with_dropout.__prev_hidden_to_next, with_dropout.__prediction_to_hidden = dropout_multiple(
            dropout_rate, rng, True, self.__prev_hidden_to_next, self.__prediction_to_hidden)
        with_dropout.__bias = self.__bias
        with_dropout.get_cell_with_dropout = None
        with_dropout.__name = self.__name + ":with_dropout"
        return with_dropout 
Example #18
Source File: layers.py    From eqnet with BSD 3-Clause "New" or "Revised" License
def get_cell_with_dropout(self, rng: RandomStreams, dropout_rate: float):
        with_dropout = GruCell.__new__(GruCell)

        with_dropout.__w_hid, with_dropout.__w_in = dropout_multiple(
            dropout_rate, rng, True, self.__w_hid, self.__w_in)
        with_dropout.__biases = self.__biases
        with_dropout.get_cell_with_dropout = None
        with_dropout.__name = self.__name + ":with_dropout"
        with_dropout.__memory_D = self.__memory_D
        with_dropout.__grad_clip = self.__grad_clip
        return with_dropout 
Example #19
Source File: optimization.py    From eqnet with BSD 3-Clause "New" or "Revised" License
def dropout(dropout_rate: float, rng: RandomStreams, parameter, use_dropout: bool):
    if use_dropout:
        mask = rng.binomial(parameter.shape, p=1. - dropout_rate, dtype=parameter.dtype)
        return parameter * mask / (1. - dropout_rate)
    else:
        return parameter 
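For context, a minimal usage sketch of the dropout helper above (the snippet is mine, under the assumption that it is applied to shared weight matrices as in the other eqnet snippets): roughly dropout_rate of the entries are zeroed and the survivors are rescaled by 1/(1 - dropout_rate), so the expected value of each entry is unchanged (inverted dropout):

import numpy as np
import theano
from theano.tensor.shared_randomstreams import RandomStreams

rng = RandomStreams(seed=1234)
w = theano.shared(np.ones((4, 4), dtype=theano.config.floatX), name='w')
f = theano.function([], dropout(0.5, rng, w, use_dropout=True))
print(f())   # entries are either 0.0 or 2.0; the mean stays near 1.0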
Example #20
Source File: optimization.py    From eqnet with BSD 3-Clause "New" or "Revised" License
def dropout_multiple(dropout_rate: float, rng: RandomStreams, use_dropout: bool, *parameters):
    return tuple([dropout(dropout_rate, rng, p, use_dropout) for p in parameters]) 
Example #21
Source File: tools.py    From cortex_old with GNU General Public License v3.0
def get_srng():
    '''Shared RandomStreams.

    '''
    srng = SRandomStreams(random.randint(0, 1000000))
    return srng 
Example #22
Source File: layers.py    From seizure-detection with MIT License
def dropout(rng, x, p=0.5):
    """ Zero-out random values in x with probability p using rng """
    if p > 0. and p < 1.:
        seed = rng.randint(2 ** 30)
        srng = theano.tensor.shared_randomstreams.RandomStreams(seed)
        mask = srng.binomial(n=1, p=1.-p, size=x.shape,
                dtype=theano.config.floatX)
        return x * mask
    return x 
Example #23
Source File: layers.py    From seizure-detection with MIT License
def fast_dropout(rng, x):
    """ Multiply activations by N(1,1) """
    seed = rng.randint(2 ** 30)
    srng = RandomStreams(seed)
    mask = srng.normal(size=x.shape, avg=1., dtype=theano.config.floatX)
    return x * mask 
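A quick sanity check of the claim in the docstring (the snippet is mine, not part of seizure-detection): since the multiplicative mask is drawn from N(1, 1), its mean is 1 and the expected activation is preserved:

import numpy as np
import theano
import theano.tensor as T

np_rng = np.random.RandomState(42)
x = T.matrix('x')
f = theano.function([x], fast_dropout(np_rng, x))
data = np.ones((1000, 100), dtype=theano.config.floatX)
print(f(data).mean())   # close to 1.0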
Example #24
Source File: autoencoder.py    From TextDetector with GNU General Public License v3.0
def __init__(self, L1, ratio):
        self.random_stream = RandomStreams(seed=1)
        self.L1 = L1
        self.ratio = ratio 
Example #25
Source File: rbm_pretraining.py    From Projects with MIT License
def __init__(self,n_visible,n_hidden,batch_size):
        self.n_visible = n_visible
        self.n_hidden = n_hidden

        np_rng = np.random.RandomState(1234)
        theano_rng = RandomStreams(np_rng.randint(2 ** 30))

        initial_W = np.asarray(np_rng.uniform(low=-4 * np.sqrt(6. / (n_hidden + n_visible)),
                                              high=4 * np.sqrt(6. / (n_hidden + n_visible)),
                                              size=(n_visible, n_hidden)),
                               dtype=theano.config.floatX)
        W = theano.shared(value=initial_W, name='W', borrow=True)

        hbias = theano.shared(value=np.zeros(n_hidden,dtype=theano.config.floatX),name='hbias',borrow=True)
        vbias = theano.shared(value=np.zeros(n_visible,dtype=theano.config.floatX),name='vbias',borrow=True)

        self.input = T.matrix('input')
        self.W = W
        self.hbias = hbias
        self.vbias = vbias
        self.theano_rng = theano_rng
        self.params = [self.W, self.hbias, self.vbias]
        
        self.persistent_chain = theano.shared(np.zeros((batch_size,n_hidden),dtype=theano.config.floatX),borrow=True)
        self.cost, self.updates = self.get_cost_updates(lr=0.1,persistent=self.persistent_chain, k=15)
        self.train = theano.function([self.input],self.cost,updates=self.updates,name='train_rbm')
        
        _i,_j,self._output_hidden = self.sample_h_given_v(self.input)
        self.output_hidden = theano.function([self.input],self._output_hidden) 
Example #26
Source File: network3.py    From WannaPark with GNU General Public License v3.0
def dropout_layer(layer, p_dropout):
    srng = shared_randomstreams.RandomStreams(
        np.random.RandomState(0).randint(999999))
    mask = srng.binomial(n=1, p=1-p_dropout, size=layer.shape)
    return layer*T.cast(mask, theano.config.floatX) 
Example #27
Source File: test_builders.py    From attention-lvcsr with MIT License
def test_connection_pattern(self):
        # Basic case 
        x, y, z = T.matrices('xyz')
        out1 = x * y
        out2 = y * z

        op1 = OpFromGraph([x, y, z], [out1, out2])
        results = op1.connection_pattern(None)
        expect_result = [[True, False],
                         [True, True],
                         [False, True]]
        assert results == expect_result

        # Graph with ops that don't have a 'full' connection pattern
        # and with ops that have multiple outputs 
        m, n, p, q = T.matrices('mnpq')
        o1, o2 = op1(m, n, p)
        out1, out2 = op1(o1, q, o2)
        op2 = OpFromGraph([m, n, p, q], [out1, out2])

        results = op2.connection_pattern(None)
        expect_result = [[True, False],
                         [True, True],
                         [False, True],
                         [True, True]]
        assert results == expect_result

        # Inner graph where some computation doesn't rely on explicit inputs
        srng = RandomStreams(seed=234)
        rv_u = srng.uniform((2,2))
        x, y = T.matrices('xy')
        out1 = x + rv_u
        out2 = y + 3
        out3 = 3 + rv_u
        op3 = OpFromGraph([x, y], [out1, out2, out3])

        results = op3.connection_pattern(None)
        expect_result = [[True, False, False],
                         [False, True, False],
                         [True, False, True]]
        assert results == expect_result 
Example #28
Source File: sda.py    From DeepLearningBook with MIT License
def __init__(self, input, n_input, n_hidden, W=None, bhid=None, bout=None):
        self.input = input
        self.n_input = n_input
        self.n_output = n_input
        self.n_hidden = n_hidden

        if W is None:
            initial_W = numpy.random.uniform(
                low=-4 * numpy.sqrt(6. / (n_hidden + n_input)),
                high=4 * numpy.sqrt(6. / (n_hidden + n_input)),
                size=(n_input, n_hidden)).astype(theano.config.floatX)
            W = theano.shared(value=initial_W, name='W')
        self.W = W

        if bhid is None:
            initial_bhid = numpy.zeros(shape=(n_hidden,)).astype(theano.config.floatX)
            bhid = theano.shared(value=initial_bhid, name='bhid')
        self.bhid = bhid

        if bout is None:
            initial_bout = numpy.zeros(shape=(n_input,)).astype(theano.config.floatX)
            bout = theano.shared(value=initial_bout, name='bout')
        self.bout = bout

        # The autoencoder's input and output layers are the same
        self.W_pi = self.W.T
        self.params = [self.W, self.bhid, self.bout]
        self.hidden = self.get_hidden_value(self.input)
        self.output = self.get_reconstructed_value(self.hidden)

        self.theano_rng = RandomStreams(12345)
Example #29
Source File: dnn.py    From DL4H with MIT License
def dropout(rng, x, p=0.5):
    """ Zero-out random values in x with probability p using rng """
    if p > 0. and p < 1.:
        seed = rng.randint(2 ** 30)
        srng = theano.tensor.shared_randomstreams.RandomStreams(seed)
        mask = srng.binomial(n=1, p=1.-p, size=x.shape,
                dtype=theano.config.floatX)
        return x * mask
    return x