Python sklearn.decomposition.MiniBatchDictionaryLearning() Examples
The following are 14
code examples of sklearn.decomposition.MiniBatchDictionaryLearning().
You can vote up the ones you like or vote down the ones you don't like,
and go to the original project or source file by following the links above each example.
You may also want to check out all available functions and classes of the module
sklearn.decomposition, or try the search function.
Example #1
Source File: test_decomposition.py From pandas-ml with BSD 3-Clause "New" or "Revised" License | 7 votes |
def test_objectmapper(self):
    """Check that ModelFrame.decomposition exposes the sklearn.decomposition classes."""
    df = pdml.ModelFrame([])
    # Every accessor attribute must be the very same class object as in sklearn.
    class_names = ['PCA', 'IncrementalPCA', 'KernelPCA', 'FactorAnalysis',
                   'FastICA', 'TruncatedSVD', 'NMF', 'SparsePCA',
                   'MiniBatchSparsePCA', 'SparseCoder', 'DictionaryLearning',
                   'MiniBatchDictionaryLearning', 'LatentDirichletAllocation']
    for class_name in class_names:
        self.assertIs(getattr(df.decomposition, class_name),
                      getattr(decomposition, class_name))
Example #2
Source File: test_dict_learning.py From Mastering-Elasticsearch-7.0 with MIT License | 6 votes |
def test_dict_learning_online_verbosity():
    """Smoke-test the verbose=1/2 paths of the online dictionary learners."""
    n_components = 5
    # test verbosity
    from io import StringIO
    import sys

    saved_stdout = sys.stdout
    try:
        # Silence the progress output while still exercising the verbose code.
        sys.stdout = StringIO()
        for level in (1, 2):
            dico = MiniBatchDictionaryLearning(n_components, n_iter=20,
                                               verbose=level, random_state=0)
            dico.fit(X)
        for level in (1, 2):
            dict_learning_online(X, n_components=n_components, alpha=1,
                                 verbose=level, random_state=0)
    finally:
        sys.stdout = saved_stdout

    assert dico.components_.shape == (n_components, n_features)
Example #3
Source File: test_dict_learning.py From Mastering-Elasticsearch-7.0 with MIT License | 6 votes |
def test_dict_learning_online_partial_fit():
    """partial_fit over single samples must match one equivalent fit() run."""
    n_components = 12
    rng = np.random.RandomState(0)
    init = rng.randn(n_components, n_features)  # random init
    init /= np.sum(init ** 2, axis=1)[:, np.newaxis]
    batch_model = MiniBatchDictionaryLearning(n_components, n_iter=10 * len(X),
                                              batch_size=1, alpha=1,
                                              shuffle=False, dict_init=init,
                                              random_state=0).fit(X)
    online_model = MiniBatchDictionaryLearning(n_components, alpha=1, n_iter=1,
                                               dict_init=init, random_state=0)
    for _ in range(10):
        for sample in X:
            online_model.partial_fit(sample[np.newaxis, :])

    # The learned dictionary must be non-degenerate and agree between the two.
    assert not np.all(sparse_encode(X, batch_model.components_, alpha=1) == 0)
    assert_array_almost_equal(batch_model.components_,
                              online_model.components_, decimal=2)
Example #4
Source File: LearnDictionary.py From Vessel3DDL with MIT License | 6 votes |
def learn_dictionary_mini(patches, n_c=512, a=1, n_i=800, n_j=3, b_s=3, es=5,
                          fit_algorithm='lars'):
    """Learn a sparse dictionary with MiniBatchDictionaryLearning and return
    its atoms reshaped into cubes.

    patches -- training patches (should be normalized beforehand)
    n_c -- number of dictionary components (atoms), e.g. 512
    a -- alpha, the sparsity-controlling parameter
    n_i -- total number of iterations to perform
    n_j -- number of parallel jobs (threads) to run
    b_s -- batch size: number of samples in each mini-batch
    es -- edge size of each cubic dictionary element
    fit_algorithm -- 'lars' or 'cd'
    """
    learner = MiniBatchDictionaryLearning(n_components=n_c, alpha=a, n_iter=n_i,
                                          n_jobs=n_j, batch_size=b_s,
                                          fit_algorithm=fit_algorithm)
    print ("Start learning dictionary_mini: n_c: "+str(n_c)+", alpha: "+str(a)+", n_i: " + str(n_i)+", n_j: "+str(n_j)+", es: "+str(es)+", b_s: "+str(b_s))
    atoms = learner.fit(patches).components_
    # Each flat atom becomes an es x es x es volume, e.g. 512 x 5 x 5 x 5.
    return atoms.reshape(n_c, es, es, es)
Example #5
Source File: test_dict_learning.py From twitter-stock-recommendation with MIT License | 6 votes |
def test_dict_learning_online_verbosity():
    """Smoke-test the verbose=1/2 paths of the online dictionary learners."""
    n_components = 5
    # test verbosity
    from sklearn.externals.six.moves import cStringIO as StringIO
    import sys

    saved_stdout = sys.stdout
    try:
        sys.stdout = StringIO()  # discard the progress output
        for level in (1, 2):
            dico = MiniBatchDictionaryLearning(n_components, n_iter=20,
                                               verbose=level, random_state=0)
            dico.fit(X)
        for level in (1, 2):
            dict_learning_online(X, n_components=n_components, alpha=1,
                                 verbose=level, random_state=0)
    finally:
        sys.stdout = saved_stdout

    assert_true(dico.components_.shape == (n_components, n_features))
Example #6
Source File: test_dict_learning.py From twitter-stock-recommendation with MIT License | 6 votes |
def test_dict_learning_online_partial_fit():
    """partial_fit over single samples must match one equivalent fit() run."""
    n_components = 12
    rng = np.random.RandomState(0)
    init = rng.randn(n_components, n_features)  # random init
    init /= np.sum(init ** 2, axis=1)[:, np.newaxis]
    batch_model = MiniBatchDictionaryLearning(n_components, n_iter=10 * len(X),
                                              batch_size=1, alpha=1,
                                              shuffle=False, dict_init=init,
                                              random_state=0).fit(X)
    online_model = MiniBatchDictionaryLearning(n_components, alpha=1, n_iter=1,
                                               dict_init=init, random_state=0)
    for _ in range(10):
        for sample in X:
            online_model.partial_fit(sample[np.newaxis, :])

    # The learned dictionary must be non-degenerate and agree between the two.
    assert_true(not np.all(sparse_encode(X, batch_model.components_, alpha=1)
                           == 0))
    assert_array_almost_equal(batch_model.components_,
                              online_model.components_, decimal=2)
Example #7
Source File: test_dict_learning.py From Mastering-Elasticsearch-7.0 with MIT License | 5 votes |
def test_dict_learning_online_positivity(transform_algorithm,
                                         positive_code,
                                         positive_dict):
    """Positivity constraints must be honored by estimator and function APIs."""
    rng = np.random.RandomState(0)
    n_components = 8

    def _check_sign(values, force_positive):
        # With the constraint on, nothing may be negative; with it off the
        # unconstrained solution is expected to contain negative entries.
        if force_positive:
            assert (values >= 0).all()
        else:
            assert (values < 0).any()

    dico = MiniBatchDictionaryLearning(
        n_components, transform_algorithm=transform_algorithm, random_state=0,
        positive_code=positive_code, positive_dict=positive_dict).fit(X)
    code = dico.transform(X)
    _check_sign(dico.components_, positive_dict)
    _check_sign(code, positive_code)

    code, dictionary = dict_learning_online(X, n_components=n_components,
                                            alpha=1, random_state=rng,
                                            positive_dict=positive_dict,
                                            positive_code=positive_code)
    _check_sign(dictionary, positive_dict)
    _check_sign(code, positive_code)
Example #8
Source File: test_dict_learning.py From Mastering-Elasticsearch-7.0 with MIT License | 5 votes |
def test_dict_learning_online_estimator_shapes():
    """A fitted online estimator exposes a (n_components, n_features) dictionary."""
    n_components = 5
    model = MiniBatchDictionaryLearning(n_components, n_iter=20,
                                        random_state=0).fit(X)
    assert model.components_.shape == (n_components, n_features)
Example #9
Source File: test_dict_learning.py From Mastering-Elasticsearch-7.0 with MIT License | 5 votes |
def test_dict_learning_online_overcomplete():
    """An overcomplete fit (n_components > n_features) keeps the requested shape."""
    n_components = 12
    model = MiniBatchDictionaryLearning(n_components, n_iter=20,
                                        random_state=0)
    model.fit(X)
    assert model.components_.shape == (n_components, n_features)
Example #10
Source File: test_dict_learning.py From Mastering-Elasticsearch-7.0 with MIT License | 5 votes |
def test_dict_learning_online_initialization():
    """With n_iter=0 the estimator must keep the user-supplied dictionary."""
    n_components = 12
    rng = np.random.RandomState(0)
    initial = rng.randn(n_components, n_features)
    model = MiniBatchDictionaryLearning(n_components, n_iter=0,
                                        dict_init=initial,
                                        random_state=0).fit(X)
    assert_array_equal(model.components_, initial)
Example #11
Source File: preprocessing.py From Deep-SVDD with MIT License | 5 votes |
def learn_dictionary(X, n_filters, filter_size, n_sample=1000,
                     n_sample_patches=0, **kwargs):
    """
    learn a dictionary of n_filters atoms from n_sample images from X

    Learns a sparse dictionary on normalized image patches and returns it
    reshaped as a filter bank for weight initialization.
    X: image array; assumes layout (n_images, n_channels, height, width) given
       the indexing of X.shape below -- TODO confirm against callers.
    n_filters: number of dictionary atoms to learn.
    filter_size: side length of the square patches/filters.
    n_sample: number of images randomly drawn from X to extract patches from.
    n_sample_patches: if > 0, cap on the number of patches used for learning.
    kwargs: forwarded to MiniBatchDictionaryLearning.
    Returns: float32 array of shape (n_filters, n_channels, filter_size,
    filter_size).
    """

    n_channels = X.shape[1]

    # subsample n_sample images randomly
    rand_idx = np.random.choice(len(X), n_sample, replace=False)

    # extract patches
    patch_size = (filter_size, filter_size)
    # NOTE(review): this reshape reinterprets the (n, c, h, w) buffer as
    # (n, h, w, c) without moving any data; if a channels-last layout was
    # intended for PatchExtractor, a transpose(0, 2, 3, 1) would be the usual
    # way -- confirm the intended layout before changing behavior.
    patches = PatchExtractor(patch_size).transform(
        X[rand_idx, ...].reshape(n_sample, X.shape[2], X.shape[3], X.shape[1]))
    patches = patches.reshape(patches.shape[0], -1)
    # Standardize each pixel position across patches (zero mean, unit std).
    # NOTE(review): divides by the per-feature std; a constant feature would
    # produce a division by zero here.
    patches -= np.mean(patches, axis=0)
    patches /= np.std(patches, axis=0)

    if n_sample_patches > 0 and (n_sample_patches < len(patches)):
        # Shuffle (global RNG) so the kept subset is a random sample.
        np.random.shuffle(patches)
        patches = patches[:n_sample_patches, ...]

    # learn dictionary
    print('Learning dictionary for weight initialization...')

    dico = MiniBatchDictionaryLearning(n_components=n_filters, alpha=1,
                                       n_iter=1000, batch_size=10,
                                       shuffle=True, verbose=True, **kwargs)
    W = dico.fit(patches).components_
    # Flat atoms are reinterpreted as (channels, h, w) filters.
    W = W.reshape(n_filters, n_channels, filter_size, filter_size)

    print('Dictionary learned.')

    return W.astype(np.float32)
Example #12
Source File: test_dict_learning.py From twitter-stock-recommendation with MIT License | 5 votes |
def test_dict_learning_online_estimator_shapes():
    """A fitted online estimator exposes a (n_components, n_features) dictionary."""
    n_components = 5
    model = MiniBatchDictionaryLearning(n_components, n_iter=20,
                                        random_state=0).fit(X)
    assert_true(model.components_.shape == (n_components, n_features))
Example #13
Source File: test_dict_learning.py From twitter-stock-recommendation with MIT License | 5 votes |
def test_dict_learning_online_overcomplete():
    """An overcomplete fit (n_components > n_features) keeps the requested shape."""
    n_components = 12
    model = MiniBatchDictionaryLearning(n_components, n_iter=20,
                                        random_state=0)
    model.fit(X)
    assert_true(model.components_.shape == (n_components, n_features))
Example #14
Source File: test_dict_learning.py From twitter-stock-recommendation with MIT License | 5 votes |
def test_dict_learning_online_initialization():
    """With n_iter=0 the estimator must keep the user-supplied dictionary."""
    n_components = 12
    rng = np.random.RandomState(0)
    initial = rng.randn(n_components, n_features)
    model = MiniBatchDictionaryLearning(n_components, n_iter=0,
                                        dict_init=initial,
                                        random_state=0).fit(X)
    assert_array_equal(model.components_, initial)