Python sklearn.decomposition.DictionaryLearning() Examples
The following are 18 code examples of sklearn.decomposition.DictionaryLearning().
You may also want to check out all available functions and classes of the module sklearn.decomposition.
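Before the project examples, here is a minimal, self-contained sketch of the typical fit/transform pattern; the random data and parameter values are purely illustrative and not taken from any of the projects below.

import numpy as np
from sklearn.decomposition import DictionaryLearning

# toy data: 100 samples with 8 features (illustrative only)
rng = np.random.RandomState(0)
X = rng.randn(100, 8)

# learn a dictionary of 6 atoms and sparse-code X against it
dico = DictionaryLearning(n_components=6, transform_algorithm='omp',
                          random_state=0)
code = dico.fit(X).transform(X)          # shape (100, 6)
X_hat = np.dot(code, dico.components_)   # approximate reconstruction of X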
Example #1
Source File: test_decomposition.py From pandas-ml with BSD 3-Clause "New" or "Revised" License

def test_objectmapper(self):
    df = pdml.ModelFrame([])
    self.assertIs(df.decomposition.PCA, decomposition.PCA)
    self.assertIs(df.decomposition.IncrementalPCA, decomposition.IncrementalPCA)
    self.assertIs(df.decomposition.KernelPCA, decomposition.KernelPCA)
    self.assertIs(df.decomposition.FactorAnalysis, decomposition.FactorAnalysis)
    self.assertIs(df.decomposition.FastICA, decomposition.FastICA)
    self.assertIs(df.decomposition.TruncatedSVD, decomposition.TruncatedSVD)
    self.assertIs(df.decomposition.NMF, decomposition.NMF)
    self.assertIs(df.decomposition.SparsePCA, decomposition.SparsePCA)
    self.assertIs(df.decomposition.MiniBatchSparsePCA, decomposition.MiniBatchSparsePCA)
    self.assertIs(df.decomposition.SparseCoder, decomposition.SparseCoder)
    self.assertIs(df.decomposition.DictionaryLearning, decomposition.DictionaryLearning)
    self.assertIs(df.decomposition.MiniBatchDictionaryLearning, decomposition.MiniBatchDictionaryLearning)
    self.assertIs(df.decomposition.LatentDirichletAllocation, decomposition.LatentDirichletAllocation)
Example #2
Source File: test_dict_learning.py From Mastering-Elasticsearch-7.0 with MIT License

def test_dict_learning_positivity(transform_algorithm, positive_code, positive_dict):
    n_components = 5
    dico = DictionaryLearning(
        n_components, transform_algorithm=transform_algorithm, random_state=0,
        positive_code=positive_code, positive_dict=positive_dict).fit(X)
    code = dico.transform(X)
    if positive_dict:
        assert (dico.components_ >= 0).all()
    else:
        assert (dico.components_ < 0).any()
    if positive_code:
        assert (code >= 0).all()
    else:
        assert (code < 0).any()
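The arguments transform_algorithm, positive_code and positive_dict, as well as the fixture X, are supplied by the surrounding test module via pytest parametrization. For reference, here is roughly how these positivity flags are used outside a test; the data and parameter values are made up, and the flags require a reasonably recent scikit-learn release and a compatible transform_algorithm such as 'lasso_lars'.

import numpy as np
from sklearn.decomposition import DictionaryLearning

rng = np.random.RandomState(0)
X = rng.randn(40, 8)  # illustrative data, not the fixture X used by the test

# constrain both the learned atoms and the sparse codes to be non-negative
dico = DictionaryLearning(n_components=5, transform_algorithm='lasso_lars',
                          positive_dict=True, positive_code=True,
                          random_state=0).fit(X)
code = dico.transform(X)
assert (dico.components_ >= 0).all()
assert (code >= 0).all()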
Example #3
Source File: test_ksvd.py From ksvd with Apache License 2.0

def test_size():
    np.random.seed(0)
    N = 50
    L = 12
    n_features = 16
    D = np.random.randn(L, n_features)
    B = np.array(sp.sparse.random(N, L, density=0.5).todense())
    X = np.dot(B, D)

    dico1 = ApproximateKSVD(n_components=L, transform_n_nonzero_coefs=L)
    dico1.fit(X)
    gamma1 = dico1.transform(X)
    e1 = norm(X - gamma1.dot(dico1.components_))

    dico2 = DictionaryLearning(n_components=L, transform_n_nonzero_coefs=L)
    dico2.fit(X)
    gamma2 = dico2.transform(X)
    e2 = norm(X - gamma2.dot(dico2.components_))

    assert dico1.components_.shape == dico2.components_.shape
    assert gamma1.shape == gamma2.shape
    assert e1 < e2
Example #4
Source File: LearnDictionary.py From Vessel3DDL with MIT License

def learn_dictionary(patches, n_c=512, a=1, n_i=100, n_j=3, es=5, fit_algorithm='lars'):
    dic = DictionaryLearning(n_components=n_c, alpha=a, max_iter=n_i, n_jobs=n_j,
                             fit_algorithm=fit_algorithm)
    print("Start learning dictionary: n_c: " + str(n_c) + ", alpha: " + str(a) +
          ", n_i: " + str(n_i) + ", es: " + str(es) + ", n_j: " + str(n_j))
    v2 = dic.fit(patches).components_
    d2 = v2.reshape(n_c, es, es, es)  # e.g. 512x5x5x5
    return d2
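A hypothetical call to learn_dictionary; the random patch array and the small parameter values are invented for illustration (the real project extracts 3-D patches from vessel volumes and uses n_c=512).

import numpy as np

# 200 fake patches with edge size es=5, flattened to 5*5*5 = 125 features each
patches = np.random.randn(200, 5 * 5 * 5)

# small n_c and n_i so the toy call finishes quickly
D = learn_dictionary(patches, n_c=8, a=1, n_i=10, n_j=1, es=5)
print(D.shape)  # (8, 5, 5, 5)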
Example #5
Source File: test_dict_learning.py From twitter-stock-recommendation with MIT License

def test_dict_learning_split():
    n_components = 5
    dico = DictionaryLearning(n_components, transform_algorithm='threshold',
                              random_state=0)
    code = dico.fit(X).transform(X)
    dico.split_sign = True
    split_code = dico.transform(X)

    assert_array_equal(split_code[:, :n_components] -
                       split_code[:, n_components:], code)
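A plain-NumPy illustration of what split_sign produces (the sample values are made up): the split code concatenates the positive part and the negated negative part of the original code, which is why the test's difference assertion holds.

import numpy as np

code = np.array([[1.5, -2.0, 0.0]])
split_code = np.hstack([np.maximum(code, 0), np.maximum(-code, 0)])
# split_code is [[1.5, 0.0, 0.0, 0.0, 2.0, 0.0]]
assert np.array_equal(split_code[:, :3] - split_code[:, 3:], code)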
Example #6
Source File: test_dict_learning.py From twitter-stock-recommendation with MIT License

def test_dict_learning_unknown_fit_algorithm():
    n_components = 5
    dico = DictionaryLearning(n_components, fit_algorithm='<unknown>')
    assert_raises(ValueError, dico.fit, X)
Example #7
Source File: test_dict_learning.py From twitter-stock-recommendation with MIT License

def test_dict_learning_lassocd_readonly_data():
    # TempMemmap and ignore_warnings come from scikit-learn's testing
    # utilities, imported in the original test module
    n_components = 12
    with TempMemmap(X) as X_read_only:
        dico = DictionaryLearning(n_components, transform_algorithm='lasso_cd',
                                  transform_alpha=0.001, random_state=0,
                                  n_jobs=-1)
        with ignore_warnings(category=ConvergenceWarning):
            code = dico.fit(X_read_only).transform(X_read_only)
        assert_array_almost_equal(np.dot(code, dico.components_), X_read_only,
                                  decimal=2)
Example #8
Source File: test_dict_learning.py From twitter-stock-recommendation with MIT License

def test_dict_learning_reconstruction_parallel():
    # regression test that parallel reconstruction works with n_jobs=-1
    n_components = 12
    dico = DictionaryLearning(n_components, transform_algorithm='omp',
                              transform_alpha=0.001, random_state=0, n_jobs=-1)
    code = dico.fit(X).transform(X)
    assert_array_almost_equal(np.dot(code, dico.components_), X)

    dico.set_params(transform_algorithm='lasso_lars')
    code = dico.transform(X)
    assert_array_almost_equal(np.dot(code, dico.components_), X, decimal=2)
Example #9
Source File: test_dict_learning.py From twitter-stock-recommendation with MIT License

def test_dict_learning_reconstruction():
    n_components = 12
    dico = DictionaryLearning(n_components, transform_algorithm='omp',
                              transform_alpha=0.001, random_state=0)
    code = dico.fit(X).transform(X)
    assert_array_almost_equal(np.dot(code, dico.components_), X)

    dico.set_params(transform_algorithm='lasso_lars')
    code = dico.transform(X)
    assert_array_almost_equal(np.dot(code, dico.components_), X, decimal=2)

    # used to test lars here too, but there's no guarantee the number of
    # nonzero atoms is right.
Example #10
Source File: test_dict_learning.py From twitter-stock-recommendation with MIT License

def test_dict_learning_overcomplete():
    n_components = 12
    dico = DictionaryLearning(n_components, random_state=0).fit(X)
    assert_true(dico.components_.shape == (n_components, n_features))
Example #11
Source File: test_dict_learning.py From twitter-stock-recommendation with MIT License

def test_dict_learning_shapes():
    n_components = 5
    dico = DictionaryLearning(n_components, random_state=0).fit(X)
    assert_equal(dico.components_.shape, (n_components, n_features))

    n_components = 1
    dico = DictionaryLearning(n_components, random_state=0).fit(X)
    assert_equal(dico.components_.shape, (n_components, n_features))
    assert_equal(dico.transform(X).shape, (X.shape[0], n_components))
Example #12
Source File: test_dict_learning.py From Mastering-Elasticsearch-7.0 with MIT License

def test_dict_learning_split():
    n_components = 5
    dico = DictionaryLearning(n_components, transform_algorithm='threshold',
                              random_state=0)
    code = dico.fit(X).transform(X)
    dico.split_sign = True
    split_code = dico.transform(X)

    assert_array_almost_equal(split_code[:, :n_components] -
                              split_code[:, n_components:], code)
Example #13
Source File: test_dict_learning.py From Mastering-Elasticsearch-7.0 with MIT License

def test_dict_learning_unknown_fit_algorithm():
    n_components = 5
    dico = DictionaryLearning(n_components, fit_algorithm='<unknown>')
    assert_raises(ValueError, dico.fit, X)
Example #14
Source File: test_dict_learning.py From Mastering-Elasticsearch-7.0 with MIT License

def test_dict_learning_nonzero_coefs():
    n_components = 4
    dico = DictionaryLearning(n_components, transform_algorithm='lars',
                              transform_n_nonzero_coefs=3, random_state=0)
    code = dico.fit(X).transform(X[np.newaxis, 1])
    assert len(np.flatnonzero(code)) == 3

    dico.set_params(transform_algorithm='omp')
    code = dico.transform(X[np.newaxis, 1])
    assert_equal(len(np.flatnonzero(code)), 3)
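A small standalone sketch of the same idea, with invented data: transform_n_nonzero_coefs bounds the number of non-zero entries that OMP (or LARS) places in each sample's code.

import numpy as np
from sklearn.decomposition import DictionaryLearning

rng = np.random.RandomState(0)
X = rng.randn(30, 10)  # illustrative data

dico = DictionaryLearning(n_components=6, transform_algorithm='omp',
                          transform_n_nonzero_coefs=3, random_state=0).fit(X)
code = dico.transform(X[:1])
print(np.flatnonzero(code).size)  # at most 3 non-zero coefficients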
Example #15
Source File: test_dict_learning.py From Mastering-Elasticsearch-7.0 with MIT License

def test_dict_learning_reconstruction_parallel():
    # regression test that parallel reconstruction works with n_jobs>1
    n_components = 12
    dico = DictionaryLearning(n_components, transform_algorithm='omp',
                              transform_alpha=0.001, random_state=0, n_jobs=4)
    code = dico.fit(X).transform(X)
    assert_array_almost_equal(np.dot(code, dico.components_), X)

    dico.set_params(transform_algorithm='lasso_lars')
    code = dico.transform(X)
    assert_array_almost_equal(np.dot(code, dico.components_), X, decimal=2)
Example #16
Source File: test_dict_learning.py From Mastering-Elasticsearch-7.0 with MIT License

def test_dict_learning_reconstruction():
    n_components = 12
    dico = DictionaryLearning(n_components, transform_algorithm='omp',
                              transform_alpha=0.001, random_state=0)
    code = dico.fit(X).transform(X)
    assert_array_almost_equal(np.dot(code, dico.components_), X)

    dico.set_params(transform_algorithm='lasso_lars')
    code = dico.transform(X)
    assert_array_almost_equal(np.dot(code, dico.components_), X, decimal=2)

    # used to test lars here too, but there's no guarantee the number of
    # nonzero atoms is right.
Example #17
Source File: test_dict_learning.py From Mastering-Elasticsearch-7.0 with MIT License

def test_dict_learning_overcomplete():
    n_components = 12
    dico = DictionaryLearning(n_components, random_state=0).fit(X)
    assert dico.components_.shape == (n_components, n_features)

# positive lars deprecated 0.22
Example #18
Source File: test_dict_learning.py From Mastering-Elasticsearch-7.0 with MIT License

def test_dict_learning_shapes():
    n_components = 5
    dico = DictionaryLearning(n_components, random_state=0).fit(X)
    assert_equal(dico.components_.shape, (n_components, n_features))

    n_components = 1
    dico = DictionaryLearning(n_components, random_state=0).fit(X)
    assert_equal(dico.components_.shape, (n_components, n_features))
    assert_equal(dico.transform(X).shape, (X.shape[0], n_components))