Python sklearn.metrics.pairwise.polynomial_kernel() Examples
The following are 21 code examples of sklearn.metrics.pairwise.polynomial_kernel(). You can vote up the ones you like or vote down the ones you don't like, and go to the original project or source file by following the links above each example. You may also want to check out all available functions/classes of the module sklearn.metrics.pairwise, or try the search function.
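
Before the examples, here is a minimal usage sketch (not taken from any of the projects below; the data values are purely illustrative). It shows what polynomial_kernel computes: K(X, Y) = (gamma * <X, Y> + coef0) ** degree, with defaults degree=3, coef0=1, and gamma=1/n_features when gamma is None.

import numpy as np
from sklearn.metrics.pairwise import polynomial_kernel

# Two small illustrative data matrices: 4 and 3 samples with 2 features each.
X = np.array([[0.0, 1.0], [1.0, 0.0], [1.0, 1.0], [2.0, 0.5]])
Y = np.array([[1.0, 1.0], [0.5, 0.5], [2.0, 2.0]])

# K[i, j] = (gamma * <X[i], Y[j]> + coef0) ** degree
K = polynomial_kernel(X, Y, degree=2, gamma=0.5, coef0=1.0)
print(K.shape)  # (4, 3)

# The same Gram matrix computed by hand, for comparison.
K_manual = (0.5 * X @ Y.T + 1.0) ** 2
assert np.allclose(K, K_manual)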

Example #1
Source File: test_coreg.py From mvlearn with Apache License 2.0 | 6 votes |
def test_affinity_mat_poly(data):
    v1_data = data['fit_data'][0]
    distances = cdist(v1_data, v1_data)
    gamma = 1 / (2 * np.median(distances) ** 2)
    true_kernel = polynomial_kernel(v1_data, gamma=gamma)
    spectral = MultiviewCoRegSpectralClustering(random_state=RANDOM_STATE,
                                                affinity='poly')
    p_kernel = spectral._affinity_mat(v1_data)
    assert(p_kernel.shape[0] == data['n_fit'])
    assert(p_kernel.shape[1] == data['n_fit'])
    for ind1 in range(p_kernel.shape[0]):
        for ind2 in range(p_kernel.shape[1]):
            assert np.abs(true_kernel[ind1][ind2] - p_kernel[ind1][ind2]) < 0.000001
Example #2
Source File: test_spectral.py From mvlearn with Apache License 2.0 | 6 votes |
def test_affinity_mat_poly(data):
    v1_data = data['fit_data'][0]
    distances = cdist(v1_data, v1_data)
    gamma = 1 / (2 * np.median(distances) ** 2)
    true_kernel = polynomial_kernel(v1_data, gamma=gamma)
    spectral = MultiviewSpectralClustering(random_state=RANDOM_STATE,
                                           affinity='poly')
    p_kernel = spectral._affinity_mat(v1_data)
    assert(p_kernel.shape[0] == data['n_fit'])
    assert(p_kernel.shape[1] == data['n_fit'])
    for ind1 in range(p_kernel.shape[0]):
        for ind2 in range(p_kernel.shape[1]):
            assert np.abs(true_kernel[ind1][ind2] - p_kernel[ind1][ind2]) < 0.000001
Example #3
Source File: test_utils.py From numpy-ml with GNU General Public License v3.0 | 6 votes |
def test_polynomial_kernel(N=1):
    np.random.seed(12345)
    i = 0
    while i < N:
        N = np.random.randint(1, 100)
        M = np.random.randint(1, 100)
        C = np.random.randint(1, 1000)
        gamma = np.random.rand()
        d = np.random.randint(1, 5)
        c0 = np.random.rand()

        X = np.random.rand(N, C)
        Y = np.random.rand(M, C)

        mine = PolynomialKernel(gamma=gamma, d=d, c0=c0)(X, Y)
        gold = sk_poly(X, Y, gamma=gamma, degree=d, coef0=c0)

        np.testing.assert_almost_equal(mine, gold)
        print("PASSED")
        i += 1
Example #4
Source File: kernels.py From polylearn with BSD 2-Clause "Simplified" License | 6 votes |
def homogeneous_kernel(X, P, degree=2):
    """Convenience alias for homogeneous polynomial kernel between X and P::

        K_P(x, p) = <x, p> ^ degree

    Parameters
    ----------
    X : ndarray of shape (n_samples_1, n_features)

    P : ndarray of shape (n_samples_2, n_features)

    degree : int, default 2

    Returns
    -------
    Gram matrix : array of shape (n_samples_1, n_samples_2)
    """
    return polynomial_kernel(X, P, degree=degree, gamma=1, coef0=0)
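
As a quick sanity check of the identity stated in the docstring above (a sketch using only numpy and sklearn, independent of the polylearn helper): with gamma=1 and coef0=0, polynomial_kernel reduces to the homogeneous kernel <x, p> ** degree.

import numpy as np
from sklearn.metrics.pairwise import polynomial_kernel

rng = np.random.RandomState(0)
X = rng.rand(5, 3)
P = rng.rand(4, 3)

# With gamma=1 and coef0=0 the polynomial kernel is exactly <x, p> ** degree.
K = polynomial_kernel(X, P, degree=2, gamma=1, coef0=0)
assert np.allclose(K, (X @ P.T) ** 2)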
Example #5
Source File: komd.py From MKLpy with GNU General Public License v3.0 | 6 votes |
def __kernel_definition__(self):
    """Select the kernel function

    Returns
    -------
    kernel : a callable relative to selected kernel
    """
    if hasattr(self.kernel, '__call__'):
        return self.kernel
    if self.kernel == 'rbf' or self.kernel is None:
        return lambda X, Y: rbf_kernel(X, Y, self.rbf_gamma)
    if self.kernel == 'poly':
        return lambda X, Y: polynomial_kernel(X, Y, degree=self.degree,
                                              gamma=self.rbf_gamma,
                                              coef0=self.coef0)
    if self.kernel == 'linear':
        return lambda X, Y: linear_kernel(X, Y)
    if self.kernel == 'precomputed':
        return lambda X, Y: X
Example #6
Source File: query_labels.py From ALiPy with BSD 3-Clause "New" or "Revised" License | 5 votes |
def __init__(self, X, y, train_idx, **kwargs):
    # K: kernel matrix
    #
    X = np.asarray(X)[train_idx]
    y = np.asarray(y)[train_idx]
    self._train_idx = np.asarray(train_idx)

    self.y = np.array(y)
    self.lmbda = kwargs.pop('lambda', 1.)
    self.kernel = kwargs.pop('kernel', 'rbf')
    if self.kernel == 'rbf':
        self.K = rbf_kernel(X=X, Y=X, gamma=kwargs.pop('gamma', 1.))
    elif self.kernel == 'poly':
        self.K = polynomial_kernel(X=X, Y=X,
                                   coef0=kwargs.pop('coef0', 1),
                                   degree=kwargs.pop('degree', 3),
                                   gamma=kwargs.pop('gamma', 1.))
    elif self.kernel == 'linear':
        self.K = linear_kernel(X=X, Y=X)
    elif hasattr(self.kernel, '__call__'):
        self.K = self.kernel(X=np.array(X), Y=np.array(X))
    else:
        raise NotImplementedError
    if not isinstance(self.K, np.ndarray):
        raise TypeError('K should be an ndarray')
    if self.K.shape != (len(X), len(X)):
        raise ValueError(
            'kernel should have size (%d, %d)' % (len(X), len(X)))
    self.L = np.linalg.inv(self.K + self.lmbda * np.eye(len(X)))
Example #7
Source File: test_pairwise.py From twitter-stock-recommendation with MIT License | 5 votes |
def test_kernel_symmetry():
    # Valid kernels should be symmetric
    rng = np.random.RandomState(0)
    X = rng.random_sample((5, 4))
    for kernel in (linear_kernel, polynomial_kernel, rbf_kernel,
                   laplacian_kernel, sigmoid_kernel, cosine_similarity):
        K = kernel(X, X)
        assert_array_almost_equal(K, K.T, 15)
Example #8
Source File: mv_spectral.py From mvlearn with Apache License 2.0 | 5 votes |
def _affinity_mat(self, X):
    r'''
    Computes the affinity matrix based on the selected kernel type.

    Parameters
    ----------
    X : array-like, shape (n_samples, n_features)
        The data matrix from which we will compute the affinity matrix.

    Returns
    -------
    sims : array-like, shape (n_samples, n_samples)
        The resulting affinity kernel.
    '''

    sims = None

    # If gamma is None, then compute default gamma value for this view
    gamma = self.gamma
    if self.gamma is None:
        distances = cdist(X, X)
        gamma = 1 / (2 * np.median(distances) ** 2)

    # Produce the affinity matrix based on the selected kernel type
    if self.affinity == 'rbf':
        sims = rbf_kernel(X, gamma=gamma)
    elif self.affinity == 'nearest_neighbors':
        neighbor = NearestNeighbors(n_neighbors=self.n_neighbors)
        neighbor.fit(X)
        sims = neighbor.kneighbors_graph(X).toarray()
    else:
        sims = polynomial_kernel(X, gamma=gamma)

    return sims
Example #9
Source File: scale_variant.py From intro_ds with Apache License 2.0 | 5 votes |
def trainModel(data):
    """
    Use different kernel functions in the model
    """
    kernel = [polynomial_kernel, rbf_kernel]
    res = []
    for i in kernel:
        model = SVC(kernel=i, coef0=1)
        model.fit(data[["x1", "x2"]], data["y"])
        res.append({"name": i.__name__, "result": model})
    return res
Example #10
Source File: kernel.py From intro_ds with Apache License 2.0 | 5 votes |
def trainModel(data):
    """
    Use different kernel functions in the model
    """
    kernel = [linear_kernel, polynomial_kernel, rbf_kernel, laplacian_kernel]
    res = []
    for i in kernel:
        model = SVC(kernel=i, coef0=1)
        model.fit(data[["x1", "x2"]], data["y"])
        res.append({"name": i.__name__, "result": model})
    return res
Example #11
Source File: test_pairwise.py From twitter-stock-recommendation with MIT License | 5 votes |
def test_kernel_sparse():
    rng = np.random.RandomState(0)
    X = rng.random_sample((5, 4))
    X_sparse = csr_matrix(X)
    for kernel in (linear_kernel, polynomial_kernel, rbf_kernel,
                   laplacian_kernel, sigmoid_kernel, cosine_similarity):
        K = kernel(X, X)
        K2 = kernel(X_sparse, X_sparse)
        assert_array_almost_equal(K, K2)
Example #12
Source File: multi_label.py From ALiPy with BSD 3-Clause "New" or "Revised" License | 5 votes |
def __init__(self, X, y, **kwargs):
    # K: kernel matrix
    super(QueryMultiLabelQUIRE, self).__init__(X, y)
    self.lmbda = kwargs.pop('lambda', 1.)
    self.kernel = kwargs.pop('kernel', 'rbf')
    if self.kernel == 'rbf':
        self.K = rbf_kernel(X=X, Y=X, gamma=kwargs.pop('gamma', 1.))
    elif self.kernel == 'poly':
        self.K = polynomial_kernel(X=X, Y=X,
                                   coef0=kwargs.pop('coef0', 1),
                                   degree=kwargs.pop('degree', 3),
                                   gamma=kwargs.pop('gamma', 1.))
    elif self.kernel == 'linear':
        self.K = linear_kernel(X=X, Y=X)
    elif hasattr(self.kernel, '__call__'):
        self.K = self.kernel(X=np.array(X), Y=np.array(X))
    else:
        raise NotImplementedError
    if not isinstance(self.K, np.ndarray):
        raise TypeError('K should be an ndarray')
    if self.K.shape != (len(X), len(X)):
        raise ValueError(
            'Kernel should have size (%d, %d)' % (len(X), len(X)))
    self._nsamples, self._nclass = self.y.shape
    self.L = np.linalg.pinv(self.K + self.lmbda * np.eye(len(X)))
Example #13
Source File: pumil_mr.py From pywsl with MIT License | 5 votes |
def _ker(self, x):
    if self.basis == 'minimax':
        # minimax polynomial kernel (Andrews et al., NIPS2002)
        stat = lambda X: np.concatenate([X.max(axis=0), X.min(axis=0)])
        sx = np.array([stat(X) for X in x])
        sc = np.array([stat(X) for X in self._x_c])
        K = polynomial_kernel(sx, sc, degree=self.degree)
        return K
Example #14
Source File: test_kernel_approximation.py From Mastering-Elasticsearch-7.0 with MIT License | 5 votes |
def test_nystroem_poly_kernel_params():
    # Non-regression: Nystroem should pass other parameters beside gamma.
    rnd = np.random.RandomState(37)
    X = rnd.uniform(size=(10, 4))
    K = polynomial_kernel(X, degree=3.1, coef0=.1)
    nystroem = Nystroem(kernel="polynomial", n_components=X.shape[0],
                        degree=3.1, coef0=.1)
    X_transformed = nystroem.fit_transform(X)
    assert_array_almost_equal(np.dot(X_transformed, X_transformed.T), K)
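
For context (a hedged sketch, separate from the test above): with n_components equal to the number of samples, the Nystroem feature map reproduces the polynomial kernel exactly, which is what the assertion checks; with fewer components it only approximates the Gram matrix.

import numpy as np
from sklearn.kernel_approximation import Nystroem
from sklearn.metrics.pairwise import polynomial_kernel

rng = np.random.RandomState(0)
X = rng.uniform(size=(100, 4))

# Exact polynomial Gram matrix versus a rank-20 Nystroem approximation.
K_exact = polynomial_kernel(X, degree=3, gamma=1.0, coef0=1)
feature_map = Nystroem(kernel="polynomial", n_components=20,
                       degree=3, gamma=1.0, coef0=1, random_state=0)
X_mapped = feature_map.fit_transform(X)
K_approx = X_mapped @ X_mapped.T

# The approximation error shrinks as n_components grows.
print(np.max(np.abs(K_exact - K_approx)))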
Example #15
Source File: unit_tests.py From MKLpy with GNU General Public License v3.0 | 5 votes |
def test_numpy(self):
    Xtr = self.Xtr.numpy()
    self.assertTrue(matNear(
        pairwise_mk.polynomial_kernel(Xtr, degree=4, gamma=0.1, coef0=2),
        pairwise_sk.polynomial_kernel(Xtr, degree=4, gamma=0.1, coef0=2)))
    self.assertTrue(matNear(
        pairwise_mk.linear_kernel(Xtr),
        pairwise_sk.linear_kernel(Xtr)))
Example #16
Source File: unit_tests.py From MKLpy with GNU General Public License v3.0 | 5 votes |
def test_otype(self):
    self.assertEqual(type(pairwise_mk.linear_kernel(self.Xtr)), torch.Tensor)
    self.assertEqual(type(pairwise_mk.homogeneous_polynomial_kernel(self.Xtr)), torch.Tensor)
    self.assertEqual(type(pairwise_mk.polynomial_kernel(self.Xtr)), torch.Tensor)
    self.assertEqual(type(pairwise_mk.rbf_kernel(self.Xtr)), torch.Tensor)
Example #17
Source File: unit_tests.py From MKLpy with GNU General Public License v3.0 | 5 votes |
def test_HPK_test(self):
    Kte = self.Xte @ self.Xtr.T
    self.assertTrue(matNear(
        Kte,
        pairwise_mk.homogeneous_polynomial_kernel(self.Xte, self.Xtr, degree=1)))
    self.assertTrue(matNear(
        pairwise_mk.homogeneous_polynomial_kernel(self.Xte, self.Xtr, degree=4),
        pairwise_sk.polynomial_kernel(self.Xte, self.Xtr, degree=4, gamma=1, coef0=0)))
Example #18
Source File: unit_tests.py From MKLpy with GNU General Public License v3.0 | 5 votes |
def test_HPK_train(self):
    Ktr = self.Xtr @ self.Xtr.T
    self.assertTrue(matNear(Ktr, pairwise_sk.linear_kernel(self.Xtr)))
    self.assertTrue(matNear(
        pairwise_mk.homogeneous_polynomial_kernel(self.Xtr, degree=4),
        pairwise_sk.polynomial_kernel(self.Xtr, degree=4, gamma=1, coef0=0)))
    self.assertTrue(matNear(
        pairwise_mk.homogeneous_polynomial_kernel(self.Xtr, degree=5),
        pairwise_sk.polynomial_kernel(self.Xtr, degree=5, gamma=1, coef0=0)))
    self.assertTrue(matNear(
        Ktr**3,
        pairwise_sk.polynomial_kernel(self.Xtr, degree=3, gamma=1, coef0=0)))
    self.assertTrue(matNear(
        pairwise_mk.homogeneous_polynomial_kernel(self.Xtr, self.Xtr, degree=3),
        pairwise_sk.polynomial_kernel(self.Xtr, self.Xtr, degree=3, gamma=1, coef0=0)))
Example #19
Source File: test_kernel_approximation.py From twitter-stock-recommendation with MIT License | 4 votes |
def test_nystroem_poly_kernel_params():
    # Non-regression: Nystroem should pass other parameters beside gamma.
    rnd = np.random.RandomState(37)
    X = rnd.uniform(size=(10, 4))
    K = polynomial_kernel(X, degree=3.1, coef0=.1)
    nystroem = Nystroem(kernel="polynomial", n_components=X.shape[0],
                        degree=3.1, coef0=.1)
    X_transformed = nystroem.fit_transform(X)
    assert_array_almost_equal(np.dot(X_transformed, X_transformed.T), K)
Example #20
Source File: query_labels.py From ALiPy with BSD 3-Clause "New" or "Revised" License | 4 votes |
def __init__(self, X, y, mu=0.1, gamma=0.1, rho=1, lambda_init=0.1,
             lambda_pace=0.01, **kwargs):
    try:
        import cvxpy
        self._cvxpy = cvxpy
    except:
        raise ImportError("This method needs cvxpy to solve the QP problem. "
                          "Please refer to https://www.cvxpy.org/install/index.html "
                          "to install cvxpy manually before using.")
    # K: kernel matrix
    super(QueryInstanceSPAL, self).__init__(X, y)
    ul = unique_labels(self.y)
    if len(unique_labels(self.y)) != 2:
        warnings.warn("This query strategy is implemented for binary classification only.",
                      category=FunctionWarning)
    if len(ul) == 2 and {1, -1} != set(ul):
        y_temp = np.array(copy.deepcopy(self.y))
        y_temp[y_temp == ul[0]] = 1
        y_temp[y_temp == ul[1]] = -1
        self.y = y_temp

    self._mu = mu
    self._gamma = gamma
    self._rho = rho
    self._lambda_init = lambda_init
    self._lambda_pace = lambda_pace
    self._lambda = lambda_init

    # calc kernel
    self._kernel = kwargs.pop('kernel', 'rbf')
    if self._kernel == 'rbf':
        self._K = rbf_kernel(X=X, Y=X, gamma=kwargs.pop('gamma_ker', 1.))
    elif self._kernel == 'poly':
        self._K = polynomial_kernel(X=X, Y=X,
                                    coef0=kwargs.pop('coef0', 1),
                                    degree=kwargs.pop('degree', 3),
                                    gamma=kwargs.pop('gamma_ker', 1.))
    elif self._kernel == 'linear':
        self._K = linear_kernel(X=X, Y=X)
    elif hasattr(self._kernel, '__call__'):
        self._K = self._kernel(X=np.array(X), Y=np.array(X))
    else:
        raise NotImplementedError
    if not isinstance(self._K, np.ndarray):
        raise TypeError('K should be an ndarray')
    if self._K.shape != (len(X), len(X)):
        raise ValueError(
            'kernel should have size (%d, %d)' % (len(X), len(X)))
Example #21
Source File: query_labels.py From ALiPy with BSD 3-Clause "New" or "Revised" License | 4 votes |
def __init__(self, X, y, beta=1000, gamma=0.1, rho=1, **kwargs):
    try:
        import cvxpy
        self._cvxpy = cvxpy
    except:
        raise ImportError("This method needs cvxpy to solve the QP problem. "
                          "Please refer to https://www.cvxpy.org/install/index.html "
                          "to install cvxpy manually before using.")
    # K: kernel matrix
    super(QueryInstanceBMDR, self).__init__(X, y)
    ul = unique_labels(self.y)
    if len(ul) != 2:
        warnings.warn("This query strategy is implemented for binary classification only.",
                      category=FunctionWarning)
    if len(ul) == 2 and {1, -1} != set(ul):
        y_temp = np.array(copy.deepcopy(self.y))
        y_temp[y_temp == ul[0]] = 1
        y_temp[y_temp == ul[1]] = -1
        self.y = y_temp

    self._beta = beta
    self._gamma = gamma
    self._rho = rho

    # calc kernel
    self._kernel = kwargs.pop('kernel', 'rbf')
    if self._kernel == 'rbf':
        self._K = rbf_kernel(X=X, Y=X, gamma=kwargs.pop('gamma_ker', 1.))
    elif self._kernel == 'poly':
        self._K = polynomial_kernel(X=X, Y=X,
                                    coef0=kwargs.pop('coef0', 1),
                                    degree=kwargs.pop('degree', 3),
                                    gamma=kwargs.pop('gamma_ker', 1.))
    elif self._kernel == 'linear':
        self._K = linear_kernel(X=X, Y=X)
    elif hasattr(self._kernel, '__call__'):
        self._K = self._kernel(X=np.array(X), Y=np.array(X))
    else:
        raise NotImplementedError
    if not isinstance(self._K, np.ndarray):
        raise TypeError('K should be an ndarray')
    if self._K.shape != (len(X), len(X)):
        raise ValueError(
            'kernel should have size (%d, %d)' % (len(X), len(X)))