Python numpy.random.multivariate_normal() Examples
The following are 17 code examples of numpy.random.multivariate_normal(). You can vote up the ones you like or vote down the ones you don't like, and go to the original project or source file by following the links above each example. You may also want to check out all available functions/classes of the module numpy.random, or try the search function.
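Before the project examples, here is a minimal, self-contained sketch (not taken from any project below) of the basic call: a mean vector, a covariance matrix, and an optional number of samples. The concrete values are arbitrary.

import numpy as np

# Hypothetical 2-D distribution: a mean vector and a positive semi-definite covariance matrix.
mean = [0.0, 2.0]
cov = [[1.0, 0.3],
       [0.3, 2.0]]

# Draw 1000 samples; the result has shape (1000, 2), one row per sample.
samples = np.random.multivariate_normal(mean, cov, size=1000)

print(samples.shape)                  # (1000, 2)
print(samples.mean(axis=0))           # close to [0.0, 2.0]
print(np.cov(samples, rowvar=False))  # close to cov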
Example #1
Source File: 05-logistic_vs_perceptron.py From ml4se with GNU General Public License v2.0 | 6 votes |
def prepare_dataset(variance):
    n1 = 10
    n2 = 10
    mu1 = [7,7]
    mu2 = [-3,-3]
    cov1 = np.array([[variance,0],[0,variance]])
    cov2 = np.array([[variance,0],[0,variance]])
    df1 = DataFrame(multivariate_normal(mu1,cov1,n1),columns=['x','y'])
    df1['type'] = 1
    df2 = DataFrame(multivariate_normal(mu2,cov2,n2),columns=['x','y'])
    df2['type'] = 0
    df = pd.concat([df1,df2],ignore_index=True)
    df = df.reindex(np.random.permutation(df.index)).reset_index(drop=True)
    return df

# Logistic regression
Example #2
Source File: 05-roc_curve.py From ml4se with GNU General Public License v2.0 | 6 votes |
def prepare_dataset(variance):
    n1 = 80
    n2 = 200
    mu1 = [9,9]
    mu2 = [-3,-3]
    cov1 = np.array([[variance,0],[0,variance]])
    cov2 = np.array([[variance,0],[0,variance]])
    df1 = DataFrame(multivariate_normal(mu1,cov1,n1),columns=['x','y'])
    df1['type'] = 1
    df2 = DataFrame(multivariate_normal(mu2,cov2,n2),columns=['x','y'])
    df2['type'] = 0
    df = pd.concat([df1,df2],ignore_index=True)
    df = df.reindex(np.random.permutation(df.index)).reset_index()
    return df[['x','y','type']]

# Run logistic regression
Example #3
Source File: kde.py From Computable with MIT License | 6 votes |
def resample(self, size=None):
    """
    Randomly sample a dataset from the estimated pdf.

    Parameters
    ----------
    size : int, optional
        The number of samples to draw. If not provided, then the size is
        the same as the underlying dataset.

    Returns
    -------
    resample : (self.d, `size`) ndarray
        The sampled dataset.

    """
    if size is None:
        size = self.n

    norm = transpose(multivariate_normal(zeros((self.d,), float),
                     self.covariance, size=size))
    indices = randint(0, self.n, size=size)
    means = self.dataset[:, indices]

    return means + norm
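This resample implementation (the same routine appears again in Examples #4, #5 and #8, which vendor the same kde.py) draws smoothing noise with multivariate_normal from the KDE bandwidth covariance and adds it to randomly chosen data points. The following standalone sketch imitates that idea outside the class; the dataset and the Scott-style bandwidth factor are illustrative assumptions, not the library's exact defaults.

import numpy as np

np.random.seed(0)
dataset = np.random.multivariate_normal([0, 0], [[1, 0.6], [0.6, 1]], 500).T   # (d, n)

d, n = dataset.shape
factor = n ** (-1.0 / (d + 4))          # Scott-style bandwidth factor (assumption)
bw_cov = np.cov(dataset) * factor**2    # covariance of the smoothing noise

size = 100
noise = np.random.multivariate_normal(np.zeros(d), bw_cov, size=size).T        # (d, size)
indices = np.random.randint(0, n, size=size)    # pick original data points at random
resampled = dataset[:, indices] + noise         # smoothed bootstrap sample, shape (d, size)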
Example #4
Source File: kde.py From GraphicDesignPatternByPython with MIT License | 6 votes |
def resample(self, size=None):
    """
    Randomly sample a dataset from the estimated pdf.

    Parameters
    ----------
    size : int, optional
        The number of samples to draw. If not provided, then the size is
        the same as the underlying dataset.

    Returns
    -------
    resample : (self.d, `size`) ndarray
        The sampled dataset.

    """
    if size is None:
        size = self.n

    norm = transpose(multivariate_normal(zeros((self.d,), float),
                     self.covariance, size=size))
    indices = randint(0, self.n, size=size)
    means = self.dataset[:, indices]

    return means + norm
Example #5
Source File: kde.py From lambda-packs with MIT License | 6 votes |
def resample(self, size=None):
    """
    Randomly sample a dataset from the estimated pdf.

    Parameters
    ----------
    size : int, optional
        The number of samples to draw. If not provided, then the size is
        the same as the underlying dataset.

    Returns
    -------
    resample : (self.d, `size`) ndarray
        The sampled dataset.

    """
    if size is None:
        size = self.n

    norm = transpose(multivariate_normal(zeros((self.d,), float),
                     self.covariance, size=size))
    indices = randint(0, self.n, size=size)
    means = self.dataset[:, indices]

    return means + norm
Example #6
Source File: pw_normal.py From ruptures with BSD 2-Clause "Simplified" License | 6 votes |
def pw_normal(n_samples=200, n_bkps=3):
    """Return a 2D piecewise Gaussian signal and the associated changepoints.

    Args:
        n_samples (int, optional): signal length
        n_bkps (int, optional): number of change points

    Returns:
        tuple: signal of shape (n_samples, 2), list of breakpoints
    """
    # breakpoints
    bkps = draw_bkps(n_samples, n_bkps)
    # we create the signal
    signal = np.zeros((n_samples, 2), dtype=float)
    cov1 = np.array([[1, 0.9], [0.9, 1]])
    cov2 = np.array([[1, -0.9], [-0.9, 1]])
    for sub, cov in zip(np.split(signal, bkps), cycle((cov1, cov2))):
        n_sub, _ = sub.shape
        sub += rd.multivariate_normal([0, 0], cov, size=n_sub)
    return signal, bkps
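The same alternating-covariance construction, stripped of the ruptures helpers (draw_bkps, rd) and with hard-coded, purely illustrative breakpoints, can be sketched as follows; the per-segment correlation flips sign exactly as in pw_normal above.

import numpy as np
from itertools import cycle

n_samples = 200
bkps = [60, 120, 160]                        # hypothetical interior breakpoints
cov1 = np.array([[1, 0.9], [0.9, 1]])        # positively correlated regime
cov2 = np.array([[1, -0.9], [-0.9, 1]])      # negatively correlated regime

signal = np.zeros((n_samples, 2))
for sub, cov in zip(np.split(signal, bkps), cycle((cov1, cov2))):
    sub += np.random.multivariate_normal([0, 0], cov, size=sub.shape[0])

# The empirical correlation flips sign from one segment to the next.
for sub in np.split(signal, bkps):
    print(np.corrcoef(sub.T)[0, 1])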
Example #7
Source File: ensemble_kalman_filter.py From filterpy with MIT License | 6 votes |
def predict(self):
    """ Predict next position. """

    N = self.N
    for i, s in enumerate(self.sigmas):
        self.sigmas[i] = self.fx(s, self.dt)

    e = multivariate_normal(self._mean, self.Q, N)
    self.sigmas += e

    self.x = np.mean(self.sigmas, axis=0)
    self.P = outer_product_sum(self.sigmas - self.x) / (N - 1)

    # save prior
    self.x_prior = np.copy(self.x)
    self.P_prior = np.copy(self.P)
Example #8
Source File: kde.py From Splunking-Crime with GNU Affero General Public License v3.0 | 6 votes |
def resample(self, size=None):
    """
    Randomly sample a dataset from the estimated pdf.

    Parameters
    ----------
    size : int, optional
        The number of samples to draw. If not provided, then the size is
        the same as the underlying dataset.

    Returns
    -------
    resample : (self.d, `size`) ndarray
        The sampled dataset.

    """
    if size is None:
        size = self.n

    norm = transpose(multivariate_normal(zeros((self.d,), float),
                     self.covariance, size=size))
    indices = randint(0, self.n, size=size)
    means = self.dataset[:, indices]

    return means + norm
Example #9
Source File: ensemble_kalman_filter.py From filterpy with MIT License | 5 votes |
def initialize(self, x, P):
    """
    Initializes the filter with the specified mean and covariance. Only
    need to call this if you are using the filter to filter more than one
    set of data; this is called by __init__

    Parameters
    ----------
    x : np.array(dim_z)
        state mean

    P : np.array((dim_x, dim_x))
        covariance of the state
    """
    if x.ndim != 1:
        raise ValueError('x must be a 1D array')

    self.sigmas = multivariate_normal(mean=x, cov=P, size=self.N)
    self.x = x
    self.P = P

    # these will always be a copy of x,P after predict() is called
    self.x_prior = self.x.copy()
    self.P_prior = self.P.copy()

    # these will always be a copy of x,P after update() is called
    self.x_post = self.x.copy()
    self.P_post = self.P.copy()
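Both filterpy examples above use multivariate_normal to expand a single mean/covariance pair into an ensemble of N random vectors (process-noise samples in predict, the initial state ensemble in initialize). A minimal standalone check of that step, with made-up values for x, P and N, might look like this:

import numpy as np

x = np.array([0.0, 1.0])          # hypothetical state mean (dim_x = 2)
P = np.array([[1.0, 0.2],
              [0.2, 0.5]])        # hypothetical state covariance
N = 2000                          # ensemble size

sigmas = np.random.multivariate_normal(mean=x, cov=P, size=N)   # shape (N, dim_x)

# The ensemble's sample statistics should approximate x and P.
print(sigmas.mean(axis=0))
print(np.cov(sigmas, rowvar=False))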
Example #10
Source File: gaussian_mix.py From PyCV-time with MIT License | 5 votes |
def make_gaussians(cluster_n, img_size):
    points = []
    ref_distrs = []
    for i in xrange(cluster_n):
        mean = (0.1 + 0.8*random.rand(2)) * img_size
        a = (random.rand(2, 2)-0.5)*img_size*0.1
        cov = np.dot(a.T, a) + img_size*0.05*np.eye(2)
        n = 100 + random.randint(900)
        pts = random.multivariate_normal(mean, cov, n)
        points.append( pts )
        ref_distrs.append( (mean, cov) )
    points = np.float32( np.vstack(points) )
    return points, ref_distrs
Example #11
Source File: gaussian_mix.py From PyCV-time with MIT License | 5 votes |
def make_gaussians(cluster_n, img_size):
    points = []
    ref_distrs = []
    for i in xrange(cluster_n):
        mean = (0.1 + 0.8*random.rand(2)) * img_size
        a = (random.rand(2, 2)-0.5)*img_size*0.1
        cov = np.dot(a.T, a) + img_size*0.05*np.eye(2)
        n = 100 + random.randint(900)
        pts = random.multivariate_normal(mean, cov, n)
        points.append( pts )
        ref_distrs.append( (mean, cov) )
    points = np.float32( np.vstack(points) )
    return points, ref_distrs
Example #12
Source File: test_dissimilarity.py From flyingpigeon with Apache License 2.0 | 5 votes |
def test_mvnormal(self):
    """Compare the results to the figure 2 in the paper."""
    from numpy.random import normal, multivariate_normal
    n = 30000
    p = normal(0, 1, size=(n, 2))
    np.random.seed(1)
    q = multivariate_normal([.5, -.5], [[.5, .1], [.1, .3]], size=n)

    aaeq(dd.kldiv(p, q), 1.39, 1)
    aaeq(dd.kldiv(q, p), 0.62, 1)
Example #13
Source File: test_stats.py From filterpy with MIT License | 5 votes |
def do_plot_test():
    import matplotlib.pyplot as plt
    from numpy.random import multivariate_normal as mnormal
    from filterpy.stats import covariance_ellipse, plot_covariance

    p = np.array([[32, 15], [15., 40.]])

    x, y = mnormal(mean=(0, 0), cov=p, size=5000).T

    sd = 2
    a, w, h = covariance_ellipse(p, sd)
    print(np.degrees(a), w, h)

    count = 0
    color = []
    for i in range(len(x)):
        if _is_inside_ellipse(x[i], y[i], 0, 0, a, w, h):
            color.append('b')
            count += 1
        else:
            color.append('r')

    plt.scatter(x, y, alpha=0.2, c=color)
    plt.axis('equal')

    plot_covariance(mean=(0., 0.), cov=p, std=[1,2,3],
                    alpha=0.3, facecolor='none')

    print(count / len(x))
Example #14
Source File: 04-perceptron.py From ml4se with GNU General Public License v2.0 | 5 votes |
def prepare_dataset(variance):
    cov1 = np.array([[variance,0],[0,variance]])
    cov2 = np.array([[variance,0],[0,variance]])
    df1 = DataFrame(multivariate_normal(Mu1,cov1,N1),columns=['x','y'])
    df1['type'] = 1
    df2 = DataFrame(multivariate_normal(Mu2,cov2,N2),columns=['x','y'])
    df2['type'] = -1
    df = pd.concat([df1,df2],ignore_index=True)
    df = df.reindex(np.random.permutation(df.index)).reset_index(drop=True)
    return df

# Run the Perceptron algorithm (stochastic gradient descent)
Example #15
Source File: gaussian_mix.py From OpenCV-Python-Tutorial with MIT License | 5 votes |
def make_gaussians(cluster_n, img_size):
    points = []
    ref_distrs = []
    for i in xrange(cluster_n):
        mean = (0.1 + 0.8*random.rand(2)) * img_size
        a = (random.rand(2, 2)-0.5)*img_size*0.1
        cov = np.dot(a.T, a) + img_size*0.05*np.eye(2)
        n = 100 + random.randint(900)
        pts = random.multivariate_normal(mean, cov, n)
        points.append( pts )
        ref_distrs.append( (mean, cov) )
    points = np.float32( np.vstack(points) )
    return points, ref_distrs
Example #16
Source File: ensemble_kalman_filter.py From filterpy with MIT License | 4 votes |
def update(self, z, R=None):
    """
    Add a new measurement (z) to the kalman filter. If z is None, nothing
    is changed.

    Parameters
    ----------
    z : np.array
        measurement for this update.

    R : np.array, scalar, or None
        Optionally provide R to override the measurement noise for this
        one call, otherwise self.R will be used.
    """

    if z is None:
        self.z = array([[None]*self.dim_z]).T
        self.x_post = self.x.copy()
        self.P_post = self.P.copy()
        return

    if R is None:
        R = self.R
    if np.isscalar(R):
        R = eye(self.dim_z) * R

    N = self.N
    dim_z = len(z)
    sigmas_h = zeros((N, dim_z))

    # transform sigma points into measurement space
    for i in range(N):
        sigmas_h[i] = self.hx(self.sigmas[i])

    z_mean = np.mean(sigmas_h, axis=0)

    P_zz = (outer_product_sum(sigmas_h - z_mean) / (N-1)) + R
    P_xz = outer_product_sum(
        self.sigmas - self.x, sigmas_h - z_mean) / (N - 1)

    self.S = P_zz
    self.SI = self.inv(self.S)
    self.K = dot(P_xz, self.SI)

    e_r = multivariate_normal(self._mean_z, R, N)
    for i in range(N):
        self.sigmas[i] += dot(self.K, z + e_r[i] - sigmas_h[i])

    self.x = np.mean(self.sigmas, axis=0)
    self.P = self.P - dot(dot(self.K, self.S), self.K.T)

    # save measurement and posterior state
    self.z = deepcopy(z)
    self.x_post = self.x.copy()
    self.P_post = self.P.copy()
Example #17
Source File: fstpso.py From fst-pso with GNU Lesser General Public License v3.0 | 4 votes |
def _new_recombination2(self, X, trials=100):
    print(" * Trying to reboot...")
    from numpy import average, identity, cov, logspace
    from numpy.random import multivariate_normal
    from matplotlib.pyplot import scatter, show, xlim, ylim, subplots, legend
    #fig, ax = subplots(1,1, figsize=(5,5))

    best_solutions = self._get_best_solutions(int(self.numberofparticles/3))
    all_sols = []
    for sol in best_solutions:
        all_sols.append(sol.X)
    all_sols = array(all_sols).T
    #print (all_sols)

    com = [average( x, weights=logspace(0,-2,self.numberofparticles/3) ) for x in all_sols]
    cova = cov(all_sols)
    res = multivariate_normal(com, cova, trials)

    if False:
        scatter(all_sols[0], all_sols[1], label="all selected solutions")
        scatter(com[0], com[1], label="weighted average")
        scatter(res.T[0], res.T[1], alpha=0.5, s=10, label="new samples")
        scatter(all_sols[0][0], all_sols[1][0], label="best individual")
        xlim(-100,100)
        ylim(-100,100)
        legend()
        show() ; exit()

    for r in res:
        for d in range(len(r)):
            if r[d]>self.Boundaries[d][1]:
                r[d] = self.Boundaries[d][1]
            elif r[d]<self.Boundaries[d][0]:
                r[d] = self.Boundaries[d][0]

    allnewfit = [self.FITNESS(r) for r in res]
    best = argmin(allnewfit)

    self._overall_fitness_evaluations += trials

    if allnewfit[best]<X.CalculatedBestFitness:
        return list(res[best]), allnewfit[best]
    else:
        return X.X, X.CalculatedFitness