Python numpy.random.normal() Examples
The following are 30 code examples of numpy.random.normal(), collected from open-source projects. You can go to the original project or source file by following the link above each example.
You may also want to check out the other functions and classes available in the numpy.random module.
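Before the project examples, a minimal sketch of the call itself may be helpful. The values below are arbitrary and only illustrate the three most commonly used parameters (loc, scale, size):

import numpy as np

# Draw 1000 samples from a Gaussian with mean 5.0 and standard deviation 2.0.
samples = np.random.normal(loc=5.0, scale=2.0, size=1000)

# The sample statistics should be close to the requested parameters.
print(samples.mean(), samples.std())

# A 2-D array of standard-normal draws (loc=0.0 and scale=1.0 are the defaults).
noise = np.random.normal(size=(3, 4))
print(noise.shape)  # (3, 4)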
Example #1
Source File: test_metrics.py From python-dlpy with Apache License 2.0
def _create_regression_table(nrow, id_vars=None, seed=1234, true_label='target', pred_label='p_target'):
    nr.seed(seed)
    mean_value = nr.normal(loc=10, scale=2, size=(nrow, 1))
    error = nr.normal(loc=0, scale=1, size=(nrow, 1))
    true_value = mean_value + error
    regression_matrix = np.hstack((true_value, mean_value))
    regression_matrix = np.abs(regression_matrix)
    colnames = [true_label, pred_label]
    if id_vars is not None:
        if not isinstance(id_vars, list):
            id_vars = [id_vars]
        ncol = len(id_vars)
        id_matrix = _create_id_matrix(nrow, ncol)
        regression_matrix = np.hstack((regression_matrix, id_matrix))
        colnames = colnames + id_vars
    return pd.DataFrame(regression_matrix, columns=colnames)
Example #2
Source File: pw_linear.py From ruptures with BSD 2-Clause "Simplified" License
def pw_linear(n_samples=200, n_features=1, n_bkps=3, noise_std=None):
    """Return piecewise linear signal and the associated changepoints.

    Args:
        n_samples (int, optional): signal length
        n_features (int, optional): number of covariates
        n_bkps (int, optional): number of change points
        noise_std (float, optional): noise std. If None, no noise is added

    Returns:
        tuple: signal of shape (n_samples, n_features+1), list of breakpoints
    """
    covar = normal(size=(n_samples, n_features))
    linear_coeff, bkps = pw_constant(n_samples=n_samples,
                                     n_bkps=n_bkps,
                                     n_features=n_features,
                                     noise_std=None)
    var = np.sum(linear_coeff * covar, axis=1)
    if noise_std is not None:
        var += normal(scale=noise_std, size=var.shape)
    signal = np.c_[var, covar]
    return signal, bkps
Example #3
Source File: gaussian_fitting.py From FRETBursts with GNU General Public License v2.0
def test_gaussian2d_fit():
    mx0 = 0.1
    my0 = 0.9
    sigx0 = 0.4
    sigy0 = 0.25
    Size = 500

    sx = R.normal(size=Size, loc=mx0, scale=sigx0)
    sy = R.normal(size=Size, loc=my0, scale=sigy0)

    mux, sigmax, muy, sigmay = gaussian2d_fit(sx, sy)

    plot(sx, sy, 'o', alpha=0.2, mew=0)
    X, Y = np.mgrid[sx.min()-1:sx.max()+1:200j, sy.min()-1:sy.max()+1:200j]

    def gauss2d(X, Y, mx, my, sigx, sigy):
        return np.exp(-((X-mx)**2)/(2*sigx**2))*np.exp(-((Y-my)**2)/(2*sigy**2))

    contour(X, Y, gauss2d(X, Y, mux, muy, sigmax, sigmay))
    plot(mx0, my0, 'ok', mew=0, ms=10)
    plot(mux, muy, 'x', mew=2, ms=10, color='green')
Example #4
Source File: lstm-synthetic-wave-anomaly-detect.py From lstm-anomaly-detect with MIT License
def gen_wave():
    """ Generate a synthetic wave by adding up a few sine waves and some noise
    :return: the final wave
    """
    t = np.arange(0.0, 10.0, 0.01)
    wave1 = sin(2 * 2 * pi * t)
    noise = random.normal(0, 0.1, len(t))
    wave1 = wave1 + noise
    print("wave1", len(wave1))
    wave2 = sin(2 * pi * t)
    print("wave2", len(wave2))
    t_rider = arange(0.0, 0.5, 0.01)
    wave3 = sin(10 * pi * t_rider)
    print("wave3", len(wave3))
    insert = round(0.8 * len(t))
    wave1[insert:insert + 50] = wave1[insert:insert + 50] + wave3
    return wave1 + wave2
Example #5
Source File: gaussian_fitting.py From FRETBursts with GNU General Public License v2.0
def test_gaussian_fit():
    m0 = 0.1
    s0 = 0.4
    size = 500

    s = R.normal(size=size, loc=m0, scale=s0)
    #s = s[s<0.4]
    mu, sig = gaussian_fit(s)
    mu1, sig1 = S.norm.fit(s)
    mu2, sig2 = gaussian_fit_ml(s)

    print("ECDF ", mu, sig)
    print("ML ", mu1, sig1)
    print("ML (manual)", mu2, sig2)

    H = np.histogram(s, bins=20, density=True)
    h = H[0]
    bw = H[1][1] - H[1][0]
    #bins_c = H[1][:-1]+0.5*bw
    bar(H[1][:-1], H[0], bw, alpha=0.3)

    x = np.r_[s.min()-1:s.max()+1:200j]
    plot(x, normpdf(x, m0, s0), lw=2, color='grey')
    plot(x, normpdf(x, mu, sig), lw=2, color='r', alpha=0.5)
    plot(x, normpdf(x, mu1, sig1), lw=2, color='b', alpha=0.5)
Example #6
Source File: test_decomp.py From GraphicDesignPatternByPython with MIT License
def random_rot(dim):
    """Return a random rotation matrix, drawn from the Haar distribution
    (the only uniform distribution on SO(n)).
    The algorithm is described in the paper
    Stewart, G.W., 'The efficient generation of random orthogonal matrices
    with an application to condition estimators', SIAM Journal on Numerical
    Analysis, 17(3), pp. 403-409, 1980.
    For more information see
    http://en.wikipedia.org/wiki/Orthogonal_matrix#Randomization"""
    H = eye(dim)
    D = ones((dim,))
    for n in range(1, dim):
        x = normal(size=(dim-n+1,))
        D[n-1] = sign(x[0])
        x[0] -= D[n-1]*sqrt((x*x).sum())
        # Householder transformation
        Hx = eye(dim-n+1) - 2.*outer(x, x)/(x*x).sum()
        mat = eye(dim)
        mat[n-1:, n-1:] = Hx
        H = dot(H, mat)
    # Fix the last sign such that the determinant is 1
    D[-1] = -D.prod()
    H = (D*H.T).T
    return H
Example #7
Source File: test_decomp.py From Computable with MIT License
def random_rot(dim):
    """Return a random rotation matrix, drawn from the Haar distribution
    (the only uniform distribution on SO(n)).
    The algorithm is described in the paper
    Stewart, G.W., 'The efficient generation of random orthogonal matrices
    with an application to condition estimators', SIAM Journal on Numerical
    Analysis, 17(3), pp. 403-409, 1980.
    For more information see
    http://en.wikipedia.org/wiki/Orthogonal_matrix#Randomization"""
    H = eye(dim)
    D = ones((dim,))
    for n in range(1, dim):
        x = normal(size=(dim-n+1,))
        D[n-1] = sign(x[0])
        x[0] -= D[n-1]*sqrt((x*x).sum())
        # Householder transformation
        Hx = eye(dim-n+1) - 2.*outer(x, x)/(x*x).sum()
        mat = eye(dim)
        mat[n-1:, n-1:] = Hx
        H = dot(H, mat)
    # Fix the last sign such that the determinant is 1
    D[-1] = -D.prod()
    H = (D*H.T).T
    return H
Example #8
Source File: test.py From pregel with MIT License
def _test_guassian_comparison():
    '''
    Method to test the _compare_gaussians function
    '''
    size = 100
    dist1 = normal(loc=0, scale=1, size=size)
    dist2 = normal(loc=0.1, scale=0.9, size=size)
    assert ptest._compare_gaussians(dist1, dist2) == True, "The input distributions are similar."

    dist2 = normal(loc=5, scale=1, size=size)
    assert ptest._compare_gaussians(dist1, dist2) == False, "The input distributions are not similar."

    dist2 = normal(loc=5, scale=5, size=size)
    assert ptest._compare_gaussians(dist1, dist2) == False, "The input distributions are not similar."
Example #9
Source File: testing.py From M-LOOP with MIT License
def set_noise_function(self, proportional=0.0, absolute=0.0):
    '''
    Adds noise to the function, with the formula::

        c'(c,x) = c (1 + s_p p) + s_a a

    where s_i are gaussian random variables, p is the proportional noise
    factor, a is the absolute noise factor, and c is the cost before noise
    is added. The uncertainty is then::

        u = sqrt((cp)^2 + a^2)

    Keyword Args:
        proportional (Optional [float]): the proportional factor. Defaults to 0.
        absolute (Optional [float]): the absolute factor. Defaults to 0.
    '''
    self.noise_prop = proportional
    self.noise_abs = absolute
    self.noise_function = lambda p, c, u: (c*(1 + nr.normal()*self.noise_prop) + nr.normal()*self.noise_abs,
                                           np.sqrt((c*self.noise_prop)**2 + (self.noise_abs)**2))
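The noise model described in the docstring above is easy to recompute outside the M-LOOP class. The following sketch (variable names and values are mine, not from the project) applies c' = c(1 + s_p p) + s_a a to a single cost value and derives the reported uncertainty:

import numpy as np
import numpy.random as nr

cost = 2.5        # cost before noise (c)
prop = 0.05       # proportional noise factor (p)
absolute = 0.01   # absolute noise factor (a)

# c' = c * (1 + s_p * p) + s_a * a, with s_p and s_a standard-normal draws
noisy_cost = cost * (1 + nr.normal() * prop) + nr.normal() * absolute

# reported uncertainty: u = sqrt((c * p)**2 + a**2)
uncertainty = np.sqrt((cost * prop)**2 + absolute**2)
print(noisy_cost, uncertainty)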
Example #10
Source File: tree.py From tree with MIT License
def step(self):
    #self.r *= BRANCH_DIMINISH
    self.r = self.r - self.tree.branch_diminish
    angle = normal()*self.tree.branch_angle_max

    #da = (1.-1./((self.g+1)**SEARCH_ANGLE_EXP))*angle
    #da = ((1./(ONE + INIT_BRANCH - self.r))**SEARCH_ANGLE_EXP)*angle
    #da = (1.-1./(ONE + INIT_BRANCH - self.r)**SEARCH_ANGLE_EXP)*angle
    scale = self.tree.one+self.tree.root_r-self.r
    da = (1.+scale/self.tree.root_r)**self.tree.branch_angle_exp
    self.a += da*angle

    dx = cos(self.a)*self.tree.stepsize
    dy = sin(self.a)*self.tree.stepsize

    self.x += dx
    self.y += dy
    self.i += 1
Example #11
Source File: mock.py From klusta with BSD 3-Clause "New" or "Revised" License
def artificial_traces(n_samples, n_channels):
    # TODO: more realistic traces.
    return .25 * nr.normal(size=(n_samples, n_channels))
Example #12
Source File: test_matrix.py From PyXtal with MIT License
def random_matrix(width=1.0, unitary=False):
    mat = np.zeros([3,3])
    for x in range(3):
        for y in range(3):
            mat[x][y] = normal(scale=width)
    if unitary:
        new = mat / cbrt(det(mat))
        return new
    else:
        return mat
Example #13
Source File: test_dissimilarity.py From flyingpigeon with Apache License 2.0
def test_mvnormal(self):
    """Compare the results to the figure 2 in the paper."""
    from numpy.random import normal, multivariate_normal
    n = 30000
    p = normal(0, 1, size=(n, 2))
    np.random.seed(1)
    q = multivariate_normal([.5, -.5], [[.5, .1], [.1, .3]], size=n)

    aaeq(dd.kldiv(p, q), 1.39, 1)
    aaeq(dd.kldiv(q, p), 0.62, 1)
Example #14
Source File: test_triangulation.py From adaptive with BSD 3-Clause "New" or "Revised" License
def test_circumsphere():
    from adaptive.learner.triangulation import circumsphere, fast_norm
    from numpy import allclose
    from numpy.random import normal, uniform

    def generate_random_sphere_points(dim, radius=0):
        """https://math.stackexchange.com/a/1585996"""
        vec = [None] * (dim + 1)
        center = uniform(-100, 100, dim)
        radius = uniform(1.0, 100.0) if radius == 0 else radius
        for i in range(dim + 1):
            points = normal(0, size=dim)
            x = fast_norm(points)
            points = points / x * radius
            vec[i] = tuple(points + center)
        return radius, center, vec

    for dim in range(2, 10):
        radius, center, points = generate_random_sphere_points(dim)
        circ_center, circ_radius = circumsphere(points)
        err_msg = ""
        if not allclose(circ_center, center):
            err_msg += f"Calculated center ({circ_center}) differs from true center ({center})\n"
        if not allclose(radius, circ_radius):
            err_msg += (
                f"Calculated radius {circ_radius} differs from true radius {radius}\n"
            )
        if err_msg:
            raise AssertionError(err_msg)
Example #15
Source File: pw_wavy.py From ruptures with BSD 2-Clause "Simplified" License
def pw_wavy(n_samples=200, n_bkps=3, noise_std=None):
    """Return a 1D piecewise wavy signal and the associated changepoints.

    Args:
        n_samples (int, optional): signal length
        n_bkps (int, optional): number of changepoints
        noise_std (float, optional): noise std. If None, no noise is added

    Returns:
        tuple: signal of shape (n_samples, 1), list of breakpoints
    """
    # breakpoints
    bkps = draw_bkps(n_samples, n_bkps)
    # we create the signal
    f1 = np.array([0.075, 0.1])
    f2 = np.array([0.1, 0.125])
    freqs = np.zeros((n_samples, 2))
    for sub, val in zip(np.split(freqs, bkps[:-1]), cycle([f1, f2])):
        sub += val
    tt = np.arange(n_samples)

    # DeprecationWarning: Calling np.sum(generator) is deprecated
    # Use np.sum(np.from_iter(generator)) or the python sum builtin instead.
    signal = np.sum([np.sin(2 * np.pi * tt * f) for f in freqs.T], axis=0)

    if noise_std is not None:
        noise = normal(scale=noise_std, size=signal.shape)
        signal += noise

    return signal, bkps
Example #16
Source File: common.py From coffeegrindsize with MIT License
def setup_method(self, method):
    import matplotlib as mpl
    mpl.rcdefaults()

    self.mpl_ge_2_0_1 = plotting._compat._mpl_ge_2_0_1()
    self.mpl_ge_2_1_0 = plotting._compat._mpl_ge_2_1_0()
    self.mpl_ge_2_2_0 = plotting._compat._mpl_ge_2_2_0()
    self.mpl_ge_2_2_2 = plotting._compat._mpl_ge_2_2_2()
    self.mpl_ge_3_0_0 = plotting._compat._mpl_ge_3_0_0()

    self.bp_n_objects = 7
    self.polycollection_factor = 2
    self.default_figsize = (6.4, 4.8)
    self.default_tick_position = 'left'

    n = 100
    with tm.RNGContext(42):
        gender = np.random.choice(['Male', 'Female'], size=n)
        classroom = np.random.choice(['A', 'B', 'C'], size=n)

        self.hist_df = DataFrame({'gender': gender,
                                  'classroom': classroom,
                                  'height': random.normal(66, 4, size=n),
                                  'weight': random.normal(161, 32, size=n),
                                  'category': random.randint(4, size=n)})

    self.tdf = tm.makeTimeDataFrame()
    self.hexbin_df = DataFrame({"A": np.random.uniform(size=20),
                                "B": np.random.uniform(size=20),
                                "C": np.arange(20) + np.random.uniform(size=20)})
Example #17
Source File: algebraicconnectivity.py From qgisSpaceSyntaxToolkit with GNU General Public License v3.0
def _get_fiedler_func(method):
    """Return a function that solves the Fiedler eigenvalue problem."""
    match = _tracemin_method.match(method)
    if match:
        method = match.group(1)

        def find_fiedler(L, x, normalized, tol):
            q = 2 if method == 'pcg' else min(4, L.shape[0] - 1)
            X = asmatrix(normal(size=(q, L.shape[0]))).T
            sigma, X = _tracemin_fiedler(L, X, normalized, tol, method)
            return sigma[0], X[:, 0]
    elif method == 'lanczos' or method == 'lobpcg':
        def find_fiedler(L, x, normalized, tol):
            L = csc_matrix(L, dtype=float)
            n = L.shape[0]
            if normalized:
                D = spdiags(1. / sqrt(L.diagonal()), [0], n, n, format='csc')
                L = D * L * D
            if method == 'lanczos' or n < 10:
                # Avoid LOBPCG when n < 10 due to
                # https://github.com/scipy/scipy/issues/3592
                # https://github.com/scipy/scipy/pull/3594
                sigma, X = eigsh(L, 2, which='SM', tol=tol,
                                 return_eigenvectors=True)
                return sigma[1], X[:, 1]
            else:
                X = asarray(asmatrix(x).T)
                M = spdiags(1. / L.diagonal(), [0], n, n)
                Y = ones(n)
                if normalized:
                    Y /= D.diagonal()
                sigma, X = lobpcg(L, X, M=M, Y=asmatrix(Y).T, tol=tol,
                                  maxiter=n, largest=False)
                return sigma[0], X[:, 0]
    else:
        raise nx.NetworkXError("unknown method '%s'." % method)
    return find_fiedler
Example #18
Source File: test_dissimilarity.py From flyingpigeon with Apache License 2.0
def randn(mean, std, shape):
    """Return a random normal sample with exact mean and standard deviation."""
    r = np.random.randn(*shape)
    r1 = r / r.std(0, ddof=1) * np.array(std)
    return r1 - r1.mean(0) + np.array(mean)
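A quick standalone check, not part of the flyingpigeon test file, of why this helper yields a sample with exactly the requested statistics: scaling fixes the spread, and re-centering then fixes the mean without changing the spread.

import numpy as np

r = np.random.randn(200, 3)                                # shape (n, k)
r1 = r / r.std(0, ddof=1) * np.array([1.0, 2.0, 0.5])      # force the std per column
sample = r1 - r1.mean(0) + np.array([0.0, 10.0, -3.0])     # force the mean per column

print(sample.mean(0))          # [0, 10, -3] up to floating-point error
print(sample.std(0, ddof=1))   # [1, 2, 0.5] up to floating-point error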
Example #19
Source File: random.py From dybm with Apache License 2.0
def normal(loc=0.0, scale=1.0, size=None):
    return _to_gpu(r.normal(loc=loc, scale=scale, size=size))
Example #20
Source File: pw_constant.py From ruptures with BSD 2-Clause "Simplified" License
def pw_constant(n_samples=200, n_features=1, n_bkps=3, noise_std=None,
                delta=(1, 10)):
    """Return a piecewise constant signal and the associated changepoints.

    Args:
        n_samples (int): signal length
        n_features (int, optional): number of dimensions
        n_bkps (int, optional): number of changepoints
        noise_std (float, optional): noise std. If None, no noise is added
        delta (tuple, optional): (delta_min, delta_max) max and min jump values

    Returns:
        tuple: signal of shape (n_samples, n_features), list of breakpoints
    """
    # breakpoints
    bkps = draw_bkps(n_samples, n_bkps)
    # we create the signal
    signal = np.empty((n_samples, n_features), dtype=float)
    tt_ = np.arange(n_samples)
    delta_min, delta_max = delta
    # mean value
    center = np.zeros(n_features)
    for ind in np.split(tt_, bkps):
        if ind.size > 0:
            # jump value
            jump = rd.uniform(delta_min, delta_max, size=n_features)
            spin = rd.choice([-1, 1], n_features)
            center += jump * spin
            signal[ind] = center

    if noise_std is not None:
        noise = rd.normal(size=signal.shape) * noise_std
        signal = signal + noise

    return signal, bkps
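The ruptures helper draw_bkps used above is not shown on this page, so the sketch below imitates the same construction with plain NumPy only: a piecewise constant mean over hand-picked breakpoints, plus numpy.random.normal noise. The breakpoints and level values are made up for illustration.

import numpy as np

n_samples, noise_std = 200, 0.5
bkps = [60, 120, 160, 200]        # hand-picked breakpoints; the last one equals n_samples
levels = [0.0, 3.0, -2.0, 1.5]    # one mean value per segment

signal = np.empty(n_samples)
start = 0
for end, level in zip(bkps, levels):
    signal[start:end] = level
    start = end

# add Gaussian noise, as pw_constant does when noise_std is given
signal += np.random.normal(scale=noise_std, size=n_samples)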
Example #21
Source File: ray_source.py From pyoptools with GNU General Public License v3.0
def point_source_r(origin=(0., 0., 0.), direction=(0., 0., 0), span=pi/8,
                   num_rays=100, wavelength=0.58929, label=""):
    """Point source, with a random beam distribution

    This function creates a point source, where the rays are organized in a
    random grid.

    Parameters:

    *origin*
        Tuple with the coordinates of the central ray origin
    *direction*
        Tuple with the rotation of the beam around the XYZ axes.
    *span*
        Tuple angular size of the ray pencil.
    *num_rays*
        Number of rays used to create the beam
    *label*
        String used to identify the ray source
    """
    ret_val = []
    for n_ in range(num_rays):
        rx = normal(0, span)
        ry = normal(0, span)
        temp_ray = Ray(pos=(0, 0, 0), dir=(0, 0, 1), wavelength=wavelength,
                       label=label).ch_coord_sys_inv((0, 0, 0), (rx, ry, 0))
        ret_val.append(temp_ray.ch_coord_sys_inv(origin, direction))
    return ret_val
Example #22
Source File: base.py From gwin with GNU General Public License v3.0
def random_data(self):
    from numpy.random import normal
    from pycbc.types import FrequencySeries
    return FrequencySeries(normal(size=self.nsamp).astype(complex),
                           epoch=self.epoch, delta_f=self.delta_f)
Example #23
Source File: test_matrix.py From PyXtal with MIT License
def random_strain():
    a, b, c = 0, 0, 0
    while (a < 30*deg or b < 30*deg or c < 30*deg
           or a > 150*deg or b > 150*deg or c > 150*deg):
        mat = strain_matrix(normal(scale=0.1), normal(scale=0.1), normal(scale=0.1))
        a, b, c = alpha(mat), beta(mat), gamma(mat)
    return mat
Example #24
Source File: blas.py From pyculib with BSD 2-Clause "Simplified" License
def create_array(dtype, shape, slices=None, empty=False):
    """Create a test array of the given dtype and shape.
    If slices is given, the returned array aliases a bigger parent array
    using the specified start and step values. (The stop member is expected
    to be appropriate to yield the given length.)"""
    from numpy.random import normal, seed
    seed(1234)

    def total_size(s):
        # this function doesn't support slices whose members are 'None'
        return s.start + (s.stop - s.start)*np.abs(s.step)

    if not slices:
        a = np.empty(dtype=dtype, shape=shape)
    else:
        if type(shape) is not tuple:  # 1D
            pshape = total_size(slices)
        else:
            pshape = tuple([total_size(s) for s in slices])
        parent = np.empty(dtype=dtype, shape=pshape)
        a = parent[slices]
    if not empty:
        mult = np.array(1, dtype=dtype)
        a[:] = normal(0., 1., shape).astype(dtype) * mult
    return a
Example #25
Source File: pso.py From xfoil-optimization-toolbox with The Unlicense
def APSO(self, global_best, B, a):
    '''A simplified way of PSO, with no velocity, updating the particle
    in one step.
    http://arxiv.org/pdf/1203.6577.pdf
    Typically, a = 0.1L ~ 0.5L where L is the scale of each variable,
    while B = 0.1 ~ 0.7 is sufficient for most applications'''
    self.oldpts = copy(self.pts)
    self.oldspds = copy(self.spds)
    for i, pt in enumerate(self.pts):
        mu, sigma = 0, 1
        e = normal(mu, sigma)
        c = self.constraints[i]
        L = abs(c[1] - c[0])
        self.pts[i] = (1-B)*L*pt + B*L*global_best[i] + a*L*e
    self._boundpts()
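The accelerated-PSO update described in the docstring can also be written without the surrounding particle class. The sketch below performs a single update for one scalar variable; B, a, the positions, and the constraint interval are illustrative values, and np.clip stands in for the class's _boundpts bookkeeping.

import numpy as np
from numpy.random import normal

B, a = 0.5, 0.2                   # attraction and randomness weights
pt, global_best = 0.3, 0.8        # current position and best-known position
lo, hi = -1.0, 1.0                # constraint interval for this variable
L = abs(hi - lo)                  # scale of the variable

e = normal(0, 1)                  # gaussian perturbation
new_pt = (1 - B)*L*pt + B*L*global_best + a*L*e
new_pt = np.clip(new_pt, lo, hi)  # keep the particle inside its bounds
print(new_pt)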
Example #26
Source File: test.py From pregel with MIT License
def _test_distribution_comparison():
    '''
    Method to test the _compare_distributions function
    '''
    size = 100
    dist1 = normal(loc=0, scale=1, size=size)
    dist2 = normal(loc=0.1, scale=0.9, size=size)
    assert ptest._compare_distributions(dist1, dist2) == True, "The input distributions are similar."

    dist2 = uniform(low=-1, high=-1, size=size)
    assert ptest._compare_gaussians(dist1, dist2) == False, "The input distributions are not similar."
Example #27
Source File: test.py From pregel with MIT License
def _test_normality():
    '''
    Method to test the _check_normality function
    '''
    size = 100
    dist = normal(loc=0, scale=1, size=size)
    assert ptest._check_normality(dist) == True, "The input distribution is gaussian."

    dist = uniform(low=-1, high=1, size=size)
    assert ptest._check_normality(dist) == False, "The input distribution is not gaussian."
Example #28
Source File: utils.py From generalize-unseen-domains with MIT License
def sample_Z(m, n, mode='uniform'):
    if mode == 'uniform':
        return npr.uniform(-1., 1., size=[m, n])
    if mode == 'gaussian':
        return np.clip(npr.normal(0, 0.1, (m, n)), -1, 1)
Example #29
Source File: chapter-6.py From Hands-on-Data-Visualization-with-Bokeh with MIT License
def callback(attr, old, new):
    if select_widget.value == 'uniform distribution':
        function = random
    else:
        function = normal
    data_points.data = {'x': function(size=initial_points),
                        'y': function(size=initial_points)}
Example #30
Source File: read_simulator.py From rmats2sashimiplot with GNU General Public License v2.0
def sample_normal_frag_len(frag_mean, frag_variance):
    """
    Sample a fragment length from a rounded 'discretized' normal distribution.
    """
    frag_len = round(normal(frag_mean, sqrt(frag_variance)))
    return frag_len