Python numpy.triu_indices_from() Examples
The following are 30 code examples of numpy.triu_indices_from(). You can vote up the ones you like or vote down the ones you don't like, and go to the original project or source file by following the links above each example. You may also want to check out all available functions/classes of the module numpy, or try the search function.
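For orientation before the examples: numpy.triu_indices_from(arr, k) returns the row and column indices of arr's upper triangle, so indexing arr with the result pulls out the upper-triangular values; k=1 skips the diagonal, which is the most common idiom below. A minimal sketch:

import numpy as np

a = np.arange(9).reshape(3, 3)
rows, cols = np.triu_indices_from(a)  # k=0: includes the diagonal
print(a[rows, cols])                  # [0 1 2 4 5 8]
iu = np.triu_indices_from(a, k=1)     # k=1: strict upper triangle only
print(a[iu])                          # [1 2 5]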
Example #1
Source File: resnet.py From DeepMind-alphafold-repl with BSD 3-Clause "New" or "Revised" License | 6 votes |
def predict(self, output_dir, model_path):
    x1d, x2d, name, size, iterator = self.build_input_test()
    preds, logits = self.resn(x1d, x2d)
    saver = tf.train.Saver()
    saver.restore(self.sess, model_path)
    self.sess.run(iterator.initializer,
                  feed_dict={self.input_tfrecord_files: self.dataset.get_chunks(RunMode.TEST)})
    while True:
        try:
            preds_, names_, sizes_ = self.sess.run([preds, name, size])
            for pred_, name_, size_ in zip(preds_, names_, sizes_):
                pred_ = pred_[:size_, :size_]
                # inds = np.triu_indices_from(pred_, k=1)
                # pred_[(inds[1], inds[0])] = pred_[inds]
                # pred_ = (pred_ + np.transpose(pred_)) / 2.0
                output_path = '{}/{}.concat'.format(output_dir, name_)
                np.savetxt(output_path, pred_)
        except tf.errors.OutOfRangeError:
            break
Example #2
Source File: transform.py From skl-groups with BSD 3-Clause "New" or "Revised" License | 6 votes |
def fit(self, X, y=None):
    '''
    If scale_by_median, find :attr:`median_`; otherwise, do nothing.

    Parameters
    ----------
    X : array
        The raw pairwise distances.
    '''
    X = check_array(X)
    if self.scale_by_median:
        self.median_ = np.median(X[np.triu_indices_from(X, k=1)],
                                 overwrite_input=True)
    elif hasattr(self, 'median_'):
        del self.median_
    return self
Example #3
Source File: test_transforms.py From skl-groups with BSD 3-Clause "New" or "Revised" License | 6 votes |
def test_rbfize():
    X = np.random.normal(size=(20, 4))
    dists = euclidean_distances(X)
    median = np.median(dists[np.triu_indices_from(dists, k=1)])

    rbf = RBFize(gamma=.25)
    res = rbf.fit_transform(dists)
    assert not hasattr(res, 'median_')
    assert np.allclose(res, np.exp(-.25 * dists ** 2))

    rbf = RBFize(gamma=.25, squared=True)
    res = rbf.fit_transform(dists)
    assert np.allclose(res, np.exp(-.25 * dists))

    rbf = RBFize(gamma=4, scale_by_median=True)
    res = rbf.fit_transform(dists)
    assert np.allclose(rbf.median_, median)
    assert np.allclose(res, np.exp((-4 * median**2) * dists ** 2))

    rbf = RBFize(gamma=4, scale_by_median=True, squared=True)
    res = rbf.fit_transform(dists)
    assert np.allclose(rbf.median_, median)
    assert np.allclose(res, np.exp((-4 * median) * dists))
Example #4
Source File: abide_utils.py From gcn_metric_learning with MIT License | 6 votes |
def get_net_vectors(subject_list, kind, atlas_name="aal"):
    """
    subject_list : the subject short IDs list
    kind : the kind of connectivity to be used, e.g. lasso, partial correlation, correlation
    atlas_name : name of the atlas used

    returns:
        matrix : matrix of connectivity vectors (num_subjects x num_connections)
    """
    # This is an alternative implementation
    networks = load_all_networks(subject_list, kind, atlas_name=atlas_name)
    # Get Fisher transformed matrices
    norm_networks = [np.arctanh(mat) for mat in networks]
    # Get upper diagonal indices
    idx = np.triu_indices_from(norm_networks[0], 1)
    # Get vectorised matrices
    vec_networks = [mat[idx] for mat in norm_networks]
    # Each subject should be a row of the matrix
    matrix = np.vstack(vec_networks)
    return matrix
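The pattern above (compute the indices once from the first matrix, then reuse them for every equally-shaped matrix) is worth isolating. A toy sketch with made-up symmetric 4x4 matrices standing in for real connectivity networks:

import numpy as np

rng = np.random.default_rng(0)
networks = []
for _ in range(3):                   # three toy "subjects"
    a = rng.random((4, 4))
    networks.append((a + a.T) / 2)   # symmetric stand-in for a network

idx = np.triu_indices_from(networks[0], 1)            # computed once, reused
features = np.vstack([mat[idx] for mat in networks])
print(features.shape)                # (3, 6): 4 * 3 / 2 unique pairs per subject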
Example #5
Source File: utils.py From brainiak with Apache License 2.0 | 6 votes |
def from_sym_2_tri(symm):
    """convert a 2D symmetric matrix to an upper triangular matrix in 1D format

    Parameters
    ----------
    symm : 2D array
        Symmetric matrix

    Returns
    -------
    tri: 1D array
        Contains elements of upper triangular matrix
    """
    inds = np.triu_indices_from(symm)
    tri = symm[inds]
    return tri
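A round-trip sketch, assuming the from_sym_2_tri defined above is in scope: pack a symmetric matrix into 1D, then scatter it back with the same indices and mirror the strictly-upper part:

import numpy as np

symm = np.array([[2.0, 0.5],
                 [0.5, 3.0]])
tri = from_sym_2_tri(symm)        # array([2. , 0.5, 3. ])

inds = np.triu_indices_from(symm)
back = np.zeros_like(symm)
back[inds] = tri                  # refill the upper triangle
back = back + np.triu(back, 1).T  # mirror the strictly-upper part
print(np.allclose(back, symm))    # True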
Example #6
Source File: mutations.py From msprime with GNU General Public License v3.0 | 6 votes |
def __init__(
    self, relative_rates, equilibrium_frequencies=None, root_distribution=None
):
    alleles = _ACGT_ALLELES
    assert len(relative_rates) == 6
    if equilibrium_frequencies is None:
        equilibrium_frequencies = [0.25, 0.25, 0.25, 0.25]
    if root_distribution is None:
        root_distribution = equilibrium_frequencies
    transition_matrix = np.zeros((4, 4))
    # relative_rates: [A->C, A->G, A->T, C->G, C->T, G->T]
    tri_upper = np.triu_indices_from(transition_matrix, k=1)
    transition_matrix[tri_upper] = relative_rates
    transition_matrix += transition_matrix.T
    transition_matrix *= equilibrium_frequencies
    row_sums = transition_matrix.sum(axis=1)
    transition_matrix = transition_matrix / max(row_sums)
    row_sums = transition_matrix.sum(axis=1, dtype="float64")
    np.fill_diagonal(transition_matrix, 1.0 - row_sums)
    super().__init__(alleles, root_distribution, transition_matrix)
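The tri_upper lines above are a common idiom for turning a flat list of pairwise rates into a symmetric matrix. A standalone sketch with made-up rates:

import numpy as np

# Hypothetical relative rates, ordered [A->C, A->G, A->T, C->G, C->T, G->T]:
rates = [1.0, 2.0, 1.0, 1.0, 2.0, 1.0]
m = np.zeros((4, 4))
m[np.triu_indices_from(m, k=1)] = rates  # fill the strict upper triangle row by row
m += m.T                                 # mirror to make the matrix symmetric
print(m)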
Example #7
Source File: hicFindTADs.py From HiCExplorer with GNU General Public License v3.0 | 6 votes |
def get_triangle(hic_matrix, cut, window_len, return_mean=False):
    """
    like get_cut_weight which is the 'diamond' representing the counts
    from a region -window_len to +window_len from the given bin position (cut):
    """
    if cut < 0 or cut > hic_matrix.matrix.shape[0]:
        return None

    left_idx, right_idx = get_idx_of_bins_at_given_distance(hic_matrix, cut, window_len)

    def remove_lower_triangle(matrix):
        """
        remove all values in the lower triangle of a matrix
        """
        return matrix[np.triu_indices_from(matrix)].A1

    edges_left = remove_lower_triangle(hic_matrix.matrix[left_idx:cut, :][:, left_idx:cut].todense())
    edges_right = remove_lower_triangle(hic_matrix.matrix[cut:right_idx, :][:, cut:right_idx].todense())

    # if cut > 1000:
    #     import ipdb; ipdb.set_trace()
    return np.concatenate([edges_left, edges_right])
Example #8
Source File: tangentspace.py From decoding-brain-challenge-2016 with BSD 3-Clause "New" or "Revised" License | 6 votes |
def tangent_space(covmats, Cref):
    """Project a set of covariance matrices in the tangent space according to
    the given reference point Cref

    :param covmats: Covariance matrices set, Ntrials X Nchannels X Nchannels
    :param Cref: The reference covariance matrix
    :returns: the Tangent space, a matrix of Ntrials X (Nchannels * (Nchannels + 1) / 2)
    """
    Nt, Ne, Ne = covmats.shape
    Cm12 = invsqrtm(Cref)
    idx = numpy.triu_indices_from(Cref)
    # Integer division: the original '/' yields a float shape and fails on Python 3.
    T = numpy.empty((Nt, Ne * (Ne + 1) // 2))
    coeffs = (numpy.sqrt(2) * numpy.triu(numpy.ones((Ne, Ne)), 1) +
              numpy.eye(Ne))[idx]
    for index in range(Nt):
        tmp = numpy.dot(numpy.dot(Cm12, covmats[index, :, :]), Cm12)
        tmp = logm(tmp)
        T[index, :] = numpy.multiply(coeffs, tmp[idx])
    return T
Example #9
Source File: tangentspace.py From decoding-brain-challenge-2016 with BSD 3-Clause "New" or "Revised" License | 6 votes |
def untangent_space(T, Cref):
    """Project a set of Tangent space vectors back to the manifold according to
    the given reference point Cref

    :param T: the Tangent space, a matrix of Ntrials X (Nchannels * (Nchannels + 1) / 2)
    :param Cref: The reference covariance matrix
    :returns: A set of Covariance matrix, Ntrials X Nchannels X Nchannels
    """
    Nt, Nd = T.shape
    Ne = int((numpy.sqrt(1 + 8 * Nd) - 1) / 2)
    C12 = sqrtm(Cref)

    idx = numpy.triu_indices_from(Cref)
    covmats = numpy.empty((Nt, Ne, Ne))
    covmats[:, idx[0], idx[1]] = T
    for i in range(Nt):
        covmats[i] = (numpy.diag(numpy.diag(covmats[i])) +
                      numpy.triu(covmats[i], 1) / numpy.sqrt(2) +
                      numpy.triu(covmats[i], 1).T / numpy.sqrt(2))
        covmats[i] = expm(covmats[i])
        covmats[i] = numpy.dot(numpy.dot(C12, covmats[i]), C12)
    return covmats
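The line Ne = int((numpy.sqrt(1 + 8 * Nd) - 1) / 2) inverts the triangle-number relation used throughout these examples: a vectorized upper triangle (diagonal included) of an Ne x Ne matrix has Nd = Ne * (Ne + 1) / 2 entries. A quick self-contained check:

import numpy as np

for Ne in range(2, 7):
    Nd = Ne * (Ne + 1) // 2
    assert int((np.sqrt(1 + 8 * Nd) - 1) / 2) == Ne
print("dimension recovery holds")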
Example #10
Source File: resnet.py From DeepFolding with BSD 3-Clause "New" or "Revised" License | 6 votes |
def predict(self, output_dir, model_path):
    x1d, x2d, name, size, iterator = self.build_input_test()
    preds, logits = self.resn(x1d, x2d)
    saver = tf.train.Saver()
    saver.restore(self.sess, model_path)
    self.sess.run(iterator.initializer,
                  feed_dict={self.input_tfrecord_files: self.dataset.get_chunks(RunMode.TEST)})
    while True:
        try:
            preds_, names_, sizes_ = self.sess.run([preds, name, size])
            for pred_, name_, size_ in zip(preds_, names_, sizes_):
                pred_ = pred_[:size_, :size_]
                # inds = np.triu_indices_from(pred_, k=1)
                # pred_[(inds[1], inds[0])] = pred_[inds]
                # pred_ = (pred_ + np.transpose(pred_)) / 2.0
                output_path = '{}/{}.concat'.format(output_dir, name_)
                np.savetxt(output_path, pred_)
        except tf.errors.OutOfRangeError:
            break
Example #11
Source File: coulomb_matrices.py From deepchem with MIT License | 6 votes |
def _featurize(self, mol):
    """
    Calculate Coulomb matrices for molecules. If extra randomized
    matrices are generated, they are treated as if they are features
    for additional conformers.

    Since Coulomb matrices are symmetric, only the (flattened) upper
    triangular portion is returned.

    Parameters
    ----------
    mol : RDKit Mol
        Molecule.
    """
    features = self.coulomb_matrix(mol)
    if self.upper_tri:
        features = [f[np.triu_indices_from(f)] for f in features]
    features = np.asarray(features)
    return features
Example #12
Source File: sdm.py From scedar with MIT License | 5 votes |
def num_correct_dist_mat(dmat, upper_bound=None):
    if (not isinstance(dmat, np.ndarray)
            or dmat.ndim != 2
            or dmat.shape[0] != dmat.shape[1]):
        # check distance matrix shape
        raise ValueError("dmat must be a 2D (n_samples, n_samples)"
                         " np array")

    try:
        # Distance matrix diag vals should be close to 0.
        np.testing.assert_allclose(dmat[np.diag_indices(dmat.shape[0])], 0,
                                   atol=1e-5)
    except AssertionError as e:
        warnings.warn("distance matrix might not be numerically "
                      "correct. diag vals "
                      "should be close to 0. {}".format(e))

    try:
        # distance matrix should be approximately symmetric
        np.testing.assert_allclose(dmat[np.triu_indices_from(dmat)],
                                   dmat.T[np.triu_indices_from(dmat)],
                                   rtol=0.001)
    except AssertionError as e:
        warnings.warn("distance matrix might not be numerically "
                      "correct. should be approximately "
                      "symmetric. {}".format(e))

    dmat[dmat < 0] = 0
    dmat[np.diag_indices(dmat.shape[0])] = 0
    if upper_bound is not None:
        upper_bound = float(upper_bound)
        dmat[dmat > upper_bound] = upper_bound

    dmat[np.triu_indices_from(dmat)] = dmat.T[np.triu_indices_from(dmat)]
    return dmat
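The final assignment above is a compact symmetrization trick: copying the transposed values onto the upper triangle forces dmat[i, j] == dmat[j, i]. A toy sketch with a slightly asymmetric matrix:

import numpy as np

d = np.array([[0.0, 1.0, 2.0],
              [1.1, 0.0, 3.0],
              [2.2, 3.3, 0.0]])  # made-up, not quite symmetric
d[np.triu_indices_from(d)] = d.T[np.triu_indices_from(d)]
print(np.allclose(d, d.T))       # True: lower triangle now mirrored upward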
Example #13
Source File: generator.py From e3fp with GNU Lesser General Public License v3.0 | 5 votes |
def generate_conformers(self, mol):
    """Generate conformers for a molecule.

    Parameters
    ----------
    mol : RDKit Mol
        Molecule.

    Returns
    -------
    RDKit Mol : copy of the input molecule with embedded conformers
    """
    # initial embedding
    mol = self.embed_molecule(mol)
    if not mol.GetNumConformers():
        msg = "No conformers generated for molecule"
        if mol.HasProp("_Name"):
            name = mol.GetProp("_Name")
            msg += ' "{}".'.format(name)
        else:
            msg += "."
        raise RuntimeError(msg)

    # minimization and filtering
    self.minimize_conformers(mol)
    mol, indices, energies, rmsds = self.filter_conformers(mol)

    if self.store_energies:
        add_conformer_energies_to_mol(mol, energies)

    if self.get_values is True:
        if self.sparse_rmsd:
            rmsds_mat = rmsds[np.triu_indices_from(rmsds, k=1)]
        else:
            rmsds_mat = rmsds
        return mol, (self.max_conformers, indices, energies, rmsds_mat)
    else:
        return mol
Example #14
Source File: utils.py From pymer4 with MIT License | 5 votes |
def upper(mat):
    """Return the upper triangle of a matrix. Useful for grabbing unique values
    from a symmetric matrix.

    Args:
        mat (np.ndarray): 2d numpy array

    Returns:
        np.array: 1d numpy array of values
    """
    idx = np.triu_indices_from(mat, k=1)
    return mat[idx]
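A usage sketch with a made-up correlation matrix, assuming the upper defined above is in scope:

import numpy as np

corr = np.array([[1.0, 0.3, 0.7],
                 [0.3, 1.0, 0.5],
                 [0.7, 0.5, 1.0]])
print(upper(corr))  # [0.3 0.7 0.5]: each pairwise value once, diagonal excluded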
Example #15
Source File: test_models.py From graspy with Apache License 2.0 | 5 votes |
def _test_score(estimator, p_mat, graph):
    np.random.seed(8888)
    graph = graph.copy()
    p_mat = p_mat.copy()
    estimator.fit(graph)
    estimator.p_mat_ = p_mat  # hack just for testing likelihood

    if is_symmetric(graph):
        inds = np.triu_indices_from(graph, k=1)
    else:
        xu, yu = np.triu_indices_from(graph, k=1)
        xl, yl = np.tril_indices_from(graph, k=-1)
        x = np.concatenate((xl, xu))
        y = np.concatenate((yl, yu))
        inds = (x, y)

    p_rav = p_mat[inds]
    g_rav = graph[inds]

    lik = np.zeros(g_rav.shape)
    c = 1 / p_mat.size
    for i, (g, p) in enumerate(zip(g_rav, p_rav)):
        if p < c:
            p = c
        if p > 1 - c:
            p = 1 - c
        if g == 1:
            lik[i] = p
        else:
            lik[i] = 1 - p

    # lik = np.reshape(lik_rav, p_mat.shape)
    lik[lik < 1e-10] = 1
    lik = np.log(lik)
    assert_allclose(lik, estimator.score_samples(graph))
    assert np.sum(lik) == estimator.score(graph)
Example #16
Source File: mmd_util.py From klcpd_code with BSD 3-Clause "New" or "Revised" License | 5 votes |
def median_heuristic(X, beta=0.5):
    max_n = min(30000, X.shape[0])
    D2 = euclidean_distances(X[:max_n], squared=True)
    med_sqdist = np.median(D2[np.triu_indices_from(D2, k=1)])
    beta_list = [beta**2, beta**1, 1, (1.0/beta)**1, (1.0/beta)**2]
    return [med_sqdist * b for b in beta_list]


# X_p_enc: batch_size x seq_len x hid_dim
# X_f_enc: batch_size x seq_len x hid_dim
# hid_dim could be either dataspace_dim or codespace_dim
# return: MMD2(X_p_enc[i,:,:], X_f_enc[i,:,:]) for i = 1:batch_size
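A self-contained sketch of what the median heuristic computes (the data here is made up): the median of the squared pairwise distances, where k=1 skips the zero diagonal and the upper triangle counts each unordered pair exactly once:

import numpy as np
from sklearn.metrics.pairwise import euclidean_distances

X = np.random.default_rng(1).normal(size=(100, 5))
D2 = euclidean_distances(X, squared=True)
med_sqdist = np.median(D2[np.triu_indices_from(D2, k=1)])
print(med_sqdist)  # typical squared distance scale, used to set kernel bandwidths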
Example #17
Source File: test_quantity_non_ufuncs.py From Carnets with BSD 3-Clause "New" or "Revised" License | 5 votes |
def test_triu_indices_from(self):
    self.check(np.triu_indices_from)
Example #18
Source File: median_heuristic.py From klcpd_code with BSD 3-Clause "New" or "Revised" License | 5 votes |
def median_heuristic(X, beta=0.5):
    max_n = min(30000, X.shape[0])
    D2 = euclidean_distances(X[:max_n], squared=True)
    med_sqdist = np.median(D2[np.triu_indices_from(D2, k=1)])
    beta_list = [beta**2, beta**1, 1, (1.0/beta)**1, (1.0/beta)**2]
    return [med_sqdist * b for b in beta_list]
Example #19
Source File: plot.py From toad with MIT License | 5 votes |
def corr_plot(frame, figure_size=(20, 15)):
    """plot for correlation

    Args:
        frame (DataFrame): frame to draw plot

    Returns:
        Axes
    """
    corr = frame.corr()

    # np.bool was removed in NumPy 1.24; the builtin bool behaves identically here.
    mask = np.zeros_like(corr, dtype=bool)
    mask[np.triu_indices_from(mask)] = True

    map_plot = tadpole.heatmap(
        corr,
        mask=mask,
        cmap=HEATMAP_CMAP,
        vmax=1,
        vmin=-1,
        center=0,
        square=True,
        cbar_kws={"shrink": .5},
        linewidths=.5,
        annot=True,
        fmt='.2f',
        figure_size=figure_size,
    )

    return map_plot
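For reference, the same masking idiom with plain seaborn (tadpole in the example above appears to wrap seaborn's heatmap; this sketch assumes only pandas, seaborn, and matplotlib): entries where the mask is True are hidden, leaving a non-redundant lower-triangle heatmap:

import numpy as np
import pandas as pd
import seaborn as sns
import matplotlib.pyplot as plt

df = pd.DataFrame(np.random.default_rng(0).normal(size=(50, 4)),
                  columns=list("abcd"))
corr = df.corr()
mask = np.zeros_like(corr, dtype=bool)   # True entries will be hidden
mask[np.triu_indices_from(mask)] = True  # hide the redundant upper triangle
sns.heatmap(corr, mask=mask, vmin=-1, vmax=1, center=0, annot=True, fmt='.2f')
plt.show()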
Example #20
Source File: mutinf.py From mdentropy with MIT License | 5 votes |
def _exec(self):
    M = np.zeros((self.labels.size, self.labels.size))

    with closing(Pool(processes=self.n_threads)) as pool:
        values = pool.map(self._partial_mutinf,
                          combinations(self.labels, 2))
        pool.terminate()

    idx = np.triu_indices_from(M)
    M[idx] = values

    return M + M.T - np.diag(M.diagonal())
Example #21
Source File: algebra.py From pyblp with MIT License | 5 votes |
def vech(x: Array) -> Array:
    """Ravel the lower triangle of a square matrix A in Fortran order to construct vech(A)."""
    return x.T[np.triu_indices_from(x)]
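Why x.T[np.triu_indices_from(x)] yields vech(A): the upper-triangle indices walk the transpose row by row, which visits the lower triangle of A column by column, i.e. in Fortran order. A small check:

import numpy as np

A = np.array([[1, 2, 3],
              [4, 5, 6],
              [7, 8, 9]])
# Lower triangle of A, column by column: 1, 4, 7 | 5, 8 | 9
print(A.T[np.triu_indices_from(A)])  # [1 4 7 5 8 9]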
Example #22
Source File: parameters.py From pyblp with MIT License | 5 votes |
def expand(self, theta_like: Array, nullify: bool = False) -> Tuple[Array, Array, Array, Array, Array]:
    """Recover matrices of the same size as parameter matrices from a vector of the same size as theta.
    By default, fill elements corresponding to fixed parameters with their fixed values. Always fill
    concentrated out parameters with nulls.
    """
    sigma_like = np.full_like(self.sigma, np.nan)
    pi_like = np.full_like(self.pi, np.nan)
    rho_like = np.full_like(self.rho, np.nan)
    beta_like = np.full_like(self.beta, np.nan)
    gamma_like = np.full_like(self.gamma, np.nan)
    items = [
        (SigmaParameter, sigma_like),
        (PiParameter, pi_like),
        (RhoParameter, rho_like),
        (BetaParameter, beta_like),
        (GammaParameter, gamma_like)
    ]

    # fill values of unfixed parameters
    for parameter, value in zip(self.unfixed, theta_like):
        for parameter_type, values in items:
            if isinstance(parameter, parameter_type):
                values[parameter.location] = value
                break

    # if they aren't null, fill values of fixed parameters
    if not nullify:
        sigma_like[np.triu_indices_from(sigma_like, 1)] = 0
        for parameter in self.fixed:
            for parameter_type, values in items:
                if isinstance(parameter, parameter_type):
                    values[parameter.location] = parameter.value
                    break

    return sigma_like, pi_like, rho_like, beta_like, gamma_like
Example #23
Source File: fractal_correlation.py From NeuroKit with MIT License | 5 votes |
def _fractal_correlation_Corr_Dim(embedded, r_vals, dist):
    """References
    -----------
    - `Corr_Dim <https://github.com/jcvasquezc/Corr_Dim>`_
    """
    ED = dist[np.triu_indices_from(dist, k=1)]
    Npairs = (len(embedded[1, :])) * ((len(embedded[1, :]) - 1))
    corr = np.zeros(len(r_vals))

    for i, r in enumerate(r_vals):
        N = np.where(((ED < r) & (ED > 0)))
        corr[i] = len(N[0]) / Npairs

    omit_pts = 1
    k1 = omit_pts
    k2 = len(r_vals) - omit_pts
    r_vals = r_vals[k1:k2]
    corr = corr[k1:k2]

    return r_vals, corr


# =============================================================================
# Utilities
# =============================================================================
Example #24
Source File: viz.py From focus with GNU General Public License v3.0 | 5 votes |
def heatmap(wcor):
    """
    Make a lower-triangle heatmap of the sample correlation structure for
    predicted expression.

    :param wcor: numpy.ndarray matrix of sample correlation structure for predicted expression

    :return: numpy.ndarray (RGB) formatted heatmap of correlation structure
    """
    mpl.rcParams["figure.figsize"] = [6.4, 6.4]
    fig = plt.figure()
    fig.subplots_adjust(bottom=0.20, left=0.28)

    # np.bool was removed in NumPy 1.24; the builtin bool behaves identically here.
    mask = np.zeros_like(wcor, dtype=bool)
    mask[np.triu_indices_from(mask)] = True

    ax = sns.heatmap(wcor, mask=mask, cmap="RdBu_r", square=True, linewidths=0,
                     cbar=False, xticklabels=False, yticklabels=False, ax=None,
                     vmin=-1, vmax=1)
    ax.margins(2)
    ax.set_aspect("equal", "box")
    fig.canvas.draw()

    # save image as numpy array (np.frombuffer replaces the deprecated np.fromstring)
    data = np.frombuffer(fig.canvas.tostring_rgb(), dtype=np.uint8)
    img = data.reshape(fig.canvas.get_width_height()[::-1] + (3,))

    # rotate heatmap to make upside-down triangle shape
    rows, cols, ch = img.shape
    M = cv2.getRotationMatrix2D((cols / 2, rows / 2), 45, 1)
    dst = cv2.warpAffine(img, M, (cols, rows), borderMode=cv2.BORDER_CONSTANT,
                         borderValue=(255, 255, 255))

    # trim extra whitespace
    crop_img = dst[int(dst.shape[0] / 2.5):int(dst.shape[0] / 1.1)]
    return crop_img
Example #25
Source File: _namedtensor_test.py From OpenFermion with Apache License 2.0 | 5 votes |
def test_tensor_iterator():
    a = np.arange(16).reshape((4, 4))
    test_tensor = Tensor(tensor=a)
    assert np.allclose(test_tensor.data, a)
    assert test_tensor.size == 16
    assert isinstance(test_tensor.basis, Bijection)

    a_triu = a[np.triu_indices_from(a)]
    a_tril = a[np.tril_indices_from(a)]

    counter = 0
    for val, idx in test_tensor.utri_iterator():
        assert val == a[tuple(idx)]
        assert val == a_triu[counter]
        counter += 1
    assert counter == 4 * (4 + 1) / 2

    counter = 0
    for val, idx in test_tensor.ltri_iterator():
        assert val == a[tuple(idx)]
        assert val == a_tril[counter]
        counter += 1
    assert counter == 4 * (4 + 1) / 2

    counter = 0
    for val, idx in test_tensor.all_iterator():
        assert val == a[tuple(idx)]
        counter += 1

    assert np.allclose(test_tensor.vectorize(), a.reshape((-1, 1), order='C'))

    with pytest.raises(TypeError):
        list(test_tensor._iterator('blah'))
Example #26
Source File: scattergram.py From cmapPy with BSD 3-Clause "New" or "Revised" License | 5 votes |
def _adjust_axes(g, font_properties={}):
    for i, j in zip(*np.triu_indices_from(g.axes, 1)):
        g.axes[i, j].set_visible(False)

    for i in range(g.axes.shape[0]):
        for j in range(g.axes.shape[1]):
            ax = g.axes[i, j]
            if i > j:
                ax.set_zorder(100)
                ax.set_xlim(-0.1, 1.1)
                ax.set_ylim(-0.1, 1.1)
                ax.set_ylabel('')
                ax.set_xlabel('')
                frame_line_width = 2
                _set_axis_thickness(ax, frame_line_width)
                ax.xaxis.set_tick_params(width=frame_line_width)
                ax.yaxis.set_tick_params(width=frame_line_width)
                _set_ticks_fontproperties(ax, font_properties)

    for i in range(g.axes.shape[0]):
        ax = g.axes[i, i]
        ax.set_ylim(-0.1, 1.1)
        ax.set_xlim(-0.1, 1.1)
        ax.set_ylabel('')
        ax.set_xlabel('')
        ax.set_yticks([0, 0.5, 1])
        ax.set_xticks([0, 0.5, 1])
        _set_axis_thickness(ax, 1)
        _set_axis_style(ax, '--')
        _set_ticks_fontproperties(ax, font_properties)
Example #27
Source File: gromov_hausdorff.py From persim with MIT License | 5 votes |
def find_largest_size_bounded_curvature(DX, diam_X, d):
    """
    Find a largest-size d-bounded curvature of metric space X
    induced by simple unweighted graph.

    Parameters
    ----------
    DX: np.array (|X|×|X|)
        (Integer) distance matrix of X.
    diam_X: int
        Largest distance in X.

    Returns
    --------
    K: np.array (n×n)
        d-bounded curvature of X of largest size; n ≤ |X|.
    """
    # Initialize curvature K with the distance matrix of X.
    K = DX
    while np.any(K[np.triu_indices_from(K, 1)] < d):
        # Pick a row (and column) with highest number of off-diagonal
        # distances < d, then with smallest sum of off-diagonal
        # distances ≥ d.
        K_rows_sortkeys = -np.sum(K < d, axis=0) * (len(K) * diam_X) + \
            np.sum(np.ma.masked_less(K, d), axis=0).data
        row_to_remove = np.argmin(K_rows_sortkeys)
        # Remove the row and column from K.
        K = np.delete(K, row_to_remove, axis=0)
        K = np.delete(K, row_to_remove, axis=1)

    return K
Example #28
Source File: standard.py From CatLearn with GNU General Public License v3.0 | 5 votes |
def bag_edges(self, atoms):
    """Returns the bag of connections, defined as counting connections
    between types of element pairs. We define the bag as a vector, e.g.
    return [Number of C-H connections, # C-C, # C-O, ..., # M-X]

    Parameters
    ----------
    atoms : object

    Returns
    ----------
    features : list
    """
    # range of element types
    n_elements = len(self.atom_types)

    if atoms is None:
        symbols = np.array([chemical_symbols[z] for z in self.atom_types])
        rows, cols = np.meshgrid(symbols, symbols)
        pairs = np.core.defchararray.add(rows, cols)
        labels = ['bag_' + c for c in pairs[np.triu_indices_from(pairs)]]
        return labels
    else:
        # empty bag of bond types.
        boc = np.zeros([n_elements, n_elements])

        natoms = len(atoms)
        cm = np.array(atoms.connectivity)
        np.fill_diagonal(cm, 0)

        bonds = np.where(np.ravel(np.triu(cm)) > 0)[0]
        for b in bonds:
            # Get bonded atomic numbers.
            z_row, z_col = np.unravel_index(b, [natoms, natoms])
            bond_index = sorted((atoms.numbers[z_row],
                                 atoms.numbers[z_col]))
            bond_type = tuple((self.atom_types.index(bond_index[0]),
                               self.atom_types.index(bond_index[1])))
            # Count bonds in upper triangle.
            boc[bond_type] += 1

        return boc[np.triu_indices_from(boc)].tolist()
Example #29
Source File: ABIDEParser.py From population-gcn with GNU General Public License v3.0 | 5 votes |
def get_networks(subject_list, kind, atlas_name="aal", variable='connectivity'):
    """
    subject_list : list of subject IDs
    kind : the kind of connectivity to be used, e.g. lasso, partial correlation, correlation
    atlas_name : name of the parcellation atlas used
    variable : variable name in the .mat file that has been used to save the precomputed networks

    return:
        matrix : feature matrix of connectivity networks (num_subjects x network_size)
    """
    all_networks = []
    for subject in subject_list:
        fl = os.path.join(data_folder, subject,
                          subject + "_" + atlas_name + "_" + kind + ".mat")
        matrix = sio.loadmat(fl)[variable]
        all_networks.append(matrix)
    # all_networks = np.array(all_networks)

    idx = np.triu_indices_from(all_networks[0], 1)
    norm_networks = [np.arctanh(mat) for mat in all_networks]
    vec_networks = [mat[idx] for mat in norm_networks]
    matrix = np.vstack(vec_networks)

    return matrix


# Construct the adjacency matrix of the population from phenotypic scores
Example #30
Source File: test_system_funcs.py From harold with MIT License | 5 votes |
def test_simple_hessenberg_trafo():
    # Made up discrete time TF
    G = Transfer([1., -8., 28., -58., 67., -30.],
                 poly([1, 2, 3., 2, 3., 4, 1 + 1j, 1 - 1j]),
                 dt=0.1)
    H, _ = hessenberg_realization(G, compute_T=1, form='c', invert=1)
    assert_(not np.any(H.a[triu_indices_from(H.a, k=2)]))
    assert_(not np.any(H.b[:-1, 0]))
    H = hessenberg_realization(G, form='o', invert=1)
    assert_(not np.any(H.c[0, :-1]))
    assert_(not np.any(H.a.T[triu_indices_from(H.a, k=2)]))