Python numpy.arrays() Examples
The following are 30 code examples of numpy.arrays(). You can vote up the ones you like or vote down the ones you don't like, and go to the original project or source file by following the links above each example. You may also want to check out all available functions/classes of the module numpy, or try the search function.
Example #1
Source File: utils.py From btgym with GNU Lesser General Public License v3.0 | 6 votes |
def as_array(struct):
    """
    Given a dictionary of lists or tuples, returns a dictionary of np.arrays of the same structure.

    Args:
        struct: dictionary of lists, tuples etc.

    Returns:
        dict of np.arrays
    """
    if isinstance(struct, dict):
        out = {}
        for key, value in struct.items():
            out[key] = as_array(value)
        return out
    else:
        return np.asarray(struct)
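For context, a minimal usage sketch (not part of btgym; it assumes numpy is imported as np and the as_array function above is in scope) showing how nested dictionaries come back with the same structure:

    import numpy as np

    # hypothetical rollout fragment: nested dict of plain Python lists/tuples
    batch = {"reward": (0.5, -0.1, 1.0), "state": {"price": [1.0, 2.0], "volume": [10, 20]}}
    out = as_array(batch)               # as_array() as defined in the example above
    print(type(out["reward"]))          # <class 'numpy.ndarray'>
    print(out["state"]["price"].dtype)  # float64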
Example #2
Source File: otf_parser.py From flare with MIT License | 6 votes |
def parse_frame_line(frame_line):
    """parse a line in otf output.

    :param frame_line: frame line to be parsed
    :type frame_line: string
    :return: species, position, force, uncertainty, and velocity of atom
    :rtype: list, np.arrays
    """

    frame_line = frame_line.split()

    spec = str(frame_line[0])
    position = np.array([float(n) for n in frame_line[1:4]])
    force = np.array([float(n) for n in frame_line[4:7]])
    uncertainty = np.array([float(n) for n in frame_line[7:10]])
    velocity = np.array([float(n) for n in frame_line[10:13]])

    return spec, position, force, uncertainty, velocity
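A quick, hypothetical check of parse_frame_line (assuming numpy is imported as np and the function above is in scope); the line is a hand-written 13-field string with the species followed by four groups of three floats:

    import numpy as np

    # hypothetical OTF frame line: species, position(3), force(3), uncertainty(3), velocity(3)
    line = "H 0.0 0.0 0.74 0.10 0.00 -0.10 0.01 0.01 0.02 1.0 0.0 0.0"
    spec, position, force, uncertainty, velocity = parse_frame_line(line)
    print(spec)            # H
    print(position)        # [0.   0.   0.74]
    print(velocity.shape)  # (3,)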
Example #3
Source File: ogb.py From spektral with MIT License | 6 votes |
def graph_to_numpy(graph, dtype=None):
    """
    Converts a graph in OGB's library-agnostic format to a representation in
    Numpy/Scipy. See the [Open Graph Benchmark's website](https://ogb.stanford.edu)
    for more information.
    :param graph: OGB library-agnostic graph;
    :param dtype: if set, all output arrays will be cast to this dtype.
    :return:
        - X: np.array of shape (N, F) with the node features;
        - A: scipy.sparse adjacency matrix of shape (N, N) in COOrdinate format;
        - E: if edge features are available, np.array of shape (n_edges, S),
             `None` otherwise.
    """
    N = graph['num_nodes']
    X = graph['node_feat'].astype(dtype)
    row, col = graph['edge_index']
    A = sp.coo_matrix((np.ones_like(row), (row, col)), shape=(N, N)).astype(dtype)
    E = graph['edge_feat'].astype(dtype)

    return X, A, E
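A toy invocation with hand-made data (not an actual OGB dataset) illustrating the expected keys and output shapes; it assumes numpy as np, scipy.sparse as sp, and graph_to_numpy from the example above:

    import numpy as np
    import scipy.sparse as sp

    # hypothetical 3-node, 2-edge graph in OGB's dict format
    graph = {
        'num_nodes': 3,
        'node_feat': np.array([[1., 0.], [0., 1.], [1., 1.]]),  # (N, F)
        'edge_index': np.array([[0, 1], [1, 2]]),               # (2, n_edges): sources, targets
        'edge_feat': np.array([[0.5], [0.7]]),                  # (n_edges, S)
    }
    X, A, E = graph_to_numpy(graph, dtype=np.float32)
    print(X.shape, A.shape, E.shape)  # (3, 2) (3, 3) (2, 1)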
Example #4
Source File: triinterpolate.py From neural-network-animation with MIT License | 6 votes |
def get_function_hessians(self, alpha, J, ecc, dofs):
    """
    Parameters
    ----------
    *alpha* is a (N x 3 x 1) array (array of column-matrices) of
    barycentric coordinates
    *J* is a (N x 2 x 2) array of jacobian matrices (jacobian matrix at
    triangle first apex)
    *ecc* is a (N x 3 x 1) array (array of column-matrices) of triangle
    eccentricities
    *dofs* is a (N x 1 x 9) arrays (arrays of row-matrices) of computed
    degrees of freedom.

    Returns
    -------
    Returns the values of interpolated function 2nd-derivatives
    [d2z/dx2, d2z/dy2, d2z/dxdy] in global coordinates at locations alpha,
    as a column-matrices of shape (N x 3 x 1).
    """
    d2sdksi2 = self.get_d2Sidksij2(alpha, ecc)
    d2fdksi2 = _prod_vectorized(dofs, d2sdksi2)
    H_rot = self.get_Hrot_from_J(J)
    d2fdx2 = _prod_vectorized(d2fdksi2, H_rot)
    return _transpose_vectorized(d2fdx2)
Example #5
Source File: automobile_data.py From yolo_v2 with Apache License 2.0 | 6 votes |
def make_dataset(x, y=None):
    """Create a slice Dataset from a pandas DataFrame and labels"""
    # TODO(markdaooust): simplify this after the 1.4 cut.
    # Convert the DataFrame to a dict
    x = dict(x)

    # Convert the pd.Series to np.arrays
    for key in x:
        x[key] = np.array(x[key])

    items = [x]
    if y is not None:
        items.append(np.array(y, dtype=np.float32))

    # Create a Dataset of slices
    return tf.data.Dataset.from_tensor_slices(tuple(items))
Example #6
Source File: test_testing.py From vnpy_crypto with MIT License | 6 votes |
def test_numpy_array_equal_unicode_message(self):
    # Test ensures that `assert_numpy_array_equals` raises the right
    # exception when comparing np.arrays containing differing
    # unicode objects (#20503)

    expected = """numpy array are different

numpy array values are different \\(33\\.33333 %\\)
\\[left\\]:  \\[á, à, ä\\]
\\[right\\]: \\[á, à, å\\]"""

    with tm.assert_raises_regex(AssertionError, expected):
        assert_numpy_array_equal(np.array([u'á', u'à', u'ä']),
                                 np.array([u'á', u'à', u'å']))
    with tm.assert_raises_regex(AssertionError, expected):
        assert_almost_equal(np.array([u'á', u'à', u'ä']),
                            np.array([u'á', u'à', u'å']))
Example #7
Source File: glove_preprocessor.py From interpret-text with MIT License | 6 votes |
def generate_tokens(self, text: str) -> List[Any]:
    """Split text into padded lists of tokens that are part of the recognized vocabulary

    :param text: a piece of text (e.g. a sentence)
    :type text: str
    :return:
        indexed_text (np.array): the token/vocabulary indices of recognized words in text,
            padded to the maximum sentence length
        mask (np.array): a mask indicating which indices in indexed_text correspond to
            words (1s) and which correspond to pads (0s)
    :rtype: tuple (of np.arrays)
    """
    indexed_text = [
        self.word_vocab[word]
        if ((word in self.counts) and (self.counts[word] > self.count_threshold))
        else self.word_vocab[UNK]
        for word in text.split()
    ]
    pad_length = max((self.token_cutoff - len(indexed_text)), 0)
    mask = [1] * min(len(indexed_text), self.token_cutoff) + [0] * pad_length

    indexed_text = indexed_text[0:self.token_cutoff] + [self.word_vocab[PAD]] * pad_length

    return [np.array(indexed_text), np.array(mask)]
Example #8
Source File: triinterpolate.py From neural-network-animation with MIT License | 6 votes |
def _interpolate_single_key(self, return_key, tri_index, x, y):
    """
    Performs the interpolation at points belonging to the triangulation
    (inside an unmasked triangle).

    Parameters
    ----------
    return_key : string key from {'z', 'dzdx', 'dzdy'}
        Identifies the requested values (z or its derivatives)
    tri_index : 1d integer array
        Valid triangle index (-1 prohibited)
    x, y : 1d arrays, same shape as `tri_index`
        Valid locations where interpolation is requested.

    Returns
    -------
    ret : 1-d array
        Returned array of the same size as *tri_index*
    """
    raise NotImplementedError("TriInterpolator subclasses " +
                              "should implement _interpolate_single_key!")
Example #9
Source File: triinterpolate.py From Computable with MIT License | 6 votes |
def _interpolate_single_key(self, return_key, tri_index, x, y):
    """
    Performs the interpolation at points belonging to the triangulation
    (inside an unmasked triangle).

    Parameters
    ----------
    return_key : string key from {'z', 'dzdx', 'dzdy'}
        Identifies the requested values (z or its derivatives)
    tri_index : 1d integer array
        Valid triangle index (-1 prohibited)
    x, y : 1d arrays, same shape as `tri_index`
        Valid locations where interpolation is requested.

    Returns
    -------
    ret : 1-d array
        Returned array of the same size as *tri_index*
    """
    raise NotImplementedError("TriInterpolator subclasses " +
                              "should implement _interpolate_single_key!")
Example #10
Source File: atoms.py From pyiron with BSD 3-Clause "New" or "Revised" License | 6 votes |
def write(self, filename, format=None, **kwargs):
    """
    Write atoms object to a file.

    see ase.io.write for formats.
    kwargs are passed to ase.io.write.

    Args:
        filename:
        format:
        **kwargs:

    Returns:

    """
    from ase.io import write

    atoms = self.copy()
    atoms.arrays["positions"] = atoms.positions
    write(filename, atoms, format, **kwargs)
Example #11
Source File: triinterpolate.py From Computable with MIT License | 6 votes |
def get_function_hessians(self, alpha, J, ecc, dofs):
    """
    Parameters
    ----------
    *alpha* is a (N x 3 x 1) array (array of column-matrices) of
    barycentric coordinates
    *J* is a (N x 2 x 2) array of jacobian matrices (jacobian matrix at
    triangle first apex)
    *ecc* is a (N x 3 x 1) array (array of column-matrices) of triangle
    eccentricities
    *dofs* is a (N x 1 x 9) arrays (arrays of row-matrices) of computed
    degrees of freedom.

    Returns
    -------
    Returns the values of interpolated function 2nd-derivatives
    [d2z/dx2, d2z/dy2, d2z/dxdy] in global coordinates at locations alpha,
    as a column-matrices of shape (N x 3 x 1).
    """
    d2sdksi2 = self.get_d2Sidksij2(alpha, ecc)
    d2fdksi2 = _prod_vectorized(dofs, d2sdksi2)
    H_rot = self.get_Hrot_from_J(J)
    d2fdx2 = _prod_vectorized(d2fdksi2, H_rot)
    return _transpose_vectorized(d2fdx2)
Example #12
Source File: atoms.py From pyiron with BSD 3-Clause "New" or "Revised" License | 6 votes |
def group_points_by_symmetry(self, points):
    """
    This function classifies the points into groups according to the box symmetry given by spglib.

    Args:
        points: (np.array/list) nx3 array which contains positions

    Returns: list of arrays containing geometrically equivalent positions

    It is possible that the original points are not found in the returned list, as the
    positions outside the box will be projected back to the box.
    """
    struct_copy = self.copy()
    points = np.array(points).reshape(-1, 3)
    struct_copy += Atoms(elements=len(points) * ["Hs"], positions=points)
    struct_copy.center_coordinates_in_unit_cell()
    group_IDs = struct_copy.get_symmetry()["equivalent_atoms"][
        struct_copy.select_index("Hs")
    ]
    return [
        np.round(points[group_IDs == ID], decimals=8) for ID in np.unique(group_IDs)
    ]
Example #13
Source File: __init__.py From scanpy with BSD 3-Clause "New" or "Revised" License | 6 votes |
def compute_neighbors_rapids(X: np.ndarray, n_neighbors: int):
    """Compute nearest neighbors using RAPIDS cuml.

    Parameters
    ----------
    X: array of shape (n_samples, n_features)
        The data to compute nearest neighbors for.
    n_neighbors
        The number of neighbors to use.

    Returns
    -------
    **knn_indices**, **knn_dists** : np.arrays of shape (n_observations, n_neighbors)
    """
    from cuml.neighbors import NearestNeighbors

    nn = NearestNeighbors(n_neighbors=n_neighbors)
    X_contiguous = np.ascontiguousarray(X, dtype=np.float32)
    nn.fit(X_contiguous)
    knn_distsq, knn_indices = nn.kneighbors(X_contiguous)
    return knn_indices, np.sqrt(knn_distsq)  # cuml uses sqeuclidean metric so take sqrt
Example #14
Source File: triinterpolate.py From Mastering-Elasticsearch-7.0 with MIT License | 6 votes |
def _interpolate_single_key(self, return_key, tri_index, x, y):
    """
    Performs the interpolation at points belonging to the triangulation
    (inside an unmasked triangle).

    Parameters
    ----------
    return_key : string key from {'z', 'dzdx', 'dzdy'}
        Identifies the requested values (z or its derivatives)
    tri_index : 1d integer array
        Valid triangle index (-1 prohibited)
    x, y : 1d arrays, same shape as `tri_index`
        Valid locations where interpolation is requested.

    Returns
    -------
    ret : 1-d array
        Returned array of the same size as *tri_index*
    """
    raise NotImplementedError("TriInterpolator subclasses " +
                              "should implement _interpolate_single_key!")
Example #15
Source File: triinterpolate.py From matplotlib-4-abaqus with MIT License | 6 votes |
def get_function_hessians(self, alpha, J, ecc, dofs):
    """
    Parameters
    ----------
    *alpha* is a (N x 3 x 1) array (array of column-matrices) of
    barycentric coordinates
    *J* is a (N x 2 x 2) array of jacobian matrices (jacobian matrix at
    triangle first apex)
    *ecc* is a (N x 3 x 1) array (array of column-matrices) of triangle
    eccentricities
    *dofs* is a (N x 1 x 9) arrays (arrays of row-matrices) of computed
    degrees of freedom.

    Returns
    -------
    Returns the values of interpolated function 2nd-derivatives
    [d2z/dx2, d2z/dy2, d2z/dxdy] in global coordinates at locations alpha,
    as a column-matrices of shape (N x 3 x 1).
    """
    d2sdksi2 = self.get_d2Sidksij2(alpha, ecc)
    d2fdksi2 = _prod_vectorized(dofs, d2sdksi2)
    H_rot = self.get_Hrot_from_J(J)
    d2fdx2 = _prod_vectorized(d2fdksi2, H_rot)
    return _transpose_vectorized(d2fdx2)
Example #16
Source File: triinterpolate.py From Mastering-Elasticsearch-7.0 with MIT License | 6 votes |
def get_function_hessians(self, alpha, J, ecc, dofs):
    """
    Parameters
    ----------
    *alpha* is a (N x 3 x 1) array (array of column-matrices) of
    barycentric coordinates
    *J* is a (N x 2 x 2) array of jacobian matrices (jacobian matrix at
    triangle first apex)
    *ecc* is a (N x 3 x 1) array (array of column-matrices) of triangle
    eccentricities
    *dofs* is a (N x 1 x 9) arrays (arrays of row-matrices) of computed
    degrees of freedom.

    Returns
    -------
    Returns the values of interpolated function 2nd-derivatives
    [d2z/dx2, d2z/dy2, d2z/dxdy] in global coordinates at locations alpha,
    as a column-matrices of shape (N x 3 x 1).
    """
    d2sdksi2 = self.get_d2Sidksij2(alpha, ecc)
    d2fdksi2 = _prod_vectorized(dofs, d2sdksi2)
    H_rot = self.get_Hrot_from_J(J)
    d2fdx2 = _prod_vectorized(d2fdksi2, H_rot)
    return _transpose_vectorized(d2fdx2)
Example #17
Source File: triinterpolate.py From matplotlib-4-abaqus with MIT License | 6 votes |
def _interpolate_single_key(self, return_key, tri_index, x, y):
    """
    Performs the interpolation at points belonging to the triangulation
    (inside an unmasked triangle).

    Parameters
    ----------
    return_key : string key from {'z', 'dzdx', 'dzdy'}
        Identifies the requested values (z or its derivatives)
    tri_index : 1d integer array
        Valid triangle index (-1 prohibited)
    x, y : 1d arrays, same shape as `tri_index`
        Valid locations where interpolation is requested.

    Returns
    -------
    ret : 1-d array
        Returned array of the same size as *tri_index*
    """
    raise NotImplementedError("TriInterpolator subclasses " +
                              "should implement _interpolate_single_key!")
Example #18
Source File: triinterpolate.py From Mastering-Elasticsearch-7.0 with MIT License | 6 votes |
def _to_matrix_vectorized(M):
    """
    Builds an array of matrices from individuals np.arrays of identical
    shapes.
    *M*: ncols-list of nrows-lists of shape sh.

    Returns M_res np.array of shape (sh, nrow, ncols) so that:
        M_res[..., i, j] = M[i][j]
    """
    assert isinstance(M, (tuple, list))
    assert all(isinstance(item, (tuple, list)) for item in M)
    c_vec = np.asarray([len(item) for item in M])
    assert np.all(c_vec-c_vec[0] == 0)
    r = len(M)
    c = c_vec[0]
    M00 = np.asarray(M[0][0])
    dt = M00.dtype
    sh = [M00.shape[0], r, c]
    M_ret = np.empty(sh, dtype=dt)
    for irow in range(r):
        for icol in range(c):
            M_ret[:, irow, icol] = np.asarray(M[irow][icol])
    return M_ret
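To make the layout concrete, here is a small sketch (my own, assuming numpy as np and the helper above is in scope) that assembles a batch of two 2 x 2 matrices from length-2 arrays:

    import numpy as np

    a = np.array([1., 2.])
    b = np.array([3., 4.])
    M = _to_matrix_vectorized([[a, b],
                               [b, a]])  # helper from the example above
    print(M.shape)  # (2, 2, 2) -- (batch, row, col)
    print(M[0])     # [[1. 3.]
                    #  [3. 1.]]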
Example #19
Source File: atoms.py From pyiron with BSD 3-Clause "New" or "Revised" License | 5 votes |
def _check_if_simple_atoms(atoms):
    """
    Raise a warning if the ASE atoms object includes properties which can not be converted to
    pymatgen atoms.

    Args:
        atoms: ASE atoms object
    """
    dict_keys = [
        k
        for k in atoms.__dict__.keys()
        if k not in ["_celldisp", "arrays", "_cell", "_pbc", "_constraints", "info", "_calc"]
    ]
    array_keys = [
        k for k in atoms.__dict__["arrays"].keys() if k not in ["numbers", "positions"]
    ]
    if not len(dict_keys) == 0:
        warnings.warn("Found unknown keys: " + str(dict_keys))
    if not np.all(atoms.__dict__["_celldisp"] == np.array([[0.0], [0.0], [0.0]])):
        warnings.warn("Found cell displacement: " + str(atoms.__dict__["_celldisp"]))
    if not atoms.__dict__["_calc"] is None:
        warnings.warn("Found calculator: " + str(atoms.__dict__["_calc"]))
    if not atoms.__dict__["_constraints"] == []:
        warnings.warn("Found constraint: " + str(atoms.__dict__["_constraints"]))
    if not np.all(atoms.__dict__["_pbc"]):
        warnings.warn("Cell is not periodic: " + str(atoms.__dict__["_pbc"]))
    if not len(array_keys) == 0:
        warnings.warn("Found unknown flags: " + str(array_keys))
    if not atoms.__dict__["info"] == dict():
        warnings.warn("Info is not empty: " + str(atoms.__dict__["info"]))
Example #20
Source File: vtk.py From AeroPy with MIT License | 5 votes |
def generate_points(data, filename):
    # TODO: Still cannot visualize points well
    from evtk.hl import pointsToVTK

    x, y, z = data.T

    # Sometimes np.arrays could have been manipulated so that they are no
    # longer C-contiguous, so we have to impose it
    x = np.ascontiguousarray(x)
    y = np.ascontiguousarray(y)
    z = np.ascontiguousarray(z)
    pointsToVTK(filename, x, y, z)
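The np.ascontiguousarray calls matter because data.T returns a transposed view whose rows are strided slices of the original buffer, and C-backed writers such as pointsToVTK expect C-contiguous arrays. A quick, library-free way to see this (my own sketch):

    import numpy as np

    data = np.random.rand(5, 3)     # 5 points with x, y, z columns
    x, y, z = data.T                # rows of the transposed view, i.e. strided slices
    print(x.flags['C_CONTIGUOUS'])  # False -- x still strides through `data`
    x = np.ascontiguousarray(x)
    print(x.flags['C_CONTIGUOUS'])  # True -- now a compact copy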
Example #21
Source File: triinterpolate.py From neural-network-animation with MIT License | 5 votes |
def _prod_vectorized(M1, M2):
    """
    Matrix product between arrays of matrices, or a matrix and an array of
    matrices (*M1* and *M2*)
    """
    sh1 = M1.shape
    sh2 = M2.shape
    assert len(sh1) >= 2
    assert len(sh2) >= 2
    assert sh1[-1] == sh2[-2]

    ndim1 = len(sh1)
    t1_index = list(xrange(ndim1-2)) + [ndim1-1, ndim1-2]
    return np.sum(np.transpose(M1, t1_index)[..., np.newaxis] *
                  M2[..., np.newaxis, :], -3)
Example #22
Source File: triinterpolate.py From matplotlib-4-abaqus with MIT License | 5 votes |
def _prod_vectorized(M1, M2):
    """
    Matrix product between arrays of matrices, or a matrix and an array of
    matrices (*M1* and *M2*)
    """
    sh1 = M1.shape
    sh2 = M2.shape
    assert len(sh1) >= 2
    assert len(sh2) >= 2
    assert sh1[-1] == sh2[-2]

    ndim1 = len(sh1)
    t1_index = range(ndim1-2) + [ndim1-1, ndim1-2]
    return np.sum(np.transpose(M1, t1_index)[..., np.newaxis] *
                  M2[..., np.newaxis, :], -3)
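This helper predates NumPy's @ (matmul) operator, which broadcasts a matrix product over leading batch dimensions; the sketch below (my own, Python 3) reproduces the same swap-multiply-sum computation and checks it against @. Note the listing above is Python 2 code: under Python 3, range(ndim1-2) would need to be wrapped in list() before the concatenation.

    import numpy as np

    M1 = np.random.rand(10, 2, 3)  # array of ten 2x3 matrices
    M2 = np.random.rand(10, 3, 4)  # array of ten 3x4 matrices

    # same computation as _prod_vectorized: swap the last two axes of M1,
    # broadcast an outer product, and contract over the shared dimension
    swapped = np.swapaxes(M1, -2, -1)                 # (10, 3, 2)
    manual = np.sum(swapped[..., np.newaxis] *
                    M2[..., np.newaxis, :], axis=-3)  # (10, 2, 4)
    assert np.allclose(manual, M1 @ M2)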
Example #23
Source File: triinterpolate.py From matplotlib-4-abaqus with MIT License | 5 votes |
def _safe_inv22_vectorized(M):
    """
    Inversion of arrays of (2,2) matrices, returns 0 for rank-deficient
    matrices.

    *M* : array of (2,2) matrices to inverse, shape (n,2,2)
    """
    assert M.ndim == 3
    assert M.shape[-2:] == (2, 2)
    M_inv = np.empty_like(M)
    prod1 = M[:, 0, 0]*M[:, 1, 1]
    delta = prod1 - M[:, 0, 1]*M[:, 1, 0]

    # We set delta_inv to 0. in case of a rank deficient matrix ; a
    # rank-deficient input matrix *M* will lead to a null matrix in output
    rank2 = (np.abs(delta) > 1e-8*np.abs(prod1))
    if np.all(rank2):
        # Normal 'optimized' flow.
        delta_inv = 1./delta
    else:
        # 'Pathologic' flow.
        delta_inv = np.zeros(M.shape[0])
        delta_inv[rank2] = 1./delta[rank2]

    M_inv[:, 0, 0] = M[:, 1, 1]*delta_inv
    M_inv[:, 0, 1] = -M[:, 0, 1]*delta_inv
    M_inv[:, 1, 0] = -M[:, 1, 0]*delta_inv
    M_inv[:, 1, 1] = M[:, 0, 0]*delta_inv
    return M_inv
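A short check (my own sketch, assuming numpy as np and the helper above is in scope) of the rank-deficient behaviour described in the docstring: a singular 2 x 2 matrix comes back as a zero block instead of raising an error:

    import numpy as np

    M = np.array([[[4., 2.],
                   [1., 1.]],   # invertible, det = 2
                  [[1., 2.],
                   [2., 4.]]])  # singular, rows proportional
    M_inv = _safe_inv22_vectorized(M)  # helper from the example above
    print(M_inv[0])  # [[ 0.5 -1. ]
                     #  [-0.5  2. ]]
    print(M_inv[1])  # [[0. 0.]
                     #  [0. 0.]] -- rank-deficient input maps to the zero matrix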
Example #24
Source File: triinterpolate.py From matplotlib-4-abaqus with MIT License | 5 votes |
def __init__(self, vals, rows, cols, shape):
    """
    Creates a sparse matrix in coo format
    *vals*: arrays of values of non-null entries of the matrix
    *rows*: int arrays of rows of non-null entries of the matrix
    *cols*: int arrays of cols of non-null entries of the matrix
    *shape*: 2-tuple (n,m) of matrix shape
    """
    self.n, self.m = shape
    self.vals = np.asarray(vals, dtype=np.float64)
    self.rows = np.asarray(rows, dtype=np.int32)
    self.cols = np.asarray(cols, dtype=np.int32)
Example #25
Source File: analysis.py From perses with MIT License | 5 votes |
def _prepare_logP_accept(self, environment):
    """
    Organize and retrieve the log acceptance probabilities for each of the
    transitions in the environment.

    Parameters
    ----------
    environment : str
        The name of the environment

    Returns
    -------
    logP_accept_dict : dict of (str, str) : list of 2 np.array
        A dictionary with a list of 2 np.arrays, one for s1->s2 logP_accept,
        another for s2->s1.
        logP_accepts have had their SAMS weights subtracted if relevant
    """
    logP_accept_values = self.extract_logP_values(environment, "logP_accept", subtract_sams=True)
    logP_accept_dict = {}
    for state_pair in itertools.combinations(self._visited_states, 2):
        try:
            forward_logP = np.array(logP_accept_values[(state_pair[0], state_pair[1])])
            reverse_logP = np.array(logP_accept_values[(state_pair[1], state_pair[0])])
        except KeyError:
            continue
        logP_accept_dict[state_pair] = [forward_logP, reverse_logP]
    return logP_accept_dict
Example #26
Source File: triinterpolate.py From matplotlib-4-abaqus with MIT License | 5 votes |
def get_function_values(self, alpha, ecc, dofs):
    """
    Parameters
    ----------
    alpha : is a (N x 3 x 1) array (array of column-matrices) of
        barycentric coordinates,
    ecc : is a (N x 3 x 1) array (array of column-matrices) of triangle
        eccentricities,
    dofs : is a (N x 1 x 9) arrays (arrays of row-matrices) of computed
        degrees of freedom.

    Returns
    -------
    Returns the N-array of interpolated function values.
    """
    subtri = np.argmin(alpha, axis=1)[:, 0]
    ksi = _roll_vectorized(alpha, -subtri, axis=0)
    E = _roll_vectorized(ecc, -subtri, axis=0)
    x = ksi[:, 0, 0]
    y = ksi[:, 1, 0]
    z = ksi[:, 2, 0]
    x_sq = x*x
    y_sq = y*y
    z_sq = z*z
    V = _to_matrix_vectorized([
        [x_sq*x], [y_sq*y], [z_sq*z], [x_sq*z], [x_sq*y], [y_sq*x], [y_sq*z],
        [z_sq*y], [z_sq*x], [x*y*z]])
    prod = _prod_vectorized(self.M, V)
    prod += _scalar_vectorized(E[:, 0, 0],
                               _prod_vectorized(self.M0, V))
    prod += _scalar_vectorized(E[:, 1, 0],
                               _prod_vectorized(self.M1, V))
    prod += _scalar_vectorized(E[:, 2, 0],
                               _prod_vectorized(self.M2, V))
    s = _roll_vectorized(prod, 3*subtri, axis=0)
    return _prod_vectorized(dofs, s)[:, 0, 0]
Example #27
Source File: effparam.py From python-meep-utils with GNU General Public License v2.0 | 5 votes |
def load_rt(filename):  #{{{
    """ Loads the reflection and transmission spectra and simulation settings

    Returns:
    * frequency axis
    * reflection s11 and transmission s12 as complex np arrays

    Compatible with the PKGraph text data file with polar data:
    * parameters in header like: #param name,value
    * column identification like: #column Ydata
    * data columns in ascii separated by space
    Expects polar data with columns: frequency, s11 ampli, s11 phase, s12 ampli, s12 phase
    """
    ## Extract relevant parameters
    #with open(filename+'.dat') as datafile:
        #for line in datafile:
            #if line[0:1] in "0123456789": break   # end of file header
            #value = line.replace(",", " ").split()[-1]   # the value of the parameter will be separated by space or comma
            #if ("cellsize" in line) and (cellsize == None): cellsize = float(value)
            #if ("cellnumber" in line): cellnumber = float(value)
            #if ("plot_freq_min" in line) and (plot_freq_min == 0): plot_freq_min = float(value)
            #if ("plot_freq_max" in line) and (plot_freq_max == np.infty): plot_freq_max = float(value)
            #if ("param padding" in line) and (padding == None): padding = float(value)

    ## Load data columns
    (freq, s11amp, s11phase, s12amp, s12phase) = \
        map(lambda a: np.array(a, ndmin=1), np.loadtxt(filename+".dat", unpack=True))

    ## Limit the frequency range to what will be plotted (recommended) TODO wrong approach
    #XXX RM
    #if truncate and len(freq)>1:
        #(d0,d1) = np.interp((plot_freq_min, plot_freq_max), freq, range(len(freq)))
        #(freq, s11amp, s11phase, s12amp, s12phase) = \
            #map(lambda a: a[int(d0):int(d1)], (freq, s11amp, s11phase, s12amp, s12phase))
    return freq, s11amp, s11phase, s12amp, s12phase
#}}}
Example #28
Source File: triinterpolate.py From matplotlib-4-abaqus with MIT License | 5 votes |
def __init__(self, triangulation, z, trifinder=None):
    if not isinstance(triangulation, Triangulation):
        raise ValueError("Expected a Triangulation object")
    self._triangulation = triangulation

    self._z = np.asarray(z)
    if self._z.shape != self._triangulation.x.shape:
        raise ValueError("z array must have same length as triangulation x"
                         " and y arrays")

    if trifinder is not None and not isinstance(trifinder, TriFinder):
        raise ValueError("Expected a TriFinder object")
    self._trifinder = trifinder or self._triangulation.get_trifinder()

    # Default scaling factors : 1.0 (= no scaling)
    # Scaling may be used for interpolations for which the order of
    # magnitude of x, y has an impact on the interpolant definition.
    # Please refer to :meth:`_interpolate_multikeys` for details.
    self._unit_x = 1.0
    self._unit_y = 1.0

    # Default triangle renumbering: None (= no renumbering)
    # Renumbering may be used to avoid unnecessary computations
    # if complex calculations are done inside the Interpolator.
    # Please refer to :meth:`_interpolate_multikeys` for details.
    self._tri_renum = None

    # __call__ and gradient docstrings are shared by all subclasses
    # (except, if needed, relevant additions).
    # However these methods are only implemented in subclasses to avoid
    # confusion in the documentation.
Example #29
Source File: triinterpolate.py From GraphicDesignPatternByPython with MIT License | 5 votes |
def __init__(self, triangulation, z, trifinder=None):
    if not isinstance(triangulation, Triangulation):
        raise ValueError("Expected a Triangulation object")
    self._triangulation = triangulation

    self._z = np.asarray(z)
    if self._z.shape != self._triangulation.x.shape:
        raise ValueError("z array must have same length as triangulation x"
                         " and y arrays")

    if trifinder is not None and not isinstance(trifinder, TriFinder):
        raise ValueError("Expected a TriFinder object")
    self._trifinder = trifinder or self._triangulation.get_trifinder()

    # Default scaling factors : 1.0 (= no scaling)
    # Scaling may be used for interpolations for which the order of
    # magnitude of x, y has an impact on the interpolant definition.
    # Please refer to :meth:`_interpolate_multikeys` for details.
    self._unit_x = 1.0
    self._unit_y = 1.0

    # Default triangle renumbering: None (= no renumbering)
    # Renumbering may be used to avoid unnecessary computations
    # if complex calculations are done inside the Interpolator.
    # Please refer to :meth:`_interpolate_multikeys` for details.
    self._tri_renum = None

    # __call__ and gradient docstrings are shared by all subclasses
    # (except, if needed, relevant additions).
    # However these methods are only implemented in subclasses to avoid
    # confusion in the documentation.
Example #30
Source File: atoms.py From pyiron with BSD 3-Clause "New" or "Revised" License | 5 votes |
def set_array(self, name, a, dtype=None, shape=None):
    """
    Update array. This function is for the purpose of compatibility with the ASE package

    Args:
        name (str): Name of the array
        a (list/numpy.ndarray): The array to be added
        dtype (type): Data type of the array
        shape (list/tuple): Shape of the array
    """
    b = self.arrays.get(name)
    if b is None:
        if a is not None:
            self.new_array(name, a, dtype, shape)
    else:
        if a is None:
            del self.arrays[name]
        else:
            a = np.asarray(a)
            if a.shape != b.shape:
                raise ValueError(
                    "Array has wrong shape %s != %s." % (a.shape, b.shape)
                )
            b[:] = a