Python numpy.s_() Examples
The following are 30 code examples of numpy.s_(), collected from open-source projects.
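As a quick orientation before the examples (this snippet is not taken from any of the projects below): np.s_ is numpy's index-expression helper. Indexing it with ordinary slice syntax returns the corresponding slice object, or a tuple of slices, which can be stored in a variable and reused anywhere an index is accepted.

import numpy as np

rows = np.s_[1:3]            # slice(1, 3, None)
block = np.s_[::2, 1:]       # (slice(None, None, 2), slice(1, None, None))

a = np.arange(12).reshape(3, 4)
print(a[rows])               # same result as a[1:3]
print(a[block])              # same result as a[::2, 1:]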
Example #1
Source File: test_indexing.py From auto-alt-text-lambda-api with MIT License | 6 votes |
def test_prepend_not_one(self):
    assign = self.assign
    s_ = np.s_
    a = np.zeros(5)

    # Too large and not only ones.
    assert_raises(ValueError, assign, a, s_[...], np.ones((2, 1)))

    with warnings.catch_warnings():
        # Will be a ValueError as well.
        warnings.simplefilter("error", DeprecationWarning)
        assert_raises(DeprecationWarning, assign, a, s_[[1, 2, 3],],
                      np.ones((2, 1)))
        assert_raises(DeprecationWarning, assign, a, s_[[[1], [2]],],
                      np.ones((2, 2, 1)))
Example #2
Source File: test_aux_linalg.py From harold with MIT License | 6 votes |
def test_e_i():
    assert_almost_equal(e_i(7, 5, output='r'),
                        array([[0., 0., 0., 0., 0., 1., 0.]]))
    assert_almost_equal(e_i(5, [0, 4, 4, 4, 1]),
                        array([[1., 0., 0., 0., 0.],
                               [0., 0., 0., 0., 1.],
                               [0., 0., 0., 0., 0.],
                               [0., 0., 0., 0., 0.],
                               [0., 1., 1., 1., 0.]]))
    assert_almost_equal(e_i(5, s_[1:3]),
                        array([[0., 0.],
                               [1., 0.],
                               [0., 1.],
                               [0., 0.],
                               [0., 0.]]))
    assert_almost_equal(e_i(5, slice(1, 5, 2), output='r'),
                        array([[0., 1., 0., 0., 0.],
                               [0., 0., 0., 1., 0.]]))
Example #3
Source File: dynamic_factor.py From vnpy_crypto with MIT License | 6 votes |
def _initialize_factor_transition(self):
    order = self.factor_order * self.k_factors
    k_factors = self.k_factors

    # Initialize the parameters
    self.parameters['factor_transition'] = (
        self.factor_order * self.k_factors**2)

    # Setup fixed components of state space matrices
    # VAR(p) for factor transition
    if self.k_factors > 0:
        if self.factor_order > 0:
            self.ssm['transition', k_factors:order, :order - k_factors] = (
                np.eye(order - k_factors))

        self.ssm['selection', :k_factors, :k_factors] = np.eye(k_factors)
        # Identification requires constraining the state covariance to an
        # identity matrix
        self.ssm['state_cov', :k_factors, :k_factors] = np.eye(k_factors)

    # Setup indices of state space matrices
    self._idx_factor_transition = np.s_['transition', :k_factors, :order]
Example #4
Source File: test_high_level.py From pyfive with BSD 3-Clause "New" or "Revised" License | 6 votes |
def test_read_direct():
    with pyfive.File(EARLIEST_HDF5_FILE) as hfile:
        dset1 = hfile['dataset1']

        arr = np.zeros(4)
        dset1.read_direct(arr)
        assert_array_equal(arr, [0, 1, 2, 3])

        arr = np.zeros(4)
        dset1.read_direct(arr, np.s_[:2], np.s_[:2])
        assert_array_equal(arr, [0, 1, 0, 0])

        arr = np.zeros(4)
        dset1.read_direct(arr, np.s_[1:3], np.s_[2:])
        assert_array_equal(arr, [0, 0, 1, 2])
Example #5
Source File: dynamic_factor.py From vnpy_crypto with MIT License | 6 votes |
def _initialize_error_transition_var(self):
    k_endog = self.k_endog
    _factor_order = self._factor_order
    _error_order = self._error_order

    # Initialize the parameters
    self.parameters['error_transition'] = _error_order * k_endog

    # Fixed components already setup above

    # Setup indices of state space matrices
    # Here we want to set all of the elements of the coefficient matrices,
    # the same as in a VAR specification
    self._idx_error_transition = np.s_[
        'transition',
        _factor_order:_factor_order + k_endog,
        _factor_order:_factor_order + _error_order]
Example #6
Source File: SpectraLearnPredict.py From SpectralMachine with GNU General Public License v3.0 | 6 votes |
def readPredMap(mapFile):
    try:
        with open(mapFile, 'r') as f:
            En = np.array(f.readline().split(), dtype=np.dtype(float))
            A = np.loadtxt(f, unpack =False)
    except:
        print('\033[1m' + ' Map data file not found \n' + '\033[0m')
        return

    X = A[:,0]
    Y = A[:,1]
    A = np.delete(A, np.s_[0:2], 1)
    print(' Shape map: ' + str(A.shape))
    return X, Y, A, En

####################################################################
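The np.s_[0:2] in readPredMap above is simply a named column range: np.delete(A, np.s_[0:2], 1) strips the first two columns (the X and Y coordinates) from the loaded map. A minimal standalone sketch of that pattern, using a toy array rather than the SpectralMachine file format:

import numpy as np

A = np.arange(12.0).reshape(3, 4)     # pretend columns are: X, Y, data1, data2
data = np.delete(A, np.s_[0:2], 1)    # drop columns 0 and 1 along axis=1
print(data.shape)                     # (3, 2)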
Example #7
Source File: SpectraLearnPredict.py From SpectralMachine with GNU General Public License v3.0 | 6 votes |
def readPredMap(mapFile):
    try:
        with open(mapFile, 'r') as f:
            En = np.array(f.readline().split(), dtype=np.dtype(float))
            A = np.loadtxt(f, unpack =False)
    except:
        print('\033[1m' + ' Map data file not found \n' + '\033[0m')
        return

    X = A[:,0]
    Y = A[:,1]
    A = np.delete(A, np.s_[0:2], 1)
    print(' Shape map: ' + str(A.shape))
    return X, Y, A, En

####################################################################
Example #8
Source File: SpectraLearnPredict.py From SpectralMachine with GNU General Public License v3.0 | 6 votes |
def readPredMap(mapFile):
    try:
        with open(mapFile, 'r') as f:
            En = np.array(f.readline().split(), dtype=np.dtype(float))
            A = np.loadtxt(f, unpack =False)
    except:
        print('\033[1m' + ' Map data file not found \n' + '\033[0m')
        return

    X = A[:,0]
    Y = A[:,1]
    A = np.delete(A, np.s_[0:2], 1)
    print(' Shape map: ' + str(A.shape))
    return X, Y, A, En

####################################################################
Example #9
Source File: SpectraLearnPredict_test-TF-new.py From SpectralMachine with GNU General Public License v3.0 | 6 votes |
def readPredMap(mapFile):
    try:
        with open(mapFile, 'r') as f:
            En = np.array(f.readline().split(), dtype=np.dtype(float))
            A = np.loadtxt(f, unpack =False)
    except:
        print('\033[1m' + ' Map data file not found \n' + '\033[0m')
        return

    X = A[:,0]
    Y = A[:,1]
    A = np.delete(A, np.s_[0:2], 1)
    print(' Shape map: ' + str(A.shape))
    return X, Y, A, En

####################################################################
Example #10
Source File: tennnnnnnnnnnnnnnnnnnnnnnnis.py From pycolab with Apache License 2.0 | 6 votes |
def update(self, actions, board, layers, backdrop, things, the_plot):
    # Move up or down as directed if there is room.
    action = Actions.STAY if actions is None else actions[self.character]
    if action == Actions.UP:
        if self._paddle_top > 1: self._paddle_top -= 1
    elif action == Actions.DOWN:
        if self._paddle_top < 7: self._paddle_top += 1

    # Repaint the paddle. Note "blinking" effect if the ball slips past us.
    self.curtain[:, self._paddle_col] = False
    blink = (things['@'].position.col <= self._paddle_col  # "past" us depends
             if self.character == '1' else                 # on which paddle
             things['@'].position.col >= self._paddle_col) # we are.
    if not blink or (the_plot.frame % 2 == 0):
        paddle_rows = np.s_[self._paddle_top:(self._paddle_top + 2)]
        self.curtain[paddle_rows, self._paddle_col] = True
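Because np.s_ just returns a slice object, the paddle's row range can be bound to a name (paddle_rows) and then used for assignment exactly like an inline slice. A stripped-down sketch of that idiom, independent of pycolab:

import numpy as np

curtain = np.zeros((9, 9), dtype=bool)
paddle_top, paddle_col = 3, 2
paddle_rows = np.s_[paddle_top:paddle_top + 2]   # reusable row range
curtain[paddle_rows, paddle_col] = True          # same as curtain[3:5, 2] = True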
Example #11
Source File: SplitCrossValidation._legacy1.py From SpectralMachine with GNU General Public License v3.0 | 6 votes |
def readLearnFile(learnFile):
    try:
        with open(learnFile, 'r') as f:
            M = np.loadtxt(f, unpack =False)
    except:
        print('\033[1m' + ' Learn data file not found \n' + '\033[0m')
        return

    learnFileRoot = os.path.splitext(learnFile)[0]

    #En = np.delete(np.array(M[0,:]),np.s_[0:1],0)
    #M = np.delete(np.array(M[:,1:]),np.s_[0:1],0)
    En = np.delete(np.array(M[0,:]),np.s_[0:1],0)
    M = np.delete(M,np.s_[0:1],0)
    Cl = np.asarray(['{:.2f}'.format(x) for x in M[:,0]]).reshape(-1,1)
    M = np.delete(M,np.s_[0:1],1)

    print("En:",En.shape)
    print("M:",M.shape)
    return En, M, Cl, learnFileRoot

####################################################################
Example #12
Source File: slp_preprocess.py From SpectralMachine with GNU General Public License v3.0 | 6 votes |
def readPredMap(mapFile):
    try:
        with open(mapFile, 'r') as f:
            En = np.array(f.readline().split(), dtype=np.dtype(float))
            A = np.loadtxt(f, unpack =False)
    except:
        print('\033[1m' + ' Map data file not found \n' + '\033[0m')
        return

    X = A[:,0]
    Y = A[:,1]
    A = np.delete(A, np.s_[0:2], 1)
    print(' Shape map: ' + str(A.shape))
    return X, Y, A, En

####################################################################
Example #13
Source File: slp_preprocess.py From SpectralMachine with GNU General Public License v3.0 | 6 votes |
def readPredMap(mapFile):
    try:
        with open(mapFile, 'r') as f:
            En = np.array(f.readline().split(), dtype=np.dtype(float))
            A = np.loadtxt(f, unpack =False)
    except:
        print('\033[1m' + ' Map data file not found \n' + '\033[0m')
        return

    X = A[:,0]
    Y = A[:,1]
    A = np.delete(A, np.s_[0:2], 1)
    print(' Shape map: ' + str(A.shape))
    return X, Y, A, En

####################################################################
Example #14
Source File: test_dataset_getitem.py From GraphicDesignPatternByPython with MIT License | 5 votes |
def test_slice_zerostride(self):
    self.assertNumpyBehavior(self.dset, self.data, np.s_[::0])
Example #15
Source File: test_dataset_getitem.py From GraphicDesignPatternByPython with MIT License | 5 votes |
def test_slice_negindexes(self):
    self.assertNumpyBehavior(self.dset, self.data, np.s_[-8:-2:3])
Example #16
Source File: test_dataset_getitem.py From GraphicDesignPatternByPython with MIT License | 5 votes |
def test_slice_zerosize(self):
    self.assertNumpyBehavior(self.dset, self.data, np.s_[4:4])
Example #17
Source File: test_dataset_getitem.py From GraphicDesignPatternByPython with MIT License | 5 votes |
def test_slice_simple(self):
    self.assertNumpyBehavior(self.dset, self.data, np.s_[0:4])
Example #18
Source File: test_dataset_getitem.py From GraphicDesignPatternByPython with MIT License | 5 votes |
def test_slice_stop_less_than_start(self):
    self.assertNumpyBehavior(self.dset, self.data, np.s_[7:5])
Example #19
Source File: test_dataset_getitem.py From GraphicDesignPatternByPython with MIT License | 5 votes |
def test_slice_outofrange(self):
    self.assertNumpyBehavior(self.dset, self.data, np.s_[100:400:3])
Example #20
Source File: test_dataset_getitem.py From GraphicDesignPatternByPython with MIT License | 5 votes |
def test_indexlist_simple(self):
    self.assertNumpyBehavior(self.dset, self.data, np.s_[[1,2,5]])
Example #21
Source File: test_dataset_getitem.py From GraphicDesignPatternByPython with MIT License | 5 votes |
def test_indexlist_single_index_ellipsis(self):
    self.assertNumpyBehavior(self.dset, self.data, np.s_[[0], ...])
Example #22
Source File: test_dataset.py From GraphicDesignPatternByPython with MIT License | 5 votes |
def test_ref_shape(self):
    """ Region reference shape and selection shape """
    slic = np.s_[25:35, 10:100:5]
    ref = self.dset.regionref[slic]
    self.assertEqual(self.dset.regionref.shape(ref), self.dset.shape)
    self.assertEqual(self.dset.regionref.selection(ref), (10, 18))
Example #23
Source File: test_sgrid_deltares.py From gridded with The Unlicense | 5 votes |
def test_variable_slice(sgrid_obj):
    u_center_slices = sgrid_obj.U1.center_slicing
    v_center_slices = sgrid_obj.V1.center_slicing
    u_center_expected = (np.s_[:], np.s_[:], np.s_[:], np.s_[1:])
    v_center_expected = (np.s_[:], np.s_[:], np.s_[1:], np.s_[:])
    xz_center_slices = sgrid_obj.XZ.center_slicing
    xcor_center_slices = sgrid_obj.XCOR.center_slicing
    xz_center_expected = (np.s_[1:], np.s_[1:])
    xcor_center_expected = (np.s_[:], np.s_[:])
    assert u_center_slices == u_center_expected
    assert v_center_slices == v_center_expected
    assert xz_center_slices == xz_center_expected
    assert xcor_center_slices == xcor_center_expected
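These gridded tests can compare slicings with == because np.s_ produces plain slice objects (or tuples of them), and Python slices support value equality. A small illustration, independent of gridded:

import numpy as np

expected = (np.s_[:], np.s_[1:-1])
actual = (slice(None), slice(1, -1))
assert expected == actual            # slices compare by (start, stop, step)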
Example #24
Source File: test_sgrid_roms.py From gridded with The Unlicense | 5 votes |
def test_variable_slicing(sgrid):
    u_center_slices = sgrid.u.center_slicing
    v_center_slices = sgrid.v.center_slicing
    u_center_expected = (np.s_[:], np.s_[:], np.s_[1:-1], np.s_[:])
    v_center_expected = (np.s_[:], np.s_[:], np.s_[:], np.s_[1:-1])
    assert u_center_slices == u_center_expected
    assert v_center_slices == v_center_expected
Example #25
Source File: test_utilities.py From gridded with The Unlicense | 5 votes |
def test_gen_celltree_mask_from_center_mask():
    center_mask = np.array(([True, True, True, True, True],
                            [True, False, True, True, True],
                            [True, False, False, False, True],
                            [True, True, True, True, True]))
    center_sl = np.s_[1:-1,1:-1]  #'both' padding
    m = utilities.gen_celltree_mask_from_center_mask(center_mask, center_sl)
    expected_mask = np.array(([False, True, True],
                              [False, False, False]))
    assert np.all(m == expected_mask)

    testds = nc.Dataset('foo', mode='w', diskless=True)
    testds.createDimension('x', 5)
    testds.createDimension('y', 4)
    testds.createVariable('mask', 'b', dimensions=('y', 'x'))
    testds['mask'][:] = center_mask
    m3 = utilities.gen_celltree_mask_from_center_mask(center_mask, center_sl)
    assert np.all(m3 == expected_mask)

    testds['mask'][:] = ~center_mask
    testds['mask'].flag_values = [0, 1]
    testds['mask'].flag_meanings = ['land', 'water']
    m4 = utilities.gen_celltree_mask_from_center_mask(center_mask, center_sl)
    assert np.all(m4 == expected_mask)

    testds['mask'][:] = ~center_mask
    testds['mask'].flag_values = [0, 1]
    # because sometimes it's a damn string
    testds['mask'].flag_meanings = 'land water'
    m5 = utilities.gen_celltree_mask_from_center_mask(center_mask, center_sl)
    assert np.all(m5 == expected_mask)
    testds.close()
Example #26
Source File: test_uvh5.py From pyuvdata with BSD 2-Clause "Simplified" License | 5 votes |
def test_write_complex_astype(tmp_path):
    # make sure we can write data out
    test_file = str(tmp_path / "test_file.h5")
    test_data_shape = (2, 3, 4, 5)
    test_data = np.zeros(test_data_shape, dtype=np.complex64)
    test_data.real = 1.0
    test_data.imag = 2.0
    with h5py.File(test_file, "w") as h5f:
        dgrp = h5f.create_group("Data")
        dset = dgrp.create_dataset(
            "testdata", test_data_shape, dtype=uvh5._hera_corr_dtype
        )
        inds = (np.s_[:], np.s_[:], np.s_[:], np.s_[:])
        uvh5._write_complex_astype(test_data, dset, inds)

    # read the data back in to confirm it's right
    with h5py.File(test_file, "r") as h5f:
        dset = h5f["Data/testdata"]
        file_data = np.zeros(test_data_shape, dtype=np.complex64)
        with dset.astype(uvh5._hera_corr_dtype):
            file_data.real = dset["r"][:, :, :, :]
            file_data.imag = dset["i"][:, :, :, :]

    assert np.allclose(file_data, test_data)

    return
Example #27
Source File: test_uvh5.py From pyuvdata with BSD 2-Clause "Simplified" License | 5 votes |
def test_read_complex_astype_errors(tmp_path):
    # make a testfile with a test dataset
    test_file = str(tmp_path / "test_file.h5")
    test_data_shape = (2, 3, 4, 5)
    test_data = np.zeros(test_data_shape, dtype=np.complex64)
    test_data.real = 1.0
    test_data.imag = 2.0
    with h5py.File(test_file, "w") as h5f:
        dgrp = h5f.create_group("Data")
        dset = dgrp.create_dataset(
            "testdata", test_data_shape, dtype=uvh5._hera_corr_dtype
        )
        with dset.astype(uvh5._hera_corr_dtype):
            dset[:, :, :, :, "r"] = test_data.real
            dset[:, :, :, :, "i"] = test_data.imag

    # test passing in a forbidden output datatype
    indices = (np.s_[:], np.s_[:], np.s_[:], np.s_[:])
    with h5py.File(test_file, "r") as h5f:
        dset = h5f["Data/testdata"]
        with pytest.raises(ValueError) as cm:
            uvh5._read_complex_astype(dset, indices, np.int32)
        assert str(cm.value).startswith("output datatype must be one of (complex")

    # clean up
    os.remove(test_file)

    return
Example #28
Source File: test_uvh5.py From pyuvdata with BSD 2-Clause "Simplified" License | 5 votes |
def test_read_complex_astype(tmp_path):
    # make a testfile with a test dataset
    test_file = str(tmp_path / "test_file.h5")
    test_data_shape = (2, 3, 4, 5)
    test_data = np.zeros(test_data_shape, dtype=np.complex64)
    test_data.real = 1.0
    test_data.imag = 2.0
    with h5py.File(test_file, "w") as h5f:
        dgrp = h5f.create_group("Data")
        dset = dgrp.create_dataset(
            "testdata", test_data_shape, dtype=uvh5._hera_corr_dtype
        )
        with dset.astype(uvh5._hera_corr_dtype):
            dset[:, :, :, :, "r"] = test_data.real
            dset[:, :, :, :, "i"] = test_data.imag

    # test that reading the data back in works as expected
    indices = (np.s_[:], np.s_[:], np.s_[:], np.s_[:])
    with h5py.File(test_file, "r") as h5f:
        dset = h5f["Data/testdata"]
        file_data = uvh5._read_complex_astype(dset, indices, np.complex64)

    assert np.allclose(file_data, test_data)

    # clean up
    os.remove(test_file)

    return
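The indices tuple in the pyuvdata tests above is simply "everything on every axis": a tuple of four np.s_[:] entries behaves the same as writing data[:, :, :, :]. A minimal check of that equivalence, on a toy array rather than uvh5 data:

import numpy as np

inds = (np.s_[:], np.s_[:], np.s_[:], np.s_[:])
data = np.arange(16).reshape(2, 2, 2, 2)
assert np.array_equal(data[inds], data[:, :, :, :])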
Example #29
Source File: ClassDataMaker_legacy1.py From SpectralMachine with GNU General Public License v3.0 | 5 votes |
def makeFile(sampleFile, learnFile, param):
    try:
        with open(sampleFile, 'r') as f:
            En = np.loadtxt(f, unpack = True, usecols=range(0,1), delimiter = ',', skiprows = 10)
            if(En.size == 0):
                print('\n Empty file \n' )
                return False
        with open(sampleFile, 'r') as f:
            R = np.loadtxt(f, unpack = True, usecols=range(1,2), delimiter = ',', skiprows = 10)
        print(' Number of points in \"' + sampleFile + '\": ' + str(En.shape[0]))
    except:
        print('\033[1m' + ' Sample data file not found \n' + '\033[0m')
        return False

    if os.path.exists(learnFile):
        with open(learnFile, 'r') as f:
            M = np.loadtxt(f, unpack =False)
        EnT = np.delete(np.array(M[0,:]),np.s_[0:1],0)
        if EnT.shape[0] == En.shape[0]:
            print(' Number of points in the learning dataset: ' + str(EnT.shape[0]))
        else:
            print('\033[1m' + ' Mismatch in datapoints: ' + str(EnT.shape[0]) + '; sample = ' + str(En.shape[0]) + '\033[0m')
            R = np.interp(EnT, En, R, left = 0, right = 0)
            print('\033[1m' + ' Mismatch corrected: datapoints in sample: ' + str(R.shape[0]) + '\033[0m')
        print('\n Added spectra to \"' + learnFile + '\"\n')
        newTrain = np.append(float(param),R).reshape(1,-1)
    else:
        print('\n\033[1m' + ' Train data file not found. Creating...' + '\033[0m')
        newTrain = np.append([0], En)
        print(' Added spectra to \"' + learnFile + '\"\n')
        newTrain = np.vstack((newTrain, np.append(float(param),R)))

    with open(learnFile, 'ab') as f:
        np.savetxt(f, newTrain, delimiter='\t', fmt='%10.6f')

    return True

#************************************
Example #30
Source File: MakeCrossValidSet.py From SpectralMachine with GNU General Public License v3.0 | 5 votes |
def readIndexFile(File):
    try:
        csv = np.genfromtxt(File,delimiter=',')
        L = np.nan_to_num(csv[:,1])
    except:
        print('\033[1m' + ' Index data file not found \n' + '\033[0m')
        return

    L = np.delete(L,np.s_[0:1],0)
    return L

#************************************