Python astropy.io.fits.getdata() Examples
The following are 30 code examples of astropy.io.fits.getdata(), drawn from open-source projects.
The original project, source file, and license are noted above each example.
You may also want to check out the other available functions and classes of the astropy.io.fits module.
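Before working through the examples, here is a minimal sketch of the call patterns that recur below. It assumes only that a FITS file named example.fits exists; the filename and extension numbers are placeholders, not taken from any project:

from astropy.io import fits

# Read the data array of the first HDU that contains data
data = fits.getdata('example.fits')

# Select a specific extension, optionally retrieving the header as well
sci = fits.getdata('example.fits', ext=1)
data, header = fits.getdata('example.fits', header=True)

# An already-open, binary-mode file object also works (see Example #5);
# the convenience function leaves it open afterwards
with open('example.fits', 'rb') as f:
    data = fits.getdata(f)

getdata() returns a numpy.ndarray for image HDUs and a FITS_rec (a numpy record-array subclass) for table HDUs.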
Example #1
Source File: test_table.py From Carnets with BSD 3-Clause "New" or "Revised" License | 6 votes |
def test_table_from_bool_fields(self):
    """
    Regression test for https://aeon.stsci.edu/ssb/trac/pyfits/ticket/113

    Tests creating a table from a recarray containing numpy.bool columns.
    """
    array = np.rec.array([(True, False), (False, True)], formats='|b1,|b1')
    thdu = fits.BinTableHDU.from_columns(array)
    assert thdu.columns.formats == ['L', 'L']
    assert comparerecords(thdu.data, array)

    # Test round trip
    thdu.writeto(self.temp('table.fits'))
    data = fits.getdata(self.temp('table.fits'), ext=1)
    assert thdu.columns.formats == ['L', 'L']
    assert comparerecords(data, array)
Example #2
Source File: reconstruction.py From soapy with GNU General Public License v3.0 | 6 votes |
def loadCMat(self):
    super(LearnAndApply, self).loadCMat()

    # Load tomo reconstructor
    tomoFilename = self.sim_config.simName + "/tomoMat.fits"
    tomoMat = fits.getdata(tomoFilename)

    # And check it's the right size
    if tomoMat.shape != (
            2 * self.wfss[0].activeSubaps,
            self.sim_config.totalWfsData - 2 * self.wfss[0].activeSubaps):
        logger.warning("Loaded Tomo matrix not the expected shape - gonna make a new one...")
        raise Exception
    else:
        self.tomoRecon = tomoMat
Example #3
Source File: test_image.py From Carnets with BSD 3-Clause "New" or "Revised" License | 6 votes |
def test_fortran_array(self):
    # Test that files are being correctly written+read for "C" and "F" order arrays
    a = np.arange(21).reshape(3, 7)
    b = np.asfortranarray(a)

    afits = self.temp('a_str.fits')
    bfits = self.temp('b_str.fits')
    # writing to str specified files
    fits.PrimaryHDU(data=a).writeto(afits)
    fits.PrimaryHDU(data=b).writeto(bfits)
    np.testing.assert_array_equal(fits.getdata(afits), a)
    np.testing.assert_array_equal(fits.getdata(bfits), a)

    # writing to fileobjs
    aafits = self.temp('a_fileobj.fits')
    bbfits = self.temp('b_fileobj.fits')
    with open(aafits, mode='wb') as fd:
        fits.PrimaryHDU(data=a).writeto(fd)
    with open(bbfits, mode='wb') as fd:
        fits.PrimaryHDU(data=b).writeto(fd)
    np.testing.assert_array_equal(fits.getdata(aafits), a)
    np.testing.assert_array_equal(fits.getdata(bbfits), a)
Example #4
Source File: test_image.py From Carnets with BSD 3-Clause "New" or "Revised" License | 6 votes |
def test_fortran_array_non_contiguous(self):
    # Test that files are being correctly written+read for 'C' and 'F' order arrays
    a = np.arange(105).reshape(3, 5, 7)
    b = np.asfortranarray(a)

    # writing to str specified files
    afits = self.temp('a_str_slice.fits')
    bfits = self.temp('b_str_slice.fits')
    fits.PrimaryHDU(data=a[::2, ::2]).writeto(afits)
    fits.PrimaryHDU(data=b[::2, ::2]).writeto(bfits)
    np.testing.assert_array_equal(fits.getdata(afits), a[::2, ::2])
    np.testing.assert_array_equal(fits.getdata(bfits), a[::2, ::2])

    # writing to fileobjs
    aafits = self.temp('a_fileobj_slice.fits')
    bbfits = self.temp('b_fileobj_slice.fits')
    with open(aafits, mode='wb') as fd:
        fits.PrimaryHDU(data=a[::2, ::2]).writeto(fd)
    with open(bbfits, mode='wb') as fd:
        fits.PrimaryHDU(data=b[::2, ::2]).writeto(fd)
    np.testing.assert_array_equal(fits.getdata(aafits), a[::2, ::2])
    np.testing.assert_array_equal(fits.getdata(bbfits), a[::2, ::2])
Example #5
Source File: test_convenience.py From Carnets with BSD 3-Clause "New" or "Revised" License | 6 votes |
def test_fileobj_not_closed(self):
    """
    Tests that file-like objects are not closed after being passed to
    convenience functions.

    Regression test for https://github.com/astropy/astropy/issues/5063
    """
    f = open(self.data('test0.fits'), 'rb')
    _ = fits.getdata(f)
    assert not f.closed

    f.seek(0)
    _ = fits.getheader(f)
    assert not f.closed

    f.close()  # Close it now
Example #6
Source File: test_BANE.py From Aegean with Academic Free License v3.0 | 6 votes |
def test_quantitative():
    """Test that the images are equal to a pre-calculated version"""
    fbase = 'tests/test_files/1904-66_SIN'
    outbase = 'dlme'
    BANE.filter_image(fbase + '.fits', out_base=outbase, cores=2, nslice=2)
    rms = outbase + '_rms.fits'
    bkg = outbase + '_bkg.fits'
    ref_rms = fbase + '_rms.fits'
    ref_bkg = fbase + '_bkg.fits'

    r1 = fits.getdata(rms)
    r2 = fits.getdata(ref_rms)
    b1 = fits.getdata(bkg)
    b2 = fits.getdata(ref_bkg)
    os.remove(rms)
    os.remove(bkg)

    if not np.allclose(r1, r2, atol=0.01, equal_nan=True):
        raise AssertionError("rms is wrong")
    if not np.allclose(b1, b2, atol=0.003, equal_nan=True):
        raise AssertionError("bkg is wrong")
    return
Example #7
Source File: test_table.py From Carnets with BSD 3-Clause "New" or "Revised" License | 6 votes |
def test_dim_column_byte_order_mismatch(self):
    """
    When creating a table column with non-trivial TDIMn, and
    big-endian array data read from an existing FITS file, the data
    should not be unnecessarily byteswapped.

    Regression test for https://github.com/astropy/astropy/issues/3561
    """
    data = fits.getdata(self.data('random_groups.fits'))['DATA']
    col = fits.Column(name='TEST', array=data, dim='(3,1,128,1,1)',
                      format='1152E')
    thdu = fits.BinTableHDU.from_columns([col])
    thdu.writeto(self.temp('test.fits'))

    with fits.open(self.temp('test.fits')) as hdul:
        assert np.all(hdul[1].data['TEST'] == data)
Example #8
Source File: test_table.py From Carnets with BSD 3-Clause "New" or "Revised" License | 6 votes |
def test_getdata_vla(self):
    """Regression test for https://aeon.stsci.edu/ssb/trac/pyfits/ticket/200"""

    def test(format_code):
        col = fits.Column(name='QUAL_SPE', format=format_code,
                          array=[np.arange(1572)] * 225)
        tb_hdu = fits.BinTableHDU.from_columns([col])
        pri_hdu = fits.PrimaryHDU()
        hdu_list = fits.HDUList([pri_hdu, tb_hdu])
        with ignore_warnings():
            hdu_list.writeto(self.temp('toto.fits'), overwrite=True)

        data = fits.getdata(self.temp('toto.fits'))

        # Need to compare to the original data row by row since the FITS_rec
        # returns an array of _VLA objects
        for row_a, row_b in zip(data['QUAL_SPE'], col.array):
            assert (row_a == row_b).all()

    for code in ('PJ()', 'QJ()'):
        test(code)
Example #9
Source File: catalogs.py From drizzlepac with BSD 3-Clause "New" or "Revised" License | 6 votes |
def __init__(self, wcs, catalog_source, src_find_filters=None, **kwargs):
    # 'src_find_filters' - None or a dictionary. The dictionary
    # MUST contain keys 'region_file' and 'region_file_mode':
    # - 'region_file': the name of the region file that indicates regions
    #   of the image that should be used for source finding
    #   ("include" regions) or regions of the image that should NOT be used
    #   for source finding ("exclude" regions). If it is None - the entire
    #   image will be used for source finding.
    # - 'region_file_mode': 'exclude only' or 'normal' - if 'exclude only'
    #   then regular regions are interpreted as 'exclude' regions and
    #   exclude regions (with '-' in front) are ignored. If
    #   'region_file_mode' = 'normal' then normal DS9 interpretation
    #   of the regions will be applied.
    self.src_find_filters = src_find_filters
    super().__init__(wcs, catalog_source, **kwargs)
    extind = self.fname.rfind('[')
    self.fnamenoext = self.fname if extind < 0 else self.fname[:extind]
    if self.wcs.extname == ('', None):
        self.wcs.extname = (0)
    self.source = fits.getdata(self.wcs.filename, ext=self.wcs.extname,
                               memmap=False)
    self.nbright = None  # No GUI parameter defined yet for this filtering
Example #10
Source File: test_table.py From Carnets with BSD 3-Clause "New" or "Revised" License | 5 votes |
def test_fits_rec_from_existing(self):
    """
    Tests creating a `FITS_rec` object with `FITS_rec.from_columns`
    from an existing `FITS_rec` object read from a FITS file.

    This ensures that the per-column arrays are updated properly.

    Regression test for https://github.com/spacetelescope/PyFITS/issues/99
    """
    # The use case that revealed this problem was trying to create a new
    # table from an existing table, but with additional rows so that we can
    # append data from a second table (with the same column structure)
    data1 = fits.getdata(self.data('tb.fits'))
    data2 = fits.getdata(self.data('tb.fits'))

    nrows = len(data1) + len(data2)

    merged = fits.FITS_rec.from_columns(data1, nrows=nrows)
    merged[len(data1):] = data2

    mask = merged['c1'] > 1
    masked = merged[mask]

    # The test table only has two rows, only the second of which is > 1 for
    # the 'c1' column
    assert comparerecords(data1[1:], masked[:1])
    assert comparerecords(data1[1:], masked[1:])

    # Double check that the original data1 table hasn't been affected by
    # its use in creating the "merged" table
    assert comparerecords(data1, fits.getdata(self.data('tb.fits')))
Example #11
Source File: reconstruction.py From soapy with GNU General Public License v3.0 | 5 votes |
def load_interaction_matrix(self):
    """
    Loads the interaction matrix from file

    AO interaction matrices can get very big, so it's useful to be able to
    load them from file rather than make them anew every time. It is assumed
    that the iMat is saved as "FITS" in the simulation saved directory, with
    other accompanying FITS files that contain the indices of actuators
    which are "valid". Some DMs also use pre-made influence functions, which
    are also loaded here.
    """
    filename = self.sim_config.simName + "/iMat.fits"
    imat_header = fits.getheader(filename)
    imat_data = fits.getdata(filename)

    # Check that imat shape is compatible with current sim
    if imat_data.shape != (self.sim_config.totalActs, self.sim_config.totalWfsData):
        logger.warning("Interaction matrix does not match required size.")
        raise IOError("Interaction matrix does not match required size.")

    self.interaction_matrix[:] = imat_data

    # Load valid actuators
    for i in range(self.n_dms):
        valid_acts_filename = self.sim_config.simName + "/active_acts_dm{}.fits".format(i)
        valid_acts = fits.getdata(valid_acts_filename)
        self.dms[i].valid_actuators = valid_acts

        # DM may also have preloaded influence functions
        try:
            dm_shapes_filename = self.sim_config.simName + "/dmShapes_dm{}.fits".format(i)
            dm_shapes = fits.getdata(dm_shapes_filename)
            self.dms[i].iMatShapes = dm_shapes
        except IOError:
            # Found no DM influence funcs
            logger.info("DM influence functions not found. If the DM doesn't use them, this is ok. If not, set 'forceNew=True' when making IMat")
Example #12
Source File: wfpc2Data.py From drizzlepac with BSD 3-Clause "New" or "Revised" License | 5 votes |
def buildMask(self, chip, bits=0, write=False):
    """
    Build masks as specified in the user parameters found in the
    configObj object.
    """
    sci_chip = self._image[self.scienceExt, chip]

    # For WFPC2 data, build mask files using:
    maskname = sci_chip.dqrootname + '_dqmask.fits'
    dqmask_name = buildmask.buildShadowMaskImage(
        sci_chip.dqfile, sci_chip.detnum, sci_chip.extnum, maskname,
        bitvalue=bits, binned=sci_chip.binned)
    sci_chip.dqmaskname = dqmask_name
    sci_chip.outputNames['dqmask'] = dqmask_name
    sci_chip.outputNames['tmpmask'] = 'wfpc2_inmask%d.fits' % (sci_chip.detnum)
    dqmask = fits.getdata(dqmask_name, ext=0, memmap=False)
    return dqmask
Example #13
Source File: test_image.py From Carnets with BSD 3-Clause "New" or "Revised" License | 5 votes |
def test_compressed_integers(self, dtype):
    """Test that the various integer dtypes are correctly written and read.

    Regression test for https://github.com/astropy/astropy/issues/9072
    """
    mid = np.iinfo(dtype).max // 2
    data = np.arange(mid - 50, mid + 50, dtype=dtype)
    testfile = self.temp('test.fits')
    hdu = fits.CompImageHDU(data=data)
    hdu.writeto(testfile, overwrite=True)
    new = fits.getdata(testfile)
    np.testing.assert_array_equal(data, new)
Example #14
Source File: test_image.py From Carnets with BSD 3-Clause "New" or "Revised" License | 5 votes |
def test_comp_image_quantize_level(self):
    """
    Regression test for https://github.com/astropy/astropy/issues/5969

    Test that quantize_level is used.
    """
    import scipy.misc
    np.random.seed(42)
    data = scipy.misc.ascent() + np.random.randn(512, 512) * 10

    fits.ImageHDU(data).writeto(self.temp('im1.fits'))
    fits.CompImageHDU(data, compression_type='RICE_1', quantize_method=1,
                      quantize_level=-1, dither_seed=5)\
        .writeto(self.temp('im2.fits'))
    fits.CompImageHDU(data, compression_type='RICE_1', quantize_method=1,
                      quantize_level=-100, dither_seed=5)\
        .writeto(self.temp('im3.fits'))

    im1 = fits.getdata(self.temp('im1.fits'))
    im2 = fits.getdata(self.temp('im2.fits'))
    im3 = fits.getdata(self.temp('im3.fits'))

    assert not np.array_equal(im2, im3)
    assert np.isclose(np.min(im1 - im2), -0.5, atol=1e-3)
    assert np.isclose(np.max(im1 - im2), 0.5, atol=1e-3)
    assert np.isclose(np.min(im1 - im3), -50, atol=1e-1)
    assert np.isclose(np.max(im1 - im3), 50, atol=1e-1)
Example #15
Source File: findobj.py From drizzlepac with BSD 3-Clause "New" or "Revised" License | 5 votes |
def roundness(im):
    """
    from astropy.io import fits as pyfits
    data = pyfits.getdata('j94f05bgq_flt.fits', ext=1)
    star0 = data[403:412, 423:432]
    star = data[396:432, 3522:3558]

    In [53]: findobj.roundness(star0)
    Out[53]: 0.99401955054989544

    In [54]: findobj.roundness(star)
    Out[54]: 0.83091919980660645
    """
    perimeter = im.shape[0] * 2 + im.shape[1] * 2 - 4
    area = im.size
    return 4 * np.pi * area / perimeter ** 2
Example #16
Source File: test_structured.py From Carnets with BSD 3-Clause "New" or "Revised" License | 5 votes |
def test_structured(self):
    fname = self.data('stddata.fits')

    data1, h1 = fits.getdata(fname, ext=1, header=True)
    data2, h2 = fits.getdata(fname, ext=2, header=True)

    st = get_test_data()

    outfile = self.temp('test.fits')
    fits.writeto(outfile, data1, overwrite=True)
    fits.append(outfile, data2)
    fits.append(outfile, st)
    assert st.dtype.isnative
    assert np.all(st['f1'] == [1, 3, 5])

    data1check, h1check = fits.getdata(outfile, ext=1, header=True)
    data2check, h2check = fits.getdata(outfile, ext=2, header=True)
    stcheck, sthcheck = fits.getdata(outfile, ext=3, header=True)

    assert compare_arrays(data1, data1check, verbose=True)
    assert compare_arrays(data2, data2check, verbose=True)
    assert compare_arrays(st, stcheck, verbose=True)

    # try reading with view
    dataviewcheck, hviewcheck = fits.getdata(outfile, ext=2, header=True,
                                             view=np.ndarray)
    assert compare_arrays(data2, dataviewcheck, verbose=True)
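A detail worth noting in this test is the view keyword: it asks getdata() to return the data viewed as the given ndarray subclass rather than the default FITS_rec. A minimal sketch, assuming a table extension in a placeholder file:

from astropy.io import fits
import numpy as np

# Return a plain ndarray view instead of a FITS_rec for a table HDU
arr = fits.getdata('tables.fits', ext=2, view=np.ndarray)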
Example #17
Source File: DM.py From soapy with GNU General Public License v3.0 | 5 votes |
def makeIMatShapes(self):
    '''
    Creates all the DM shapes which are required for creating the
    interaction matrix. If the shapes are not of the correct size,
    they are interpolated, with non-valid values being either NaN or 0.
    For best results, it is however preferable that no interpolation
    occurs.
    '''
    try:
        with open(self.config.dmShapesFilename, "rb") as shapes_file:
            dmShape = fits.getdata(shapes_file)
    except:
        raise ValueError("Can't open fits file %s" % self.config.dmShapesFilename)

    assert dmShape.shape[0] >= int(self.n_acts), 'Not enough shapes in DM shape file'

    dmShape = dmShape[:int(self.n_acts), :, :]

    if (dmShape.shape[1] != int(self.nx_dm_elements)) or \
            (dmShape.shape[2] != int(self.nx_dm_elements)):
        logger.warning("Interpolating custom DM shapes of size "
                       "({0:1.0f}, {1:1.0f}) to size of "
                       "({2:1.0f}, {2:1.0f})".format(dmShape.shape[1],
                                                     dmShape.shape[1],
                                                     self.nx_dm_elements))

        if numpy.any(dmShape == 0):
            dmShape[dmShape == 0] = numpy.nan

        shapes = interp.zoomWithMissingData(
            dmShape, (self.nx_dm_elements, self.nx_dm_elements),
            non_valid_value=numpy.nan)
    else:
        shapes = dmShape

    shapes[numpy.isnan(shapes)] = 0

    pad = self.simConfig.simPad
    self.iMatShapes = numpy.pad(
        shapes, ((0, 0), (pad, pad), (pad, pad)), mode="constant"
    ).astype("float32")
Example #18
Source File: astrometric_utils.py From drizzlepac with BSD 3-Clause "New" or "Revised" License | 5 votes |
def compute_zero_mask(imgarr, iterations=8, ext=0):
    """Find section from image with no masked out pixels and max total flux"""
    if isinstance(imgarr, str):
        img_mask = fits.getdata(imgarr, ext=0)
    else:
        img_mask = imgarr.copy()

    img_mask[img_mask > 0] = 1
    img_mask = ndimage.binary_erosion(img_mask, iterations=iterations)

    return img_mask
Example #19
Source File: extendedshackhartmann.py From soapy with GNU General Public License v3.0 | 5 votes |
def extendedObject(self, extendedObject):
    if extendedObject is not None:
        # If a string, assume a fits file
        if isinstance(extendedObject, str):
            extendedObject = fits.getdata(extendedObject)

        if extendedObject.shape != (self.subapFFTPadding,) * 2:
            raise ValueError(
                "Shape of extended object must be ({}, {}). This is "
                "`pxlsPerSubap * fftOversamp`".format(
                    self.subapFFTPadding, self.subapFFTPadding))

        self._extendedObject = extendedObject
    else:
        self._extendedObject = None
Example #20
Source File: io.py From hcipy with MIT License | 5 votes |
def read_fits(filename):
    '''Read an array from a fits file.

    Parameters
    ----------
    filename : string
        The filename of the file to read. This can include a path.

    Returns
    -------
    ndarray
        The ndarray read from the fits file.
    '''
    from astropy.io import fits
    return fits.getdata(filename).copy()
Example #21
Source File: read_spec.py From serval with MIT License | 5 votes |
def getdata(self, extname=None, o=np.s_[:]):
    if extname is None:
        # search first extension with data
        extname = 0
        while not self[extname].NAXIS:
            extname += 1
    ext = self[extname]
    dtype = {-32: '>f4', -64: '>f8'}[ext.BITPIX]
    dsize = {-32: 4, -64: 8}[ext.BITPIX]
    was_open = True
    if isinstance(self.fileobj, (tarfile.ExFileObject, file)):
        funit = self.fileobj
    else:
        funit = open(self.fileobj)
        was_open = False
    # with open(self.fileobj) as funit:
    if 1:
        if o == np.s_[:]:
            # read all
            funit.seek(ext.EXTDATA)
            data = np.fromfile(funit, dtype=dtype,
                               count=ext.NAXIS1 * ext.NAXIS2
                               ).reshape((ext.NAXIS2, ext.NAXIS1))
        else:
            funit.seek(ext.EXTDATA + o * ext.NAXIS1 * dsize)
            data = np.fromfile(funit, dtype=dtype, count=ext.NAXIS1)
    if not was_open:
        funit.close()
    return data
Example #22
Source File: chi2map.py From serval with MIT License | 5 votes |
def fromfile(name, *args):
    chi2map = np.array(pyfits.getdata(name), np.float)
    hdr = pyfits.getheader(name)
    vrange = hdr['CRVAL1'], hdr['CDELT1']   # -15, 0.1
    return Chi2Map(chi2map, vrange, *args, name=name)
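For orientation, the (CRVAL1, CDELT1) pair read here describes the map's velocity axis as a start value and a step size; the inline comment suggests -15 and 0.1. A minimal sketch of expanding such a pair into a full grid (the values and the number of bins are assumptions for illustration, not taken from the serval source):

import numpy as np

crval1, cdelt1 = -15.0, 0.1   # assumed start and step, per the '# -15, 0.1' comment
nbins = 301                   # hypothetical number of columns (NAXIS1) in the map
v_grid = crval1 + cdelt1 * np.arange(nbins)   # velocity coordinate of each column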
Example #23
Source File: yaml_generator.py From mirage with BSD 3-Clause "New" or "Revised" License | 5 votes |
def find_ipc_file(self, inputipc):
    """Given a list of potential IPC kernel files for a given
    detector, select the most appropriate one, and check to see
    whether the kernel needs to be inverted, in order to populate
    the invertIPC field. This is not intended to be terribly smart.
    The first inverted kernel found will be used. If none are found,
    the first kernel will be used and set to be inverted.

    Parameters
    ----------
    inputipc : list
        List of fits files containing IPC kernels for a single detector

    Returns
    -------
    (ipcfile, invstatus) : tup
        ipcfile is the name of the IPC kernel file to use, and
        invstatus lists whether the kernel needs to be inverted or not.
    """
    for ifile in inputipc:
        kernel = fits.getdata(ifile)
        kshape = kernel.shape

        # If kernel is 4 dimensional, extract the 3x3 kernel associated
        # with a single pixel
        if len(kernel.shape) == 4:
            kernel = kernel[:, :, np.int(kshape[2] / 2), np.int(kshape[2] / 2)]

        if kernel[1, 1] < 1.0:
            return (ifile, False)

    # If no inverted kernel was found, just return the first file
    return (inputipc[0], True)
Example #24
Source File: obs_generator.py From mirage with BSD 3-Clause "New" or "Revised" License | 5 votes |
def simple_get_image(self, name):
    """Read in an array from a fits file and crop using subarray_bounds

    Parameters
    ----------
    name : str
        Name of fits file to be read in

    Returns
    -------
    image : numpy.ndarray
        Array populated with the file contents
    """
    try:
        image, header = fits.getdata(name, header=True)
    except:
        raise FileNotFoundError('WARNING: unable to read in {}'.format(name))

    # assume that the input is 2D, since we are using it to build a signal rate frame
    imageshape = image.shape
    if len(imageshape) != 2:
        self.printfunc("Error: image %s is not two-dimensional" % (name))
        return None, None

    try:
        image = image[self.subarray_bounds[1]:self.subarray_bounds[3] + 1,
                      self.subarray_bounds[0]:self.subarray_bounds[2] + 1]
    except:
        raise ValueError("Unable to crop image from {}".format(name))

    return image
Example #25
Source File: psf_selection.py From mirage with BSD 3-Clause "New" or "Revised" License | 5 votes |
def _load_itm_library(library_file):
    """Load ITM FITS file

    Parameters
    ----------
    library_file : str
        Path pointing to the location of the PSF library

    Returns
    -------
    library : photutils.griddedPSFModel
        Object containing PSF library
    """
    data = fits.getdata(library_file)
    hdr = fits.getheader(library_file)
    if data.shape == (2048, 2048):
        # Normalize the data
        data /= np.sum(data)

        # Add PSF location and oversampling keywords
        hdr['DET_YX0'] = ('(1023, 1023)', "The #0 PSF's (y,x) detector pixel position")
        hdr['OVERSAMP'] = (1, 'Oversampling factor for FFTs in computation')

        # Convert to HDUList and create library
        phdu = fits.PrimaryHDU(data, hdr)
        hdulist = fits.HDUList(phdu)
        library = to_griddedpsfmodel(hdulist)

        return library
    else:
        raise ValueError('Expecting ITM data of size (2048, 2048), not {}'.format(data.shape))
Example #26
Source File: base.py From marvin with BSD 3-Clause "New" or "Revised" License | 5 votes |
def _get_data(self, vacfile=None, ext=1):
    ''' Get only the data from the VAC file from a given extension '''
    if not vacfile:
        vacfile = self._vacfile
    return fits.getdata(vacfile, ext)
Example #27
Source File: utils.py From juliet with MIT License | 5 votes |
def get_TESS_data(filename, fluxtype='PDCSAP_FLUX'):
    """
    Given a filename, this function returns an array of times, fluxes
    and errors on those fluxes.
    """
    # Manipulate the fits file:
    data = fits.getdata(filename)

    # Identify zero-flux values to take them out of the data arrays:
    idx = np.where((data[fluxtype] != 0.) & (~np.isnan(data[fluxtype])))[0]

    # Return median-normalized flux:
    return data['TIME'][idx], data[fluxtype][idx] / np.median(data[fluxtype][idx]), \
           data[fluxtype + '_ERR'][idx] / np.median(data[fluxtype][idx])
Example #28
Source File: extendedshackhartmann.py From soapy with GNU General Public License v3.0 | 5 votes |
def referenceImage(self, referenceImage):
    if referenceImage is not None:
        # If given value is a string, assume a filename of fits file
        if isinstance(referenceImage, str):
            referenceImage = fits.getdata(referenceImage)

        # Shape of expected ref values
        refShape = (
            self.activeSubaps, self.wfsConfig.pxlsPerSubap,
            self.wfsConfig.pxlsPerSubap)
        self._referenceImage = numpy.zeros(refShape)

        # If it's an array of sub-aps, no work needed
        if referenceImage.shape == refShape:
            self._referenceImage = referenceImage

        # If it's the size of a sub-ap, set all subaps to that value
        elif referenceImage.shape == (self.wfsConfig.pxlsPerSubap,) * 2:
            # Make a placeholder for the reference image
            self._referenceImage = numpy.zeros(
                (self.activeSubaps, self.wfsConfig.pxlsPerSubap,
                 self.wfsConfig.pxlsPerSubap))
            self._referenceImage[:] = referenceImage

        # If it's the size of the detector, assume it's a tiled array of sub-aps
        elif referenceImage.shape == (self.detectorPxls,) * 2:
            for i, (x, y) in enumerate(self.detectorSubapCoords):
                self._referenceImage[i] = referenceImage[
                    x:x + self.wfsConfig.pxlsPerSubap,
                    y:y + self.wfsConfig.pxlsPerSubap]

        # Do the FFT of the reference image for the correlation
        self.iReferenceImage = numpy.fft.ifft2(
            self._referenceImage, axes=(1, 2))
    else:
        self._referenceImage = None
Example #29
Source File: utils.py From juliet with MIT License | 4 votes |
def get_all_TESS_data(object_name, radius=".02 deg", get_PDC=True, get_all=False):
    """
    Given a planet name, this function returns a dictionary of times, fluxes and
    errors on fluxes in a juliet-friendly format. The function does an astroquery
    to MAST using a default radius of .02 deg around the target name. If get_PDC
    is True, this function returns PDC fluxes; if False, SAP fluxes. If get_all
    is True, this function returns a dictionary that, in addition to the times,
    fluxes and errors, contains other metadata.
    """
    if not has_astroquery:
        print("Error on using juliet function `get_all_TESS_data`: astroquery.mast not found.")
    obs_table = Observations.query_object(object_name, radius=radius)
    out_dict = {}
    times = {}
    fluxes = {}
    fluxes_errors = {}
    for i in range(len(obs_table['dataURL'])):
        if 's_lc.fits' in obs_table['dataURL'][i]:
            fname = obs_table['dataURL'][i].split('/')[-1]
            metadata = fname.split('-')
            if len(metadata) == 5:
                # Extract metadata:
                sector = np.int(metadata[1].split('s')[-1])
                ticid = np.int(metadata[2])
                # Download files:
                data_products = Observations.get_product_list(obs_table[i])
                manifest = Observations.download_products(data_products)
                # Read lightcurve file:
                d, h = fits.getdata('mastDownload/TESS/' + fname[:-8] + '/' + fname,
                                    header=True)
                t, fs, fserr, f, ferr = d['TIME'] + h['BJDREFI'], d['SAP_FLUX'], \
                                        d['SAP_FLUX_ERR'], d['PDCSAP_FLUX'], \
                                        d['PDCSAP_FLUX_ERR']
                idx_goodpdc = np.where((f != 0.) & (~np.isnan(f)))[0]
                idx_goodsap = np.where((fs != 0.) & (~np.isnan(fs)))[0]
                # Save to output dictionary:
                if 'TIC' not in out_dict.keys():
                    out_dict['TIC'] = ticid
                out_dict[sector] = {}
                out_dict[sector]['TIME_PDCSAP_FLUX'] = t[idx_goodpdc]
                out_dict[sector]['PDCSAP_FLUX'] = f[idx_goodpdc]
                out_dict[sector]['PDCSAP_FLUX_ERR'] = ferr[idx_goodpdc]
                out_dict[sector]['TIME_SAP_FLUX'] = t[idx_goodsap]
                out_dict[sector]['SAP_FLUX'] = fs[idx_goodsap]
                out_dict[sector]['SAP_FLUX_ERR'] = fserr[idx_goodsap]
                if get_PDC:
                    times['TESS' + str(sector)] = t[idx_goodpdc]
                    med = np.median(f[idx_goodpdc])
                    fluxes['TESS' + str(sector)] = f[idx_goodpdc] / med
                    fluxes_errors['TESS' + str(sector)] = ferr[idx_goodpdc] / med
                else:
                    times['TESS' + str(sector)] = t[idx_goodsap]
                    med = np.median(fs[idx_goodsap])
                    fluxes['TESS' + str(sector)] = fs[idx_goodsap] / med
                    fluxes_errors['TESS' + str(sector)] = fserr[idx_goodsap] / med
                # Remove downloaded folder:
                os.system('rm -r mastDownload')
    if get_all:
        return out_dict, times, fluxes, fluxes_errors
    else:
        return times, fluxes, fluxes_errors
Example #30
Source File: make_contours.py From rgz_rcnn with MIT License | 4 votes |
def contour(f, rms, LEVEL_INTERVAL):
    BBox = namedtuple('BBox', 'max_x max_y min_x min_y')
    data = fits.getdata(f)[::-1]
    height = len(data)
    width = len(data[0])

    def filter_small(c):
        box = c['bbox']
        x = box.max_x - box.min_x
        y = box.max_y - box.min_y
        return cmath.sqrt(x * x + y * y).real > (THRESHOLD * (width / 301))

    def group_contours(c):
        group = []
        group.append(c)
        bbox = c['bbox']
        for sc in subcontours:
            p = sc['arr'][0]
            if bbox.max_x > p.x and bbox.min_x < p.x and bbox.max_y > p.y and bbox.min_y < p.y:
                group.append(sc)
        return group

    def bounding_box(c):
        xs = map(lambda p: p.x, c['arr'])
        ys = map(lambda p: p.y, c['arr'])
        max_x = max(xs)
        min_x = min(xs)
        max_y = max(ys)
        min_y = min(ys)
        c['bbox'] = BBox(max_x, max_y, min_x, min_y)
        return c

    idx = range(1, height + 1)
    jdx = range(1, width + 1)
    LEVELS = make_levels(NLEVELS, LEVEL_INTERVAL)
    cs = contour_list(conrec(data, 0, height - 1, 0, width - 1, idx, jdx,
                             len(LEVELS),
                             map(lambda l: l * rms / LEVELS[0], LEVELS)))
    k0contours = map(bounding_box, filter(lambda c: c['k'] == 0, cs))
    subcontours = filter(lambda c: c['k'] != 0, cs)
    return {'height': height,
            'width': width,
            'contours': map(group_contours, filter(filter_small, k0contours))}