Python xarray.concat() Examples

The following are 30 code examples of xarray.concat(), drawn from open-source projects. The source file, project, and license are noted above each example.
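Before the project examples, here is a minimal, self-contained sketch of the two most common xr.concat patterns: joining along an existing dimension and stacking along a new one.

import numpy as np
import xarray as xr

a = xr.DataArray(np.arange(3), dims='x', coords={'x': [0, 1, 2]})
b = xr.DataArray(np.arange(3, 6), dims='x', coords={'x': [3, 4, 5]})

# Join along the existing dimension 'x' -> length 6.
along_x = xr.concat([a, b], dim='x')

# Stack along a new dimension 'run' -> shape (2, 3).
stacked = xr.concat([a, a], dim='run')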
Example #1
Source File: model.py    From aospy with Apache License 2.0
def _bounds_from_array(arr, dim_name, bounds_name):
    """Get the bounds of an array given its center values.

    E.g. if lat-lon grid center lat/lon values are known, but not the
    bounds of each grid box.  The algorithm assumes that the bounds
    are simply halfway between each pair of center values.
    """
    # TODO: don't assume needed dimension is in axis=0
    # TODO: refactor to get rid of repetitive code
    spacing = arr.diff(dim_name).values
    lower = xr.DataArray(np.empty_like(arr), dims=arr.dims,
                         coords=arr.coords)
    lower.values[:-1] = arr.values[:-1] - 0.5*spacing
    lower.values[-1] = arr.values[-1] - 0.5*spacing[-1]
    upper = xr.DataArray(np.empty_like(arr), dims=arr.dims,
                         coords=arr.coords)
    upper.values[:-1] = arr.values[:-1] + 0.5*spacing
    upper.values[-1] = arr.values[-1] + 0.5*spacing[-1]
    bounds = xr.concat([lower, upper], dim='bounds')
    return bounds.T 
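A quick sanity check of how this helper behaves on an evenly spaced latitude axis (hypothetical values; assumes np and xr are imported as in aospy). Note that bounds_name is currently unused inside the function (see the refactor TODO).

import numpy as np
import xarray as xr

lat = xr.DataArray(np.array([-60., -30., 0., 30., 60.]),
                   dims=['lat'],
                   coords={'lat': np.array([-60., -30., 0., 30., 60.])})
bounds = _bounds_from_array(lat, 'lat', 'lat_bnds')
# bounds has shape (5, 2); the grid box centered at 0. spans (-15., 15.).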
Example #2
Source File: ncep.py    From atlite with GNU General Public License v3.0
def convert_lons_lats_ncep(ds, xs, ys):
    if not isinstance(xs, slice):
        first, second, last = np.asarray(xs)[[0,1,-1]]
        xs = slice(first - 0.1*(second - first), last + 0.1*(second - first))
    if not isinstance(ys, slice):
        first, second, last = np.asarray(ys)[[0,1,-1]]
        ys = slice(first - 0.1*(second - first), last + 0.1*(second - first))

    ds = ds.sel(lat_0=ys)

    # Lons should go from -180. to +180.
    if len(ds.coords['lon_0'].sel(lon_0=slice(xs.start + 360., xs.stop + 360.))):
        ds = xr.concat([ds.sel(lon_0=slice(xs.start + 360., xs.stop + 360.)),
                        ds.sel(lon_0=xs)],
                       dim="lon_0")
        ds = ds.assign_coords(lon_0=np.where(ds.coords['lon_0'].values <= 180,
                                             ds.coords['lon_0'].values,
                                             ds.coords['lon_0'].values - 360.))
    else:
        ds = ds.sel(lon_0=xs)

    ds = ds.rename({'lon_0': 'x', 'lat_0': 'y'})
    ds = ds.assign_coords(lon=ds.coords['x'], lat=ds.coords['y'])
    return ds 
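A hypothetical call illustrating the dateline handling, with a toy dataset on a 0-360 longitude, descending latitude grid (names and values made up for illustration):

import numpy as np
import xarray as xr

ds = xr.Dataset(
    {'t': (('lat_0', 'lon_0'), np.random.rand(5, 8))},
    coords={'lat_0': np.linspace(60., 20., 5),
            'lon_0': np.arange(0., 360., 45.)})

# Ask for longitudes in the -180..180 convention; the 260-320 band of the
# 0-360 grid is pulled around and relabeled as -100..-40.
out = convert_lons_lats_ncep(ds, xs=slice(-100., -40.), ys=slice(55., 25.))
# out.coords['x'] -> [-90., -45.]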
Example #3
Source File: raster.py    From intake-xarray with BSD 2-Clause "Simplified" License
def _open_files(self, files):
        import xarray as xr
        das = [xr.open_rasterio(f, chunks=self.chunks, **self._kwargs)
               for f in files]
        out = xr.concat(das, dim=self.dim)

        coords = {}
        if self.pattern:
            coords = {
                k: xr.concat(
                    [xr.DataArray(
                        np.full(das[i].sizes.get(self.dim, 1), v),
                        dims=self.dim
                    ) for i, v in enumerate(values)], dim=self.dim)
                for k, values in reverse_formats(self.pattern, files).items()
            }

        return out.assign_coords(**coords).chunk(self.chunks) 
Example #4
Source File: concatenate.py    From starfish with MIT License
def concatenate(expression_matrices: Iterable[ExpressionMatrix]) -> ExpressionMatrix:
    """Concatenate IntensityTables produced for different fields of view or across imaging rounds

    Expression Matrices are concatenated along the cells axis, and the resulting arrays are stored
    densely.

    Parameters
    ----------
    expression_matrices : Iterable[ExpressionMatrix]
        iterable (list-like) of expression matrices to combine

    Returns
    -------
    ExpressionMatrix :
        Concatenated expression matrix containing all input cells. Missing gene values are filled
        with np.nan

    See Also
    --------
    Combine_first: http://xarray.pydata.org/en/stable/combining.html#combine

    """
    concatenated_matrix: xr.DataArray = xr.concat(list(expression_matrices), Features.CELLS)
    return ExpressionMatrix(concatenated_matrix) 
Example #5
Source File: intensity_table.py    From starfish with MIT License
def concatenate_intensity_tables(
        intensity_tables: List["IntensityTable"],
        overlap_strategy: Optional[OverlapStrategy] = None
    ) -> "IntensityTable":
        """
        Parameters
        ----------
        intensity_tables: List[IntensityTable]
            List of IntensityTables to be combined.
        overlap_strategy: Optional[OverlapStrategy]
            Strategy used to resolve features in regions where the input
            tables overlap; if None, the tables are concatenated as-is.

        Returns
        -------
        IntensityTable
            Concatenated IntensityTable containing the features of all inputs.
        """
        if overlap_strategy:
            intensity_tables = IntensityTable._process_overlaps(
                intensity_tables, overlap_strategy
            )
        return xr.concat(intensity_tables, dim=Features.AXIS) 
Example #6
Source File: test_integration_xarray_extensions_vectorxarray.py    From geocube with BSD 3-Clause "New" or "Revised" License
def test_multidimensional_error():
    gdf = gpd.read_file(os.path.join(TEST_INPUT_DATA_DIR, "soil_data_flat.geojson"))
    vxd = vectorxarray.from_geodataframe(gdf)
    vxd2 = vxd.copy()
    vxd.coords["time"] = parse("20170516T000000")
    vxd2.coords["time"] = parse("20170517T000000")
    merged_vxd = xarray.concat([vxd, vxd2], dim="time")
    with pytest.raises(ValueError):
        merged_vxd.vector.plot(column="sandtotal_r") 
Example #7
Source File: plot_forecasts.py    From DLWP with MIT License
def add_southern_hemisphere(da):
    da_s = da.assign_coords(lat=(-1. * da.lat.values))
    result = xr.concat([da, da_s.sel(lat=(da_s.lat < 0.0)).isel(lat=slice(None, None, -1))], dim='lat')
    return result 
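A hedged sketch of using it on a toy northern-hemisphere field (hypothetical coordinates; the input is assumed to have descending latitudes and no equator row):

import numpy as np
import xarray as xr

da = xr.DataArray(np.random.rand(2, 4),
                  coords={'lat': [85., 45.], 'lon': [0., 90., 180., 270.]},
                  dims=['lat', 'lon'])
full = add_southern_hemisphere(da)
# full.lat -> [85., 45., -45., -85.]: the field mirrored about the equator.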
Example #8
Source File: datashader.py    From holoviews with BSD 3-Clause "New" or "Revised" License
def concatenate(cls, overlay):
        """
        Concatenates an NdOverlay of Image types into a single 3D
        xarray Dataset.
        """
        if not isinstance(overlay, NdOverlay):
            raise ValueError('Only NdOverlays can be concatenated')
        xarr = xr.concat([v.data.transpose() for v in overlay.values()],
                         pd.Index(overlay.keys(), name=overlay.kdims[0].name))
        params = dict(get_param_values(overlay.last),
                      vdims=overlay.last.vdims,
                      kdims=overlay.kdims+overlay.last.kdims)
        return Dataset(xarr.transpose(), datatype=['xarray'], **params) 
Example #9
Source File: utils.py    From xarrayutils with MIT License
def concat_dim_da(data, name):
    """creates an xarray.Dataarray to label the concat dim in xarray.concat.
    data is the dimension array and name is the name (DuHHHHH)"""
    return xr.DataArray(data, dims=[name], coords={name: (name, data)}, name=name) 
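For instance (a hypothetical ensemble; xr.concat accepts a DataArray as its dim argument and uses it as both the name and the coordinate of the new dimension):

import numpy as np
import xarray as xr

members = [xr.DataArray(np.random.rand(4), dims=['time']) for _ in range(3)]
stacked = xr.concat(members, dim=concat_dim_da(np.array([10, 20, 30]), 'member'))
# stacked.dims -> ('member', 'time'), with coordinate member = [10, 20, 30]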
Example #10
Source File: utils.py    From xmitgcm with MIT License
def find_concat_dim_facet(da, facet, extra_metadata):
    """ In llc grids, find along which horizontal dimension to concatenate
    facet between i, i_g and j, j_g. If the order of the facet is F, concat
    along i or i_g. If order is C, concat along j or j_g. Also return
    horizontal dim not to concatenate

    PARAMETERS
    ----------
    da : xarray.DataArray
        xmitgcm llc data array
    facet : int
        facet number
    extra_metadata : dict
        dict of extra_metadata from get_extra_metadata

    RETURNS
    -------
    concat_dim, nonconcat_dim : str, str
        names of the dimensions for concatenation or not

    """
    order = extra_metadata['facet_orders'][facet]
    if order == 'C':
        possible_concat_dims = ['j', 'j_g']
    elif order == 'F':
        possible_concat_dims = ['i', 'i_g']

    concat_dim = find_concat_dim(da, possible_concat_dims)

    # we also need the other horizontal dimension for vector indexing
    all_dims = list(da.dims)
    # discard face
    all_dims.remove('face')
    # remove the concat_dim to find horizontal non_concat dimension
    all_dims.remove(concat_dim)
    non_concat_dim = all_dims[0]
    return concat_dim, non_concat_dim 
Example #11
Source File: utils.py    From xclim with Apache License 2.0
def add_endpoints(
    da: xr.DataArray,
    left: Sequence[Union[int, float]],
    right: Sequence[Union[int, float]],
    dim: str = "quantiles",
):
    """Add left and right endpoints to a DataArray.

    Parameters
    ----------
    da : DataArray
      Source array.
    left : [x, y]
      Values to prepend.
    right : [x, y]
      Values to append.
    dim : str
      Dimension along which to add endpoints.
    """
    elems = []
    for (x, y) in (left, right):
        if isinstance(y, xr.DataArray):
            if "quantiles" not in y.dims:
                y = y.expand_dims("quantiles")
            y = y.assign_coords(quantiles=x)
        else:
            y = xr.DataArray(y, coords={dim: x}, dims=(dim,))
        elems.append(y)
    l, r = elems  # pylint: disable=unbalanced-tuple-unpacking
    out = xr.concat((l, da, r), dim=dim)
    return ensure_chunk_size(out, **{dim: -1}) 
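A hypothetical call, assuming ensure_chunk_size from the same module is in scope. Each endpoint is given as [coordinate, value]; pass 1-D array-likes (not bare scalars) so the constructed endpoint DataArray matches its single dimension:

import xarray as xr

q = xr.DataArray([0.2, 0.5, 0.8],
                 coords={'quantiles': [0.25, 0.5, 0.75]}, dims='quantiles')
padded = add_endpoints(q, left=[[0.0], [0.0]], right=[[1.0], [1.0]])
# padded.quantiles -> [0., 0.25, 0.5, 0.75, 1.]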
Example #12
Source File: run_length.py    From xclim with Apache License 2.0
def rle(da: xr.DataArray, dim: str = "time", max_chunk: int = 1_000_000):
    """Run-length encode a boolean array along `dim`: the length of each run
    of True values is returned at the run's first index, NaN elsewhere."""
    n = len(da[dim])
    i = xr.DataArray(np.arange(da[dim].size), dims=dim).chunk({dim: 1})
    ind = xr.broadcast(i, da)[0].chunk(da.chunks)
    b = ind.where(~da)  # find indexes where false
    end1 = (
        da.where(b[dim] == b[dim][-1], drop=True) * 0 + n
    )  # add additional end value index (deal with end cases)
    start1 = (
        da.where(b[dim] == b[dim][0], drop=True) * 0 - 1
    )  # add additional start index (deal with end cases)
    b = xr.concat([start1, b, end1], dim)

    # Ensure bfill operates on the entire (unchunked) time dimension.
    # Determine appropriate chunk sizes for the other dims so that the total
    # size per chunk does not exceed max_chunk (default 1,000,000).
    ndims = len(b.shape)
    chunk_dim = b[dim].size
    # divide extra dims into equal size
    # Note : even if calculated chunksize > dim.size result will have chunk==dim.size
    chunksize_ex_dims = None
    if ndims > 1:
        chunksize_ex_dims = np.round(np.power(max_chunk / chunk_dim, 1 / (ndims - 1)))
    chunks = dict()
    chunks[dim] = -1
    for dd in b.dims:
        if dd != dim:
            chunks[dd] = chunksize_ex_dims
    b = b.chunk(chunks)

    # backfill NaNs with the index of the next False position
    z = b.bfill(dim=dim)
    # calculate lengths
    d = z.diff(dim=dim) - 1
    d = d.where(d >= 0)
    return d 
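A sketch of what rle produces on a small boolean series (assumes dask and bottleneck are installed, as xclim expects):

import numpy as np
import xarray as xr

vals = np.array([True, True, True, False, True, True, False])
da = xr.DataArray(vals, dims='time',
                  coords={'time': np.arange(vals.size)}).chunk({'time': -1})
lengths = rle(da).compute()
# Run lengths appear at each run's first index (3 at position 0, 2 at
# position 4); other entries are NaN, plus a zero boundary artifact at the end.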
Example #13
Source File: test_run_length.py    From xclim with Apache License 2.0
def test_simple(self, tas_series, coord, expected, use_dask, use_1dim):
        t = np.zeros(60)
        t[30:40] = 2
        tas = tas_series(t, start="2000-01-01")
        runs = xr.concat((tas, tas), dim="dim0")

        if use_dask:
            runs = runs.chunk({"time": 10, "dim0": 1})

        out = rl.first_run(runs, window=1, dim="time", coord=coord, ufunc_1dim=use_1dim)
        np.testing.assert_array_equal(out.load(), expected) 
Example #14
Source File: test_run_length.py    From xclim with Apache License 2.0
def test_simple(self, tas_series, coord, expected, use_dask, use_1dim):
        t = np.zeros(60)
        t[30:40] = 2
        tas = tas_series(t, start="2000-01-01")
        runs = xr.concat((tas, tas), dim="dim0")

        if use_dask:
            runs = runs.chunk({"time": 10, "dim0": 1})

        out = rl.last_run(runs, window=1, dim="time", coord=coord, ufunc_1dim=use_1dim)
        np.testing.assert_array_equal(out.load(), expected) 
Example #15
Source File: test_run_length.py    From xclim with Apache License 2.0
def test_run_length_with_date(self, tas_series, date, end, expected, use_dask):
        t = np.zeros(360)
        t[140:end] = 1
        tas = tas_series(t, start="2000-01-01")
        runs = xr.concat((tas, tas), dim="dim0")
        runs = runs == 1

        if use_dask:
            runs = runs.chunk({"time": 10, "dim0": 1})

        out = rl.run_length_with_date(runs, window=1, dim="time", date=date)
        np.testing.assert_array_equal(np.mean(out.load()), expected) 
Example #16
Source File: utils.py    From xarrayutils with MIT License
def convert_flux_array(da, da_full, dim, top=True, fillval=0):
    dummy = xr.DataArray(
        ones_like(da_full.data) * fillval, coords=da_full.coords, dims=da_full.dims
    )
    if top:
        da.coords[dim] = da_full[dim][0]
        dummy_cut = dummy[{dim: slice(1, None)}]
        out = xr.concat([da, dummy_cut], dim=dim)
    else:
        da.coords[dim] = da_full[dim][-1]
        dummy_cut = dummy[{dim: slice(0, -1)}]
        out = xr.concat([dummy_cut, da], dim=dim)
    return out 
Example #17
Source File: plot_movie.py    From DLWP with MIT License
def add_pole(da):
    pole = da.sel(lat=da.lat.max()).mean('lon').drop('lat')
    pole = pole.expand_dims(dim='lat', axis=-1).assign_coords(lat=[90.])
    pole = xr.concat([pole.expand_dims(dim='lon', axis=-1).assign_coords(lon=[l]) for l in da.lon], dim='lon')
    result = xr.concat([pole, da], dim='lat')
    return result 
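A toy demonstration (hypothetical field missing the 90N ring; latitudes assumed descending as in DLWP data):

import numpy as np
import xarray as xr

da = xr.DataArray(np.random.rand(2, 4),
                  coords={'lat': [85., 80.], 'lon': [0., 90., 180., 270.]},
                  dims=['lat', 'lon'])
padded = add_pole(da)
# padded.lat -> [90., 85., 80.]; the new 90N row holds the zonal mean at 85N.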
Example #18
Source File: retrieve_cimiss_server.py    From nmc_met_io with GNU General Public License v3.0
def cimiss_model_by_piont_levels(init_time_str,
                                 data_code='NAFP_FOR_FTM_HIGH_EC_ANEA',
                                 fcst_levels=[1000, 950, 925, 900, 850, 800, 700, 600, 500, 400, 300, 250, 200],
                                 time_range=[0, 72], 
                                 point="39.90/116.40", fcst_ele="TEM"):
    """
    Retrieve grid point data from CIMISS service.

    :param init_time_str: model run time, like "2020020600"
    :param data_code: MUSIC data code, default is "NAFP_FOR_FTM_HIGH_EC_ANEA"
    :param fcst_levels: vertical levels, list like [1000, 950, 925, ...]
    :param time_range: [minimum, maximum] forecast hour, default is [0, 72]
    :param point: point location "latitude/longitude"
    :param fcst_ele: forecast element, default is temperature "TEM"
    :return: xarray.DataArray with dims ['time', 'level']
    """

    # loop every level
    data = None
    for fcst_level in fcst_levels:
        temp = cimiss_model_by_piont(
            init_time_str, data_code=data_code, fcst_level=fcst_level,
            time_range=time_range, points=point, fcst_ele=fcst_ele)
        if temp is None:
            return None
        
        temp['level'] = fcst_level
        if data is None:
            data = temp
        else:
            data = pd.concat([data, temp])

    data = data.pivot(index='Validtime', columns='level',values=fcst_ele)
    data = xr.DataArray(data, coords=[data.index.values, data.columns.values],
                        dims=['time', 'level'], name=fcst_ele)
    data = data.loc[{'level':sorted(data.coords['level'].values, reverse=True)}]
    data = data.loc[{'time':sorted(data.coords['time'].values)}]

    return data 
Example #19
Source File: retrieve_micaps_server.py    From nmc_met_io with GNU General Public License v3.0
def get_radar_mosaics(directory, filenames, allExists=True, pbar=False, **kargs):
    """
    Retrieve multiple radar mosaics from MICAPS cassandra service.
    
    Args:
        directory (string): the data directory on the service.
        filenames (list): the list of filenames.
        allExists (boolean): all files should exist, or return None.
        pbar (boolean): Show progress bar, default to False.
        **kargs: key arguments passed to get_radar_mosaic function.
    """

    dataset = []
    if pbar:
        tqdm_filenames = tqdm(filenames, desc=directory + ": ")
    else:
        tqdm_filenames = filenames
    for filename in tqdm_filenames:
        data = get_radar_mosaic(directory, filename=filename, **kargs)
        if data:
            dataset.append(data)
        else:
            if allExists:
                warnings.warn("{} doese not exists.".format(directory+'/'+filename))
                return None
    
    return xr.concat(dataset, dim='time') 
Example #20
Source File: test_utils_times.py    From aospy with Apache License 2.0
def test_extract_months():
    time = xr.DataArray(pd.date_range(start='2001-02-18', end='2002-07-12',
                                      freq='1D'), dims=[TIME_STR])
    months = 'mam'  # March-April-May
    desired = xr.concat([
        xr.DataArray(pd.date_range(start='2001-03-01', end='2001-05-31',
                                   freq='1D'), dims=[TIME_STR]),
        xr.DataArray(pd.date_range(start='2002-03-01', end='2002-05-31',
                                   freq='1D'), dims=[TIME_STR])
    ], dim=TIME_STR)
    actual = extract_months(time, months)
    xr.testing.assert_identical(actual, desired) 
Example #21
Source File: plot_forecasts.py    From DLWP with MIT License
def add_pole(da):
    pole = da.sel(lat=da.lat.max()).mean('lon').drop('lat')
    pole = pole.expand_dims(dim='lat', axis=-1).assign_coords(lat=[90.])
    pole = xr.concat([pole.expand_dims(dim='lon', axis=-1).assign_coords(lon=[l]) for l in da.lon], dim='lon')
    result = xr.concat([pole, da], dim='lat')
    return result 
Example #22
Source File: retrieve_micaps_server.py    From nmc_met_io with GNU General Public License v3.0
def get_model_grids(directory, filenames, allExists=True, pbar=False, **kargs):
    """
    Retrieve multiple time grids from MICAPS cassandra service.
    
    Args:
        directory (string): the data directory on the service.
        filenames (list): the list of filenames.
        allExists (boolean): all files should exist, or return None.
        pbar (boolean): Show progress bar, default to False.
        **kargs: key arguments passed to get_model_grid function.
    """

    dataset = []
    if pbar:
        tqdm_filenames = tqdm(filenames, desc=directory + ": ")
    else:
        tqdm_filenames = filenames
    for filename in tqdm_filenames:
        data = get_model_grid(directory, filename=filename, **kargs)
        if data:
            dataset.append(data)
        else:
            if allExists:
                warnings.warn("{} does not exist.".format(directory+'/'+filename))
                return None
    
    return xr.concat(dataset, dim='time') 
Example #23
Source File: concatenate.py    From starfish with MIT License
def concatenate(intensity_tables: Iterable[IntensityTable]) -> IntensityTable:
    """Concatenate IntensityTables produced for different fields of view or across imaging rounds

    IntensityTables are concatenated along the features axis, and the resulting arrays are stored
    densely, even if the underlying data is sparse, since xarray does not yet support sparse array
    structures. This means that spots that are identified in different rounds and channels will
    be identified as separate features, even if they have exactly identical coordinates.

    To merge spots that share coordinates across rounds and channels into single features amenable
    to decoding, use IntensityTable.combine_first()

    Parameters
    ----------
    intensity_tables : Iterable[IntensityTable]
        iterable (list-like) of intensity tables to combine

    Returns
    -------
        merged IntensityTable. Missing values are filled with np.NaN

    See Also
    --------
    Sparse Arrays in xarray: https://github.com/pydata/xarray/issues/1375
    Combine_first: http://xarray.pydata.org/en/stable/combining.html#combine
    """
    concatenated_intensities: xr.DataArray = xr.concat(list(intensity_tables), Features.AXIS)
    return IntensityTable(concatenated_intensities) 
Example #24
Source File: retrieve_micaps_server.py    From nmc_met_io with GNU General Public License v3.0
def get_model_3D_grid(directory, filename, levels, allExists=True, pbar=False, **kargs):
    """
    Retrieve 3D [level, lat, lon] grids from MICAPS cassandra service.
    
    Args:
        directory (string): the data directory on the service, which includes all levels.
        filename (string): the data file name.
        levels (list): the vertical levels to retrieve.
        allExists (boolean): all levels should exist; if not, return None.
        pbar (boolean): show progress bar.
        **kargs: key arguments passed to get_model_grid function.

    Examples:
    >>> directory = "ECMWF_HR/TMP"
    >>> levels = [1000, 950, 925, 900, 850, 800, 700, 600, 500, 400, 300, 250, 200, 100]
    >>> filename = "19083008.024"
    >>> data = get_model_3D_grid(directory, filename, levels)
    """

    dataset = []
    if pbar:
        tqdm_levels = tqdm(levels, desc=directory+": ")
    else:
        tqdm_levels = levels
    for level in tqdm_levels:
        if directory[-1] == '/':
            dataDir = directory + str(int(level)).strip()
        else:
            dataDir = directory + '/' + str(int(level)).strip()
        data = get_model_grid(dataDir, filename=filename, **kargs)
        if data:
            dataset.append(data)
        else:
            if allExists:
                warnings.warn("{} does not exist.".format(dataDir+'/'+filename))
                return None

    return xr.concat(dataset, dim='level') 
Example #25
Source File: inference_data.py    From arviz with Apache License 2.0
def __add__(self, other):
        """Concatenate two InferenceData objects."""
        return concat(self, other, copy=True, inplace=False) 
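In use, `+` is just a convenience around arviz.concat; for example (a hypothetical pair of InferenceData objects with disjoint groups):

import numpy as np
import arviz as az

idata1 = az.from_dict(posterior={'mu': np.random.randn(2, 50)})
idata2 = az.from_dict(prior={'mu': np.random.randn(2, 50)})
combined = idata1 + idata2  # same as az.concat(idata1, idata2, copy=True)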
Example #26
Source File: epoch.py    From neuroglia with BSD 3-Clause "New" or "Revised" License
def transform(self, X):
        """Reduce traces around each event

        Parameters
        ----------
        X : pandas.DataFrame with a columns ['time','duration']

        Returns
        -------
        Xt : xarray.DataArray with columns ['event','neuron']
        """

        # define a local function that will extract traces around each event
        def extractor(ev):
            # select samples falling inside the window [time, time + duration)
            mask = (
                (self.traces.index >= ev['time'])
                & (self.traces.index < (ev['time'] + ev['duration']))
                )
            return (
                self.traces[mask]
                .apply(self.func,axis=0)
                .to_xarray()
                .rename({'index':'neuron'})
            )

        # do the extraction
        tensor = [extractor(ev) for _,ev in X.iterrows()]
        concat_dim = events_to_xr_dim(X)

        # concatenate the DataArrays into a single DataArray
        return xr.concat(tensor,dim=concat_dim) 
Example #27
Source File: event.py    From neuroglia with BSD 3-Clause "New" or "Revised" License
def transform(self, X):
        """Sample traces around each event

        Parameters
        ----------
        X : pandas.DataFrame with a column named 'time'

        Returns
        -------
        Xt : xarray.DataArray with columns ['event','sample_times','neuron']
        """
        self._make_splined_traces()

        # define a local function that will extract traces around each event
        def extractor(ev):
            t = self.sample_times + ev['time']
            interpolated = self.splined_traces_.apply(
                lambda s: pd.Series(s(t),index=self.sample_times)
                )
            return xr.DataArray(interpolated.T,dims=['sample_times','neuron'])

        # do the extraction
        tensor = [extractor(ev) for _,ev in X.iterrows()]
        concat_dim = events_to_xr_dim(X)

        # concatenate the DataArrays into a single DataArray
        return xr.concat(tensor,dim=concat_dim) 
Example #28
Source File: retrieve_micaps_server.py    From nmc_met_io with GNU General Public License v3.0
def get_swan_radars(directory, filenames, allExists=True, pbar=False, **kargs):
    """
    Retrieve multiple swan 131 radar from MICAPS cassandra service.
    
    Args:
        directory (string): the data directory on the service.
        filenames (list): the list of filenames.
        allExists (boolean): all files should exist, or return None.
        pbar (boolean): Show progress bar, default to False.
        **kargs: key arguments passed to get_swan_radar function.
    """

    dataset = []
    if pbar:
        tqdm_filenames = tqdm(filenames, desc=directory + ": ")
    else:
        tqdm_filenames = filenames
    for filename in tqdm_filenames:
        data = get_swan_radar(directory, filename=filename, **kargs)
        if data:
            dataset.append(data)
        else:
            if allExists:
                warnings.warn("{} does not exist.".format(directory+'/'+filename))
                return None
    
    return xr.concat(dataset, dim='time') 
Example #29
Source File: retrieve_micaps_server.py    From nmc_met_io with GNU General Public License v3.0
def get_fy_awxs(directory, filenames, allExists=True, pbar=False, **kargs):
    """
    Retrieve multiple satellite images from MICAPS cassandra service.
    
    Args:
        directory (string): the data directory on the service.
        filenames (list): the list of filenames.
        allExists (boolean): all files should exist, or return None.
        pbar (boolean): Show progress bar, default to False.
        **kargs: key arguments passed to get_fy_awx function.
    """

    dataset = []
    if pbar:
        tqdm_filenames = tqdm(filenames, desc=directory + ": ")
    else:
        tqdm_filenames = filenames
    for filename in tqdm_filenames:
        data = get_fy_awx(directory, filename=filename, **kargs)
        if data:
            dataset.append(data)
        else:
            if allExists:
                warnings.warn("{} does not exist.".format(directory+'/'+filename))
                return None
    
    return xr.concat(dataset, dim='time') 
Example #30
Source File: retrieve_micaps_server.py    From nmc_met_io with GNU General Public License v3.0
def get_tlogps(directory, filenames, allExists=True, pbar=False, **kargs):
    """
    Retrieve multiple tlog observation from MICAPS cassandra service.
    
    Args:
        directory (string): the data directory on the service.
        filenames (list): the list of filenames.
        allExists (boolean): all files should exist, or return None.
        pbar (boolean): Show progress bar, default to False.
        **kargs: key arguments passed to get_tlogp function.
    """

    dataset = []
    if pbar:
        tqdm_filenames = tqdm(filenames, desc=directory + ": ")
    else:
        tqdm_filenames = filenames
    for filename in tqdm_filenames:
        data = get_tlogp(directory, filename=filename, **kargs)
        if data:
            dataset.append(data)
        else:
            if allExists:
                warnings.warn("{} does not exist.".format(directory+'/'+filename))
                return None
    
    return pd.concat(dataset)