Python dask.array.vstack() Examples

The following are 7 code examples of dask.array.vstack(). You can go to the original project or source file by following the links above each example. You may also want to check out all available functions/classes of the module dask.array, or try the search function.
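Before diving into the examples, a minimal, self-contained sketch of what dask.array.vstack() itself does may help: like numpy.vstack, it stacks a sequence of arrays vertically (row-wise), promoting 1-D inputs to rows, but it operates lazily on chunked dask arrays. The array contents below are made up for illustration.

import numpy as np
import dask.array as da

# Two chunked 2-D dask arrays with the same number of columns.
a = da.from_array(np.arange(6).reshape(2, 3), chunks=(1, 3))
b = da.from_array(np.arange(6, 12).reshape(2, 3), chunks=(1, 3))

stacked = da.vstack([a, b])   # lazy; shape (4, 3)
print(stacked.compute())      # materializes the stacked result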
Example #1
Source File: transform.py    From nbodykit with GNU General Public License v3.0
import dask.array as da

def StackColumns(*cols):
    """
    Stack the input dask arrays vertically, column by column.

    This uses :func:`dask.array.vstack`.

    Parameters
    ----------
    *cols : :class:`dask.array.Array`
        the dask arrays to stack vertically together

    Returns
    -------
    :class:`dask.array.Array` :
        the dask array where columns correspond to the input arrays

    Raises
    ------
    TypeError
        If the input columns are not dask arrays
    """
    cols = da.broadcast_arrays(*cols)
    return da.vstack(cols).T 
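For illustration, a small usage sketch reusing the StackColumns defined above, with made-up data: two 1-D dask arrays become the two columns of the result, because vstack places each input on its own row and the final transpose turns those rows into columns.

import numpy as np
import dask.array as da

x = da.from_array(np.array([1, 2, 3]), chunks=3)
y = da.from_array(np.array([4, 5, 6]), chunks=3)

xy = StackColumns(x, y)   # shape (3, 2): column 0 is x, column 1 is y
print(xy.compute())
# [[1 4]
#  [2 5]
#  [3 6]]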
Example #2
Source File: geometry.py    From pyresample with GNU Lesser General Public License v3.0
import numpy as np

def get_lonlats(self, nprocs=None, data_slice=None, cache=False, dtype=None, chunks=None):
    """Return lon and lat arrays of the area."""
    # Pick the lazy dask implementation when chunked output is requested,
    # otherwise fall back to eager numpy.
    if chunks is not None:
        from dask.array import vstack
    else:
        vstack = np.vstack

    llons = []
    llats = []
    try:
        row_slice, col_slice = data_slice
    except TypeError:
        # No slice given: cover the full area.
        row_slice = slice(0, self.height)
        col_slice = slice(0, self.width)
    offset = 0
    for definition in self.defs:
        # Translate the requested rows into this sub-definition's local coordinates.
        local_row_slice = slice(max(row_slice.start - offset, 0),
                                min(max(row_slice.stop - offset, 0), definition.height),
                                row_slice.step)
        lons, lats = definition.get_lonlats(nprocs=nprocs, data_slice=(local_row_slice, col_slice),
                                            cache=cache, dtype=dtype, chunks=chunks)

        llons.append(lons)
        llats.append(lats)
        offset += lons.shape[0]

    # Stack the per-definition rows back into full-area arrays.
    self.lons = vstack(llons)
    self.lats = vstack(llats)

    return self.lons, self.lats
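The eager/lazy dispatch at the top of that method is a reusable idiom on its own: bind the name vstack to either implementation, then write the rest of the code once. A minimal standalone sketch (the name stack_rows is hypothetical, not part of pyresample):

def stack_rows(arrays, chunks=None):
    # Choose the lazy dask implementation when chunking is requested,
    # the eager numpy one otherwise; the two share a call signature.
    if chunks is not None:
        from dask.array import vstack
    else:
        from numpy import vstack
    return vstack(arrays)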
Example #3
Source File: __init__.py    From satpy with GNU General Public License v3.0
import dask.array as da

def __call__(self, projectables, *args, **kwargs):
    """Generate the composite."""
    from trollimage.image import rgb2ycbcr, ycbcr2rgb
    projectables = self.match_data_arrays(projectables)
    luminance = projectables[0].copy()
    luminance /= 100.
    # Cap luminance at 1.0
    luminance = da.where(luminance > 1., 1., luminance)

    # Get the enhanced version of the composite to be sharpened.
    # enhance2dataset is a module-level helper in satpy.composites.
    rgb_img = enhance2dataset(projectables[1])

    # This will all eventually be replaced with the trollimage convert() method:
    # ycbcr_img = rgb_img.convert('YCbCr')
    # ycbcr_img.data[0, :, :] = luminance
    # rgb_img = ycbcr_img.convert('RGB')

    # Replace the luminance of the IR composite (y__ is discarded;
    # luminance takes its place in the inverse conversion below).
    y__, cb_, cr_ = rgb2ycbcr(rgb_img.data[0, :, :],
                              rgb_img.data[1, :, :],
                              rgb_img.data[2, :, :])

    r__, g__, b__ = ycbcr2rgb(luminance, cb_, cr_)
    y_size, x_size = r__.shape
    # Give each band a leading length-1 axis, then stack into a (3, y, x) cube.
    r__ = da.reshape(r__, (1, y_size, x_size))
    g__ = da.reshape(g__, (1, y_size, x_size))
    b__ = da.reshape(b__, (1, y_size, x_size))

    rgb_img.data = da.vstack((r__, g__, b__))
    return super(LuminanceSharpeningCompositor, self).__call__(rgb_img, *args, **kwargs)
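The reshape-then-vstack step is the generic way to combine several 2-D fields into one (bands, y, x) cube; a short sketch with made-up data:

import numpy as np
import dask.array as da

band = da.from_array(np.random.rand(4, 5), chunks=(2, 5))
# Give each 2-D band a leading length-1 axis, then stack along it.
r = da.reshape(band, (1, 4, 5))
g = da.reshape(band * 0.5, (1, 4, 5))
b = da.reshape(band * 0.25, (1, 4, 5))
cube = da.vstack((r, g, b))   # shape (3, 4, 5)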
Example #4
Source File: seviri_l1b_hrit.py    From satpy with GNU General Public License v3.0
import dask.array as da
import xarray as xr

def pad_hrv_data(self, res):
    """Add empty pixels around the HRV."""
    # pad_data and logger are module-level helpers in this satpy reader module.
    logger.debug('Padding HRV data to full disk')
    nlines = int(self.mda['number_of_lines'])

    segment_number = self.mda['segment_sequence_number']

    current_first_line = (segment_number
                          - self.mda['planned_start_segment_number']) * nlines
    bounds = self.epilogue['ImageProductionStats']['ActualL15CoverageHRV']

    upper_south_line = bounds['LowerNorthLineActual'] - current_first_line - 1
    upper_south_line = min(max(upper_south_line, 0), nlines)

    data_list = list()
    if upper_south_line > 0:
        # We have some of the lower window; 11136 is the full-disk HRV line width.
        data_lower = pad_data(res[:upper_south_line, :].data,
                              (upper_south_line, 11136),
                              bounds['LowerEastColumnActual'],
                              bounds['LowerWestColumnActual'])
        data_list.append(data_lower)

    if upper_south_line < nlines:
        # We have some of the upper window
        data_upper = pad_data(res[upper_south_line:, :].data,
                              (nlines - upper_south_line, 11136),
                              bounds['UpperEastColumnActual'],
                              bounds['UpperWestColumnActual'])
        data_list.append(data_upper)
    # Stack the padded windows row-wise into one full-disk array.
    return xr.DataArray(da.vstack(data_list), dims=('y', 'x'))
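The underlying pattern, pad each window to a common width and then vstack the pieces, can be sketched on its own (shapes and pad widths below are made up, and dask's own da.pad stands in for satpy's pad_data helper):

import dask.array as da

lower = da.zeros((3, 4), chunks=(3, 4))
upper = da.ones((2, 4), chunks=(2, 4))
# Pad each piece to the same number of columns before stacking rows.
lower = da.pad(lower, ((0, 0), (1, 1)))   # -> shape (3, 6)
upper = da.pad(upper, ((0, 0), (2, 0)))   # -> shape (2, 6)
full = da.vstack([lower, upper])          # -> shape (5, 6)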
Example #5
Source File: data.py    From dask-ml with BSD 3-Clause "New" or "Revised" License
import numpy as np
import dask.array as da
import dask.dataframe as dd
from typing import Any, List, Optional, Union
# ArrayLike, DataFrameType, SeriesType come from dask-ml's internal typing helpers.

def fit(
    self,
    X: Union[ArrayLike, DataFrameType],
    y: Optional[Union[ArrayLike, SeriesType]] = None,
) -> "RobustScaler":
    q_min, q_max = self.quantile_range
    if not 0 <= q_min <= q_max <= 100:
        raise ValueError("Invalid quantile range: %s" % str(self.quantile_range))

    if isinstance(X, dd.DataFrame):
        # Turn the dask DataFrame into one dask array by stacking its
        # delayed partitions row-wise.
        n_columns = len(X.columns)
        partition_lengths = X.map_partitions(len).compute()
        dtype = np.find_common_type(X.dtypes, [])
        blocks = X.to_delayed()
        X = da.vstack(
            [
                da.from_delayed(
                    block.values, shape=(length, n_columns), dtype=dtype
                )
                for block, length in zip(blocks, partition_lengths)
            ]
        )

    # One (q_min, median, q_max) triple per column, stacked into a table.
    quantiles: Any = [da.percentile(col, [q_min, 50.0, q_max]) for col in X.T]
    quantiles = da.vstack(quantiles).compute()
    self.center_: List[float] = quantiles[:, 1]
    self.scale_: List[float] = quantiles[:, 2] - quantiles[:, 0]
    self.scale_ = _handle_zeros_in_scale(self.scale_, copy=False)
    self.n_features_in_ = X.shape[1]
    return self
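The closing lines show a common reduction idiom: compute a 1-D statistic per column, then vstack the per-column results into one (n_features, n_stats) table. A minimal standalone sketch with made-up data:

import numpy as np
import dask.array as da

X = da.from_array(np.random.rand(100, 3), chunks=(50, 3))
# One lazy percentile triple per column, stacked into a (3, 3) table.
quantiles = da.vstack([da.percentile(col, [25.0, 50.0, 75.0]) for col in X.T]).compute()
center = quantiles[:, 1]                    # per-column median
scale = quantiles[:, 2] - quantiles[:, 0]   # per-column IQR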
Example #6
Source File: data.py    From dask-ml with BSD 3-Clause "New" or "Revised" License
import dask.array as da
from dask import compute

def _dense_fit(
    self, X: Union[ArrayLike, DataFrameType], random_state: int
) -> Union[ArrayLike, DataFrameType]:
    # self.references_ holds quantile positions in [0, 1]; da.percentile expects [0, 100].
    references = self.references_ * 100
    quantiles = [da.percentile(col, references) for col in X.T]
    # dask's top-level compute() materializes the stacked, transposed quantile table.
    (self.quantiles_,) = compute(da.vstack(quantiles).T)
Example #7
Source File: data.py    From dask-ml with BSD 3-Clause "New" or "Revised" License
import dask.array as da

def _transform(
    self, X: Union[ArrayLike, DataFrameType], inverse: bool = False
) -> Union[ArrayLike, DataFrameType]:
    X = X.copy()  # ...
    transformed = [
        self._transform_col(
            X[:, feature_idx], self.quantiles_[:, feature_idx], inverse
        )
        for feature_idx in range(X.shape[1])
    ]
    # Per-column results can have unknown chunk sizes, so vstack must be told
    # to proceed anyway; the transpose restores the (samples, features) layout.
    return da.vstack(transformed, allow_unknown_chunksizes=True).T
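As a closing note, allow_unknown_chunksizes matters whenever the inputs come from operations whose output sizes dask cannot predict, such as boolean masking. A small sketch of the failure mode it works around:

import numpy as np
import dask.array as da

x = da.from_array(np.arange(10), chunks=5)
filtered = x[x > 4]   # boolean indexing leaves chunk sizes unknown
# da.vstack([filtered, filtered]) would raise ValueError here;
# opting in defers the shape check to compute time.
stacked = da.vstack([filtered, filtered], allow_unknown_chunksizes=True)
print(stacked.compute())
# [[5 6 7 8 9]
#  [5 6 7 8 9]]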