Python scipy.ndimage.filters.median_filter() Examples

The following are 26 code examples of scipy.ndimage.filters.median_filter(). Each example is taken from an open-source project; the source file and license are noted above each snippet. You may also want to check out all available functions/classes of the module scipy.ndimage.filters.
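Before the project examples, here is a minimal, self-contained sketch of the basic call (not taken from any project below; the array and parameters are illustrative). Note that in recent SciPy releases the scipy.ndimage.filters submodule is a deprecated alias, so the sketches on this page import median_filter from scipy.ndimage directly.

import numpy as np
from scipy.ndimage import median_filter

noisy = np.random.default_rng(0).normal(size=(64, 64))
noisy[10, 10] = 100.0                                   # inject a single outlier

# Replace each pixel by the median of its 3x3 neighbourhood, reflecting at the edges.
smoothed = median_filter(noisy, size=3, mode='reflect')
print(smoothed[10, 10])                                 # the outlier is suppressed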
Example #1
Source File: trends.py    From astrobase with MIT License
def smooth_magseries_ndimage_medfilt(mags, windowsize):
    '''This smooths the magseries with a median filter that reflects the array
    at the boundary.

    See https://docs.scipy.org/doc/scipy/reference/tutorial/ndimage.html for
    details.

    Parameters
    ----------

    mags : np.array
        The input mags/flux time-series to smooth.

    windowsize : int
        This is an odd integer giving the smoothing window size.

    Returns
    -------

    np.array
        The smoothed mag/flux time-series array.

    '''
    return median_filter(mags, size=windowsize, mode='reflect') 
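As a hedged usage sketch (the light curve below is synthetic and not part of astrobase), the helper above reduces to a single median_filter call with reflect padding:

import numpy as np
from scipy.ndimage import median_filter

rng = np.random.default_rng(0)
mags = 12.0 + 0.01 * rng.normal(size=500)       # fake magnitude time-series
windowsize = 11                                 # must be an odd integer
smoothed = median_filter(mags, size=windowsize, mode='reflect')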
Example #2
Source File: segmenter.py    From msaf with MIT License
def pick_peaks(nc, L=16, offset_denom=0.1):
    """Obtain peaks from a novelty curve using an adaptive threshold."""
    offset = nc.mean() * float(offset_denom)
    th = filters.median_filter(nc, size=L) + offset
    #th = filters.gaussian_filter(nc, sigma=L/2., mode="nearest") + offset
    #import pylab as plt
    #plt.plot(nc)
    #plt.plot(th)
    #plt.show()
    # th = np.ones(nc.shape[0]) * nc.mean() - 0.08
    peaks = []
    for i in range(1, nc.shape[0] - 1):
        # is it a peak?
        if nc[i - 1] < nc[i] and nc[i] > nc[i + 1]:
            # is it above the threshold?
            if nc[i] > th[i]:
                peaks.append(i)
    return peaks 
Example #3
Source File: segmenter.py    From msaf with MIT License
def pick_peaks(nc, L=16):
    """Obtain peaks from a novelty curve using an adaptive threshold."""
    offset = nc.mean() / 20.

    nc = filters.gaussian_filter1d(nc, sigma=4)  # Smooth out nc

    th = filters.median_filter(nc, size=L) + offset
    #th = filters.gaussian_filter(nc, sigma=L/2., mode="nearest") + offset

    peaks = []
    for i in range(1, nc.shape[0] - 1):
        # is it a peak?
        if nc[i - 1] < nc[i] and nc[i] > nc[i + 1]:
            # is it above the threshold?
            if nc[i] > th[i]:
                peaks.append(i)
    #plt.plot(nc)
    #plt.plot(th)
    #for peak in peaks:
        #plt.axvline(peak)
    #plt.show()

    return peaks 
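A short, illustrative run of the same peak-picking idea on a synthetic novelty curve (the data and parameters below are made up for illustration and are not taken from msaf):

import numpy as np
from scipy.ndimage import median_filter

rng = np.random.default_rng(0)
nc = np.abs(rng.normal(size=200))                   # stand-in novelty curve
nc[[40, 120, 170]] += 5.0                           # plant three obvious peaks

th = median_filter(nc, size=16) + 0.1 * nc.mean()   # adaptive threshold, as above
peaks = [i for i in range(1, len(nc) - 1)
         if nc[i - 1] < nc[i] > nc[i + 1] and nc[i] > th[i]]
print(peaks)                                        # the planted indices 40, 120, 170 are among the picks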
Example #4
Source File: ChangeDet.py    From PyRAT with Mozilla Public License 2.0
def filter(self, cov, *args, **kwargs):
        cdim = cov.shape
        npol = cdim[0] // 2
        c_int = lambda n: cov[n, n, ...].real

        max_coh = np.zeros(cdim[2:], dtype='f8')
        inten = np.zeros(cdim[2:], dtype='f8')
        for n in range(npol):
            coh = np.abs(cov[n, n + npol, ...]) / np.sqrt(c_int(n) * c_int(n + npol))
            up_mask = (coh > max_coh)
            max_coh = (1 - up_mask) * max_coh + up_mask * coh
            inten = (1 - up_mask) * inten + 0.5 * up_mask * (c_int(n) + c_int(n + npol))

        mask = np.asarray((inten > self.noise) * (max_coh < self.coh_thresh), dtype='u1')

        return median_filter(mask, 3, mode='constant', cval=0) 
Example #5
Source File: segmenter.py    From msaf with MIT License
def filter_activation_matrix(G, R):
    """Filters the activation matrix G, and returns a flattened copy."""

    #import pylab as plt
    #plt.imshow(G, interpolation="nearest", aspect="auto")
    #plt.show()

    idx = np.argmax(G, axis=1)
    max_idx = np.arange(G.shape[0])
    max_idx = (max_idx, idx.flatten())
    G[:, :] = 0
    G[max_idx] = idx + 1

    # TODO: Order matters?
    G = np.sum(G, axis=1)
    G = median_filter(G[:, np.newaxis], R)

    return G.flatten() 
Example #6
Source File: pycrown.py    From pycrown with GNU General Public License v3.0
def _smooth_raster(self, raster, ws, resolution, circular=False):
        """ Smooth a raster with a median filter

        Parameters
        ----------
        raster :      ndarray
                      raster to be smoothed
        ws :          int
                      window size of smoothing filter
        resolution :  int
                      resolution of raster in m
        circular :    bool, optional
                      set to True for disc-shaped filter kernel, block otherwise

        Returns
        -------
        ndarray
            smoothed raster
        """
        return filters.median_filter(
            raster, footprint=self._get_kernel(ws, circular=circular)) 
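The _get_kernel helper is not shown in this excerpt. A plausible stand-in that builds a block or disc-shaped boolean footprint could look like the following sketch (an assumption for illustration, not pycrown's actual implementation):

import numpy as np
from scipy.ndimage import median_filter

def disc_footprint(ws, circular=False):
    """Return a ws x ws boolean footprint: a full block, or a disc when circular=True."""
    if not circular:
        return np.ones((ws, ws), dtype=bool)
    r = (ws - 1) / 2.0
    y, x = np.ogrid[-r:r + 1, -r:r + 1]
    return (x ** 2 + y ** 2) <= r ** 2

raster = np.random.rand(100, 100)
smoothed = median_filter(raster, footprint=disc_footprint(5, circular=True))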
Example #7
Source File: resonator.py    From qkit with GNU General Public License v2.0
def set_prefilter(self,gaussian = False, median = False, params = []):
        self._do_prefilter_data = False
        if gaussian or median:
            self._do_prefilter_data = True

        #print gaussian, median
        if median:
            self._prefilter = median_filter
            #print("median_filter")
            if params:
                self._prefilter_params = params[0]
            else:
                self._prefilter_params = 6

        if gaussian:
            self._prefilter = gaussian_filter1d
            #print("gaussian_filter1d")
            if params:
                self._prefilter_params = params[0]
            else:
                self._prefilter_params = 6 # 0.4 
Example #8
Source File: test_median_filter.py    From costar_plan with Apache License 2.0
def compare_pyfunc_median_filter_with_direct_call(self):
        with self.test_session() as sess:
            test_input = np.random.random((10, 9)).astype(np.float32)
            test_kernel = (3, 3)
            test_input_tf = tf.convert_to_tensor(test_input)
            filter_result_tf = grasp_dataset_median_filter(test_input_tf, test_kernel[0], test_kernel[1])
            filter_result_np = median_filter(test_input, test_kernel)
            filter_result_tf = sess.run(filter_result_tf)
            self.assertAllEqual(filter_result_tf, filter_result_np) 
Example #9
Source File: twoD.py    From kombine with MIT License
def __init__(self, inp_img):
        self.ndim = 2

        # Load up the image (greyscale) and filter to soften it up
        img = median_filter(imread(inp_img, flatten=True), 5)

        # Convert 'ij' indexing to 'xy' coordinates
        self.img = np.flipud(img).T
        self._lower_left = np.array([0., 0.])
        self._upper_right = self.img.shape

        # Construct a spline interpolant to use as a target
        x = np.arange(self._lower_left[0], self._upper_right[0], 1)
        y = np.arange(self._lower_left[1], self._upper_right[1], 1)
        self._interpolant = RectBivariateSpline(x, y, self.img) 
Example #10
Source File: Filter.py    From PyRAT with Mozilla Public License 2.0
def filter(self, array, *args, **kwargs):
        win = self.win
        if array.ndim == 3:
            win = [1] + self.win
        if array.ndim == 4:
            win = [1, 1] + self.win

        if np.iscomplexobj(array):
            return filters.median_filter(array.real, size=win) + 1j * filters.median_filter(array.imag, size=win)
        elif self.phase is True:
            tmp = np.exp(1j * array)
            tmp = filters.uniform_filter(tmp.real, win) + 1j * filters.uniform_filter(tmp.imag, win)
            return np.angle(tmp)
        else:
            return filters.median_filter(array.real, size=win) 
Example #11
Source File: Denoise.py    From PyRAT with Mozilla Public License 2.0
def median(array, win):
        return filters.median_filter(array.real, win) + 1j * filters.median_filter(array.imag, win) 
Example #12
Source File: ChangeDet.py    From PyRAT with Mozilla Public License 2.0
def filter(self, data, *args, **kwargs):

        if (len(data) == 4):
            c1 = data[0]
            n1 = data[1]
            c2 = data[2]
            n2 = data[3]
        else:
            c1 = data[0]
            n1 = self.n1
            c2 = data[1]
            n2 = self.n2

        if c1.ndim > 2:
            p = c1.shape[0]
        else:
            p = 1

        lnq = p * ((n1 + n2) * np.log(n1 + n2) - n1 * np.log(n1) - n2 * np.log(n2)) + \
              n1 * np.log(block_det(c1)) + \
              n2 * np.log(block_det(c2)) - \
              (n1 + n2) * np.log(block_det(c1 + c2))

        rho = 1 - (2 * p ** 2 - 1) * (1 / n1 + 1 / n2 - 1 / (n1 + n2)) / (6 * p)

        o_2 = p ** 2 * (p ** 2 - 1) * \
              (1 / n1 ** 2 + 1 / n2 ** 2 - 1 / (n1 + n2) ** 2) / (24 * rho ** 2) - \
              0.25 * p ** 2 * (1 - 1 / rho) ** 2

        lnq *= -2 * rho

        pfa = (1 - o_2) * chi2.cdf(lnq, p ** 2) + o_2 * chi2.cdf(lnq, p ** 2 + 4)

        return (pfa, median_filter(pfa > self.p_thresh, 3, mode='constant', cval=0)) 
Example #13
Source File: median_blur.py    From plantcv with MIT License
def median_blur(gray_img, ksize):
    """Applies a median blur filter (applies median value to central pixel within a kernel size).

    Inputs:
    gray_img  = Grayscale image data
    ksize = kernel size => integer or tuple, ksize x ksize box if integer, (n, m) size box if tuple

    Returns:
    img_mblur = blurred image


    :param gray_img: numpy.ndarray
    :param ksize: int or tuple
    :return img_mblur: numpy.ndarray
    """

    # Make sure ksize is valid
    if type(ksize) is not int and type(ksize) is not tuple:
        fatal_error("Invalid ksize, must be integer or tuple")

    img_mblur = median_filter(gray_img, size=ksize)
    params.device += 1
    if params.debug == 'print':
        print_image(img_mblur, os.path.join(params.debug_outdir,
                                            str(params.device) + '_median_blur' + str(ksize) + '.png'))
    elif params.debug == 'plot':
        plot_image(img_mblur, cmap='gray')
    return img_mblur 
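Illustrative calls showing the two accepted ksize forms on a synthetic grayscale image (a sketch only, not part of a plantcv workflow):

import numpy as np
from scipy.ndimage import median_filter

gray_img = np.random.randint(0, 256, (120, 160)).astype(np.uint8)
square = median_filter(gray_img, size=5)          # 5 x 5 neighbourhood
rectangle = median_filter(gray_img, size=(3, 7))  # 3 rows x 7 columns neighbourhood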
Example #14
Source File: preprocess.py    From srep with GNU General Public License v3.0
def _get_amp(data, framerate, num_semg_row, num_semg_col):
    data = np.abs(data)
    data = np.transpose([lowpass(ch, 2, framerate, 4, zero_phase=True) for ch in data.T])
    return [median_filter(image, 3).mean() for image in data.reshape(-1, num_semg_row, num_semg_col)] 
Example #15
Source File: preprocess.py    From srep with GNU General Public License v3.0
def _csl_cut(data, framerate):
    window = int(np.round(150 * framerate / 2048))
    data = data[:len(data) // window * window].reshape(-1, 150, data.shape[1])
    rms = np.sqrt(np.mean(np.square(data), axis=1))
    rms = [median_filter(image, 3).ravel() for image in rms.reshape(-1, 24, 7)]
    rms = np.mean(rms, axis=1)
    threshold = np.mean(rms)
    mask = rms > threshold
    for i in range(1, len(mask) - 1):
        if not mask[i] and mask[i - 1] and mask[i + 1]:
            mask[i] = True
    from .. import utils
    begin, end = max(utils.continuous_segments(mask),
                     key=lambda s: (mask[s[0]], s[1] - s[0]))
    return begin * window, end * window 
Example #16
Source File: preprocess.py    From srep with GNU General Public License v3.0
def __call__(self, data, num_semg_row, num_semg_col, **kargs):
        return np.array([median_filter(image, 3).ravel() for image
                         in data.reshape(-1, num_semg_row, num_semg_col)]) 
Example #17
Source File: utils.py    From neural-style with MIT License
def deprocess_img_and_save(img, filename):
    """Undo pre-processing on an image, and save it."""
    img = img[0, :, :, :]
    add_imagenet_mean(img)
    img = img[::-1].transpose((1, 2, 0))
    img = np.clip(img, 0, 255).astype(np.uint8)
    img = median_filter(img, size=(3, 3, 1))
    try:
        imsave(filename, img)
    except OSError as e:
        print(e)
        sys.exit(1) 
Example #18
Source File: pattern_noise.py    From banzai with GNU General Public License v3.0
def compute_snr(power_2d, fractional_window_size=0.05):
    """
    Compute the 1D signal-to-noise ratio of the power spectrum derived from the 2D Fourier transform

    Parameters
    ----------
    power_2d : numpy array
        The 2D Fourier transform of the data
    fractional_window_size : float
        Median filter window size as a fraction of the 1D power array

    Returns
    -------
    snr : numpy array
        The 1D SNR
    """
    power = np.median(power_2d, axis=0)
    p2p_scatter = abs(power[1:] - power[:-1])
    power = power[1:]  # Throw away DC term

    # Median filter
    window_size = get_odd_integer(fractional_window_size * len(power))
    continuum = median_filter(power, size=window_size)
    pixel_to_pixel_scatter = median_filter(p2p_scatter, size=window_size)
    snr = (power - continuum) / pixel_to_pixel_scatter

    # Also divide out the global scatter for any residual structure that was not removed with the median filter
    global_scatter = robust_standard_deviation(snr)
    snr /= global_scatter

    return snr 
Example #19
Source File: transform.py    From fast-neural-style-keras with Apache License 2.0
def median_filter_all_colours(im_small, window_size):
    """
    Applies a median filter to all colour channels
    """
    ims = []
    for d in range(3):
        im_conv_d = median_filter(im_small[:,:,d], size=(window_size,window_size))
        ims.append(im_conv_d)

    im_conv = np.stack(ims, axis=2).astype("uint8")
    
    return im_conv 
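If desired, the per-channel loop above can be collapsed into a single call: a window size of 1 along the channel axis filters each colour plane independently (a sketch with a synthetic image, not part of the original project):

import numpy as np
from scipy.ndimage import median_filter

im_small = np.random.randint(0, 256, (64, 64, 3)).astype("uint8")
window_size = 5
im_conv = median_filter(im_small, size=(window_size, window_size, 1))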
Example #20
Source File: test_median.py    From gputools with BSD 3-Clause "New" or "Revised" License
def _test_single(dshape, size , cval = 0., dtype = np.float32, skip_assert = False):
    d = np.random.randint(0, 40, dshape).astype(dtype)*0

    out1 = spf.median_filter(d, size, mode = "constant", cval = cval)
    out2 = median_filter(d, size, cval=cval)

    print(("shape: %s \tsize: %s\tcval: %.2f\tdtype: %s\tdifference: %.2f" % (dshape, size,cval, dtype,np.amax(np.abs(out1 - out2)))))
    if not skip_assert:
        npt.assert_almost_equal(out1,out2, decimal = 5)
    return out1, out2 
Example #21
Source File: segmenter.py    From msaf with MIT License
def median_filter(X, M=8):
    """Median filter along the first axis of the feature matrix X."""
    for i in range(X.shape[1]):
        X[:, i] = filters.median_filter(X[:, i], size=M)
    return X 
Example #22
Source File: segmenter.py    From msaf with MIT License
def median_filter(X, M=8):
    """Median filter along the first axis of the feature matrix X."""
    for i in range(X.shape[1]):
        X[:, i] = filters.median_filter(X[:, i], size=M)
    return X 
Example #23
Source File: masking.py    From MDT with GNU Lesser General Public License v3.0
def generate_simple_wm_mask(scalar_map, whole_brain_mask, threshold=0.3, median_radius=1, nmr_filter_passes=2):
    """Generate a simple white matter mask by thresholding the given map and smoothing it using a median filter.

    Everything below the given threshold will be masked (not used). It also applies the regular brain mask to
    only retain values inside the brain.

    Args:
        scalar_map (str or ndarray): the path to the FA file
        whole_brain_mask (str or ndarray): the general brain mask used in the FA model fitting
        threshold (double): the FA threshold. Everything below this threshold is masked (set to 0). To be precise:
            where fa_data < fa_threshold set the value to 0.
        median_radius (int): the radius of the median filter
        nmr_filter_passes (int): the number of passes we apply the median filter
    """
    filter_footprint = np.zeros((1 + 2 * median_radius,) * 3)
    filter_footprint[median_radius, median_radius, median_radius] = 1
    filter_footprint[:, median_radius, median_radius] = 1
    filter_footprint[median_radius, :, median_radius] = 1
    filter_footprint[median_radius, median_radius, :] = 1

    if isinstance(scalar_map, str):
        map_data = load_nifti(scalar_map).get_data()
    else:
        map_data = np.copy(scalar_map)

    map_data[map_data < threshold] = 0
    wm_mask = map_data.astype(np.bool)

    if len(wm_mask.shape) > 3:
        wm_mask = wm_mask[:, :, :, 0]

    wm_mask[np.logical_not(load_brain_mask(whole_brain_mask))] = 0

    if nmr_filter_passes == 0:
        return wm_mask

    mask = load_brain_mask(whole_brain_mask)

    wm_mask_masked = np.ma.masked_array(wm_mask, mask=mask)
    for ind in range(nmr_filter_passes):
        wm_mask_masked = median_filter(wm_mask_masked, footprint=filter_footprint, mode='constant')
    return wm_mask_masked 
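The four assignments at the top of the function build a 3-D cross-shaped (plus-shaped) footprint. A standalone snippet to inspect it for median_radius=1 (purely illustrative; only numpy is needed):

import numpy as np

median_radius = 1
footprint = np.zeros((1 + 2 * median_radius,) * 3)
footprint[median_radius, median_radius, median_radius] = 1
footprint[:, median_radius, median_radius] = 1
footprint[median_radius, :, median_radius] = 1
footprint[median_radius, median_radius, :] = 1
print(footprint.astype(int))   # three 3x3 slices: centre dot, plus sign, centre dot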
Example #24
Source File: segmenter.py    From msaf with MIT License
def processFlat(self):
        """Main process.
        Returns
        -------
        est_idxs : np.array(N)
            Estimated indices for the segment boundaries in frames.
        est_labels : np.array(N-1)
            Estimated labels for the segments.
        """
        # C-NMF params
        niter = self.config["niters"]  # Iterations for the MF and clustering

        # Preprocess to obtain features, times, and input boundary indices
        F = self._preprocess()

        # Normalize
        F = U.normalize(F, norm_type=self.config["norm_feats"])

        if F.shape[0] >= self.config["h"]:
            # Median filter
            F = median_filter(F, M=self.config["h"])
            #plt.imshow(F.T, interpolation="nearest", aspect="auto"); plt.show()

            # Find the boundary indices and labels using matrix factorization
            est_idxs, est_labels = get_segmentation(
                F.T, self.config["rank"], self.config["R"],
                self.config["rank_labels"], self.config["R_labels"],
                niter=niter, bound_idxs=self.in_bound_idxs, in_labels=None)

            # Remove empty segments if needed
            est_idxs, est_labels = U.remove_empty_segments(est_idxs, est_labels)
        else:
            # The track is too short. We will only output the first and last
            # time stamps
            if self.in_bound_idxs is None:
                est_idxs = np.array([0, F.shape[0] - 1])
                est_labels = [1]
            else:
                est_idxs = self.in_bound_idxs
                est_labels = [1] * (len(est_idxs) + 1)

        # Make sure that the first and last boundaries are included
        assert est_idxs[0] == 0 and est_idxs[-1] == F.shape[0] - 1

        # Post process estimations
        est_idxs, est_labels = self._postprocess(est_idxs, est_labels)

        return est_idxs, est_labels 
Example #25
Source File: segmenter.py    From msaf with MIT License
def processFlat(self):
        """Main process.
        Returns
        -------
        est_idxs : np.array(N)
            Estimated indices for the segment boundaries in frames.
        est_labels : np.array(N-1)
            Estimated labels for the segments.
        """
        # Preprocess to obtain features
        F = self._preprocess()

        # Normalize
        F = msaf.utils.normalize(F, norm_type=self.config["bound_norm_feats"])

        # Make sure that the M_gaussian is even
        if self.config["M_gaussian"] % 2 == 1:
            self.config["M_gaussian"] += 1

        # Median filter
        F = median_filter(F, M=self.config["m_median"])
        #plt.imshow(F.T, interpolation="nearest", aspect="auto"); plt.show()

        # Self similarity matrix
        S = compute_ssm(F)

        # Compute gaussian kernel
        G = compute_gaussian_krnl(self.config["M_gaussian"])
        #plt.imshow(S, interpolation="nearest", aspect="auto"); plt.show()

        # Compute the novelty curve
        nc = compute_nc(S, G)

        # Find peaks in the novelty curve
        est_idxs = pick_peaks(nc, L=self.config["L_peaks"])

        # Add first and last frames
        est_idxs = np.concatenate(([0], est_idxs, [F.shape[0] - 1]))

        # Empty labels
        est_labels = np.ones(len(est_idxs) - 1) * -1

        # Post process estimations
        est_idxs, est_labels = self._postprocess(est_idxs, est_labels)

        return est_idxs, est_labels
        # plt.figure(1)
        # plt.plot(nc);
        # [plt.axvline(p, color="m") for p in est_bounds]
        # [plt.axvline(b, color="g") for b in ann_bounds]
        # plt.figure(2)
        # plt.imshow(S, interpolation="nearest", aspect="auto")
        # [plt.axvline(b, color="g") for b in ann_bounds]
        # plt.show() 
Example #26
Source File: masking.py    From MDT with GNU Lesser General Public License v3.0
def median_otsu(unweighted_volume, median_radius=4, numpass=4, dilate=1):
    """ Simple brain extraction tool for dMRI data.

    This function is inspired by the ``median_otsu`` function from ``dipy``
    and is copied here to remove a dependency.

    It uses a median filter smoothing of the ``unweighted_volume`` and an
    automatic histogram Otsu thresholding technique, hence the name
    *median_otsu*.

    This function is also inspired by Mrtrix's bet, which has default values
    ``median_radius=3``, ``numpass=2``. However, from tests on multiple 1.5T
    and 3T data sets from GE, Philips and Siemens, the most robust choice is
    ``median_radius=4``, ``numpass=4``.

    Args:
        unweighted_volume (ndarray): ndarray of the unweighted brain volumes
        median_radius (int): Radius (in voxels) of the applied median filter (default 4)
        numpass (int): Number of pass of the median filter (default 4)
        dilate (None or int): optional number of iterations for binary dilation

    Returns:
        ndarray: a 3D ndarray with the binary brain mask
    """
    b0vol = unweighted_volume

    logger = logging.getLogger(__name__)
    logger.info('We will use a single precision float type for the calculations.'.format())
    for env in mot.configuration.get_cl_environments():
        logger.info('Using device \'{}\'.'.format(str(env)))

    for ind in range(numpass):
        b0vol = median_filter(b0vol, size=median_radius, mode='mirror')

    thresh = _otsu(b0vol)
    mask = b0vol > thresh

    if dilate is not None:
        cross = generate_binary_structure(3, 1)
        mask = binary_dilation(mask, cross, iterations=dilate)

    return mask
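A minimal end-to-end sketch of the same idea (repeated median filtering, Otsu threshold, optional dilation). The synthetic volume and the use of skimage.filters.threshold_otsu in place of the module's private _otsu helper are assumptions made for illustration:

import numpy as np
from scipy.ndimage import median_filter, generate_binary_structure, binary_dilation
from skimage.filters import threshold_otsu      # stand-in for the private _otsu helper

b0vol = np.random.rand(32, 32, 32)              # synthetic unweighted volume
for _ in range(4):                              # numpass = 4
    b0vol = median_filter(b0vol, size=4, mode='mirror')

mask = b0vol > threshold_otsu(b0vol)            # Otsu threshold on the smoothed volume
mask = binary_dilation(mask, generate_binary_structure(3, 1), iterations=1)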