Python matplotlib.pyplot.matshow() Examples

The following are 30 code examples of matplotlib.pyplot.matshow(), drawn from open-source projects. The source file, project, and license are noted above each example. You may also want to check out the other available functions and classes of the matplotlib.pyplot module.
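Before the project-specific examples, here is a minimal, self-contained sketch of typical matshow() usage; the random data, colormap choice, and output file name are illustrative only and are not taken from any of the projects below.

import numpy as np
import matplotlib.pyplot as plt

# matshow() displays a 2-D array as an image with the origin in the
# upper-left corner and the x-axis ticks along the top edge.
data = np.random.rand(10, 10)          # illustrative data
plt.matshow(data, cmap='viridis')
plt.colorbar()
plt.title('matshow() demo', pad=20)    # pad keeps the title clear of the top ticks
plt.savefig('matshow_demo.png', dpi=150)
plt.show()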
Example #1
Source File: pearsons_filtering.py    From simba with GNU Lesser General Public License v3.0
def pearson_filter(projectPath, featuresDf, del_corr_status, del_corr_threshold, del_corr_plot_status):
    print('Reducing features. Correlation threshold: ' + str(del_corr_threshold))
    col_corr = set()
    corr_matrix = featuresDf.corr()
    for i in range(len(corr_matrix.columns)):
        for j in range(i):
            if (corr_matrix.iloc[i, j] >= del_corr_threshold) and (corr_matrix.columns[j] not in col_corr):
                colname = corr_matrix.columns[i]
                col_corr.add(colname)
                if colname in featuresDf.columns:
                    del featuresDf[colname]
    if del_corr_plot_status == 'yes':
        print('Creating feature correlation heatmap...')
        dateTime = datetime.now().strftime('%Y%m%d%H%M%S')
        plt.matshow(featuresDf.corr())
        plt.tight_layout()
        plt.savefig(os.path.join(projectPath, 'logs', 'Feature_correlations_' + dateTime + '.png'), dpi=300)
        plt.close('all')
        print('Feature correlation heatmap .png saved in project_folder/logs directory')

    return featuresDf 
Example #2
Source File: interactive-eval.py    From multiffn-nli with MIT License
def plot_attention(tokens1, tokens2, attention):
    """
    Plot a colormap showing attention values from tokens1 to
    tokens2.
    """
    len1 = len(tokens1)
    len2 = len(tokens2)
    extent = [0, len2, 0, len1]
    pl.matshow(attention, extent=extent, aspect='auto')
    ticks1 = np.arange(len1) + 0.5
    ticks2 = np.arange(len2) + 0.5
    pl.xticks(ticks2, tokens2, rotation=45)
    pl.yticks(ticks1, reversed(tokens1))
    ax = pl.gca()
    ax.xaxis.set_ticks_position('bottom')
    pl.colorbar()
    pl.title('Alignments')
    pl.show(block=False) 
Example #3
Source File: pursuit_evade.py    From MADRL with MIT License
def render(self, plt_delay=1.0):
        plt.matshow(self.model_state[0].T, cmap=plt.get_cmap('Greys'), fignum=1)
        for i in range(self.pursuer_layer.n_agents()):
            x, y = self.pursuer_layer.get_position(i)
            plt.plot(x, y, "r*", markersize=12)
            if self.train_pursuit:
                ax = plt.gca()
                ofst = self.obs_range / 2.0
                ax.add_patch(
                    Rectangle((x - ofst, y - ofst), self.obs_range, self.obs_range, alpha=0.5,
                              facecolor="#FF9848"))
        for i in range(self.evader_layer.n_agents()):
            x, y = self.evader_layer.get_position(i)
            plt.plot(x, y, "b*", markersize=12)
            if not self.train_pursuit:
                ax = plt.gca()
                ofst = self.obs_range / 2.0
                ax.add_patch(
                    Rectangle((x - ofst, y - ofst), self.obs_range, self.obs_range, alpha=0.5,
                              facecolor="#009ACD"))
        plt.pause(plt_delay)
        plt.clf() 
Example #4
Source File: tf_input_data.py    From DeepLearning_Wavelet-LSTM with MIT License
def MyPlot(cwtmatr):
    ''' Plot '''
    
    print(type(cwtmatr))
    print(len(cwtmatr))
    print(len(cwtmatr[0]))

    # plt.plot(cwtmatr[1])
    # plt.plot(cwtmatr[10])
    # plt.plot(cwtmatr[100])
    
    plt.plot(cwtmatr[1200])
    plt.plot(cwtmatr[1210])
    plt.plot(cwtmatr[1300])
    plt.plot(cwtmatr[1400])
    plt.plot(cwtmatr[1500])

    # plt.plot(cwtmatr[1800])
    # plt.plot(cwtmatr[1900])
    # plt.plot(cwtmatr[2500])

    # plt.matshow(cwtmatr) 
    plt.show() 
Example #5
Source File: plot_results.py    From nasbench-1shot1 with Apache License 2.0
def plot_correlation_image(single_one_shot_training_database, epoch_idx=-1):
    correlation_matrix = np.zeros((3, 5))
    for idx_cell, num_cells in enumerate([3, 6, 9]):
        for idx_ch, num_channels in enumerate([2, 4, 8, 16, 36]):
            config = single_one_shot_training_database.query(
                {'unrolled': False, 'cutout': False, 'search_space': '3', 'epochs': 50, 'init_channels': num_channels,
                 'weight_decay': 0.0003, 'warm_start_epochs': 0, 'learning_rate': 0.025, 'layers': num_cells})
            if len(config) > 0:
                correlation = extract_correlation_per_epoch(config)
                correlation_matrix[idx_cell, idx_ch] = 1 - correlation[epoch_idx]

    plt.figure()
    plt.matshow(correlation_matrix)
    plt.xticks(np.arange(5), (2, 4, 8, 16, 36))
    plt.yticks(np.arange(3), (3, 6, 9))
    plt.colorbar()
    plt.savefig('test_correlation.png')
    plt.close()
    return correlation_matrix 
Example #6
Source File: utils_visualization.py    From deepwriting with MIT License
def plot_matrix_and_get_image(plot_data, fig_height=8, fig_width=12, axis_off=False, colormap="jet"):
    fig = plt.figure()
    fig.set_figheight(fig_height)
    fig.set_figwidth(fig_width)
    plt.matshow(plot_data, fig.number)

    if fig_height < fig_width:
        plt.colorbar(orientation="horizontal")
    else:
        plt.colorbar(orientation="vertical")

    plt.set_cmap(colormap)
    if axis_off:
        plt.axis('off')

    img = fig_to_img(fig)
    plt.close(fig)
    return img 
Example #7
Source File: GetMLPara.py    From dr_droid with Apache License 2.0
def draw_confusion_matrix(y_test, y_pred):

    from sklearn.metrics import confusion_matrix
    cm = confusion_matrix(y_test, y_pred)
    print(cm)

    # Show confusion matrix in a separate window
    plt.matshow(cm)
    plt.title('Confusion matrix')
    plt.colorbar()
    plt.ylabel('True label')
    plt.xlabel('Predicted label')
    plt.show()


####################10 CV FALSE POSITIVE FALSE NEGATIVE#################################################
Example #8
Source File: deconvolutional_autoencoder_1.py    From Deep-Learning-with-TensorFlow with MIT License
def plotresult(org_vec,noisy_vec,out_vec):
    plt.matshow(np.reshape(org_vec, (28, 28)), cmap=plt.get_cmap('gray'))
    plt.title("Original Image")
    plt.colorbar()

    plt.matshow(np.reshape(noisy_vec, (28, 28)), cmap=plt.get_cmap('gray'))
    plt.title("Input Image")
    plt.colorbar()
    
    outimg = np.reshape(out_vec, (28, 28))
    plt.matshow(outimg, cmap=plt.get_cmap('gray'))
    plt.title("Reconstructed Image")
    plt.colorbar()
    plt.show()

# NETWORK PARAMETERS
Example #9
Source File: denoising_autoencoder_1.py    From Deep-Learning-with-TensorFlow with MIT License
def plotresult(org_vec,noisy_vec,out_vec):
    plt.matshow(np.reshape(org_vec, (28, 28)),\
                cmap=plt.get_cmap('gray'))
    plt.title("Original Image")
    plt.colorbar()

    plt.matshow(np.reshape(noisy_vec, (28, 28)),\
                cmap=plt.get_cmap('gray'))
    plt.title("Input Image")
    plt.colorbar()
    
    outimg   = np.reshape(out_vec, (28, 28))
    plt.matshow(outimg, cmap=plt.get_cmap('gray'))
    plt.title("Reconstructed Image")
    plt.colorbar()
    plt.show()

# NETWORK PARAMETERS
Example #10
Source File: denoising_autoencoder_1.py    From Deep-Learning-with-TensorFlow with MIT License
def plotresult(org_vec,noisy_vec,out_vec):
    plt.matshow(np.reshape(org_vec, (28, 28)), cmap=plt.get_cmap('gray'))
    plt.title("Original Image")
    plt.colorbar()

    plt.matshow(np.reshape(noisy_vec, (28, 28)), cmap=plt.get_cmap('gray'))
    plt.title("Input Image")
    plt.colorbar()
    
    outimg = np.reshape(out_vec, (28, 28))
    plt.matshow(outimg, cmap=plt.get_cmap('gray'))
    plt.title("Reconstructed Image")
    plt.colorbar()
    plt.show()

# NETWORK PARAMETERS
Example #11
Source File: deconvolutional_autoencoder_1.py    From Deep-Learning-with-TensorFlow with MIT License
def plotresult(org_vec,noisy_vec,out_vec):
    plt.matshow(np.reshape(org_vec, (28, 28)),\
                cmap=plt.get_cmap('gray'))
    plt.title("Original Image")
    plt.colorbar()

    plt.matshow(np.reshape(noisy_vec, (28, 28)),\
                cmap=plt.get_cmap('gray'))
    plt.title("Input Image")
    plt.colorbar()
    
    outimg   = np.reshape(out_vec, (28, 28))
    plt.matshow(outimg, cmap=plt.get_cmap('gray'))
    plt.title("Reconstructed Image")
    plt.colorbar()
    plt.show()

# NETWORK PARAMETERS
Example #12
Source File: deconvolutional_autoencoder.py    From Deep-Learning-with-TensorFlow-Second-Edition with MIT License
def plotresult(org_vec,noisy_vec,out_vec):
    plt.matshow(np.reshape(org_vec, (28, 28)), cmap=plt.get_cmap('gray'))
    plt.title("Original Image")
    plt.colorbar()

    plt.matshow(np.reshape(noisy_vec, (28, 28)), cmap=plt.get_cmap('gray'))
    plt.title("Input Image")
    plt.colorbar()
    
    outimg = np.reshape(out_vec, (28, 28))
    plt.matshow(outimg, cmap=plt.get_cmap('gray'))
    plt.title("Reconstructed Image")
    plt.colorbar()
    plt.show()

# NETWORK PARAMETERS
Example #13
Source File: denoising_autoencoder.py    From Deep-Learning-with-TensorFlow-Second-Edition with MIT License
def plotresult(org_vec,noisy_vec,out_vec):
    plt.matshow(np.reshape(org_vec, (28, 28)), cmap=plt.get_cmap('gray'))
    plt.title("Original Image")
    plt.colorbar()

    plt.matshow(np.reshape(noisy_vec, (28, 28)), cmap=plt.get_cmap('gray'))
    plt.title("Input Image")
    plt.colorbar()
    
    outimg = np.reshape(out_vec, (28, 28))
    plt.matshow(outimg, cmap=plt.get_cmap('gray'))
    plt.title("Reconstructed Image")
    plt.colorbar()
    plt.show()

# NETWORK PARAMETERS
Example #14
Source File: visualization_utils.py    From ludwig with Apache License 2.0
def confusion_matrix_plot(
        confusion_matrix,
        labels=None,
        output_feature_name=None,
        filename=None
):
    mpl.rcParams.update({'figure.autolayout': True})
    fig, ax = plt.subplots()

    ax.invert_yaxis()
    ax.xaxis.tick_top()
    ax.xaxis.set_label_position('top')

    cax = ax.matshow(confusion_matrix, cmap='viridis')

    ax.xaxis.set_major_locator(ticker.MultipleLocator(1))
    ax.yaxis.set_major_locator(ticker.MultipleLocator(1))
    ax.set_xticklabels([''] + labels, rotation=45, ha='left')
    ax.set_yticklabels([''] + labels)
    ax.grid(False)
    ax.tick_params(axis='both', which='both', length=0)
    fig.colorbar(cax, ax=ax, extend='max')
    ax.set_xlabel('Predicted {}'.format(output_feature_name))
    ax.set_ylabel('Actual {}'.format(output_feature_name))

    plt.tight_layout()
    ludwig.contrib.contrib_command("visualize_figure", plt.gcf())
    if filename:
        plt.savefig(filename)
    else:
        plt.show() 
Example #15
Source File: plot_quasar_transform.py    From 3DChromatin_ReplicateQC with MIT License
def main():
    parser = argparse.ArgumentParser(description='')
    parser.add_argument('--transform')
    parser.add_argument('--out')
    args = parser.parse_args()
    
    infile1 = h5py.File(args.transform, 'r')
    resolutions = infile1['resolutions'][...]
    chroms = infile1['chromosomes'][...]
    data1 = load_data(infile1, chroms, resolutions)
    infile1.close()

    '''
    #for now, don't plot this
    for resolution in data1.keys():
        for chromo in chroms:
            N = data1[resolution][chromo][1].shape[0]
            full=numpy.empty((N,N))
            #full=full/0
            for i in range(100):
                temp1 = numpy.arange(N - i - 1)
                temp2 = numpy.arange(i+1, N)
                full[temp1, temp2] = data1[resolution][chromo][1][temp1, i]
                full[temp2, temp1] = full[temp1, temp2]
            x=0.8
            plt.matshow(full,cmap='seismic',vmin=-x,vmax=x)
            plt.colorbar()
            plt.show()
            plt.savefig(args.out+'.res'+str(resolution)+'.chr'+chromo+'.pdf')    
   ''' 
Example #16
Source File: CNN_DogvsCat_Classifier.py    From Practical-Convolutional-Neural-Networks with MIT License
def plot_confusion_matrix(cls_pred):
    # cls_pred is an array of the predicted class-number for
    # all images in the test-set.

    # Get the true classifications for the test-set.
    cls_true = data.valid.cls
    
    # Get the confusion matrix using sklearn.
    cm = confusion_matrix(y_true=cls_true, y_pred=cls_pred)
    
    # Compute the precision, recall and f1 score of the classification
    p, r, f, s = precision_recall_fscore_support(cls_true, cls_pred, average='weighted')
    print('Precision:', p)
    print('Recall:', r)
    print('F1-score:', f)

    # Print the confusion matrix as text.
    print(cm)

    # Plot the confusion matrix as an image.
    plt.matshow(cm)

    # Make various adjustments to the plot.
    plt.colorbar()
    tick_marks = np.arange(num_classes)
    plt.xticks(tick_marks, range(num_classes))
    plt.yticks(tick_marks, range(num_classes))
    plt.xlabel('Predicted')
    plt.ylabel('True')

    # Ensure the plot is shown correctly with multiple plots
    # in a single Notebook cell.
    plt.show() 
Example #17
Source File: pursuit_evade.py    From MADRL with MIT License
def save_image(self, file_name):
        plt.cla()
        plt.matshow(self.model_state[0].T, cmap=plt.get_cmap('Greys'), fignum=0)
        x, y = self.pursuer_layer.get_position(0)
        plt.plot(x, y, "r*", markersize=12)
        for i in range(self.pursuer_layer.n_agents()):
            x, y = self.pursuer_layer.get_position(i)
            plt.plot(x, y, "r*", markersize=12)
            if self.train_pursuit:
                ax = plt.gca()
                ofst = self.obs_range / 2.0
                ax.add_patch(
                    Rectangle((x - ofst, y - ofst), self.obs_range, self.obs_range, alpha=0.5,
                              facecolor="#FF9848"))
        for i in range(self.evader_layer.n_agents()):
            x, y = self.evader_layer.get_position(i)
            plt.plot(x, y, "b*", markersize=12)
            if not self.train_pursuit:
                ax = plt.gca()
                ofst = self.obs_range / 2.0
                ax.add_patch(
                    Rectangle((x - ofst, y - ofst), self.obs_range, self.obs_range, alpha=0.5,
                              facecolor="#009ACD"))

        xl, xh = -self.obs_offset - 1, self.xs + self.obs_offset + 1
        yl, yh = -self.obs_offset - 1, self.ys + self.obs_offset + 1
        plt.xlim([xl, xh])
        plt.ylim([yl, yh])
        plt.axis('off')
        plt.savefig(file_name, dpi=200) 
Example #18
Source File: mnist_cnn.py    From Deep-Learning-By-Example with MIT License
def plot_confusionMatrix(cls_predicted):
    # cls_predicted is an array of the predicted class number of each image in the test set.

    # Get the actual classes for the test-set.
    cls_actual = mnist_data.test.cls_integer

    # Generate the confusion matrix using sklearn.
    conf_matrix = confusion_matrix(y_true=cls_actual,
                                   y_pred=cls_predicted)

    # Print the matrix.
    print(conf_matrix)

    # visualizing the confusion matrix.
    plt.matshow(conf_matrix)

    plt.colorbar()
    tick_marks = np.arange(num_classes)
    plt.xticks(tick_marks, range(num_classes))
    plt.yticks(tick_marks, range(num_classes))
    plt.xlabel('Predicted class')
    plt.ylabel('True class')

    # Showing the plot
    plt.show()


# measuring the accuracy of the trained model over the test set by splitting it into small batches 
Example #19
Source File: plot.py    From gumpy with MIT License
def confusion_matrix(true_labels, predicted_labels, cmap=plt.cm.Blues):
    cm = skm.confusion_matrix(true_labels, predicted_labels)
    # TODO:
    # print(cm)
    # Show confusion matrix in a separate window ?
    plt.matshow(cm,cmap=cmap)
    plt.title('Confusion matrix')
    plt.colorbar()
    plt.ylabel('True label')
    plt.xlabel('Predicted label')
    plt.show()


# TODO: permit the user to specify the figure where this plot shall appear 
Example #20
Source File: CNNModel_EarlyStopping_Save_Restore.py    From MachineLearning_TensorFlow with MIT License
def plot_confusion_matrix(cls_pred):
    cls_true = data.test.cls
    cm = confusion_matrix(cls_true, cls_pred)
    print(cm)
    plt.matshow(cm)
    plt.xlabel("Predicted")
    plt.ylabel("True")
    plt.show() 
Example #21
Source File: CNN_Model.py    From MachineLearning_TensorFlow with MIT License
def plot_confusion_matrix(cls_pred):
    '''
    @param cls_pred: the predicted class labels; the true test-set labels are known
    '''
    cls_true = data.test.cls
    cm = confusion_matrix(cls_true, cls_pred)
    plt.matshow(cm)
    plt.colorbar()
    tick_marks = np.arange(num_classes)
    plt.xticks(tick_marks, range(num_classes))
    plt.yticks(tick_marks, range(num_classes))
    plt.xlabel('Predicted')
    plt.ylabel('True')    
    plt.show() 
Example #22
Source File: CNNModel_prettytensor.py    From MachineLearning_TensorFlow with MIT License
def plot_confusion_matrix_image(cls_pred):
    cm = confusion_matrix(data.test.cls, cls_pred)
    plt.matshow(cm)
    plt.colorbar()
    tick_marks = np.arange(num_classes)
    plt.xticks(tick_marks, range(num_classes))
    plt.yticks(tick_marks, range(num_classes))
    plt.xlabel('Predicted')
    plt.ylabel('True')    
    plt.show() 
Example #23
Source File: core.py    From dagbldr with BSD 3-Clause "New" or "Revised" License
def plot_images_as_subplots(list_of_plot_args, plot_name, width, height,
                            invert_y=False, invert_x=False,
                            figsize=None, turn_on_agg=True):
    if turn_on_agg:
        import matplotlib
        matplotlib.use('Agg')
    import matplotlib.pyplot as plt
    lengths = [len(a) for a in list_of_plot_args]
    if len(list(filter(lambda x: x != lengths[0], lengths))) > 0:
        raise ValueError("list_of_plot_args has elements of different lengths!")

    if figsize is None:
        f, axarr = plt.subplots(lengths[0], len(lengths))
    else:
        f, axarr = plt.subplots(lengths[0], len(lengths), figsize=figsize)
    for n, v in enumerate(list_of_plot_args):
        for i, X_i in enumerate(v):
            axarr[i, n].matshow(X_i.reshape(width, height), cmap="gray",
                                interpolation="none")
            axarr[i, n].axis('off')
            if invert_y:
                axarr[i, n].set_ylim(axarr[i, n].get_ylim()[::-1])
            if invert_x:
                axarr[i, n].set_xlim(axarr[i, n].get_xlim()[::-1])
    plt.tight_layout()
    plt.savefig(plot_name + ".png") 
Example #24
Source File: util.py    From artificial-intelligence with MIT License
def displayBoard(locations, shape):
    """Draw a chessboard with queens placed at each position specified
    by the assignment.

    Parameters
    ----------
    locations : list
        The locations list should contain one element for each queen
        of the chessboard containing a tuple (r, c) indicating the
        row and column coordinates of a queen to draw on the board.

    shape : integer
        The number of cells in each dimension of the board (e.g.,
        shape=3 indicates a 3x3 board)

    Returns
    -------
    matplotlib.figure.Figure
        The handle to the figure containing the board and queens
    """
    r = c = shape
    cmap = mpl.colors.ListedColormap(['#f5ecce', '#614532'])
    img = mpl.image.imread('queen.png').astype(float)  # np.float alias removed in newer NumPy
    boxprops = {"facecolor": "none", "edgecolor": "none"}

    x, y = np.meshgrid(range(c), range(r))
    plt.matshow(x % 2 ^ y % 2, cmap=cmap)
    plt.axis("off")  # eliminate borders from plot

    fig = plt.gcf()
    fig.set_size_inches([r, c])
    scale = 0.75 * fig.get_dpi() / max(img.shape)
    ax = plt.gca()
    for y, x in set(locations):
        box = mpl.offsetbox.OffsetImage(img, zoom=scale)
        ab = mpl.offsetbox.AnnotationBbox(box, (y, x), bboxprops=boxprops)
        ax.add_artist(ab)

    plt.show()
    return fig 
Example #25
Source File: connectivity.py    From mmvt with GNU General Public License v3.0
def calc_electrodes_coh(subject, data_dict, windows_length, windows_shift, sfreq=1000, fmin=55, fmax=110, bw=15,
                        max_windows_num=None, n_jobs=6):

    from mne.connectivity import spectral_connectivity
    import time

    # input_file = op.join(SUBJECTS_DIR, subject, 'electrodes', mat_fname)
    output_file = op.join(MMVT_DIR, subject, 'connectivity', 'electrodes_coh.npy')
    T = data_dict.data.shape[1]
    windows = calc_windows(windows_length, windows_shift, T)
    if max_windows_num is None or max_windows_num is np.inf:
        max_windows_num = len(windows)
    # windows = np.linspace(0, t_max - dt, t_max / dt)
    # for cond, data in enumerate([d[cond] for cond in conditions]):
    for cond_ind, cond_name in enumerate(data_dict.conditions):
        data = data_dict.data[:, :, cond_ind]
        cond_name = utils.to_str(cond_name)
        if cond_ind == 0:
            coh_mat = np.zeros((data.shape[0], data.shape[0], max_windows_num, 2))
            # coh_mat = np.load(output_file)
            # continue
        # ds_data = downsample_data(data)
        # ds_data = ds_data[:, :, from_t_ind:to_t_ind]
        now = time.time()
        # for win, tmin in enumerate(windows):
        for w in range(max_windows_num):
            w1, w2 = int(windows[w, 0]), int(windows[w, 1])
            utils.time_to_go(now, w, max_windows_num)
            # data : array-like, shape=(n_epochs, n_signals, n_times)
            con_cnd, _, _, _, _ = spectral_connectivity(
                data[np.newaxis, :, w1:w2], method='coh', mode='multitaper', sfreq=sfreq,
                fmin=fmin, fmax=fmax, mt_adaptive=True, n_jobs=n_jobs, mt_bandwidth=bw, mt_low_bias=True)
            con_cnd = np.mean(con_cnd, axis=2).squeeze()
            coh_mat[:, :, w, cond_ind] = con_cnd
            # plt.matshow(con_cnd)
            # plt.show()
        np.save(output_file[:-4], coh_mat)
    return coh_mat 
Example #26
Source File: calc_coherence.py    From mmvt with GNU General Public License v3.0
def calc_electrodes_coh(subject, from_t_ind, to_t_ind, sfreq=1000, fmin=55, fmax=110, bw=15,
        dt=0.1, window_len=0.1, n_jobs=6):
    input_file = op.join(SUBJECTS_DIR, subject, 'electrodes', 'electrodes_data_trials.mat')
    d = sio.loadmat(input_file)
    output_file = op.join(BLENDER_ROOT_DIR, subject, 'electrodes_coh.npy')
    windows = np.linspace(0, 2.5-dt, 2.5 / dt)
    for cond, data in enumerate([d['interference'], d['noninterference']]):
        if cond == 0:
            coh_mat = np.zeros((data.shape[1], data.shape[1], len(windows), 2))
            # coh_mat = np.load(output_file)
            # continue
        ds_data = downsample_data(data)
        ds_data = ds_data[:, :, from_t_ind:to_t_ind]
        now = time.time()
        for win, tmin in enumerate(windows):
            print('cond {}, tmin {}'.format(cond, tmin))
            utils.time_to_go(now, win+1, len(windows))
            con_cnd, _, _, _, _ = spectral_connectivity(
                ds_data, method='coh', mode='multitaper', sfreq=sfreq,
                fmin=fmin, fmax=fmax, mt_adaptive=True, n_jobs=n_jobs, mt_bandwidth=bw, mt_low_bias=True,
                tmin=tmin, tmax=tmin+window_len)
            con_cnd = np.mean(con_cnd, axis=2)
            coh_mat[:, :, win, cond] = con_cnd
            # plt.matshow(con_cnd)
            # plt.show()
        np.save(output_file[:-4], coh_mat) 
Example #27
Source File: meg_electrodes.py    From mmvt with GNU General Public License v3.0
def compare_coh(subject, task, conditions, do_plot=False):
    electrodes_coh = np.load(op.join(ELECTRODES_DIR, subject, task, 'electrodes_coh.npy'))
    meg_electrodes_coh = np.load(op.join(ELECTRODES_DIR, subject, task, 'meg_electrodes_ts_coh.npy'))
    for cond_id, cond in enumerate(conditions):
        # plt.matshow(electrodes_coh[:, :, cond_id])
        # plt.title('electrodes_coh ' + cond)
        # plt.colorbar()
        # plt.matshow(meg_electrodes_coh[:, :, cond_id])
        # plt.title('meg_electrodes_coh ' + cond)
        # plt.colorbar()
        plt.matshow(meg_electrodes_coh[:, :, cond_id]-electrodes_coh[:, :, cond_id])
        plt.title('meg_electrodes_coh-electrodes_coh ' + cond)
        plt.colorbar()
    plt.show() 
Example #28
Source File: tensorflow_input_data.py    From DeepLearning_Wavelet-LSTM with MIT License
def MyPlot(cwtmatr):
    ''' Plot '''
    
    # print(type(cwtmatr))
    # print(len(cwtmatr))
    # print(len(cwtmatr[0]))

    # plt.plot(cwtmatr[1])
    # plt.plot(cwtmatr[10])
    # plt.plot(cwtmatr[200])
    # plt.plot(cwtmatr[300])
    # plt.plot(cwtmatr[400])
    # plt.plot(cwtmatr[500])
    # plt.plot(cwtmatr[600])
    # plt.plot(cwtmatr[700])
    
    # plt.plot(cwtmatr[1200])
    # plt.plot(cwtmatr[1210])
    # plt.plot(cwtmatr[1300])
    # plt.plot(cwtmatr[1400])
    # plt.plot(cwtmatr[1500])

    # plt.plot(cwtmatr[1800])
    # plt.plot(cwtmatr[1850])
    # plt.plot(cwtmatr[1900])
    # plt.plot(cwtmatr[1950])
    # plt.plot(cwtmatr[2000])
    # plt.plot(cwtmatr[2100])
    # plt.plot(cwtmatr[2300])
    # plt.plot(cwtmatr[2500])

    # plt.matshow(cwtmatr) 
    plt.show() 
Example #29
Source File: visualization.py    From tf-image-segmentation with MIT License
def _discrete_matshow_adaptive(data, labels_names=[], title=""):
    """Displays segmentation results using colormap that is adapted
    to a number of classes. Uses labels_names to write class names
    aside the color label. Used as a helper function for 
    visualize_segmentation_adaptive() function.
    
    Parameters
    ----------
    data : 2d numpy array (width, height)
        Array with integers representing class predictions
    labels_names : list
        List with class_names
    """
    
    fig_size = [7, 6]
    plt.rcParams["figure.figsize"] = fig_size
    
    #get discrete colormap
    cmap = plt.get_cmap('Paired', np.max(data)-np.min(data)+1)
    
    # set limits .5 outside true range
    mat = plt.matshow(data,
                      cmap=cmap,
                      vmin = np.min(data)-.5,
                      vmax = np.max(data)+.5)
    
    #tell the colorbar to tick at integers
    cax = plt.colorbar(mat,
                       ticks=np.arange(np.min(data),np.max(data)+1))
    
    # The names to be printed alongside the colorbar
    if labels_names:
        cax.ax.set_yticklabels(labels_names)
    
    if title:
        plt.suptitle(title, fontsize=15, fontweight='bold')
    
    plt.show() 
Example #30
Source File: visualization_utils.py    From ludwig with Apache License 2.0
def plot_matrix(
        matrix,
        cmap='hot',
        filename=None
):
    plt.matshow(matrix, cmap=cmap)
    ludwig.contrib.contrib_command("visualize_figure", plt.gcf())
    if filename:
        plt.savefig(filename)
    else:
        plt.show()