Python keras.backend.count_params() Examples

The following are 8 code examples of keras.backend.count_params(). You can go to the original project or source file by following the links above each example. You may also want to check out all available functions/classes of the module keras.backend, or try the search function.
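As background, K.count_params(x) returns the number of scalar values in a single Keras variable, such as one layer's kernel or bias. The recurring pattern in the examples below sums these counts over a model's weight lists, which reproduces the totals printed by model.summary(). A minimal, self-contained sketch (the toy model is purely illustrative):

import numpy as np
from keras import backend as K
from keras.models import Sequential
from keras.layers import Dense

# Tiny model used only to illustrate the counting pattern.
model = Sequential([Dense(4, input_shape=(3,)), Dense(1)])

# count_params() gives the number of scalars in one weight variable;
# summing over the weight lists yields the model-wide totals.
trainable_count = int(np.sum([K.count_params(w) for w in model.trainable_weights]))
non_trainable_count = int(np.sum([K.count_params(w) for w in model.non_trainable_weights]))
print(trainable_count, non_trainable_count)  # 21 0 for this toy model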
Example #1
Source File: a00_augmentation_functions.py    From Urban3d with MIT License
def get_model_memory_usage(batch_size, model):
    import numpy as np
    from keras import backend as K

    # Count the activation values produced by every layer for a single sample
    # (the batch dimension shows up as None and is skipped).
    shapes_mem_count = 0
    for l in model.layers:
        single_layer_mem = 1
        for s in l.output_shape:
            if s is None:
                continue
            single_layer_mem *= s
        shapes_mem_count += single_layer_mem

    trainable_count = np.sum([K.count_params(p) for p in set(model.trainable_weights)])
    non_trainable_count = np.sum([K.count_params(p) for p in set(model.non_trainable_weights)])

    # 4 bytes per value, assuming float32 weights and activations.
    total_memory = 4.0 * batch_size * (shapes_mem_count + trainable_count + non_trainable_count)
    gbytes = np.round(total_memory / (1024.0 ** 3), 3)
    return gbytes
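A hypothetical usage of the helper above; `model` stands for any built Keras model, and the 4-byte factor assumes float32 as noted in the comments:

# Estimate the memory needed to train `model` with batches of 32 samples.
gbytes = get_model_memory_usage(32, model)
print('Approximate training memory: %.3f GB' % gbytes)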
Example #2
Source File: model_utils.py    From aiexamples with Apache License 2.0
def plot_parameter_statistic(model, layer_types=['Dense', 'Conv2D'], trainable=True, non_trainable=False, outputs=False):
    # Assumes module-level imports: numpy as np, keras.backend as K, matplotlib.pyplot as plt.
    parameter_count = []
    names = []
    for l in model.layers:
        if l.__class__.__name__ not in layer_types:
            continue
        count = 0
        if outputs:
            # Number of output activations per sample (batch dimension excluded).
            count += np.sum([np.sum([np.prod(s[1:]) for s in n.output_shapes]) for n in l._inbound_nodes])
        if trainable:
            count += np.sum([K.count_params(p) for p in set(l.trainable_weights)])
        if non_trainable:
            count += np.sum([K.count_params(p) for p in set(l.non_trainable_weights)])
        parameter_count.append(count)
        names.append(l.name)

    y = range(len(names))
    plt.figure(figsize=[12, max(len(y) // 4, 1)])
    plt.barh(y, parameter_count, align='center')
    plt.yticks(y, names)
    plt.ylim(y[0] - 1, y[-1] + 1)
    ax = plt.gca()
    ax.invert_yaxis()
    ax.xaxis.tick_top()
    plt.show()
Example #3
Source File: test_model_train.py    From DeepMoon with MIT License
def test_build_model(self):
    dim = 256
    FL = 3
    learn_rate = 0.0001
    n_filters = 112
    init = 'he_normal'
    lmbda = 1e-06
    drop = 0.15

    # `mt` is the module under test, imported at the top of the source file.
    model = mt.build_model(dim, learn_rate, lmbda, drop, FL, init,
                           n_filters)

    # Following https://stackoverflow.com/questions/45046525/keras-number-of-trainable-parameters-in-model
    trainable_count = int(np.sum([K.count_params(p) for p in
                                  set(model.trainable_weights)]))
    non_trainable_count = int(np.sum([K.count_params(p) for p in
                                      set(model.non_trainable_weights)]))
    assert trainable_count + non_trainable_count == 10278017
    assert trainable_count == 10278017
    assert non_trainable_count == 0
Example #4
Source File: model_utils.py    From aiexamples with Apache License 2.0
def calc_memory_usage(model, batch_size=1):
    """Compute the memory usage of a keras modell.
    
    # Arguments
        model: Keras model.
        batch_size: Batch size used for training.
    
    source: https://stackoverflow.com/a/46216013/445710
    """

    shapes_mem_count = 0
    for l in model.layers:
        shapes_mem_count += np.sum([np.sum([np.prod(s[1:]) for s in n.output_shapes]) for n in l._inbound_nodes])
        
    trainable_count = np.sum([K.count_params(p) for p in set(model.trainable_weights)])
    non_trainable_count = np.sum([K.count_params(p) for p in set(model.non_trainable_weights)])
    
    # each value (activation or weight) occupies 4 bytes in memory (float32)
    total_memory = 4.0 * batch_size * (shapes_mem_count + trainable_count + non_trainable_count)
    
    for s in ['Byte', 'KB', 'MB', 'GB', 'TB']:
        if total_memory > 1024:
            total_memory /= 1024
        else:
            break
    print('model memory usage %8.2f %s' % (total_memory, s)) 
Example #5
Source File: model_utils.py    From aiexamples with Apache License 2.0
def count_parameters(model):
    trainable_count = int(np.sum([K.count_params(p) for p in set(model.trainable_weights)]))
    non_trainable_count = int(np.sum([K.count_params(p) for p in set(model.non_trainable_weights)]))
    
    print('trainable     {:>16,d}'.format(trainable_count))
    print('non-trainable {:>16,d}'.format(non_trainable_count)) 
Example #6
Source File: train_i3d.py    From sign-language with MIT License
def count_params(keModel: keras.Model):

    trainable_count = int(
        np.sum([K.count_params(p) for p in set(keModel.trainable_weights)]))
    non_trainable_count = int(
        np.sum([K.count_params(p) for p in set(keModel.non_trainable_weights)]))

    print('Total params: {:,}'.format(trainable_count + non_trainable_count))
    print('Trainable params: {:,}'.format(trainable_count))
    print('Non-trainable params: {:,}'.format(non_trainable_count))
    
    return 
Example #7
Source File: solr_keras.py    From Benchmarks with MIT License
def compute_trainable_params(model):
    """ Extract number of parameters from the given Keras model

        Parameters
        -----------
        model : Keras model

        Return
        ----------
        python dictionary that contains trainable_params, non_trainable_params and total_params
    """
    # Use the backend that matches the model's package (standalone Keras vs. tf.keras).
    if str(type(model)).startswith("<class 'keras."):
        from keras import backend as K
    else:
        import tensorflow.keras.backend as K

    trainable_count = int(
        np.sum([K.count_params(w) for w in model.trainable_weights])
    )
    non_trainable_count = int(
        np.sum([K.count_params(w) for w in model.non_trainable_weights])
    )

    return {'trainable_params': trainable_count,
            'non_trainable_params': non_trainable_count,
            'total_params': (trainable_count + non_trainable_count)} 
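An illustrative call; `model` here is a placeholder for any keras or tf.keras model:

params = compute_trainable_params(model)
print(params)  # {'trainable_params': ..., 'non_trainable_params': ..., 'total_params': ...}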
Example #8
Source File: ErrorMapModel.py    From Pix2Depth with GNU General Public License v3.0
def customLoss(yTrue, yPred):
    # Mean squared log error minus a correction term scaled by
    # 1 / count_params(yTrue)^2, where count_params(yTrue) is the number of scalars in yTrue.
    if yTrue.shape[1] != None:
        return (K.mean(K.square(K.log(yTrue) - K.log(yPred)), axis=-1)
                - (0.5 / pow(K.count_params(yTrue), 2)) * K.sum(K.log(yTrue) - K.log(yPred), axis=-1))
    else:
        # Fall back to the summed absolute error when the static shape is unknown.
        return K.sum(K.abs(yPred - yTrue), axis=-1)