Python cntk.splice() Examples

The following are four code examples of cntk.splice(), drawn from open-source projects that use the CNTK backend for Keras. cntk.splice() concatenates its input tensors along a given axis. The source file and originating project are noted above each example.
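As a quick orientation before the project examples, here is a minimal sketch of our own (not taken from any of the projects below; the tensor values are arbitrary assumptions) showing what cntk.splice() computes:

import numpy as np
import cntk as C

a = C.constant(value=np.array([[1., 2.], [3., 4.]], dtype=np.float32))  # shape (2, 2)
b = C.constant(value=np.array([[5., 6.]], dtype=np.float32))            # shape (1, 2)

# Concatenate along axis 0: (2, 2) + (1, 2) -> (3, 2).
c = C.splice(a, b, axis=0)
print(c.eval())  # [[1. 2.] [3. 4.] [5. 6.]]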
Example #1
Source File: cntk_backend.py    From DeepLearning_Wavelet-LSTM with MIT License
def _padding(x, pattern, axis):
    base_shape = x.shape
    if b_any([dim < 0 for dim in base_shape]):
        raise ValueError('CNTK Backend: padding input tensor with '
                         'shape `%s` contains non-specified dimension, '
                         'which is not supported. Please give fixed '
                         'dimension to enable padding.' % base_shape)
    if pattern[0] > 0:
        prefix_shape = list(base_shape)
        prefix_shape[axis] = pattern[0]
        prefix_shape = tuple(prefix_shape)
        x = C.splice(C.constant(value=0, shape=prefix_shape), x, axis=axis)
        base_shape = x.shape
    if pattern[1] > 0:
        postfix_shape = list(base_shape)
        postfix_shape[axis] = pattern[1]
        postfix_shape = tuple(postfix_shape)
        x = C.splice(x, C.constant(value=0, shape=postfix_shape), axis=axis)
    return x 
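To illustrate what _padding produces, here is a hedged standalone sketch (the shapes and values are our assumptions, not from the project) equivalent to the two C.splice calls _padding(x, (1, 2), axis=0) would issue:

import numpy as np
import cntk as C

x = C.constant(value=np.ones((2, 3), dtype=np.float32))

# One zero row before and two after along axis 0.
prefix = C.constant(value=0, shape=(1, 3))
postfix = C.constant(value=0, shape=(2, 3))
y = C.splice(prefix, x, postfix, axis=0)
print(y.eval().shape)  # (5, 3)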
Example #2
Source File: cntk_backend.py    From DeepLearning_Wavelet-LSTM with MIT License
def repeat(x, n):
    # Workaround for the recurrent layer: if n is an inferred or free
    # dimension, we cannot repeat along it in CNTK yet, so return x
    # unchanged and rely on CNTK's broadcasting to make the recurrent
    # layer work. To be fixed in the GA release.
    if n is C.InferredDimension or n is C.FreeDimension:
        return x
    index = 1 - _get_dynamic_axis_num(x)
    if index < 0 or index > 1:
        raise NotImplementedError

    new_shape = list(x.shape)
    new_shape.insert(index, 1)
    new_shape = tuple(new_shape)
    x = C.reshape(x, new_shape)
    temp = [x] * n
    return C.splice(*temp, axis=index) 
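A hedged usage sketch for repeat (the input shape and sample values are our assumptions): for a tensor with a single dynamic (batch) axis, index comes out as 0, so the function reshapes (4,) to (1, 4) and splices n copies along the new axis:

import numpy as np
import cntk as C

x = C.input_variable(shape=(4,))     # one dynamic (batch) axis -> index == 0
x2 = C.reshape(x, (1, 4))            # insert the new axis of size 1
y = C.splice(x2, x2, x2, axis=0)     # n == 3 copies -> static shape (3, 4)

sample = np.arange(4, dtype=np.float32).reshape(1, 4)
print(y.eval({x: sample}).shape)     # (1, 3, 4): batch of one, then (3, 4)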
Example #3
Source File: cntk_backend.py    From DeepLearning_Wavelet-LSTM with MIT License
def tile(x, n):
    if isinstance(n, int):
        n = (n,)
    elif isinstance(n, list):
        n = tuple(n)

    shape = int_shape(x)
    num_dynamic_axis = _get_dynamic_axis_num(x)
    # Padding the axis
    if len(n) < len(shape):
        n = tuple([1 for _ in range(len(shape) - len(n))]) + n

    if len(n) != len(shape):
        raise NotImplementedError

    for i, rep in enumerate(n):
        if i >= num_dynamic_axis and shape[i] is not None:
            tmp = [x] * rep
            x = C.splice(*tmp, axis=i - num_dynamic_axis)

    return x 
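A hedged sketch of the loop above in action (shapes and values are our assumptions): tiling a (1, 2) constant with n = (2, 3) splices 2 copies along axis 0 and then 3 copies along axis 1, matching np.tile:

import numpy as np
import cntk as C

x = C.constant(value=np.array([[1., 2.]], dtype=np.float32))  # shape (1, 2)

x = C.splice(x, x, axis=0)       # rep == 2 along axis 0 -> (2, 2)
x = C.splice(x, x, x, axis=1)    # rep == 3 along axis 1 -> (2, 6)
print(x.eval().shape)            # (2, 6), same as np.tile([[1, 2]], (2, 3)).shape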
Example #4
Source File: cntk_backend.py    From DeepLearning_Wavelet-LSTM with MIT License
def concatenate(tensors, axis=-1):
    if len(tensors) == 0:
        return None

    axis = [axis]
    axis = _normalize_axis(axis, tensors[0])
    return C.splice(*tensors, axis=axis[0])
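A hedged sketch of concatenate in use (tensor shapes are our assumptions): with the default axis=-1, _normalize_axis resolves the axis to the last static axis before the single C.splice call:

import numpy as np
import cntk as C

a = C.constant(value=np.ones((2, 3), dtype=np.float32))
b = C.constant(value=np.zeros((2, 5), dtype=np.float32))

# axis=-1 normalizes to static axis 1 here, so this is the resulting call:
c = C.splice(a, b, axis=1)
print(c.eval().shape)  # (2, 8)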