Python tensorflow.python.ops.rnn._transpose_batch_time() Examples
The following are 4 code examples of tensorflow.python.ops.rnn._transpose_batch_time().
You can vote up the ones you like or vote down the ones you don't like,
and go to the original project or source file by following the links above each example.
You may also want to check out all available functions and classes of the module tensorflow.python.ops.rnn, or try the search function.
Example #1
Source File: shapes.py From Counterfactual-StoryRW with MIT License | 6 votes |
def transpose_batch_time(inputs):
    """Transposes inputs between time-major and batch-major.

    Args:
        inputs: A Tensor of shape `[batch_size, max_time, ...]` (batch-major)
            or `[max_time, batch_size, ...]` (time-major), or a (possibly
            nested) tuple of such elements.

    Returns:
        A (possibly nested tuple of) Tensor with transposed batch and time
        dimensions of inputs.
    """
    # Flatten any nested structure, convert each leaf to a Tensor, and
    # swap its first two (batch/time) axes.
    # pylint: disable=protected-access
    swapped = [
        rnn._transpose_batch_time(ops.convert_to_tensor(leaf))
        for leaf in nest.flatten(inputs)
    ]
    # Restore the original nesting around the transposed leaves.
    return nest.pack_sequence_as(structure=inputs, flat_sequence=swapped)
Example #2
Source File: shapes.py From texar with Apache License 2.0 | 6 votes |
def transpose_batch_time(inputs):
    """Transposes inputs between time-major and batch-major.

    Args:
        inputs: A Tensor of shape `[batch_size, max_time, ...]` (batch-major)
            or `[max_time, batch_size, ...]` (time-major), or a (possibly
            nested) tuple of such elements.

    Returns:
        A (possibly nested tuple of) Tensor with transposed batch and time
        dimensions of inputs.
    """
    # Work on the flattened view of the (possibly nested) structure.
    leaves = nest.flatten(inputs)
    transposed = []
    for leaf in leaves:
        tensor = ops.convert_to_tensor(leaf)
        # pylint: disable=protected-access
        transposed.append(rnn._transpose_batch_time(tensor))
    # Re-nest the transposed tensors to mirror the input structure.
    return nest.pack_sequence_as(structure=inputs, flat_sequence=transposed)
Example #3
Source File: shapes.py From Counterfactual-StoryRW with MIT License | 4 votes |
def _mask_sequences_tensor(sequence,
                           sequence_length,
                           dtype=None,
                           time_major=False,
                           tensor_rank=2):
    """Masks out sequence entries that are beyond the respective sequence
    lengths. Masks along the time dimension.

    Args:
        sequence: A Tensor of sequence values. If `time_major=False`
            (default), this must be a Tensor of shape
            `[batch_size, max_time, d_2, ..., d_rank]`, where the rank of
            the Tensor is specified with :attr:`tensor_rank`. If
            `time_major=True`, this must be a Tensor of shape
            `[max_time, batch_size, d_2, ..., d_rank]`.
        sequence_length: A Tensor of shape `[batch_size]`. Time steps beyond
            the respective sequence lengths will be made zero.
        dtype: Type of :attr:`sequence`. If `None`, inferred from
            :attr:`sequence` automatically.
        time_major (bool): The shape format of the inputs. If `True`,
            :attr:`sequence` must have shape
            `[max_time, batch_size, d_2, ..., d_rank]`. If `False`
            (default), :attr:`sequence` must have shape
            `[batch_size, max_time, d_2, ..., d_rank]`.
        tensor_rank (int): The number of dimensions of :attr:`sequence`.
            Default is 2, i.e., :attr:`sequence` is a 2D Tensor consisting
            of batch and time dimensions.

    Returns:
        The masked sequence, i.e., a Tensor of the same shape as
        :attr:`sequence` but with masked-out entries (set to zero).

    Raises:
        ValueError: If :attr:`tensor_rank` is less than 2.
    """
    if tensor_rank is None:
        tensor_rank = 2
    if tensor_rank < 2:
        # BUG FIX: the original message said "must be > 2", contradicting
        # the check (tensor_rank == 2 is accepted).
        raise ValueError(
            "tensor_rank must be >= 2. Got tensor_rank = {}".format(
                tensor_rank))
    if time_major:
        # Normalize to batch-major so masking always happens on axis 1.
        # pylint: disable=protected-access
        sequence = rnn._transpose_batch_time(sequence)
    max_time = tf.to_int32(tf.shape(sequence)[1])
    if dtype is None:
        dtype = sequence.dtype
    mask = tf.sequence_mask(
        tf.to_int32(sequence_length), max_time, dtype=dtype)
    # Broadcast the [batch, time] mask across any trailing feature dims.
    for _ in range(2, tensor_rank):
        mask = tf.expand_dims(mask, axis=-1)
    sequence = sequence * mask
    if time_major:
        # Restore the caller's time-major layout.
        # pylint: disable=protected-access
        sequence = rnn._transpose_batch_time(sequence)
    return sequence
Example #4
Source File: shapes.py From texar with Apache License 2.0 | 4 votes |
def _mask_sequences_tensor(sequence,
                           sequence_length,
                           dtype=None,
                           time_major=False,
                           tensor_rank=2):
    """Masks out sequence entries that are beyond the respective sequence
    lengths. Masks along the time dimension.

    Args:
        sequence: A Tensor of sequence values. If `time_major=False`
            (default), this must be a Tensor of shape
            `[batch_size, max_time, d_2, ..., d_rank]`, where the rank of
            the Tensor is specified with :attr:`tensor_rank`. If
            `time_major=True`, this must be a Tensor of shape
            `[max_time, batch_size, d_2, ..., d_rank]`.
        sequence_length: A Tensor of shape `[batch_size]`. Time steps beyond
            the respective sequence lengths will be made zero.
        dtype (dtype): Type of :attr:`sequence`. If `None`, infer from
            :attr:`sequence` automatically.
        time_major (bool): The shape format of the inputs. If `True`,
            :attr:`sequence` must have shape
            `[max_time, batch_size, d_2, ..., d_rank]`. If `False`
            (default), :attr:`sequence` must have shape
            `[batch_size, max_time, d_2, ..., d_rank]`.
        tensor_rank (int): The number of dimensions of :attr:`sequence`.
            Default is 2, i.e., :attr:`sequence` is a 2D Tensor consisting
            of batch and time dimensions.

    Returns:
        The masked sequence, i.e., a Tensor of the same shape as
        :attr:`sequence` but with masked-out entries (set to zero).

    Raises:
        ValueError: If :attr:`tensor_rank` is less than 2.
    """
    if tensor_rank is None:
        tensor_rank = 2
    if tensor_rank < 2:
        # BUG FIX: the original message said "must be > 2", contradicting
        # the check (tensor_rank == 2 is accepted).
        raise ValueError(
            "tensor_rank must be >= 2. Got tensor_rank = {}".format(
                tensor_rank))
    if time_major:
        # Normalize to batch-major so masking always happens on axis 1.
        # pylint: disable=protected-access
        sequence = rnn._transpose_batch_time(sequence)
    max_time = tf.cast(tf.shape(sequence)[1], tf.int32)
    if dtype is None:
        dtype = sequence.dtype
    mask = tf.sequence_mask(
        tf.cast(sequence_length, tf.int32), max_time, dtype=dtype)
    # Broadcast the [batch, time] mask across any trailing feature dims.
    for _ in range(2, tensor_rank):
        mask = tf.expand_dims(mask, axis=-1)
    sequence = sequence * mask
    if time_major:
        # Restore the caller's time-major layout.
        # pylint: disable=protected-access
        sequence = rnn._transpose_batch_time(sequence)
    return sequence