Python tensorflow.python.ops.array_ops.broadcast_static_shape() Examples
The following are 30 code examples of tensorflow.python.ops.array_ops.broadcast_static_shape().
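Before the examples, a minimal sketch of the function's contract (this sketch is ours, not taken from any of the projects below): broadcast_static_shape takes two TensorShape objects and returns the statically broadcast TensorShape, raising ValueError if the shapes are not broadcast-compatible.

from tensorflow.python.framework import tensor_shape
from tensorflow.python.ops import array_ops

shape_a = tensor_shape.TensorShape([3, 1, 5])
shape_b = tensor_shape.TensorShape([4, 5])

# Standard NumPy-style broadcasting, applied to static shapes only:
# [3, 1, 5] vs [4, 5] aligns from the right and yields [3, 4, 5].
result = array_ops.broadcast_static_shape(shape_a, shape_b)
print(result)  # TensorShape([3, 4, 5])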
Example #1
Source File: distribution_util.py From lambda-packs with MIT License
# Imports used by this excerpt (TF 1.x internal module paths):
from tensorflow.python.framework import ops
from tensorflow.python.framework import tensor_shape
from tensorflow.python.framework import tensor_util
from tensorflow.python.ops import array_ops


def prefer_static_broadcast_shape(
    shape1, shape2, name="prefer_static_broadcast_shape"):
  """Convenience function which statically broadcasts shape when possible.

  Args:
    shape1: `1-D` integer `Tensor`. Already converted to tensor!
    shape2: `1-D` integer `Tensor`. Already converted to tensor!
    name: A string name to prepend to created ops.

  Returns:
    The broadcast shape, either as `TensorShape` (if broadcast can be done
    statically), or as a `Tensor`.
  """
  with ops.name_scope(name, values=[shape1, shape2]):
    if (tensor_util.constant_value(shape1) is not None and
        tensor_util.constant_value(shape2) is not None):
      return array_ops.broadcast_static_shape(
          tensor_shape.TensorShape(tensor_util.constant_value(shape1)),
          tensor_shape.TensorShape(tensor_util.constant_value(shape2)))
    return array_ops.broadcast_dynamic_shape(shape1, shape2)
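A hypothetical usage sketch of the helper above (graph-mode TF 1.x assumed; the variable names are ours): with two constant shape tensors the static path fires and a TensorShape comes back, while a placeholder forces the dynamic fallback.

import tensorflow as tf

static_a = tf.constant([2, 1, 3])  # value known at graph-construction time
static_b = tf.constant([2, 5, 3])
s = prefer_static_broadcast_shape(static_a, static_b)
# s is TensorShape([2, 5, 3]) because both inputs are constants.

dynamic_a = tf.placeholder(tf.int32, [3])  # value only known at run time
d = prefer_static_broadcast_shape(dynamic_a, static_b)
# d is a Tensor produced by broadcast_dynamic_shape.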
Example #2
Source File: linear_operator_udvh_update.py From lambda-packs with MIT License
def _check_shapes(self):
  """Static check that shapes are compatible."""
  # Broadcast shape also checks that u and v are compatible.
  uv_shape = array_ops.broadcast_static_shape(
      self.u.get_shape(), self.v.get_shape())

  batch_shape = array_ops.broadcast_static_shape(
      self.base_operator.batch_shape, uv_shape[:-2])

  self.base_operator.domain_dimension.assert_is_compatible_with(
      uv_shape[-2])

  if self._diag_update is not None:
    uv_shape[-1].assert_is_compatible_with(
        self._diag_update.get_shape()[-1])
    array_ops.broadcast_static_shape(
        batch_shape, self._diag_update.get_shape()[:-1])
Example #3
Source File: gamma.py From keras-lambda with MIT License
def _get_batch_shape(self):
  return array_ops.broadcast_static_shape(
      self.alpha.get_shape(), self.beta.get_shape())
Example #4
Source File: normal.py From auto-alt-text-lambda-api with MIT License
def _get_batch_shape(self):
  return array_ops.broadcast_static_shape(
      self._mu.get_shape(), self.sigma.get_shape())
Example #5
Source File: linear_operator_identity.py From auto-alt-text-lambda-api with MIT License
def _possibly_broadcast_batch_shape(self, x):
  """Return 'x', possibly after broadcasting the leading dimensions."""
  # If we have no batch shape, our batch shape broadcasts with everything!
  if self._batch_shape_arg is None:
    return x

  # Static attempt:
  #   If we determine that no broadcast is necessary, pass x through
  #   If we need a broadcast, add to an array of zeros.
  #
  # special_shape is the shape that, when broadcast with x's shape, will give
  # the correct broadcast_shape.  Note that
  #   We have already verified the second to last dimension of self.shape
  #   matches x's shape in assert_compatible_matrix_dimensions.
  #   Also, the final dimension of 'x' can have any shape.
  # Therefore, the final two dimensions of special_shape are 1's.
  special_shape = self.batch_shape.concatenate([1, 1])
  bshape = array_ops.broadcast_static_shape(x.get_shape(), special_shape)
  if special_shape.is_fully_defined():
    # bshape.is_fully_defined iff special_shape.is_fully_defined.
    if bshape == x.get_shape():
      return x
    # Use the built in broadcasting of addition.
    zeros = array_ops.zeros(shape=special_shape, dtype=self.dtype)
    return x + zeros

  # Dynamic broadcast:
  #   Always add to an array of zeros, rather than using a "cond", since a
  #   cond would require copying data from GPU --> CPU.
  special_shape = array_ops.concat((self.batch_shape_dynamic(), [1, 1]), 0)
  zeros = array_ops.zeros(shape=special_shape, dtype=self.dtype)
  return x + zeros
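The zeros-add trick used above is a general broadcasting pattern: adding a zeros tensor of the target shape broadcasts x without a cond. A standalone sketch under assumed shapes (not tied to LinearOperatorIdentity):

import tensorflow as tf

x = tf.ones([3, 5])            # a matrix with no batch dimensions
special_shape = [2, 1, 1]      # batch shape [2] plus two singleton dims
zeros = tf.zeros(special_shape, dtype=x.dtype)
y = x + zeros                  # addition broadcasts: y has shape [2, 3, 5]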
Example #6
Source File: von_mises_fisher.py From s-vae-tf with MIT License
def _batch_shape(self):
  return array_ops.broadcast_static_shape(
      self._loc.get_shape(), self._scale.get_shape())
Example #7
Source File: gamma.py From Serverless-Deep-Learning-with-TensorFlow-and-AWS-Lambda with MIT License
def _batch_shape(self):
  return array_ops.broadcast_static_shape(
      self.concentration.get_shape(), self.rate.get_shape())
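In these distribution classes the batch shape is, by construction, the broadcast of the parameter shapes. The same computation outside any class, with shapes assumed for illustration:

from tensorflow.python.framework import tensor_shape
from tensorflow.python.ops import array_ops

concentration_shape = tensor_shape.TensorShape([4, 1])
rate_shape = tensor_shape.TensorShape([3])
batch_shape = array_ops.broadcast_static_shape(concentration_shape, rate_shape)
# batch_shape == TensorShape([4, 3]): one Gamma distribution per broadcast element.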
Example #8
Source File: transformed_distribution.py From Serverless-Deep-Learning-with-TensorFlow-and-AWS-Lambda with MIT License
def _finish_log_prob_for_one_fiber(self, y, x, ildj):
  """Finish computation of log_prob on one element of the inverse image."""
  x = self._maybe_rotate_dims(x, rotate_right=True)
  log_prob = self.distribution.log_prob(x)
  if self._is_maybe_event_override:
    log_prob = math_ops.reduce_sum(log_prob, self._reduce_event_indices)
  log_prob = ildj + log_prob
  if self._is_maybe_event_override:
    log_prob.set_shape(array_ops.broadcast_static_shape(
        y.get_shape().with_rank_at_least(1)[:-1], self.batch_shape))
  return log_prob
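Note that broadcast_static_shape feeds set_shape here, which only refines static shape metadata; the values of log_prob are untouched. A minimal sketch of that pattern (TF 1.x placeholder assumed, shapes ours):

import tensorflow as tf

log_prob = tf.placeholder(tf.float32, [None, 3])  # batch dim statically unknown
inferred = tf.broadcast_static_shape(
    tf.TensorShape([4, 1]), tf.TensorShape([1, 3]))  # TensorShape([4, 3])
log_prob.set_shape(inferred)  # merges static info; log_prob is now [4, 3]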
Example #9
Source File: transformed_distribution.py From Serverless-Deep-Learning-with-TensorFlow-and-AWS-Lambda with MIT License
def _finish_prob_for_one_fiber(self, y, x, ildj):
  """Finish computation of prob on one element of the inverse image."""
  x = self._maybe_rotate_dims(x, rotate_right=True)
  prob = self.distribution.prob(x)
  if self._is_maybe_event_override:
    prob = math_ops.reduce_prod(prob, self._reduce_event_indices)
  prob *= math_ops.exp(ildj)
  if self._is_maybe_event_override:
    prob.set_shape(array_ops.broadcast_static_shape(
        y.get_shape().with_rank_at_least(1)[:-1], self.batch_shape))
  return prob
Example #10
Source File: student_t.py From Serverless-Deep-Learning-with-TensorFlow-and-AWS-Lambda with MIT License
def _batch_shape(self):
  return array_ops.broadcast_static_shape(
      array_ops.broadcast_static_shape(self.df.get_shape(),
                                       self.loc.get_shape()),
      self.scale.get_shape())
Example #11
Source File: uniform.py From Serverless-Deep-Learning-with-TensorFlow-and-AWS-Lambda with MIT License
def _batch_shape(self):
  return array_ops.broadcast_static_shape(
      self.low.get_shape(), self.high.get_shape())
Example #12
Source File: normal.py From Serverless-Deep-Learning-with-TensorFlow-and-AWS-Lambda with MIT License
def _batch_shape(self):
  return array_ops.broadcast_static_shape(
      self.loc.get_shape(), self.scale.get_shape())
Example #13
Source File: binomial.py From auto-alt-text-lambda-api with MIT License
def _get_batch_shape(self):
  return array_ops.broadcast_static_shape(
      self.n.get_shape(), self.p.get_shape())
Example #14
Source File: inverse_gamma.py From keras-lambda with MIT License
def _get_batch_shape(self):
  return array_ops.broadcast_static_shape(
      self.alpha.get_shape(), self.beta.get_shape())
Example #15
Source File: logistic.py From keras-lambda with MIT License
def _get_batch_shape(self):
  return array_ops.broadcast_static_shape(
      self.loc.get_shape(), self.scale.get_shape())
Example #16
Source File: student_t.py From keras-lambda with MIT License
def _get_batch_shape(self):
  return array_ops.broadcast_static_shape(
      array_ops.broadcast_static_shape(self.df.get_shape(),
                                       self.mu.get_shape()),
      self.sigma.get_shape())
Example #17
Source File: gumbel.py From keras-lambda with MIT License
def _get_batch_shape(self):
  return array_ops.broadcast_static_shape(
      self.loc.get_shape(), self.scale.get_shape())
Example #18
Source File: binomial.py From keras-lambda with MIT License
def _get_batch_shape(self):
  return array_ops.broadcast_static_shape(
      self.n.get_shape(), self.p.get_shape())
Example #19
Source File: laplace.py From keras-lambda with MIT License
def _get_batch_shape(self):
  return array_ops.broadcast_static_shape(
      self.loc.get_shape(), self.scale.get_shape())
Example #20
Source File: normal.py From keras-lambda with MIT License
def _get_batch_shape(self):
  return array_ops.broadcast_static_shape(
      self._mu.get_shape(), self.sigma.get_shape())
Example #21
Source File: linear_operator_identity.py From keras-lambda with MIT License
def _possibly_broadcast_batch_shape(self, x):
  """Return 'x', possibly after broadcasting the leading dimensions."""
  # If we have no batch shape, our batch shape broadcasts with everything!
  if self._batch_shape_arg is None:
    return x

  # Static attempt:
  #   If we determine that no broadcast is necessary, pass x through
  #   If we need a broadcast, add to an array of zeros.
  #
  # special_shape is the shape that, when broadcast with x's shape, will give
  # the correct broadcast_shape.  Note that
  #   We have already verified the second to last dimension of self.shape
  #   matches x's shape in assert_compatible_matrix_dimensions.
  #   Also, the final dimension of 'x' can have any shape.
  # Therefore, the final two dimensions of special_shape are 1's.
  special_shape = self.batch_shape.concatenate([1, 1])
  bshape = array_ops.broadcast_static_shape(x.get_shape(), special_shape)
  if special_shape.is_fully_defined():
    # bshape.is_fully_defined iff special_shape.is_fully_defined.
    if bshape == x.get_shape():
      return x
    # Use the built in broadcasting of addition.
    zeros = array_ops.zeros(shape=special_shape, dtype=self.dtype)
    return x + zeros

  # Dynamic broadcast:
  #   Always add to an array of zeros, rather than using a "cond", since a
  #   cond would require copying data from GPU --> CPU.
  special_shape = array_ops.concat((self.batch_shape_dynamic(), [1, 1]), 0)
  zeros = array_ops.zeros(shape=special_shape, dtype=self.dtype)
  return x + zeros
Example #22
Source File: linear_operator_addition.py From lambda-packs with MIT License
def _static_check_for_broadcastable_batch_shape(operators):
  """ValueError if operators determined to have non-broadcastable shapes."""
  if len(operators) < 2:
    return

  # This will fail if they cannot be broadcast together.
  batch_shape = operators[0].batch_shape
  for op in operators[1:]:
    batch_shape = array_ops.broadcast_static_shape(batch_shape,
                                                   op.batch_shape)
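Because broadcast_static_shape raises ValueError on incompatible shapes, the loop above doubles as the check itself. A sketch of both outcomes (shapes assumed for illustration):

from tensorflow.python.framework import tensor_shape
from tensorflow.python.ops import array_ops

# Compatible batch shapes broadcast cleanly:
array_ops.broadcast_static_shape(
    tensor_shape.TensorShape([2, 1]),
    tensor_shape.TensorShape([2, 3]))  # TensorShape([2, 3])

# Incompatible ones raise, which is what the check relies on:
try:
  array_ops.broadcast_static_shape(
      tensor_shape.TensorShape([2]), tensor_shape.TensorShape([3]))
except ValueError:
  print("shapes (2,) and (3,) do not broadcast")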
Example #23
Source File: transformed_distribution.py From lambda-packs with MIT License
def _log_prob(self, y):
  x = self.bijector.inverse(y)
  ildj = self.bijector.inverse_log_det_jacobian(y)
  x = self._maybe_rotate_dims(x, rotate_right=True)
  log_prob = self.distribution.log_prob(x)
  if self._is_maybe_event_override:
    log_prob = math_ops.reduce_sum(log_prob, self._reduce_event_indices)
  log_prob = ildj + log_prob
  if self._is_maybe_event_override:
    log_prob.set_shape(array_ops.broadcast_static_shape(
        y.get_shape().with_rank_at_least(1)[:-1], self.batch_shape))
  return log_prob
Example #24
Source File: transformed_distribution.py From lambda-packs with MIT License
def _prob(self, y):
  x = self.bijector.inverse(y)
  ildj = self.bijector.inverse_log_det_jacobian(y)
  x = self._maybe_rotate_dims(x, rotate_right=True)
  prob = self.distribution.prob(x)
  if self._is_maybe_event_override:
    prob = math_ops.reduce_prod(prob, self._reduce_event_indices)
  prob *= math_ops.exp(ildj)
  if self._is_maybe_event_override:
    prob.set_shape(array_ops.broadcast_static_shape(
        y.get_shape().with_rank_at_least(1)[:-1], self.batch_shape))
  return prob
Example #25
Source File: student_t.py From lambda-packs with MIT License
def _batch_shape(self):
  return array_ops.broadcast_static_shape(
      array_ops.broadcast_static_shape(self.df.get_shape(),
                                       self.loc.get_shape()),
      self.scale.get_shape())
Example #26
Source File: uniform.py From lambda-packs with MIT License
def _batch_shape(self):
  return array_ops.broadcast_static_shape(
      self.low.get_shape(), self.high.get_shape())
Example #27
Source File: normal.py From lambda-packs with MIT License
def _batch_shape(self):
  return array_ops.broadcast_static_shape(
      self.loc.get_shape(), self.scale.get_shape())
Example #28
Source File: inverse_gamma.py From lambda-packs with MIT License
def _batch_shape(self):
  return array_ops.broadcast_static_shape(
      self.concentration.get_shape(), self.rate.get_shape())
Example #29
Source File: logistic.py From lambda-packs with MIT License
def _batch_shape(self):
  return array_ops.broadcast_static_shape(
      self.loc.get_shape(), self.scale.get_shape())
Example #30
Source File: gumbel.py From lambda-packs with MIT License
def _batch_shape(self):
  return array_ops.broadcast_static_shape(
      self.loc.get_shape(), self.scale.get_shape())