Python tensorflow.contrib.framework.is_tensor() Examples
The following are 10 code examples of tensorflow.contrib.framework.is_tensor(), collected from open-source projects. Each example notes the project and source file it comes from. You may also want to check out all available functions and classes of the module tensorflow.contrib.framework.
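Before the examples, a minimal sketch of what is_tensor() reports: it returns True for symbolic TensorFlow values such as tf.Tensor, and False for plain Python or NumPy values. This assumes a TensorFlow 1.x environment where tensorflow.contrib is still available; in later releases the equivalent public check is tf.is_tensor.

# Minimal sketch, assuming a TensorFlow 1.x install where tf.contrib exists.
import numpy as np
import tensorflow as tf
from tensorflow.contrib import framework as contrib_framework

x = tf.constant([1.0, 2.0, 3.0])               # a symbolic Tensor
a = np.array([1.0, 2.0, 3.0])                  # an ordinary NumPy array

print(contrib_framework.is_tensor(x))          # True
print(contrib_framework.is_tensor(a))          # False: NumPy arrays are not Tensors
print(contrib_framework.is_tensor([1, 2, 3]))  # False: plain Python list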
Example #1
Source File: estimator.py From lambda-packs with MIT License
def _verify_input_args(x, y, input_fn, feed_fn, batch_size):
  """Verifies validity of co-existence of input arguments."""
  if input_fn is None:
    if x is None:
      raise ValueError('Either x or input_fn must be provided.')

    if contrib_framework.is_tensor(x) or (y is not None and
                                          contrib_framework.is_tensor(y)):
      raise ValueError('Inputs cannot be tensors. Please provide input_fn.')

    if feed_fn is not None:
      raise ValueError('Can not provide both feed_fn and x or y.')
  else:
    if (x is not None) or (y is not None):
      raise ValueError('Can not provide both input_fn and x or y.')
    if batch_size is not None:
      raise ValueError('Can not provide both input_fn and batch_size.')
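As a rough illustration of how this guard behaves, here are some hypothetical calls (a sketch, not code from the project; it assumes _verify_input_args and contrib_framework are defined and imported as above):

# Hypothetical calls illustrating which argument combinations the guard accepts.
import numpy as np
import tensorflow as tf

x = np.zeros((10, 3))
y = np.zeros(10)

# Accepted: in-memory arrays, no input_fn.
_verify_input_args(x, y, input_fn=None, feed_fn=None, batch_size=32)

# Accepted: an input_fn alone.
_verify_input_args(None, None, input_fn=lambda: None, feed_fn=None, batch_size=None)

# Rejected: x is a Tensor, so the caller must switch to input_fn.
try:
  _verify_input_args(tf.constant(x), y, input_fn=None, feed_fn=None, batch_size=32)
except ValueError as e:
  print(e)  # Inputs cannot be tensors. Please provide input_fn.

# Rejected: x/y and input_fn are mutually exclusive.
try:
  _verify_input_args(x, None, input_fn=lambda: None, feed_fn=None, batch_size=None)
except ValueError as e:
  print(e)  # Can not provide both input_fn and x or y.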
Example #2
Source File: estimator.py From auto-alt-text-lambda-api with MIT License
def _verify_input_args(x, y, input_fn, feed_fn, batch_size):
  """Verifies validity of co-existence of input arguments."""
  if input_fn is None:
    if x is None:
      raise ValueError('Either x or input_fn must be provided.')

    if contrib_framework.is_tensor(x) or (y is not None and
                                          contrib_framework.is_tensor(y)):
      raise ValueError('Inputs cannot be tensors. Please provide input_fn.')

    if feed_fn is not None:
      raise ValueError('Can not provide both feed_fn and x or y.')
  else:
    if (x is not None) or (y is not None):
      raise ValueError('Can not provide both input_fn and x or y.')
    if batch_size is not None:
      raise ValueError('Can not provide both input_fn and batch_size.')
Example #3
Source File: estimator.py From keras-lambda with MIT License
def _verify_input_args(x, y, input_fn, feed_fn, batch_size):
  """Verifies validity of co-existence of input arguments."""
  if input_fn is None:
    if x is None:
      raise ValueError('Either x or input_fn must be provided.')

    if contrib_framework.is_tensor(x) or (y is not None and
                                          contrib_framework.is_tensor(y)):
      raise ValueError('Inputs cannot be tensors. Please provide input_fn.')

    if feed_fn is not None:
      raise ValueError('Can not provide both feed_fn and x or y.')
  else:
    if (x is not None) or (y is not None):
      raise ValueError('Can not provide both input_fn and x or y.')
    if batch_size is not None:
      raise ValueError('Can not provide both input_fn and batch_size.')
Example #4
Source File: estimator.py From deep_image_model with Apache License 2.0
def _get_input_fn(x, y, input_fn, feed_fn, batch_size, shuffle=False, epochs=1):
  """Make inputs into input and feed functions.

  Args:
    x: Numpy, Pandas or Dask matrix or iterable.
    y: Numpy, Pandas or Dask matrix or iterable.
    input_fn: Pre-defined input function for training data.
    feed_fn: Pre-defined data feeder function.
    batch_size: Size to split data into parts. Must be >= 1.
    shuffle: Whether to shuffle the inputs.
    epochs: Number of epochs to run.

  Returns:
    Data input and feeder function based on training data.

  Raises:
    ValueError: Only one of `(x & y)` or `input_fn` must be provided.
  """
  if input_fn is None:
    if x is None:
      raise ValueError('Either x or input_fn must be provided.')

    if contrib_framework.is_tensor(x) or (y is not None and
                                          contrib_framework.is_tensor(y)):
      raise ValueError('Inputs cannot be tensors. Please provide input_fn.')

    if feed_fn is not None:
      raise ValueError('Can not provide both feed_fn and x or y.')

    df = data_feeder.setup_train_data_feeder(x,
                                             y,
                                             n_classes=None,
                                             batch_size=batch_size,
                                             shuffle=shuffle,
                                             epochs=epochs)
    return df.input_builder, df.get_feed_dict_fn()

  if (x is not None) or (y is not None):
    raise ValueError('Can not provide both input_fn and x or y.')
  if batch_size is not None:
    raise ValueError('Can not provide both input_fn and batch_size.')
  return input_fn, feed_fn
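A hedged sketch of the two calling modes this dispatcher supports; the data, the my_input_fn helper, and the call sites below are hypothetical, and the sketch assumes _get_input_fn and its data_feeder dependency are importable as in the project above:

# Hypothetical usage sketch for the two input modes (assumes TF 1.x).
import numpy as np
import tensorflow as tf

x_train = np.random.rand(1000, 3).astype(np.float32)
y_train = np.random.randint(0, 2, size=1000)

# Mode 1: in-memory NumPy data -- a data feeder is built internally and a
# (input builder, feed_dict function) pair is returned.
input_fn, feed_fn = _get_input_fn(x_train, y_train, input_fn=None, feed_fn=None,
                                  batch_size=64, shuffle=True, epochs=5)

# Mode 2: a user-supplied input_fn -- returned unchanged, feed_fn stays as given.
def my_input_fn():
  # Hypothetical: build and return (features, labels) tensors.
  features = tf.random_normal([64, 3])
  labels = tf.zeros([64], dtype=tf.int32)
  return features, labels

input_fn, feed_fn = _get_input_fn(None, None, input_fn=my_input_fn, feed_fn=None,
                                  batch_size=None)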
Example #5
Source File: meta.py From tf-matplotlib with MIT License
def __init__(self, args):
    self.args = args
    self.tf_args = [(i, a) for i, a in enumerate(args) if is_tensor(a)]
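This snippet records which positional arguments are tensors so that, presumably, their evaluated values can be substituted back in later. A minimal standalone sketch of that pattern follows; the class name TensorArgs and its methods are hypothetical, not the tf-matplotlib API:

# Hypothetical sketch: track tensor positions in a mixed argument list,
# then splice in concrete values once the tensors have been evaluated.
import tensorflow as tf
from tensorflow.contrib.framework import is_tensor

class TensorArgs(object):
    def __init__(self, args):
        self.args = list(args)
        self.tf_args = [(i, a) for i, a in enumerate(args) if is_tensor(a)]

    @property
    def tensors(self):
        return [a for _, a in self.tf_args]

    def with_values(self, values):
        """Return args with each tensor replaced by its evaluated value."""
        out = list(self.args)
        for (i, _), v in zip(self.tf_args, values):
            out[i] = v
        return out

args = TensorArgs(['title', tf.constant([1.0, 2.0]), 42])
with tf.Session() as sess:
    values = sess.run(args.tensors)   # evaluate only the tensor arguments
    print(args.with_values(values))   # ['title', array([1., 2.], dtype=float32), 42]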
Example #6
Source File: distribution.py From auto-alt-text-lambda-api with MIT License
def __init__(self,
             dtype,
             is_continuous,
             is_reparameterized,
             validate_args,
             allow_nan_stats,
             parameters=None,
             graph_parents=None,
             name=None):
  """Constructs the `Distribution`.

  **This is a private method for subclass use.**

  Args:
    dtype: The type of the event samples. `None` implies no type-enforcement.
    is_continuous: Python boolean. If `True` this `Distribution` is
      continuous over its supported domain.
    is_reparameterized: Python boolean. If `True` this `Distribution` can be
      reparameterized in terms of some standard distribution with a function
      whose Jacobian is constant for the support of the standard distribution.
    validate_args: Python boolean. Whether to validate input with asserts.
      If `validate_args` is `False`, and the inputs are invalid, correct
      behavior is not guaranteed.
    allow_nan_stats: Python boolean. If `False`, raise an exception if a
      statistic (e.g., mean, mode) is undefined for any batch member. If
      `True`, batch members with valid parameters leading to undefined
      statistics will return `NaN` for this statistic.
    parameters: Python dictionary of parameters used to instantiate this
      `Distribution`.
    graph_parents: Python list of graph prerequisites of this `Distribution`.
    name: A name for this distribution. Default: subclass name.

  Raises:
    ValueError: if any member of graph_parents is `None` or not a `Tensor`.
  """
  graph_parents = [] if graph_parents is None else graph_parents
  for i, t in enumerate(graph_parents):
    if t is None or not contrib_framework.is_tensor(t):
      raise ValueError("Graph parent item %d is not a Tensor; %s." % (i, t))
  parameters = parameters or {}
  self._dtype = dtype
  self._is_continuous = is_continuous
  self._is_reparameterized = is_reparameterized
  self._allow_nan_stats = allow_nan_stats
  self._validate_args = validate_args
  self._parameters = parameters
  self._graph_parents = graph_parents
  self._name = name or type(self).__name__
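The part relevant to is_tensor() is the graph_parents check: every graph prerequisite must be an actual Tensor, otherwise construction fails early. A minimal standalone sketch of that validation (the helper name _check_graph_parents is hypothetical; this is not the Distribution class itself):

# Standalone sketch of the graph_parents validation pattern used above.
import tensorflow as tf
from tensorflow.contrib import framework as contrib_framework

def _check_graph_parents(graph_parents):
  graph_parents = [] if graph_parents is None else graph_parents
  for i, t in enumerate(graph_parents):
    if t is None or not contrib_framework.is_tensor(t):
      raise ValueError("Graph parent item %d is not a Tensor; %s." % (i, t))
  return graph_parents

loc = tf.constant(0.0)
scale = tf.constant(1.0)

_check_graph_parents([loc, scale])   # OK: both are Tensors
try:
  _check_graph_parents([loc, 1.0])   # raises: a plain float is not a Tensor
except ValueError as e:
  print(e)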
Example #7
Source File: linear_operator.py From auto-alt-text-lambda-api with MIT License
def __init__(self,
             dtype,
             graph_parents=None,
             is_non_singular=None,
             is_self_adjoint=None,
             is_positive_definite=None,
             name=None):
  r"""Initialize the `LinearOperator`.

  **This is a private method for subclass use.**

  **Subclasses should copy-paste this `__init__` documentation.**

  Args:
    dtype: The type of this `LinearOperator`. Arguments to `apply` and
      `solve` will have to be this type.
    graph_parents: Python list of graph prerequisites of this
      `LinearOperator`. Typically tensors that are passed during
      initialization.
    is_non_singular: Expect that this operator is non-singular.
    is_self_adjoint: Expect that this operator is equal to its Hermitian
      transpose. If `dtype` is real, this is equivalent to being symmetric.
    is_positive_definite: Expect that this operator is positive definite,
      meaning the real part of all eigenvalues is positive. We do not require
      the operator to be self-adjoint to be positive-definite. See:
      https://en.wikipedia.org/wiki/Positive-definite_matrix#Extension_for_non_symmetric_matrices
    name: A name for this `LinearOperator`.

  Raises:
    ValueError: if any member of graph_parents is `None` or not a `Tensor`.
  """
  # Check and auto-set flags.
  if is_positive_definite:
    if is_non_singular is False:
      raise ValueError("A positive definite matrix is always non-singular.")
    is_non_singular = True

  graph_parents = [] if graph_parents is None else graph_parents
  for i, t in enumerate(graph_parents):
    if t is None or not contrib_framework.is_tensor(t):
      raise ValueError("Graph parent item %d is not a Tensor; %s." % (i, t))

  self._dtype = dtype
  self._graph_parents = graph_parents
  self._is_non_singular = is_non_singular
  self._is_self_adjoint = is_self_adjoint
  self._is_positive_definite = is_positive_definite
  self._name = name or type(self).__name__

  # We will cache some values to avoid repeatedly adding shape
  # manipulation ops to the graph. Cleaner.
  self._cached_shape_dynamic = None
  self._cached_batch_shape_dynamic = None
  self._cached_domain_dimension_dynamic = None
  self._cached_range_dimension_dynamic = None
  self._cached_tensor_rank_dynamic = None
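Besides the same graph_parents check as in the previous example, this constructor reconciles the hint flags: a positive-definite operator is forced to be marked non-singular, and contradictory hints are rejected. A small standalone sketch of that flag logic (the helper _reconcile_flags is hypothetical, not part of the LinearOperator API):

# Hypothetical helper mirroring the flag reconciliation in the constructor above.
def _reconcile_flags(is_non_singular, is_positive_definite):
  if is_positive_definite:
    if is_non_singular is False:
      raise ValueError("A positive definite matrix is always non-singular.")
    is_non_singular = True
  return is_non_singular, is_positive_definite

print(_reconcile_flags(None, True))   # (True, True): positive definite implies non-singular
print(_reconcile_flags(None, None))   # (None, None): no hints given, nothing inferred
try:
  _reconcile_flags(False, True)       # contradictory hints
except ValueError as e:
  print(e)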
Example #8
Source File: distribution.py From deep_image_model with Apache License 2.0
def __init__(self,
             dtype,
             is_continuous,
             is_reparameterized,
             validate_args,
             allow_nan_stats,
             parameters=None,
             graph_parents=None,
             name=None):
  """Constructs the `Distribution`.

  **This is a private method for subclass use.**

  Args:
    dtype: The type of the event samples. `None` implies no type-enforcement.
    is_continuous: Python boolean. If `True` this `Distribution` is
      continuous over its supported domain.
    is_reparameterized: Python boolean. If `True` this `Distribution` can be
      reparameterized in terms of some standard distribution with a function
      whose Jacobian is constant for the support of the standard distribution.
    validate_args: Python boolean. Whether to validate input with asserts.
      If `validate_args` is `False`, and the inputs are invalid, correct
      behavior is not guaranteed.
    allow_nan_stats: Python boolean. If `False`, raise an exception if a
      statistic (e.g., mean, mode) is undefined for any batch member. If
      `True`, batch members with valid parameters leading to undefined
      statistics will return `NaN` for this statistic.
    parameters: Python dictionary of parameters used to instantiate this
      `Distribution`.
    graph_parents: Python list of graph prerequisites of this `Distribution`.
    name: A name for this distribution. Default: subclass name.

  Raises:
    ValueError: if any member of graph_parents is `None` or not a `Tensor`.
  """
  graph_parents = [] if graph_parents is None else graph_parents
  for i, t in enumerate(graph_parents):
    if t is None or not contrib_framework.is_tensor(t):
      raise ValueError("Graph parent item %d is not a Tensor; %s." % (i, t))
  parameters = parameters or {}
  self._dtype = dtype
  self._is_continuous = is_continuous
  self._is_reparameterized = is_reparameterized
  self._allow_nan_stats = allow_nan_stats
  self._validate_args = validate_args
  self._parameters = parameters
  self._graph_parents = graph_parents
  self._name = name or type(self).__name__
Example #9
Source File: distribution.py From keras-lambda with MIT License
def __init__(self,
             dtype,
             is_continuous,
             is_reparameterized,
             validate_args,
             allow_nan_stats,
             parameters=None,
             graph_parents=None,
             name=None):
  """Constructs the `Distribution`.

  **This is a private method for subclass use.**

  Args:
    dtype: The type of the event samples. `None` implies no type-enforcement.
    is_continuous: Python boolean. If `True` this `Distribution` is
      continuous over its supported domain.
    is_reparameterized: Python boolean. If `True` this `Distribution` can be
      reparameterized in terms of some standard distribution with a function
      whose Jacobian is constant for the support of the standard distribution.
    validate_args: Python boolean. Whether to validate input with asserts.
      If `validate_args` is `False`, and the inputs are invalid, correct
      behavior is not guaranteed.
    allow_nan_stats: Python boolean. If `False`, raise an exception if a
      statistic (e.g., mean, mode) is undefined for any batch member. If
      `True`, batch members with valid parameters leading to undefined
      statistics will return `NaN` for this statistic.
    parameters: Python dictionary of parameters used to instantiate this
      `Distribution`.
    graph_parents: Python list of graph prerequisites of this `Distribution`.
    name: A name for this distribution. Default: subclass name.

  Raises:
    ValueError: if any member of graph_parents is `None` or not a `Tensor`.
  """
  graph_parents = [] if graph_parents is None else graph_parents
  for i, t in enumerate(graph_parents):
    if t is None or not contrib_framework.is_tensor(t):
      raise ValueError("Graph parent item %d is not a Tensor; %s." % (i, t))
  parameters = parameters or {}
  self._dtype = dtype
  self._is_continuous = is_continuous
  self._is_reparameterized = is_reparameterized
  self._allow_nan_stats = allow_nan_stats
  self._validate_args = validate_args
  self._parameters = parameters
  self._graph_parents = graph_parents
  self._name = name or type(self).__name__
Example #10
Source File: linear_operator.py From keras-lambda with MIT License
def __init__(self,
             dtype,
             graph_parents=None,
             is_non_singular=None,
             is_self_adjoint=None,
             is_positive_definite=None,
             name=None):
  r"""Initialize the `LinearOperator`.

  **This is a private method for subclass use.**

  **Subclasses should copy-paste this `__init__` documentation.**

  Args:
    dtype: The type of this `LinearOperator`. Arguments to `apply` and
      `solve` will have to be this type.
    graph_parents: Python list of graph prerequisites of this
      `LinearOperator`. Typically tensors that are passed during
      initialization.
    is_non_singular: Expect that this operator is non-singular.
    is_self_adjoint: Expect that this operator is equal to its Hermitian
      transpose. If `dtype` is real, this is equivalent to being symmetric.
    is_positive_definite: Expect that this operator is positive definite,
      meaning the real part of all eigenvalues is positive. We do not require
      the operator to be self-adjoint to be positive-definite. See:
      https://en.wikipedia.org/wiki/Positive-definite_matrix#Extension_for_non_symmetric_matrices
    name: A name for this `LinearOperator`.

  Raises:
    ValueError: if any member of graph_parents is `None` or not a `Tensor`.
  """
  # Check and auto-set flags.
  if is_positive_definite:
    if is_non_singular is False:
      raise ValueError("A positive definite matrix is always non-singular.")
    is_non_singular = True

  graph_parents = [] if graph_parents is None else graph_parents
  for i, t in enumerate(graph_parents):
    if t is None or not contrib_framework.is_tensor(t):
      raise ValueError("Graph parent item %d is not a Tensor; %s." % (i, t))

  self._dtype = dtype
  self._graph_parents = graph_parents
  self._is_non_singular = is_non_singular
  self._is_self_adjoint = is_self_adjoint
  self._is_positive_definite = is_positive_definite
  self._name = name or type(self).__name__

  # We will cache some values to avoid repeatedly adding shape
  # manipulation ops to the graph. Cleaner.
  self._cached_shape_dynamic = None
  self._cached_batch_shape_dynamic = None
  self._cached_domain_dimension_dynamic = None
  self._cached_range_dimension_dynamic = None
  self._cached_tensor_rank_dynamic = None