Python tensorflow.contrib.framework.load_variable() Examples

The following are 27 code examples of tensorflow.contrib.framework.load_variable(). You can vote up the ones you like or vote down the ones you don't like, and go to the original project or source file by following the links above each example. You may also want to check out all available functions/classes of the module tensorflow.contrib.framework, or try the search function.
Example #1
Source File: composable_model.py    From lambda-packs with MIT License 6 votes vote down vote up
def get_weights(self, model_dir):
    """Returns weights per feature of the linear part.

    Args:
      model_dir: Directory where model parameters, graph and etc. are saved.

    Returns:
      The weights created by this model (without the optimizer weights).
      A dict mapping variable name to value; if exactly one variable
      matches, its value is returned directly for convenience.
    """
    all_variables = [name for name, _ in list_variables(model_dir)]
    values = {}
    # Optimizer slot variables live under the same scope; exclude them by
    # matching the optimizer's name (optionally suffixed "_<digit>").
    optimizer_regex = r".*/" + self._get_optimizer().get_name() + r"(_\d)?$"
    for name in all_variables:
      if (name.startswith(self._scope + "/") and
          name != self._scope + "/bias_weight" and
          not re.match(optimizer_regex, name)):
        values[name] = load_variable(model_dir, name)
    if len(values) == 1:
      # Fetch the lone value without materializing a list of keys.
      return next(iter(values.values()))
    return values
Example #2
Source File: composable_model.py    From auto-alt-text-lambda-api with MIT License 6 votes vote down vote up
def get_weights(self, model_dir):
    """Returns weights per feature of the linear part.

    Args:
      model_dir: Directory where model parameters, graph and etc. are saved.

    Returns:
      The weights created by this model (without the optimizer weights).
    """
    scope_prefix = self._scope + "/"
    bias_name = self._scope + "/bias_weight"
    # Optimizer slot variables share this scope; filter them out by name.
    optimizer_regex = r".*/" + self._get_optimizer().get_name() + r"(_\d)?$"
    values = {
        name: load_variable(model_dir, name)
        for name, _ in list_variables(model_dir)
        if (name.startswith(scope_prefix) and name != bias_name and
            not re.match(optimizer_regex, name))
    }
    if len(values) == 1:
      return list(values.values())[0]
    return values
Example #3
Source File: composable_model.py    From keras-lambda with MIT License 6 votes vote down vote up
def get_weights(self, model_dir):
    """Returns weights per feature of the linear part.

    Args:
      model_dir: Directory where model parameters, graph and etc. are saved.

    Returns:
      The weights created by this model (without the optimizer weights).
    """
    optimizer_regex = r".*/" + self._get_optimizer().get_name() + r"(_\d)?$"

    def _is_feature_weight(variable_name):
      # Keep variables in this scope, excluding the bias weight and any
      # optimizer slot variables.
      return (variable_name.startswith(self._scope + "/") and
              variable_name != self._scope + "/bias_weight" and
              re.match(optimizer_regex, variable_name) is None)

    values = {}
    for variable_name, _ in list_variables(model_dir):
      if _is_feature_weight(variable_name):
        values[variable_name] = load_variable(model_dir, variable_name)
    # A single matching variable is unwrapped for convenience.
    return values if len(values) != 1 else values[list(values)[0]]
Example #4
Source File: composable_model.py    From deep_image_model with Apache License 2.0 6 votes vote down vote up
def get_weights(self, model_dir):
    """Returns weights per feature of the linear part.

    Args:
      model_dir: Directory where model parameters, graph and etc. are saved.

    Returns:
      The weights created by this model (without the optimizer weights).
    """
    optimizer_regex = r".*/" + self._get_optimizer().get_name() + r"(_\d)?$"
    scope_prefix = self._scope + "/"
    weights = {}
    for var_name, _ in list_variables(model_dir):
      # Skip variables outside this scope, the bias weight, and any
      # optimizer slot variables.
      if not var_name.startswith(scope_prefix):
        continue
      if var_name == scope_prefix + "bias_weight":
        continue
      if re.match(optimizer_regex, var_name):
        continue
      weights[var_name] = load_variable(model_dir, var_name)
    if len(weights) == 1:
      (single_value,) = weights.values()
      return single_value
    return weights
Example #5
Source File: estimator.py    From keras-lambda with MIT License 5 votes vote down vote up
def fit(self, x=None, y=None, input_fn=None, steps=None, batch_size=None,
          monitors=None, max_steps=None):
    # pylint: disable=g-doc-args,g-doc-return-or-yield
    """See `Trainable`.

    Raises:
      ValueError: If `x` or `y` are not `None` while `input_fn` is not `None`.
      ValueError: If both `steps` and `max_steps` are not `None`.
    """
    if (steps is not None) and (max_steps is not None):
      raise ValueError('Can not provide both steps and max_steps.')
    _verify_input_args(x, y, input_fn, None, batch_size)
    if x is not None:
      # In-memory (x, y) data is handled by the scikit-learn compatible
      # wrapper; this call trains and we return immediately.
      SKCompat(self).fit(x, y, batch_size, steps, max_steps, monitors)
      return self

    if max_steps is not None:
      try:
        # If a checkpoint already reached max_steps there is nothing to do.
        start_step = load_variable(self._model_dir, ops.GraphKeys.GLOBAL_STEP)
        if max_steps <= start_step:
          logging.info('Skipping training since max_steps has already saved.')
          return self
      except:  # pylint: disable=bare-except
        # Deliberate best-effort: no checkpoint (or no global step) yet
        # simply means training starts from scratch.
        pass

    hooks = monitor_lib.replace_monitors_with_hooks(monitors, self)
    if steps is not None or max_steps is not None:
      # StopAtStepHook terminates training at the requested step count.
      hooks.append(basic_session_run_hooks.StopAtStepHook(steps, max_steps))

    loss = self._train_model(input_fn=input_fn, hooks=hooks)
    logging.info('Loss for final step: %s.', loss)
    return self
Example #6
Source File: composable_model.py    From keras-lambda with MIT License 5 votes vote down vote up
def get_bias(self, model_dir):
    """Returns the bias of the model.

    Args:
      model_dir: Directory where model parameters, graph and etc. are saved.

    Returns:
      The bias weights created by this model.
    """
    biases = []
    # One bias vector per hidden layer, followed by the logits bias.
    for layer_index in range(len(self._hidden_units)):
      biases.append(load_variable(
          model_dir,
          name=(self._scope + "/hiddenlayer_%d/biases" % layer_index)))
    biases.append(
        load_variable(model_dir, name=(self._scope + "/logits/biases")))
    return biases
Example #7
Source File: composable_model.py    From keras-lambda with MIT License 5 votes vote down vote up
def get_weights(self, model_dir):
    """Returns the weights of the model.

    Args:
      model_dir: Directory where model parameters, graph and etc. are saved.

    Returns:
      The weights created by this model.
    """
    # Hidden-layer weight names in layer order, then the logits weights.
    weight_names = [self._scope + "/hiddenlayer_%d/weights" % i
                    for i in range(len(self._hidden_units))]
    weight_names.append(self._scope + "/logits/weights")
    return [load_variable(model_dir, name=wn) for wn in weight_names]
Example #8
Source File: composable_model.py    From keras-lambda with MIT License 5 votes vote down vote up
def get_bias(self, model_dir):
    """Returns bias of the model.

    Args:
      model_dir: Directory where model parameters, graph and etc. are saved.

    Returns:
      The bias weights created by this model.
    """
    bias_variable_name = self._scope + "/bias_weight"
    return load_variable(model_dir, name=bias_variable_name)
Example #9
Source File: Saver.py    From TrackR-CNN with MIT License 5 votes vote down vote up
def _initialize_deep_lab_rgb_weights(self, fn):
    """Seeds the RGB channels of the Xception stem conv from checkpoint fn."""
    # Locate the first-conv weight variable of the Xception backbone.
    target_name = "xception_65/entry_flow/conv1_1/weights:0"
    matches = [v for v in tf.global_variables() if v.name == target_name]
    assert len(matches) == 1, len(matches)
    conv1_var = matches[0]
    from tensorflow.contrib.framework import load_variable
    pretrained_w = load_variable(fn, "xception_65/entry_flow/conv1_1/weights")
    current_w = self.session.run(conv1_var)
    # Copy pretrained filters into the first three (RGB) input channels;
    # any extra input channels keep their current initialization.
    current_w[:, :, :3, :] = pretrained_w
    w_placeholder = tf.placeholder(tf.float32)
    assign_w = tf.assign(conv1_var, w_placeholder)
    self.session.run(assign_w, feed_dict={w_placeholder: current_w})
Example #10
Source File: Saver.py    From PReMVOS with MIT License 5 votes vote down vote up
def _initialize_deep_lab_rgb_weights(self, fn):
    """Copies pretrained RGB stem-conv weights from checkpoint fn."""
    weight_name = "xception_65/entry_flow/conv1_1/weights:0"
    candidates = [v for v in tf.global_variables() if v.name == weight_name]
    assert len(candidates) == 1, len(candidates)
    weight_var = candidates[0]
    checkpoint_w = load_variable(fn, "xception_65/entry_flow/conv1_1/weights")
    updated = self.session.run(weight_var)
    # Overwrite only the first three input channels with checkpoint values.
    updated[:, :, :3, :] = checkpoint_w
    feed_ph = tf.placeholder(tf.float32)
    assign_op = tf.assign(weight_var, feed_ph)
    self.session.run(assign_op, feed_dict={feed_ph: updated})
Example #11
Source File: estimator.py    From deep_image_model with Apache License 2.0 5 votes vote down vote up
def get_variable_value(self, name):
    """Looks up a single variable's value in the model checkpoint.

    Args:
      name: string, name of the tensor.

    Returns:
      Numpy array - value of the tensor.
    """
    checkpoint_dir = self.model_dir
    return load_variable(checkpoint_dir, name)
Example #12
Source File: dnn.py    From deep_image_model with Apache License 2.0 5 votes vote down vote up
def bias_(self):
    """Returns the DNN bias values: hidden layers, logits, then (optionally)
    the centered bias."""
    biases = []
    for layer in range(len(self._hidden_units)):
      biases.append(load_variable(
          self._model_dir, name=("dnn/hiddenlayer_%d/biases" % layer)))
    biases.append(load_variable(self._model_dir, name="dnn/logits/biases"))
    if self._estimator.params["enable_centered_bias"]:
      biases.append(
          load_variable(self._model_dir, name=_CENTERED_BIAS_WEIGHT))
    return biases
Example #13
Source File: dnn.py    From deep_image_model with Apache License 2.0 5 votes vote down vote up
def weights_(self):
    """Returns the DNN weight matrices: hidden layers first, then logits."""
    names = ["dnn/hiddenlayer_%d/weights" % i
             for i in range(len(self._hidden_units))]
    names.append("dnn/logits/weights")
    return [load_variable(self._model_dir, name=n) for n in names]
Example #14
Source File: dnn.py    From deep_image_model with Apache License 2.0 5 votes vote down vote up
def get_variable_value(self, name):
    """Returns value of the variable given by name.

    Args:
      name: string, name of the tensor.

    Returns:
      Numpy array - value of the tensor.
    """
    return load_variable(self._model_dir, name)
Example #15
Source File: composable_model.py    From deep_image_model with Apache License 2.0 5 votes vote down vote up
def get_weights(self, model_dir):
    """Returns the weights of the model.

    Args:
      model_dir: Directory where model parameters, graph and etc. are saved.

    Returns:
      The weights created by this model.
    """
    result = []
    # Hidden-layer weights in order, then the logits weights last.
    for layer, _ in enumerate(self._hidden_units):
      result.append(load_variable(
          model_dir, name=(self._scope+"/hiddenlayer_%d/weights" % layer)))
    result.append(
        load_variable(model_dir, name=(self._scope+"/logits/weights")))
    return result
Example #16
Source File: composable_model.py    From deep_image_model with Apache License 2.0 5 votes vote down vote up
def get_bias(self, model_dir):
    """Returns bias of the model.

    Args:
      model_dir: Directory where model parameters, graph and etc. are saved.

    Returns:
      The bias weights created by this model.
    """
    target = self._scope + "/bias_weight"
    return load_variable(model_dir, name=target)
Example #17
Source File: svm.py    From deep_image_model with Apache License 2.0 5 votes vote down vote up
def bias_(self):
    """Returns the value of the linear model's bias variable."""
    checkpoint_dir = self._model_dir
    return load_variable(checkpoint_dir, name="linear/bias_weight")
Example #18
Source File: svm.py    From deep_image_model with Apache License 2.0 5 votes vote down vote up
def weights_(self):
    """Returns the weights of the linear model.

    Returns:
      A dict mapping variable name to value for every "linear/" variable
      except the bias weight and optimizer slot variables; if exactly one
      variable matches, its value is returned directly.
    """
    values = {}
    # Optimizer slot variables live under the same "linear/" scope;
    # exclude them by matching the optimizer's name (optional "_<digit>").
    optimizer_regex = r".*/"+self._optimizer.get_name() + r"(_\d)?$"
    for name, _ in list_variables(self._model_dir):
      if (name.startswith("linear/") and
          name != "linear/bias_weight" and
          not re.match(optimizer_regex, name)):
        values[name] = load_variable(self._model_dir, name)
    if len(values) == 1:
      # Fetch the lone value without materializing a list of keys.
      return next(iter(values.values()))
    return values
Example #19
Source File: Saver.py    From MOTSFusion with MIT License 5 votes vote down vote up
def _initialize_deep_lab_rgb_weights(self, fn):
    """Initializes the RGB part of the stem conv from checkpoint fn."""
    stem_name = "xception_65/entry_flow/conv1_1/weights:0"
    found = [var for var in tf.global_variables() if var.name == stem_name]
    # Exactly one variable must carry this name in the current graph.
    assert len(found) == 1, len(found)
    stem_var = found[0]
    rgb_weights = load_variable(fn, "xception_65/entry_flow/conv1_1/weights")
    new_value = self.session.run(stem_var)
    new_value[:, :, :3, :] = rgb_weights
    value_ph = tf.placeholder(tf.float32)
    self.session.run(tf.assign(stem_var, value_ph),
                     feed_dict={value_ph: new_value})
Example #20
Source File: estimator.py    From auto-alt-text-lambda-api with MIT License 5 votes vote down vote up
def fit(self, x=None, y=None, input_fn=None, steps=None, batch_size=None,
          monitors=None, max_steps=None):
    # pylint: disable=g-doc-args,g-doc-return-or-yield
    """See `Trainable`.

    Raises:
      ValueError: If `x` or `y` are not `None` while `input_fn` is not `None`.
      ValueError: If both `steps` and `max_steps` are not `None`.
    """
    if steps is not None and max_steps is not None:
      raise ValueError('Can not provide both steps and max_steps.')
    _verify_input_args(x, y, input_fn, None, batch_size)
    if x is not None:
      # In-memory data path: delegate to the scikit-learn compatible wrapper.
      SKCompat(self).fit(x, y, batch_size, steps, max_steps, monitors)
      return self

    if max_steps is not None:
      try:
        start_step = load_variable(self._model_dir, ops.GraphKeys.GLOBAL_STEP)
        if max_steps <= start_step:
          logging.info('Skipping training since max_steps has already saved.')
          return self
      except:  # pylint: disable=bare-except
        pass

    all_hooks = monitor_lib.replace_monitors_with_hooks(monitors, self)
    if not (steps is None and max_steps is None):
      all_hooks.append(
          basic_session_run_hooks.StopAtStepHook(steps, max_steps))

    final_loss = self._train_model(input_fn=input_fn, hooks=all_hooks)
    logging.info('Loss for final step: %s.', final_loss)
    return self
Example #21
Source File: composable_model.py    From auto-alt-text-lambda-api with MIT License 5 votes vote down vote up
def get_bias(self, model_dir):
    """Returns the bias of the model.

    Args:
      model_dir: Directory where model parameters, graph and etc. are saved.

    Returns:
      The bias weights created by this model.
    """
    # Hidden-layer bias names in layer order, then the logits bias name.
    bias_names = [self._scope + "/hiddenlayer_%d/biases" % i
                  for i in range(len(self._hidden_units))]
    bias_names.append(self._scope + "/logits/biases")
    return [load_variable(model_dir, name=bn) for bn in bias_names]
Example #22
Source File: composable_model.py    From auto-alt-text-lambda-api with MIT License 5 votes vote down vote up
def get_weights(self, model_dir):
    """Returns the weights of the model.

    Args:
      model_dir: Directory where model parameters, graph and etc. are saved.

    Returns:
      The weights created by this model.
    """
    hidden_weights = [
        load_variable(
            model_dir,
            name=(self._scope + "/hiddenlayer_%d/weights" % index))
        for index, _ in enumerate(self._hidden_units)
    ]
    output_weights = load_variable(
        model_dir, name=(self._scope + "/logits/weights"))
    return hidden_weights + [output_weights]
Example #23
Source File: composable_model.py    From auto-alt-text-lambda-api with MIT License 5 votes vote down vote up
def get_bias(self, model_dir):
    """Returns bias of the model.

    Args:
      model_dir: Directory where model parameters, graph and etc. are saved.

    Returns:
      The bias weights created by this model.
    """
    return load_variable(model_dir, name="%s/bias_weight" % self._scope)
Example #24
Source File: estimator.py    From lambda-packs with MIT License 5 votes vote down vote up
def fit(self, x=None, y=None, input_fn=None, steps=None, batch_size=None,
          monitors=None, max_steps=None):
    # pylint: disable=g-doc-args,g-doc-return-or-yield
    """See `Trainable`.

    Raises:
      ValueError: If `x` or `y` are not `None` while `input_fn` is not `None`.
      ValueError: If both `steps` and `max_steps` are not `None`.
    """
    if all(v is not None for v in (steps, max_steps)):
      raise ValueError('Can not provide both steps and max_steps.')
    _verify_input_args(x, y, input_fn, None, batch_size)
    if x is not None:
      # Array-style inputs train via the scikit-learn compatible wrapper.
      sk_wrapper = SKCompat(self)
      sk_wrapper.fit(x, y, batch_size, steps, max_steps, monitors)
      return self

    if max_steps is not None:
      try:
        start_step = load_variable(self._model_dir, ops.GraphKeys.GLOBAL_STEP)
        if max_steps <= start_step:
          logging.info('Skipping training since max_steps has already saved.')
          return self
      except:  # pylint: disable=bare-except
        pass

    hooks = monitor_lib.replace_monitors_with_hooks(monitors, self)
    wants_step_limit = steps is not None or max_steps is not None
    if wants_step_limit:
      hooks.append(basic_session_run_hooks.StopAtStepHook(steps, max_steps))

    loss = self._train_model(input_fn=input_fn, hooks=hooks)
    logging.info('Loss for final step: %s.', loss)
    return self
Example #25
Source File: composable_model.py    From lambda-packs with MIT License 5 votes vote down vote up
def get_bias(self, model_dir):
    """Returns the bias of the model.

    Args:
      model_dir: Directory where model parameters, graph and etc. are saved.

    Returns:
      The bias weights created by this model.
    """
    hidden_biases = [
        load_variable(
            model_dir,
            name=(self._scope + "/hiddenlayer_%d/biases" % index))
        for index, _ in enumerate(self._hidden_units)
    ]
    output_bias = load_variable(
        model_dir, name=(self._scope + "/logits/biases"))
    return hidden_biases + [output_bias]
Example #26
Source File: composable_model.py    From lambda-packs with MIT License 5 votes vote down vote up
def get_weights(self, model_dir):
    """Returns the weights of the model.

    Args:
      model_dir: Directory where model parameters, graph and etc. are saved.

    Returns:
      The weights created by this model.
    """
    collected = []
    # One weight matrix per hidden layer, followed by the logits weights.
    for layer_index in range(len(self._hidden_units)):
      collected.append(load_variable(
          model_dir,
          name=(self._scope + "/hiddenlayer_%d/weights" % layer_index)))
    collected.append(
        load_variable(model_dir, name=(self._scope + "/logits/weights")))
    return collected
Example #27
Source File: composable_model.py    From lambda-packs with MIT License 5 votes vote down vote up
def get_bias(self, model_dir):
    """Returns bias of the model.

    Args:
      model_dir: Directory where model parameters, graph and etc. are saved.

    Returns:
      The bias weights created by this model.
    """
    bias_name = self._scope + "/bias_weight"
    return load_variable(model_dir, name=bias_name)