Python absl.logging.error() Examples

The following are 30 code examples of absl.logging.error(), taken from open-source projects. The original project and source file are noted above each example. You may also want to check out the other functions and classes available in the absl.logging module.
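Before the project excerpts, here is a minimal, self-contained sketch of the typical usage pattern; it is not taken from any of the projects below, and the main function and messages are purely illustrative. absl.app.run() installs the absl logging handler and parses flags (see Example #23), and logging.error() takes a printf-style format string followed by its arguments.

from absl import app
from absl import logging


def main(argv):
  del argv  # Unused.
  try:
    1 / 0  # Deliberately fails to demonstrate error logging.
  except ZeroDivisionError as err:
    # As in the examples below, pass a format string and arguments instead of
    # pre-formatting the message.
    logging.error('Computation failed: %s', err)


if __name__ == '__main__':
  app.run(main)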
Example #1
Source File: uploader_subcommand.py    From tensorboard with Apache License 2.0
def execute(self, server_info, channel):
        api_client = write_service_pb2_grpc.TensorBoardWriterServiceStub(
            channel
        )
        experiment_id = self.experiment_id
        if not experiment_id:
            raise base_plugin.FlagsError(
                "Must specify a non-empty experiment ID to delete."
            )
        try:
            uploader_lib.delete_experiment(api_client, experiment_id)
        except uploader_lib.ExperimentNotFoundError:
            _die(
                "No such experiment %s. Either it never existed or it has "
                "already been deleted." % experiment_id
            )
        except uploader_lib.PermissionDeniedError:
            _die(
                "Cannot delete experiment %s because it is owned by a "
                "different user." % experiment_id
            )
        except grpc.RpcError as e:
            _die("Internal error deleting experiment: %s" % e)
        print("Deleted experiment %s." % experiment_id) 
Example #2
Source File: squad_evaluate_v1_1.py    From Live-feed-object-device-identification-using-Tensorflow-and-OpenCV with Apache License 2.0
def evaluate(dataset, predictions):
  """Evaluates predictions for a dataset."""
  f1 = exact_match = total = 0
  for article in dataset:
    for paragraph in article["paragraphs"]:
      for qa in paragraph["qas"]:
        total += 1
        if qa["id"] not in predictions:
          message = "Unanswered question " + qa["id"] + " will receive score 0."
          logging.error(message)
          continue
        ground_truths = [entry["text"] for entry in qa["answers"]]
        prediction = predictions[qa["id"]]
        exact_match += _metric_max_over_ground_truths(_exact_match_score,
                                                      prediction, ground_truths)
        f1 += _metric_max_over_ground_truths(_f1_score, prediction,
                                             ground_truths)

  exact_match = exact_match / total
  f1 = f1 / total

  return {"exact_match": exact_match, "f1": f1} 
Example #3
Source File: device_model.py    From loaner with Apache License 2.0
def lock(self, user_email):
    """Disables a device via the Directory API.

    Args:
      user_email: str, email address of the user making the request.
    """
    logging.info(
        'Contacting Directory to lock (disable) Device %s.',
        self.identifier)
    client = directory.DirectoryApiClient(user_email)
    try:
      client.disable_chrome_device(self.chrome_device_id)
    except directory.DeviceAlreadyDisabledError as err:
      logging.error(_ALREADY_DISABLED_MSG, self.identifier, err)
    else:
      self.stream_to_bq(user_email, 'Disabling device %s.' % self.identifier)
    self.locked = True
    self.put() 
Example #4
Source File: device_model.py    From loaner with Apache License 2.0
def device_audit_check(self):
    """Checks a device to make sure it passes all prechecks for audit.

    Raises:
      DeviceNotEnrolledError: when a device is not enrolled in the application.
      UnableToMoveToShelfError: when a device cannot be checked into a shelf.
      DeviceAuditEventError: when a device encounters an error during auditing.
    """
    if not self.enrolled:
      raise DeviceNotEnrolledError(DEVICE_NOT_ENROLLED_MSG % self.identifier)
    if self.damaged:
      raise UnableToMoveToShelfError(_DEVICE_DAMAGED_MSG % self.identifier)
    try:
      events.raise_event('device_audit', device=self)
    except events.EventActionsError as err:
      # If an action implemented for device_audit is required for the rest of
      # the logic, an error should be raised. If an action is not required,
      # e.g. it only sends a notification email, the error should only be
      # logged.
      raise DeviceAuditEventError(err) 
Example #5
Source File: run_reminder_events.py    From loaner with Apache License 2.0
def _remind_for_devices(self):
    """Find devices marked as being in a remindable state and raise event."""
    for device in device_model.Device.query(
        device_model.Device.next_reminder.time <= datetime.datetime.utcnow()
    ).fetch():
      logging.info(
          _DEVICE_REMINDING_NOW_MSG, device.identifier,
          device.next_reminder.level)
      try:
        events.raise_event(
            event_name=event_models.ReminderEvent.make_name(
                device.next_reminder.level),
            device=device)
      except events.EventActionsError as err:
        # We log the error so that a single device does not disrupt all other
        # devices that need reminders set.
        logging.error(_EVENT_ACTION_ERROR_MSG, err) 
Example #6
Source File: observation_processor.py    From football with Apache License 2.0
def finalize(self):
    dump_info = {}
    if self._video_writer:
      self._video_writer.release()
      self._video_writer = None
      os.close(self._video_fd)
      try:
        # For some reason the file is sometimes missing, which makes this copy fail.
        if WRITE_FILES:
          shutil.copy2(self._video_tmp, self._name + self._video_suffix)
        dump_info['video'] = '%s%s' % (self._name, self._video_suffix)
        logging.info('Video written to %s%s', self._name, self._video_suffix)
        os.remove(self._video_tmp)
      except:
        logging.error(traceback.format_exc())
    if self._dump_file:
      self._dump_file.close()
      self._dump_file = None
      if self._step_cnt == 0:
        logging.warning('No data to write to the dump.')
      else:
        dump_info['dump'] = '%s.dump' % self._name
        logging.info('Dump written to %s.dump', self._name)
    return dump_info 
Example #7
Source File: gng_impl.py    From loaner with Apache License 2.0
def main(argv):
  del argv  # Unused.
  utils.clear_screen()
  utils.write('Welcome to the Grab n Go management script!\n')

  try:
    _Manager.new(
        FLAGS.config_file_path,
        FLAGS.prefer_gcs,
        project_key=FLAGS.project,
        version=FLAGS.app_version,
    ).run()
  except KeyboardInterrupt as err:
    logging.error('Manager received CTRL-C, exiting: %s', err)
    exit_code = 1
  else:
    exit_code = 0

  sys.exit(exit_code) 
Example #8
Source File: storage.py    From loaner with Apache License 2.0
def get_bucket(self, bucket_name=None):
    """Retrieves a Google Cloud Storage Bucket object.

    Args:
      bucket_name: str, the name of the Google Cloud Storage Bucket to retrieve.

    Returns:
      A dictionary object representing a Google Cloud Storage Bucket.
          type: google.cloud.storage.bucket.Bucket

    Raises:
      NotFoundError: when a resource is not found.
    """
    bucket_name = bucket_name or self._config.bucket
    try:
      return self._client.get_bucket(bucket_name)
    except exceptions.NotFound as err:
      logging.error(_GET_BUCKET_ERROR_MSG, bucket_name, err)
      raise NotFoundError(_GET_BUCKET_ERROR_MSG % (bucket_name, err)) 
Example #9
Source File: shelf_model.py    From loaner with Apache License 2.0
def audit(self, user_email, num_of_devices):
    """Marks a shelf audited.

    Args:
      user_email: str, email of the user auditing the shelf.
      num_of_devices: int, the number of devices on shelf.
    """
    self.last_audit_time = datetime.datetime.utcnow()
    self.last_audit_by = user_email
    self.audit_requested = False
    logging.info(_AUDIT_MSG, self.identifier, num_of_devices)
    event_action = 'shelf_audited'
    try:
      self = events.raise_event(event_action, shelf=self)
    except events.EventActionsError as err:
      # If an action implemented for shelf_audited is required for the rest of
      # the logic, an error should be raised. If an action is not required,
      # e.g. it only sends a notification email, the error should only be
      # logged.
      logging.error(_EVENT_ACTION_ERROR_MSG, event_action, err)
    self.put()
    self.stream_to_bq(
        user_email, _AUDIT_MSG % (self.identifier, num_of_devices)) 
Example #10
Source File: shelf_model.py    From loaner with Apache License 2.0
def disable(self, user_email):
    """Marks a shelf as disabled.

    Args:
      user_email: str, email of the user disabling the shelf.
    """
    self.enabled = False
    logging.info(_DISABLE_MSG, self.identifier)
    event_action = 'shelf_disable'
    try:
      self = events.raise_event(event_action, shelf=self)
    except events.EventActionsError as err:
      # If an action implemented for shelf_disable is required for the rest of
      # the logic, an error should be raised. If an action is not required,
      # e.g. it only sends a notification email, the error should only be
      # logged.
      logging.error(_EVENT_ACTION_ERROR_MSG, event_action, err)
    self.put()
    self.stream_to_bq(user_email, _DISABLE_MSG % self.identifier) 
Example #11
Source File: scenario_builder.py    From football with Apache License 2.0
def __init__(self, config):
    # Game config controls C++ engine and is derived from the main config.
    self._scenario_cfg = libgame.ScenarioConfig.make()
    self._config = config
    self._active_team = Team.e_Left
    scenario = None
    try:
      scenario = importlib.import_module('gfootball.scenarios.{}'.format(config['level']))
    except ImportError as e:
      logging.error('Loading scenario "%s" failed' % config['level'])
      logging.error(e)
      exit(1)
    scenario.build_scenario(self)
    self.SetTeam(libgame.e_Team.e_Left)
    self._FakePlayersForEmptyTeam(self._scenario_cfg.left_team)
    self.SetTeam(libgame.e_Team.e_Right)
    self._FakePlayersForEmptyTeam(self._scenario_cfg.right_team)
    self._BuildScenarioConfig() 
Example #12
Source File: log_before_import_test.py    From abseil-py with Apache License 2.0
def captured_stderr_filename():
  """Captures stderr and writes them to a temporary file.

  This uses os.dup/os.dup2 to redirect the stderr fd for capturing standard
  error of logging at import-time. We cannot mock sys.stderr because on the
  first log call, a default log handler writing to the mock sys.stderr is
  registered; it is never removed, so subsequent logs go to the mock
  in addition to the real stderr.

  Yields:
    The filename of captured stderr.
  """
  stderr_capture_file_fd, stderr_capture_file_name = tempfile.mkstemp()
  original_stderr_fd = os.dup(sys.stderr.fileno())
  os.dup2(stderr_capture_file_fd, sys.stderr.fileno())
  try:
    yield stderr_capture_file_name
  finally:
    os.close(stderr_capture_file_fd)
    os.dup2(original_stderr_fd, sys.stderr.fileno())


# Pre-initialization (aka "import" / __main__ time) test. 
Example #13
Source File: tpu_runtime_utils.py    From benchmarks with Apache License 2.0
def get_tpu_version(tpu_address):
  """Returns the current software version on tpu."""
  logging.info('Trying to connect to tpu %s', tpu_address)
  tpu_client = client.Client(tpu=tpu_address)
  tpu_client.wait_for_healthy()
  workers = tpu_client.network_endpoints()
  if workers:
    ip_addr = workers[0]['ipAddress']
    url = 'http://{}:8475/requestversion'.format(ip_addr)
    return _get_version_info(url)
  else:
    logging.error('No tpu endpoint info')
    return {
        'url': '',
        'hash': '',
        'branch': '',
        'piper_id': '',
    } 
Example #14
Source File: __init__.py    From abseil-py with Apache License 2.0
def set_stderrthreshold(s):
  """Sets the stderr threshold to the value passed in.

  Args:
    s: str|int, valid string values are case-insensitive 'debug',
        'info', 'warning', 'error', and 'fatal'; valid integer values are
        logging.DEBUG|INFO|WARNING|ERROR|FATAL.

  Raises:
      ValueError: Raised when s is an invalid value.
  """
  if s in converter.ABSL_LEVELS:
    FLAGS.stderrthreshold = converter.ABSL_LEVELS[s]
  elif isinstance(s, str) and s.upper() in converter.ABSL_NAMES:
    FLAGS.stderrthreshold = s
  else:
    raise ValueError(
        'set_stderrthreshold only accepts integer absl logging level '
        'from -3 to 1, or case-insensitive string values '
        "'debug', 'info', 'warning', 'error', and 'fatal'. "
        'But found "{}" ({}).'.format(s, type(s))) 
Example #15
Source File: app_engine.py    From loaner with Apache License 2.0
def get(self):
    """Retrieves the information associated with a given Project ID.

    Returns:
      A dictionary object representing a deployed Google App Engine application
          (Type: google.appengine.v1.Application).

    Raises:
      NotFoundError: when unable to find a Google App Engine application for the
          provided Google Cloud Project ID.
    """
    try:
      return self._client.apps().get(appsId=self._config.project).execute()
    except errors.HttpError as err:
      logging.error(_GET_ERROR_MSG, self._config.project, err)
      raise NotFoundError(_GET_ERROR_MSG % (self._config.project, err)) 
Example #16
Source File: uploader_subcommand.py    From tensorboard with Apache License 2.0
def execute(self, server_info, channel):
        api_client = export_service_pb2_grpc.TensorBoardExporterServiceStub(
            channel
        )
        outdir = self.output_dir
        try:
            exporter = exporter_lib.TensorBoardExporter(api_client, outdir)
        except exporter_lib.OutputDirectoryExistsError:
            msg = "Output directory already exists: %r" % outdir
            raise base_plugin.FlagsError(msg)
        num_experiments = 0
        try:
            for experiment_id in exporter.export():
                num_experiments += 1
                print("Downloaded experiment %s" % experiment_id)
        except exporter_lib.GrpcTimeoutException as e:
            print(
                "\nUploader has failed because of a timeout error.  Please reach "
                "out via e-mail to tensorboard.dev-support@google.com to get help "
                "completing your export of experiment %s." % e.experiment_id
            )
        print(
            "Done. Downloaded %d experiments to: %s" % (num_experiments, outdir)
        ) 
Example #17
Source File: text_ops.py    From delta with Apache License 2.0
def char_cut_tf(input_str):
  """Cut sentence char by char with tensoflow operations."""
  input_str = tf.convert_to_tensor(input_str)
  rank = len(input_str.get_shape())
  if rank == 1:
    output_str = tf.strings.unicode_split(input_str,
                                          "UTF-8").to_tensor(default_value="")
    output_str = tf.strings.reduce_join(output_str, axis=1, separator=" ")
  elif rank == 0:
    output_str = tf.strings.unicode_split(input_str, "UTF-8")
    output_str = tf.strings.reduce_join(output_str, axis=0, separator=" ")
  else:
    logging.error("Please check the shape of input_str!")
    raise Exception("Error input shape for input_str.")
  output_str = tf.strings.strip(output_str)
  return output_str 
Example #18
Source File: postprocess_utils.py    From delta with Apache License 2.0
def ids_to_sentences(ids, vocab_file_path):
  """
  transform array of numbers to array of tags/words
  ids:  [[1,2],[3,4]...]
  """

  vocab_dict = load_vocab_dict(vocab_file_path)
  id_to_vocab = {int(v): k for k, v in vocab_dict.items()}

  sentences = []
  for sent in ids:
    sent_char = []
    for s_char in sent:
      if s_char not in id_to_vocab:
        logging.error("label not in vocabs")
      else:
        sent_char.append(id_to_vocab[s_char])
    sentences.append(sent_char)
  assert len(sentences) == len(ids)
  return sentences 
Example #19
Source File: register.py    From delta with Apache License 2.0
def import_all_modules_for_register(config=None, only_nlp=False):
  """Import all modules for register."""
  if only_nlp:
    all_modules = ALL_NLP_MODULES
  else:
    all_modules = ALL_MODULES

  add_custom_modules(all_modules, config)

  logging.debug(f"All modules: {all_modules}")
  errors = []
  for base_dir, modules in all_modules:
    for name in modules:
      try:
        if base_dir != "":
          full_name = base_dir + "." + name
        else:
          full_name = name
        importlib.import_module(full_name)
        logging.debug(f"{full_name} loaded.")
      except ImportError as error:
        errors.append((name, error))
  _handle_errors(errors) 
Example #20
Source File: recurrent.py    From delta with Apache License 2.0
def __init__(self, config, **kwargs):
    super().__init__(**kwargs)
    logging.info("Initialize RnnEncoder {}...".format(self.name))

    model_config = config['model']['net']['structure']
    self.dropout_rate = model_config['dropout_rate']
    self.cell_dim = model_config['cell_dim']
    self.cell_type = model_config['cell_type']
    if self.cell_type.lower() == 'gru':
      rnn_class = tf.keras.layers.GRU
    elif self.cell_type.lower() == 'lstm':
      rnn_class = tf.keras.layers.LSTM
    elif self.cell_type.lower() == 'cudnngru':
      rnn_class = tf.keras.layers.CuDNNGRU
    elif self.cell_type.lower() == 'cudnnlstm':
      rnn_class = tf.keras.layers.CuDNNLSTM
    else:
      error_info = "Cell type: {} not supported now! Please check!".format(
          self.cell_type)
      logging.error(error_info)
      raise ValueError(error_info)

    self.sen_encoder = tf.keras.layers.Bidirectional(
        rnn_class(self.cell_dim, return_sequences=True, return_state=True))
    logging.info("Initialize RnnEncoder {} Done.".format(self.name)) 
Example #21
Source File: convert.py    From pytruth with Apache License 2.0
def _Check(self):
    """Verifies the existence and read+write access to all paths.

    Returns:
      Boolean, True if all paths are OK, otherwise False.
    """
    success = True
    for path in self._paths:
      if not os.path.isfile(path):
        logging.error('No such file: %s', path)
        success = False
      elif not os.access(path, os.R_OK):
        logging.error('No read access: %s', path)
        success = False
      elif not FLAGS.output and not os.access(path, os.W_OK):
        logging.error('No write access: %s', path)
        success = False

    return success 
Example #22
Source File: app.py    From abseil-py with Apache License 2.0
def parse_flags_with_usage(args):
  """Tries to parse the flags, print usage, and exit if unparseable.

  Args:
    args: [str], a non-empty list of the command line arguments including
        program name.

  Returns:
    [str], a non-empty list of remaining command line arguments after parsing
    flags, including program name.
  """
  try:
    return FLAGS(args)
  except flags.Error as error:
    sys.stderr.write('FATAL Flags parsing error: %s\n' % error)
    sys.stderr.write('Pass --helpshort or --helpfull to see help on flags.\n')
    sys.exit(1) 
Example #23
Source File: app.py    From abseil-py with Apache License 2.0
def _run_init(
    argv,
    flags_parser,
):
  """Does one-time initialization and re-parses flags on rerun."""
  if _run_init.done:
    return flags_parser(argv)
  command_name.make_process_name_useful()
  # Set up absl logging handler.
  logging.use_absl_handler()
  args = _register_and_parse_flags_with_usage(
      argv=argv,
      flags_parser=flags_parser,
  )
  if faulthandler:
    try:
      faulthandler.enable()
    except Exception:  # pylint: disable=broad-except
      # Some tests verify stderr output very closely, so don't print anything.
      # Disabled faulthandler is a low-impact error.
      pass
  _run_init.done = True
  return args 
Example #24
Source File: protocol.py    From pysc2 with Apache License 2.0
def read(self):
    """Read a Response, do some validation, and return it."""
    if FLAGS.sc2_verbose_protocol:
      self._log("-------------- [%s] Reading response --------------",
                self._port)
      start = time.time()
    response = self._read()
    if FLAGS.sc2_verbose_protocol:
      self._log("-------------- [%s] Read %s in %0.1f msec --------------\n%s",
                self._port, response.WhichOneof("response"),
                1000 * (time.time() - start), self._packet_str(response))
    if not response.HasField("status"):
      raise ProtocolError("Got an incomplete response without a status.")
    prev_status = self._status
    self._status = Status(response.status)  # pytype: disable=not-callable
    if response.error:
      err_str = ("Error in RPC response (likely a bug). "
                 "Prev status: %s, new status: %s, error:\n%s" % (
                     prev_status, self._status, "\n".join(response.error)))
      logging.error(err_str)
      raise ProtocolError(err_str)
    return response 
Example #25
Source File: __init__.py    From abseil-py with Apache License 2.0
def value(self, v):
    if v in _CPP_LEVEL_TO_NAMES:
      # --stderrthreshold also accepts numeric strings whose values are
      # Abseil C++ log levels.
      cpp_value = int(v)
      v = _CPP_LEVEL_TO_NAMES[v]  # Normalize to strings.
    elif v.lower() in _CPP_NAME_TO_LEVELS:
      v = v.lower()
      if v == 'warn':
        v = 'warning'  # Use 'warning' as the canonical name.
      cpp_value = int(_CPP_NAME_TO_LEVELS[v])
    else:
      raise ValueError(
          '--stderrthreshold must be one of (case-insensitive) '
          "'debug', 'info', 'warning', 'error', 'fatal', "
          "or '0', '1', '2', '3', not '%s'" % v)

    self._value = v 
Example #26
Source File: logging_functional_test_helper.py    From abseil-py with Apache License 2.0
def _test_stderrthreshold():
  """Tests modifying --stderrthreshold after flag parsing will work."""

  def log_things():
    logging.debug('FLAGS.stderrthreshold=%s, debug log', FLAGS.stderrthreshold)
    logging.info('FLAGS.stderrthreshold=%s, info log', FLAGS.stderrthreshold)
    logging.warning('FLAGS.stderrthreshold=%s, warning log',
                    FLAGS.stderrthreshold)
    logging.error('FLAGS.stderrthreshold=%s, error log', FLAGS.stderrthreshold)

  FLAGS.stderrthreshold = 'debug'
  log_things()
  FLAGS.stderrthreshold = 'info'
  log_things()
  FLAGS.stderrthreshold = 'warning'
  log_things()
  FLAGS.stderrthreshold = 'error'
  log_things() 
Example #27
Source File: app.py    From abseil-py with Apache License 2.0
def _call_exception_handlers(exception):
  """Calls any installed exception handlers."""
  for handler in EXCEPTION_HANDLERS:
    try:
      if handler.wants(exception):
        handler.handle(exception)
    except:  # pylint: disable=bare-except
      try:
        # We don't want to stop for exceptions in the exception handlers but
        # we shouldn't hide them either.
        logging.error(traceback.format_exc())
      except:  # pylint: disable=bare-except
        # In case even the logging statement fails, ignore.
        pass 
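As a side note, absl.logging also provides logging.exception(), which logs at ERROR severity and appends the current traceback automatically. The sketch below is not part of the project code above, and risky_operation is a hypothetical placeholder.

from absl import logging

try:
  risky_operation()  # Hypothetical call that may raise.
except Exception:  # pylint: disable=broad-except
  # Similar in spirit to logging.error(traceback.format_exc()) above.
  logging.exception('Unhandled error')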
Example #28
Source File: recurrent.py    From delta with Apache License 2.0
def __init__(self, config, emb_layer, vocab_size, **kwargs):
    super().__init__(**kwargs)
    logging.info("Initialize RnnDecoder {}...".format(self.name))
    self.is_infer = config['model']['is_infer']
    model_config = config['model']['net']['structure']
    self.dropout_rate = model_config['dropout_rate']
    self.cell_dim = model_config['cell_dim']
    self.decode_cell_type = model_config['decode_cell_type']
    self.max_dec_len = model_config['max_dec_len']
    self.dec_end_id = 5
    self.dec_start_id = 4
    self.beam_size = model_config['beam_size']
    self.length_penalty = model_config['length_penalty']
    self.swap_memory = model_config['swap_memory']
    self.time_major = model_config['time_major']
    self.initial_decode_state = model_config['initial_decode_state']
    self.attn_Type = model_config['attn_Type']
    if self.decode_cell_type.lower() == 'gru':
      rnn_class = tf.nn.rnn_cell.GRUCell
    elif self.decode_cell_type.lower() == 'lstm':
      rnn_class = tf.nn.rnn_cell.LSTMCell
    else:
      error_info = "Cell type: {} not supported now! Please check!".format(
          self.decode_cell_type)
      logging.error(error_info)
      raise ValueError(error_info)

    self.cell = rnn_class(2 * self.cell_dim)
    self.embed = emb_layer
    self.vocab_size = vocab_size
    self.embed_d = tf.keras.layers.Dropout(self.dropout_rate) 
Example #29
Source File: app_engine.py    From loaner with Apache License 2.0
def create(self, location):
    """Creates a new Google App Engine application in a given location.

    Args:
      location: str, the location in which the Google App Engine application is
          to be hosted.

    Returns:
      A dictionary object representing the newly created Google App Engine
          application (Type: google.appengine.v1.Application).

    Raises:
      CreationError: when creation fails (e.g. failed to authenticate, improper
          scopes, project already exists, etc).
      NotFoundError: if the provided location is not a valid location.
    """
    if location not in LOCATIONS:
      raise NotFoundError(
          'the location provided {!r} was not found in the list of approved '
          'locations {}'.format(location, LOCATIONS))
    try:
      return self._client.apps().create(
          body={
              'id': self._config.project,
              'locationId': location,
          },
      ).execute()
    except errors.HttpError as err:
      logging.error(_CREATE_ERROR_MSG, self._config.project, location, err)
      raise CreationError(
          _CREATE_ERROR_MSG % (self._config.project, location, err)) 
Example #30
Source File: native_module.py    From hub with Apache License 2.0
def add_signature(name=None, inputs=None, outputs=None):
  """Adds a signature to the module definition.

  DEPRECATION NOTE: This belongs to the hub.Module API and file format for TF1.
  For TF2, switch to plain SavedModels.

  NOTE: This must be called within a `module_fn` that is defining a hub.Module.

  Args:
    name: Signature name as a string. If omitted, it is interpreted as 'default'
      and is the signature used when `Module.__call__` `signature` is not
      specified.
    inputs: A dict from input name to Tensor or SparseTensor to feed when
      applying the signature. If a single tensor is passed, it is interpreted
      as a dict with a single 'default' entry.
    outputs: A dict from output name to Tensor or SparseTensor to return from
      applying the signature. If a single tensor is passed, it is interpreted
      as a dict with a single 'default' entry.

  Raises:
    ValueError: if the arguments are invalid.
  """
  if not name:
    name = "default"
  if inputs is None:
    inputs = {}
  if outputs is None:
    outputs = {}
  if not isinstance(inputs, dict):
    inputs = {"default": inputs}
  if not isinstance(outputs, dict):
    outputs = {"default": outputs}
  message = find_signature_inputs_from_multivalued_ops(inputs)
  if message: logging.error(message)
  message = find_signature_input_colocation_error(name, inputs)
  if message: raise ValueError(message)
  saved_model_lib.add_signature(name, inputs, outputs)