Python cloudstorage.NotFoundError() Examples

The following are 8 code examples of cloudstorage.NotFoundError(), taken from open-source projects. The source file and project for each example are noted above its code. You may also want to check out all other available functions and classes of the cloudstorage module.
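For context, cloudstorage here is the App Engine GCS client library (GoogleAppEngineCloudStorageClient), and cloudstorage.NotFoundError is the exception it raises when a requested object does not exist, for example from cloudstorage.open(), cloudstorage.stat(), or cloudstorage.delete(). Below is a minimal sketch of the typical pattern; the path and helper name are placeholders, not part of the library.

import logging

import cloudstorage


def read_object_or_none(path):
    """Return the contents of a GCS object, or None if it is missing.

    `path` is an illustrative GCS path of the form '/bucket/object';
    `read_object_or_none` is a hypothetical helper, not a library API.
    """
    try:
        # cloudstorage.open() raises cloudstorage.NotFoundError when the
        # object does not exist in the bucket.
        gcs_file = cloudstorage.open(path)
    except cloudstorage.NotFoundError:
        logging.warning('GCS object %s not found; skipping.', path)
        return None
    try:
        return gcs_file.read()
    finally:
        gcs_file.close()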
Example #1
Source File: handlers_frontend_test.py    From luci-py with Apache License 2.0
def test_content_gcs_missing(self):
    content = 'Foo'
    compressed = zlib.compress(content)
    namespace = 'default-gzip'
    hashhex = hashlib.sha1(content).hexdigest()

    def read_file(bucket, key):
      self.assertEqual(u'sample-app', bucket)
      self.assertEqual(namespace + '/' + hashhex, key)
      raise cloudstorage.NotFoundError('Someone deleted the file from GCS')
    self.mock(gcs, 'read_file', read_file)

    key = model.get_entry_key(namespace, hashhex)
    model.new_content_entry(
        key,
        is_isolated=False,
        compressed_size=len(compressed),
        expanded_size=len(content),
        is_verified=True).put()

    self.set_as_reader()
    self.app.get(
        '/content?namespace=default-gzip&digest=%s' % hashhex, status=404)
    self.assertEqual(None, key.get()) 
Example #2
Source File: input_readers.py    From locality-sensitive-hashing with MIT License
def next(self):
    """Returns the next input from this input reader, a block of bytes.

    Non existent files will be logged and skipped. The file might have been
    removed after input splitting.

    Returns:
      The next input from this input reader in the form of a cloudstorage
      ReadBuffer that supports a File-like interface (read, readline, seek,
      tell, and close). An error may be raised if the file can not be opened.

    Raises:
      StopIteration: The list of files has been exhausted.
    """
    options = {}
    if self._buffer_size:
      options["read_buffer_size"] = self._buffer_size
    if self._account_id:
      options["_account_id"] = self._account_id
    while True:
      filename = self._next_file()
      if filename is None:
        raise StopIteration()
      try:
        start_time = time.time()
        handle = cloudstorage.open(filename, **options)

        ctx = context.get()
        if ctx:
          operation.counters.Increment(
              COUNTER_IO_READ_MSEC, int((time.time() - start_time) * 1000))(ctx)

        return handle
      except cloudstorage.NotFoundError:
        logging.warning("File %s may have been removed. Skipping file.",
                        filename) 
Example #3
Source File: _gcs.py    From locality-sensitive-hashing with MIT License
def next(self):
    """Returns a handler to the next file.

    Non existent files will be logged and skipped. The file might have been
    removed after input splitting.

    Returns:
      The next input from this input reader in the form of a cloudstorage
      ReadBuffer that supports a File-like interface (read, readline, seek,
      tell, and close). An error may be raised if the file can not be opened.

    Raises:
      StopIteration: The list of files has been exhausted.
    """
    options = {}
    if self._buffer_size:
      options["read_buffer_size"] = self._buffer_size
    if self._account_id:
      options["_account_id"] = self._account_id
    while True:
      filename = self._next_file()
      if filename is None:
        raise StopIteration()
      if (self._path_filter and
          not self._path_filter.accept(self._slice_ctx, filename)):
        continue
      try:
        start_time = time.time()
        handle = cloudstorage.open(filename, **options)
        self._slice_ctx.incr(self.COUNTER_IO_READ_MSEC,
                             int((time.time() - start_time) * 1000))
        self._slice_ctx.incr(self.COUNTER_FILE_READ)
        return handle
      except cloudstorage.NotFoundError:
        logging.warning("File %s may have been removed. Skipping file.",
                        filename)
        self._slice_ctx.incr(self.COUNTER_FILE_MISSING) 
Example #4
Source File: main.py    From python-docs-samples with Apache License 2.0
def delete_files(self):
    self.response.write('Deleting files...\n')
    for filename in self.tmp_filenames_to_clean_up:
        self.response.write('Deleting file {}\n'.format(filename))
        try:
            cloudstorage.delete(filename)
        except cloudstorage.NotFoundError:
            pass
# [END delete_files] 
Example #5
Source File: _handlers.py    From GAEPyPI with GNU General Public License v3.0
def get(self, name, version, filename):
    try:
        package = Package(self.get_storage(), name, version)
        with package.get_file(filename) as gcs_file:
            self.response.content_type = 'application/octet-stream'
            self.response.headers.add('Content-Disposition', 'attachment; filename={0}'.format(filename))
            self.response.write(gcs_file.read())
    except NotFoundError:
        self.write404()
Example #6
Source File: input_readers.py    From appengine-mapreduce with Apache License 2.0
def next(self):
    """Returns the next input from this input reader, a block of bytes.

    Non existent files will be logged and skipped. The file might have been
    removed after input splitting.

    Returns:
      The next input from this input reader in the form of a cloudstorage
      ReadBuffer that supports a File-like interface (read, readline, seek,
      tell, and close). An error may be raised if the file can not be opened.

    Raises:
      StopIteration: The list of files has been exhausted.
    """
    options = {}
    if self._buffer_size:
      options["read_buffer_size"] = self._buffer_size
    if self._account_id:
      options["_account_id"] = self._account_id
    while True:
      filename = self._next_file()
      if filename is None:
        raise StopIteration()
      try:
        start_time = time.time()
        handle = cloudstorage.open(filename, **options)

        ctx = context.get()
        if ctx:
          operation.counters.Increment(
              COUNTER_IO_READ_MSEC, int((time.time() - start_time) * 1000))(ctx)

        return handle
      except cloudstorage.NotFoundError:
        # Fail the job if we're strict on missing input.
        if getattr(self, "_fail_on_missing_input", False):
          raise errors.FailJobError(
              "File missing in GCS, aborting: %s" % filename)
        # Move on otherwise.
        logging.warning("File %s may have been removed. Skipping file.",
                        filename) 
Example #7
Source File: _gcs.py    From appengine-mapreduce with Apache License 2.0
def next(self):
    """Returns a handler to the next file.

    Non existent files will be logged and skipped. The file might have been
    removed after input splitting.

    Returns:
      The next input from this input reader in the form of a cloudstorage
      ReadBuffer that supports a File-like interface (read, readline, seek,
      tell, and close). An error may be raised if the file can not be opened.

    Raises:
      StopIteration: The list of files has been exhausted.
    """
    options = {}
    if self._buffer_size:
      options["read_buffer_size"] = self._buffer_size
    if self._account_id:
      options["_account_id"] = self._account_id
    while True:
      filename = self._next_file()
      if filename is None:
        raise StopIteration()
      if (self._path_filter and
          not self._path_filter.accept(self._slice_ctx, filename)):
        continue
      try:
        start_time = time.time()
        handle = cloudstorage.open(filename, **options)
        self._slice_ctx.incr(self.COUNTER_IO_READ_MSEC,
                             int((time.time() - start_time) * 1000))
        self._slice_ctx.incr(self.COUNTER_FILE_READ)
        return handle
      except cloudstorage.NotFoundError:
        logging.warning("File %s may have been removed. Skipping file.",
                        filename)
        self._slice_ctx.incr(self.COUNTER_FILE_MISSING) 
Example #8
Source File: input_readers.py    From python-compat-runtime with Apache License 2.0
def next(self):
    """Returns the next input from this input reader, a block of bytes.

    Non existent files will be logged and skipped. The file might have been
    removed after input splitting.

    Returns:
      The next input from this input reader in the form of a cloudstorage
      ReadBuffer that supports a File-like interface (read, readline, seek,
      tell, and close). An error may be raised if the file can not be opened.

    Raises:
      StopIteration: The list of files has been exhausted.
    """
    options = {}
    if self._buffer_size:
      options["read_buffer_size"] = self._buffer_size
    if self._account_id:
      options["_account_id"] = self._account_id
    while True:
      filename = self._next_file()
      if filename is None:
        raise StopIteration()
      try:
        start_time = time.time()
        handle = cloudstorage.open(filename, **options)

        ctx = context.get()
        if ctx:
          operation.counters.Increment(
              COUNTER_IO_READ_MSEC, int((time.time() - start_time) * 1000))(ctx)

        return handle
      except cloudstorage.NotFoundError:
        self._on_missing_input_file(filename)

        if getattr(self, "_fail_on_missing_input", False):
          raise errors.FailJobError(
              "File missing in GCS, aborting: %s" % filename)

        logging.warning("File %s may have been removed. Skipping file.",
                        filename)