Python multiprocessing.queues.Empty Examples

The following are 6 code examples of the multiprocessing.queues.Empty exception, collected from open-source projects; the source file, project, and license are noted above each example. Note that multiprocessing.queues.Empty is simply queue.Empty re-exported: multiprocessing queues raise it when a timed get() times out or when get_nowait() finds the queue empty.
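All of the examples share one pattern: call get(timeout=...) or get_nowait() on a multiprocessing queue and catch Empty to decide whether to poll again, send a heartbeat, or give up. Here is a minimal self-contained sketch of that pattern; the drain helper is illustrative only and is not taken from any of the projects below:

import multiprocessing
from multiprocessing import queues

def drain(q, timeout=0.1):
    # get(timeout=...) raises queues.Empty when nothing arrives in time;
    # get_nowait() would raise it immediately on an empty queue.
    items = []
    while True:
        try:
            items.append(q.get(timeout=timeout))
        except queues.Empty:
            break
    return items

if __name__ == '__main__':
    q = multiprocessing.Queue()
    for i in range(3):
        q.put(i)
    print(drain(q))  # [0, 1, 2]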
Example #1
Source File: worker.py    From doufen with MIT License
def __call__(self, *args, **kwargs):
        queue_in = self.queue_in
        queue_out = self.queue_out
        logger = logging.getLogger()
        logger.addHandler(QueueHandler(queue_out))
        logger.setLevel(logging.DEBUG if self._debug else logging.INFO)
        db.init(self._settings['db_path'], False)

        self._ready()

        heartbeat_sequence = 1
        while True:
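            # A timed get doubles as the heartbeat timer: queues.Empty
            # means no task arrived within HEARTBEAT_INTERVAL seconds.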
            try:
                task = queue_in.get(timeout=HEARTBEAT_INTERVAL)
                if isinstance(task, tasks.Task):
                    self._work(str(task))
                    self._done(task(**self._settings))
            except queues.Empty:
                self._heartbeat(heartbeat_sequence)
                heartbeat_sequence += 1
            except Exception as e:
                self._error(e, traceback.format_exc())
            except KeyboardInterrupt:
                break 
Example #2
Source File: cdx-index-client.py    From cdx-index-client with MIT License
def do_work(job_queue, counter=None):
    """ Process work function, read more fetch page jobs
    from queue until all jobs are finished
    """
    signal.signal(signal.SIGINT, signal.SIG_IGN)
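    # empty() is only a hint: another worker can drain the queue between
    # the check and get_nowait(), which is why Empty is handled below.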
    while not job_queue.empty():
        job = None
        try:
            job = job_queue.get_nowait()
            fetch_result_page(job)

            num_done = 0
            with counter.get_lock():
                counter.value += 1
                num_done = counter.value

            logging.info('{0} page(s) of {1} finished'.format(num_done,
                                                              job['num_pages']))
        except Empty:
            pass

        except KeyboardInterrupt:
            break

        except Exception:
            if not job:
                raise

            retries = job.get('retries', 0)
            if retries < job['max_retries']:
                logging.error('Retrying Page {0}'.format(job['page']))
                job['retries'] = retries + 1
                job_queue.put_nowait(job)
            else:
                logging.error('Max retries exceeded for page {0}'.
                              format(job['page'])) 
Example #3
Source File: queue.py    From pypeln with MIT License
def __iter__(self) -> tp.Iterator[T]:

        while not self.is_done():

            if self.namespace.exception:
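                # A worker reported an exception: rebuild it locally with
                # the remote traceback appended to the message.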
                exception, trace = self.exception_queue.get()

                try:
                    exception = exception(f"\n\n{trace}")
                except Exception:
                    exception = Exception(f"\n\nOriginal: {exception}\n\n{trace}")

                raise exception

            try:
                x = self.get(timeout=pypeln_utils.TIMEOUT)
            except Empty:
                continue

            if isinstance(x, pypeln_utils.Done):
                with self.lock:
                    self.namespace.remaining -= 1

                continue

            yield x 
Example #4
Source File: pool.py    From Detectron-PYTORCH with Apache License 2.0
def next(self, timeout=None):
        self._cond.acquire()
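        # Fast path: non-blocking get. On Empty, either all items have been
        # consumed (StopIteration) or we wait on the condition and retry
        # with a timed get.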
        try:
            try:
                item = self._items.get_nowait()
                self._empty_sema.release()
            except Empty:
                if self._index == self._length:
                    raise StopIteration
                self._cond.wait(timeout)
                try:
                    item = self._items.get(timeout=timeout)
                    self._empty_sema.release()
                except Empty:
                    if self._index == self._length:
                        raise StopIteration
                    raise TimeoutError
        finally:
            self._cond.release()

        success, value = item
        if success:
            return value
        raise value 
Example #5
Source File: server.py    From doufen with MIT License
def _watch_worker(self):
        """
        监控工作队列
        """
        while True:
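            # Drain worker messages one per tick; queues.Empty simply
            # means the workers produced no output since the last poll.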
            try:
                ret = self._worker_output.get_nowait()
                if isinstance(ret, logging.LogRecord):
                    logging.root.handle(ret)
                    self.application.broadcast(json.dumps({
                        'sender': 'logger',
                        'message': ret.getMessage(),
                        'level': ret.levelname,
                    }))
                elif isinstance(ret, Worker.ReturnReady):
                    logging.info('"{0}" is ready'.format(ret.name))
                    self.application.broadcast(json.dumps({
                        'sender': 'worker',
                        'src': ret.name,
                        'event': 'ready',
                    }))
                    self._launch_task()
                elif isinstance(ret, Worker.ReturnDone):
                    logging.info('"{0}" has done'.format(ret.name))
                    self._workers[ret.name].toggle_task()
                    self.application.broadcast(json.dumps({
                        'sender': 'worker',
                        'src': ret.name,
                        'event': 'done',
                    }))
                    self._launch_task()
                elif isinstance(ret, Worker.ReturnWorking):
                    logging.info('"{0}" is working for "{1}"'.format(ret.name, ret.task))
                    self._workers[ret.name].toggle_task(ret.task)
                    self.application.broadcast(json.dumps({
                        'sender': 'worker',
                        'src': ret.name,
                        'event': 'working',
                        'target': str(ret.task),
                    }))
                elif isinstance(ret, Worker.ReturnError):
                    logging.error('"{0}" error: {1}\n{2}'.format(ret.name, ret.exception, ret.traceback))
                    self._workers[ret.name].toggle_task()
                    self.application.broadcast(json.dumps({
                        'sender': 'worker',
                        'src': ret.name,
                        'event': 'error',
                        'message': str(ret.exception),
                    }))
                    self._launch_task()
                elif isinstance(ret, Worker.ReturnHeartbeat):
                    logging.info('"{0}" heartbeat:{1}'.format(ret.name, ret.sequence))
            except queues.Empty:
                pass
            # Poll the queue every 0.1 seconds
            yield tornado.gen.sleep(0.1) 
Example #6
Source File: queuefile.py    From quay with Apache License 2.0
def read(self, size=-1):
        # If the queuefile was closed or we have finished, send back any remaining data.
        if self._closed or self._done:
            if size == -1:
                buf = self._buffer
                self._buffer = b""
                return buf

            buf = self._buffer[0:size]
            self._buffer = self._buffer[size:]
            return buf

        # Loop until we reach the requested data size (or forever if all data was requested).
        while (len(self._buffer) < size) or (size == -1):
            exception = None
            try:
                result = self._queue.get(block=True, timeout=self._timeout)
                exception = result.exception
            except Empty as em:
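                # A timed-out get is treated like a producer failure: it
                # closes the file and is dispatched to the handlers below.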
                exception = em

            # Check for any exceptions raised by the queue process.
            if exception is not None:
                self._closed = True
                self.raised_exception = True

                # Fire off the exception to any registered handlers. If no handlers were registered,
                # then raise the exception locally.
                handled = False
                for handler in self._exception_handlers:
                    handler(exception)
                    handled = True

                if handled:
                    return b""
                else:
                    raise exception

            # Check for no further data. If the QueueProcess has finished producing data, then break
            # out of the loop to return the data already acquired.
            if result.data is None:
                self._done = True
                break

            # Add the data to the buffer.
            self._buffer += result.data
            self._total_size += len(result.data)

        # Return the requested slice of the buffer.
        if size == -1:
            buf = self._buffer
            self._buffer = b""
            return buf

        buf = self._buffer[0:size]
        self._buffer = self._buffer[size:]
        return buf