Python queue.Full() Examples

The following are 30 code examples that raise or handle queue.Full, the exception raised when a put() on a bounded queue.Queue (or multiprocessing.Queue) cannot complete because no free slot is available. Each example is taken from the open-source project and source file named above it.
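
Before the project code, here is a minimal, self-contained sketch (not taken from any of the projects below) of the two usual triggers: a non-blocking put_nowait() and a blocking put() with a timeout.

import queue

q = queue.Queue(maxsize=1)   # bounded queue: only one item fits
q.put("first")

try:
    q.put_nowait("second")            # non-blocking put on a full queue
except queue.Full:
    print("queue is full, dropping item")

try:
    q.put("second", timeout=0.1)      # blocking put that gives up after 0.1 s
except queue.Full:
    print("no free slot became available within the timeout")
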
Example #1
Source File: events_handler.py    From plugin.video.netflix with MIT License
def add_event_to_queue(self, event_type, event_data, player_state):
        """Adds an event in the queue of events to be processed"""
        videoid = common.VideoId.from_dict(event_data['videoid'])
        # pylint: disable=unused-variable
        previous_data, previous_player_state = self.cache_data_events.get(videoid.value, ({}, None))
        manifest = get_manifest(videoid)
        url = manifest['links']['events']['href']

        if previous_data.get('xid') in self.banned_events_ids:
            common.warn('EVENT [{}] - Not added to the queue. The xid {} is banned due to a previous failed request',
                        event_type, previous_data.get('xid'))
            return

        from resources.lib.services.msl.msl_request_builder import MSLRequestBuilder
        request_data = MSLRequestBuilder.build_request_data(url,
                                                            self._build_event_params(event_type,
                                                                                     event_data,
                                                                                     player_state,
                                                                                     manifest))
        try:
            self.queue_events.put_nowait(Event(request_data, event_data))
        except queue.Full:
            common.warn('EVENT [{}] - Not added to the queue. The event queue is full.', event_type) 
Example #2
Source File: pooling.py    From plugin.video.netflix with MIT License
def _queue_connection(self, cnx):
        """Put connection back in the queue

        This method is putting a connection back in the queue. It will not
        acquire a lock as the methods using _queue_connection() will have it
        set.

        Raises PoolError on errors.
        """
        if not isinstance(cnx, MySQLConnection):
            raise errors.PoolError(
                "Connection instance not subclass of MySQLConnection.")

        try:
            self._cnx_queue.put(cnx, block=False)
        except queue.Full:
            raise errors.PoolError("Failed adding connection; queue is full")
Example #3
Source File: llcp-test-server.py    From nfcpy with European Union Public License 1.1
def serve(self, socket):
        echo_queue = queue.Queue(2)
        echo_thread = threading.Thread(target=self.echo,
                                       args=(socket, echo_queue))
        echo_thread.start()
        peer = socket.getpeername()
        log.info("serving connection from sap {0}".format(peer))
        while socket.poll("recv"):
            data = socket.recv()
            if data is None:
                break
            log.info("rcvd {0} byte from sap {1}".format(len(data), peer))
            if echo_queue.full():
                socket.setsockopt(nfc.llcp.SO_RCVBSY, True)
            echo_queue.put(data)
        log.info("remote peer {0} closed closed connection".format(peer))
        try:
            echo_queue.put_nowait(int(0))
        except queue.Full:
            pass
        echo_thread.join()
        socket.close()
        log.info("serve thread terminated") 
Example #4
Source File: input_dataset.py    From calamari with Apache License 2.0
def run(self) -> None:
        data = []
        current_idx = 0
        while True:
            while True:
                try:
                    data.append(self.input_queue.get(timeout=0.1))
                except queue.Empty:
                    continue
                except KeyboardInterrupt:
                    return

                break

            data.sort(key=lambda data: data[0])
            while len(data) > 0 and data[0][0] <= current_idx:
                try:
                    self.output_queue.put(data[0], timeout=0.1)
                    self.output_queue.task_done()
                    del data[0]
                    current_idx += 1
                except queue.Full:
                    continue
                except KeyboardInterrupt:
                    return 
Example #5
Source File: input_dataset.py    From calamari with Apache License 2.0
def run(self) -> None:
        while True:
            try:
                data = self.input_queue.get(timeout=0.1)
            except queue.Empty:
                continue
            except KeyboardInterrupt:
                # allow keyboard interrupt
                return

            out = self.apply_single(*data)
            if out:
                while True:
                    try:
                        self.output_queue.put(out, timeout=0.1)
                        break
                    except queue.Full:
                        continue
                    except KeyboardInterrupt:
                        return

            self.output_queue.task_done() 
Example #6
Source File: multiprocessing_sampler.py    From garage with MIT License
def shutdown_worker(self):
        """Shutdown the workers."""
        for (q, w) in zip(self._to_worker, self._workers):
            # Loop until either the exit message is accepted or the process
            # has closed. This might block, but it ensures that the workers
            # are closed.
            while True:
                try:
                    # Set a timeout in case the child process crashed.
                    q.put(('exit', ()), timeout=1)
                    break
                except queue.Full:
                    # If the child process has crashed, we're done here.
                    # Otherwise it should eventually accept our message.
                    if not w.is_alive():
                        break
            # If this hangs forever, most likely a queue needs
            # cancel_join_thread called on it, or a subprocess has tripped the
            # "closing dowel with TensorboardOutput blocks forever bug."
            w.join()
        for q in self._to_worker:
            q.close()
        self._to_sampler.close() 
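
The shutdown idiom above (retry the put with a short timeout, and stop retrying once the worker is no longer alive) is not specific to garage. A stripped-down sketch of the same pattern, assuming the worker is something like a multiprocessing.Process with an is_alive() method:

import queue

def send_exit(cmd_queue, worker, timeout=1):
    """Try to deliver an 'exit' message; give up only if the worker has died."""
    while True:
        try:
            cmd_queue.put(('exit', ()), timeout=timeout)  # may raise queue.Full
            return True
        except queue.Full:
            if not worker.is_alive():
                # A dead worker will never drain the queue, so stop retrying.
                return False
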
Example #7
Source File: graphyte.py    From graphyte with MIT License
def send(self, metric, value, timestamp=None, tags={}):
        """Send given metric and (int or float) value to Graphite host.
        Performs send on background thread if "interval" was specified when
        creating this Sender.

        If a "tags" dict is specified, send the tags to the Graphite host along with the metric.
        """
        if timestamp is None:
            timestamp = time.time()
        message = self.build_message(metric, value, timestamp, tags)

        if self.interval is None:
            self.send_socket(message)
        else:
            try:
                self._queue.put_nowait(message)
            except queue.Full:
                logger.error('queue full when sending {!r}'.format(message)) 
Example #8
Source File: dpooling.py    From python-mysql-pool with MIT License
def _queue_connection(self, cnx):
        """Put connection back in the queue

        This method is putting a connection back in the queue. It will not
        acquire a lock as the methods using _queue_connection() will have it
        set.

        Raises PoolError on errors.
        """
        if not isinstance(cnx, MySQLConnection):
            raise errors.PoolError(
                "Connection instance not subclass of MySQLConnection.")

        try:
            self._cnx_queue.put(cnx, block=False)
        except queue.Full:
            raise errors.PoolError("Failed adding connection; queue is full")
Example #9
Source File: serialdriver.py    From crazyflie-lib-python with GNU General Public License v2.0
def send_packet(self, pk):
        try:
            self.out_queue.put(pk, True, 2)
        except queue.Full:
            if self.link_error_callback:
                self.link_error_callback(
                    'RadioDriver: Could not send packet to copter') 
Example #10
Source File: connectionpool.py    From deepWordBug with Apache License 2.0
def _put_conn(self, conn):
        """
        Put a connection back into the pool.

        :param conn:
            Connection object for the current host and port as returned by
            :meth:`._new_conn` or :meth:`._get_conn`.

        If the pool is already full, the connection is closed and discarded
        because we exceeded maxsize. If connections are discarded frequently,
        then maxsize should be increased.

        If the pool is closed, then the connection will be closed and discarded.
        """
        try:
            self.pool.put(conn, block=False)
            return  # Everything is dandy, done.
        except AttributeError:
            # self.pool is None.
            pass
        except Full:
            # This should never happen if self.block == True
            log.warning(
                "Connection pool is full, discarding connection: %s" %
                self.host)

        # Connection never got put back into the pool, close it.
        if conn:
            conn.close() 
Example #11
Source File: __init__.py    From universe with MIT License
def record(self, error, wrap=True):
        logger.debug('Error in thread %s: %s', threading.current_thread().name, error)
        if wrap:
            error = format_error(error)

        try:
            self.queue.put_nowait(error)
        except queue.Full:
            pass 
Example #12
Source File: protocol_loop.py    From ddt4all with GNU General Public License v3.0
def close(self):
        if self.is_open:
            self.is_open = False
            try:
                self.queue.put_nowait(None)
            except queue.Full:
                pass
        super(Serial, self).close() 
Example #13
Source File: jobs.py    From cozy with Apache License 2.0
def _copy_items(self):
        while not self.stop_requested:
            try:
                self.sideq.put(self.q.get(timeout=1), timeout=1)
            except Empty:
                pass
            except Full:
                pass 
Example #14
Source File: _threading.py    From satori with Apache License 2.0
def put_nowait(self, item):
        """Put an item into the queue without blocking.

        Only enqueue the item if a free slot is immediately available.
        Otherwise raise the Full exception.
        """
        return self.put(item, False) 
Example #15
Source File: subbrute.py    From AttackSurfaceMapper with GNU General Public License v3.0
def add_nameserver(self, nameserver):
        keep_trying = True
        while keep_trying:
            try:
                self.resolver_q.put(nameserver, timeout=1)
                trace("Added nameserver:", nameserver)
                keep_trying = False
            except Exception as e:
                if type(e) == Queue.Full or str(type(e)) == "<class 'queue.Full'>":
                    keep_trying = True 
Example #16
Source File: read_ahead_cursor.py    From amazon-qldb-driver-python with Apache License 2.0
def _populate_queue(self):
        """
        Fill the buffer queue with pages. If ClientError is received, it is put in the queue and execution stops.
        If the parent transaction is closed, stop fetching results.
        """
        try:
            next_page_token = self._page.get('NextPageToken')
            while next_page_token is not None:
                statement_result = self._session._fetch_page(self._transaction_id, next_page_token)
                page = statement_result.get('Page')
                while True:
                    try:
                        # Timeout of 50ms.
                        self._queue.put(page, timeout=0.05)
                        next_page_token = page.get('NextPageToken')
                        break
                    except Full:
                        # When timeout is reached, check if the read-ahead retrieval thread should end.
                        if not self._is_open:
                            logger.debug('Cursor was closed; read-ahead retriever thread stopping.')
                            raise ResultClosedError(self._session.token)
        except (ClientError, ResultClosedError) as error:
            while not self._queue.empty():
                self._queue.get_nowait()
            logger.debug('Queued an exception: {}'.format(error))
            self._queue.put(error) 
Example #17
Source File: multicore.py    From imgaug with MIT License
def _load_batches(cls, load_batch_func, queue_internal, join_signal,
                      seedval):
        # pylint: disable=broad-except
        if seedval is not None:
            random.seed(seedval)
            np.random.seed(seedval)
            iarandom.seed(seedval)

        try:
            gen = (
                load_batch_func()
                if not ia.is_generator(load_batch_func)
                else load_batch_func
            )
            for batch in gen:
                assert isinstance(batch, Batch), (
                    "Expected batch returned by load_batch_func to "
                    "be of class imgaug.Batch, got %s." % (
                        type(batch),))
                batch_pickled = pickle.dumps(batch, protocol=-1)
                while not join_signal.is_set():
                    try:
                        queue_internal.put(batch_pickled, timeout=0.005)
                        break
                    except QueueFull:
                        pass
                if join_signal.is_set():
                    break
        except Exception:
            traceback.print_exc()
        finally:
            queue_internal.put("")
        time.sleep(0.01) 
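
Examples #16 and #17 both use the same producer-side idiom: retry the put with a short timeout and re-check a stop condition between attempts, so the producer cannot block forever on a full queue once the consumer is gone. A minimal sketch of that idiom with threading.Event as the stop signal:

import queue
import threading

def produce(out_queue, stop_event, make_item):
    """Feed items into out_queue until stop_event is set."""
    while not stop_event.is_set():
        item = make_item()
        while not stop_event.is_set():
            try:
                # Short timeout so the stop flag is re-checked regularly.
                out_queue.put(item, timeout=0.05)
                break
            except queue.Full:
                continue
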
Example #18
Source File: tracking_process.py    From stytra with GNU General Public License v3.0
def send_to_gui(self, frametime, frame):
        """ Sends the current frame to the GUI queue at the appropriate framerate"""
        if self.framerate_rec.current_framerate:
            every_x = max(
                int(self.framerate_rec.current_framerate / self.gui_framerate), 1
            )
        else:
            every_x = 1
        if self.i == 0:
            try:
                self.gui_queue.put(frame, timestamp=frametime)
            except Full:
                self.message_queue.put("E:GUI queue full")

        self.i = (self.i + 1) % every_x 
Example #19
Source File: queue.py    From satori with Apache License 2.0
def put(self, item, block=True, timeout=None):
        if self.hub is getcurrent():
            if self.getters:
                getter = self.getters.popleft()
                getter.switch(item)
                return
            raise Full

        if not block:
            timeout = 0

        waiter = Waiter()
        item = (item, waiter)
        self.putters.append(item)
        timeout = Timeout._start_new_or_dummy(timeout, Full)
        try:
            if self.getters:
                self._schedule_unlock()
            result = waiter.get()
            if result is not waiter:
                raise InvalidSwitchError("Invalid switch into Channel.put: %r" % (result, ))
        except:
            _safe_remove(self.putters, item)
            raise
        finally:
            timeout.cancel() 
Example #20
Source File: queue.py    From satori with Apache License 2.0
def put_nowait(self, item):
        """Put an item into the queue without blocking.

        Only enqueue the item if a free slot is immediately available.
        Otherwise raise the :class:`Full` exception.
        """
        self.put(item, False) 
Example #21
Source File: queue.py    From satori with Apache License 2.0
def put(self, item, block=True, timeout=None):
        """Put an item into the queue.

        If optional arg *block* is true and *timeout* is ``None`` (the default),
        block if necessary until a free slot is available. If *timeout* is
        a positive number, it blocks at most *timeout* seconds and raises
        the :class:`Full` exception if no free slot was available within that time.
        Otherwise (*block* is false), put an item on the queue if a free slot
        is immediately available, else raise the :class:`Full` exception (*timeout*
        is ignored in that case).
        """
        if self.maxsize is None or self.qsize() < self.maxsize:
            # there's a free slot, put an item right away
            self._put(item)
            if self.getters:
                self._schedule_unlock()
        elif self.hub is getcurrent():
            # We're in the mainloop, so we cannot wait; we can switch to other greenlets though.
            # Check if possible to get a free slot in the queue.
            while self.getters and self.qsize() and self.qsize() >= self.maxsize:
                getter = self.getters.popleft()
                getter.switch(getter)
            if self.qsize() < self.maxsize:
                self._put(item)
                return
            raise Full
        elif block:
            waiter = ItemWaiter(item, self)
            self.putters.append(waiter)
            timeout = Timeout._start_new_or_dummy(timeout, Full)
            try:
                if self.getters:
                    self._schedule_unlock()
                result = waiter.get()
                if result is not waiter:
                    raise InvalidSwitchError("Invalid switch into Queue.put: %r" % (result, ))
            finally:
                timeout.cancel()
                _safe_remove(self.putters, waiter)
        else:
            raise Full 
Example #22
Source File: connectionpool.py    From oss-ftp with MIT License
def _put_conn(self, conn):
        """
        Put a connection back into the pool.

        :param conn:
            Connection object for the current host and port as returned by
            :meth:`._new_conn` or :meth:`._get_conn`.

        If the pool is already full, the connection is closed and discarded
        because we exceeded maxsize. If connections are discarded frequently,
        then maxsize should be increased.

        If the pool is closed, then the connection will be closed and discarded.
        """
        try:
            self.pool.put(conn, block=False)
            return  # Everything is dandy, done.
        except AttributeError:
            # self.pool is None.
            pass
        except Full:
            # This should never happen if self.block == True
            log.warning(
                "Connection pool is full, discarding connection: %s" %
                self.host)

        # Connection never got put back into the pool, close it.
        if conn:
            conn.close() 
Example #23
Source File: _threading.py    From satori with Apache License 2.0
def put(self, item, block=True, timeout=None):
        """Put an item into the queue.

        If optional args 'block' is true and 'timeout' is None (the default),
        block if necessary until a free slot is available. If 'timeout' is
        a positive number, it blocks at most 'timeout' seconds and raises
        the Full exception if no free slot was available within that time.
        Otherwise ('block' is false), put an item on the queue if a free slot
        is immediately available, else raise the Full exception ('timeout'
        is ignored in that case).
        """
        self.not_full.acquire()
        try:
            if self.maxsize > 0:
                if not block:
                    if self._qsize() >= self.maxsize:
                        raise Full
                elif timeout is None:
                    while self._qsize() >= self.maxsize:
                        self.not_full.wait()
                elif timeout < 0:
                    raise ValueError("'timeout' must be a positive number")
                else:
                    endtime = _time() + timeout
                    while self._qsize() >= self.maxsize:
                        remaining = endtime - _time()
                        if remaining <= 0.0:
                            raise Full
                        self.not_full.wait(remaining)
            self._put(item)
            self.unfinished_tasks += 1
            self.not_empty.notify()
        finally:
            self.not_full.release() 
Example #24
Source File: connectionpool.py    From Safejumper-for-Desktop with GNU General Public License v2.0
def _put_conn(self, conn):
        """
        Put a connection back into the pool.

        :param conn:
            Connection object for the current host and port as returned by
            :meth:`._new_conn` or :meth:`._get_conn`.

        If the pool is already full, the connection is closed and discarded
        because we exceeded maxsize. If connections are discarded frequently,
        then maxsize should be increased.

        If the pool is closed, then the connection will be closed and discarded.
        """
        try:
            self.pool.put(conn, block=False)
            return  # Everything is dandy, done.
        except AttributeError:
            # self.pool is None.
            pass
        except Full:
            # This should never happen if self.block == True
            log.warning(
                "Connection pool is full, discarding connection: %s",
                self.host)

        # Connection never got put back into the pool, close it.
        if conn:
            conn.close() 
Example #25
Source File: thread_pool.py    From g3ar with BSD 2-Clause "Simplified" License
def feed(self, function, *vargs, **kwargs):
        """"""
        try:
            self._task_queue.put_nowait(tuple([function, vargs, kwargs]))
            return True
        except Full:
            #format_exc()
            return False
    
    #---------------------------------------------------------------------- 
Example #26
Source File: thread_pool.py    From g3ar with BSD 2-Clause "Simplified" License
def feed(self, function, *vargs, **kwargs):
        """"""
        try:
            self._task_queue.put_nowait(tuple([function, vargs, kwargs]))
            return True
        except Full:
            #format_exc()
            return False

    #---------------------------------------------------------------------- 
Example #27
Source File: connectionpool.py    From crunchy-xml-decoder with GNU General Public License v2.0
def _put_conn(self, conn):
        """
        Put a connection back into the pool.

        :param conn:
            Connection object for the current host and port as returned by
            :meth:`._new_conn` or :meth:`._get_conn`.

        If the pool is already full, the connection is closed and discarded
        because we exceeded maxsize. If connections are discarded frequently,
        then maxsize should be increased.

        If the pool is closed, then the connection will be closed and discarded.
        """
        try:
            self.pool.put(conn, block=False)
            return  # Everything is dandy, done.
        except AttributeError:
            # self.pool is None.
            pass
        except Full:
            # This should never happen if self.block == True
            log.warning(
                "Connection pool is full, discarding connection: %s" %
                self.host)

        # Connection never got put back into the pool, close it.
        if conn:
            conn.close() 
Example #28
Source File: connectionpool.py    From oss-ftp with MIT License
def _put_conn(self, conn):
        """
        Put a connection back into the pool.

        :param conn:
            Connection object for the current host and port as returned by
            :meth:`._new_conn` or :meth:`._get_conn`.

        If the pool is already full, the connection is closed and discarded
        because we exceeded maxsize. If connections are discarded frequently,
        then maxsize should be increased.

        If the pool is closed, then the connection will be closed and discarded.
        """
        try:
            self.pool.put(conn, block=False)
            return  # Everything is dandy, done.
        except AttributeError:
            # self.pool is None.
            pass
        except Full:
            # This should never happen if self.block == True
            log.warning(
                "Connection pool is full, discarding connection: %s" %
                self.host)

        # Connection never got put back into the pool, close it.
        if conn:
            conn.close() 
Example #29
Source File: connectionpool.py    From oss-ftp with MIT License
def _put_conn(self, conn):
        """
        Put a connection back into the pool.

        :param conn:
            Connection object for the current host and port as returned by
            :meth:`._new_conn` or :meth:`._get_conn`.

        If the pool is already full, the connection is closed and discarded
        because we exceeded maxsize. If connections are discarded frequently,
        then maxsize should be increased.

        If the pool is closed, then the connection will be closed and discarded.
        """
        try:
            self.pool.put(conn, block=False)
            return # Everything is dandy, done.
        except AttributeError:
            # self.pool is None.
            pass
        except Full:
            # This should never happen if self.block == True
            log.warning(
                "Connection pool is full, discarding connection: %s" %
                self.host)

        # Connection never got put back into the pool, close it.
        if conn:
            conn.close() 
Example #30
Source File: multiproc_data.py    From dynamic-training-with-apache-mxnet-on-aws with Apache License 2.0
def _proc_loop(proc_id, alive, queue, fn):
        """
        Process loop for generating data

        Parameters
        ----------
        proc_id: int
            Process id
        alive: multiprocessing.Value
            variable for signaling whether process should continue or not
        queue: multiprocessing.Queue
            queue for passing data back
        fn: function
            function object that returns a sample to be pushed into the queue
        """
        print("proc {} started".format(proc_id))
        try:
            while alive.value:
                data = fn()
                put_success = False
                while alive.value and not put_success:
                    try:
                        queue.put(data, timeout=0.5)
                        put_success = True
                    except QFullExcept:
                        # print("Queue Full")
                        pass
        except KeyboardInterrupt:
            print("W: interrupt received, stopping process {} ...".format(proc_id))
        print("Closing process {}".format(proc_id))
        queue.close()