Python gevent.queue.Queue() Examples
The following are 30 code examples of gevent.queue.Queue(). You can vote up the ones you like or vote down the ones you don't like, and go to the original project or source file by following the links above each example. You may also want to check out all available functions/classes of the module gevent.queue, or try the search function.
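Before diving into the project code, here is a minimal, hypothetical sketch (not taken from any project below) of the core gevent.queue.Queue API: put() and get() block cooperatively, yielding to other greenlets rather than blocking the whole thread.

import gevent
from gevent.queue import Queue

def producer(queue):
    for i in range(3):
        queue.put(i)               # only blocks if the queue is bounded and full
        gevent.sleep(0)            # yield so the consumer can run

def consumer(queue):
    for _ in range(3):
        print('got', queue.get())  # blocks until an item is available

q = Queue()
gevent.joinall([gevent.spawn(producer, q), gevent.spawn(consumer, q)])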
Example #1
Source File: test_http_concurrent_limit.py, from huskar (MIT License), 6 votes

def test_anonymous_no_concurrent_limit_because_remain_count(
        client, client_ip, mocker, url):
    mocker.patch.object(settings, 'CONCURRENT_LIMITER_SETTINGS', {
        '__anonymous__': {
            'ttl': 100,
            'capacity': 100,
        }
    })

    def worker(queue):
        response = client.get(url)
        if response.status_code == 429:
            queue.put(429)

    greenlets = []
    queue = Queue()
    for _ in range(3):
        greenlets.append(gevent.spawn(worker, queue))
    gevent.joinall(greenlets)

    with raises(Empty):
        queue.get_nowait()
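A note on the assertion pattern used in this and the other huskar tests: Queue.get_nowait() raises gevent.queue.Empty when nothing was ever put, so wrapping it in raises(Empty) asserts that no worker received a 429 response. A tiny self-contained illustration of just that pattern (not project code):

import gevent
from gevent.queue import Queue, Empty

queue = Queue()

def worker(was_limited):
    if was_limited:
        queue.put(429)            # only rate-limited requests report back

gevent.joinall([gevent.spawn(worker, False) for _ in range(3)])

try:
    queue.get_nowait()
except Empty:
    print('no worker was limited')  # expected: the queue stayed empty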
Example #2
Source File: reader.py, from gnsq (BSD 3-Clause "New" or "Revised" License), 6 votes

def __init__(self, *args, **kwargs):
    """Use :class:`~gnsq.Consumer` instead.

    .. deprecated:: 1.0.0
    """
    setattr(self, 'async', kwargs.pop('async', False))

    max_concurrency = kwargs.pop('max_concurrency', 0)

    if max_concurrency < 0:
        self.max_concurrency = cpu_count()
    else:
        self.max_concurrency = max_concurrency

    if self.max_concurrency:
        self.queue = Queue()
    else:
        self.queue = None

    super(Reader, self).__init__(*args, **kwargs)
Example #3
Source File: test_http_concurrent_limit.py, from huskar (MIT License), 6 votes

def test_logged_no_concurrent_limit_because_remain_count(
        client, client_ip, test_user, test_token, mocker):
    mocker.patch.object(settings, 'CONCURRENT_LIMITER_SETTINGS', {
        test_user.username: {
            'ttl': 100,
            'capacity': 100,
        }
    })

    def worker(queue):
        response = client.get(
            '/api/busy_with_login', headers={
                'Authorization': test_token,
            })
        if response.status_code == 429:
            queue.put(429)

    greenlets = []
    queue = Queue()
    for _ in range(3):
        greenlets.append(gevent.spawn(worker, queue))
    gevent.joinall(greenlets)

    with raises(Empty):
        queue.get_nowait()
Example #4
Source File: get_free_proxies.py, from nike_purchase_system (GNU General Public License v3.0), 6 votes

def main():
    print('Note: the number of proxies is fixed. The IPs come from 66ip.cn and are '
          'sorted by time, so fetch periodically to stay in sync with the site.')
    num = input('Enter the number of proxies to fetch (500-2000 recommended): ')
    r = requests.get(
        'http://www.66ip.cn/mo.php?sxb=&tqsl={}&port=&export=&ktip=&sxa=&submit=%CC%E1++%C8%A1&textarea=http%3A%2F%2Fwww.66ip.cn%2F%3Fsxb%3D%26tqsl%3D10%26ports%255B%255D2%3D%26ktip%3D%26sxa%3D%26radio%3Dradio%26submit%3D%25CC%25E1%2B%2B%25C8%25A1'.format(num))
    r.encoding = 'gb2312'
    result = re.findall(r'\d+[.]\d+[.]\d+[.]\d+[:]\d+', r.text)
    gevent_pool = gevent.pool.Pool(len(result))
    print(len(result))
    queue = Queue()
    for proxy in result:
        gevent_pool.apply_async(validate, (proxy, queue))
    gevent_pool.join()
    useful_proxies = []
    queue.put(StopIteration)
    with open('免费可用.txt', 'w') as f:  # original filename kept ("free and usable")
        for item in queue:
            useful_proxies.append(item)
            f.write(item + '\n')
    print('Usable proxies:', len(useful_proxies))
    print('Pass rate:', str(len(useful_proxies)/len(result)*100) + '%')
    return useful_proxies
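Example #4 relies on a gevent-specific idiom that also appears in Example #16 below: a gevent Queue is iterable, and putting the StopIteration class into it acts as a sentinel that ends the for loop once every earlier item has been consumed. A small sketch of just that idiom (the proxy strings are made up):

from gevent.queue import Queue

queue = Queue()
for proxy in ['1.2.3.4:8080', '5.6.7.8:3128']:   # stand-in validated proxies
    queue.put(proxy)
queue.put(StopIteration)    # sentinel: iteration stops when it is reached

for item in queue:          # drains the earlier items, then exits cleanly
    print(item)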
Example #5
Source File: eth_service.py, from pyethapp (BSD 3-Clause "New" or "Revised" License), 6 votes

def __init__(self, app):
    self.config = app.config
    self.db = app.services.db
    assert self.db is not None
    super(ChainService, self).__init__(app)
    log.info('initializing chain')
    coinbase = app.services.accounts.coinbase
    self.chain = Chain(self.db, new_head_cb=self._on_new_head, coinbase=coinbase)
    log.info('chain at', number=self.chain.head.number)
    self.synchronizer = Synchronizer(self, force_sync=None)

    self.block_queue = Queue(maxsize=self.block_queue_size)
    self.transaction_queue = Queue(maxsize=self.transaction_queue_size)
    self.add_blocks_lock = False
    self.add_transaction_lock = gevent.lock.Semaphore()
    self.broadcast_filter = DuplicatesFilter()
    self.on_new_head_cbs = []
    self.on_new_head_candidate_cbs = []
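In Example #5 both queues are created with maxsize, which turns put() into a backpressure point: when the queue is full, the putting greenlet blocks (yielding to the hub) until a consumer frees a slot with get(). A minimal sketch of that behaviour with illustrative values (not pyethapp code):

import gevent
from gevent.queue import Queue

block_queue = Queue(maxsize=2)        # bounded: put() blocks once 2 items are pending

def producer():
    for n in range(4):
        block_queue.put(n)            # blocks here when the queue is full
        print('queued', n)

def consumer():
    while True:
        gevent.sleep(0.1)             # simulate slow processing
        print('processed', block_queue.get())

gevent.spawn(consumer)
gevent.spawn(producer).join()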
Example #6
Source File: watcher.py, from huskar (MIT License), 6 votes

def __init__(self, tree_hub, from_application_name=None,
             from_cluster_name=None, with_initial=False, life_span=None,
             metrics_tag_from=None):
    self.hub = tree_hub

    # The optional route context
    self.from_application_name = from_application_name
    self.from_cluster_name = from_cluster_name

    self.with_initial = with_initial
    self.queue = Queue()
    self.holders = set()
    self.cluster_maps = collections.defaultdict(ClusterMap)
    self.cluster_whitelist = collections.defaultdict(set)
    self.watch_map = collections.defaultdict(set)
    self.life_span = life_span
    self._metrics_tag_from = metrics_tag_from
Example #7
Source File: test_http_rate_limit.py, from huskar (MIT License), 6 votes

def test_logged_no_rate_limit_because_remain_count(
        client, client_ip, test_user, test_token, mocker):
    mocker.patch.object(settings, 'RATE_LIMITER_SETTINGS', {
        test_user.username: {
            'rate': 100,
            'capacity': 300,
        }
    })

    def worker(queue):
        response = client.get(
            '/api/need_login', headers={
                'Authorization': test_token,
            })
        if response.status_code == 429:
            queue.put(429)

    greenlets = []
    queue = Queue()
    for _ in range(3):
        greenlets.append(gevent.spawn(worker, queue))
    gevent.joinall(greenlets)

    with raises(Empty):
        queue.get_nowait()
Example #8
Source File: test_http_rate_limit.py, from huskar (MIT License), 6 votes

def test_anonymous_no_rate_limit_because_remain_count(
        client, client_ip, mocker, url):
    mocker.patch.object(settings, 'RATE_LIMITER_SETTINGS', {
        '__anonymous__': {
            'rate': 100,
            'capacity': 300,
        }
    })

    def worker(queue):
        response = client.get(url)
        if response.status_code == 429:
            queue.put(429)

    greenlets = []
    queue = Queue()
    for _ in range(3):
        greenlets.append(gevent.spawn(worker, queue))
    gevent.joinall(greenlets)

    with raises(Empty):
        queue.get_nowait()
Example #9
Source File: crawl.py, from girlfriend (MIT License), 6 votes

def _concurrent_execute(self, context, start_req, parser, pool, pool_size):
    queue = Queue()  # task queue

    # put the initial requests into the task queue
    for r in start_req:
        queue.put_nowait(r)

    if pool is None:
        pool = GeventPool(pool_size)

    greenlets = []
    while True:
        try:
            req = self._check_req(queue.get(timeout=1))
            if req.parser is None:
                req.parser = parser
            greenlets.append(pool.spawn(req, context, queue))
        except Empty:
            break

    return [greenlet.get() for greenlet in greenlets]
Example #10
Source File: test_http_rate_limit.py, from huskar (MIT License), 6 votes

def test_anonymous_with_rate_limit(client, client_ip, mocker, configs, url):
    cfg = deepcopy(configs[0])
    if '127.0.0.1' in cfg:
        cfg[client_ip] = cfg.pop('127.0.0.1')
    mocker.patch.object(settings, 'RATE_LIMITER_SETTINGS', cfg)

    def worker(queue):
        response = client.get(url)
        if response.status_code == 429:
            queue.put(429)

    greenlets = []
    queue = Queue()
    for _ in range(5):
        greenlets.append(gevent.spawn(worker, queue))
    gevent.joinall(greenlets)

    assert queue.get_nowait() == 429
Example #11
Source File: tests_basic.py, from channelstream (BSD 3-Clause "New" or "Revised" License), 6 votes

def test_messages(self, test_uuids):
    user = User("test_user")
    connection = Connection("test_user", conn_id=test_uuids[1])
    connection.queue = Queue()
    connection2 = Connection("test_user", conn_id=test_uuids[2])
    connection2.queue = Queue()
    user.add_connection(connection)
    user.add_connection(connection2)
    user.add_message(
        {
            "type": "message",
            "no_history": False,
            "pm_users": [],
            "exclude_users": [],
        }
    )
    assert len(user.connections) == 2
    assert len(user.connections[0].queue.get()) == 1
    assert len(user.connections[1].queue.get()) == 1
Example #12
Source File: base.py, from powerpool (BSD 2-Clause "Simplified" License), 6 votes

def _run_queue_item(self, item):
    name, args, kwargs = item
    if __debug__:
        self.logger.debug("Queue running {} with args '{}' kwargs '{}'"
                          .format(name, args, kwargs))
    try:
        func = getattr(self, name, None)
        if func is None:
            raise NotImplementedError(
                "Item {} has been enqueued that has no valid function!"
                .format(name))
        func(*args, **kwargs)
    except self.queue_exceptions as e:
        self.logger.error("Unable to process queue item, retrying! "
                          "{} Name: {}; Args: {}; Kwargs: {};"
                          .format(e, name, args, kwargs))
        return "retry"
    except Exception:
        # Log any unexpected problem, but don't retry because we might
        # end up endlessly retrying with same failure
        self.logger.error("Unknown error, queue data discarded! "
                          "Name: {}; Args: {}; Kwargs: {};"
                          .format(name, args, kwargs), exc_info=True)
Example #13
Source File: test_http_rate_limit.py, from huskar (MIT License), 5 votes

def test_default_anonymous_no_rate_limit(
        client, client_ip, switch_on, mocker, url):
    def fake_switch(name, default=True):
        if name == SWITCH_ENABLE_RATE_LIMITER:
            return switch_on
        return default
    mocker.patch.object(switch, 'is_switched_on', fake_switch)

    if not switch_on:
        mocker.patch.object(settings, 'RATE_LIMITER_SETTINGS', {
            '__default__': {
                'rate': 1,
                'capacity': 3,
            }
        })

    def worker(queue):
        response = client.get(url)
        if response.status_code == 429:
            queue.put(True)

    greenlets = []
    queue = Queue()
    for _ in range(3):
        greenlets.append(gevent.spawn(worker, queue))
    gevent.joinall(greenlets)

    with raises(Empty):
        queue.get_nowait()
Example #14
Source File: test_http_rate_limit.py, from huskar (MIT License), 5 votes

def test_logged_no_rate_limit_because_redis_error(
        client, client_ip, test_user, test_token, mocker, error_method):
    mocker.patch.object(settings, 'RATE_LIMITER_SETTINGS', {
        test_user.username: {
            'rate': 1,
            'capacity': 3,
        }
    })
    mocker.patch.object(redis_client, error_method, side_effect=Exception)

    def worker(queue):
        response = client.get(
            '/api/need_login', headers={
                'Authorization': test_token,
            })
        if response.status_code == 429:
            queue.put(429)

    greenlets = []
    queue = Queue()
    for _ in range(3):
        greenlets.append(gevent.spawn(worker, queue))
    gevent.joinall(greenlets)

    with raises(Empty):
        queue.get_nowait()
Example #15
Source File: test_http_rate_limit.py, from huskar (MIT License), 5 votes

def test_logged_with_rate_limit(
        client, client_ip, mocker, test_user, test_token, configs,
        use_username):
    if '127.0.0.1' in configs:
        configs[client_ip] = configs.pop('127.0.0.1')
    if use_username:
        configs.update({
            test_user.username: {
                'rate': 1,
                'capacity': 3,
            }
        })
    mocker.patch.object(settings, 'RATE_LIMITER_SETTINGS', configs)

    def worker(queue):
        response = client.get('/api/need_login', headers={
            'Authorization': test_token,
        })
        if response.status_code == 429:
            queue.put(429)

    greenlets = []
    queue = Queue()
    for _ in range(10):
        greenlets.append(gevent.spawn(worker, queue))
    gevent.joinall(greenlets)

    assert queue.get_nowait() == 429
Example #16
Source File: base.py, from powerpool (BSD 2-Clause "Simplified" License), 5 votes

def _flush_queue(self, exit_exc=None, caller=None):
    sleep(1)
    self.logger.info("Flushing a queue of size {}"
                     .format(self.queue.qsize()))
    self.queue.put(StopIteration)
    for item in self.queue:
        self._run_queue_item(item)
    self.logger.info("Queue flush complete, Exit.")
Example #17
Source File: test_http_concurrent_limit.py, from huskar (MIT License), 5 votes

def test_logged_concurrent_limit_with_redis_error(
        client, client_ip, test_user, test_token, mocker, error_method,
        cause_limit):
    mocker.patch.object(settings, 'CONCURRENT_LIMITER_SETTINGS', {
        test_user.username: {
            'ttl': 100,
            'capacity': 1,
        }
    })
    mocker.patch.object(redis_client, error_method, side_effect=Exception)

    def worker(queue):
        response = client.get(
            '/api/busy_with_login', headers={
                'Authorization': test_token,
            })
        if response.status_code == 429:
            queue.put(429)

    greenlets = []
    queue = Queue()
    for _ in range(3):
        greenlets.append(gevent.spawn(worker, queue))
    gevent.joinall(greenlets)

    if cause_limit:
        assert queue.get_nowait() == 429
    else:
        with raises(Empty):
            queue.get_nowait()
Example #18
Source File: test_http_concurrent_limit.py, from huskar (MIT License), 5 votes

def test_login_with_concurrent_limit(
        client, client_ip, mocker, test_user, test_token, configs,
        use_username):
    if '127.0.0.1' in configs:
        configs[client_ip] = configs.pop('127.0.0.1')
    if use_username:
        configs.update({
            test_user.username: {
                'ttl': 100,
                'capacity': 1,
            }
        })
    mocker.patch.object(settings, 'CONCURRENT_LIMITER_SETTINGS', configs)

    def worker(queue):
        response = client.get('/api/busy_with_login', headers={
            'Authorization': test_token,
        })
        if response.status_code != 200:
            queue.put(response.status_code)

    greenlets = []
    queue = Queue()
    for _ in range(3):
        greenlets.append(gevent.spawn(worker, queue))
    gevent.joinall(greenlets)

    assert queue.get_nowait() == 429
Example #19
Source File: agent_server.py, from powerpool (BSD 2-Clause "Simplified" License), 5 votes

def __init__(self, sock, address, id, server, config, logger, reporter):
    self.logger = logger
    self.sock = sock
    self.server = server
    self.config = config
    self.reporter = reporter

    # Seconds before sending keepalive probes
    sock.setsockopt(socket.SOL_TCP, socket.TCP_KEEPIDLE, 120)
    # Interval in seconds between keepalive probes
    sock.setsockopt(socket.SOL_TCP, socket.TCP_KEEPINTVL, 1)
    # Failed keepalive probes before declaring other end dead
    sock.setsockopt(socket.SOL_TCP, socket.TCP_KEEPCNT, 5)

    self._disconnected = False
    self._authenticated = False
    self._client_state = None
    self._authed = {}
    self._client_version = None
    self._connection_time = time()
    self._id = id

    # where we put all the messages that need to go out
    self.write_queue = Queue()
    self.fp = None
    self._stopped = False
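Example #19 only creates self.write_queue; the code that drains it is not shown here. A common pattern in this kind of connection class is a dedicated writer greenlet that blocks on the queue and writes each message to the socket, so any other greenlet can enqueue output without touching the socket directly. A hedged sketch of what such a loop could look like (the method name, sentinel, and use of self.fp are assumptions, not powerpool's actual code):

def send_loop(self):
    # Hypothetical writer greenlet: drain write_queue and send each message.
    while not self._stopped:
        message = self.write_queue.get()   # blocks cooperatively until a message arrives
        if message is None:                # assumed shutdown sentinel
            break
        self.fp.write(message)
        self.fp.flush()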
Example #20
Source File: base.py, from powerpool (BSD 2-Clause "Simplified" License), 5 votes

def _start_queue(self):
    self.queue = Queue()
Example #21
Source File: test_http_concurrent_limit.py, from huskar (MIT License), 5 votes

def test_default_anonymous_no_concurrent_limit(
        client, client_ip, switch_on, mocker, url):
    def fake_switch(name, default=True):
        if name == SWITCH_ENABLE_CONCURRENT_LIMITER:
            return switch_on
        return default
    mocker.patch.object(switch, 'is_switched_on', fake_switch)

    if not switch_on:
        mocker.patch.object(settings, 'CONCURRENT_LIMITER_SETTINGS', {
            '__default__': {
                'ttl': 100,
                'capacity': 1,
            }
        })

    def worker(queue):
        response = client.get(url)
        if response.status_code == 429:
            queue.put(429)

    greenlets = []
    queue = Queue()
    for _ in range(3):
        greenlets.append(gevent.spawn(worker, queue))
    gevent.joinall(greenlets)

    with raises(Empty):
        queue.get_nowait()
Example #22
Source File: celery_reporter.py, from powerpool (BSD 2-Clause "Simplified" License), 5 votes

def __init__(self, config):
    self._configure(config)
    super(CeleryReporter, self).__init__()

    # setup our celery agent and monkey patch
    from celery import Celery
    self.celery = Celery()
    self.celery.conf.update(self.config['celery'])

    self.queue = Queue()
    self._aggr_shares = {}
Example #23
Source File: concurrent.py, from py-bson-rpc (Mozilla Public License 2.0), 5 votes

def _new_queue(*args, **kwargs):
    from six.moves.queue import Queue
    return Queue(*args, **kwargs)
Example #24
Source File: concurrent.py, from py-bson-rpc (Mozilla Public License 2.0), 5 votes

def _new_gevent_queue(*args, **kwargs):
    from gevent.queue import Queue
    return Queue(*args, **kwargs)
Example #25
Source File: concurrent.py, from py-bson-rpc (Mozilla Public License 2.0), 5 votes

def _new_queue(*args, **kwargs):
    from six.moves.queue import Queue
    return Queue(*args, **kwargs)
Example #26
Source File: concurrent.py, from py-bson-rpc (Mozilla Public License 2.0), 5 votes

def _new_gevent_queue(*args, **kwargs):
    from gevent.queue import Queue
    return Queue(*args, **kwargs)
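Examples #23 through #26 are py-bson-rpc's queue factories: the library can run on plain threads (six.moves.queue.Queue, whose get() blocks the whole OS thread) or on gevent (gevent.queue.Queue, whose get() blocks only the calling greenlet). A plausible way such factories might be dispatched, shown only as an illustration (the model flag and the new_queue name are assumptions, not the library's API):

def new_queue(concurrency_model, *args, **kwargs):
    # Illustrative dispatcher: pick a queue implementation per concurrency model.
    if concurrency_model == 'gevent':
        from gevent.queue import Queue        # cooperative, greenlet-friendly
    else:
        from six.moves.queue import Queue     # thread-safe, blocks the thread
    return Queue(*args, **kwargs)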
Example #27
Source File: dbpool.py, from sqlchain (MIT License), 5 votes

def __init__(self, connectionstring, poolsize, modulename='pyodbc'):
    self.conns = [DBConnection_(socket_.socketpair()) for x in xrange(poolsize)]
    self.threads = [threading.Thread(target=self.worker, args=(self.conns[x],))
                    for x in xrange(poolsize)]
    self.queue = queue.Queue(poolsize)
    for i in xrange(poolsize):
        self.threads[i].daemon = True
        self.threads[i].start()
        self.conns[i].connect(connectionstring, modulename)
        self.queue.put(self.conns[i])
    if KEEPALIVE_PERIOD > 0:
        self.monitor = gevent.spawn(self.keepalive)
Example #28
Source File: broker_web.py, from algobroker (BSD 2-Clause "Simplified" License), 5 votes

def subscribe():
    def gen():
        q = Queue()
        subscriptions.append(q)
        try:
            while True:
                result = q.get()
                id = result['id']
                ev = ServerSentEvent(result, id)
                yield ev.encode()
        except GeneratorExit:  # Or maybe use flask signals
            subscriptions.remove(q)
    return Response(gen(), mimetype="text/event-stream")
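Example #28 gives every Server-Sent Events subscriber its own gevent Queue and parks the response generator on q.get(); removing the queue on GeneratorExit cleans up when the client disconnects. The publishing side is not shown; a broadcast would typically walk the subscriptions list and put the event into each queue, roughly like this (hypothetical helper, not part of broker_web.py):

def publish(subscriptions, event):
    # Hypothetical fan-out: deliver one event to every subscriber's queue.
    for q in list(subscriptions):   # iterate over a copy; generators may remove themselves
        q.put(event)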
Example #29
Source File: actions.py, from sync-engine (GNU Affero General Public License v3.0), 5 votes

def __init__(self, syncback_id, process_number, total_processes, poll_interval=1,
             retry_interval=30, num_workers=NUM_PARALLEL_ACCOUNTS,
             batch_size=10):
    self.process_number = process_number
    self.total_processes = total_processes
    self.poll_interval = poll_interval
    self.retry_interval = retry_interval
    self.batch_size = batch_size
    self.keep_running = True
    self.workers = gevent.pool.Group()
    # Dictionary account_id -> semaphore to serialize action syncback for
    # any particular account.
    # TODO(emfree): We really only need to serialize actions that operate
    # on any given object. But IMAP actions are already effectively
    # serialized by using an IMAP connection pool of size 1, so it doesn't
    # matter too much.
    self.account_semaphores = defaultdict(lambda: BoundedSemaphore(1))
    # This SyncbackService performs syncback for only and all the accounts
    # on shards it is responsible for; shards are divided up between
    # running SyncbackServices.
    self.log = logger.new(component='syncback')
    syncback_assignments = config.get("SYNCBACK_ASSIGNMENTS", {})
    if syncback_id in syncback_assignments:
        self.keys = [key for key in engine_manager.engines
                     if key in syncback_assignments[syncback_id] and
                     key % total_processes == process_number]
    else:
        self.log.warn("No shards assigned to syncback server",
                      syncback_id=syncback_id)
        self.keys = []

    self.log = logger.new(component='syncback')
    self.num_workers = num_workers
    self.num_idle_workers = 0
    self.worker_did_finish = gevent.event.Event()
    self.worker_did_finish.clear()
    self.task_queue = Queue()
    self.running_action_ids = set()
    gevent.Greenlet.__init__(self)
Example #30
Source File: crispin.py, from sync-engine (GNU Affero General Public License v3.0), 5 votes

def __init__(self, account_id, num_connections, readonly):
    log.info('Creating Crispin connection pool', account_id=account_id,
             num_connections=num_connections)
    self.account_id = account_id
    self.readonly = readonly
    self._queue = Queue(num_connections, items=num_connections * [None])
    self._sem = BoundedSemaphore(num_connections)
    self._set_account_info()
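Example #30 uses the Queue(maxsize, items=...) form to pre-seed the queue with num_connections placeholder slots, turning it into a simple connection pool: get() checks a slot out (blocking when all are in use) and put() returns it. A hedged, self-contained sketch of the same check-out/check-in idea (make_connection and the pool size are illustrative, not sync-engine's code):

from contextlib import contextmanager
from gevent.queue import Queue

pool = Queue(3, items=3 * [None])      # three lazily-filled connection slots

@contextmanager
def get_connection(make_connection):
    conn = pool.get()                  # blocks if every slot is checked out
    try:
        conn = conn or make_connection()   # create the real connection on first use
        yield conn
    finally:
        pool.put(conn)                 # return the (now real) connection for reuse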