Python gevent.pool.Pool() Examples

The following are 30 code examples of gevent.pool.Pool(). You can vote up the examples you like or vote down the ones you don't like, and go to the original project or source file by following the links above each example. You may also want to check out all available functions/classes of the module gevent.pool, or try the search function.
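
Before the project examples, here is a minimal sketch of the pattern most of them share: create a Pool with a concurrency limit, spawn greenlets into it, and join to wait for them all. The fetch function and the pool size of 10 are illustrative assumptions, not taken from any project listed below.

from gevent.pool import Pool

def fetch(n):
    # stand-in for real work (an HTTP request, a DNS lookup, ...)
    return n * n

pool = Pool(10)                                    # at most 10 greenlets run at once
jobs = [pool.spawn(fetch, i) for i in range(100)]  # spawn blocks once the pool is full
pool.join()                                        # wait until every greenlet has finished
results = [job.value for job in jobs]              # Greenlet.value holds each return value

# The same pool also offers map-style helpers, e.g.:
# for result in pool.imap_unordered(fetch, range(100)):
#     ...

Several of the examples below rely on exactly these pieces: spawn/join, imap_unordered, or passing a Pool as the spawn argument of a gevent server.
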
Example #1
Source File: cli.py    From mailur with GNU General Public License v3.0    7 votes
def web():
    import time  # needed for time.sleep() in the KeyboardInterrupt handler below
    from gevent.subprocess import run
    from gevent.pool import Pool

    def api():
        run('bin/run-web', shell=True)

    def webpack():
        run('command -v yarn && yarn run dev || npm run dev', shell=True)

    try:
        pool = Pool()
        pool.spawn(api)
        pool.spawn(webpack)
        pool.join()
    except KeyboardInterrupt:
        time.sleep(1) 
Example #2
Source File: nike_register_gui.py    From nike_purchase_system with GNU General Public License v3.0    6 votes
def register(self):
        f = open('fail.txt', 'w')
        f.close()
        f = open('success.txt', 'w')
        f.close()
        p = Pool(200)
        f = open('邮箱.txt', 'r')  # '邮箱.txt' is the email list ("mailbox.txt"); open read-only so it is not truncated
        # emails = f.read().strip().split('\n')
        emails = list(map(lambda x: x.split('----')[0], f.read().strip().split('\n')))
        if emails and emails[0] == "":
            return
        # emails = ['nkzhuanyong' + str(i) + '@sina.com' for i in range(200)]
        passw = 'Aa123456'
        for i in emails:
            p.apply_async(self.reg, args=(i, passw))
        p.join()
        self.signalStatus.emit('完成')  # '完成' = "done"
Example #3
Source File: subDomainsBrute.py    From ZeroScan with MIT License    6 votes
def _load_dns_servers(self):
        print '[+] Validate DNS servers ...'
        self.dns_servers = []
        pool = Pool(30)
        for server in open('dict/dns_servers.txt').xreadlines():
            server = server.strip()
            if server:
                pool.apply_async(self._test_server, (server,))
        pool.join()

        self.dns_count = len(self.dns_servers)
        sys.stdout.write('\n')
        print '[+] Found %s available DNS Servers in total' % self.dns_count
        if self.dns_count == 0:
            print '[ERROR] No DNS Servers available.'
            sys.exit(-1) 
Example #4
Source File: crawl.py    From girlfriend with MIT License    6 votes
def _concurrent_execute(self, context, start_req, parser, pool, pool_size):
        queue = Queue()  # task queue

        # put the initial requests onto the task queue
        for r in start_req:
            queue.put_nowait(r)

        if pool is None:
            pool = GeventPool(pool_size)

        greenlets = []

        while True:
            try:
                req = self._check_req(queue.get(timeout=1))
                if req.parser is None:
                    req.parser = parser
                greenlets.append(pool.spawn(req, context, queue))
            except Empty:
                break

        return [greenlet.get() for greenlet in greenlets] 
Example #5
Source File: importer.py    From cdm with Apache License 2.0    6 votes
def load(self, table):
        cache = {}

        def save(row):
            (query, values) = self.get_insert(row, table)
            try:
                prepared = cache[query]
            except KeyError:  # not prepared yet; prepare once and cache it
                prepared = self.session.prepare(query)
                cache[query] = prepared
            bound = prepared.bind(values)
            self.session.execute(bound)

        pool = Pool(100)
        i = 0
        print "Loading {}".format(table)
        with ProgressBar(max_value=len(self.dataframe)) as p:
            for _ in pool.imap_unordered(save, self.iter()):
                i += 1
                if i % 10 == 0:
                    p.update(i) 
Example #6
Source File: bulk_render.py    From arxiv-vanity with Apache License 2.0    6 votes
def run(self, id_filename):
        s3 = S3Boto3Storage().connection
        obj = s3.Object(self.output_bucket, id_filename)
        arxiv_id_str = obj.get()["Body"].read().decode("utf-8")
        arxiv_ids = [s.strip() for s in arxiv_id_str.split() if s.strip()]

        # We can't access database inside our gevent pool because of max
        # connections, so first figure out which IDs we actually want to
        # render.
        arxiv_ids, source_paths = self.filter_unrenderable_ids(arxiv_ids)

        # Stagger the starting of jobs a bit so we don't break Hyper.sh
        def slow_arxiv_ids():
            for arxiv_id in arxiv_ids:
                yield arxiv_id
                time.sleep(0.1)

        pool = Pool(self.concurrency)
        manifest = pool.imap_unordered(self.render, slow_arxiv_ids(), source_paths)
        # Failed renders are None
        manifest = (obj for obj in manifest if obj)
        # Read the iterator, starting the actual processing
        manifest = list(manifest)
        self.write_manifest(manifest) 
Example #7
Source File: imap.py    From mailur with GNU General Public License v3.0    6 votes
def call_async(self, fn, *args):
        if not self.batches:
            return self.call(fn, *args)

        def get_exceptions():
            return [j.exception for j in jobs if j.exception]

        jobs = []
        pool = Pool(self.threads)
        for f in self._call(fn, *args):
            if pool.wait_available():
                if get_exceptions():
                    break
                jobs.append(pool.spawn(f))
        pool.join()

        exceptions = get_exceptions()
        if exceptions:
            raise ValueError('Exception in the pool: %s' % exceptions)
        return (f.value for f in jobs) 
Example #8
Source File: imap.py    From mailur with GNU General Public License v3.0    6 votes
def multiappend(con, box, msgs, *, batch=None, threads=10):
    if not msgs:
        return

    if batch and len(msgs) > batch:
        def multiappend_inner(num, few):
            with con.new() as c:
                res = multiappend(c, box, few)
                log.debug('#%s multiappend %s messages', num, len(few))
                return res

        pool = Pool(threads)
        jobs = [
            pool.spawn(multiappend_inner, num, msgs[i:i+batch])
            for num, i in enumerate(range(0, len(msgs), batch))
        ]
        pool.join(raise_error=True)
        return ','.join(j.value for j in jobs)

    with con.lock:
        return _multiappend(con, box, msgs) 
Example #9
Source File: engine.py    From NoXss with MIT License    6 votes
def deduplicate(self, url_list):
        print 'Start to deduplicate for all urls.'
        filtered_path = self.file + '.filtered'
        if os.path.exists(filtered_path):
            print '%s has been filtered as %s.' % (self.file, filtered_path)
        with open(filtered_path) as f:
                filtered = f.read().split('\n')
                return filtered
        filtered = []
        # result = map(filter, url_list)
        from multiprocessing import cpu_count
        from multiprocessing.pool import Pool
        p = Pool(cpu_count())
        result = p.map(url_filter, url_list)
        for i in result:
            if isinstance(i, str):
                filtered.append(i)
        with open(filtered_path, 'w') as f:
            f.write('\n'.join(filtered))
        print 'Saved filtered urls to %s.' % filtered_path
        return filtered 
Example #10
Source File: engine.py    From NoXss with MIT License    6 votes
def verify_async(case_list, coroutine):
        """
        Verify cases concurrently using gevent.
        :param case_list:
        :param coroutine:
        :return:
        """
        from gevent import monkey
        monkey.patch_all()
        result = []
        geventPool = pool.Pool(coroutine)
        tasks = [geventPool.spawn(Verify.request_and_verify, case) for case in case_list]
        gevent.joinall(tasks)
        for i in tasks:
            if i.value is not None:
                result.append(i.value)
        print_info('Total Verify-Case is: %s, %s error happened.' % (len(case_list), Verify.ERROR_COUNT))
        return result 
Example #11
Source File: baseserver.py    From PhonePi_SampleServer with MIT License    6 votes
def set_spawn(self, spawn):
        if spawn == 'default':
            self.pool = None
            self._spawn = self._spawn
        elif hasattr(spawn, 'spawn'):
            self.pool = spawn
            self._spawn = spawn.spawn
        elif isinstance(spawn, integer_types):
            from gevent.pool import Pool
            self.pool = Pool(spawn)
            self._spawn = self.pool.spawn
        else:
            self.pool = None
            self._spawn = spawn
        if hasattr(self.pool, 'full'):
            self.full = self.pool.full
        if self.pool is not None:
            self.pool._semaphore.rawlink(self._start_accepting_if_started) 
Example #12
Source File: kostebek.py    From kostebek with GNU General Public License v3.0    6 votes
def getRootDomains(self):

		url = "http://www.iana.org/domains/root/db"
		soup = self.soup(url)
		link = soup.find('table', {'id': 'tld-table'})
		tlds = [anchor.text for anchor in link.find_all('a')]
		
		#Get RootDomains
		
		jobs = []
		links = []
		p = pool.Pool(20)

		for allRootDomains in tlds:
			https = "https://www."+self.org+allRootDomains
			http = "http://www."+self.org+allRootDomains
			#Check http(s) urls 
			l = [https,http]
			for url in l:
				jobs.append(p.spawn(self.getUrl, url))
		gevent.joinall(jobs) 
Example #13
Source File: get_main_movies_base_data.py    From videoSpider with MIT License    6 votes
def task(pool_number, types=types, tags_dict=tags_dict, sorts=sorts):
    global video_douban_ids  # declare the global before assigning to it
    video_douban_ids = set(get_video_douban_ids())

    pool = Pool(pool_number)

    for type in types:
        for tag in tags_dict[type]:
            for sort in sorts:
                pool.spawn(
                    create_requests_and_save_datas,
                    type=type,
                    tag=tag,
                    sort=sort
                )
    pool.join()

    return list(video_douban_ids) 
Example #14
Source File: subdomain-analyzer.py    From SubDomain-Analyzer with GNU General Public License v3.0    6 votes
def __init__(self, output, threads, append_sub_domains, sub_domain_list, socket_timeout=5):
        # Root Domain (From Main)
        self.root_domain = None

        if not path.exists(sub_domain_list): # Checks if the `subdomain` file exists
            raise Exception('Sub-domains list not found.')
        # Settings
        self.sub_domain_file_name = sub_domain_list
        self.sub_domain_list = set(self.__get_sub_domains_list())
        self.append_sub_domains = append_sub_domains
        self.logger = self.create_logger(output)
        self.__ip_pool = Pool(size=threads)
        self.__domain_pool = Pool(size=threads)

        # Set socket timeout
        socket.setdefaulttimeout(socket_timeout) 
Example #15
Source File: baseserver.py    From PokemonGo-DesktopMap with MIT License    6 votes
def set_spawn(self, spawn):
        if spawn == 'default':
            self.pool = None
            self._spawn = self._spawn
        elif hasattr(spawn, 'spawn'):
            self.pool = spawn
            self._spawn = spawn.spawn
        elif isinstance(spawn, integer_types):
            from gevent.pool import Pool
            self.pool = Pool(spawn)
            self._spawn = self.pool.spawn
        else:
            self.pool = None
            self._spawn = spawn
        if hasattr(self.pool, 'full'):
            self.full = self.pool.full
        if self.pool is not None:
            self.pool._semaphore.rawlink(self._start_accepting_if_started) 
Example #16
Source File: baseserver.py    From PokemonGo-DesktopMap with MIT License    6 votes
def set_spawn(self, spawn):
        if spawn == 'default':
            self.pool = None
            self._spawn = self._spawn
        elif hasattr(spawn, 'spawn'):
            self.pool = spawn
            self._spawn = spawn.spawn
        elif isinstance(spawn, integer_types):
            from gevent.pool import Pool
            self.pool = Pool(spawn)
            self._spawn = self.pool.spawn
        else:
            self.pool = None
            self._spawn = spawn
        if hasattr(self.pool, 'full'):
            self.full = self.pool.full
        if self.pool is not None:
            self.pool._semaphore.rawlink(self._start_accepting_if_started) 
Example #17
Source File: upload_qiniu.py    From videoSpider with MIT License    6 votes
def upload_qiniu_by_filenames(access_key, secret_key, bucket_name, key_prefix,
                              pool_number, path, filenames, delete=False):
    q = Auth(access_key, secret_key)
    mime_type = "text/plain"
    params = {'x:a': 'a'}

    pool = Pool(pool_number)

    for filename in filenames:
        localfile = filename
        key = os.path.join(key_prefix, localfile.replace(path, '')[1:])
        token = q.upload_token(bucket_name, key)

        pool.spawn(
            down,
            token=token,
            key=key,
            localfile=localfile,
            mime_type=mime_type,
            delete=delete
        )

    pool.join() 
Example #18
Source File: register.py    From nike_purchase_system with GNU General Public License v3.0    5 votes
def register():
    f = open('fail.txt', 'w')
    f.close()
    f = open('success.txt', 'w')
    f.close()
    p = Pool(1000)
    f = open('邮箱.txt', 'r')
    # emails = f.read().strip().split('\n')
    emails = list(map(lambda x: x.split('----')[0], f.read().strip().split('\n')))
    for i in emails:
        p.apply_async(reg, args=(i,))
    p.join()
    print('over!') 
Example #19
Source File: anyserver.py    From termite-visualizations with BSD 3-Clause "New" or "Revised" License    5 votes
def gevent(app, address, **options):
        options = options['options']
        workers = options.workers
        from gevent import pywsgi
        from gevent.pool import Pool
        pywsgi.WSGIServer(address, app, spawn=workers and Pool(
            int(options.workers)) or 'default', log=None).serve_forever() 
Example #20
Source File: ct-exposer.py    From ct-exposer with GNU General Public License v3.0    5 votes
def main(domain, masscanOutput, urlOutput):
    domainsFound = {}
    domainsNotFound = {}
    if (not masscanOutput and not urlOutput):
        print("[+]: Downloading domain list...")
    response = collectResponse(domain)
    if (not masscanOutput and not urlOutput):
        print("[+]: Download of domain list complete.")
    domains = collectDomains(response)
    if (not masscanOutput and not urlOutput):
        print("[+]: Parsed %s domain(s) from list." % len(domains))
    
    pool = Pool(15)
    greenlets = [pool.spawn(resolve, domain) for domain in domains]
    pool.join(timeout=1)
    for greenlet in greenlets:
        result = greenlet.value
        if (result):
            for ip in result.values():
                if ip != 'none':  # compare strings by value, not identity
                    domainsFound.update(result)
                else:
                    domainsNotFound.update(result)

    if (urlOutput):
        printUrls(sorted(domains))
    if (masscanOutput):
        printMasscan(domainsFound)
    if (not masscanOutput and not urlOutput):
        print("\n[+]: Domains found:")
        printDomains(domainsFound)
        print("\n[+]: Domains with no DNS record:")
        printDomains(domainsNotFound) 
Example #21
Source File: agent_server.py    From powerpool with BSD 2-Clause "Simplified" License    5 votes
def start(self, *args, **kwargs):
        self.logger = self.server.logger
        self.listener = (self.config['address'],
                         self.config['port'] +
                         self.config['agent']['port_diff'] +
                         self.server.manager.config['server_number'])
        StreamServer.__init__(self, self.listener, spawn=Pool())
        self.logger.info("Agent server starting up on {}".format(self.listener))
        StreamServer.start(self, *args, **kwargs)
        Component.start(self) 
Example #22
Source File: gevent_extractor.py    From trains with Apache License 2.0    5 votes
def __init__(self, names, max_workers=222):
        super(self.__class__, self).__init__(names, max_workers)
        self._pool = Pool(self._max_workers)
        self._exited_greenlets = 0 
Example #23
Source File: subfinder_gevent.py    From subfinder with MIT License    5 votes
def _init_pool(self):
        self.pool = Pool(10) 
Example #24
Source File: sql.py    From rowboat with MIT License    5 votes
def command_recover(self, event, duration, pool=4, mode=None):
        if mode == 'global':
            channels = list(self.state.channels.values())
        else:
            channels = list(event.guild.channels.values())

        start_at = parse_duration(duration, negative=True)

        pool = Pool(pool)

        total = len(channels)
        msg = event.msg.reply('Recovery Status: 0/{}'.format(total))
        recoveries = []

        def updater():
            last = len(recoveries)

            while True:
                if last != len(recoveries):
                    last = len(recoveries)
                    msg.edit('Recovery Status: {}/{}'.format(len(recoveries), total))
                gevent.sleep(5)

        u = self.spawn(updater)

        try:
            for channel in channels:
                pool.wait_available()
                r = Recovery(self.log, channel, start_at)
                pool.spawn(r.run)
                recoveries.append(r)
        finally:
            pool.join()
            u.kill()

        msg.edit('RECOVERY COMPLETED ({} total messages)'.format(
            sum([i._recovered for i in recoveries])
        )) 
Example #25
Source File: get_animations_base_data.py    From videoSpider with MIT License    5 votes
def task(pool_number, pages=range(1, 100)):
    global animation_bilibili_ids  # declare the global before assigning to it
    animation_bilibili_ids = set(get_animation_bilibili_ids())

    pool = Pool(pool_number)

    for page in pages:
        pool.spawn(
            create_requests_and_save_datas,
            page=page
        )

    pool.join()

    return animation_bilibili_ids 
Example #26
Source File: utilities.py    From rowboat with MIT License    5 votes
def jumbo(self, event, emojis):
        urls = []

        for emoji in emojis.split(' ')[:5]:
            if EMOJI_RE.match(emoji):
                _, eid = EMOJI_RE.findall(emoji)[0]
                urls.append('https://discordapp.com/api/emojis/{}.png'.format(eid))
            else:
                urls.append(get_emoji_url(emoji))

        width, height, images = 0, 0, []

        for r in Pool(6).imap(requests.get, urls):
            try:
                r.raise_for_status()
            except requests.HTTPError:
                return

            img = Image.open(BytesIO(r.content))
            height = img.height if img.height > height else height
            width += img.width + 10
            images.append(img)

        image = Image.new('RGBA', (width, height))
        width_offset = 0
        for img in images:
            image.paste(img, (width_offset, 0))
            width_offset += img.width + 10

        combined = BytesIO()
        image.save(combined, 'png', quality=55)
        combined.seek(0)
        return event.msg.reply('', attachments=[('emoji.png', combined)]) 
Example #27
Source File: server.py    From ghttproxy with BSD 2-Clause "Simplified" License    5 votes
def __init__(self, ip, port, app, log='default'):
        self.ip = ip
        self.port = port
        self.app = app
        self.server = WSGIServer((self.ip, self.port), log=log,
            application=self.app.application, spawn=Pool(500), handler_class=ProxyHandler) 
Example #28
Source File: upload_qiniu.py    From videoSpider with MIT License    5 votes
def upload_qiniu_by_path(access_key, secret_key, bucket_name, key_prefix,
                         pool_number, path, delete=False):
    q = Auth(access_key, secret_key)
    mime_type = "text/plain"
    params = {'x:a': 'a'}

    pool = Pool(pool_number)

    for dirpath, dirnames, filenames in os.walk(path):
        print(dirpath)
        if len(filenames) > 0:
            for filename in filenames:
                if filename.startswith('.'):
                    continue
                localfile = os.path.join(dirpath, filename)
                key = os.path.join(key_prefix, localfile.replace(path, '')[1:])
                token = q.upload_token(bucket_name, key)

                pool.spawn(
                    down,
                    token=token,
                    key=key,
                    localfile=localfile,
                    mime_type=mime_type,
                    delete=delete
                )

    pool.join()  # assumed: wait for the uploads to finish (the excerpt ends without a join)
Example #29
Source File: hls_downloader.py    From echo360 with MIT License    5 votes
def __init__(self, pool_size, retry=3, selenium_cookies=None):
        self.pool = Pool(pool_size)
        self.session = self._get_http_session(pool_size, pool_size, retry, selenium_cookies)
        self.retry = retry
        self.dir = ''
        self.succed = {}
        self.failed = []
        self.ts_total = 0
        self._result_file_name = None 
Example #30
Source File: down_celebrity_images.py    From videoSpider with MIT License    5 votes
def task(douban_ids, pool_number):
    pool = Pool(pool_number)

    for douban_id in douban_ids:
        pool.spawn(
            create_requests_and_save_datas,
            douban_id=douban_id
        )

    pool.join()