Python aiohttp.AsyncResolver() Examples

The following are 8 code examples of aiohttp.AsyncResolver(). You can vote up the ones you like or vote down the ones you don't like, and go to the original project or source file by following the links above each example. You may also want to check out all available functions and classes of the module aiohttp, or try the search function.
Example #1
Source File: async_reader.py    From rssant with BSD 3-Clause "New" or "Revised" License 6 votes vote down vote up
def __init__(
        self,
        session=None,
        user_agent=DEFAULT_USER_AGENT,
        request_timeout=30,
        max_content_length=10 * 1024 * 1024,
        allow_private_address=False,
        allow_non_webpage=False,
        rss_proxy_url=None,
        rss_proxy_token=None,
        dns_service: DNSService = DNS_SERVICE,
    ):
        """Configure the async feed reader.

        Args:
            session: optional aiohttp ClientSession. When None, a session is
                created lazily elsewhere and this object owns its lifecycle.
            user_agent: User-Agent header value, or a callable returning one
                (resolved once, here).
            request_timeout: request timeout in seconds.
            max_content_length: maximum response body size in bytes
                (default 10 MiB).
            allow_private_address: allow fetching from private IP ranges.
            allow_non_webpage: allow non-webpage content types.
            rss_proxy_url: optional proxy endpoint for fetching feeds.
            rss_proxy_token: auth token for the RSS proxy.
            dns_service: DNS service used to build a custom resolver;
                may be None.
        """
        # Only close the session ourselves if we created it (i.e. none given).
        self._close_session = session is None
        self.session = session
        # Created lazily on first use; None until then.
        self.resolver: "aiohttp.AsyncResolver | None" = None
        # Resolve a callable user_agent factory once, up front.
        self.user_agent = user_agent() if callable(user_agent) else user_agent
        self.request_timeout = request_timeout
        self.max_content_length = max_content_length
        self.allow_private_address = allow_private_address
        self.allow_non_webpage = allow_non_webpage
        self.rss_proxy_url = rss_proxy_url
        self.rss_proxy_token = rss_proxy_token
        self.dns_service = dns_service
Example #2
Source File: bot.py    From bot with MIT License 5 votes vote down vote up
def _recreate(self) -> None:
        """Re-create the connector, aiohttp session, the APIClient and the Redis session.

        NOTE(review): order matters here — the connector must be built before
        it is assigned to ``self.http.connector`` and before the new
        ClientSession/APIClient are created from it.
        """
        # Use asyncio for DNS resolution instead of threads so threads aren't spammed.
        # Doesn't seem to have any state with regards to being closed, so no need to worry?
        self._resolver = aiohttp.AsyncResolver()

        # Its __del__ does send a warning but it doesn't always show up for some reason.
        # Warn (rather than close) if the previous connector is still open.
        if self._connector and not self._connector._closed:
            log.warning(
                "The previous connector was not closed; it will remain open and be overwritten"
            )

        # Same best-effort warning for a still-open Redis pool.
        if self.redis_session and not self.redis_session.closed:
            log.warning(
                "The previous redis pool was not closed; it will remain open and be overwritten"
            )

        # Create the redis session (scheduled, not awaited — runs in background).
        self.loop.create_task(self._create_redis_session())

        # Use AF_INET as its socket family to prevent HTTPS related problems both locally
        # and in production.
        self._connector = aiohttp.TCPConnector(
            resolver=self._resolver,
            family=socket.AF_INET,
        )

        # Client.login() will call HTTPClient.static_login() which will create a session using
        # this connector attribute.
        self.http.connector = self._connector

        # Its __del__ does send a warning but it doesn't always show up for some reason.
        if self.http_session and not self.http_session.closed:
            log.warning(
                "The previous session was not closed; it will remain open and be overwritten"
            )

        self.http_session = aiohttp.ClientSession(connector=self._connector)
        self.api_client.recreate(force=True, connector=self._connector)
Example #3
Source File: dirrec.py    From FinalRecon with MIT License 5 votes vote down vote up
async def _scan_wordlist(url, tasks, threads, tout, wdlist, redir, sslv, dserv):
	"""Open one scanning session, schedule a fetch per wordlist entry into
	*tasks*, and gather ALL tasks (including already-finished ones from
	earlier calls — gathering a done Task just returns its result)."""
	resolver = aiohttp.AsyncResolver(nameservers=[dserv])
	conn = aiohttp.TCPConnector(limit=threads, resolver=resolver, family=socket.AF_INET, verify_ssl=sslv)
	timeout = aiohttp.ClientTimeout(total=None, sock_connect=tout, sock_read=tout)
	async with aiohttp.ClientSession(connector=conn, timeout=timeout) as session:
		with open(wdlist, 'r') as wordlist:
			for word in wordlist:
				word = word.strip()
				tasks.append(asyncio.create_task(fetch(url.format(word), session, redir, sslv)))
				# Yield to the loop so fetches start while we keep reading.
				await asyncio.sleep(0)
		return await asyncio.gather(*tasks)


async def run(target, threads, tout, wdlist, redir, sslv, dserv, output, data, filext):
	"""Brute-force directory scan of *target* using wordlist *wdlist*.

	Must be ``async`` — the body awaits; a plain ``def`` is a SyntaxError.
	When *filext* is non-empty (comma-separated extensions), each extension
	is scanned in its own session; a leading empty entry also scans the
	extension-less path, matching the original ',' + filext behavior.
	Results accumulate into the module-global ``responses``.
	"""
	global responses
	tasks = []
	if len(filext) == 0:
		responses = await _scan_wordlist(
			target + '/{}', tasks, threads, tout, wdlist, redir, sslv, dserv)
	else:
		for ext in (',' + filext).split(','):
			ext = ext.strip()
			if len(ext) == 0:
				url = target + '/{}'
			else:
				url = target + '/{}.' + ext
			# tasks is shared across extensions, as in the original code.
			responses = await _scan_wordlist(
				url, tasks, threads, tout, wdlist, redir, sslv, dserv)
Example #4
Source File: dirrec.py    From FinalRecon with MIT License 5 votes vote down vote up
async def wayback(dserv, tout):
	"""Query the Wayback Machine for every URL in the global ``found``.

	Must be ``async`` — the body awaits. Fix: the resolver built from
	*dserv* was previously never passed to the connector (dead variable),
	so the custom DNS server was silently ignored.
	"""
	global found
	print('\n' + Y + '[!]' + C + ' Requesting Wayback Machine...' + W + '\n')
	tasks = []
	resolver = aiohttp.AsyncResolver(nameservers=[dserv])
	# Pass the resolver so lookups actually use the chosen DNS server.
	conn = aiohttp.TCPConnector(limit=10, resolver=resolver)
	timeout = aiohttp.ClientTimeout(total=None, sock_connect=tout, sock_read=tout)
	async with aiohttp.ClientSession(connector=conn, timeout=timeout) as session:
		for f_url in found:
			tasks.append(asyncio.create_task(wm_fetch(f_url, session)))
		await asyncio.gather(*tasks)
Example #5
Source File: crawl.py    From browsertrix with Apache License 2.0 5 votes vote down vote up
async def startup(self) -> None:
        """Initialize the crawler manager's redis connection and
        http session used to make requests to shepherd.

        Must be ``async`` — the body awaits ``init_redis``; a plain
        ``def`` is a SyntaxError.
        """
        self.loop = get_event_loop()
        # Connect to redis; REDIS_URL env var overrides the default.
        self.redis = await init_redis(
            env('REDIS_URL', default=DEFAULT_REDIS_URL), self.loop
        )
        # aiodns-based resolver; ensure_ascii=False keeps JSON bodies UTF-8.
        self.session = ClientSession(
            connector=TCPConnector(
                resolver=AsyncResolver(loop=self.loop), loop=self.loop
            ),
            json_serialize=partial(json.dumps, ensure_ascii=False),
            loop=self.loop,
        )
Example #6
Source File: helper.py    From rssant with BSD 3-Clause "New" or "Revised" License 5 votes vote down vote up
def aiohttp_client_session(*, timeout=None, resolver=None, **kwargs):
    """Build an aiohttp ClientSession that resolves DNS via aiodns and
    accepts a plain number of seconds as *timeout* (default 30)."""
    client_timeout = 30 if timeout is None else timeout
    if isinstance(client_timeout, (int, float)):
        client_timeout = aiohttp.ClientTimeout(total=client_timeout)
    dns_resolver = aiohttp.AsyncResolver() if resolver is None else resolver
    # Fix: No route to host. https://github.com/saghul/aiodns/issues/22
    connector = aiohttp.TCPConnector(resolver=dns_resolver, family=socket.AF_INET)
    return aiohttp.ClientSession(connector=connector, timeout=client_timeout, **kwargs)
Example #7
Source File: async_reader.py    From rssant with BSD 3-Clause "New" or "Revised" License 5 votes vote down vote up
def _async_init(self):
        """Lazily create the DNS resolver and HTTP session on first use."""
        if self.resolver is None:
            event_loop = asyncio.get_event_loop()
            # Prefer the project's DNS service when one is configured.
            if self.dns_service is not None:
                self.resolver = self.dns_service.aiohttp_resolver(loop=event_loop)
            else:
                self.resolver = aiohttp.AsyncResolver(loop=event_loop)
        if self.session is None:
            self.session = aiohttp_client_session(
                timeout=self.request_timeout, resolver=self.resolver)
Example #8
Source File: bot.py    From seasonalbot with MIT License 5 votes vote down vote up
def __init__(self, **kwargs):
        """Initialize the bot, its shared HTTP session, and the guild-ready event."""
        super().__init__(**kwargs)
        # aiodns-based resolver avoids thread-based DNS lookups; AF_INET is
        # presumably forced to sidestep IPv6 connection problems — confirm.
        self.http_session = ClientSession(
            connector=TCPConnector(resolver=AsyncResolver(), family=socket.AF_INET)
        )
        # Set once the guild becomes available; other tasks can wait on it.
        self._guild_available = asyncio.Event()

        # Fire-and-forget: announce the connection in the log channel.
        self.loop.create_task(self.send_log("SeasonalBot", "Connected!"))