Python aiohttp.client_exceptions Examples

The following are 4 code examples of `aiohttp.client_exceptions`. You can vote up the ones you like or vote down the ones you don't like, and go to the original project or source file by following the links above each example. You may also want to check out all available functions and classes of the `aiohttp` module, or try the search function.
Example #1
Source File: animal.py — From yui (GNU Affero General Public License v3.0), 6 votes
async def get_cat_image_url(timeout: float) -> str:
    """Fetch the URL of a random cat image from thecatapi.com.

    Keeps retrying the API when the server disconnects, then verifies
    that the returned image URL is actually reachable before handing
    it back.

    :param timeout: Per-request timeout in seconds for the image
        reachability check.
    :return: URL of a reachable cat image.
    :raises APIServerError: If the API answers with a non-200 status.
    """
    # NOTE: the original scrape dropped the `async` keyword; `await` /
    # `async with` inside a plain `def` is a SyntaxError, so it is
    # restored here.
    api_url = 'http://thecatapi.com/api/images/get'
    async with aiohttp.ClientSession() as session:
        while True:
            try:
                async with session.get(
                    api_url, params={'format': 'xml', 'type': 'jpg,png'}
                ) as res:
                    if res.status != 200:
                        raise APIServerError
                    xml_result = await res.read()
                    tree = etree.fromstring(xml_result)
                    url = tree.find('data/images/image/url').text
            except aiohttp.client_exceptions.ServerDisconnectedError:
                # The API server dropped the connection; back off
                # briefly and try again.
                await asyncio.sleep(0.1)
                continue
            try:
                async with async_timeout.timeout(timeout=timeout):
                    # The inner `async with res` is redundant with the
                    # outer context manager but harmless; kept as-is.
                    async with session.get(url) as res:
                        async with res:
                            if res.status == 200:
                                return url
            except (aiohttp.ClientConnectorError, asyncio.TimeoutError):
                # Unreachable or too-slow image URL: ask the API again.
                continue
Example #2
Source File: animal.py — From yui (GNU Affero General Public License v3.0), 6 votes
async def get_dog_image_url(timeout: float) -> str:
    """Fetch the URL of a random dog image from dog.ceo.

    Keeps retrying the API when the server disconnects, then verifies
    that the returned image URL is actually reachable before handing
    it back.

    :param timeout: Per-request timeout in seconds for the image
        reachability check.
    :return: URL of a reachable dog image.
    :raises APIServerError: If the API answers with a non-200 status.
    """
    # NOTE: the original scrape dropped the `async` keyword; `await` /
    # `async with` inside a plain `def` is a SyntaxError, so it is
    # restored here.
    api_url = 'https://dog.ceo/api/breeds/image/random'
    async with aiohttp.ClientSession() as session:
        while True:
            try:
                async with session.get(api_url) as res:
                    if res.status != 200:
                        raise APIServerError
                    data = await res.json(loads=json.loads)
                    url = data['message']
            except aiohttp.client_exceptions.ServerDisconnectedError:
                # The API server dropped the connection; back off
                # briefly and try again.
                await asyncio.sleep(0.1)
                continue
            try:
                async with async_timeout.timeout(timeout=timeout):
                    # The inner `async with res` is redundant with the
                    # outer context manager but harmless; kept as-is.
                    async with session.get(url) as res:
                        async with res:
                            if res.status == 200:
                                return url
            except (aiohttp.ClientConnectorError, asyncio.TimeoutError):
                # Unreachable or too-slow image URL: ask the API again.
                continue
Example #3
Source File: commands.py — From yui (GNU Affero General Public License v3.0), 5 votes
async def add(self, bot, event: Message, sess, url: str):
        """Subscribe the current channel to an RSS feed URL.

        Downloads *url*, validates that it is a parseable RSS/Atom
        document, records the newest entry timestamp, persists the
        subscription, and confirms to the channel in chat.

        :param bot: Bot instance used to post replies.
        :param event: Incoming chat message event (provides the channel).
        :param sess: Database session used to persist the subscription.
        :param url: Feed URL to subscribe to.
        """
        # NOTE: the original scrape dropped the `async` keyword on this
        # method (`await` inside a plain `def` is a SyntaxError).
        async with aiohttp.ClientSession() as session:
            try:
                async with session.get(url) as res:
                    data: bytes = await res.read()
            except aiohttp.client_exceptions.InvalidURL:
                await bot.say(event.channel, f'`{url}`은 올바른 URL이 아니에요!')
                return
            except aiohttp.client_exceptions.ClientConnectorError:
                await bot.say(event.channel, f'`{url}`에 접속할 수 없어요!')
                return

        if not data:
            # Empty response body — nothing to parse.
            await bot.say(event.channel, f'`{url}`은 빈 웹페이지에요!')
            return

        f = feedparser.parse(data)

        if f.bozo != 0:
            # feedparser sets `bozo` when the document is malformed.
            await bot.say(event.channel, f'`{url}`은 올바른 RSS 문서가 아니에요!')
            return

        feed = RSSFeedURL()
        feed.channel = event.channel.id
        feed.url = url
        # Track the newest published timestamp so only later entries
        # are announced from now on.
        feed.updated_at = max(
            [
                dateutil.parser.parse(entry.published).astimezone(UTC)
                for entry in f.entries
            ]
        )

        with sess.begin():
            sess.add(feed)

        await bot.say(
            event.channel, f'<#{event.channel.id}> 채널에서 `{url}`을 구독하기 시작했어요!'
        )
Example #4
Source File: test_rproxy.py — From dffml (MIT License), 4 votes
async def handler_proxy(self, req):
        """Reverse-proxy an incoming request to its upstream backend.

        Resolves the upstream from the request's subdomain and path,
        then either bridges a websocket (bidirectional forwarding) or
        replays a plain HTTP request and relays the response.

        :param req: Incoming aiohttp web request.
        :return: ``web.WebSocketResponse`` for websocket upgrades, a
            relayed ``web.Response`` otherwise, or 404 when no upstream
            matches or the websocket handshake fails.
        """
        # NOTE: the original scrape dropped the `async` keyword on this
        # method (`await` inside a plain `def` is a SyntaxError).
        headers = req.headers.copy()
        self.logger.debug("headers: %s", headers)

        subdomain = self.subdomain(headers)
        path = req.path
        self.logger.debug("subdomain: %r, path: %r", subdomain, path)

        upstream = self.get_upstream(subdomain, path)
        if not upstream:
            return web.Response(status=HTTPStatus.NOT_FOUND)

        if (
            headers.get("connection", "").lower() == "upgrade"
            and headers.get("upgrade", "").lower() == "websocket"
            and req.method == "GET"
        ):
            # Handle websocket proxy: connect upstream first, then
            # forward frames in both directions until one side closes.
            try:
                async with aiohttp.ClientSession(
                    cookies=req.cookies
                ) as client_session:
                    async with client_session.ws_connect(
                        upstream
                    ) as ws_client:
                        ws_server = web.WebSocketResponse()
                        await ws_server.prepare(req)
                        self.loop.create_task(
                            asyncio.wait(
                                [
                                    self.wsforward(ws_server, ws_client),
                                    self.wsforward(ws_client, ws_server),
                                ],
                                return_when=asyncio.FIRST_COMPLETED,
                            )
                        )
                        return ws_server
            except aiohttp.client_exceptions.WSServerHandshakeError:
                return web.Response(status=HTTPStatus.NOT_FOUND)
        else:
            # Handle regular HTTP request proxy: replay the request
            # upstream and relay status, headers, and body back.
            self.logger.debug(
                "upstream for (%r): %s", upstream, (subdomain, path)
            )
            async with client.request(
                req.method,
                upstream,
                headers=headers,
                allow_redirects=False,
                data=await req.read(),
            ) as res:
                self.logger.debug(
                    "upstream url(%s) status: %d", upstream, res.status
                )
                return web.Response(
                    headers=res.headers,
                    status=res.status,
                    body=await res.read(),
                )
            # Removed unreachable trailing `return ws_server`: the
            # `async with` above always returns, and `ws_server` is not
            # bound on this branch (it would raise NameError).