Python aiohttp.ClientOSError() Examples
The following are 6 code examples of aiohttp.ClientOSError(). Each example is taken from an open-source project; the original project, source file, and license are noted above the code. You may also want to check out all available functions and classes of the aiohttp module.
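Before the project examples, here is a minimal, self-contained sketch of the pattern they all share. In aiohttp, ClientOSError covers low-level socket failures (and is the base class of ClientConnectorError), so request code typically catches it around the session call and degrades gracefully. The fetch coroutine name, the timeout value, and the throw-away URL below are illustrative choices for this sketch, not taken from any of the projects.

import asyncio

import aiohttp


async def fetch(url):
    """Fetch a page, returning '' if the request fails at the socket level."""
    try:
        async with aiohttp.ClientSession() as session:
            timeout = aiohttp.ClientTimeout(total=10)
            async with session.get(url, timeout=timeout) as resp:
                return await resp.text()
    except aiohttp.ClientOSError as e:
        # Raised for OS-level errors such as a refused or reset connection;
        # ClientConnectorError is a subclass, so it is caught here as well.
        print('ClientOSError while fetching %s: %r' % (url, e))
        return ''


if __name__ == '__main__':
    # Port 1 is almost certainly closed, so this should hit the except branch.
    asyncio.run(fetch('http://127.0.0.1:1'))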
Example #1
Source File: providers.py From ProxyBroker with Apache License 2.0
async def _get(self, url, data=None, headers=None, method='GET'):
    page = ''
    try:
        timeout = aiohttp.ClientTimeout(total=self._timeout)
        async with self._sem_provider, self._session.request(
            method, url, data=data, headers=headers, timeout=timeout
        ) as resp:
            page = await resp.text()
            if resp.status != 200:
                log.debug(
                    'url: %s\nheaders: %s\ncookies: %s\npage:\n%s'
                    % (url, resp.headers, resp.cookies, page)
                )
                raise BadStatusError('Status: %s' % resp.status)
    except (
        UnicodeDecodeError,
        BadStatusError,
        asyncio.TimeoutError,
        aiohttp.ClientOSError,
        aiohttp.ClientResponseError,
        aiohttp.ServerDisconnectedError,
    ) as e:
        page = ''
        log.debug('%s is failed. Error: %r;' % (url, e))
    return page
Example #2
Source File: test_fetching.py From atomicpuppy with MIT License
def fail_with_client_error():
    raise aiohttp.ClientOSError("Darn it, can't connect")
Example #3
Source File: test_aio.py From prometheus-async with Apache License 2.0
async def test_integration(self, deregister):
    """
    Integration test with a real consul agent. Start a service, register
    it, close it, verify it's deregistered.
    """
    tags = ("foo", "bar")
    service_id = str(uuid.uuid4())  # allow for parallel tests
    con = _LocalConsulAgentClient(token=None)
    ca = ConsulAgent(
        name="test-metrics",
        service_id=service_id,
        tags=tags,
        deregister=deregister,
    )

    try:
        server = await aio.web.start_http_server(
            addr="127.0.0.1", service_discovery=ca
        )
    except aiohttp.ClientOSError:
        pytest.skip("Missing consul agent.")

    svc = (await con.get_services())[service_id]

    assert "test-metrics" == svc["Service"]
    assert sorted(tags) == sorted(svc["Tags"])
    assert server.socket.addr == svc["Address"]
    assert server.socket.port == svc["Port"]

    await server.close()

    services = await con.get_services()

    if deregister:
        # Assert service is gone iff we are supposed to deregister.
        assert service_id not in services
    else:
        assert service_id in services

        # Clean up behind ourselves.
        resp = await con.deregister_service(service_id)
        assert 200 == resp.status
Example #4
Source File: async_reader.py From rssant with BSD 3-Clause "New" or "Revised" License
async def read(self, url, *args, use_proxy=False, **kwargs) -> FeedResponse:
    headers = content = None
    try:
        if use_proxy:
            headers, content, url, status = await self._read_by_proxy(url, *args, **kwargs)
        else:
            headers, content, url, status = await self._read(url, *args, **kwargs)
    except (socket.gaierror, aiodns.error.DNSError):
        status = FeedResponseStatus.DNS_ERROR.value
    except (socket.timeout, TimeoutError, aiohttp.ServerTimeoutError,
            asyncio.TimeoutError, concurrent.futures.TimeoutError):
        status = FeedResponseStatus.CONNECTION_TIMEOUT.value
    except (ssl.SSLError, ssl.CertificateError, aiohttp.ServerFingerprintMismatch,
            aiohttp.ClientSSLError, aiohttp.ClientConnectorSSLError,
            aiohttp.ClientConnectorCertificateError):
        status = FeedResponseStatus.SSL_ERROR.value
    except (aiohttp.ClientProxyConnectionError, aiohttp.ClientHttpProxyError):
        status = FeedResponseStatus.PROXY_ERROR.value
    except (ConnectionError, aiohttp.ServerDisconnectedError,
            aiohttp.ServerConnectionError, aiohttp.ClientConnectionError,
            aiohttp.ClientConnectorError):
        status = FeedResponseStatus.CONNECTION_RESET.value
    except (aiohttp.WSServerHandshakeError, aiohttp.ClientOSError):
        status = FeedResponseStatus.CONNECTION_ERROR.value
    except aiohttp.ClientPayloadError:
        status = FeedResponseStatus.CHUNKED_ENCODING_ERROR.value
    except UnicodeDecodeError:
        status = FeedResponseStatus.CONTENT_DECODING_ERROR.value
    except FeedReaderError as ex:
        status = ex.status
        LOG.warning(type(ex).__name__ + " url=%s %s", url, ex)
    except (aiohttp.ClientResponseError, aiohttp.ContentTypeError) as ex:
        status = ex.status
    except (aiohttp.ClientError, aiohttp.InvalidURL):
        status = FeedResponseStatus.UNKNOWN_ERROR.value
    builder = FeedResponseBuilder(use_proxy=use_proxy)
    builder.url(url)
    builder.status(status)
    builder.content(content)
    builder.headers(headers)
    return builder.build()
Example #5
Source File: judge.py From ProxyBroker with Apache License 2.0
async def check(self, real_ext_ip):
    # TODO: need refactoring
    try:
        self.ip = await self._resolver.resolve(self.host)
    except ResolveError:
        return

    if self.scheme == 'SMTP':
        self.is_working = True
        self.available[self.scheme].append(self)
        self.ev[self.scheme].set()
        return

    page = False
    headers, rv = get_headers(rv=True)
    connector = aiohttp.TCPConnector(
        loop=self._loop, ssl=self.verify_ssl, force_close=True
    )
    try:
        timeout = aiohttp.ClientTimeout(total=self.timeout)
        async with aiohttp.ClientSession(
            connector=connector, timeout=timeout, loop=self._loop
        ) as session, session.get(
            url=self.url, headers=headers, allow_redirects=False
        ) as resp:
            page = await resp.text()
    except (
        asyncio.TimeoutError,
        aiohttp.ClientOSError,
        aiohttp.ClientResponseError,
        aiohttp.ServerDisconnectedError,
    ) as e:
        log.debug('%s is failed. Error: %r;' % (self, e))
        return

    page = page.lower()

    if resp.status == 200 and real_ext_ip in page and rv in page:
        self.marks['via'] = page.count('via')
        self.marks['proxy'] = page.count('proxy')
        self.is_working = True
        self.available[self.scheme].append(self)
        self.ev[self.scheme].set()
        log.debug('%s is verified' % self)
    else:
        log.debug(
            (
                '{j} is failed. HTTP status code: {code}; '
                'Real IP on page: {ip}; Version: {word}; '
                'Response: {page}'
            ).format(
                j=self,
                code=resp.status,
                page=page,
                ip=(real_ext_ip in page),
                word=(rv in page),
            )
        )
Example #6
Source File: core.py From Axeman with MIT License
async def retrieve_certificates(loop, url=None, ctl_offset=0, output_directory='/tmp/',
                                concurrency_count=DOWNLOAD_CONCURRENCY):
    async with aiohttp.ClientSession(loop=loop, conn_timeout=10) as session:
        ctl_logs = await certlib.retrieve_all_ctls(session)

        if url:
            url = url.strip("'")

        for log in ctl_logs:
            if url and url not in log['url']:
                continue
            work_deque = deque()
            download_results_queue = asyncio.Queue(maxsize=MAX_QUEUE_SIZE)

            logging.info("Downloading certificates for {}".format(log['description']))
            try:
                log_info = await certlib.retrieve_log_info(log, session)
            except (aiohttp.ClientConnectorError, aiohttp.ServerTimeoutError,
                    aiohttp.ClientOSError, aiohttp.ClientResponseError) as e:
                logging.error("Failed to connect to CTL! -> {} - skipping.".format(e))
                continue

            try:
                await certlib.populate_work(work_deque, log_info, start=ctl_offset)
            except Exception as e:
                logging.error("Log needs no update - {}".format(e))
                continue

            download_tasks = asyncio.gather(*[
                download_worker(session, log_info, work_deque, download_results_queue)
                for _ in range(concurrency_count)
            ])

            processing_task = asyncio.ensure_future(
                processing_coro(download_results_queue, output_dir=output_directory)
            )
            queue_monitor_task = asyncio.ensure_future(
                queue_monitor(log_info, work_deque, download_results_queue)
            )

            asyncio.ensure_future(download_tasks)

            await download_tasks
            await download_results_queue.put(None)  # Downloads are done, processing can stop

            await processing_task
            queue_monitor_task.cancel()

            logging.info("Completed {}, stored at {}!".format(
                log_info['description'],
                '/tmp/{}.csv'.format(log_info['url'].replace('/', '_'))
            ))

            logging.info("Finished downloading and processing {}".format(log_info['url']))