Python aiohttp.InvalidURL() Examples
The following are 15
code examples of aiohttp.InvalidURL().
You can vote up the ones you like or vote down the ones you don't like,
and go to the original project or source file by following the links above each example.
You may also want to check out all available functions and classes of the module aiohttp, or try the search function.
Example #1
Source File: agent.py From rasa_core with Apache License 2.0 | 6 votes |
async def _update_model_from_server(model_server: EndpointConfig, agent: 'Agent') -> None:
    """Load a zipped Rasa Core model from a URL and update the passed agent.

    :param model_server: endpoint describing where to pull the model from
    :param agent: agent whose model is replaced in place
    :raises aiohttp.InvalidURL: if the configured server URL is not a URL
    """
    # FIX: `await` is used below, so this must be `async def` — the
    # original `def` was a syntax error (scrape dropped the `async`).
    if not is_url(model_server.url):
        raise aiohttp.InvalidURL(model_server.url)

    # Download into a fresh temp dir; only swap the model in if the
    # server reports a fingerprint different from the agent's current one.
    model_directory = tempfile.mkdtemp()

    new_model_fingerprint = await _pull_model_and_fingerprint(
        model_server, model_directory, agent.fingerprint)
    if new_model_fingerprint:
        _load_and_set_updated_model(agent, model_directory, new_model_fingerprint)
    else:
        logger.debug("No new model found at "
                     "URL {}".format(model_server.url))
Example #2
Source File: setavatar.py From apex-sigma-core with GNU General Public License v3.0 | 6 votes |
async def setavatar(cmd, pld):
    """
    :param cmd: The command object referenced in the command.
    :type cmd: sigma.core.mechanics.command.SigmaCommand
    :param pld: The payload with execution data and details.
    :type pld: sigma.core.mechanics.payload.CommandPayload
    """
    # FIX: body uses `await`/`async with`, so the function must be
    # `async def` — the scrape dropped the `async` keyword.
    if pld.args or pld.msg.attachments:
        # Prefer an attached image; fall back to a URL argument.
        image_url = pld.msg.attachments[0].url if pld.msg.attachments else pld.args[0]
        try:
            try:
                async with aiohttp.ClientSession() as session:
                    async with session.get(image_url) as image_response:
                        img_data = await image_response.read()
                await cmd.bot.user.edit(avatar=img_data)
                response = ok('My avatar has been changed.')
            # Inner handler: bad URL while fetching the image.
            except aiohttp.InvalidURL:
                response = error('Invalid URL.')
        # Outer handler: Discord refused the avatar change.
        except discord.Forbidden:
            response = error('I was unable to change my avatar.')
    else:
        response = error('Give me a link or attach an image, please.')
    await pld.msg.channel.send(embed=response)
Example #3
Source File: agent.py From rasa-for-botfront with Apache License 2.0 | 6 votes |
async def _update_model_from_server(
    model_server: EndpointConfig, agent: "Agent"
) -> None:
    """Load a zipped Rasa Core model from a URL and update the passed agent.

    :param model_server: endpoint describing where to pull the model from
    :param agent: agent whose model is replaced in place
    :raises aiohttp.InvalidURL: if the configured server URL is not a URL
    """
    # FIX: `await` is used below, so this must be `async def` — the
    # original `def` was a syntax error (scrape dropped the `async`).
    if not is_url(model_server.url):
        raise aiohttp.InvalidURL(model_server.url)

    # Helper returns None when the server has no newer model.
    model_directory_and_fingerprint = await _pull_model_and_fingerprint(
        model_server, agent.fingerprint
    )
    if model_directory_and_fingerprint:
        model_directory, new_model_fingerprint = model_directory_and_fingerprint
        _load_and_set_updated_model(agent, model_directory, new_model_fingerprint)
    else:
        logger.debug(f"No new model found at URL {model_server.url}")
Example #4
Source File: test_clients.py From uplink with MIT License | 5 votes |
def test_exceptions(self):
    import requests

    exceptions = requests_.RequestsClient.exceptions

    # Table of (wrapper exception expected by uplink, concrete requests
    # exception raised). The second row raises a subclass against the
    # base wrapper to test polymorphism.
    mappings = [
        (exceptions.BaseClientException, requests.RequestException()),
        (exceptions.BaseClientException, requests.exceptions.InvalidURL()),
        (exceptions.ConnectionError, requests.exceptions.ConnectionError()),
        (exceptions.ConnectionTimeout, requests.exceptions.ConnectTimeout()),
        (exceptions.ServerTimeout, requests.exceptions.ReadTimeout()),
        (exceptions.SSLError, requests.exceptions.SSLError()),
        (exceptions.InvalidURL, requests.exceptions.InvalidURL()),
    ]
    for wrapper_cls, concrete_exc in mappings:
        with pytest.raises(wrapper_cls):
            raise concrete_exc
Example #5
Source File: test_clients.py From uplink with MIT License | 5 votes |
def test_exceptions(self):
    import aiohttp

    exceptions = aiohttp_.AiohttpClient.exceptions

    # Table of (wrapper exception expected by uplink, concrete aiohttp
    # exception instance raised). The second row raises a subclass
    # against the base wrapper to test polymorphism. Two entries use
    # __new__ because those aiohttp constructors require arguments we
    # don't care about here.
    mappings = [
        (exceptions.BaseClientException, aiohttp.ClientError()),
        (exceptions.BaseClientException, aiohttp.InvalidURL("invalid")),
        (exceptions.ConnectionError, aiohttp.ClientConnectionError()),
        (
            exceptions.ConnectionTimeout,
            aiohttp.ClientConnectorError.__new__(aiohttp.ClientConnectorError),
        ),
        (exceptions.ServerTimeout, aiohttp.ServerTimeoutError()),
        (
            exceptions.SSLError,
            aiohttp.ClientSSLError.__new__(aiohttp.ClientSSLError),
        ),
        (exceptions.InvalidURL, aiohttp.InvalidURL("invalid")),
    ]
    for wrapper_cls, concrete_exc in mappings:
        with pytest.raises(wrapper_cls):
            raise concrete_exc
Example #6
Source File: utils.py From rasa_core with Apache License 2.0 | 5 votes |
async def download_file_from_url(url: Text) -> Text:
    """Download a story file from a url and persists it into a temp file.

    Returns the file path of the temp file that contains the
    downloaded content.

    :raises InvalidURL: if `url` is not recognized as a URL
    """
    # FIX: body uses `async with`/`await`, so this must be `async def` —
    # the scrape dropped the `async` keyword.
    from rasa_nlu import utils as nlu_utils

    if not nlu_utils.is_url(url):
        raise InvalidURL(url)

    async with aiohttp.ClientSession() as session:
        # raise_for_status=True turns non-2xx responses into exceptions.
        async with session.get(url, raise_for_status=True) as resp:
            filename = nlu_utils.create_temporary_file(await resp.read(),
                                                       mode="w+b")

    return filename
Example #7
Source File: urlscraper.py From cjworkbench with GNU Affero General Public License v3.0 | 5 votes |
async def async_get_url(row, url):
    """
    Return a (row, status, text) tuple for fetching `url`.

    Resolves within settings.SCRAPER_TIMEOUT seconds. `status` is the
    HTTP status code as a string, or a short human-readable error
    ("Timed out", "Invalid URL", "Can't connect: ...", "Unknown error: ...").
    """
    # FIX: body uses `await`, so this must be `async def` — the scrape
    # dropped the `async` keyword.
    session = aiohttp.ClientSession()
    try:
        # aiohttp internally performs URL canonization before sending
        # request. DISABLE THIS: it breaks oauth and user's expectations.
        #
        # https://github.com/aio-libs/aiohttp/issues/3424
        url = yarl.URL(url, encoded=True)  # prevent magic

        response = await session.get(url, timeout=settings.SCRAPER_TIMEOUT)

        # We have the header. Now read the content.
        # response.text() times out according to SCRAPER_TIMEOUT above. See
        # https://docs.aiohttp.org/en/stable/client_quickstart.html#timeouts
        text = await response.text()

        return (row, str(response.status), text)
    except asyncio.TimeoutError:
        return (row, "Timed out", "")
    except aiohttp.InvalidURL:
        return (row, "Invalid URL", "")
    except aiohttp.ClientError as err:
        return (row, f"Can't connect: {err}", "")
    except asyncio.CancelledError:
        # Never swallow cancellation.
        raise
    except Exception as err:
        return (row, f"Unknown error: {err}", "")
    finally:
        # FIX: the original never closed the session, leaking the
        # connector; `finally` runs on every return/raise path above.
        await session.close()


# Asynchronously scrape many urls, and store the results in the table
Example #8
Source File: test_scrapetable.py From cjworkbench with GNU Affero General Public License v3.0 | 5 votes |
def test_relative_url_raises_invalid_url(self):
    # A relative path has no scheme/host, so spooling it must fail
    # before any network I/O happens.
    async def attempt_spool():
        async with scrapetable.spooled_data_from_url("/foo"):
            pass

    with self.assertRaises(aiohttp.InvalidURL):
        asyncio.run(attempt_spool())
Example #9
Source File: test_scrapetable.py From cjworkbench with GNU Affero General Public License v3.0 | 5 votes |
def test_schemaless_url_raises_invalid_url(self):
    # "//a/b" is protocol-relative (no explicit scheme); spooling it
    # must be rejected up front.
    async def attempt_spool():
        async with scrapetable.spooled_data_from_url("//a/b"):
            pass

    with self.assertRaises(aiohttp.InvalidURL):
        asyncio.run(attempt_spool())
Example #10
Source File: test_scrapetable.py From cjworkbench with GNU Affero General Public License v3.0 | 5 votes |
def test_mailto_url_raises_invalid_url(self):
    # Only http(s) schemes are fetchable; mailto: must be rejected.
    async def attempt_spool():
        async with scrapetable.spooled_data_from_url("mailto:user@example.org"):
            pass

    with self.assertRaises(aiohttp.InvalidURL):
        asyncio.run(attempt_spool())
Example #11
Source File: utils.py From rasa-for-botfront with Apache License 2.0 | 5 votes |
async def download_file_from_url(url: Text) -> Text:
    """Download a story file from a url and persists it into a temp file.

    Returns the file path of the temp file that contains the
    downloaded content.

    :raises InvalidURL: if `url` is not recognized as a URL
    """
    # FIX: body uses `async with`/`await`, so this must be `async def` —
    # the scrape dropped the `async` keyword.
    from rasa.nlu import utils as nlu_utils

    if not nlu_utils.is_url(url):
        raise InvalidURL(url)

    async with aiohttp.ClientSession() as session:
        # raise_for_status=True turns non-2xx responses into exceptions.
        async with session.get(url, raise_for_status=True) as resp:
            filename = io_utils.create_temporary_file(await resp.read(),
                                                      mode="w+b")

    return filename
Example #12
Source File: async_reader.py From rssant with BSD 3-Clause "New" or "Revised" License | 4 votes |
async def read(self, url, *args, use_proxy=False, **kwargs) -> FeedResponse:
    """Fetch a feed URL, mapping every failure to a FeedResponseStatus.

    :param url: feed URL to fetch
    :param use_proxy: route the request through the configured proxy
    :return: a FeedResponse built from headers/content/status; on error
        the content and headers are None and status encodes the failure
    """
    # FIX: `await` is used below, so this must be `async def` — the
    # original `def` was a syntax error (scrape dropped the `async`).
    headers = content = None
    try:
        if use_proxy:
            headers, content, url, status = \
                await self._read_by_proxy(url, *args, **kwargs)
        else:
            headers, content, url, status = \
                await self._read(url, *args, **kwargs)
    # NOTE: clause ORDER matters — more specific aiohttp subclasses must
    # be caught before their bases (e.g. SSL before ClientConnectorError,
    # proxy before generic connection errors).
    except (socket.gaierror, aiodns.error.DNSError):
        status = FeedResponseStatus.DNS_ERROR.value
    except (socket.timeout, TimeoutError, aiohttp.ServerTimeoutError,
            asyncio.TimeoutError, concurrent.futures.TimeoutError):
        status = FeedResponseStatus.CONNECTION_TIMEOUT.value
    except (ssl.SSLError, ssl.CertificateError,
            aiohttp.ServerFingerprintMismatch,
            aiohttp.ClientSSLError,
            aiohttp.ClientConnectorSSLError,
            aiohttp.ClientConnectorCertificateError):
        status = FeedResponseStatus.SSL_ERROR.value
    except (aiohttp.ClientProxyConnectionError,
            aiohttp.ClientHttpProxyError):
        status = FeedResponseStatus.PROXY_ERROR.value
    except (ConnectionError, aiohttp.ServerDisconnectedError,
            aiohttp.ServerConnectionError,
            aiohttp.ClientConnectionError,
            aiohttp.ClientConnectorError):
        status = FeedResponseStatus.CONNECTION_RESET.value
    except (aiohttp.WSServerHandshakeError, aiohttp.ClientOSError):
        status = FeedResponseStatus.CONNECTION_ERROR.value
    except aiohttp.ClientPayloadError:
        status = FeedResponseStatus.CHUNKED_ENCODING_ERROR.value
    except UnicodeDecodeError:
        status = FeedResponseStatus.CONTENT_DECODING_ERROR.value
    except FeedReaderError as ex:
        # Application-level reader errors carry their own status code.
        status = ex.status
        LOG.warning(type(ex).__name__ + " url=%s %s", url, ex)
    except (aiohttp.ClientResponseError, aiohttp.ContentTypeError) as ex:
        # HTTP-level errors expose the server's status code directly.
        status = ex.status
    except (aiohttp.ClientError, aiohttp.InvalidURL):
        status = FeedResponseStatus.UNKNOWN_ERROR.value

    builder = FeedResponseBuilder(use_proxy=use_proxy)
    builder.url(url)
    builder.status(status)
    builder.content(content)
    builder.headers(headers)
    return builder.build()
Example #13
Source File: device.py From python-songpal with GNU General Public License v3.0 | 4 votes |
async def create_post_request(self, method: str, params: Dict = None):
    """Call the given method over POST.

    :param method: Name of the method
    :param params: dict of parameters
    :return: JSON object
    :raises SongpalException: on connection failure, non-200 status, or
        an "error" member in the JSON response
    """
    # FIX: body uses `async with`/`await`, so this must be `async def` —
    # the scrape dropped the `async` keyword.
    if params is None:
        params = {}
    headers = {"Content-Type": "application/json"}
    # JSON-RPC-style envelope; idgen yields a fresh request id per call.
    payload = {
        "method": method,
        "params": [params],
        "id": next(self.idgen),
        "version": "1.0",
    }
    if self.debug > 1:
        _LOGGER.debug("> POST %s with body: %s", self.guide_endpoint, payload)
    try:
        async with aiohttp.ClientSession(headers=headers) as session:
            res = await session.post(
                self.guide_endpoint, json=payload, headers=headers
            )
            if self.debug > 1:
                _LOGGER.debug("Received %s: %s" % (res.status, res.text))
            if res.status != 200:
                # content_type=None: accept JSON even with a wrong
                # Content-Type header from the device.
                res_json = await res.json(content_type=None)
                raise SongpalException(
                    "Got a non-ok (status %s) response for %s"
                    % (res.status, method),
                    error=res_json.get("error"),
                )
            res_json = await res.json(content_type=None)
    except (aiohttp.InvalidURL, aiohttp.ClientConnectionError) as ex:
        raise SongpalException("Unable to do POST request: %s" % ex) from ex

    if "error" in res_json:
        raise SongpalException(
            "Got an error for %s" % method, error=res_json["error"]
        )

    if self.debug > 1:
        _LOGGER.debug("Got %s: %s", method, pf(res_json))

    return res_json
Example #14
Source File: scrapetable.py From cjworkbench with GNU Affero General Public License v3.0 | 4 votes |
async def spooled_data_from_url(
    url: str,
    headers: Dict[str, str] = {},  # safe: reassigned below, never mutated
    timeout: aiohttp.ClientTimeout = None,
    *,
    ssl: Optional[ssl.SSLContext] = None,
):
    """
    Download `url` to a tempfile and yield `(bytesio, headers, charset)`.

    `bytesio` is backed by a temporary file: the file at path
    `bytesio.name` will exist within this context.

    Raise aiohttp.ClientError on generic error. Subclasses of note:

    * aiohttp.InvalidURL on invalid URL
    * aiohttp.ClientResponseError when HTTP status is not 200
    * aiohttp.ClientPayloadError when server closes connection prematurely
    * aiohttp.ClientConnectionError (OSError) when connection fails

    Raise asyncio.TimeoutError when `timeout` seconds have expired.
    """
    # FIX: body uses `async with`/`async for`, so this must be
    # `async def` — the scrape dropped the `async` keyword.
    # NOTE(review): this is an async generator presumably decorated with
    # @asynccontextmanager at the call site — decorator not visible here.

    # aiohttp internally performs URL canonization before sending
    # request. DISABLE THIS: it breaks oauth and user's expectations.
    #
    # https://github.com/aio-libs/aiohttp/issues/3424
    url = yarl.URL(url, encoded=True)  # prevent magic
    if url.scheme not in ("http", "https"):
        raise aiohttp.InvalidURL("URL must start with http:// or https://")

    with tempfile_context(prefix="loadurl") as spool_path:
        async with aiohttp.ClientSession() as session:
            # raise aiohttp.ClientError, asyncio.TimeoutError
            async with session.get(
                url, headers=headers, timeout=timeout, ssl=ssl
            ) as response:
                # raise aiohttp.ClientResponseError
                response.raise_for_status()
                headers = response.headers
                charset = response.charset

                with spool_path.open("wb") as spool:
                    # raise aiohttp.ClientPayloadError
                    async for blob in response.content.iter_chunked(_ChunkSize):
                        spool.write(blob)

        # Session is closed before handing the spool file to the caller.
        yield spool_path.open("rb"), headers, charset


# dependency-injection, so unit tests can mock our functions
Example #15
Source File: test_urlscraper.py From cjworkbench with GNU Affero General Public License v3.0 | 4 votes |
def scraper_result_test(self, results, response_times):
    """Drive urlscraper.scrape_urls against a mocked aiohttp session.

    `results` is the expected output DataFrame (columns url/status/html);
    `response_times` gives the simulated delay per row. The mock replies
    (or raises) according to each row's expected status, then the real
    scraper output is compared against `results`.
    """
    async def session_get(url, *, timeout=None):
        url = str(url)  # undo yarl un-magick-ing

        # Silly mock HTTP GET computes the test's input based on its
        # expected output. This defeats the purpose of a test.
        row = results[results["url"] == url]
        if row.empty:
            raise ValueError("called with URL we did not expect")
        index = row.index[0]
        delay = response_times[index]
        await asyncio.sleep(delay)

        status = row.at[index, "status"]
        text = row.at[index, "html"]
        # Map the expected status string back onto the exception the
        # scraper is supposed to translate it from.
        if status == "Timed out":
            raise asyncio.TimeoutError
        elif status == "Invalid URL":
            raise aiohttp.InvalidURL(url)
        elif status == "Can't connect: blah":
            raise aiohttp.client_exceptions.ClientConnectionError("blah")
        else:
            return MockResponse(int(status), text)

    with patch("aiohttp.ClientSession") as session:
        urls = results["url"].tolist()
        session_mock = session.return_value
        session_mock.get.side_effect = session_get

        # mock the output table format scraper expects
        out_table = pd.DataFrame(
            data={"url": urls, "status": ""},
            columns=["url", "status", "html"]
        )

        event_loop = asyncio.get_event_loop()
        event_loop.run_until_complete(urlscraper.scrape_urls(urls, out_table))

        assert_frame_equal(
            out_table[["url", "status", "html"]],
            results[["url", "status", "html"]]
        )

        # ensure aiohttp.get() called with the right sequence of urls
        # str() to un-magick the yarl.URL() magic
        call_urls = [
            str(args[0])
            for name, args, kwargs in session_mock.get.mock_calls
        ]
        self.assertEqual(set(call_urls), set(urls))


# basic tests, number of urls smaller than max simultaneous connections