Python urllib.request.getproxies() Examples
The following are 14 code examples of urllib.request.getproxies(). Each example names its original project and source file in the heading above the code, so you can follow it back to the full context. You may also want to check out the other available functions and classes of the urllib.request module.
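For orientation before the examples: getproxies() reads proxy settings from environment variables such as HTTP_PROXY and HTTPS_PROXY (and, on some platforms, from system configuration) and returns a dictionary mapping scheme names to proxy URLs. Below is a minimal sketch of typical usage; the proxy URL in the comment is purely illustrative.

import urllib.request

# Mapping of scheme -> proxy URL, for example
# {'http': 'http://proxy.example.com:8080'}; an empty dict means no proxy is configured.
proxies = urllib.request.getproxies()

# Most of the examples below either pass this whole mapping to requests via the
# proxies= argument, or look up a single entry (as here) to configure a
# proxy-aware client such as urllib3.ProxyManager.
http_proxy = proxies.get("http")  # None when no HTTP proxy is configured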
Example #1
Source File: transport.py From sentry-python with BSD 2-Clause "Simplified" License
def _make_pool(
    self,
    parsed_dsn,  # type: Dsn
    http_proxy,  # type: Optional[str]
    https_proxy,  # type: Optional[str]
    ca_certs,  # type: Optional[Any]
):
    # type: (...) -> Union[PoolManager, ProxyManager]
    proxy = None

    # try HTTPS first
    if parsed_dsn.scheme == "https" and (https_proxy != ""):
        proxy = https_proxy or getproxies().get("https")

    # maybe fallback to HTTP proxy
    if not proxy and (http_proxy != ""):
        proxy = http_proxy or getproxies().get("http")

    opts = self._get_pool_options(ca_certs)

    if proxy:
        return urllib3.ProxyManager(proxy, **opts)
    else:
        return urllib3.PoolManager(**opts)
Example #2
Source File: helpers.py From Galaxy_Plugin_Bethesda with MIT License
def proxies_from_env() -> Dict[str, ProxyInfo]:
    proxy_urls = {k: URL(v) for k, v in getproxies().items()
                  if k in ('http', 'https')}
    netrc_obj = netrc_from_env()
    stripped = {k: strip_auth_from_url(v) for k, v in proxy_urls.items()}
    ret = {}
    for proto, val in stripped.items():
        proxy, auth = val
        if proxy.scheme == 'https':
            client_logger.warning(
                "HTTPS proxies %s are not supported, ignoring", proxy)
            continue
        if netrc_obj and auth is None:
            auth_from_netrc = None
            if proxy.host is not None:
                auth_from_netrc = netrc_obj.authenticators(proxy.host)
            if auth_from_netrc is not None:
                # auth_from_netrc is a (`user`, `account`, `password`) tuple,
                # `user` and `account` both can be username,
                # if `user` is None, use `account`
                *logins, password = auth_from_netrc
                login = logins[0] if logins[0] else logins[-1]
                auth = BasicAuth(cast(str, login), cast(str, password))
        ret[proto] = ProxyInfo(proxy, auth)
    return ret
Example #3
Source File: utils.py From cloud-custodian with Apache License 2.0
def get_proxy_url(url):
    proxies = getproxies()
    url_parts = parse_url_config(url)

    proxy_keys = [
        url_parts['scheme'] + '://' + url_parts['netloc'],
        url_parts['scheme'],
        'all://' + url_parts['netloc'],
        'all'
    ]

    for key in proxy_keys:
        if key in proxies:
            return proxies[key]

    return None
Example #4
Source File: util.py From odoo12-x64 with GNU General Public License v3.0
def get_soap_client(wsdlurl):  # pragma: no cover (not part of normal test suite)
    """Get a SOAP client for performing requests. The client is cached."""
    # this function isn't automatically tested because the functions using
    # it are not automatically tested
    if wsdlurl not in _soap_clients:
        # try zeep first
        try:
            from zeep import CachingClient
            client = CachingClient(wsdlurl).service
        except ImportError:
            # fall back to non-caching zeep client
            try:
                from zeep import Client
                client = Client(wsdlurl).service
            except ImportError:
                # other implementations require passing the proxy config
                try:
                    from urllib import getproxies
                except ImportError:
                    from urllib.request import getproxies
                # fall back to suds
                try:
                    from suds.client import Client
                    client = Client(wsdlurl, proxy=getproxies()).service
                except ImportError:
                    # use pysimplesoap as last resort
                    from pysimplesoap.client import SoapClient
                    client = SoapClient(wsdl=wsdlurl, proxy=getproxies())
        _soap_clients[wsdlurl] = client
    return _soap_clients[wsdlurl]
Example #5
Source File: lyrics.py From spotifylyrics with The Unlicense
def LyricWikia(artist, title):
    proxy = request.getproxies()
    url = 'http://lyrics.wikia.com/api.php?action=lyrics&artist={artist}&song={title}&fmt=json&func=getSong'.format(
        artist=artist, title=title).replace(" ", "%20")
    r = requests.get(url, timeout=15, proxies=proxy)
    # We got some bad formatted JSON data... So we need to fix stuff :/
    returned = r.text
    returned = returned.replace("\'", "\"")
    returned = returned.replace("song = ", "")
    returned = json.loads(returned)
    if returned["lyrics"] != "Not found":
        # set the url to the url we just received, and retrieving it
        timed = True
        url = returned["url"] + "/lrc"
        r = requests.get(url, timeout=15, proxies=proxy)
        if r.status_code == 404:
            timed = False
            url = returned["url"]
            r = requests.get(url, timeout=15, proxies=proxy)
        soup = BeautifulSoup(r.text, 'html.parser')
        soup = soup.find("div", {"class": "lyricbox"})
        [elem.extract() for elem in soup.findAll('div')]
        [elem.replaceWith('\n') for elem in soup.findAll('br')]
        # with old BeautifulSoup the following is needed..? For recent versions, this isn't needed/doesn't work
        try:
            # soup = BeautifulSoup(str(soup), convertEntities=BeautifulSoup.HTML_ENTITIES)
            soup = BeautifulSoup(str(soup), 'html.parser')
        except:
            pass
        soup = BeautifulSoup(re.sub(r'(<!--[.\s\S]*-->)', '', str(soup)), 'html.parser')
        [elem.extract() for elem in soup.findAll('script')]
        return soup.getText(), url, timed
    else:
        return "error", "", False
Example #6
Source File: backend.py From spotifylyrics with The Unlicense
def check_version() -> bool:
    proxy = request.getproxies()
    try:
        return get_version() >= \
            float(requests.get("https://api.github.com/repos/SimonIT/spotifylyrics/tags",
                               timeout=5, proxies=proxy)
                  .json()[0]["name"])
    except Exception:
        return True
Example #7
Source File: sessionmanager.py From pyrh with MIT License
def __init__(
    self,
    username: str,
    password: str,
    challenge_type: Optional[str] = "email",
    headers: Optional[CaseInsensitiveDictType] = None,
    proxies: Optional[Proxies] = None,
    **kwargs: Any,
) -> None:
    self.session: requests.Session = requests.session()
    self.session.headers = HEADERS if headers is None else headers
    self.session.proxies = getproxies() if proxies is None else proxies
    self.session.verify = certifi.where()
    self.expires_at = datetime.strptime("1970", "%Y").replace(
        tzinfo=pytz.UTC
    )  # some time in the past
    self.username: str = username
    self.password: str = password

    if challenge_type not in ["email", "sms"]:
        raise ValueError("challenge_type must be email or sms")
    self.challenge_type: str = challenge_type

    self.device_token: str = kwargs.pop("device_token", str(uuid.uuid4()))
    self.oauth: OAuth = kwargs.pop("ouath", OAuth())

    super().__init__(**kwargs)
Example #8
Source File: api.py From clf with MIT License
def _get_proxies(self):
    proxies = getproxies()
    proxy = {}
    if self.proxy:
        parsed_proxy = urlparse(self.proxy)
        proxy[parsed_proxy.scheme] = parsed_proxy.geturl()
        proxies.update(proxy)
    return proxies
Example #9
Source File: scrape.py From UIP with GNU Affero General Public License v3.0
def make_soup(url):  # pragma: no cover
    """Make soup, that is basically parsing the html document."""
    response = requests.get(
        url,
        headers={'User-agent': 'UIP'},
        # gets system proxy (if it is currently using one)
        proxies=getproxies())
    html = response.content
    return BeautifulSoup(html, "html.parser")
Example #10
Source File: scrape.py From UIP with GNU Affero General Public License v3.0
def make_json(url):  # pragma: no cover
    """Make a dictionary out of a json file."""
    response = requests.get(
        url,
        headers={'User-agent': 'UIP'},
        # gets system proxy (if it is currently using one)
        proxies=getproxies())
    json_file = response.text
    data = json.loads(json_file)
    return data
Example #11
Source File: setup.py From OasisLMF with BSD 3-Clause "New" or "Revised" License
def fetch_ktools_tar(self, location, url, attempts=3, timeout=15, cooldown=1):
    last_error = None
    proxy_config = urlrequest.getproxies()
    self.announce('Retrieving ktools from: {}'.format(url), INFO)
    self.announce('Proxy configuration: {}'.format(proxy_config), INFO)

    if proxy_config:
        # Handle Proxy config
        proxy_handler = urlrequest.ProxyHandler(proxy_config)
        opener = urlrequest.build_opener(proxy_handler)
        urlrequest.install_opener(opener)

    for i in range(attempts):
        try:
            if proxy_config:
                # Proxied connection
                req = urlrequest.urlopen(urlrequest.Request(url), timeout=timeout)
                break
            else:
                # Non proxied connection
                req = urlrequest.urlopen(url, timeout=timeout)
                break
        except URLError as e:
            self.announce('Fetch ktools tar failed: {} (attempt {})'.format(e, (i+1)), WARN)
            last_error = e
            sleep(cooldown)
    else:
        self.announce('Failed to get ktools tar after {} attempts'.format(attempts), ERROR)
        if last_error:
            raise last_error

    with open(location, 'wb') as f:
        f.write(req.read())
Example #12
Source File: util.py From odoo13-x64 with GNU General Public License v3.0
def get_soap_client(wsdlurl, timeout=30):  # pragma: no cover (not part of normal test suite)
    """Get a SOAP client for performing requests. The client is cached. The
    timeout is in seconds."""
    # this function isn't automatically tested because the functions using
    # it are not automatically tested
    if (wsdlurl, timeout) not in _soap_clients:
        # try zeep first
        try:
            from zeep.transports import Transport
            transport = Transport(timeout=timeout)
            from zeep import CachingClient
            client = CachingClient(wsdlurl, transport=transport).service
        except ImportError:
            # fall back to non-caching zeep client
            try:
                from zeep import Client
                client = Client(wsdlurl, transport=transport).service
            except ImportError:
                # other implementations require passing the proxy config
                try:
                    from urllib import getproxies
                except ImportError:
                    from urllib.request import getproxies
                # fall back to suds
                try:
                    from suds.client import Client
                    client = Client(
                        wsdlurl, proxy=getproxies(), timeout=timeout).service
                except ImportError:
                    # use pysimplesoap as last resort
                    try:
                        from pysimplesoap.client import SoapClient
                        client = SoapClient(
                            wsdl=wsdlurl, proxy=getproxies(), timeout=timeout)
                    except ImportError:
                        raise ImportError(
                            'No SOAP library (such as zeep) found')
        _soap_clients[(wsdlurl, timeout)] = client
    return _soap_clients[(wsdlurl, timeout)]
Example #13
Source File: client_reqrep.py From lambda-text-extractor with Apache License 2.0
def update_proxy(self, proxy, proxy_auth, proxy_from_env):
    if proxy_from_env and not proxy:
        proxy_url = getproxies().get(self.original_url.scheme)
        proxy = URL(proxy_url) if proxy_url else None
    if proxy and not proxy.scheme == 'http':
        raise ValueError("Only http proxies are supported")
    if proxy_auth and not isinstance(proxy_auth, helpers.BasicAuth):
        raise ValueError("proxy_auth must be None or BasicAuth() tuple")
    self.proxy = proxy
    self.proxy_auth = proxy_auth
Example #14
Source File: client_reqrep.py From lambda-text-extractor with Apache License 2.0
def update_proxy(self, proxy, proxy_auth, proxy_from_env):
    if proxy_from_env and not proxy:
        proxy_url = getproxies().get(self.original_url.scheme)
        proxy = URL(proxy_url) if proxy_url else None
    if proxy and not proxy.scheme == 'http':
        raise ValueError("Only http proxies are supported")
    if proxy_auth and not isinstance(proxy_auth, helpers.BasicAuth):
        raise ValueError("proxy_auth must be None or BasicAuth() tuple")
    self.proxy = proxy
    self.proxy_auth = proxy_auth