Python urllib2.build_opener() Examples
The following are 28 code examples of urllib2.build_opener(). You can vote up the ones you like or vote down the ones you don't like, and go to the original project or source file by following the links above each example. You may also want to check out all available functions/classes of the module urllib2, or try the search function.
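Most of the examples below follow the same pattern: construct one or more handler objects, combine them with urllib2.build_opener(), then either call the resulting opener directly or install it as the module-wide default. A minimal, self-contained sketch of that pattern (the URL and User-Agent string are placeholders):

import cookielib
import urllib2

# A cookie-aware opener: the HTTPCookieProcessor handler stores cookies in
# the jar and sends them back on later requests through the same opener.
cookie_jar = cookielib.CookieJar()
opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(cookie_jar))
opener.addheaders = [('User-Agent', 'example-client/1.0')]  # placeholder UA
response = opener.open('http://example.com/')  # placeholder URL
body = response.read()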
Example #1
Source File: liberty_crawler.py From agentless-system-crawler with Apache License 2.0 (11 votes)
def retrieve_status_page(user, password, url):
    try:
        ssl._create_unverified_context
    except AttributeError:
        pass
    else:
        ssl._create_default_https_context = ssl._create_unverified_context

    password_mgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
    password_mgr.add_password(None, url, user, password)
    handler = urllib2.HTTPBasicAuthHandler(password_mgr)
    opener = urllib2.build_opener(handler)
    urllib2.install_opener(opener)

    req = urllib2.Request(url)
    try:
        response = urllib2.urlopen(req)
        return response.read()
    except Exception:
        raise CrawlError("can't access to http://%s", url)
Example #2
Source File: githubpy.py From osint-scraper with MIT License (7 votes)
def get_access_token(self, code, state=None):
    '''
    In callback url: http://host/callback?code=123&state=xyz
    use code and state to get an access token.
    '''
    kw = dict(client_id=self._client_id, client_secret=self._client_secret, code=code)
    if self._redirect_uri:
        kw['redirect_uri'] = self._redirect_uri
    if state:
        kw['state'] = state
    opener = build_opener(HTTPSHandler)
    request = Request('https://github.com/login/oauth/access_token', data=_encode_params(kw))
    request.get_method = _METHOD_MAP['POST']
    request.add_header('Accept', 'application/json')
    try:
        response = opener.open(request, timeout=TIMEOUT)
        r = _parse_json(response.read())
        if 'error' in r:
            raise ApiAuthError(str(r.error))
        return str(r.access_token)
    except HTTPError as e:
        raise ApiAuthError('HTTPError when get access token')
Example #3
Source File: reverseip.py From sqliv with GNU General Public License v3.0 (6 votes)
def reverseip(url):
    """return domains from given the same server"""

    # get only domain name
    url = urlparse(url).netloc if urlparse(url).netloc != '' else urlparse(url).path.split("/")[0]

    source = "http://domains.yougetsignal.com/domains.php"
    useragent = useragents.get()
    contenttype = "application/x-www-form-urlencoded; charset=UTF-8"

    # POST method
    opener = urllib2.build_opener(
        urllib2.HTTPHandler(),
        urllib2.HTTPSHandler())
    data = urllib.urlencode([('remoteAddress', url), ('key', '')])

    request = urllib2.Request(source, data)
    request.add_header("Content-type", contenttype)
    request.add_header("User-Agent", useragent)

    try:
        result = urllib2.urlopen(request).read()
    except urllib2.HTTPError, e:
        print >> sys.stderr, "[{}] HTTP error".format(e.code)
Example #4
Source File: proxy.py From plugin.video.ustvvod with GNU General Public License v2.0 (6 votes)
def serveFile(self, fURL, sendData, httphandler=None, cookienum=0):
    cj = cookielib.LWPCookieJar(ustvpaths.COOKIE % str(cookienum))
    if httphandler is None:
        opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(cj))
    else:
        opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(cj), httphandler)
    request = urllib2.Request(url=fURL)
    sheaders = self.decodeHeaderString(self.headers.headers)
    del sheaders['Host']
    sheaders['User-Agent'] = 'Mozilla/5.0 (Windows NT 6.1; Win64; x64; rv:25.0) Gecko/20100101 Firefox/25.0'
    for key in sheaders:
        opener.addheaders = [(key, sheaders[key])]
    if os.path.isfile(ustvpaths.COOKIE % str(cookienum)):
        cj.load(ignore_discard=True)
        cj.add_cookie_header(request)
    response = opener.open(request, timeout=TIMEOUT)
    self.send_response(200)
    headers = response.info()
    for key in headers:
        try:
            self.send_header(key, headers[key])
        except Exception, e:
            print "Exception: ", e
            pass
Example #5
Source File: urllib2.py From ironpython2 with Apache License 2.0 (6 votes)
def urlopen(url, data=None, timeout=socket._GLOBAL_DEFAULT_TIMEOUT,
            cafile=None, capath=None, cadefault=False, context=None):
    global _opener
    if cafile or capath or cadefault:
        if context is not None:
            raise ValueError(
                "You can't pass both context and any of cafile, capath, and "
                "cadefault"
            )
        if not _have_ssl:
            raise ValueError('SSL support not available')
        context = ssl.create_default_context(purpose=ssl.Purpose.SERVER_AUTH,
                                             cafile=cafile,
                                             capath=capath)
        https_handler = HTTPSHandler(context=context)
        opener = build_opener(https_handler)
    elif context:
        https_handler = HTTPSHandler(context=context)
        opener = build_opener(https_handler)
    elif _opener is None:
        _opener = opener = build_opener()
    else:
        opener = _opener
    return opener.open(url, data, timeout)
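As the stdlib source above shows, urlopen() lazily builds and reuses a single module-level default opener, which is why many examples on this page call urllib2.install_opener() after build_opener(). A minimal sketch of that interaction, with a placeholder proxy address:

import urllib2

# build_opener() returns an OpenerDirector; install_opener() makes it the
# module-wide default used by every subsequent urllib2.urlopen() call.
proxy_handler = urllib2.ProxyHandler({'http': 'http://127.0.0.1:8080'})  # placeholder proxy
urllib2.install_opener(urllib2.build_opener(proxy_handler))
response = urllib2.urlopen('http://example.com/')  # now routed through the proxy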
Example #6
Source File: connection.py From plugin.video.ustvvod with GNU General Public License v2.0 (6 votes)
def prepare_us_proxy(cookie_handler):
    if (addon.getSetting('us_proxy_socks5') == 'true'):
        if ((addon.getSetting('us_proxy_pass') is not '') and (addon.getSetting('us_proxy_user') is not '')):
            print 'Using socks5 authenticated proxy: ' + addon.getSetting('us_proxy') + ':' + addon.getSetting('us_proxy_port')
            socks_handler = SocksiPyHandler(socks.PROXY_TYPE_SOCKS5, addon.getSetting('us_proxy'), int(addon.getSetting('us_proxy_port')), True, addon.getSetting('us_proxy_user'), addon.getSetting('us_proxy_pass'))
            opener = urllib2.build_opener(socks_handler, cookie_handler)
        else:
            print 'Using socks5 proxy: ' + addon.getSetting('us_proxy') + ':' + addon.getSetting('us_proxy_port')
            socks_handler = SocksiPyHandler(socks.PROXY_TYPE_SOCKS5, addon.getSetting('us_proxy'), int(addon.getSetting('us_proxy_port')), True)
            opener = urllib2.build_opener(socks_handler, cookie_handler)
    elif (addon.getSetting('us_proxy_socks5') == 'false'):
        us_proxy = 'http://' + addon.getSetting('us_proxy') + ':' + addon.getSetting('us_proxy_port')
        proxy_handler = urllib2.ProxyHandler({'http': us_proxy})
        if ((addon.getSetting('us_proxy_pass') is not '') and (addon.getSetting('us_proxy_user') is not '')):
            print 'Using authenticated proxy: ' + us_proxy
            password_mgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
            password_mgr.add_password(None, us_proxy, addon.getSetting('us_proxy_user'), addon.getSetting('us_proxy_pass'))
            proxy_auth_handler = urllib2.ProxyBasicAuthHandler(password_mgr)
            opener = urllib2.build_opener(proxy_handler, proxy_auth_handler, cookie_handler)
        else:
            print 'Using proxy: ' + us_proxy
            opener = urllib2.build_opener(proxy_handler, cookie_handler)
    return opener
Example #7
Source File: captcha_handler.py From PySide_For_Amazon_Order with MIT License (6 votes)
def __init__(self, user, pwd, softId="110614", softKey="469c0d8a805a40f39d3c1ec3c9281e9c",
             codeType="1004"):
    self.softId = softId
    self.softKey = softKey
    self.user = user
    self.pwd = pwd
    self.codeType = codeType
    self.uid = "100"
    self.initUrl = "http://common.taskok.com:9000/Service/ServerConfig.aspx"
    self.version = '1.1.1.2'
    self.cookieJar = cookielib.CookieJar()
    self.opener = urllib2.build_opener(
        urllib2.HTTPCookieProcessor(self.cookieJar))
    self.loginUrl = None
    self.uploadUrl = None
    self.codeUrl = None
    self.params = []
    self.uKey = None
Example #8
Source File: Youdao-Anki.py From Anki-Youdao with MIT License (6 votes)
def totalPage(self):
    self.loadedCookies = self.loadCookies()
    if not self.loadedCookies:
        return False
    # page index start from 0 end at max-1
    req = urllib2.Request('http://dict.youdao.com/wordbook/wordlist?p=0&tags=')
    opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(self.loadedCookies))
    urllib2.install_opener(opener)
    response = urllib2.urlopen(req)
    source = response.read()
    if '密码错误' in source:
        return False
    else:
        try:
            return int(re.search('<a href="wordlist.p=(.*).tags=" class="next-page">最后一页</a>', source, re.M | re.I).group(1)) - 1
        except Exception:
            return 1
Example #9
Source File: openload.py From bugatsinho.github.io with GNU General Public License v3.0 (6 votes)
def read_openload(url):
    default_headers = dict()
    default_headers["User-Agent"] = "Mozilla/5.0 AppleWebKit/537.36 (KHTML, like Gecko) Chrome/66.0.3163.100 Safari/537.36"
    default_headers["Accept"] = "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8"
    default_headers["Accept-Language"] = "es-ES,es;q=0.8,en-US;q=0.5,en;q=0.3"
    default_headers["Accept-Charset"] = "UTF-8"
    default_headers["Accept-Encoding"] = "gzip"

    cj = cookielib.MozillaCookieJar()
    request_headers = default_headers.copy()

    url = urllib.quote(url, safe="%/:=&?~#+!$,;'@()*[]")

    handlers = [urllib2.HTTPHandler(debuglevel=False)]
    handlers.append(NoRedirectHandler())
    handlers.append(urllib2.HTTPCookieProcessor(cj))
    opener = urllib2.build_opener(*handlers)

    req = urllib2.Request(url, None, request_headers)
    handle = opener.open(req, timeout=None)

    return handle.headers.dict.get('location')
Example #10
Source File: sosac.py From plugin.video.sosac.ph with GNU General Public License v2.0 (6 votes)
def probe_html5(self, result):

    class NoRedirectHandler(urllib2.HTTPRedirectHandler):

        def http_error_302(self, req, fp, code, msg, headers):
            infourl = urllib.addinfourl(fp, headers, req.get_full_url())
            infourl.status = code
            infourl.code = code
            return infourl

        http_error_300 = http_error_302
        http_error_301 = http_error_302
        http_error_303 = http_error_302
        http_error_307 = http_error_302

    opener = urllib2.build_opener(NoRedirectHandler())
    urllib2.install_opener(opener)

    r = urllib2.urlopen(urllib2.Request(result['url'], headers=result['headers']))
    if r.code == 200:
        result['url'] = r.read()
    return result
Example #11
Source File: report_util.py From spore with MIT License (6 votes)
def send_report(self, payload=None):
    if not payload:
        self.logger.debug('Timer triggered report')
        if self.msg_stack:
            payload = self.msg_stack.pop(-1)
            self.logger.debug('Timer triggered report')
        else:
            self.logger.debug('No more messages to send. Time stopped')
            self.timer.stop()
            return

    handler = urllib2.HTTPHandler()
    opener = urllib2.build_opener(handler)
    data = urllib.urlencode(payload)
    request = urllib2.Request(self.MAIL_URL, data=data)
    request.get_method = lambda: "POST"

    try:
        connection = opener.open(request)
    except urllib2.HTTPError, e:
        connection = e
Example #12
Source File: tomcat_crawler.py From agentless-system-crawler with Apache License 2.0 (6 votes)
def retrieve_status_page(hostname, port, user, password):
    statusPage = "http://%s:%s/manager/status?XML=true" % (hostname, port)

    password_mgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
    password_mgr.add_password(None, statusPage, user, password)
    handler = urllib2.HTTPBasicAuthHandler(password_mgr)
    opener = urllib2.build_opener(handler)
    urllib2.install_opener(opener)

    req = urllib2.Request(statusPage)
    try:
        response = urllib2.urlopen(req)
        return response.read()
    except Exception:
        raise CrawlError("can't access to http://%s:%s", hostname, port)
Example #13
Source File: sport365.py From bugatsinho.github.io with GNU General Public License v3.0 (6 votes)
def getUrlc(url, data=None, header={}, usecookies=True):
    cj = cookielib.LWPCookieJar()
    if usecookies:
        opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(cj))
        urllib2.install_opener(opener)
    if not header:
        header = {'User-Agent': UA}
    req = urllib2.Request(url, data, headers=header)
    try:
        response = urllib2.urlopen(req, timeout=15)
        link = response.read()
        response.close()
    except:
        link = ''
    c = ''.join(['%s=%s' % (c.name, c.value) for c in cj]) if cj else ''
    return link, c
Example #14
Source File: sport365.py From bugatsinho.github.io with GNU General Public License v3.0 (6 votes)
def getUrlrh(url, data=None, header={}, usecookies=True):
    cj = cookielib.LWPCookieJar()
    if usecookies:
        opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(cj))
        urllib2.install_opener(opener)
    if not header:
        header = {'User-Agent': UA}
    rh = {}
    req = urllib2.Request(url, data, headers=header)
    try:
        response = urllib2.urlopen(req, timeout=15)
        for k in response.headers.keys():
            rh[k] = response.headers[k]
        link = response.read()
        response.close()
    except:
        link = ''
    c = ''.join(['%s=%s' % (c.name, c.value) for c in cj]) if cj else ''
    return link, rh
Example #15
Source File: urllib2.py From GDCTSCP with GNU Affero General Public License v3.0 (6 votes)
def urlopen(url, data=None, timeout=socket._GLOBAL_DEFAULT_TIMEOUT,
            cafile=None, capath=None, cadefault=False, context=None):
    global _opener
    if cafile or capath or cadefault:
        if context is not None:
            raise ValueError(
                "You can't pass both context and any of cafile, capath, and "
                "cadefault"
            )
        if not _have_ssl:
            raise ValueError('SSL support not available')
        context = ssl.create_default_context(purpose=ssl.Purpose.SERVER_AUTH,
                                             cafile=cafile,
                                             capath=capath)
        https_handler = HTTPSHandler(context=context)
        opener = build_opener(https_handler)
    elif context:
        https_handler = HTTPSHandler(context=context)
        opener = build_opener(https_handler)
    elif _opener is None:
        _opener = opener = build_opener()
    else:
        opener = _opener
    return opener.open(url, data, timeout)
Example #16
Source File: weibologin.py From SinaMicroblog_Creeper-Spider_VerificationCode with GNU General Public License v2.0 (5 votes)
def EnableCookie(self, enableProxy):
    # "Enable cookie & proxy (if needed)."
    cookiejar = cookielib.LWPCookieJar()  # construct cookie
    cookie_support = urllib2.HTTPCookieProcessor(cookiejar)

    if enableProxy:
        proxy_support = urllib2.ProxyHandler({'http': 'http://xxxxx.pac'})  # use proxy
        opener = urllib2.build_opener(proxy_support, cookie_support, urllib2.HTTPHandler)
        print ("Proxy enabled")
    else:
        opener = urllib2.build_opener(cookie_support, urllib2.HTTPHandler)

    urllib2.install_opener(opener)  # construct cookie's opener
Example #17
Source File: weibo_util.py From hexo_weibo_image with MIT License (5 votes)
def login(form_data):
    url = 'http://login.sina.com.cn/sso/login.php?client=ssologin.js(v1.4.18)'
    headers = ('User-Agent', 'Mozilla/5.0 (Windows NT 6.3; WOW64; rv:41.0) Gecko/20100101 Firefox/41.0')
    cookie = cookielib.MozillaCookieJar(cookie_file)
    handler = urllib2.HTTPCookieProcessor(cookie)
    opener = urllib2.build_opener(handler)
    opener.addheaders.append(headers)
    req = opener.open(url, form_data)
    redirect_result = req.read()
    login_pattern = r'location.replace\(\'(.*?)\'\)'
    login_url = re.search(login_pattern, redirect_result).group(1)
    opener.open(login_url).read()
    cookie.save(cookie_file, ignore_discard=True, ignore_expires=True)
Example #18
Source File: weibo_util.py From hexo_weibo_image with MIT License (5 votes)
def request_image_url(image_path):
    cookie = cookielib.MozillaCookieJar()
    cookie.load(cookie_file, ignore_expires=True, ignore_discard=True)
    opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(cookie))
    image_url = 'http://picupload.service.weibo.com/interface/pic_upload.php?mime=image%2Fjpeg&data=base64&url=0&markpos=1&logo=&nick=0&marks=1&app=miniblog'
    b = base64.b64encode(file(image_path).read())
    data = urllib.urlencode({'b64_data': b})
    result = opener.open(image_url, data).read()
    result = re.sub(r"<meta.*</script>", "", result, flags=re.S)
    image_result = json.loads(result)
    image_id = image_result.get('data').get('pics').get('pic_1').get('pid')
    return 'http://ww3.sinaimg.cn/large/%s' % image_id
Example #19
Source File: pipstrap.py From sugardough with Apache License 2.0 (5 votes)
def hashed_download(url, temp, digest):
    """Download ``url`` to ``temp``, make sure it has the SHA-256 ``digest``,
    and return its path."""
    # Based on pip 1.4.1's URLOpener but with cert verification removed
    def opener():
        opener = build_opener(HTTPSHandler())
        # Strip out HTTPHandler to prevent MITM spoof:
        for handler in opener.handlers:
            if isinstance(handler, HTTPHandler):
                opener.handlers.remove(handler)
        return opener

    def read_chunks(response, chunk_size):
        while True:
            chunk = response.read(chunk_size)
            if not chunk:
                break
            yield chunk

    response = opener().open(url)
    path = join(temp, urlparse(url).path.split('/')[-1])
    actual_hash = sha256()
    with open(path, 'wb') as file:
        for chunk in read_chunks(response, 4096):
            file.write(chunk)
            actual_hash.update(chunk)

    actual_digest = actual_hash.hexdigest()
    if actual_digest != digest:
        raise HashError(url, path, actual_digest, digest)
    return path
Example #20
Source File: urllib2.py From ironpython2 with Apache License 2.0 (5 votes)
def build_opener(*handlers):
    """Create an opener object from a list of handlers.

    The opener will use several default handlers, including support
    for HTTP, FTP and when applicable, HTTPS.

    If any of the handlers passed as arguments are subclasses of the
    default handlers, the default handlers will not be used.
    """
    import types
    def isclass(obj):
        return isinstance(obj, (types.ClassType, type))

    opener = OpenerDirector()
    default_classes = [ProxyHandler, UnknownHandler, HTTPHandler,
                       HTTPDefaultErrorHandler, HTTPRedirectHandler,
                       FTPHandler, FileHandler, HTTPErrorProcessor]
    if hasattr(httplib, 'HTTPS'):
        default_classes.append(HTTPSHandler)
    skip = set()
    for klass in default_classes:
        for check in handlers:
            if isclass(check):
                if issubclass(check, klass):
                    skip.add(klass)
            elif isinstance(check, klass):
                skip.add(klass)
    for klass in skip:
        default_classes.remove(klass)

    for klass in default_classes:
        opener.add_handler(klass())

    for h in handlers:
        if isclass(h):
            h = h()
        opener.add_handler(h)
    return opener
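The skip logic above explains a subtlety several examples on this page rely on: any handler you pass that is an instance or subclass of a default handler replaces that default rather than being added alongside it. A minimal sketch, assuming Python 2.7.9+ (where HTTPSHandler accepts a context) and a placeholder CA bundle path:

import ssl
import urllib2

# Because this HTTPSHandler instance matches the default HTTPSHandler class,
# build_opener() skips the default and uses ours, so the custom SSL context
# governs all HTTPS requests made through this opener.
context = ssl.create_default_context(cafile='/path/to/ca-bundle.crt')  # placeholder path
opener = urllib2.build_opener(urllib2.HTTPSHandler(context=context))
response = opener.open('https://example.com/')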
Example #21
Source File: main.py From seu-jwc-catcher with MIT License (5 votes)
def get_verifycode():
    global cookie
    cookie = cookielib.LWPCookieJar()
    opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(cookie), urllib2.HTTPHandler)
    urllib2.install_opener(opener)
    img = urllib2.urlopen('http://xk.urp.seu.edu.cn/jw_css/getCheckCode', timeout=8)
    f = open('verifycode.jpg', 'wb')
    f.write(img.read())
    f.close()
    return 0
Example #22
Source File: __init__.py From script.module.inputstreamhelper with MIT License (5 votes)
def __init__(self, protocol, drm=None):
    """Initialize InputStream Helper class"""
    self.protocol = protocol
    self.drm = drm

    from platform import uname
    log(0, 'Platform information: {uname}', uname=uname())

    if self.protocol not in config.INPUTSTREAM_PROTOCOLS:
        raise InputStreamException('UnsupportedProtocol')

    self.inputstream_addon = config.INPUTSTREAM_PROTOCOLS[self.protocol]

    if self.drm:
        if self.drm not in config.DRM_SCHEMES:
            raise InputStreamException('UnsupportedDRMScheme')
        self.drm = config.DRM_SCHEMES[drm]

    # Add proxy support to HTTP requests
    proxies = get_proxies()
    if proxies:
        try:  # Python 3
            from urllib.request import build_opener, install_opener, ProxyHandler
        except ImportError:  # Python 2
            from urllib2 import build_opener, install_opener, ProxyHandler
        install_opener(build_opener(ProxyHandler(proxies)))
Example #23
Source File: getobj.py From spider with Apache License 2.0 (5 votes)
def __init__(self, url):
    cookie_jar = cookielib.LWPCookieJar()
    cookie = urllib2.HTTPCookieProcessor(cookie_jar)
    self.opener = urllib2.build_opener(cookie)
    user_agent = "Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/51.0.2704.84 Safari/537.36"
    self.url = url
    self.send_headers = {'User-Agent': user_agent}
Example #24
Source File: githubpy.py From osint-scraper with MIT License (5 votes)
def _http(self, _method, _path, **kw):
    data = None
    params = None
    if _method == 'GET' and kw:
        _path = '%s?%s' % (_path, _encode_params(kw))
    if _method in ['POST', 'PATCH', 'PUT']:
        data = bytes(_encode_json(kw), 'utf-8')
    url = '%s%s' % (_URL, _path)
    opener = build_opener(HTTPSHandler)
    request = Request(url, data=data)
    request.get_method = _METHOD_MAP[_method]
    if self._authorization:
        request.add_header('Authorization', self._authorization)
    if _method in ['POST', 'PATCH', 'PUT']:
        request.add_header('Content-Type', 'application/x-www-form-urlencoded')
    try:
        response = opener.open(request, timeout=TIMEOUT)
        is_json = self._process_resp(response.headers)
        if is_json:
            return _parse_json(response.read().decode('utf-8'))
    except HTTPError as e:
        is_json = self._process_resp(e.headers)
        if is_json:
            json = _parse_json(e.read().decode('utf-8'))
        else:
            json = e.read().decode('utf-8')
        req = JsonObject(method=_method, url=url)
        resp = JsonObject(code=e.code, json=json)
        if resp.code == 404:
            raise ApiNotFoundError(url, req, resp)
        raise ApiError(url, req, resp)
Example #25
Source File: xmlbuilder.py From meddle with MIT License (5 votes)
def _create_opener(self):
    import urllib2
    return urllib2.build_opener()
Example #26
Source File: urllib2.py From meddle with MIT License (5 votes)
def build_opener(*handlers):
    """Create an opener object from a list of handlers.

    The opener will use several default handlers, including support
    for HTTP, FTP and when applicable, HTTPS.

    If any of the handlers passed as arguments are subclasses of the
    default handlers, the default handlers will not be used.
    """
    import types
    def isclass(obj):
        return isinstance(obj, (types.ClassType, type))

    opener = OpenerDirector()
    default_classes = [ProxyHandler, UnknownHandler, HTTPHandler,
                       HTTPDefaultErrorHandler, HTTPRedirectHandler,
                       FTPHandler, FileHandler, HTTPErrorProcessor]
    if hasattr(httplib, 'HTTPS'):
        default_classes.append(HTTPSHandler)
    skip = set()
    for klass in default_classes:
        for check in handlers:
            if isclass(check):
                if issubclass(check, klass):
                    skip.add(klass)
            elif isinstance(check, klass):
                skip.add(klass)
    for klass in skip:
        default_classes.remove(klass)

    for klass in default_classes:
        opener.add_handler(klass())

    for h in handlers:
        if isclass(h):
            h = h()
        opener.add_handler(h)
    return opener
Example #27
Source File: geoiputils.py From ivre with GNU General Public License v3.0 (5 votes)
def download_all(verbose=False):
    utils.makedirs(config.GEOIP_PATH)
    opener = build_opener()
    opener.addheaders = [('User-agent',
                          'IVRE/%s +https://ivre.rocks/' % VERSION)]
    for fname, url in viewitems(config.IPDATA_URLS):
        if url is None:
            if not fname.startswith('GeoLite2-'):
                continue
            if fname.startswith('GeoLite2-dumps.'):
                continue
            basename, ext = fname.split('.', 1)
            url = ('https://download.maxmind.com/app/geoip_download?'
                   'edition_id=%s&suffix=%s&license_key=%s' % (
                       basename, ext, config.MAXMIND_LICENSE_KEY,
                   ))
        outfile = os.path.join(config.GEOIP_PATH, fname)
        if verbose:
            sys.stdout.write("Downloading %s to %s: " % (url, outfile))
            sys.stdout.flush()
        with open(outfile, 'wb') as wdesc:
            udesc = opener.open(url)
            wdesc.write(udesc.read())
        if verbose:
            sys.stdout.write("done.\n")
    if verbose:
        sys.stdout.write("Unpacking: ")
        sys.stdout.flush()
    for func, args, kargs in PARSERS:
        try:
            func(*args, **kargs)
        except Exception:
            utils.LOGGER.warning(
                "A parser failed: %s(%s, %s)",
                func.__name__,
                ', '.join(args),
                ', '.join('%s=%r' % k_v for k_v in viewitems(kargs)),
                exc_info=True,
            )
    if verbose:
        sys.stdout.write("done.\n")
Example #28
Source File: urllib2.py From meddle with MIT License (5 votes)
def urlopen(url, data=None, timeout=socket._GLOBAL_DEFAULT_TIMEOUT):
    global _opener
    if _opener is None:
        _opener = build_opener()
    return _opener.open(url, data, timeout)