Python urllib2.urlopen() Examples
The following are 30 code examples of urllib2.urlopen(). You can vote up the ones you like or vote down the ones you don't like, and go to the original project or source file by following the links above each example. You may also want to check out all available functions/classes of the module urllib2, or try the search function.
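Before the project examples, here is a minimal sketch of the basic pattern most of them build on: issuing a GET with urllib2.urlopen() and handling HTTPError and URLError separately. It targets Python 2 (where urllib2 lives); the URL and User-Agent string are placeholders, not taken from any of the projects below.

import urllib2

def fetch(url, timeout=10):
    """Return the response body for a GET request, or None on failure."""
    request = urllib2.Request(url, headers={'User-Agent': 'example-client/1.0'})
    try:
        response = urllib2.urlopen(request, timeout=timeout)
        return response.read()
    except urllib2.HTTPError, e:
        # The server answered with an error status (4xx/5xx).
        print 'HTTP error %s for %s' % (e.code, url)
    except urllib2.URLError, e:
        # DNS failure, refused connection, timeout, ...
        print 'URL error %s for %s' % (e.reason, url)
    return None

# Hypothetical usage:
# body = fetch('http://example.com/')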
Example #1
Source File: serializekiller.py From serializekiller with The Unlicense | 10 votes |
def jenkins(url, port):
    try:
        cli_port = False
        ctx = ssl.create_default_context()
        ctx.check_hostname = False
        ctx.verify_mode = ssl.CERT_NONE
        try:
            output = urllib2.urlopen('https://'+url+':'+port+"/jenkins/", context=ctx, timeout=8).info()
            cli_port = int(output['X-Jenkins-CLI-Port'])
        except urllib2.HTTPError, e:
            if e.getcode() == 404:
                try:
                    output = urllib2.urlopen('https://'+url+':'+port, context=ctx, timeout=8).info()
                    cli_port = int(output['X-Jenkins-CLI-Port'])
                except:
                    pass
    except:
        pass
Example #2
Source File: senddata.py From PiPark with GNU General Public License v2.0 | 7 votes |
def post_request(vals, url):
    """
    Build a post request.

    Args:
        vals: Dictionary of (field, values) for the POST request.
        url: URL to send the data to.

    Returns:
        Dictionary of JSON response or error info.
    """
    # Build the request and send to server
    data = urllib.urlencode(vals)
    try:
        request = urllib2.Request(url, data)
        response = urllib2.urlopen(request)
    except urllib2.HTTPError, err:
        return {"error": err.reason, "error_code": err.code}
Example #3
Source File: L.E.S.M.A. - Fabrica de Noobs Speedtest.py From L.E.S.M.A with Apache License 2.0 | 7 votes |
def run(self):
    request = self.request
    try:
        if ((timeit.default_timer() - self.starttime) <= self.timeout and
                not SHUTDOWN_EVENT.isSet()):
            try:
                f = urlopen(request)
            except TypeError:
                # PY24 expects a string or buffer
                # This also causes issues with Ctrl-C, but we will concede
                # for the moment that Ctrl-C on PY24 isn't immediate
                request = build_request(self.request.get_full_url(),
                                        data=request.data.read(self.size))
                f = urlopen(request)
            f.read(11)
            f.close()
            self.result = sum(self.request.data.total)
        else:
            self.result = 0
    except (IOError, SpeedtestUploadTimeout):
        self.result = sum(self.request.data.total)
Example #4
Source File: download_images.py From neural-fingerprinting with BSD 3-Clause "New" or "Revised" License | 7 votes |
def download_image(image_id, url, x1, y1, x2, y2, output_dir):
    """Downloads one image, crops it, resizes it and saves it locally."""
    output_filename = os.path.join(output_dir, image_id + '.png')
    if os.path.exists(output_filename):
        # Don't download image if it's already there
        return True

    try:
        # Download image
        url_file = urlopen(url)
        if url_file.getcode() != 200:
            return False
        image_buffer = url_file.read()

        # Crop, resize and save image
        image = Image.open(BytesIO(image_buffer)).convert('RGB')
        w = image.size[0]
        h = image.size[1]
        image = image.crop((int(x1 * w), int(y1 * h), int(x2 * w), int(y2 * h)))
        image = image.resize((299, 299), resample=Image.ANTIALIAS)
        image.save(output_filename)
    except IOError:
        return False
    return True
Example #5
Source File: diagnose.py From dynamic-training-with-apache-mxnet-on-aws with Apache License 2.0 | 7 votes |
def test_connection(name, url, timeout=10):
    """Simple connection test"""
    urlinfo = urlparse(url)
    start = time.time()
    try:
        ip = socket.gethostbyname(urlinfo.netloc)
    except Exception as e:
        print('Error resolving DNS for {}: {}, {}'.format(name, url, e))
        return
    dns_elapsed = time.time() - start

    start = time.time()
    try:
        _ = urlopen(url, timeout=timeout)
    except Exception as e:
        print("Error open {}: {}, {}, DNS finished in {} sec.".format(name, url, e, dns_elapsed))
        return
    load_elapsed = time.time() - start
    print("Timing for {}: {}, DNS: {:.4f} sec, LOAD: {:.4f} sec.".format(name, url, dns_elapsed, load_elapsed))
Example #6
Source File: can_haz_image.py From macops with Apache License 2.0 | 7 votes |
def DownloadFile(self, fileurl, dlfile):
    """Downloads a given file to a given path/filename.

    Args:
        fileurl: String with URL of file to download.
        dlfile: String with path of file to be written to.

    Raises:
        OSError: If file cannot be opened/written to, function raises OSError.
        URLError: If URL cannot be opened, function raises URLError.
    """
    if not os.path.isfile(dlfile) or dlfile == TMPINDEX:
        print 'Downloading %s ...' % fileurl
        file_to_dl = urllib2.urlopen(fileurl)
        tmpfile = open(dlfile, 'wb')
        shutil.copyfileobj(file_to_dl, tmpfile)
    else:
        print '%s exists' % dlfile
Example #7
Source File: get_genomes.py From CAMISIM with Apache License 2.0 | 6 votes |
def download_genome(genome, out_path):
    genome_path = os.path.join(out_path, "genomes")
    out_name = genome.rstrip().split('/')[-1]
    http_address = os.path.join(genome, out_name + "_genomic.fna.gz")
    opened = urllib2.urlopen(http_address)
    out = os.path.join(genome_path, out_name + ".fa")
    tmp_out = os.path.join(genome_path, out_name + "tmp.fa")
    out_gz = out + ".gz"
    with open(out_gz, 'wb') as outF:
        outF.write(opened.read())
    gf = gzip.open(out_gz)
    new_out = open(tmp_out, 'wb')
    new_out.write(gf.read())
    gf.close()
    os.remove(out_gz)
    new_out.close()
    split_by_N(tmp_out, out)
    return out
Example #8
Source File: fels.py From fetchLandsatSentinelFromGoogleCloud with MIT License | 6 votes |
def download_metadata_file(url, outputdir, program):
    """Download and unzip the catalogue files."""
    zipped_index_path = os.path.join(outputdir, 'index_' + program + '.csv.gz')
    if not os.path.isfile(zipped_index_path):
        if not os.path.exists(os.path.dirname(zipped_index_path)):
            os.makedirs(os.path.dirname(zipped_index_path))
        print("Downloading Metadata file...")
        content = urlopen(url)
        with open(zipped_index_path, 'wb') as f:
            shutil.copyfileobj(content, f)
    index_path = os.path.join(outputdir, 'index_' + program + '.csv')
    if not os.path.isfile(index_path):
        print("Unzipping Metadata file...")
        with gzip.open(zipped_index_path) as gzip_index, open(index_path, 'wb') as f:
            shutil.copyfileobj(gzip_index, f)
    return index_path
Example #9
Source File: yahoo.py From sqliv with GNU General Public License v3.0 | 6 votes |
def search(self, query, per_page=10, pages=1):
    """search urls from yahoo search"""
    # store searched urls
    urls = []

    for page in range(pages):
        yahoosearch = self.yahoosearch % (query, per_page, (pages+1)*10)

        request = urllib2.Request(yahoosearch)
        request.add_header("Content-type", self.contenttype)
        request.add_header("User-Agent", self.useragent)

        result = urllib2.urlopen(request).read()
        urls += self.parse_links(result)

    return urls
Example #10
Source File: subtake.py From subtake with GNU General Public License v2.0 | 6 votes |
def subdomain_check(subdomains):
    # Basic query
    for subd in range(len(subdomains)):
        if subd != 0:
            try:
                #print("inside query")
                for rdata in dns.resolver.query(subdomains[subd], 'CNAME'):
                    print "Checking subdomain takeover on: "+str(subdomains[subd])
                    try:
                        #response = urlopen("http://"+str(rdata.target))
                        response = urlopen("http://"+str(subdomains[subd]))
                        print(R+str(subdomains[subd])+" seems Up and running fine")
                    except:
                        print(S+"Success!!! Possible sub-domain takeover on: "+str(subdomains[subd])+E)
            except:
                print(R+"No CNAME for "+str(subdomains[subd])+" i.e. subdomain takeover not Possible")
Example #11
Source File: metadata.py From awesome-zio with Apache License 2.0 | 6 votes |
def query(owner, name):
    if fake:
        print ' {0}/{1}: ok'.format(owner, name)
        return (random.randint(1, 1000), random.randint(1, 300))
    else:
        try:
            req = urllib2.Request('https://api.github.com/repos/{0}/{1}'.format(owner, name))
            if user is not None and token is not None:
                b64 = base64.encodestring('{0}:{1}'.format(user, token)).replace('\n', '')
                req.add_header("Authorization", "Basic {0}".format(b64))
            u = urllib2.urlopen(req)
            j = json.load(u)
            t = datetime.datetime.strptime(j['updated_at'], "%Y-%m-%dT%H:%M:%SZ")
            days = max(int((now - t).days), 0)
            print ' {0}/{1}: ok'.format(owner, name)
            return (int(j['stargazers_count']), days)
        except urllib2.HTTPError, e:
            print ' {0}/{1}: FAILED'.format(owner, name)
            return (None, None)
Example #12
Source File: reverseip.py From sqliv with GNU General Public License v3.0 | 6 votes |
def reverseip(url):
    """return domains hosted on the same server"""

    # get only domain name
    url = urlparse(url).netloc if urlparse(url).netloc != '' else urlparse(url).path.split("/")[0]

    source = "http://domains.yougetsignal.com/domains.php"
    useragent = useragents.get()
    contenttype = "application/x-www-form-urlencoded; charset=UTF-8"

    # POST method
    opener = urllib2.build_opener(
        urllib2.HTTPHandler(), urllib2.HTTPSHandler())
    data = urllib.urlencode([('remoteAddress', url), ('key', '')])

    request = urllib2.Request(source, data)
    request.add_header("Content-type", contenttype)
    request.add_header("User-Agent", useragent)

    try:
        result = urllib2.urlopen(request).read()
    except urllib2.HTTPError, e:
        print >> sys.stderr, "[{}] HTTP error".format(e.code)
Example #13
Source File: krasfs.py From tdw with GNU General Public License v3.0 | 6 votes |
def upd(category, sort, str):
    post = urllib.urlencode({'checkbox_ftp':'on', 'checkbox_tor':'on', 'word':str})
    request = urllib2.Request('http://krasfs.ru/search.php?key=newkey')  # url, post)
    request.add_header('User-Agent', 'Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 5.1; Trident/4.0; Mozilla/4.0 (compatible; MSIE 6.0; Windows NT 5.1; SV1) ; .NET CLR 1.1.4322; .NET CLR 2.0.50727; .NET CLR 3.0.4506.2152; .NET CLR 3.5.30729; .NET4.0C)')
    request.add_header('Host', 'www.krasfs.ru')
    request.add_header('Accept', '*/*')
    request.add_header('Accept-Language', 'ru-RU')
    request.add_header('Referer', 'http://www.krasfs.ru')
    try:
        f = urllib2.urlopen(request)
        html = f.read()
        html = html.replace(chr(10), "")
        n = html.find("<newkey>")
        k = html.find("</newkey>")
        key = html[n+8:k]
    except IOError, e:
        if hasattr(e, 'reason'):
            print 'We failed to reach a server. Reason: ' + e.reason
        elif hasattr(e, 'code'):
            print 'The server couldn\'t fulfill the request. Error code: ' + e.code
        key = "59165b78-bf91-11e1-86bf-c6ab051766ba"
Example #14
Source File: metadata.py From awesome-scala with Apache License 2.0 | 6 votes |
def query(owner, name):
    if fake:
        print(" {0}/{1}: ok".format(owner, name))
        return (random.randint(1, 1000), random.randint(1, 300))
    else:
        try:
            req = urllib2.Request(
                "https://api.github.com/repos/{0}/{1}".format(owner, name)
            )
            if user is not None and token is not None:
                b64 = base64.encodestring("{0}:{1}".format(user, token)).replace(
                    "\n", ""
                )
                req.add_header("Authorization", "Basic {0}".format(b64))
            u = urllib2.urlopen(req)
            j = json.load(u)
            t = datetime.datetime.strptime(j["updated_at"], "%Y-%m-%dT%H:%M:%SZ")
            days = max(int((now - t).days), 0)
            print(" {0}/{1}: ok".format(owner, name))
            return (int(j["stargazers_count"]), days)
        except urllib2.HTTPError as e:
            print(" {0}/{1}: FAILED".format(owner, name))
            return (None, None)
Example #15
Source File: build.py From llvm-zorg with Apache License 2.0 | 6 votes |
def http_download(url, dest):
    """Safely download url to dest.

    Print error and exit if download fails.
    """
    try:
        print("GETting", url, "to", dest, "...", end=' ')
        f = urlopen(url)
        # Open our local file for writing
        with open(dest, "wb") as local_file:
            local_file.write(f.read())
    except HTTPError as e:
        print()
        print("HTTP Error:", e.code, url)
        sys.exit(1)
    except URLError as e:
        print()
        print("URL Error:", e.reason, url)
        sys.exit(1)
    print("done.")
Example #16
Source File: update.py From CAMISIM with Apache License 2.0 | 6 votes |
def verifyChecksumForDir(dirName, settings, type):
    """
    For each file listed in a list, verifies its checksum.
    """
    dirName = os.path.join(settings.getLocalDst(type), dirName)
    urlChecksumFile = settings.getRemoteSrc() + '/' + os.path.basename(dirName) + '.checksum'
    print("Verification of the decompressed directory '%s' started." % dirName)
    try:
        for line in urllib2.urlopen(urlChecksumFile):
            f, checksum = line.split('\t')
            f = os.path.join(dirName, f)
            # if checksum.strip() != hashlib.md5(open(f).read()).hexdigest():
            if checksum.strip() != getChecksumForFile(f):
                raise Exception("File '%s' is corrupted, it has a wrong checksum." % f)
    except Exception as e:
        print("Unable to verify directory: %s" % dirName)
        raise e
    print("Checksum verification completed successfully!")
Example #17
Source File: utils.py From script.module.inputstreamhelper with MIT License | 6 votes |
def _http_request(url, headers=None, time_out=10):
    """Perform an HTTP request and return request"""
    log(0, 'Request URL: {url}', url=url)

    try:
        if headers:
            request = Request(url, headers=headers)
        else:
            request = Request(url)
        req = urlopen(request, timeout=time_out)
        log(0, 'Response code: {code}', code=req.getcode())
        if 400 <= req.getcode() < 600:
            raise HTTPError('HTTP %s Error for url: %s' % (req.getcode(), url), response=req)
    except (HTTPError, URLError) as err:
        log(2, 'Download failed with error {}'.format(err))
        if yesno_dialog(localize(30004), '{line1}\n{line2}'.format(line1=localize(30063), line2=localize(30065))):  # Internet down, try again?
            return _http_request(url, headers, time_out)
        return None

    return req
Example #18
Source File: fetch-dump1090-max-range.py From dump1090-tools with ISC License | 6 votes |
def get_max_range(baseurl):
    with closing(urlopen(baseurl + '/data/receiver.json', None, 5.0)) as f:
        receiver = json.load(f)

    if not (receiver.has_key('lat') and receiver.has_key('lon')):
        return None

    rlat = receiver['lat']
    rlon = receiver['lon']
    maxrange = None

    with closing(urlopen(baseurl + '/data/aircraft.json', None, 5.0)) as f:
        aircraft = json.load(f)
        for ac in aircraft['aircraft']:
            if ac.has_key('seen_pos') and ac['seen_pos'] < 300:
                alat = ac['lat']
                alon = ac['lon']
                distance = greatcircle(rlat, rlon, alat, alon)
                if maxrange is None or distance > maxrange:
                    maxrange = distance

    return maxrange
Example #19
Source File: OTPpnAmgc.py From OpenTrader with GNU Lesser General Public License v3.0 | 6 votes |
def lPullYahooToTxtfile(sSymbol):
    '''
    Use this to dynamically pull a sSymbol:
    '''
    try:
        print 'Currently Pulling', sSymbol
        print str(datetime.datetime.fromtimestamp(int(time.time())).strftime('%Y-%m-%d %H:%M:%S'))
        # Keep in mind this is close high low open, lol.
        urlToVisit = 'http://chartapi.finance.yahoo.com/instrument/1.0/'+sSymbol+'/chartdata;type=quote;range=10y/csv'
        lStockLines = []
        try:
            sourceCode = urllib2.urlopen(urlToVisit).read()
            splitSource = sourceCode.split('\n')
            for eachLine in splitSource:
                splitLine = eachLine.split(',')
                if len(splitLine) == 6:
                    if 'values' not in eachLine:
                        lStockLines.append(eachLine)
            return lStockLines
        except Exception as e:
            print str(e), 'failed to organize pulled data.'
    except StandardError, e:
        print str(e), 'failed to pull pricing data'
Example #20
Source File: get_s3_stats.py From hsds with Apache License 2.0 | 6 votes |
def get_remote_info_json(jfname):
    try:
        logging.info('loading example ' + jfname)
        rfo = urllib.urlopen(jfname)
        di = json.loads(rfo.read())
        nat, glbs = 0, 0
        for k, v in di.items():
            if k != 'dimensions' or k != 'variables':
                glbs += 1
        for k, v in di['variables'].items():
            for a in v:
                nat += 1
        dims = [l for k, v in di['dimensions'].items() for d, l in v.items() if d == 'length']
        return {'num global attr': glbs,
                'num vars': len(di['variables'].keys()),
                'num dims': len(di['dimensions'].keys()),
                'ave attrs per var': nat / len(di['variables'].keys()),
                'dims sizes': dims}
    except Exception, e:
        logging.warn("WARN get_remote_info_json on %s : %s, update S3 bucket" % (jfname, str(e)))
        return {}

#---------------------------------------------------------------------------------
Example #21
Source File: get_s3_stats.py From hsds with Apache License 2.0 | 5 votes |
def get_json(invlink):
    logging.info("getting inventory from %s" % (invlink))
    jdata = json.loads(urllib2.urlopen(invlink).read())
    return jdata

#---------------------------------------------------------------------------------
Example #22
Source File: patch.py From toonapilib with MIT License | 5 votes |
def fromurl(url):
    """ Parse patch from a URL, return False if an error occurred.

        Note that this also can throw urlopen() exceptions.
    """
    ps = PatchSet(urllib_request.urlopen(url))
    if ps.errors == 0:
        return ps
    return False


# --- Utility functions ---
# [ ] reuse more universal pathsplit()
Example #23
Source File: utils.py From plugin.video.kmediatorrent with GNU General Public License v3.0 | 5 votes |
def url_get(url, params={}, headers={}, with_immunicity=True):
    import urllib2
    from contextlib import closing
    from kmediatorrent import plugin
    from kmediatorrent.common import USER_AGENT

    if params:
        import urllib
        url = "%s?%s" % (url, urllib.urlencode(params))

    req = urllib2.Request(url)
    req.add_header("User-Agent", USER_AGENT)
    for k, v in headers.items():
        req.add_header(k, v)

    if with_immunicity and plugin.get_setting("immunicity", bool):
        from kmediatorrent import immunicity
        proxy = immunicity.get_proxy_for(url)
        if proxy:
            from urlparse import urlsplit
            parts = urlsplit(url)
            req.set_proxy(proxy, parts[0])

    try:
        with closing(urllib2.urlopen(req)) as response:
            data = response.read()
            if response.headers.get("Content-Encoding", "") == "gzip":
                import zlib
                return zlib.decompressobj(16 + zlib.MAX_WBITS).decompress(data)
            return data
    except urllib2.HTTPError:
        return None
Example #24
Source File: immunicity.py From plugin.video.kmediatorrent with GNU General Public License v3.0 | 5 votes |
def config():
    global _config
    if not _config:
        with shelf("kmediatorrent.immunicity.pac_config", ttl=CACHE) as pac_config:
            plugin.log.info("Fetching Immunicity PAC file")
            pac_data = urllib2.urlopen(PAC_URL).read()
            pac_config["server"] = re.search(r"var proxyserver = '(.*)'", pac_data).group(1)
            pac_config["domains"] = map(lambda x: x.replace(r"\Z(?ms)", ""), map(fnmatch.translate, re.findall(r"\"(.*?)\",", pac_data)))
            _config = pac_config
    return _config
Example #25
Source File: utils.py From DETAD with MIT License | 5 votes |
def get_blocked_videos(api=API):
    api_url = '{}?action=get_blocked'.format(api)
    req = urllib2.Request(api_url)
    response = urllib2.urlopen(req)
    return json.loads(response.read())
Example #26
Source File: call_service_control.py From endpoints-tools with Apache License 2.0 | 5 votes |
def call_report(access_token, operation_id, args):
    headers = {"Authorization": "Bearer {}".format(access_token),
               "X-Cloud-Trace-Context": "{};o=1".format(operation_id),
               "Content-Type": "application/json"}
    url = "https://servicecontrol.googleapis.com/v1/services/{}:report".format(args.service_name)
    data_obj = {"service_name": args.service_name,
                "operations": [{
                    "operation_id": operation_id,
                    "operation_name": "/echo",
                    "consumer_id": "api_key:{}".format(args.api_key),
                    "start_time": {
                        "seconds": int(time.time())
                    },
                    "end_time": {
                        "seconds": int(time.time())
                    }
                }]
               }
    data = json.dumps(data_obj)

    t0 = time.time()
    try:
        request = urllib2.Request(url, data, headers)
        response = urllib2.urlopen(request)
        trace_id = response.info().getheader("X-GOOG-TRACE-ID")
        # print "response: {}".format(response.info())
    except urllib2.HTTPError as e:
        print "{} Check failed code: {}, error {}".format(time.ctime(), e.code, e.reason)
        return

    latency = time.time() - t0
    if trace_id and (latency >= 15.0):
        print "{}: report big latency {}, trace_id: {} operation_id: {}".format(time.ctime(), latency, trace_id, operation_id)
Example #27
Source File: ez_setup.py From iAI with MIT License | 5 votes |
def download_file_insecure(url, target):
    """Use Python to download the file, without connection authentication."""
    src = urlopen(url)
    try:
        # Read all the data in one block.
        data = src.read()
    finally:
        src.close()

    # Write all the data in one block to avoid creating a partial file.
    with open(target, "wb") as dst:
        dst.write(data)
Example #28
Source File: serializekiller.py From serializekiller with The Unlicense | 5 votes |
def jboss(url, port, retry=False):
    try:
        ctx = ssl.create_default_context()
        ctx.check_hostname = False
        ctx.verify_mode = ssl.CERT_NONE
        output = urllib2.urlopen(
            'https://' + url + ':' + port + "/invoker/JMXInvokerServlet", context=ctx, timeout=8).read()
    except:
        try:
            output = urllib2.urlopen(
                'http://' + url + ':' + port + "/invoker/JMXInvokerServlet", timeout=8).read()
        except:
            # OK. I give up.
            return False
    if "\xac\xed\x00\x05" in output:
        mutex.acquire()
        print " - (possibly) Vulnerable JBOSS: " + url + " (" + port + ")"
        saveToFile('[+] JBoss: ' + ':' + port + '\n')
        mutex.release()
        return True
    return False
Example #29
Source File: call_service_control.py From endpoints-tools with Apache License 2.0 | 5 votes |
def call_check(access_token, operation_id, args):
    headers = {"Authorization": "Bearer {}".format(access_token),
               "X-Cloud-Trace-Context": "{};o=1".format(operation_id),
               "Content-Type": "application/json"}
    url = "https://servicecontrol.googleapis.com/v1/services/{}:check".format(args.service_name)
    data_obj = {"service_name": args.service_name,
                "operation": {
                    "operation_id": operation_id,
                    "operation_name": "/echo",
                    "consumer_id": "api_key:{}".format(args.api_key),
                    "start_time": {
                        "seconds": int(time.time())
                    }
                }
               }
    data = json.dumps(data_obj)

    t0 = time.time()
    try:
        request = urllib2.Request(url, data, headers)
        response = urllib2.urlopen(request)
        trace_id = response.info().getheader("X-GOOG-TRACE-ID")
        # print "response: {}".format(response.info())
    except urllib2.HTTPError as e:
        print "{} Check failed code: {}, error {}".format(time.ctime(), e.code, e.reason)
        return

    latency = time.time() - t0
    if trace_id and (latency >= 5.0):
        print "{}: check big latency {}, trace_id: {} operation_id: {}".format(time.ctime(), latency, trace_id, operation_id)
Example #30
Source File: push.py From RF-Monitor with GNU General Public License v2.0 | 5 votes |
def __send(self, uri, data):
    req = urllib2.Request(uri)
    req.add_header('Content-Type', 'application/json')
    event = None
    try:
        urllib2.urlopen(req, data)
    except ValueError as error:
        event = Event(Events.PUSH_ERROR, msg=error.message)
    except URLError as error:
        event = Event(Events.PUSH_ERROR, msg=error.reason.strerror)

    if event is not None:
        self._failed.append(data)
        post_event(self._handler, event)