Python twisted.web.client.downloadPage() Examples
The following are 30 code examples of twisted.web.client.downloadPage(). You can vote up the ones you like or vote down the ones you don't like, and go to the original project or source file by following the links above each example. You may also want to check out all available functions/classes of the module twisted.web.client, or try the search function.
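As a quick orientation before the examples, here is a minimal, self-contained sketch of the basic call: downloadPage(url, file) streams the response body into file (a path or a file-like object) and returns a Deferred that fires once the download finishes. The URL and output filename below are placeholders, not taken from any of the examples.

from twisted.internet import reactor
from twisted.web.client import downloadPage

def saved(ignored):
    # downloadPage fires its Deferred with None on success.
    print("download finished")
    reactor.stop()

def failed(failure):
    failure.printTraceback()
    reactor.stop()

# Placeholder URL and filename; recent Twisted versions expect the URL as
# bytes and emit a DeprecationWarning for downloadPage itself.
d = downloadPage(b"http://example.com/", "example.html")
d.addCallbacks(saved, failed)
reactor.run()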
Example #1
Source File: test_webclient.py From Safejumper-for-Desktop with GNU General Public License v2.0 | 6 votes |
def test_downloadPageBrokenDownload(self):
    """
    If the connection is closed before the number of bytes indicated by
    I{Content-Length} have been received, the L{Deferred} returned by
    L{downloadPage} fails with L{PartialDownloadError}.
    """
    # test what happens when download gets disconnected in the middle
    path = FilePath(self.mktemp())
    d = client.downloadPage(self.getURL("broken"), path.path)
    d = self.assertFailure(d, client.PartialDownloadError)

    def checkResponse(response):
        """
        The HTTP status code from the server is propagated through the
        C{PartialDownloadError}.
        """
        self.assertEqual(response.status, b"200")
        self.assertEqual(response.message, b"OK")
        return response
    d.addCallback(checkResponse)

    def cbFailed(ignored):
        self.assertEqual(path.getContent(), b"abc")
    d.addCallback(cbFailed)
    return d
Example #2
Source File: test_webclient.py From python-for-android with Apache License 2.0 | 6 votes |
def test_downloadTimeout(self):
    """
    If the timeout indicated by the C{timeout} parameter to
    L{client.HTTPDownloader.__init__} elapses without the complete response
    being received, the L{defer.Deferred} returned by
    L{client.downloadPage} fires with a L{Failure} wrapping a
    L{defer.TimeoutError}.
    """
    self.cleanupServerConnections = 2

    # Verify the behavior if no bytes are ever written.
    first = client.downloadPage(
        self.getURL("wait"),
        self.mktemp(), timeout=0.01)

    # Verify the behavior if some bytes are written but then the request
    # never completes.
    second = client.downloadPage(
        self.getURL("write-then-wait"),
        self.mktemp(), timeout=0.01)

    return defer.gatherResults([
        self.assertFailure(first, defer.TimeoutError),
        self.assertFailure(second, defer.TimeoutError)])
Example #3
Source File: test_webclient.py From python-for-android with Apache License 2.0 | 6 votes |
def test_downloadPageLogsFileCloseError(self):
    """
    If there is an exception closing the file being written to after the
    connection is prematurely closed, that exception is logged.
    """
    class BrokenFile:
        def write(self, bytes):
            pass

        def close(self):
            raise IOError(ENOSPC, "No file left on device")

    d = client.downloadPage(self.getURL("broken"), BrokenFile())
    d = self.assertFailure(d, client.PartialDownloadError)

    def cbFailed(ignored):
        self.assertEquals(len(self.flushLoggedErrors(IOError)), 1)
    d.addCallback(cbFailed)
    return d
Example #4
Source File: test_webclient.py From python-for-android with Apache License 2.0 | 6 votes |
def test_downloadPageBrokenDownload(self):
    """
    If the connection is closed before the number of bytes indicated by
    I{Content-Length} have been received, the L{Deferred} returned by
    L{downloadPage} fails with L{PartialDownloadError}.
    """
    # test what happens when download gets disconnected in the middle
    path = FilePath(self.mktemp())
    d = client.downloadPage(self.getURL("broken"), path.path)
    d = self.assertFailure(d, client.PartialDownloadError)

    def checkResponse(response):
        """
        The HTTP status code from the server is propagated through the
        C{PartialDownloadError}.
        """
        self.assertEquals(response.status, "200")
        self.assertEquals(response.message, "OK")
        return response
    d.addCallback(checkResponse)

    def cbFailed(ignored):
        self.assertEquals(path.getContent(), "abc")
    d.addCallback(cbFailed)
    return d
Example #5
Source File: test_webclient.py From learn_python3_spider with MIT License | 6 votes |
def test_downloadPageDeprecated(self):
    """
    L{client.downloadPage} is deprecated.
    """
    port = reactor.listenTCP(
        0, server.Site(Data(b'', 'text/plain')), interface="127.0.0.1")
    portno = port.getHost().port
    self.addCleanup(port.stopListening)
    url = networkString("http://127.0.0.1:%d" % (portno,))

    path = FilePath(self.mktemp())
    d = client.downloadPage(url, path.path)

    warningInfo = self.flushWarnings([self.test_downloadPageDeprecated])
    self.assertEqual(len(warningInfo), 1)
    self.assertEqual(warningInfo[0]['category'], DeprecationWarning)
    self.assertEqual(
        warningInfo[0]['message'],
        "twisted.web.client.downloadPage was deprecated in "
        "Twisted 16.7.0; please use https://pypi.org/project/treq/ "
        "or twisted.web.client.Agent instead")

    return d.addErrback(lambda _: None)
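The deprecation message in this example points to treq or twisted.web.client.Agent as replacements. The following is a minimal sketch of an Agent-based download, not taken from any of the projects above; note that readBody buffers the whole response in memory, unlike downloadPage, which streams into the target file, and the url/path arguments are placeholders.

from twisted.internet import defer, reactor
from twisted.web.client import Agent, readBody

@defer.inlineCallbacks
def download_with_agent(url, path):
    # Roughly equivalent to downloadPage(url, path), but the body is
    # collected in memory before being written out.
    agent = Agent(reactor)
    response = yield agent.request(b"GET", url)
    body = yield readBody(response)
    with open(path, "wb") as f:
        f.write(body)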
Example #6
Source File: test_webclient.py From learn_python3_spider with MIT License | 6 votes |
def test_downloadTimeoutsWorkWithoutReading(self):
    """
    If the timeout indicated by the C{timeout} parameter to
    L{client.HTTPDownloader.__init__} elapses without the complete response
    being received, the L{defer.Deferred} returned by
    L{client.downloadPage} fires with a L{Failure} wrapping a
    L{defer.TimeoutError}, even if the remote peer isn't reading data from
    the socket.
    """
    self.cleanupServerConnections = 1

    # The timeout here needs to be slightly longer to give the resource a
    # chance to stop the reading.
    d = client.downloadPage(
        self.getURL("never-read"), self.mktemp(), timeout=0.05)
    return self.assertFailure(d, defer.TimeoutError)
Example #7
Source File: test_webclient.py From learn_python3_spider with MIT License | 6 votes |
def test_downloadTimeout(self):
    """
    If the timeout indicated by the C{timeout} parameter to
    L{client.HTTPDownloader.__init__} elapses without the complete response
    being received, the L{defer.Deferred} returned by
    L{client.downloadPage} fires with a L{Failure} wrapping a
    L{defer.TimeoutError}.
    """
    self.cleanupServerConnections = 2

    # Verify the behavior if no bytes are ever written.
    first = client.downloadPage(
        self.getURL("wait"),
        self.mktemp(), timeout=0.01)

    # Verify the behavior if some bytes are written but then the request
    # never completes.
    second = client.downloadPage(
        self.getURL("write-then-wait"),
        self.mktemp(), timeout=0.01)

    return defer.gatherResults([
        self.assertFailure(first, defer.TimeoutError),
        self.assertFailure(second, defer.TimeoutError)])
Example #8
Source File: test_webclient.py From learn_python3_spider with MIT License | 6 votes |
def test_downloadAfterFoundGet(self):
    """
    Passing C{True} for C{afterFoundGet} to L{client.downloadPage} invokes
    the same kind of redirect handling as passing that argument to
    L{client.getPage} invokes.
    """
    url = self.getURL('extendedRedirect?code=302')

    def gotPage(page):
        self.assertEqual(
            self.extendedRedirect.lastMethod,
            b"GET",
            "With afterFoundGet, the HTTP method must change to GET")

    d = client.downloadPage(url, "downloadTemp",
        followRedirect=True, afterFoundGet=True, method=b"POST")
    d.addCallback(gotPage)
    return d
Example #9
Source File: test_webclient.py From learn_python3_spider with MIT License | 6 votes |
def test_downloadPageBrokenDownload(self):
    """
    If the connection is closed before the number of bytes indicated by
    I{Content-Length} have been received, the L{Deferred} returned by
    L{downloadPage} fails with L{PartialDownloadError}.
    """
    # test what happens when download gets disconnected in the middle
    path = FilePath(self.mktemp())
    d = client.downloadPage(self.getURL("broken"), path.path)
    d = self.assertFailure(d, client.PartialDownloadError)

    def checkResponse(response):
        """
        The HTTP status code from the server is propagated through the
        C{PartialDownloadError}.
        """
        self.assertEqual(response.status, b"200")
        self.assertEqual(response.message, b"OK")
        return response
    d.addCallback(checkResponse)

    def cbFailed(ignored):
        self.assertEqual(path.getContent(), b"abc")
    d.addCallback(cbFailed)
    return d
Example #10
Source File: io.py From riko with MIT License | 6 votes |
def async_url_open(url, timeout=0, **kwargs):
    if url.startswith('http'):
        page = NamedTemporaryFile(delete=False)
        new_url = page.name
        yield downloadPage(encode(url), page, timeout=timeout)
    else:
        page, new_url = None, url

    f = yield async_get_file(new_url, StringTransport(), **kwargs)

    if not hasattr(f, 'name') and url.startswith('file'):
        f.name = url.split('://')[1]

    if page:
        page.close()
        remove(page.name)

    return_value(f)
Example #11
Source File: test_webclient.py From Safejumper-for-Desktop with GNU General Public License v2.0 | 6 votes |
def test_downloadTimeoutsWorkWithoutReading(self):
    """
    If the timeout indicated by the C{timeout} parameter to
    L{client.HTTPDownloader.__init__} elapses without the complete response
    being received, the L{defer.Deferred} returned by
    L{client.downloadPage} fires with a L{Failure} wrapping a
    L{defer.TimeoutError}, even if the remote peer isn't reading data from
    the socket.
    """
    self.cleanupServerConnections = 1

    # The timeout here needs to be slightly longer to give the resource a
    # chance to stop the reading.
    d = client.downloadPage(
        self.getURL("never-read"), self.mktemp(), timeout=0.05)
    return self.assertFailure(d, defer.TimeoutError)
Example #12
Source File: test_webclient.py From Safejumper-for-Desktop with GNU General Public License v2.0 | 6 votes |
def test_downloadTimeout(self):
    """
    If the timeout indicated by the C{timeout} parameter to
    L{client.HTTPDownloader.__init__} elapses without the complete response
    being received, the L{defer.Deferred} returned by
    L{client.downloadPage} fires with a L{Failure} wrapping a
    L{defer.TimeoutError}.
    """
    self.cleanupServerConnections = 2

    # Verify the behavior if no bytes are ever written.
    first = client.downloadPage(
        self.getURL("wait"),
        self.mktemp(), timeout=0.01)

    # Verify the behavior if some bytes are written but then the request
    # never completes.
    second = client.downloadPage(
        self.getURL("write-then-wait"),
        self.mktemp(), timeout=0.01)

    return defer.gatherResults([
        self.assertFailure(first, defer.TimeoutError),
        self.assertFailure(second, defer.TimeoutError)])
Example #13
Source File: test_webclient.py From Safejumper-for-Desktop with GNU General Public License v2.0 | 6 votes |
def test_downloadPageDeprecated(self):
    """
    L{client.downloadPage} is deprecated.
    """
    port = reactor.listenTCP(
        0, server.Site(Data(b'', 'text/plain')), interface="127.0.0.1")
    portno = port.getHost().port
    self.addCleanup(port.stopListening)
    url = networkString("http://127.0.0.1:%d" % (portno,))

    path = FilePath(self.mktemp())
    d = client.downloadPage(url, path.path)

    warningInfo = self.flushWarnings([self.test_downloadPageDeprecated])
    self.assertEqual(len(warningInfo), 1)
    self.assertEqual(warningInfo[0]['category'], DeprecationWarning)
    self.assertEqual(
        warningInfo[0]['message'],
        "twisted.web.client.downloadPage was deprecated in "
        "Twisted 16.7.0; please use https://pypi.org/project/treq/ "
        "or twisted.web.client.Agent instead")

    return d.addErrback(lambda _: None)
Example #14
Source File: test_webclient.py From Safejumper-for-Desktop with GNU General Public License v2.0 | 6 votes |
def test_downloadAfterFoundGet(self):
    """
    Passing C{True} for C{afterFoundGet} to L{client.downloadPage} invokes
    the same kind of redirect handling as passing that argument to
    L{client.getPage} invokes.
    """
    url = self.getURL('extendedRedirect?code=302')

    def gotPage(page):
        self.assertEqual(
            self.extendedRedirect.lastMethod,
            b"GET",
            "With afterFoundGet, the HTTP method must change to GET")

    d = client.downloadPage(url, "downloadTemp",
        followRedirect=True, afterFoundGet=True, method=b"POST")
    d.addCallback(gotPage)
    return d
Example #15
Source File: config.py From bitmask-dev with GNU General Public License v3.0 | 6 votes |
def maybe_download_ca_cert(self, ignored, replace=False):
    """
    :rtype: deferred
    """
    # TODO: doesn't update the cert :((((
    enc_domain = self._domain.encode(sys.getfilesystemencoding())
    path = os.path.join(self._basedir, 'providers', enc_domain,
                        'keys', 'ca', 'cacert.pem')
    if not replace and is_file(path):
        return defer.succeed('ca_cert_path_already_exists')

    def errback(failure):
        raise NetworkError(failure.getErrorMessage())

    uri = self._get_ca_cert_uri()
    mkdir_p(os.path.split(path)[0])

    # We don't validate the TLS cert for this connection,
    # just check the fingerprint of the ca.cert
    d = downloadPage(uri, path)
    d.addCallback(self._reload_http_client)
    d.addErrback(errback)
    return d
Example #16
Source File: test_webclient.py From Safejumper-for-Desktop with GNU General Public License v2.0 | 6 votes |
def test_downloadPageLogsFileCloseError(self):
    """
    If there is an exception closing the file being written to after the
    connection is prematurely closed, that exception is logged.
    """
    class BrokenFile:
        def write(self, bytes):
            pass

        def close(self):
            raise IOError(ENOSPC, "No file left on device")

    d = client.downloadPage(self.getURL("broken"), BrokenFile())
    d = self.assertFailure(d, client.PartialDownloadError)

    def cbFailed(ignored):
        self.assertEqual(len(self.flushLoggedErrors(IOError)), 1)
    d.addCallback(cbFailed)
    return d
Example #17
Source File: test_webclient.py From BitTorrent with GNU General Public License v3.0 | 5 votes |
def testDownloadServerError(self):
    return self._downloadTest(
        lambda url: client.downloadPage(self.getURL(url), url.split('?')[0]))
Example #18
Source File: test_webclient.py From BitTorrent with GNU General Public License v3.0 | 5 votes |
def testDownloadPageError3(self):
    # make sure failures in open() are caught too. This is tricky.
    # Might only work on posix.
    tmpfile = open("unwritable", "wb")
    tmpfile.close()
    os.chmod("unwritable", 0)  # make it unwritable (to us)
    d = self.assertFailure(
        client.downloadPage(self.getURL("file"), "unwritable"),
        IOError)
    d.addBoth(self._cleanupDownloadPageError3)
    return d
Example #19
Source File: test_webclient.py From BitTorrent with GNU General Public License v3.0 | 5 votes |
def testDownloadPageError2(self):
    class errorfile:
        def write(self, data):
            pass

        def close(self):
            raise IOError, "badness happened during close"

    ef = errorfile()
    return self.assertFailure(
        client.downloadPage(self.getURL("file"), ef),
        IOError)
Example #20
Source File: test_webclient.py From BitTorrent with GNU General Public License v3.0 | 5 votes |
def testDownloadPageError1(self):
    class errorfile:
        def write(self, data):
            raise IOError, "badness happened during write"

        def close(self):
            pass

    ef = errorfile()
    return self.assertFailure(
        client.downloadPage(self.getURL("file"), ef),
        IOError)
Example #21
Source File: test_webclient.py From BitTorrent with GNU General Public License v3.0 | 5 votes |
def testDownloadPage(self):
    downloads = []
    downloadData = [("file", self.mktemp(), "0123456789"),
                    ("nolength", self.mktemp(), "nolength")]

    for (url, name, data) in downloadData:
        d = client.downloadPage(self.getURL(url), name)
        d.addCallback(self._cbDownloadPageTest, data, name)
        downloads.append(d)
    return defer.gatherResults(downloads)
Example #22
Source File: test_webclient.py From python-for-android with Apache License 2.0 | 5 votes |
def _cbRunPartial(self, ignored, name, partial):
    return client.downloadPage(
        self.getURL("file"), name, supportPartial=partial)
Example #23
Source File: test_webclient.py From python-for-android with Apache License 2.0 | 5 votes |
def testDownloadServerError(self):
    return self._downloadTest(
        lambda url: client.downloadPage(self.getURL(url), url.split('?')[0]))
Example #24
Source File: test_webclient.py From python-for-android with Apache License 2.0 | 5 votes |
def testDownloadPageError2(self):
    class errorfile:
        def write(self, data):
            pass

        def close(self):
            raise IOError, "badness happened during close"

    ef = errorfile()
    return self.assertFailure(
        client.downloadPage(self.getURL("file"), ef),
        IOError)
Example #25
Source File: test_webclient.py From python-for-android with Apache License 2.0 | 5 votes |
def testDownloadPageError1(self):
    class errorfile:
        def write(self, data):
            raise IOError, "badness happened during write"

        def close(self):
            pass

    ef = errorfile()
    return self.assertFailure(
        client.downloadPage(self.getURL("file"), ef),
        IOError)
Example #26
Source File: test_webclient.py From python-for-android with Apache License 2.0 | 5 votes |
def testDownloadPage(self):
    downloads = []
    downloadData = [("file", self.mktemp(), "0123456789"),
                    ("nolength", self.mktemp(), "nolength")]

    for (url, name, data) in downloadData:
        d = client.downloadPage(self.getURL(url), name)
        d.addCallback(self._cbDownloadPageTest, data, name)
        downloads.append(d)
    return defer.gatherResults(downloads)
Example #27
Source File: test_webclient.py From Safejumper-for-Desktop with GNU General Public License v2.0 | 5 votes |
def testDownloadPage(self):
    downloads = []
    downloadData = [("file", self.mktemp(), b"0123456789"),
                    ("nolength", self.mktemp(), b"nolength")]

    for (url, name, data) in downloadData:
        d = client.downloadPage(self.getURL(url), name)
        d.addCallback(self._cbDownloadPageTest, data, name)
        downloads.append(d)
    return defer.gatherResults(downloads)
Example #28
Source File: t9webchat_download.py From python_web_Crawler_DA_ML_DL with BSD 2-Clause "Simplified" License | 5 votes |
def downloadToTempFile(url):
    '''
    Take a URL and return a Deferred that fires back once the download has
    completed.
    '''
    filenum, tempfilename = tempfile.mkstemp()
    # Example return value:
    # (6, '/var/folders/6y/kjgmpy6n1kl93r4tykvrcj1h0000gn/T/tmpkb_srsl5')
    # print(tempfilename)
    os.close(filenum)
    return client.downloadPage(url, tempfilename).addCallback(returnFilename,
                                                              tempfilename)
    # This line returns the Deferred with returnFilename added as its
    # callback and the temporary file name as an extra argument.
    # In other words, once the download started by downloadToTempFile
    # finishes, the reactor calls returnFilename with the download result as
    # its first argument and the temporary file name as its second argument.
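The returnFilename callback referenced above is not shown in this snippet. A plausible implementation, assuming it simply forwards the temporary file's path down the callback chain (hypothetical, not from the original project), could be:

def returnFilename(result, tempfilename):
    # downloadPage fires its Deferred with None on success, so the useful
    # value to pass to the next callback is the temporary file's path.
    return tempfilename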
Example #29
Source File: test_webclient.py From learn_python3_spider with MIT License | 5 votes |
def attemptRequestWithMaliciousURI(self, uri):
    """
    Attempt a request with the provided URI.

    @param uri: see L{URIInjectionTestsMixin}
    """
    client.downloadPage(uri, file=io.BytesIO())
Example #30
Source File: test_webclient.py From Safejumper-for-Desktop with GNU General Public License v2.0 | 5 votes |
def testDownloadPageError1(self):
    class errorfile:
        def write(self, data):
            raise IOError("badness happened during write")

        def close(self):
            pass

    ef = errorfile()
    return self.assertFailure(
        client.downloadPage(self.getURL("file"), ef),
        IOError)