Python urllib.urlcleanup() Examples

The following are 15 code examples of urllib.urlcleanup(). You can vote up the ones you like or vote down the ones you don't like, and go to the original project or source file by following the links above each example. You may also want to check out all available functions and classes of the urllib module, or try the search function.
Example #1
Source File: data.py    From mlb-led-scoreboard with GNU General Public License v3.0 6 votes vote down vote up
def refresh_overview(self):
    """Refresh ``self.overview`` for the current game, retrying on failure.

    Makes up to 5 attempts against the mlbgame API.  On success it updates
    the layout state, clears ``needs_refresh`` and ``network_issues``.
    On ``URLError``/``ValueError`` it sets ``network_issues``, logs the
    error, sleeps ``NETWORK_RETRY_SLEEP_TIME`` seconds and retries.
    (Python 2 ``except X, e`` syntax.)
    """
    # Flush urllib's module-level retrieval cache so fresh data is fetched.
    urllib.urlcleanup()
    attempts_remaining = 5
    while attempts_remaining > 0:
      try:
        self.overview = mlbgame.overview(self.current_game().game_id)
        self.__update_layout_state()
        self.needs_refresh = False
        self.print_overview_debug()
        self.network_issues = False
        break
      except URLError, e:
        self.network_issues = True
        debug.error("Networking Error while refreshing the current overview. {} retries remaining.".format(attempts_remaining))
        debug.error("URLError: {}".format(e.reason))
        attempts_remaining -= 1
        time.sleep(NETWORK_RETRY_SLEEP_TIME)
      except ValueError:
        self.network_issues = True
        debug.error("Value Error while refreshing current overview. {} retries remaining.".format(attempts_remaining))
        debug.error("ValueError: Failed to refresh overview for {}".format(self.current_game().game_id))
        attempts_remaining -= 1
        time.sleep(NETWORK_RETRY_SLEEP_TIME)

    # If we run out of retries, just move on to the next game
Example #2
Source File: pyinstastories.py    From PyInstaStories with MIT License 5 votes vote down vote up
def download_file(url, path, attempt=0):
    """Download *url* to *path*, retrying up to three times on failure.

    On each failure an error is printed and the next attempt starts after
    a 5-second pause; after the third failed retry the file is skipped.
    """
    while True:
        try:
            urllib.urlretrieve(url, path)
            urllib.urlcleanup()
            return
        except Exception as exc:
            if attempt == 3:
                # Retries exhausted; give up on this file.
                print("[E] Retry failed three times, skipping file.")
                print('-' * 70)
                return
            attempt += 1
            print("[E] ({:d}) Download failed: {:s}.".format(attempt, str(exc)))
            print("[W] Trying again in 5 seconds.")
            time.sleep(5)
Example #3
Source File: regrtest.py    From medicare-demo with Apache License 2.0 5 votes vote down vote up
def dash_R_cleanup(fs, ps, pic):
    """Restore saved interpreter state and purge stdlib caches.

    Used by regrtest's -R mode between runs so that lazily-populated
    module caches are not reported as reference leaks (Python 2 stdlib:
    ``copy_reg``, ``dircache``, ``urlparse``, ``urllib2``).

    fs  -- saved ``warnings.filters`` entries to restore
    ps  -- saved ``copy_reg.dispatch_table`` contents to restore
    pic -- saved ``sys.path_importer_cache`` contents to restore
    """
    import gc, copy_reg
    import _strptime, linecache, dircache
    import urlparse, urllib, urllib2, mimetypes, doctest
    import struct, filecmp
    from distutils.dir_util import _path_created

    # Restore some original values.
    warnings.filters[:] = fs
    copy_reg.dispatch_table.clear()
    copy_reg.dispatch_table.update(ps)
    sys.path_importer_cache.clear()
    sys.path_importer_cache.update(pic)

    # Clear assorted module caches.
    _path_created.clear()
    re.purge()
    _strptime._regex_cache.clear()
    urlparse.clear_cache()
    urllib.urlcleanup()
    urllib2.install_opener(None)
    dircache.reset()
    linecache.clearcache()
    mimetypes._default_mime_types()
    struct._cache.clear()
    filecmp._cache.clear()
    doctest.master = None

    # Collect cyclic trash.
    gc.collect()
Example #4
Source File: urllibdownload.py    From python-scripts with GNU General Public License v3.0 5 votes vote down vote up
def download_file(url, destfile):
    """Download *url* and save it locally as *destfile*.

    Uses ``urllib.urlretrieve`` with the module-level ``reporthook`` for
    progress output.  If *destfile* already exists it is first moved
    aside to ``"<destfile>.<timestamp>"`` before the new file replaces
    it.  urllib's temporary files are always cleaned up afterwards.
    (Python 2 ``print`` statements.)

        @url the source file to download.
        @destfile the destination save file for local.
    """
    file_url = url

    try:
        print("--> Downloading file: %s" % file_url)
        filename, msg = urllib.urlretrieve(
                #'http://code.jquery.com/jquery-2.1.1.js',
                file_url,
                reporthook = reporthook)

        print ""
        #print "File:", filename
        print "Header:"
        print msg
        if os.path.exists(filename):
            if os.path.exists(destfile):
                # Back up the existing destination as "<destfile>.<timestamp>".
                now = currenttime()
                tmpfile = "%s.%s" % (destfile, now)
                shutil.move(destfile, tmpfile)
            # Move the retrieved (temporary) file into its final place.
            shutil.move(filename, destfile)

        #print 'File exists before cleanup:', os.path.exists(filename)
    finally:
        # Remove urllib's temporary files even if the download failed.
        urllib.urlcleanup()
        #print 'File still exists:', os.path.exists(filename)
Example #5
Source File: decorators.py    From RHEAS with MIT License 5 votes vote down vote up
def http(fetch):
    """Decorator for downloading files from HTTP sites.

    Wraps a ``fetch`` function that returns ``(url_template, bbox, dt)``.
    The template is filled with ``dt.year, dt.month, dt.day``, the file
    is downloaded into a fresh temporary directory, and the wrapper
    returns ``(outpath, lfilename, bbox, dt)`` where ``lfilename`` is the
    downloaded file's basename, or None if the download failed.
    """
    @wraps(fetch)
    def wrapper(*args, **kwargs):
        url, bbox, dt = fetch(*args, **kwargs)
        outpath = tempfile.mkdtemp()
        filename = url.format(dt.year, dt.month, dt.day)
        try:
            lfilename = filename.split("/")[-1]
            # Flush urllib's retrieval cache before fetching.
            urllib.urlcleanup()
            urllib.urlretrieve(filename, "{0}/{1}".format(outpath, lfilename))
        except Exception:
            # Download is best-effort: signal failure with a None filename.
            # (Was a bare ``except:``, which also swallowed
            # KeyboardInterrupt/SystemExit.)
            lfilename = None
        return outpath, lfilename, bbox, dt
    return wrapper
Example #6
Source File: data.py    From mlb-led-scoreboard with GNU General Public License v3.0 5 votes vote down vote up
def refresh_games(self):
    """Refresh the master list of games for the current date.

    Re-reads the current date, fetches that day's games from mlbgame
    (optionally filtered to the preferred teams), and records the refresh
    time.  Retries up to 5 times on ``URLError``/``ValueError``, sleeping
    ``NETWORK_RETRY_SLEEP_TIME`` between attempts and updating
    ``self.network_issues``.  (Python 2 ``except X, e`` syntax.)
    """
    debug.log("Updating games for {}/{}/{}".format(self.month, self.day, self.year))
    # Flush urllib's module-level retrieval cache before refetching.
    urllib.urlcleanup()
    attempts_remaining = 5
    while attempts_remaining > 0:
      try:
        current_day = self.day
        self.set_current_date()

        all_games = mlbgame.day(self.year, self.month, self.day)
        if self.config.rotation_only_preferred:
          self.games = self.__filter_list_of_games(all_games, self.config.preferred_teams)
        else:
          self.games = all_games

        # Date rolled over: reset rotation to the preferred team's game.
        if current_day != self.day:
          self.current_game_index = self.game_index_for_preferred_team()
        self.games_refresh_time = time.time()
        self.network_issues = False
        break
      except URLError, e:
        self.network_issues = True
        debug.error("Networking error while refreshing the master list of games. {} retries remaining.".format(attempts_remaining))
        debug.error("URLError: {}".format(e.reason))
        attempts_remaining -= 1
        time.sleep(NETWORK_RETRY_SLEEP_TIME)
      except ValueError:
        self.network_issues = True
        debug.error("Value Error while refreshing master list of games. {} retries remaining.".format(attempts_remaining))
        debug.error("ValueError: Failed to refresh list of games")
        attempts_remaining -= 1
        time.sleep(NETWORK_RETRY_SLEEP_TIME)
Example #7
Source File: data.py    From mlb-led-scoreboard with GNU General Public License v3.0 5 votes vote down vote up
def fetch_preferred_team_overview(self):
    """Return a fresh mlbgame overview for the preferred team's game.

    Returns None when the preferred team has an off day.  Logs the game
    status before returning.
    """
    if self.is_offday_for_preferred_team():
        return None
    # Drop urllib's retrieval cache so the overview is fetched fresh.
    urllib.urlcleanup()
    preferred_game = self.games[self.game_index_for_preferred_team()]
    overview = mlbgame.overview(preferred_game.game_id)
    debug.log("Preferred Team's Game Status: {}, {} {}".format(
        overview.status, overview.inning_state, overview.inning))
    return overview
Example #8
Source File: regrtest.py    From BinderFilter with MIT License 4 votes vote down vote up
def dash_R_cleanup(fs, ps, pic, zdc, abcs):
    """Restore saved interpreter state and purge stdlib caches.

    Used by regrtest's -R mode between runs so that lazily-populated
    module caches are not reported as reference leaks (Python 2 stdlib:
    ``copy_reg``, ``dircache``, ``urlparse``, ``urllib2``).

    fs   -- saved ``warnings.filters`` entries to restore
    ps   -- saved ``copy_reg.dispatch_table`` contents to restore
    pic  -- saved ``sys.path_importer_cache`` contents to restore
    zdc  -- saved ``zipimport._zip_directory_cache`` contents to restore
    abcs -- mapping of ABC -> saved registry to restore
    """
    import gc, copy_reg
    import _strptime, linecache
    dircache = test_support.import_module('dircache', deprecated=True)
    import urlparse, urllib, urllib2, mimetypes, doctest
    import struct, filecmp
    from distutils.dir_util import _path_created

    # Clear the warnings registry, so they can be displayed again
    for mod in sys.modules.values():
        if hasattr(mod, '__warningregistry__'):
            del mod.__warningregistry__

    # Restore some original values.
    warnings.filters[:] = fs
    copy_reg.dispatch_table.clear()
    copy_reg.dispatch_table.update(ps)
    sys.path_importer_cache.clear()
    sys.path_importer_cache.update(pic)
    try:
        import zipimport
    except ImportError:
        pass # Run unmodified on platforms without zipimport support
    else:
        zipimport._zip_directory_cache.clear()
        zipimport._zip_directory_cache.update(zdc)

    # clear type cache
    sys._clear_type_cache()

    # Clear ABC registries, restoring previously saved ABC registries.
    for abc, registry in abcs.items():
        abc._abc_registry = registry.copy()
        abc._abc_cache.clear()
        abc._abc_negative_cache.clear()

    # Clear assorted module caches.
    _path_created.clear()
    re.purge()
    _strptime._regex_cache.clear()
    urlparse.clear_cache()
    urllib.urlcleanup()
    urllib2.install_opener(None)
    dircache.reset()
    linecache.clearcache()
    mimetypes._default_mime_types()
    filecmp._cache.clear()
    struct._clearcache()
    doctest.master = None
    try:
        import ctypes
    except ImportError:
        # Don't worry about resetting the cache if ctypes is not supported
        pass
    else:
        ctypes._reset_cache()

    # Collect cyclic trash.
    gc.collect()
Example #9
Source File: regrtest.py    From oss-ftp with MIT License 4 votes vote down vote up
def dash_R_cleanup(fs, ps, pic, zdc, abcs):
    """Restore saved interpreter state and purge stdlib caches.

    Used by regrtest's -R mode between runs so that lazily-populated
    module caches are not reported as reference leaks (Python 2 stdlib:
    ``copy_reg``, ``dircache``, ``urlparse``, ``urllib2``).

    fs   -- saved ``warnings.filters`` entries to restore
    ps   -- saved ``copy_reg.dispatch_table`` contents to restore
    pic  -- saved ``sys.path_importer_cache`` contents to restore
    zdc  -- saved ``zipimport._zip_directory_cache`` contents to restore
    abcs -- mapping of ABC -> saved registry to restore
    """
    import gc, copy_reg
    import _strptime, linecache
    dircache = test_support.import_module('dircache', deprecated=True)
    import urlparse, urllib, urllib2, mimetypes, doctest
    import struct, filecmp
    from distutils.dir_util import _path_created

    # Clear the warnings registry, so they can be displayed again
    for mod in sys.modules.values():
        if hasattr(mod, '__warningregistry__'):
            del mod.__warningregistry__

    # Restore some original values.
    warnings.filters[:] = fs
    copy_reg.dispatch_table.clear()
    copy_reg.dispatch_table.update(ps)
    sys.path_importer_cache.clear()
    sys.path_importer_cache.update(pic)
    try:
        import zipimport
    except ImportError:
        pass # Run unmodified on platforms without zipimport support
    else:
        zipimport._zip_directory_cache.clear()
        zipimport._zip_directory_cache.update(zdc)

    # clear type cache
    sys._clear_type_cache()

    # Clear ABC registries, restoring previously saved ABC registries.
    for abc, registry in abcs.items():
        abc._abc_registry = registry.copy()
        abc._abc_cache.clear()
        abc._abc_negative_cache.clear()

    # Clear assorted module caches.
    _path_created.clear()
    re.purge()
    _strptime._regex_cache.clear()
    urlparse.clear_cache()
    urllib.urlcleanup()
    urllib2.install_opener(None)
    dircache.reset()
    linecache.clearcache()
    mimetypes._default_mime_types()
    filecmp._cache.clear()
    struct._clearcache()
    doctest.master = None
    try:
        import ctypes
    except ImportError:
        # Don't worry about resetting the cache if ctypes is not supported
        pass
    else:
        ctypes._reset_cache()

    # Collect cyclic trash.
    gc.collect()
Example #10
Source File: scythe.py    From Penetration-Testing-Study-Notes with MIT License 4 votes vote down vote up
def get_request(test):
    """Perform the HTTP GET request described by the *test* dict.

    Builds the request headers from test['headers'] (plus an MSIE
    user-agent), optionally collects a cookie and CSRF token first and
    substitutes "<CSRFTOKEN>" in the url/parameters/headers, then issues
    the request through a NullHTTPErrorProcessor opener.

    Returns (test, r_body, r_info, req) on success, or
    (test, False, False, req) on failure.  (Python 2 ``print``
    statements.)
    """
    # perform GET request

    urllib.urlcleanup() # clear cache

    try:
        user_agent = 'Mozilla/4.0 (compatible; MSIE 5.5; Windows NT)'
        req_headers = { 'User-Agent' : user_agent }
        for each in test['headers']:
            key, val = each.split(":", 1)
            key = key.lstrip()
            val = val.lstrip()
            req_headers[key] = val
        if test['requestCookie'] or test['requestCSRF']:
            # request cookie and csrf token if set in module XML
            cookie_val, csrf_val = request_value(test)
            if cookie_val:
                req_headers['cookie'] = cookie_val
            if csrf_val:
                # replace <CSRFTOKEN> with the collected token
                test['url'] = test['url'].replace("<CSRFTOKEN>", csrf_val)
                test['postParameters'] = test['postParameters'].replace("<CSRFTOKEN>", csrf_val)
                test['headers'] = [h.replace('<CSRFTOKEN>', csrf_val) for h in test['headers']]

        if opts.debug:
            # print debug output
            print textwrap.fill((" [ ] URL (GET): %s" % test['url']),
                initial_indent='', subsequent_indent=' -> ', width=80)
            print

        # assign NullHTTPErrorProcessor as default opener
        opener = urllib2.build_opener(NullHTTPErrorProcessor())
        urllib2.install_opener(opener)

        req = urllib2.Request(test['url'], headers=req_headers)
        f = urllib2.urlopen(req)
        r_body = f.read()
        r_info = f.info()
        f.close()

        # handle instances where the response body is 0 bytes in length
        if not r_body:
            print " [" + color['red'] + "!" + color['end'] + "] Zero byte response received from %s" \
                % test['name']
            r_body = "<Scythe Message: Empty response from server>"

        # returned updated test and response data
        return test, r_body, r_info, req

    except Exception:
        print textwrap.fill((" [" + color['red'] + "!" + color['end'] + "] Error contacting %s" \
            % test['url']), initial_indent='', subsequent_indent='\t', width=80)
        if opts.debug:
            for ex in traceback.format_exc().splitlines():
                print textwrap.fill((" %s" \
                    % str(ex)), initial_indent='', subsequent_indent='\t', width=80)
            print
        # NOTE(review): if the exception fired before ``req`` was assigned
        # (e.g. inside request_value), this line raises NameError — confirm.
        return test, False, False, req
Example #11
Source File: scythe.py    From Offensive-Security-Certified-Professional with MIT License 4 votes vote down vote up
def get_request(test):
    """Perform the HTTP GET request described by the *test* dict.

    Builds the request headers from test['headers'] (plus an MSIE
    user-agent), optionally collects a cookie and CSRF token first and
    substitutes "<CSRFTOKEN>" in the url/parameters/headers, then issues
    the request through a NullHTTPErrorProcessor opener.

    Returns (test, r_body, r_info, req) on success, or
    (test, False, False, req) on failure.  (Python 2 ``print``
    statements.)
    """
    # perform GET request

    urllib.urlcleanup() # clear cache

    try:
        user_agent = 'Mozilla/4.0 (compatible; MSIE 5.5; Windows NT)'
        req_headers = { 'User-Agent' : user_agent }
        for each in test['headers']:
            key, val = each.split(":", 1)
            key = key.lstrip()
            val = val.lstrip()
            req_headers[key] = val
        if test['requestCookie'] or test['requestCSRF']:
            # request cookie and csrf token if set in module XML
            cookie_val, csrf_val = request_value(test)
            if cookie_val:
                req_headers['cookie'] = cookie_val
            if csrf_val:
                # replace <CSRFTOKEN> with the collected token
                test['url'] = test['url'].replace("<CSRFTOKEN>", csrf_val)
                test['postParameters'] = test['postParameters'].replace("<CSRFTOKEN>", csrf_val)
                test['headers'] = [h.replace('<CSRFTOKEN>', csrf_val) for h in test['headers']]

        if opts.debug:
            # print debug output
            print textwrap.fill((" [ ] URL (GET): %s" % test['url']),
                initial_indent='', subsequent_indent=' -> ', width=80)
            print

        # assign NullHTTPErrorProcessor as default opener
        opener = urllib2.build_opener(NullHTTPErrorProcessor())
        urllib2.install_opener(opener)

        req = urllib2.Request(test['url'], headers=req_headers)
        f = urllib2.urlopen(req)
        r_body = f.read()
        r_info = f.info()
        f.close()

        # handle instances where the response body is 0 bytes in length
        if not r_body:
            print " [" + color['red'] + "!" + color['end'] + "] Zero byte response received from %s" \
                % test['name']
            r_body = "<Scythe Message: Empty response from server>"

        # returned updated test and response data
        return test, r_body, r_info, req

    except Exception:
        print textwrap.fill((" [" + color['red'] + "!" + color['end'] + "] Error contacting %s" \
            % test['url']), initial_indent='', subsequent_indent='\t', width=80)
        if opts.debug:
            for ex in traceback.format_exc().splitlines():
                print textwrap.fill((" %s" \
                    % str(ex)), initial_indent='', subsequent_indent='\t', width=80)
            print
        # NOTE(review): if the exception fired before ``req`` was assigned
        # (e.g. inside request_value), this line raises NameError — confirm.
        return test, False, False, req
Example #12
Source File: scythe.py    From pentest-notes with MIT License 4 votes vote down vote up
def get_request(test):
    """Perform the HTTP GET request described by the *test* dict.

    Builds the request headers from test['headers'] (plus an MSIE
    user-agent), optionally collects a cookie and CSRF token first and
    substitutes "<CSRFTOKEN>" in the url/parameters/headers, then issues
    the request through a NullHTTPErrorProcessor opener.

    Returns (test, r_body, r_info, req) on success, or
    (test, False, False, req) on failure.  (Python 2 ``print``
    statements.)
    """
    # perform GET request

    urllib.urlcleanup() # clear cache

    try:
        user_agent = 'Mozilla/4.0 (compatible; MSIE 5.5; Windows NT)'
        req_headers = { 'User-Agent' : user_agent }
        for each in test['headers']:
            key, val = each.split(":", 1)
            key = key.lstrip()
            val = val.lstrip()
            req_headers[key] = val
        if test['requestCookie'] or test['requestCSRF']:
            # request cookie and csrf token if set in module XML
            cookie_val, csrf_val = request_value(test)
            if cookie_val:
                req_headers['cookie'] = cookie_val
            if csrf_val:
                # replace <CSRFTOKEN> with the collected token
                test['url'] = test['url'].replace("<CSRFTOKEN>", csrf_val)
                test['postParameters'] = test['postParameters'].replace("<CSRFTOKEN>", csrf_val)
                test['headers'] = [h.replace('<CSRFTOKEN>', csrf_val) for h in test['headers']]

        if opts.debug:
            # print debug output
            print textwrap.fill((" [ ] URL (GET): %s" % test['url']),
                initial_indent='', subsequent_indent=' -> ', width=80)
            print

        # assign NullHTTPErrorProcessor as default opener
        opener = urllib2.build_opener(NullHTTPErrorProcessor())
        urllib2.install_opener(opener)

        req = urllib2.Request(test['url'], headers=req_headers)
        f = urllib2.urlopen(req)
        r_body = f.read()
        r_info = f.info()
        f.close()

        # handle instances where the response body is 0 bytes in length
        if not r_body:
            print " [" + color['red'] + "!" + color['end'] + "] Zero byte response received from %s" \
                % test['name']
            r_body = "<Scythe Message: Empty response from server>"

        # returned updated test and response data
        return test, r_body, r_info, req

    except Exception:
        print textwrap.fill((" [" + color['red'] + "!" + color['end'] + "] Error contacting %s" \
            % test['url']), initial_indent='', subsequent_indent='\t', width=80)
        if opts.debug:
            for ex in traceback.format_exc().splitlines():
                print textwrap.fill((" %s" \
                    % str(ex)), initial_indent='', subsequent_indent='\t', width=80)
            print
        # NOTE(review): if the exception fired before ``req`` was assigned
        # (e.g. inside request_value), this line raises NameError — confirm.
        return test, False, False, req
Example #13
Source File: regrtest.py    From gcblue with BSD 3-Clause "New" or "Revised" License 4 votes vote down vote up
def dash_R_cleanup(fs, ps, pic, zdc, abcs):
    """Restore saved interpreter state and purge stdlib caches.

    Used by regrtest's -R mode between runs so that lazily-populated
    module caches are not reported as reference leaks (Python 2 stdlib:
    ``copy_reg``, ``dircache``, ``urlparse``, ``urllib2``).

    fs   -- saved ``warnings.filters`` entries to restore
    ps   -- saved ``copy_reg.dispatch_table`` contents to restore
    pic  -- saved ``sys.path_importer_cache`` contents to restore
    zdc  -- saved ``zipimport._zip_directory_cache`` contents to restore
    abcs -- mapping of ABC -> saved registry to restore
    """
    import gc, copy_reg
    import _strptime, linecache
    dircache = test_support.import_module('dircache', deprecated=True)
    import urlparse, urllib, urllib2, mimetypes, doctest
    import struct, filecmp
    from distutils.dir_util import _path_created

    # Clear the warnings registry, so they can be displayed again
    for mod in sys.modules.values():
        if hasattr(mod, '__warningregistry__'):
            del mod.__warningregistry__

    # Restore some original values.
    warnings.filters[:] = fs
    copy_reg.dispatch_table.clear()
    copy_reg.dispatch_table.update(ps)
    sys.path_importer_cache.clear()
    sys.path_importer_cache.update(pic)
    try:
        import zipimport
    except ImportError:
        pass # Run unmodified on platforms without zipimport support
    else:
        zipimport._zip_directory_cache.clear()
        zipimport._zip_directory_cache.update(zdc)

    # clear type cache
    sys._clear_type_cache()

    # Clear ABC registries, restoring previously saved ABC registries.
    for abc, registry in abcs.items():
        abc._abc_registry = registry.copy()
        abc._abc_cache.clear()
        abc._abc_negative_cache.clear()

    # Clear assorted module caches.
    _path_created.clear()
    re.purge()
    _strptime._regex_cache.clear()
    urlparse.clear_cache()
    urllib.urlcleanup()
    urllib2.install_opener(None)
    dircache.reset()
    linecache.clearcache()
    mimetypes._default_mime_types()
    filecmp._cache.clear()
    struct._clearcache()
    doctest.master = None
    try:
        import ctypes
    except ImportError:
        # Don't worry about resetting the cache if ctypes is not supported
        pass
    else:
        ctypes._reset_cache()

    # Collect cyclic trash.
    gc.collect()
Example #14
Source File: regrtest.py    From CTFCrackTools-V2 with GNU General Public License v3.0 4 votes vote down vote up
def dash_R_cleanup(fs, ps, pic, abcs):
    """Restore saved interpreter state and purge stdlib caches.

    Used by regrtest's -R mode between runs so that lazily-populated
    module caches are not reported as reference leaks (Python 2 stdlib:
    ``copy_reg``, ``dircache``, ``urlparse``, ``urllib2``).

    fs   -- saved ``warnings.filters`` entries to restore
    ps   -- saved ``copy_reg.dispatch_table`` contents to restore
    pic  -- saved ``sys.path_importer_cache`` contents to restore
    abcs -- mapping of ABC -> saved registry to restore
    """
    import gc, copy_reg
    import _strptime, linecache
    dircache = test_support.import_module('dircache', deprecated=True)
    import urlparse, urllib, urllib2, mimetypes, doctest
    import struct, filecmp
    from distutils.dir_util import _path_created

    # Clear the warnings registry, so they can be displayed again
    for mod in sys.modules.values():
        if hasattr(mod, '__warningregistry__'):
            del mod.__warningregistry__

    # Restore some original values.
    warnings.filters[:] = fs
    copy_reg.dispatch_table.clear()
    copy_reg.dispatch_table.update(ps)
    sys.path_importer_cache.clear()
    sys.path_importer_cache.update(pic)

    # clear type cache
    sys._clear_type_cache()

    # Clear ABC registries, restoring previously saved ABC registries.
    for abc, registry in abcs.items():
        abc._abc_registry = registry.copy()
        abc._abc_cache.clear()
        abc._abc_negative_cache.clear()

    # Clear assorted module caches.
    _path_created.clear()
    re.purge()
    _strptime._regex_cache.clear()
    urlparse.clear_cache()
    urllib.urlcleanup()
    urllib2.install_opener(None)
    dircache.reset()
    linecache.clearcache()
    mimetypes._default_mime_types()
    filecmp._cache.clear()
    struct._clearcache()
    doctest.master = None

    # Collect cyclic trash.
    gc.collect()
Example #15
Source File: regrtest.py    From CTFCrackTools with GNU General Public License v3.0 4 votes vote down vote up
def dash_R_cleanup(fs, ps, pic, abcs):
    """Restore saved interpreter state and purge stdlib caches.

    Used by regrtest's -R mode between runs so that lazily-populated
    module caches are not reported as reference leaks (Python 2 stdlib:
    ``copy_reg``, ``dircache``, ``urlparse``, ``urllib2``).

    fs   -- saved ``warnings.filters`` entries to restore
    ps   -- saved ``copy_reg.dispatch_table`` contents to restore
    pic  -- saved ``sys.path_importer_cache`` contents to restore
    abcs -- mapping of ABC -> saved registry to restore
    """
    import gc, copy_reg
    import _strptime, linecache
    dircache = test_support.import_module('dircache', deprecated=True)
    import urlparse, urllib, urllib2, mimetypes, doctest
    import struct, filecmp
    from distutils.dir_util import _path_created

    # Clear the warnings registry, so they can be displayed again
    for mod in sys.modules.values():
        if hasattr(mod, '__warningregistry__'):
            del mod.__warningregistry__

    # Restore some original values.
    warnings.filters[:] = fs
    copy_reg.dispatch_table.clear()
    copy_reg.dispatch_table.update(ps)
    sys.path_importer_cache.clear()
    sys.path_importer_cache.update(pic)

    # clear type cache
    sys._clear_type_cache()

    # Clear ABC registries, restoring previously saved ABC registries.
    for abc, registry in abcs.items():
        abc._abc_registry = registry.copy()
        abc._abc_cache.clear()
        abc._abc_negative_cache.clear()

    # Clear assorted module caches.
    _path_created.clear()
    re.purge()
    _strptime._regex_cache.clear()
    urlparse.clear_cache()
    urllib.urlcleanup()
    urllib2.install_opener(None)
    dircache.reset()
    linecache.clearcache()
    mimetypes._default_mime_types()
    filecmp._cache.clear()
    struct._clearcache()
    doctest.master = None

    # Collect cyclic trash.
    gc.collect()