Python requests.exceptions.RequestException() Examples
The following are 30 code examples of requests.exceptions.RequestException(), collected from open-source projects. You can go to the original project or source file by following the link above each example, or check out the other functions and classes available in the requests.exceptions module.
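Before the project examples, here is a minimal, self-contained sketch of the pattern most of them share: wrap the HTTP call in a try block, call raise_for_status() so HTTP error codes are raised as exceptions too, and catch requests.exceptions.RequestException, the base class for connection errors, timeouts, and HTTP errors. The URL and function name here are placeholders, not part of any project below.

import requests

def fetch_json(url, timeout=10):
    """Return the decoded JSON body, or None if the request fails for any reason."""
    try:
        response = requests.get(url, timeout=timeout)
        # raise_for_status() turns 4xx/5xx responses into HTTPError,
        # which is also a subclass of RequestException
        response.raise_for_status()
        return response.json()
    except requests.exceptions.RequestException as exc:
        print('Request to {0} failed: {1}'.format(url, exc))
        return None

# Example usage (placeholder URL):
# data = fetch_json('https://api.example.com/items')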
Example #1
Source File: GooglePhotosDownload.py From gphotos-sync with MIT License | 8 votes |
def find_bad_items(self, batch: Mapping[str, DatabaseMedia]):
    """ a batch get failed. Now do all of its contents as individual
    gets so we can work out which ID(s) cause the failure
    """
    for item_id, media_item in batch.items():
        try:
            log.debug("BAD ID Retry on %s (%s)", item_id, media_item.relative_path)
            response = self._api.mediaItems.get.execute(mediaItemId=item_id)
            media_item_json = response.json()
            self.download_file(media_item, media_item_json)
        except RequestException as e:
            self.bad_ids.add_id(
                str(media_item.relative_path), media_item.id, media_item.url, e
            )
            self.files_download_failed += 1
            log.error(
                "FAILURE %d in get of %s BAD ID",
                self.files_download_failed,
                media_item.relative_path,
            )
Example #2
Source File: bulk.py From tap-salesforce with GNU Affero General Public License v3.0 | 7 votes |
def job_exists(self, job_id):
    try:
        endpoint = "job/{}".format(job_id)
        url = self.bulk_url.format(self.sf.instance_url, endpoint)
        headers = self._get_bulk_headers()

        with metrics.http_request_timer("get_job"):
            self.sf._make_request('GET', url, headers=headers)

        return True  # requests will raise for a 400 InvalidJob
    except RequestException as ex:
        if ex.response.headers["Content-Type"] == 'application/json':
            exception_code = ex.response.json()['exceptionCode']
            if exception_code == 'InvalidJob':
                return False
        raise
Example #3
Source File: download.py From netease-dl with MIT License | 6 votes |
def login(method):
    """Require user to login."""

    def wrapper(*args, **kwargs):
        crawler = args[0].crawler  # args[0] is a NetEase object

        try:
            if os.path.isfile(cookie_path):
                with open(cookie_path, 'r') as cookie_file:
                    cookie = cookie_file.read()
                expire_time = re.compile(r'\d{4}-\d{2}-\d{2}').findall(cookie)
                now = time.strftime('%Y-%m-%d', time.localtime(time.time()))
                if expire_time[0] > now:
                    crawler.session.cookies.load()
                else:
                    crawler.login()
            else:
                crawler.login()
        except RequestException:
            click.echo('Maybe password error, please try again.')
            sys.exit(1)

        result = method(*args, **kwargs)
        return result

    return wrapper
Example #4
Source File: connection.py From insightconnect-plugins with MIT License | 6 votes |
def test(self):
    # list UDSO's
    json_payload = ''
    api_path = "/WebApp/api/SuspiciousObjects/UserDefinedSO/"
    request_url = self.url + api_path
    self.create_jwt_token(api_path, 'GET', json_payload)

    response = None
    try:
        response = requests.get(request_url, headers=self.header_dict, data=json_payload, verify=False)
        response.raise_for_status()
        if response.status_code != 200:
            raise ConnectionTestException(f'{response.text} (HTTP status: {response.status_code})')
        return {"success": True}
    except RequestException as rex:
        if response:
            self.logger.error(f"Received status code: {response.status_code}")
            self.logger.error(f"Response was: {response.text}")
        raise ConnectionTestException(assistance="Please verify the connection details and input data.",
                                      cause=f"Error processing the Apex request: {rex}")
Example #5
Source File: download.py From netease-dl with MIT License | 6 votes |
def download_artist_by_id(self, artist_id, artist_name):
    """Download an artist's top 50 songs by his/her id.

    :params artist_id: artist id.
    :params artist_name: artist name.
    """

    try:
        # use old api
        songs = self.crawler.get_artists_hot_songs(artist_id)
    except RequestException as exception:
        click.echo(exception)
    else:
        folder = os.path.join(self.folder, artist_name)
        for song in songs:
            self.download_song_by_id(song.song_id, song.song_name, folder)
Example #6
Source File: nfsession_access.py From plugin.video.netflix with MIT License | 6 votes |
def prefetch_login(self):
    """Check if we have stored credentials.
    If so, do the login before the user requests it"""
    from requests import exceptions
    try:
        common.get_credentials()
        if not self.is_logged_in():
            self._login()
        self.is_prefetch_login = True
    except exceptions.RequestException as exc:
        # It was not possible to connect to the web service, no connection, network problem, etc
        import traceback
        common.error('Login prefetch: request exception {}', exc)
        common.debug(g.py2_decode(traceback.format_exc(), 'latin-1'))
    except MissingCredentialsError:
        common.info('Login prefetch: No stored credentials are available')
    except (LoginFailedError, LoginValidateError):
        ui.show_notification(common.get_local_string(30009))
    except (InvalidMembershipStatusError, InvalidMembershipStatusAnonymous):
        ui.show_notification(common.get_local_string(30180), time=10000)
Example #7
Source File: download_single_item.py From ISIC-Archive-Downloader with Apache License 2.0 | 6 votes |
def fetch_description(url: str) -> list:
    """
    :param url: URL of the image whose description will be downloaded
    :return: Json
    """
    # Sometimes their site isn't responding well, and then an error occurs,
    # so we will retry a few seconds later and repeat until it succeeds
    while True:
        try:
            # Download the description
            response_desc = requests.get(url, stream=True, timeout=20)
            # Validate the download status is ok
            response_desc.raise_for_status()
            # Parse the description
            parsed_description = response_desc.json()
            return parsed_description
        except (RequestException, ReadTimeoutError):
            time.sleep(5)
Example #8
Source File: sandboxes.py From epicbox with MIT License | 6 votes |
def working_directory():
    docker_client = utils.get_docker_client()
    volume_name = 'epicbox-' + str(uuid.uuid4())
    log = logger.bind(volume=volume_name)
    log.info("Creating new docker volume for working directory")
    try:
        volume = docker_client.volumes.create(volume_name)
    except (RequestException, DockerException) as e:
        log.exception("Failed to create a docker volume")
        raise exceptions.DockerError(str(e))
    log.info("New docker volume is created")
    try:
        yield _WorkingDirectory(volume=volume_name, node=None)
    finally:
        # Ensure that volume cleanup takes place
        log.info("Removing the docker volume")
        try:
            volume.remove()
        except NotFound:
            log.warning("Failed to remove the docker volume, it doesn't exist")
        except (RequestException, DockerException):
            log.exception("Failed to remove the docker volume")
        else:
            log.info("Docker volume removed")
Example #9
Source File: repo.py From Penny-Dreadful-Tools with GNU General Public License v3.0 | 6 votes |
def get_pull_requests(start_date: datetime.datetime,
                      end_date: datetime.datetime,
                      max_pull_requests: int = sys.maxsize,
                      repo_name: str = 'PennyDreadfulMTG/Penny-Dreadful-Tools'
                      ) -> List[PullRequest.PullRequest]:
    gh_user = configuration.get_optional_str('github_user')
    gh_pass = configuration.get_optional_str('github_password')
    if gh_user is None or gh_pass is None:
        return []
    g = Github(gh_user, gh_pass)
    git_repo = g.get_repo(repo_name)
    pulls: List[PullRequest.PullRequest] = []
    try:
        for pull in git_repo.get_pulls(state='closed', sort='updated', direction='desc'):
            if not pull.merged_at:
                continue
            merged_dt = dtutil.UTC_TZ.localize(pull.merged_at)
            updated_dt = dtutil.UTC_TZ.localize(pull.updated_at)
            if merged_dt > end_date:
                continue
            if updated_dt < start_date:
                return pulls
            pulls.append(pull)
            if len(pulls) >= max_pull_requests:
                return pulls
    except RequestException as e:
        print('Github pulls error (request)', e)
    except GithubException as e:
        print('Github pulls error (github)', e)
    return pulls
Example #10
Source File: bots.py From Dallinger with MIT License | 6 votes |
def complete_questionnaire(self):
    """Complete the standard debriefing form.

    Answers the questions in the base questionnaire.
    """
    while True:
        data = {
            "question": "questionnaire",
            "number": 1,
            "response": json.dumps(self.question_responses),
        }
        url = "{host}/question/{self.participant_id}".format(
            host=self.host, self=self
        )
        try:
            result = requests.post(url, data=data)
            result.raise_for_status()
        except RequestException:
            self.stochastic_sleep()
            continue

        return True
Example #11
Source File: utils.py From epicbox with MIT License | 6 votes |
def inspect_exited_container_state(container):
    try:
        container.reload()
    except (RequestException, DockerException) as e:
        logger.exception("Failed to load the container from the Docker engine",
                         container=container)
        raise exceptions.DockerError(str(e))
    started_at = dateutil.parser.parse(container.attrs['State']['StartedAt'])
    finished_at = dateutil.parser.parse(container.attrs['State']['FinishedAt'])
    duration = finished_at - started_at
    duration_seconds = duration.total_seconds()
    if duration_seconds < 0:
        duration_seconds = -1
    return {
        'exit_code': container.attrs['State']['ExitCode'],
        'duration': duration_seconds,
        'oom_killed': container.attrs['State'].get('OOMKilled', False),
    }
Example #12
Source File: bots.py From Dallinger with MIT License | 6 votes |
def complete_experiment(self, status):
    """Record worker completion status to the experiment server.

    This is done using a GET request to the /worker_complete
    or /worker_failed endpoints.
    """
    self.log("Bot player completing experiment. Status: {}".format(status))
    while True:
        url = "{host}/{status}?participant_id={participant_id}".format(
            host=self.host, participant_id=self.participant_id, status=status
        )
        try:
            result = requests.get(url)
            result.raise_for_status()
        except RequestException:
            self.stochastic_sleep()
            continue

        return result
Example #13
Source File: test_oandapyv20.py From oanda-api-v20 with MIT License | 6 votes |
def test__requests_exception(self):
    """force a requests exception."""
    from requests.exceptions import RequestException
    import oandapyV20.endpoints.accounts as accounts
    # the deliberately broken 'ttps://' scheme forces requests to raise
    setattr(sys.modules["oandapyV20.oandapyV20"],
            "TRADING_ENVIRONMENTS",
            {"practice": {
                "stream": "ttps://test.com",
                "api": "ttps://test.com",
            }})
    api = API(environment=environment,
              access_token=access_token,
              headers={"Content-Type": "application/json"})
    text = "No connection " \
           "adapters were found for 'ttps://test.com/v3/accounts'"
    r = accounts.AccountList()
    with self.assertRaises(RequestException) as oErr:
        api.request(r)

    self.assertEqual("{}".format(oErr.exception), text)
Example #14
Source File: test_event_dispatcher.py From python-sdk with Apache License 2.0 | 6 votes |
def test_dispatch_event__handle_request_exception(self):
    """ Test that dispatch event handles exceptions and logs error. """

    url = 'https://www.optimizely.com'
    params = {
        'accountId': '111001',
        'eventName': 'test_event',
        'eventEntityId': '111028',
        'visitorId': 'oeutest_user',
    }
    event = event_builder.Event(url, params, http_verb='POST', headers={'Content-Type': 'application/json'})

    with mock.patch(
        'requests.post', side_effect=request_exception.RequestException('Failed Request'),
    ) as mock_request_post, mock.patch('logging.error') as mock_log_error:
        event_dispatcher.EventDispatcher.dispatch_event(event)

    mock_request_post.assert_called_once_with(
        url, data=json.dumps(params), headers={'Content-Type': 'application/json'},
        timeout=event_dispatcher.REQUEST_TIMEOUT,
    )
    mock_log_error.assert_called_once_with('Dispatch event failed. Error: Failed Request')
Example #15
Source File: __main__.py From nzb-subliminal with GNU General Public License v3.0 | 6 votes |
def submit_bug(filename, options):
    import requests  # only import when needed
    from requests.exceptions import RequestException
    try:
        opts = dict((k, v) for k, v in options.__dict__.items()
                    if v and k != 'submit_bug')
        r = requests.post('http://localhost:5000/bugs',
                          {'filename': filename, 'version': __version__, 'options': str(opts)})
        if r.status_code == 200:
            print('Successfully submitted file: %s' % r.text)
        else:
            print('Could not submit bug at the moment, please try again later.')
    except RequestException as e:
        print('Could not submit bug at the moment, please try again later.')
Example #16
Source File: weapi.py From netease-dl with MIT License | 6 votes |
def exception_handle(method):
    """Handle exception raised by requests library."""

    def wrapper(*args, **kwargs):
        try:
            result = method(*args, **kwargs)
            return result
        except ProxyError:
            LOG.exception('ProxyError when try to get %s.', args)
            raise ProxyError('A proxy error occurred.')
        except ConnectionException:
            LOG.exception('ConnectionError when try to get %s.', args)
            raise ConnectionException('DNS failure, refused connection, etc.')
        except Timeout:
            LOG.exception('Timeout when try to get %s', args)
            raise Timeout('The request timed out.')
        except RequestException:
            LOG.exception('RequestException when try to get %s.', args)
            raise RequestException('Please check out your network.')

    return wrapper
Example #17
Source File: helpers.py From pytos with Apache License 2.0 | 6 votes |
def get_user_list(self):
    """
    Get the list of currently configured SecureApp users.
    :return: The list of currently configured SecureApp users.
    :rtype: User_List
    :raise IOError: If there was a communication error.
    """
    logger.info("Getting SecureApp users list.")
    try:
        response_string = self.get_uri("/securechangeworkflow/api/secureapp/repository/users",
                                       expected_status_codes=200).response.content
    except RequestException:
        message = "Failed to GET SecureApp users list"
        logger.critical(message)
        raise IOError(message)
    self._user_list = User_List.from_xml_string(response_string)
    return self._user_list
Example #18
Source File: __init__.py From tap-salesforce with GNU Affero General Public License v3.0 | 6 votes |
def _make_request(self, http_method, url, headers=None, body=None, stream=False, params=None):
    if http_method == "GET":
        LOGGER.info("Making %s request to %s with params: %s", http_method, url, params)
        resp = self.session.get(url, headers=headers, stream=stream, params=params)
    elif http_method == "POST":
        LOGGER.info("Making %s request to %s with body %s", http_method, url, body)
        resp = self.session.post(url, headers=headers, data=body)
    else:
        raise TapSalesforceException("Unsupported HTTP method")

    try:
        resp.raise_for_status()
    except RequestException as ex:
        raise ex

    if resp.headers.get('Sforce-Limit-Info') is not None:
        self.rest_requests_attempted += 1
        self.check_rest_quota_usage(resp.headers)

    return resp
Example #19
Source File: event_dispatcher.py From python-sdk with Apache License 2.0 | 6 votes |
def dispatch_event(event):
    """ Dispatch the event being represented by the Event object.

    Args:
      event: Object holding information about the request to be dispatched to the Optimizely backend.
    """
    try:
        if event.http_verb == enums.HTTPVerbs.GET:
            requests.get(event.url, params=event.params, timeout=REQUEST_TIMEOUT).raise_for_status()
        elif event.http_verb == enums.HTTPVerbs.POST:
            requests.post(
                event.url, data=json.dumps(event.params), headers=event.headers, timeout=REQUEST_TIMEOUT,
            ).raise_for_status()
    except request_exception.RequestException as error:
        logging.error('Dispatch event failed. Error: %s' % str(error))
Example #20
Source File: helpers.py From pytos with Apache License 2.0 | 6 votes |
def get_user_by_id(self, user_id):
    """
    Get the SecureApp user whose ID matches the specified ID.
    :param user_id: The ID for the user which will be returned.
    :type user_id: int
    :return: The user whose ID matches the specified ID.
    :rtype: User
    :raise ValueError: If a user with the specified ID is not found.
    :raise IOError: If there was a communication error.
    """
    logger.info("Getting SecureApp user with ID '%s'.", user_id)
    try:
        response_string = self.get_uri("/securechangeworkflow/api/secureapp/repository/users/{}".format(user_id),
                                       expected_status_codes=200).response.content
    except REST_Not_Found_Error:
        message = "User with ID '{}' does not exist.".format(user_id)
        logger.critical(message)
        raise ValueError(message)
    except RequestException:
        message = "Failed to GET SecureApp users list."
        logger.critical(message)
        raise IOError(message)
    return User.from_xml_string(response_string)
Example #21
Source File: download.py From netease-dl with MIT License | 6 votes |
def download_playlist_by_id(self, playlist_id, playlist_name):
    """Download a playlist's songs by its id.

    :params playlist_id: playlist id.
    :params playlist_name: playlist name.
    """

    try:
        songs = self.crawler.get_playlist_songs(playlist_id)
    except RequestException as exception:
        click.echo(exception)
    else:
        folder = os.path.join(self.folder, playlist_name)
        for song in songs:
            self.download_song_by_id(song.song_id, song.song_name, folder)
Example #22
Source File: download.py From netease-dl with MIT License | 6 votes |
def download_song_by_id(self, song_id, song_name, folder='.'):
    """Download a song by id and save it to disk.

    :params song_id: song id.
    :params song_name: song name.
    :params folder: storage path.
    """

    try:
        url = self.crawler.get_song_url(song_id)
        if self.lyric:
            # use old api
            lyric_info = self.crawler.get_song_lyric(song_id)
        else:
            lyric_info = None
        song_name = song_name.replace('/', '')
        song_name = song_name.replace('.', '')
        self.crawler.get_song_by_url(url, song_name, folder, lyric_info)
    except RequestException as exception:
        click.echo(exception)
Example #23
Source File: TestRailPreRunModifier.py From robotframework-testrail with Apache License 2.0 | 6 votes |
def start_suite(self, suite: TestSuite) -> None:
    """Form the list of tests for the Robot Framework test suite that are included in the TestRail test run.

    If the analysis depth of the run results is greater than zero, a list of 'testrailid' tags
    of stable test cases is obtained when the first suite is launched.
    After that the list of tags is written to the class attribute and is not re-obtained
    for subsequent suites.

    If the analysis depth of the run results is zero, a list of 'testrailid' tags of all test cases
    in the given status is obtained when the first suite is launched.
    After that the list of tags is written to the class attribute and is not re-obtained
    for subsequent suites.

    *Args:*\n
        _suite_ - Robot Framework test suite object.
    """
    tests = suite.tests
    suite.tests = None
    try:
        if self.results_depth > 0:
            suite.tests = [t for t in tests if (set(t.tags) & set(self.tr_stable_tags_list))]
        else:
            suite.tests = [t for t in tests if (set(t.tags) & set(self.tr_tags_list))]
    except (RequestException, TimeoutError) as error:
        self._log_to_parent_suite(suite, str(error))
Example #24
Source File: revocation_registry.py From aries-cloudagent-python with Apache License 2.0 | 5 votes |
def retrieve_tails(self, context: InjectionContext):
    """Fetch the tails file from the public URI."""
    if not self._tails_public_uri:
        raise RevocationError("Tails file public URI is empty")

    LOGGER.info(
        "Downloading the tails file for the revocation registry: %s",
        self.registry_id,
    )

    tails_file_path = Path(self.get_receiving_tails_local_path(context))
    tails_file_dir = tails_file_path.parent
    if not tails_file_dir.exists():
        tails_file_dir.mkdir(parents=True)

    buffer_size = 65536  # should be multiple of 32 bytes for sha256
    with open(tails_file_path, "wb", buffer_size) as tails_file:
        file_hasher = hashlib.sha256()
        with Session() as req_session:
            try:
                resp = req_session.get(self._tails_public_uri, stream=True)
                for buf in resp.iter_content(chunk_size=buffer_size):
                    tails_file.write(buf)
                    file_hasher.update(buf)
            except RequestException as rx:
                raise RevocationError(f"Error retrieving tails file: {rx}")

    download_tails_hash = base58.b58encode(file_hasher.digest()).decode("utf-8")
    if download_tails_hash != self.tails_hash:
        raise RevocationError(
            "The hash of the downloaded tails file does not match."
        )

    self.tails_local_path = tails_file_path
    return self.tails_local_path
Example #25
Source File: bots.py From Dallinger with MIT License | 5 votes |
def sign_up(self):
    """Signs up a participant for the experiment.

    This is done using a POST request to the /participant/ endpoint.
    """
    self.log("Bot player signing up.")
    self.subscribe_to_quorum_channel()
    while True:
        url = (
            "{host}/participant/{self.worker_id}/"
            "{self.hit_id}/{self.assignment_id}/"
            "debug?fingerprint_hash={hash}&recruiter=bots:{bot_name}".format(
                host=self.host,
                self=self,
                hash=uuid.uuid4().hex,
                bot_name=self.__class__.__name__,
            )
        )
        try:
            result = requests.post(url)
            result.raise_for_status()
        except RequestException:
            self.stochastic_sleep()
            continue

        if result.json()["status"] == "error":
            self.stochastic_sleep()
            continue

        self.on_signup(result.json())
        return True
Example #26
Source File: download.py From netease-dl with MIT License | 5 votes |
def __init__(self, timeout, proxy, folder, quiet, lyric, again):
    self.crawler = Crawler(timeout, proxy)
    self.folder = '.' if folder is None else folder
    self.quiet = quiet
    self.lyric = lyric

    try:
        if again:
            self.crawler.login()
    except RequestException:
        click.echo('Maybe password error, please try again.')
Example #27
Source File: nfsession_requests.py From plugin.video.netflix with MIT License | 5 votes |
def try_refresh_session_data(self, raise_exception=False):
    """Refresh session_data from the Netflix website"""
    from requests import exceptions
    try:
        self.auth_url = website.extract_session_data(self._get('browse'))['auth_url']
        cookies.save(self.account_hash, self.session.cookies)
        common.debug('Successfully refreshed session data')
        return True
    except InvalidMembershipStatusError:
        raise
    except (WebsiteParsingError, InvalidMembershipStatusAnonymous, LoginValidateErrorIncorrectPassword) as exc:
        import traceback
        common.warn('Failed to refresh session data, login can be expired or the password has been changed ({})',
                    type(exc).__name__)
        common.debug(g.py2_decode(traceback.format_exc(), 'latin-1'))
        self.session.cookies.clear()
        if isinstance(exc, (InvalidMembershipStatusAnonymous, LoginValidateErrorIncorrectPassword)):
            # This prevents the MSL error: No entity association record found for the user
            common.send_signal(signal=common.Signals.CLEAR_USER_ID_TOKENS)
        return self._login()
    except exceptions.RequestException:
        import traceback
        common.warn('Failed to refresh session data, request error (RequestException)')
        common.warn(g.py2_decode(traceback.format_exc(), 'latin-1'))
        if raise_exception:
            raise
    except Exception:  # pylint: disable=broad-except
        import traceback
        common.warn('Failed to refresh session data, login expired (Exception)')
        common.debug(g.py2_decode(traceback.format_exc(), 'latin-1'))
        self.session.cookies.clear()
        if raise_exception:
            raise
    return False
Example #28
Source File: zap_helper.py From zap-cli with MIT License | 5 votes |
def is_running(self):
    """Check if ZAP is running."""
    try:
        result = requests.get(self.proxy_url)
    except RequestException:
        return False

    if 'ZAP-Header' in result.headers.get('Access-Control-Allow-Headers', []):
        return True

    raise ZAPError('Another process is listening on {0}'.format(self.proxy_url))
Example #29
Source File: mod_shellshock.py From ITWSV with MIT License | 5 votes |
def attack(self):
    http_resources = self.persister.get_links(attack_module=self.name) if self.do_get else []

    for original_request in http_resources:
        try:
            url = original_request.path
            if self.verbose == 2:
                print("[¨] {0}".format(url))

            if url not in self.attacked_get:
                self.attacked_get.append(url)
                evil_req = web.Request(url)

                resp = self.crawler.send(evil_req, headers=self.hdrs)
                if resp:
                    data = resp.content
                    if self.rand_string in data:
                        self.log_red(_("URL {0} seems vulnerable to Shellshock attack!").format(url))

                        self.add_vuln(
                            request_id=original_request.path_id,
                            category=Vulnerability.EXEC,
                            level=Vulnerability.HIGH_LEVEL,
                            request=evil_req,
                            info=_("URL {0} seems vulnerable to Shellshock attack").format(url)
                        )
        except (RequestException, KeyboardInterrupt) as exception:
            yield exception

        yield original_request
Example #30
Source File: __init__.py From cartography with Apache License 2.0 | 5 votes |
def start_github_ingestion(neo4j_session, config):
    """
    If this module is configured, perform ingestion of GitHub data. Otherwise warn and exit
    :param neo4j_session: Neo4J session for database interface
    :param config: A cartography.config object
    :return: None
    """
    if not config.github_config:
        logger.warning('GitHub import is not configured - skipping this module. See docs to configure.')
        return

    auth_tokens = json.loads(base64.b64decode(config.github_config).decode())
    common_job_parameters = {
        "UPDATE_TAG": config.update_tag,
    }

    # run sync for the provided github tokens
    for auth_data in auth_tokens['organization']:
        try:
            sync_github(
                neo4j_session,
                common_job_parameters,
                auth_data['token'],
                auth_data['url'],
                auth_data['name'],
            )
        except exceptions.RequestException as e:
            logger.error("Could not complete request to the GitHub API: {}", e)

    run_cleanup_job(
        'github_import_cleanup.json',
        neo4j_session,
        common_job_parameters,
    )
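A closing note on why a single except RequestException handler is enough in most of the examples above: ConnectionError, Timeout and HTTPError all inherit from RequestException. The small sketch below simply verifies that relationship; it is not taken from any of the projects listed.

import requests.exceptions as req_exc

# All of these inherit from RequestException, so one handler catches them all.
assert issubclass(req_exc.ConnectionError, req_exc.RequestException)
assert issubclass(req_exc.Timeout, req_exc.RequestException)
assert issubclass(req_exc.HTTPError, req_exc.RequestException)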