Python aiohttp.client_exceptions.ClientError() Examples

The following are 18 code examples of aiohttp.client_exceptions.ClientError, collected from open-source projects. Each example notes the project it comes from, the source file, and the license it is distributed under. You may also want to check out the other exception classes available in the aiohttp.client_exceptions module.
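Before the project examples, here is a minimal, self-contained sketch of the pattern they all share: perform an aiohttp request inside try/except and treat ClientError (together with asyncio.TimeoutError) as a recoverable failure. This sketch is not taken from any of the projects below; the fetch_json helper name, the example URL, and the return-None-on-failure convention are illustrative assumptions.

import asyncio
from typing import Optional

import aiohttp
from aiohttp.client_exceptions import ClientError


async def fetch_json(url: str) -> Optional[dict]:
    """Return the decoded JSON body, or None if the request fails (illustrative helper)."""
    try:
        async with aiohttp.ClientSession() as session:
            async with session.get(url, timeout=aiohttp.ClientTimeout(total=10)) as resp:
                # raise_for_status() raises ClientResponseError, a ClientError subclass, on 4xx/5xx
                resp.raise_for_status()
                return await resp.json()
    except (asyncio.TimeoutError, ClientError) as exc:
        # ClientError covers connection failures, bad status codes, malformed responses, etc.
        print(f"request to {url} failed: {exc}")
        return None


if __name__ == "__main__":
    asyncio.run(fetch_json("https://example.com/api"))

The examples below apply the same idea inside larger applications, typically translating ClientError into a domain-specific error such as HTTPInternalServerError, RequestError, or MatrixConnectionError.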
Example #1
Source File: transport.py    From aiozipkin with Apache License 2.0
async def _send_data(self, data: DataList) -> bool:
        try:

            async with self._session.post(self._address, json=data) as resp:
                body = await resp.text()
                if resp.status >= 300:
                    msg = 'zipkin responded with code: {} and body: {}'.format(
                        resp.status, body)
                    raise RuntimeError(msg)

        except (asyncio.TimeoutError, ClientError):
            return False
        except Exception as exc:  # pylint: disable=broad-except
            # that code should never fail and break application
            logger.error('Can not send spans to zipkin', exc_info=exc)
        return True 
Example #2
Source File: httpUtil.py    From hsds with Apache License 2.0
async def http_put_binary(app, url, data=None, params=None):
    log.info(f"http_put_binary('{url}') nbytes: {len(data)}")
    rsp_json = None
    client = get_http_client(app)
    timeout = config.get("timeout")

    try:
        async with client.put(url, data=data, params=params, timeout=timeout) as rsp:
            log.info(f"http_put_binary status: {rsp.status}")
            if rsp.status == 503:
                log.warn(f"503 error for http_put_binary {url}")
                raise HTTPServiceUnavailable()
            elif rsp.status != 201:
                log.error(f"PUT (binary) request error for {url}: status {rsp.status}")
                raise HTTPInternalServerError()

            rsp_json = await rsp.json()
            log.debug(f"http_put_binary({url}) response: {rsp_json}")
    except ClientError as ce:
        log.error(f"Error for http_put_binary({url}): {ce} ")
        raise HTTPInternalServerError()
    except CancelledError as cle:
        log.error(f"CancelledError for http_put_binary({url}): {cle}")
        raise HTTPInternalServerError()
    return rsp_json 
Example #3
Source File: text_apis.py    From userbot with GNU General Public License v3.0
async def text_api(_, message: Message):
    cmd = message.command
    api_key = cmd[0]
    api = text_apis_data[api_key]

    try:
        try:
            data = await AioHttp().get_json(api['url'])
            resp_json = data[api['target_key']]
            await message.edit(
                resp_json.capitalize()
            )
        except Exception:
            data = await AioHttp().get_text(api['url'])
            await message.edit(data)
    except ClientError as e:
        print(e)
        await message.delete()


# Command help section 
Example #4
Source File: gateway.py    From deconz with MIT License
async def request(self, method, path="", json=None):
        """Make a request to the API."""
        LOGGER.debug('Sending "%s" "%s" to "%s %s"', method, json, self.host, path)

        url = f"http://{self.host}:{self.port}/api/{self.api_key}{path}"

        try:
            async with self.session.request(method, url, json=json) as res:

                if res.content_type != "application/json":
                    raise ResponseError(
                        "Invalid content type: {}".format(res.content_type)
                    )

                response = await res.json()
                LOGGER.debug("HTTP request response: %s", pformat(response))

                _raise_on_error(response)

                return response

        except client_exceptions.ClientError as err:
            raise RequestError(
                "Error requesting data from {}: {}".format(self.host, err)
            ) from None 
Example #5
Source File: __init__.py    From pyatv with MIT License
async def _poller(self):
        first_call = True

        while True:
            # Sleep some time before waiting for updates
            if not first_call and self._initial_delay > 0:
                _LOGGER.debug("Initial delay set to %d", self._initial_delay)
                await asyncio.sleep(self._initial_delay, loop=self._loop)
            first_call = False

            try:
                _LOGGER.debug("Waiting for playstatus updates")
                playstatus = await self._atv.playstatus(use_revision=True, timeout=0)

                self._loop.call_soon(self.listener.playstatus_update, self, playstatus)
            except asyncio.CancelledError:
                break

            except ClientError as ex:
                _LOGGER.exception("A communication error happened")
                listener = self._listener()
                if listener:
                    self._loop.call_soon(listener.listener.connection_lost, ex)

                break

            # It is not pretty to disable pylint here, but we must catch _all_
            # exceptions to keep the API.
            except Exception as ex:  # pylint: disable=broad-except
                _LOGGER.debug("Playstatus error occurred: %s", ex)
                self._loop.call_soon(self.listener.playstatus_error, self, ex)

        self._future = None 
Example #6
Source File: httpUtil.py    From hsds with Apache License 2.0
async def http_post(app, url, data=None, params=None):
    log.info(f"http_post('{url}', {data})")
    client = get_http_client(app)
    rsp_json = None
    timeout = config.get("timeout")

    try:
        async with client.post(url, json=data, params=params, timeout=timeout) as rsp:
            log.info(f"http_post status: {rsp.status}")
            if rsp.status == 200:
                pass  # ok
            elif rsp.status == 201:
                pass # also ok
            elif rsp.status == 204: # no data
                return None
            elif rsp.status == 404:
                log.info(f"POST  reqest HTTPNotFound error for url: {url}")
            elif rsp.status == 410:
                log.info(f"POST  reqest HTTPGone error for url: {url}")
            elif rsp.status == 503:
                log.warn(f"503 error for http_get_Json {url}")
                raise HTTPServiceUnavailable()

            else:
                log.warn(f"POST request error for url: {url} - status: {rsp.status}")
                raise HTTPInternalServerError()
            rsp_json = await rsp.json()
            log.debug(f"http_post({url}) response: {rsp_json}")
    except ClientError as ce:
        log.error(f"Error for http_post({url}): {ce} ")
        raise HTTPInternalServerError()
    except CancelledError as cle:
        log.error(f"CancelledError for http_post({url}): {cle}")
        raise HTTPInternalServerError()
    return rsp_json 
Example #7
Source File: httpUtil.py    From hsds with Apache License 2.0
async def http_delete(app, url, data=None, params=None):
    # TBD - do we really need a data param?
    log.info(f"http_delete('{url}')")
    #client = get_http_client(app)
    rsp_json = None
    timeout = config.get("timeout")
    import aiohttp

    try:
        async with aiohttp.ClientSession() as session:
            async with session.delete(url, json=data, params=params, timeout=timeout) as rsp:
                log.info(f"http_delete status: {rsp.status}")
                if rsp.status == 200:
                    pass  # expected
                elif rsp.status == 404:
                    log.info(f"NotFound response for DELETE for url: {url}")
                elif rsp.status == 503:
                    log.warn(f"503 error for http_delete {url}")
                    raise HTTPServiceUnavailable()
                else:
                    log.error(f"DELETE request error for url: {url} - status: {rsp.status}")
                    raise HTTPInternalServerError()

            #rsp_json = await rsp.json()
            #log.debug(f"http_delete({url}) response: {rsp_json}")
    except ClientError as ce:
        log.error(f"ClientError for http_delete({url}): {ce} ")
        raise HTTPInternalServerError()
    except CancelledError as cle:
        log.error(f"CancelledError for http_delete({url}): {cle}")
        raise HTTPInternalServerError()
    except ConnectionResetError as cre:
        log.error(f"ConnectionResetError for http_delete({url}): {cre}")
        raise HTTPInternalServerError()

    return rsp_json 
Example #8
Source File: awsLambdaClient.py    From hsds with Apache License 2.0
async def __aenter__(self):
        start_time = time.time()
        payload = json.dumps(self.params)
        log.info(f"invoking lambda function {self.lambdaFunction} with payload: {self.params} start: {start_time}")
        log.debug(f"Lambda function count: {self.funcStats['cnt']}")
        self.funcStats["cnt"] += 1
        self.funcStats["inflight"] += 1

        self.client = getLambdaClient(self.app, self.session)
        
        try:
            lambda_rsp = await self.client.invoke(FunctionName=self.lambdaFunction, Payload=payload) 
            finish_time = time.time()
            log.info(f"lambda.invoke({self.lambdaFunction} start={start_time:.4f} finish={finish_time:.4f} elapsed={finish_time-start_time:.4f}")
            self.funcStats["inflight"] -= 1
            log.info(f"lambda.invoke - {self.funcStats['inflight']} inflight requests")
            return lambda_rsp
        except ClientError as ce:
            log.error(f"Error for lambda invoke: {ce} ")
            self.funcStats["inflight"] -= 1
            self.funcStats["failed"] += 1
            raise HTTPInternalServerError()
        except CancelledError as cle:
            log.warn(f"CancelledError for lambda invoke: {cle}")
            self.funcStats["inflight"] -= 1
            self.funcStats["failed"] += 1
            raise HTTPInternalServerError()
        except Exception as e:
            log.error(f"Unexpected exception for lamdea invoke: {e}, type: {type(e)}")
            self.funcStats["inflight"] -= 1
            self.funcStats["failed"] += 1
            raise HTTPInternalServerError() 
Example #9
Source File: async_lib.py    From hsds with Apache License 2.0
async def removeKeys(app, objid):
    # iterate through all s3 keys under the given root or dataset id and delete them
    #
    # Note: not re-entrant!  Only one scanRoot can be run at a time per app.
    log.debug(f"removeKeys: {objid}")
    if not isSchema2Id(objid):
        log.warn("ignoring non-schema2 id")
        raise KeyError("Invalid key")
    s3key = getS3Key(objid)
    log.debug(f"removeKeys - got s3key: {s3key}")
    expected_suffixes = (".dataset.json", ".group.json")
    s3prefix = None

    for suffix in expected_suffixes:
        if s3key.endswith(suffix):
            s3prefix = s3key[:-len(suffix)]
    if not s3prefix:
        log.error("removeKeys - unexpected s3key for delete_set")
        raise KeyError("unexpected key suffix")
    log.info(f"removeKeys - delete for {objid} searching for s3prefix: {s3prefix}")
    if app["objDelete_prefix"]:
        log.error("removeKeys - objDelete_prefix is already set - improper use of non-reentrant call?")
        # just continue and reset
    app["objDelete_prefix"] = s3prefix
    try:
        await getStorKeys(app, prefix=s3prefix, include_stats=False, callback=objDeleteCallback)
    except ClientError as ce:
        log.error(f"removeKeys - getS3Keys faiiled: {ce}")
    except HTTPNotFound:
        log.warn(f"removeKeys - HTTPNotFound error for getStorKeys with prefix: {s3prefix}")
    except HTTPInternalServerError:
        log.error(f"removeKeys - HTTPInternalServerError for getStorKeys with prefix: {s3prefix}")
    except Exception as e:
        log.error(f"removeKeys - Unexpected Exception for getStorKeys with prefix: {s3prefix}: {e}")

    # reset the prefix
    app["objDelete_prefix"] = None 
Example #10
Source File: __init__.py    From pysma with MIT License
def _fetch_json(self, url, payload):
        """Fetch json data for requests."""
        params = {
            "data": json.dumps(payload),
            "headers": {"content-type": "application/json"},
            "params": {"sid": self.sma_sid} if self.sma_sid else None,
        }
        for _ in range(3):
            try:
                with async_timeout.timeout(3):
                    res = yield from self._aio_session.post(self._url + url, **params)
                    return (yield from res.json()) or {}
            except (asyncio.TimeoutError, client_exceptions.ClientError):
                continue
        return {"err": "Could not connect to SMA at {} (timeout)".format(self._url)} 
Example #11
Source File: http.py    From mautrix-python with Mozilla Public License 2.0
async def request(self, method: Method, path: PathBuilder,
                      content: Optional[Union[JSON, bytes, str]] = None,
                      headers: Optional[Dict[str, str]] = None,
                      query_params: Optional[Dict[str, str]] = None) -> JSON:
        """
        Make a raw HTTP request.

        Args:
            method: The HTTP method to use.
            path: The API endpoint to call.
                Does not include the base path (e.g. /_matrix/client/r0).
            content: The content to post as a dict (json) or bytes/str (raw).
            headers: The dict of HTTP headers to send.
            query_params: The dict of query parameters to send.

        Returns:
            The response as a dict.
        """
        content = content or {}
        headers = headers or {}
        if self.token:
            headers["Authorization"] = f"Bearer {self.token}"
        query_params = query_params or {}

        if "Content-Type" not in headers:
            headers["Content-Type"] = "application/json"
        is_json = headers.get("Content-Type", None) == "application/json"
        orig_content = content
        if is_json and isinstance(content, (dict, list)):
            content = json.dumps(content)

        self._log_request(method, path, content, orig_content, query_params)

        endpoint = self.base_url + str(path)
        try:
            return await self._send(method, endpoint, content, query_params, headers or {})
        except ClientError as e:
            raise MatrixConnectionError(str(e)) from e 
Example #12
Source File: test_iot_base.py    From hass-nabucasa with GNU General Public License v3.0
async def test_cloud_unable_to_connect(mock_iot_client, caplog, cloud_mock_iot):
    """Test unable to connect error."""
    conn = MockIoT(cloud_mock_iot)
    mock_iot_client.receive.side_effect = client_exceptions.ClientError(None, None)

    await conn.connect()

    assert "Unable to connect:" in caplog.text 
Example #13
Source File: controller.py    From aiounifi with MIT License
async def request(self, method, path=None, json=None, url=None, **kwargs):
        """Make a request to the API."""
        if not url:
            if self.is_unifi_os:
                url = f"{self.url}/proxy/network/api/s/{self.site}"
            else:
                url = f"{self.url}/api/s/{self.site}"

            if path is not None:
                url += f"{path}"

        LOGGER.debug("%s", url)

        try:
            async with self.session.request(
                method,
                url,
                json=json,
                ssl=self.sslcontext,
                headers=self.headers,
                **kwargs,
            ) as res:
                LOGGER.debug("%s %s %s", res.status, res.content_type, res)

                if res.status == 401:
                    raise LoginRequired(f"Call {url} received 401 Unauthorized")

                if res.status == 404:
                    raise ResponseError(f"Call {url} received 404 Not Found")

                if res.content_type == "application/json":
                    response = await res.json()
                    _raise_on_error(response)
                    if "data" in response:
                        return response["data"]
                    return response
                return res

        except client_exceptions.ClientError as err:
            raise RequestError(
                f"Error requesting data from {self.host}: {err}"
            ) from None 
Example #14
Source File: twitter.py    From cjworkbench with GNU Affero General Public License v3.0
async def fetch(params, *, secrets, get_stored_dataframe):
    querytype = QueryType(params["querytype"])
    query: str = params[querytype.query_param_name]
    credentials = (secrets.get("twitter_credentials") or {}).get("secret")

    if not query.strip() and not credentials:
        return None  # Don't create a version

    if not query.strip():
        return "Please enter a query"

    if not credentials:
        return "Please sign in to Twitter"

    try:
        if params["accumulate"]:
            old_tweets = await get_stored_tweets(get_stored_dataframe)
            tweets = await get_new_tweets(credentials, querytype, query, old_tweets)
            tweets = merge_tweets(old_tweets, tweets)
        else:
            tweets = await get_new_tweets(credentials, querytype, query, None)
        return tweets

    except ValueError as err:
        return str(err)

    except ClientResponseError as err:
        if err.status:
            if querytype == QueryType.USER_TIMELINE and err.status == 401:
                return "User %s's tweets are private" % query
            elif querytype == QueryType.USER_TIMELINE and err.status == 404:
                return "User %s does not exist" % query
            elif err.status == 429:
                return (
                    "Twitter API rate limit exceeded. "
                    "Please wait a few minutes and try again."
                )
            else:
                return "Error from Twitter: %d %s" % (err.status, err.message)
        else:
            return "Error fetching tweets: %s" % str(err)

    except ClientError as err:
        return "Error fetching tweets: %s" % str(err) 
Example #15
Source File: domain_sn.py    From hsds with Apache License 2.0
async def doFlush(app, root_id, bucket=None):
    """ return when all DN nodes have written any pending changes to S3"""
    log.info(f"doFlush {root_id}")
    params = {"flush": 1}
    if bucket:
        params["bucket"] = bucket
    client = get_http_client(app)
    dn_urls = getDataNodeUrls(app)
    log.debug(f"doFlush - dn_urls: {dn_urls}")
    failed_count = 0

    try:
        tasks = []
        for dn_url in dn_urls:
            req = dn_url + "/groups/" + root_id
            task = asyncio.ensure_future(client.put(req, params=params))
            tasks.append(task)
        done, pending = await asyncio.wait(tasks)
        if pending:
            # should be empty since we didn't use return_when parameter
            log.error("doFlush - got pending tasks")
            raise HTTPInternalServerError()
        for task in done:
            log.info(f"doFlush - task: {task}")
            if task.exception():
                log.warn(f"doFlush - task had exception: {type(task.exception())}")
                failed_count += 1
            else:
                clientResponse = task.result()
                if clientResponse.status != 204:
                    log.warn(f"doFlush - expected 204 but got: {clientResponse.status}")
                    failed_count += 1
    except ClientError as ce:
        log.error(f"doFlush - ClientError for http_put('/groups/{root_id}'): {str(ce)}")
        raise HTTPInternalServerError()
    except CancelledError as cle:
        log.error(f"doFlush - CancelledError '/groups/{root_id}'): {str(cle)}")
        raise HTTPInternalServerError()
    log.info(f"doFlush for {root_id} complete, failed: {failed_count} out of {len(dn_urls)}")
    if failed_count > 0:
        log.error(f"doFlush fail count: {failed_count} returning 500")
        return 500
    else:
        log.info("doFlush no fails, returning 204")
        return 204 
Example #16
Source File: httpUtil.py    From hsds with Apache License 2.0
async def http_get(app, url, params=None, format="json"):
    log.info(f"http_get('{url}')")
    client = get_http_client(app)
    data = None
    status_code = None
    timeout = config.get("timeout")
    try:
        async with client.get(url, params=params, timeout=timeout) as rsp:
            log.info(f"http_get status: {rsp.status}")
            status_code = rsp.status
            if rsp.status != 200:
                log.warn(f"request to {url} failed with code: {status_code}")
            else:
                # 200, so read the response
                if format == "json":
                    data = await rsp.json()
                else:
                    data = await rsp.read()  # read response as bytes
    except ClientError as ce:
        log.debug(f"ClientError: {ce}")
        status_code = 404
    except CancelledError as cle:
        log.error(f"CancelledError for http_get({url}): {cle}")
        raise HTTPInternalServerError()

    if status_code == 403:
        log.warn(f"Forbiden to access {url}")
        raise HTTPForbidden()
    elif status_code == 404:
        log.warn(f"Object: {url} not found")
        raise HTTPNotFound()
    elif status_code == 410:
        log.warn(f"Object: {url} removed")
        raise HTTPGone()
    elif status_code == 503:
        log.warn(f"503 error for http_get_Json {url}")
        raise HTTPServiceUnavailable()
    elif status_code != 200:
        log.error(f"Error for http_get_json({url}): {status_code}")
        raise HTTPInternalServerError()

    return data 
Example #17
Source File: chunk_sn.py    From hsds with Apache License 2.0
async def write_chunk_query(app, chunk_id, dset_json, slices, query, query_update, limit, bucket=None):
    """ update the chunk selection from the DN based on query string
    chunk_id: id of chunk to write to
    chunk_sel: chunk-relative selection to read from
    np_arr: numpy array to store read bytes
    """
    # TBD = see if this code can be merged with the read_chunk_query function
    msg = f"write_chunk_query, chunk_id: {chunk_id}, slices: {slices}, query: {query}, query_udpate: {query_update}"
    log.info(msg)
    partition_chunk_id = getChunkIdForPartition(chunk_id, dset_json)
    if partition_chunk_id != chunk_id:
        log.debug(f"using partition_chunk_id: {partition_chunk_id}")
        chunk_id = partition_chunk_id  # replace the chunk_id

    req = getDataNodeUrl(app, chunk_id)
    req += "/chunks/" + chunk_id
    log.debug("PUT chunk req: " + req)
    client = get_http_client(app)

    layout = getChunkLayout(dset_json)
    chunk_sel = getChunkCoverage(chunk_id, slices, layout)

    # pass query as param
    params = {}
    params["query"] = query
    if limit > 0:
        params["Limit"] = limit
    if bucket:
        params["bucket"] = bucket

    chunk_shape = getSelectionShape(chunk_sel)
    log.debug(f"chunk_shape: {chunk_shape}")
    setSliceQueryParam(params, chunk_sel)
    dn_rsp = None
    try:
        async with client.put(req, data=json.dumps(query_update), params=params) as rsp:
            log.debug(f"http_put {req} status: <{rsp.status}>")
            if rsp.status in (200,201):
                dn_rsp = await rsp.json()  # read response as json
                log.debug(f"got query data: {dn_rsp}")
            elif rsp.status == 404:
                # no data, don't return any results
                dn_rsp = {"index": [], "value": []}
            elif rsp.status == 400:
                log.warn(f"request {req} failed withj code {rsp.status}")
                raise HTTPBadRequest()
            else:
                log.error(f"request {req} failed with code: {rsp.status}")
                raise HTTPInternalServerError()

    except ClientError as ce:
        log.error(f"Error for http_put({req}): {ce} ")
        raise HTTPInternalServerError()
    except CancelledError as cle:
        log.warn(f"CancelledError for http_get({req}): {cle}")
        return

    return dn_rsp 
Example #18
Source File: chunk_sn.py    From hsds with Apache License 2.0
async def write_chunk_hyperslab(app, chunk_id, dset_json, slices, deflate_level, arr, bucket=None):
    """ write the chunk selection to the DN
    chunk_id: id of chunk to write to
    chunk_sel: chunk-relative selection to write to
    np_arr: numpy array of data to be written
    """
    log.info(f"write_chunk_hyperslab, chunk_id:{chunk_id}, slices:{slices}, bucket: {bucket}")
    if deflate_level is not None:
        log.info("deflate_level: {deflate_level}")
    if "layout" not in dset_json:
        log.error(f"No layout found in dset_json: {dset_json}")
        raise HTTPInternalServerError()
    partition_chunk_id = getChunkIdForPartition(chunk_id, dset_json)
    if partition_chunk_id != chunk_id:
        log.debug(f"using partition_chunk_id: {partition_chunk_id}")
        chunk_id = partition_chunk_id  # replace the chunk_id

    if "type" not in dset_json:
        log.error(f"No type found in dset_json: {dset_json}")
        raise HTTPInternalServerError()

    layout = getChunkLayout(dset_json)
    chunk_sel = getChunkCoverage(chunk_id, slices, layout)
    log.debug(f"chunk_sel: {chunk_sel}")
    data_sel = getDataCoverage(chunk_id, slices, layout)
    log.debug(f"data_sel: {data_sel}")
    log.debug(f"arr.shape: {arr.shape}")
    arr_chunk = arr[data_sel]
    req = getDataNodeUrl(app, chunk_id)
    req += "/chunks/" + chunk_id

    log.debug(f"PUT chunk req: {req}")
    client = get_http_client(app)
    data = arrayToBytes(arr_chunk)
    # pass itemsize, type, dimensions, and selection as query params
    params = {}
    setSliceQueryParam(params, chunk_sel)
    if bucket:
        params["bucket"] = bucket

    try:
        async with client.put(req, data=data, params=params) as rsp:
            log.debug(f"req: {req} status: {rsp.status}")
            if rsp.status == 200:
                log.debug(f"http_put({req}) <200> Ok")
            elif rsp.status == 201:
                log.debug(f"http_out({req}) <201> Updated")
            elif rsp.status == 503:
                log.warn(f"DN node too busy to handle request: {req}")
                raise HTTPServiceUnavailable()
            else:
                log.error(f"request error status: {rsp.status} for {req}: {str(rsp)}")
                raise HTTPInternalServerError()

    except ClientError as ce:
        log.error(f"Error for http_put({req}): {ce} ")
        raise HTTPInternalServerError()
    except CancelledError as cle:
        log.warn(f"CancelledError for http_put({req}): {cle}")