Python aiofiles.open() Examples

The following are 30 code examples of aiofiles.open(), collected from open-source projects. Each example notes its source file, the project it comes from, and that project's license. You may also want to check out the other available functions and classes of the aiofiles module.
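Before the project examples, here is a minimal self-contained sketch of the basic API: aiofiles.open() mirrors the built-in open(), but returns an asynchronous context manager whose read() and write() methods are coroutines and must be awaited.

import asyncio

import aiofiles

async def main():
    # Write text without blocking the event loop.
    async with aiofiles.open('example.txt', mode='w') as f:
        await f.write('hello, aiofiles\n')

    # Read it back; read() is a coroutine.
    async with aiofiles.open('example.txt', mode='r') as f:
        contents = await f.read()
    print(contents)

asyncio.run(main())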
Example #1
Source File: file.py    From tributary with Apache License 2.0
def File(node, filename='', json=True):
    '''Open a file and append incoming data to it

    Args:
        node (Node): input stream
        filename (str): filename to write
        json (bool): serialize each datum as JSON before writing
    '''

    async def _file(data):
        async with aiofiles.open(filename, mode='a') as fp:
            if json:
                await fp.write(JSON.dumps(data))
            else:
                await fp.write(data)
        return data

    ret = Node(foo=_file, name='File', inputs=1, graphvizshape=_OUTPUT_GRAPHVIZSHAPE)
    node >> ret
    return ret 
Example #2
Source File: rushwars.py    From SML-Cogs with MIT License
async def fetch_team(self, tag=None, auth=None, **kwargs):
        """Fetch team"""
        url = 'https://api.rushstats.com/v1/team/{}'.format(clean_tag(tag))
        fn = os.path.join(CACHE_TEAM_PATH, "{}.json".format(tag))
        try:
            data = await self.fetch(url=url, auth=auth)
        except APIServerError:
            if os.path.exists(fn):
                async with aiofiles.open(fn, mode='r') as f:
                    data = json.loads(await f.read())
            else:
                raise
        else:
            async with aiofiles.open(fn, mode='w') as f:
                await f.write(json.dumps(data))

        return RWTeam(data) 
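This and several other SML-Cogs examples below (#3, #8, #9, #14) share the same cache-fallback idiom: try the API, fall back to the cached JSON file on a server error, and refresh the cache after a successful fetch. Here is that idiom distilled into a minimal standalone sketch; fetch_with_cache, fetch_remote, and cache_path are hypothetical stand-ins for each cog's own helpers, and APIServerError is the cog's own exception type.

import json
import os

import aiofiles

async def fetch_with_cache(url, cache_path, fetch_remote):
    try:
        data = await fetch_remote(url)
    except APIServerError:  # hypothetical: the cog's own server-error type
        if os.path.exists(cache_path):
            # Server is down: fall back to the last cached response.
            async with aiofiles.open(cache_path, mode='r') as f:
                data = json.loads(await f.read())
        else:
            raise
    else:
        # Fetch succeeded: refresh the cache for next time.
        async with aiofiles.open(cache_path, mode='w') as f:
            await f.write(json.dumps(data))
    return data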
Example #3
Source File: brawlstars.py    From SML-Cogs with MIT License
async def api_fetch_player(tag=None, auth=None, session=None, **kwargs):
    """Fetch player"""
    url = 'https://api.starlist.pro/v1/player?tag={}'.format(clean_tag(tag))
    fn = os.path.join(CACHE_PLAYER_PATH, "{}.json".format(tag))
    try:
        data = await api_fetch(url=url, auth=auth, session=session)
    except APIServerError:
        if os.path.exists(fn):
            async with aiofiles.open(fn, mode='r') as f:
                data = json.loads(await f.read())
        else:
            raise
    else:
        async with aiofiles.open(fn, mode='w') as f:
            await f.write(json.dumps(data))

    return BSPlayer(data) 
Example #4
Source File: brawlstars_official.py    From SML-Cogs with MIT License
async def _bsset_config(self, ctx):
        """Band config"""
        if len(ctx.message.attachments) == 0:
            await self.bot.say(
                "Please attach config yaml with this command. "
                "See config.example.yml for how to format it."
            )
            return

        attach = ctx.message.attachments[0]
        url = attach["url"]

        async with self.session.get(url) as resp:
            with open(BAND_CONFIG_YML, "wb") as f:
                f.write(await resp.read())

        await self.bot.say(
            "Attachment received and saved as {}".format(BAND_CONFIG_YML))

        self.settings['config'] = BAND_CONFIG_YML
        dataIO.save_json(JSON, self.settings)

        await self.bot.delete_message(ctx.message) 
Example #5
Source File: autobump.py    From bioconda-utils with MIT License
async def apply(self, recipe: Recipe) -> None:
        if not recipe.is_modified():
            raise self.NoChanges(recipe)

        branch_name = self.branch_name(recipe)
        changed = False
        async with self.git.lock_working_dir:
            self.git.prepare_branch(branch_name)
            async with aiofiles.open(recipe.path, "w",
                                     encoding="utf-8") as fdes:
                await fdes.write(recipe.dump())
            if recipe.version != recipe.orig.version:
                msg = f"Update {recipe} to {recipe.version}"
            elif recipe.build_number != recipe.orig.build_number:
                msg = f"Bump {recipe} buildnumber"
            else:
                msg = f"Update {recipe}"
            changed = self.git.commit_and_push_changes([recipe.path], branch_name, msg)
        if changed:
            # CircleCI appears to have problems picking up on our PRs. Let's wait
            # a while before we create the PR, so the pushed branch has time to settle.
            await asyncio.sleep(10)  # let push settle before going on
        elif not recipe.on_branch:
            raise self.NoChanges(recipe) 
Example #6
Source File: archived_logs.py    From polyaxon with Apache License 2.0
async def get_archived_operation_logs(
    run_uuid: str, last_file: Optional[str], check_cache: bool = True
) -> Tuple[List[V1Log], Optional[str]]:

    logs = []
    last_file = await get_next_file(run_uuid=run_uuid, last_file=last_file)
    if not last_file:
        return logs, last_file

    logs_path = await download_logs_file(
        run_uuid=run_uuid, last_file=last_file, check_cache=check_cache
    )

    if not os.path.exists(logs_path):
        return logs, last_file

    async with aiofiles.open(logs_path, mode="r") as f:
        contents = await f.read()
        if contents:
            logs = V1Logs.read(contents)
            logs = logs.logs
    return logs, last_file 
Example #7
Source File: events.py    From polyaxon with Apache License 2.0
async def process_operation_event(
    events_path: str,
    event_kind: str,
    event_name: str,
    orient: str = V1Events.ORIENT_CSV,
) -> Optional[Dict]:
    if not events_path or not os.path.exists(events_path):
        return None

    async with aiofiles.open(events_path, mode="r") as f:
        contents = await f.read()
        if contents:
            if orient == V1Events.ORIENT_CSV:
                return {"name": event_name, "kind": event_kind, "data": contents}
            if orient == V1Events.ORIENT_DICT:
                df = V1Events.read(
                    kind=event_kind, name=event_name, data=contents, parse_dates=False
                )
                return {"name": event_name, "kind": event_kind, "data": df.to_dict()}
            else:
                raise HTTPException(
                    detail="received an unrecognisable orient value {}.".format(orient),
                    status_code=status.HTTP_400_BAD_REQUEST,
                )
    return None 
Example #8
Source File: brawlstars.py    From SML-Cogs with MIT License
async def api_fetch_club(tag=None, auth=None, session=None, **kwargs):
    """Fetch club"""
    url = 'https://api.starlist.pro/v1/club?tag={}'.format(clean_tag(tag))
    fn = os.path.join(CACHE_CLUB_PATH, "{}.json".format(tag))
    try:
        data = await api_fetch(url=url, auth=auth, session=session)
    except APIServerError:
        if os.path.exists(fn):
            async with aiofiles.open(fn, mode='r') as f:
                data = json.loads(await f.read())
        else:
            raise
    else:
        async with aiofiles.open(fn, mode='w') as f:
            await f.write(json.dumps(data))

    return BSClub(data) 
Example #9
Source File: brawlstars_official.py    From SML-Cogs with MIT License
async def api_fetch_player(tag=None, auth=None, session=None, **kwargs):
    """Fetch player"""
    url = 'https://api.brawlstars.com/v1/players/%23{}'.format(clean_tag(tag))
    fn = os.path.join(CACHE_PLAYER_PATH, "{}.json".format(tag))
    try:
        data = await api_fetch(url=url, auth=auth, session=session)
    except APIServerError:
        if os.path.exists(fn):
            async with aiofiles.open(fn, mode='r') as f:
                data = json.loads(await f.read())
        else:
            raise
    else:
        async with aiofiles.open(fn, mode='w') as f:
            await f.write(json.dumps(data))

    return BSPlayer(data) 
Example #10
Source File: rushwars.py    From SML-Cogs with MIT License
async def _rwset_config(self, ctx):
        """Team config"""
        if len(ctx.message.attachments) == 0:
            await self.bot.say(
                "Please attach config yaml with this command. "
                "See config.example.yml for how to format it."
            )
            return

        attach = ctx.message.attachments[0]
        url = attach["url"]

        async with aiohttp.ClientSession() as session:
            async with session.get(url) as resp:
                with open(TEAM_CONFIG_YML, "wb") as f:
                    f.write(await resp.read())

        await self.bot.say(
            "Attachment received and saved as {}".format(TEAM_CONFIG_YML))

        self.settings['config'] = TEAM_CONFIG_YML
        dataIO.save_json(JSON, self.settings)

        await self.bot.delete_message(ctx.message) 
Example #11
Source File: __init__.py    From peony-twitter with MIT License
async def download(self, session=None, chunk=-1):
        if self.content:
            if chunk < 0:
                return self.content
            else:
                return self.content[:chunk]

        Media.cache_dir.mkdir(exist_ok=True)

        if self.cache.exists():
            async with aiofiles.open(str(self.cache), mode='rb') as stream:
                self.content = await stream.read()
                if self.content_length == len(self.content):
                    return self.content

        if session is None:
            raise RuntimeError("No session")

        async with session.get(self.url) as response:
            print("downloading", self.filename)
            self.content = await response.read()
            async with aiofiles.open(str(self.cache), mode='wb') as stream:
                await stream.write(self.content)

            return self.content 
Example #12
Source File: upload.py    From peony-twitter with MIT License
async def send_tweet_with_media():
    # read the tweet's status
    status = input("status: ")

    path = ""
    while not path or not os.path.exists(path):
        path = input('file to upload:\n')

    # read the most common input formats
    path = urlparse(path).path.strip(" \"'")

    async with aiofiles.open(path, 'rb') as media:
        # optimize pictures if PIL is available
        if PIL:
            media = await process_media(media, path)

        uploaded = await client.upload_media(media,
                                             chunk_size=2**18,
                                             chunked=True)
        media_id = uploaded.media_id
        await client.api.statuses.update.post(status=status,
                                              media_ids=media_id) 
Example #13
Source File: 淘宝API文档解析.py    From TSDK with MIT License
async def main(self):
        ''''''
        res = self.getAllapi()
        if isinstance(res,dict):
            print(res)
            return
        for item in res:
            dt = {}
            name = item.get('name',None) or item.get('treeName',None)
            if name not in ['淘宝客API','淘宝搜索API','会员中心API','淘宝卡券平台','商户API','手淘分享','手机淘宝API']:
                continue
            for doc_item in item.get('catelogList',[]):
                res = await self.getDetail(doc_item.get('docId',285),doc_item.get('docType',2))
                dt.update(res)
            # async with aiofiles.open(name + '.json', 'w') as f:
            #     await f.write(json.dumps(dt))
            with open(name + '.json','w') as f:
                # f.write(json.dumps(dt))
                json.dump(dt,f) 
Example #14
Source File: rushwars.py    From SML-Cogs with MIT License
async def fetch_player(self, tag=None, auth=None, **kwargs):
        """Fetch player"""
        url = 'https://api.rushstats.com/v1/player/{}'.format(clean_tag(tag))
        fn = os.path.join(CACHE_PLAYER_PATH, "{}.json".format(tag))
        try:
            data = await self.fetch(url=url, auth=auth)
        except APIServerError:
            if os.path.exists(fn):
                async with aiofiles.open(fn, mode='r') as f:
                    data = json.loads(await f.read())
            else:
                raise
        else:
            async with aiofiles.open(fn, mode='w') as f:
                await f.write(json.dumps(data))

        return RWPlayer(data) 
Example #15
Source File: function.py    From owllook with Apache License 2.0
async def _get_data(filename, default='') -> list:
    """
    Get data from a file
    :param filename: filename
    :param default: default value
    :return: data
    """
    root_folder = os.path.dirname(os.path.dirname(__file__))
    user_agents_file = os.path.join(
        os.path.join(root_folder, 'data'), filename)
    try:
        async with aiofiles.open(user_agents_file, mode='r') as f:
            data = [_.strip() for _ in await f.readlines()]
    except Exception:
        data = [default]
    return data 
Example #16
Source File: asgi.py    From datasette with Apache License 2.0
async def asgi_send_file(
    send, filepath, filename=None, content_type=None, chunk_size=4096
):
    headers = {}
    if filename:
        headers["Content-Disposition"] = 'attachment; filename="{}"'.format(filename)
    first = True
    async with aiofiles.open(str(filepath), mode="rb") as fp:
        if first:
            await asgi_start(
                send,
                200,
                headers,
                content_type or guess_type(str(filepath))[0] or "text/plain",
            )
            first = False
        more_body = True
        while more_body:
            chunk = await fp.read(chunk_size)
            more_body = len(chunk) == chunk_size
            await send(
                {"type": "http.response.body", "body": chunk, "more_body": more_body}
            ) 
Example #17
Source File: github_releases_data.py    From randovania with GNU General Public License v3.0
async def _read_from_persisted() -> Optional[List[dict]]:
    try:
        async with aiofiles.open(_last_check_file()) as open_file:
            last_check = json.loads(await open_file.read())

        if _is_recent(last_check):
            return last_check["data"]
        else:
            return None

    except json.JSONDecodeError as e:
        logging.warning("Unable to decode persisted releases check: %s", str(e))
        return None

    except FileNotFoundError:
        return None 
Example #18
Source File: __init__.py    From BlackSheep with MIT License
def get_file_getter(file_path,
                    file_size,
                    size_limit=1024*64):
    # NB: if the file size is small, we read its bytes and return them;
    # otherwise, a lazy reader is returned; that returns the file in chunks

    if file_size > size_limit:
        async def file_chunker():
            async for chunk in get_file_chunks(file_path, size_limit):
                yield chunk
            yield b''

        return file_chunker

    async def file_getter():
        async with aiofiles.open(file_path, mode='rb') as file:
            data = await file.read()
            yield data
            yield b''
    return file_getter 
Example #19
Source File: proxy_tools.py    From hproxy with MIT License
async def _get_data(filename, default=''):
    """
    Get data from a file
    :param filename: filename
    :param default: default value
    :return: data
    """
    root_folder = os.path.dirname(__file__)
    user_agents_file = os.path.join(root_folder, filename)
    try:
        async with aiofiles.open(user_agents_file, mode='r') as f:
            data = [_.strip() for _ in await f.readlines()]
    except Exception:
        data = [default]
    return data 
Example #20
Source File: esa_cci_odp.py    From cate with MIT License
def get_updates(self, reset=False) -> Dict:
        """
        Ask to retrieve the differences found between a previous
        dataStore status and the current one,
        The implementation return a dictionary with the new ['new'] and removed ['del'] dataset.
        it also return the reference time to the datastore status taken as previous snapshot,
        Reset flag is used to clean up the support files, freeze and diff.
        :param: reset=False. Set this flag to true to clean up all the support files forcing a
                synchronization with the remote catalog
        :return: A dictionary with keys { 'generated', 'source_ref_time', 'new', 'del' }.
                 genetated: generation time, when the check has been executed
                 source_ref_time: when the local copy of the remoted dataset hes been made.
                                  It is also used by the system to refresh the current images when
                                  is older then 1 day.
                 new: a list of new dataset entry
                 del: a list of removed dataset
        """
        diff_file = os.path.join(self._metadata_store_path, self._get_update_tag() + '-diff.json')

        if os.path.isfile(diff_file):
            with open(diff_file, 'r') as json_in:
                report = json.load(json_in)
        else:
            generated = datetime.now()
            report = {"generated": str(generated),
                      "source_ref_time": str(generated),
                      "new": list(),
                      "del": list()}

        # clean up when requested
        if reset:
            if os.path.isfile(diff_file):
                os.remove(diff_file)
            frozen_file = os.path.join(self._metadata_store_path, self._get_update_tag() + '-freeze.json')
            if os.path.isfile(frozen_file):
                os.remove(frozen_file)
        return report 
Example #21
Source File: esa_cci_odp.py    From cate with MIT License
async def _get_infos_from_feature(session, feature: dict) -> tuple:
    feature_info = _extract_feature_info(feature)
    opendap_dds_url = f"{feature_info[4]['Opendap']}.dds"
    resp = await session.request(method='GET', url=opendap_dds_url)
    if resp.status >= 400:
        resp.release()
        _LOG.warning(f"Could not open {opendap_dds_url}: {resp.status}")
        return {}, {}
    content = await resp.read()
    return _retrieve_infos_from_dds(str(content, 'utf-8').split('\n')) 
Example #22
Source File: esa_cci_odp.py    From cate with MIT License
async def _create_data_source(self, json_dict: dict, datasource_id: str):
        local_metadata_dataset_dir = os.path.join(self._metadata_store_path, datasource_id)
        # todo set True when dimensions shall be read during meta data fetching
        meta_info = await _load_or_fetch_json(_fetch_meta_info,
                                              fetch_json_args=[datasource_id,
                                                               json_dict.get('odd_url', None),
                                                               json_dict.get('metadata_url', None),
                                                               json_dict.get('variables', []),
                                                               False],
                                              fetch_json_kwargs=dict(),
                                              cache_used=self.index_cache_used,
                                              cache_dir=local_metadata_dataset_dir,
                                              cache_json_filename='meta-info.json',
                                              cache_timestamp_filename='meta-info-timestamp.txt',
                                              cache_expiration_days=self.index_cache_expiration_days)
        drs_ids = self._get_as_list(meta_info, 'drs_id', 'drs_ids')
        with open(os.path.join(os.path.dirname(__file__), 'data/excluded_data_sources')) as fp:
            excluded_data_sources = fp.read().split('\n')
            for drs_id in drs_ids:
                if drs_id in excluded_data_sources:
                    continue
                if drs_id in set([ds.id for ds in self._data_sources]):
                    _LOG.warning(f'Data source {drs_id} already included. Will omit this one.')
                    continue
                meta_info = meta_info.copy()
                meta_info.update(json_dict)
                self._adjust_json_dict(meta_info, drs_id)
                meta_info['cci_project'] = meta_info['ecv']
                meta_info['fid'] = datasource_id
                data_source = EsaCciOdpDataSource(self, meta_info, datasource_id, drs_id)
                self._data_sources.append(data_source) 
Example #23
Source File: __init__.py    From PyPlanet with GNU General Public License v3.0
async def touch(self, path: str, **kwargs):
		async with self.open(path, 'w+') as fh:
			await fh.write('') 
Example #24
Source File: rushwars.py    From SML-Cogs with MIT License
async def _get_team_config(self, force_update=False):
        if force_update or self._team_config is None:
            async with aiofiles.open(TEAM_CONFIG_YML) as f:
                contents = await f.read()
                self._team_config = yaml.load(contents, Loader=yaml.FullLoader)
        return self._team_config 
Example #25
Source File: brawlstars_official.py    From SML-Cogs with MIT License
async def _get_club_config(self, force_update=False):
        if force_update or self._club_config is None:
            async with aiofiles.open(BAND_CONFIG_YML) as f:
                contents = await f.read()
                self._club_config = yaml.load(contents, Loader=yaml.FullLoader)
        return self._club_config 
Example #26
Source File: test_pipelines.py    From ant_nest with GNU Lesser General Public License v3.0
async def test_item_base_file_dump_pipeline():
    pl = pls.ItemBaseFileDumpPipeline()
    await pl.dump("/dev/null", "Hello World")
    await pl.dump("/dev/null", b"Hello World")
    await pl.dump("/dev/null", io.StringIO("Hello World"))
    await pl.dump("/dev/null", io.BytesIO(b"Hello World"))
    await pl.dump("/dev/null", open("./tests/test.html"), buffer_size=4)
    async with aiofiles.open("./tests/test.html") as f:
        await pl.dump("/dev/null", f)
    async with aiofiles.open("./tests/test.html", "rb") as f:
        await pl.dump("/dev/null", f, buffer_size=4)

    with pytest.raises(ValueError):
        await pl.dump("/dev/null", None) 
Example #27
Source File: github_releases_data.py    From randovania with GNU General Public License v3.0
async def _persist(data: List[dict]):
    _last_check_file().parent.mkdir(parents=True, exist_ok=True)

    async with aiofiles.open(_last_check_file(), "w") as open_file:
        await open_file.write(
            json.dumps(
                {
                    "last_check": datetime.datetime.now().isoformat(),
                    "data": data,
                },
                default=str)) 
Example #28
Source File: __init__.py    From BlackSheep with MIT License
async def get_file_chunks(file_path: str, size_limit: int = 1024*64):
    async with aiofiles.open(file_path, mode='rb') as f:
        while True:
            chunk = await f.read(size_limit)

            if not chunk:
                break

            yield chunk 
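
A brief usage note: since get_file_chunks is an async generator, callers consume it with async for. A hypothetical caller (not part of the BlackSheep source) might look like this:

async def print_chunk_sizes(path):
    # Stream the file chunk by chunk without loading it all into memory.
    async for chunk in get_file_chunks(path):
        print(len(chunk))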
Example #29
Source File: responses.py    From BlackSheep with MIT License
def _get_file_provider(file_path: str):
    async def data_provider():
        async with aiofiles.open(file_path, mode='rb') as f:
            while True:
                chunk = await f.read(1024 * 64)

                if not chunk:
                    break

                yield chunk
            yield b''
    return data_provider 
Example #30
Source File: gigantum.py    From gigantum-client with MIT License
async def _file_loader(self, filename: str, progress_update_fn: Callable):
        """Method to provide non-blocking chunked reads of files, useful if large.

        Args:
            filename: absolute path to the file to upload
            progress_update_fn: A callable with arg "completed_bytes" (int) indicating how many bytes have been
                                uploaded in since last called
        """
        async with aiofiles.open(filename, 'rb') as f:
            if self.is_multipart:
                if not self.current_part:
                    raise ValueError("No parts remain to get presigned URL.")

                # if multipart, seek to the proper spot in the file
                await f.seek(self.current_part.start_byte)

            read_bytes = 0
            chunk = await f.read(self.upload_chunk_size)
            while chunk:
                progress_update_fn(completed_bytes=len(chunk))
                read_bytes += len(chunk)
                yield chunk

                if self.is_multipart:
                    if not self.current_part:
                        raise ValueError("No parts remain to get presigned URL.")
                    if self.current_part.start_byte + read_bytes >= self.current_part.end_byte:
                        # You're done reading for this part
                        break

                # Keep reading and streaming chunks for this part
                chunk = await f.read(self.upload_chunk_size)