Python yaml.CDumper() Examples
The following are 30 code examples of yaml.CDumper(). You can vote up the ones you like or vote down the ones you don't like, and go to the original project or source file by following the links above each example. You may also want to check out all available functions and classes of the module yaml, or try the search function.
Example #1
Source File: ao.py From miui-updates-tracker with MIT License | 6 votes |
def add_rom(codename, link, info):
    """Record a new stable-fastboot ROM and write it to the device's YAML file.

    :param codename: device codename used as the output file name.
    :param link: download URL; its path segments encode version and Android release.
    :param info: device info dict; ``info['name']`` is the marketing name.
    """
    path_parts = link.split('/')
    rom = {
        # e.g. .../miui_X_V11.0.3_abcdef_9.0.zip -> "9.0"
        "android": link.split('_')[-2],
        "codename": codename,
        "device": info['name'],
        "download": link,
        "filename": path_parts[-1],
        # Content-Length header gives the size; humanize it for display.
        "size": naturalsize(int(get(link, stream=True).headers['Content-Length'])),
        "md5": "null",
        "version": path_parts[3],
    }
    DATA.append(rom)
    with open(f'stable_fastboot/{codename}.yml', 'w', newline='\n') as output:
        yaml.dump(rom, output, Dumper=yaml.CDumper)
Example #2
Source File: tracker.py From miui-updates-tracker with MIT License | 6 votes |
def archive(update: dict):
    """Append new update to the archive.

    :param update: dict with at least ``codename``, ``download``, ``version``
        and ``filename`` keys describing one released ROM.

    The archive lives at ``archive/<branch>_<rom_type>/<codename>.yml`` and
    maps each version string to its download link.
    """
    codename = update['codename']
    link = update['download']
    version = update['version']
    branch = get_branch(version)
    # Recovery ROMs ship as .zip archives; everything else is fastboot.
    rom_type = 'recovery' if update['filename'].endswith('.zip') else 'fastboot'
    file_path = f'archive/{branch}_{rom_type}/{codename}.yml'
    try:
        with open(file_path, 'r') as yaml_file:
            # NOTE(review): full (non-safe) load — fine while these files are
            # self-written, but do not point this at untrusted input.
            data = yaml.load(yaml_file, Loader=yaml.CLoader)
        # data[codename] is mutated in place, so it is already reachable from
        # `data`; the original also did `data.update({codename: data[codename]})`,
        # which was a no-op and has been removed.
        data[codename].update({version: link})
        with open(file_path, 'w') as output:
            yaml.dump(data, output, Dumper=yaml.CDumper)
    except FileNotFoundError:
        # First archived update for this device: create the file from scratch.
        data = {codename: {version: link}}
        with open(file_path, 'w') as output:
            yaml.dump(data, output, Dumper=yaml.CDumper)
Example #3
Source File: iodrivers.py From openmmtools with MIT License | 5 votes |
def _nc_dict_encoder(data):
    """Serialize *data* to YAML and package the string for netCDF storage.

    Uses the module's dict-aware dumper with explicit line breaks and a
    4-space indent so the stored representation stays stable across runs.
    """
    serialized = yaml.dump(data, Dumper=_DictYamlDumper, line_break='\n', indent=4)
    return nc_string_encoder(serialized)
Example #4
Source File: yaml_handler.py From Det3D with Apache License 2.0 | 5 votes |
def dump_to_str(self, obj, **kwargs):
    """Serialize *obj* to a YAML string.

    Any ``yaml.dump`` keyword may be passed through; the handler's Dumper is
    used unless the caller supplies their own.
    """
    if "Dumper" not in kwargs:
        kwargs["Dumper"] = Dumper
    return yaml.dump(obj, **kwargs)
Example #5
Source File: yaml_handler.py From Det3D with Apache License 2.0 | 5 votes |
def dump_to_fileobj(self, obj, file, **kwargs):
    """Serialize *obj* as YAML into the open file object *file*.

    The handler's Dumper is used unless the caller overrides it via kwargs.
    """
    if "Dumper" not in kwargs:
        kwargs["Dumper"] = Dumper
    yaml.dump(obj, file, **kwargs)
Example #6
Source File: utils.py From CO2MPAS-TA with European Union Public License 1.1 | 5 votes |
def dump(self, file, default_flow_style=False, **kw):
    """Write this object's dict representation to *file* as YAML.

    :param file: destination file path (opened in text mode).
    :param default_flow_style: passed through to ``yaml.dump``.
    :param kw: extra ``yaml.dump`` keywords; Dumper defaults to the C dumper.
    """
    import yaml
    # Honour a caller-supplied Dumper, otherwise prefer the fast C dumper.
    kw.setdefault('Dumper', yaml.CDumper)
    with open(file, 'w') as f:
        yaml.dump(self.to_dict(), f,
                  default_flow_style=default_flow_style, **kw)
Example #7
Source File: logging.py From easypy with BSD 3-Clause "New" or "Revised" License | 5 votes |
def format(self, record):
    """Render a log record's attributes as a YAML document.

    A ``---`` separator line is appended so consecutive records form a
    valid multi-document YAML stream.
    """
    document = yaml.dump(vars(record), Dumper=Dumper)
    return document + "\n---\n"
Example #8
Source File: utils.py From polemarch with GNU Affero General Public License v3.0 | 5 votes |
def set(self, value):
    """Store *value* in the cache under this entry's key, serialized as YAML."""
    serialized = dump(value, Dumper=Dumper)
    self.cache.set(self.key, serialized, self.timeout)
Example #9
Source File: inout.py From mx-DeepIM with Apache License 2.0 | 5 votes |
def save_gt(path, gts):
    """Save per-image ground-truth annotations to *path* as YAML.

    Pose arrays are flattened into plain Python lists and bounding-box
    coordinates coerced to int so the C dumper can serialize them.
    Mutates the dicts in *gts* in place.
    """
    for im_id in sorted(gts):
        for gt in gts[im_id]:
            if "cam_R_m2c" in gt:
                gt["cam_R_m2c"] = gt["cam_R_m2c"].flatten().tolist()
            if "cam_t_m2c" in gt:
                gt["cam_t_m2c"] = gt["cam_t_m2c"].flatten().tolist()
            if "obj_bb" in gt:
                gt["obj_bb"] = [int(x) for x in gt["obj_bb"]]
    with open(path, "w") as f:
        # Large width keeps each list on a single line.
        yaml.dump(gts, f, Dumper=yaml.CDumper, width=10000)
Example #10
Source File: inout.py From mx-DeepIM with Apache License 2.0 | 5 votes |
def save_info(path, info):
    """Save per-image camera info to *path* as YAML.

    Camera matrices and world-to-camera poses are flattened into plain
    lists before dumping. Mutates the dicts in *info* in place.
    """
    for im_id in sorted(info):
        im_info = info[im_id]
        for key in ("cam_K", "cam_R_w2c", "cam_t_w2c"):
            if key in im_info:
                im_info[key] = im_info[key].flatten().tolist()
    with open(path, "w") as f:
        # Large width keeps each list on a single line.
        yaml.dump(info, f, Dumper=yaml.CDumper, width=10000)
Example #11
Source File: inout.py From mx-DeepIM with Apache License 2.0 | 5 votes |
def save_yaml(path, content):
    """Dump *content* to *path* as YAML using the fast C dumper.

    The large width keeps long sequences on single lines.
    """
    with open(path, "w") as yaml_file:
        yaml.dump(content, yaml_file, Dumper=yaml.CDumper, width=10000)
Example #12
Source File: yaml.py From luscan-devel with GNU General Public License v2.0 | 5 votes |
def test_data_serialization(self, data):
    """Smoke-test that *data* can be YAML-serialized to an in-memory stream."""
    buffer = StringIO()
    dump(data, buffer, Dumper=Dumper)
Example #13
Source File: yaml.py From luscan-devel with GNU General Public License v2.0 | 5 votes |
def serialize_report(self, output_file, report_data):
    """Write *report_data* to *output_file* as a UTF-8 YAML document.

    The file is opened in binary mode; passing ``encoding`` makes yaml's
    dump() emit bytes. Without it, dump() emits str, and writing str to a
    binary-mode file raises TypeError on Python 3 — the original code did
    exactly that.
    """
    with open(output_file, "wb") as fp:
        dump(report_data, fp, Dumper=Dumper, encoding="utf-8")

#--------------------------------------------------------------------------
Example #14
Source File: types.py From costar_plan with Apache License 2.0 | 5 votes |
def SaveYaml(filename, demo):
    '''
    SaveYaml

    Really simple function to quickly save *demo* to a yaml file.
    (The original docstring said "load", but this function writes.)
    '''
    # open() replaces the Python-2-only file() builtin, and the context
    # manager guarantees the stream is closed even if dump() raises —
    # the original never closed it.
    with open(filename, 'w') as stream:
        yaml.dump(demo, stream, Dumper=Dumper)
Example #15
Source File: utilities.py From insights-core with Apache License 2.0 | 5 votes |
def write_tags(tags, tags_file_path=constants.default_tags_file):
    """
    Writes tags to tags_file_path

    Arguments:
      - tags (dict): the tags to write
      - tags_file_path (string): path to which tag data will be written

    Returns: None
    """
    with open(tags_file_path, mode="w+") as f:
        # Block style (no inline flow) keeps the file human-editable.
        f.write(yaml.dump(tags, Dumper=Dumper, default_flow_style=False))
Example #16
Source File: randomizer_window.py From wwrando with MIT License | 5 votes |
def save_settings(self):
    """Persist the current settings dict to the settings file as block-style YAML."""
    with open(self.settings_path, "w") as settings_file:
        yaml.dump(self.settings, settings_file,
                  default_flow_style=False, Dumper=yaml.Dumper)
Example #17
Source File: serializer.py From tache with MIT License | 5 votes |
def _to_yaml(data):
    """Dump data into a YAML string, preferring the fast libyaml C dumper."""
    from yaml import dump
    try:
        # C implementation — present only when PyYAML was built with libyaml.
        from yaml import CDumper as Dumper
    except ImportError:
        # Pure-Python fallback.
        from yaml import Dumper
    return dump(data, Dumper=Dumper)
Example #18
Source File: tracker.py From miui-updates-tracker with MIT License | 5 votes |
def merge_yaml(name: str):
    """Merge every per-device YAML file under *name*/ into one <name>.yml file."""
    print("Creating YAML files")
    merged = []
    for device_file in sorted(glob(f'{name}/*.yml')):
        # Skip previously-merged aggregate files.
        if device_file.endswith(('recovery.yml', 'fastboot.yml')):
            continue
        with open(device_file, "r") as yaml_file:
            merged.append(yaml.load(yaml_file, Loader=yaml.CLoader))
    # Write to an extension-less temp name first, then rename to .yml.
    combined = f'{name}/{name}'
    with open(combined, "w") as output:
        yaml.dump(merged, output, Dumper=yaml.CDumper)
    if path.exists(combined):
        rename(combined, f'{combined}.yml')
Example #19
Source File: EOL.py From miui-updates-tracker with MIT License | 5 votes |
def main():
    """ MIUI Updates Tracker """
    initialize()
    # Device lists: (s)table/(w)eekly x (r)ecovery/(f)astboot.
    names, sr_devices, sf_devices, wr_devices, wf_devices = load_devices()
    # Per-channel config: branch code passed to the fetcher plus its devices.
    versions = {'stable_fastboot': {'branch': 'F', 'devices': sf_devices},
                'stable_recovery': {'branch': '1', 'devices': sr_devices},
                'weekly_fastboot': {'branch': 'X', 'devices': wf_devices},
                'weekly_recovery': {'branch': '0', 'devices': wr_devices}}
    for name, data in versions.items():
        # fetch based on version
        if "_fastboot" in name:
            fastboot.fetch(data['devices'], data['branch'], f'{name}/', names)
        elif "_recovery" in name:
            recovery.get_roms(name)
        print("Fetched " + name.replace('_', ' '))
        # Merge files
        print("Creating YAML")
        # NOTE(review): glob() returns paths prefixed with the directory, so
        # startswith('old_') can never match here — confirm whether the intent
        # was to test the basename instead.
        yaml_files = [x for x in sorted(glob(f'{name}/*.yml'))
                      if not x.startswith('old_')]
        yaml_data = []
        for file in yaml_files:
            with open(file, "r") as yaml_file:
                yaml_data.append(yaml.load(yaml_file, Loader=yaml.CLoader))
        # Write the merged list to an extension-less temp name first so the
        # cleanup glob below does not delete it.
        with open(f'{name}/{name}', "w") as output:
            yaml.dump(yaml_data, output, Dumper=yaml.CDumper)
        # Cleanup
        for file in glob(f'{name}/*.yml'):
            remove(file)
        # ...then rename the merged file to <name>.yml.
        if path.exists(f'{name}/{name}'):
            rename(f'{name}/{name}', f'{name}/{name}.yml')
Example #20
Source File: yaml_handler.py From mmcv with Apache License 2.0 | 5 votes |
def dump_to_str(self, obj, **kwargs):
    """Serialize *obj* to a YAML string, defaulting to the handler's Dumper."""
    if 'Dumper' not in kwargs:
        kwargs['Dumper'] = Dumper
    return yaml.dump(obj, **kwargs)
Example #21
Source File: yaml_handler.py From mmcv with Apache License 2.0 | 5 votes |
def dump_to_fileobj(self, obj, file, **kwargs):
    """Serialize *obj* as YAML into the open file object *file*."""
    if 'Dumper' not in kwargs:
        kwargs['Dumper'] = Dumper
    yaml.dump(obj, file, **kwargs)
Example #22
Source File: utils.py From pySCENIC with GNU General Public License v3.0 | 5 votes |
def save_to_yaml(signatures: Sequence[Type[GeneSignature]], fname: str):
    """Persist gene signatures to a file as block-style YAML.

    :param signatures: the gene signatures to serialize.
    :param fname: destination file name (may be compressed; openfile decides).
    """
    with openfile(fname, 'w') as f:
        serialized = dump(signatures, default_flow_style=False, Dumper=Dumper)
        f.write(serialized)
Example #23
Source File: local_grad_daemon.py From SparseSC with MIT License | 5 votes |
def __init__(self, common_data, K): subprocess.call(["python", "-m", "SparseSC.cli.scgrad", "start"]) # CREATE THE RESPONSE FIFO # replace any missing values with environment variables self.common_data = common_data self.K = K # BUILT THE TEMPORARY FILE NAMES self.tmpDirManager = tempfile.TemporaryDirectory() self.tmpdirname = self.tmpDirManager.name print("Created temporary directory:", self.tmpdirname) self.GRAD_PART_FILE = os.path.join(self.tmpdirname, _GRAD_PART_FILE) self.CONTAINER_OUTPUT_FILE = os.path.join(self.tmpdirname, _CONTAINER_OUTPUT_FILE) # WRITE THE COMMON DATA TO FILE: with open(os.path.join(self.tmpdirname, _GRAD_COMMON_FILE), "w") as fh: fh.write(dump(self.common_data, Dumper=Dumper)) #-- # A UTILITY FUNCTION #-- def tarify(x,name): #-- with tarfile.open(os.path.join(self.tmpdirname, '{}.tar.gz'.format(name)), mode='w:gz') as dest_file: #-- for i, k in itertools.product( range(len(x)), range(len(x[0]))): #-- fname = 'arr_{}_{}'.format(i,k) #-- array_bytes = x[i][k].tobytes() #-- info = tarfile.TarInfo(fname) #-- info.size = len(array_bytes) #-- dest_file.addfile(info,io.BytesIO(array_bytes) ) #-- #-- tarify(part_data["dA_dV_ki"],"dA_dV_ki") #-- tarify(part_data["dB_dV_ki"],"dB_dV_ki") #-- import pdb; pdb.set_trace()
Example #24
Source File: gradient_batch_client.py From SparseSC with MIT License | 5 votes |
def upload_object_to_container( self, block_blob_client, container_name, blob_name, obj ): """ Uploads a local file to an Azure Blob storage container. :param block_blob_client: A blob service client. :type block_blob_client: `azure.storage.blob.BlockBlobService` :param str container_name: The name of the Azure Blob storage container. :param str file_path: The local path to the file. :rtype: `azure.batch.models.ResourceFile` :return: A ResourceFile initialized with a SAS URL appropriate for Batch tasks. """ # print("Uploading file {} to container [{}]...".format(blob_name, container_name)) block_blob_client.create_blob_from_text( container_name, blob_name, dump(obj, Dumper=Dumper) ) sas_token = block_blob_client.generate_blob_shared_access_signature( container_name, blob_name, permission=azureblob.BlobPermissions.READ, expiry=datetime.datetime.utcnow() + datetime.timedelta(hours=self.config.STORAGE_ACCESS_DURATION_HRS), ) sas_url = block_blob_client.make_blob_url( container_name, blob_name, sas_token=sas_token ) return models.ResourceFile(http_url=sas_url, file_path=blob_name)
Example #25
Source File: archiver.py From miui-updates-tracker with MIT License | 4 votes |
def gen_yaml(links, folder):
    """
    generate yaml file with device's info for each rom link
    :param links: a list of links
    :param folder: stable/weekly
    """
    print(folder)
    # Step 1: derive the list of device codenames covered by these links.
    if 'fastboot' in folder:
        # Fastboot links embed the codename directly before "_images".
        codenames = sorted(list(set(link.split('/')[-1].split('_images')[0] for link in links)))
    else:
        # Recovery links embed the device model; map models back to codenames.
        devices = set()
        for link in links:
            try:
                devices.add(link.split('/')[-1].split('_')[1])
            except IndexError:
                print(f"Bad link: {link}!")
                exit(1)
        devices = sorted(list(devices))
        codenames = []
        for model in devices:
            try:
                # assumes DEVICES maps codename -> tuple whose [1] is the
                # device model string — TODO confirm against DEVICES source
                for codename, info in DEVICES.items():
                    if info[1] == model:
                        codenames.append(codename)
            except IndexError as err:
                print(f"can't find codename for {model}\n{err}")
                continue
    # Step 2: for each codename, collect its ROM links keyed by version and
    # write one YAML file per device.
    for codename in codenames:
        data = {}
        roms = []
        if 'fastboot' in folder:
            roms = [link for link in links if codename == link.split('/')[-1].split('_images')[0]]
        elif 'stable_recovery' in folder:
            try:
                roms = [link for link in links if CODES[codename] in link.split('/')[3]]
            except KeyError as e:
                # Codename missing from CODES: skip this device.
                print(f'KeyError {e}')
                continue
        elif 'weekly_recovery' in folder:
            if codename == 'whyred_global':
                # Special case: whyred_global links use two model spellings.
                roms = [link for link in links
                        if link.split('/')[-1].split('_')[1].startswith(DEVICES[codename][1])
                        or link.split('/')[-1].split('_')[1] == 'HMNote5HMNote5ProGlobal']
            else:
                roms = [link for link in links if link.split('/')[-1].split('_')[1] == DEVICES[codename][1]]
        info = {}
        for rom in roms:
            # Version lives in a different path segment for fastboot links.
            if 'fastboot' in folder:
                version = rom.split('/')[3]
            else:
                version = rom.split('/')[-1].split('_')[2]
            info.update({version: rom})
        # Newest version first.
        info = dict(sorted(info.items(), reverse=True))
        data.update({codename: info})
        with open(f'{folder}/{codename}.yml', 'w') as output:
            yaml.dump(data, output, Dumper=yaml.CDumper)
Example #26
Source File: stereo.py From cvcalib with Apache License 2.0 | 4 votes |
def main():
    """Parse settings (console + optional YAML file) and run the stereo matcher."""
    Setting.generate_missing_shorthands()
    defaults = Setting.generate_defaults_dict()
    conf_parser = \
        Setting.generate_parser(defaults, console_only=True, description=
                                "Test stereo algorithms on two image files.")
    # ============== STORAGE/RETRIEVAL OF CONSOLE SETTINGS ===========================================#
    args, remaining_argv = conf_parser.parse_known_args()
    defaults[Setting.save_settings.name] = args.save_settings
    if args.settings_file:
        defaults[Setting.settings_file.name] = args.settings_file
        if osp.isfile(args.settings_file):
            # Values from the YAML settings file override built-in defaults.
            file_stream = open(args.settings_file, "r", encoding="utf-8")
            config_defaults = load(file_stream, Loader=Loader)
            file_stream.close()
            if config_defaults:
                for key, value in config_defaults.items():
                    defaults[key] = value
        else:
            raise ValueError("Settings file not found at: {0:s}".format(args.settings_file))
    # Re-parse remaining console args on top of the merged defaults.
    parser = Setting.generate_parser(defaults, parents=[conf_parser])
    args = parser.parse_args(remaining_argv)
    # process "special" setting values
    if args.folder == "!settings_file_location":
        if args.settings_file and osp.isfile(args.settings_file):
            args.folder = osp.dirname(args.settings_file)
    # save settings if prompted to do so
    if args.save_settings and args.settings_file:
        setting_dict = vars(args)
        file_stream = open(args.settings_file, "w", encoding="utf-8")
        # Temporarily strip bookkeeping keys so they are not persisted.
        file_name = setting_dict[Setting.save_settings.name]
        del setting_dict[Setting.save_settings.name]
        del setting_dict[Setting.settings_file.name]
        dump(setting_dict, file_stream, Dumper=Dumper)
        file_stream.close()
        setting_dict[Setting.save_settings.name] = file_name
        # NOTE(review): settings_file is restored as True rather than its
        # original path — confirm this is intentional.
        setting_dict[Setting.settings_file.name] = True
    app = StereoMatcherApp(args)
    app.disparity2()
Example #27
Source File: calibrate.py From cvcalib with Apache License 2.0 | 4 votes |
def main():
    """Parse settings (console + optional YAML file) and run camera calibration.

    Returns 0 on success.
    """
    Setting.generate_missing_shorthands()
    defaults = Setting.generate_defaults_dict()
    conf_parser = \
        Setting.generate_parser(defaults, console_only=True, description=
                                "Use one or more .mp4 video files to perform calibration: " +
                                "find the cameras' intrinsics and/or extrinsics.")
    # ============== STORAGE/RETRIEVAL OF CONSOLE SETTINGS ===========================================#
    args, remaining_argv = conf_parser.parse_known_args()
    defaults[Setting.save_settings.name] = args.save_settings
    if args.settings_file:
        defaults[Setting.settings_file.name] = args.settings_file
        if osp.isfile(args.settings_file):
            # Values from the YAML settings file override built-in defaults.
            file_stream = open(args.settings_file, "r", encoding="utf-8")
            config_defaults = load(file_stream, Loader=Loader)
            file_stream.close()
            for key, value in config_defaults.items():
                defaults[key] = value
        else:
            raise ValueError("Settings file not found at: {0:s}".format(args.settings_file))
    # Re-parse remaining console args on top of the merged defaults.
    parser = Setting.generate_parser(defaults, parents=[conf_parser])
    args = parser.parse_args(remaining_argv)
    # process "special" setting values
    if args.folder == "!settings_file_location":
        if args.settings_file and osp.isfile(args.settings_file):
            args.folder = osp.dirname(args.settings_file)
    # save settings if prompted to do so
    if args.save_settings and args.settings_file:
        setting_dict = vars(args)
        file_stream = open(args.settings_file, "w", encoding="utf-8")
        # Temporarily strip bookkeeping keys so they are not persisted.
        file_name = setting_dict[Setting.save_settings.name]
        del setting_dict[Setting.save_settings.name]
        del setting_dict[Setting.settings_file.name]
        dump(setting_dict, file_stream, Dumper=Dumper)
        file_stream.close()
        setting_dict[Setting.save_settings.name] = file_name
        # NOTE(review): settings_file is restored as True rather than its
        # original path — confirm this is intentional.
        setting_dict[Setting.settings_file.name] = True
    # Synced vs. unsynced video streams take different calibration paths.
    if args.unsynced:
        app = ApplicationUnsynced(args)
        app.gather_frame_data()
        app.calibrate_time_reprojection(save_data=True)
    else:
        app = ApplicationSynced(args)
        app.gather_frame_data()
        app.run_calibration()
    return 0
Example #28
Source File: multistereo.py From cvcalib with Apache License 2.0 | 4 votes |
def main():
    """Parse settings (console + optional YAML file) and build the multi-stereo app."""
    Setting.generate_missing_shorthands()
    defaults = Setting.generate_defaults_dict()
    conf_parser = \
        Setting.generate_parser(defaults, console_only=True, description=
                                "Use one or more .mp4 video files to perform calibration: " +
                                "find the cameras' intrinsics and/or extrinsics.")
    # ============== STORAGE/RETRIEVAL OF CONSOLE SETTINGS ===========================================#
    args, remaining_argv = conf_parser.parse_known_args()
    defaults[Setting.save_settings.name] = args.save_settings
    if args.settings_file:
        defaults[Setting.settings_file.name] = args.settings_file
        if osp.isfile(args.settings_file):
            # Values from the YAML settings file override built-in defaults.
            file_stream = open(args.settings_file, "r", encoding="utf-8")
            config_defaults = load(file_stream, Loader=Loader)
            file_stream.close()
            for key, value in config_defaults.items():
                defaults[key] = value
        else:
            raise ValueError("Settings file not found at: {0:s}".format(args.settings_file))
    # Re-parse remaining console args on top of the merged defaults.
    parser = Setting.generate_parser(defaults, parents=[conf_parser])
    args = parser.parse_args(remaining_argv)
    # process "special" setting values
    if args.folder == "!settings_file_location":
        if args.settings_file and osp.isfile(args.settings_file):
            args.folder = osp.dirname(args.settings_file)
    # save settings if prompted to do so
    if args.save_settings and args.settings_file:
        setting_dict = vars(args)
        file_stream = open(args.settings_file, "w", encoding="utf-8")
        # Temporarily strip bookkeeping keys so they are not persisted.
        file_name = setting_dict[Setting.save_settings.name]
        del setting_dict[Setting.save_settings.name]
        del setting_dict[Setting.settings_file.name]
        dump(setting_dict, file_stream, Dumper=Dumper)
        file_stream.close()
        setting_dict[Setting.save_settings.name] = file_name
        # NOTE(review): settings_file is restored as True rather than its
        # original path — confirm this is intentional.
        setting_dict[Setting.settings_file.name] = True
    # NOTE(review): the app is only constructed here — presumably its
    # constructor drives the processing; confirm against MultiStereoApplication.
    app = MultiStereoApplication(args)
Example #29
Source File: stt.py From SparseSC with MIT License | 4 votes |
def main():
    """Score one (fold, v_pen, w_pen) combination read from a config file.

    Command line: ``ssc.py <infile> <outfile> <batchNumber>`` where
    batchNumber is a flat index over folds x v_pen x w_pen; the result is
    written to <outfile> as YAML.
    """
    # GET THE COMMAND LINE ARGS
    ARGS = sys.argv[1:]
    if ARGS[0] == "ssc.py":
        ARGS.pop(0)
    # Fixed: the original message claimed "2 parameters" while asserting 3.
    # NOTE: assert is stripped under `python -O`; acceptable for a CLI stub.
    assert (
        len(ARGS) == 3
    ), "ssc.py expects 3 parameters: an input file, an output file, and a batch number"
    infile, outfile, batchNumber = ARGS
    batchNumber = int(batchNumber)
    v_pen, w_pen, config = get_config(infile)
    n_folds = len(config["folds"]) * len(v_pen) * len(w_pen)
    assert 0 <= batchNumber < n_folds, "Batch number out of range"
    # Decompose the flat batch index into (fold, v, w) coordinates:
    # fold varies fastest, then v, then w.
    i_fold = batchNumber % len(config["folds"])
    i_v = (batchNumber // len(config["folds"])) % len(v_pen)
    i_w = (batchNumber // len(config["folds"])) // len(v_pen)
    # Everything in config except the sweep axes is passed through verbatim.
    params = config.copy()
    del params["folds"]
    del params["v_pen"]
    del params["w_pen"]
    train, test = config["folds"][i_fold]
    out = score_train_test(
        train=train, test=test, v_pen=v_pen[i_v], w_pen=w_pen[i_w], **params
    )
    with open(outfile, "w") as fp:
        fp.write(
            dump(
                {
                    "batch": batchNumber,
                    "i_fold": i_fold,
                    "i_v": i_v,
                    "i_w": i_w,
                    "results": out,
                },
                Dumper=Dumper,
            )
        )
Example #30
Source File: batch_gradient.py From SparseSC with MIT License | 4 votes |
def single_grad_cli(
    tmpdir, N0, N1, in_controls, splits, b_i, w_pen, treated_units, Y_treated, Y_control
):
    """ wrapper for the real function

    Writes the parameters shared by all gradient components to a common YAML
    file in *tmpdir* once, then returns a closure that computes one gradient
    component by shelling out to the ``scgrad`` CLI.
    """
    from yaml import load, dump
    try:
        # Prefer the fast libyaml C implementations when available.
        from yaml import CLoader as Loader, CDumper as Dumper
    except ImportError:
        from yaml import Loader, Dumper
    _common_params = {
        "N0": N0,
        "N1": N1,
        "in_controls": in_controls,
        "splits": splits,
        "b_i": b_i,
        "w_pen": w_pen,
        "treated_units": treated_units,
        "Y_treated": Y_treated,
        "Y_control": Y_control,
    }
    # File protocol shared with the scgrad CLI: common params, per-call
    # params, and the CLI's output, all as YAML in the temp dir.
    COMMONFILE = os.path.join(tmpdir, "commonfile.yaml")
    PARTFILE = os.path.join(tmpdir, "partfile.yaml")
    OUTFILE = os.path.join(tmpdir, "outfile.yaml")
    # Written once; every inner() call reuses it.
    with open(COMMONFILE, "w") as fp:
        fp.write(dump(_common_params, Dumper=Dumper))

    def inner(A, weights, dA_dV_ki_k, dB_dV_ki_k):
        """ Calculate a single component of the gradient """
        _local_params = {
            "A": A,
            "weights": weights,
            "dA_dV_ki_k": dA_dV_ki_k,
            "dB_dV_ki_k": dB_dV_ki_k,
        }
        with open(PARTFILE, "w") as fp:
            fp.write(dump(_local_params, Dumper=Dumper))
        # scgrad reads the two input files and writes its result to OUTFILE.
        subprocess.run(["scgrad", COMMONFILE, PARTFILE, OUTFILE])
        with open(OUTFILE, "r") as fp:
            # Full (non-safe) load is acceptable here: OUTFILE is produced by
            # our own CLI, not untrusted input.
            val = load(fp, Loader=Loader)
        return val

    return inner