Python yaml.UnsafeLoader() Examples
The following are 12 code examples of yaml.UnsafeLoader().
You can vote up the ones you like or vote down the ones you don't like,
and go to the original project or source file by following the links above each example.
You may also want to check out all available functions and classes of the module yaml, or try the search function.
Example #1
Source File: tokenizer_test.py From OpenNMT-tf with MIT License | 6 votes |
def testOpenNMTTokenizerAssets(self): asset_dir = self.get_temp_dir() # Write a dummy BPE model. bpe_model_path = os.path.join(asset_dir, "model.bpe") with open(bpe_model_path, "w") as bpe_model_file: bpe_model_file.write("#version: 0.2\ne s</w>\n") tokenizer = tokenizers.OpenNMTTokenizer(mode="conservative", bpe_model_path=bpe_model_path) # Generated assets are prefixed but not existing resources. assets = tokenizer.export_assets(asset_dir, asset_prefix="source_") self.assertIn("source_tokenizer_config.yml", assets) self.assertTrue(os.path.exists(assets["source_tokenizer_config.yml"])) self.assertIn("model.bpe", assets) self.assertTrue(os.path.exists(assets["model.bpe"])) # The tokenization configuration should not contain absolute paths to resources. with open(assets["source_tokenizer_config.yml"], "rb") as config_file: asset_config = yaml.load(config_file.read(), Loader=yaml.UnsafeLoader) self.assertDictEqual(asset_config, {"mode": "conservative", "bpe_model_path": "model.bpe"})
Example #2
Source File: config.py From OpenNMT-tf with MIT License | 6 votes |
def load_config(config_paths, config=None): """Loads YAML configuration files. Args: config_paths: A list of configuration files that will be merged to a single configuration. The rightmost configuration takes priority. config: A (possibly non empty) config dictionary to fill. Returns: The configuration as Python dictionary. """ if config is None: config = {} for config_path in config_paths: with tf.io.gfile.GFile(config_path, mode="rb") as config_file: subconfig = yaml.load(config_file.read(), Loader=yaml.UnsafeLoader) # Add or update section in main configuration. merge_dict(config, subconfig) return config
Example #3
Source File: __init__.py From satpy with GNU General Public License v3.0 | 6 votes |
def read_reader_config(config_files, loader=UnsafeLoader): """Read the reader `config_files` and return the info extracted.""" conf = {} LOG.debug('Reading %s', str(config_files)) for config_file in config_files: with open(config_file) as fd: conf.update(yaml.load(fd.read(), Loader=loader)) try: reader_info = conf['reader'] except KeyError: raise KeyError( "Malformed config file {}: missing reader 'reader'".format( config_files)) reader_info['config_files'] = config_files return reader_info
Example #4
Source File: yaml_reader.py From satpy with GNU General Public License v3.0 | 6 votes |
def __init__(self, config_files): """Load information from YAML configuration file about how to read data files.""" self.config = {} self.config_files = config_files for config_file in config_files: with open(config_file) as fd: self.config = recursive_dict_update(self.config, yaml.load(fd, Loader=UnsafeLoader)) self.info = self.config['reader'] self.name = self.info['name'] self.file_patterns = [] for file_type, filetype_info in self.config['file_types'].items(): filetype_info.setdefault('file_type', file_type) # correct separator if needed file_patterns = [os.path.join(*pattern.split('/')) for pattern in filetype_info['file_patterns']] filetype_info['file_patterns'] = file_patterns self.file_patterns.extend(file_patterns) if 'sensors' in self.info and not isinstance(self.info['sensors'], (list, tuple)): self.info['sensors'] = [self.info['sensors']] self.datasets = self.config.get('datasets', {}) self.info['filenames'] = [] self.all_ids = {} self.load_ds_ids_from_config()
Example #5
Source File: __init__.py From satpy with GNU General Public License v3.0 | 6 votes |
def read_writer_config(config_files, loader=UnsafeLoader): """Read the writer `config_files` and return the info extracted.""" conf = {} LOG.debug('Reading %s', str(config_files)) for config_file in config_files: with open(config_file) as fd: conf.update(yaml.load(fd.read(), Loader=loader)) try: writer_info = conf['writer'] except KeyError: raise KeyError( "Malformed config file {}: missing writer 'writer'".format( config_files)) writer_info['config_files'] = config_files return writer_info
Example #6
Source File: utils.py From rl-baselines-zoo with MIT License | 5 votes |
def get_saved_hyperparams(stats_path, norm_reward=False, test_mode=False): """ :param stats_path: (str) :param norm_reward: (bool) :param test_mode: (bool) :return: (dict, str) """ hyperparams = {} if not os.path.isdir(stats_path): stats_path = None else: config_file = os.path.join(stats_path, 'config.yml') if os.path.isfile(config_file): # Load saved hyperparameters with open(os.path.join(stats_path, 'config.yml'), 'r') as f: hyperparams = yaml.load(f, Loader=yaml.UnsafeLoader) # pytype: disable=module-attr hyperparams['normalize'] = hyperparams.get('normalize', False) else: obs_rms_path = os.path.join(stats_path, 'obs_rms.pkl') hyperparams['normalize'] = os.path.isfile(obs_rms_path) # Load normalization params if hyperparams['normalize']: if isinstance(hyperparams['normalize'], str): normalize_kwargs = eval(hyperparams['normalize']) if test_mode: normalize_kwargs['norm_reward'] = norm_reward else: normalize_kwargs = {'norm_obs': hyperparams['normalize'], 'norm_reward': norm_reward} hyperparams['normalize_kwargs'] = normalize_kwargs return hyperparams, stats_path
Example #7
Source File: tokenizer.py From OpenNMT-tf with MIT License | 5 votes |
def make_tokenizer(config=None): """Creates a tokenizer instance from the configuration. Args: config: Path to a configuration file or the configuration dictionary. Returns: A :class:`opennmt.tokenizers.Tokenizer` instance. Raises: ValueError: if :obj:`config` is invalid. """ if config: if isinstance(config, str) and tf.io.gfile.exists(config): with tf.io.gfile.GFile(config, mode="rb") as config_file: config = yaml.load(config_file, Loader=yaml.UnsafeLoader) if isinstance(config, dict): tokenizer_type = config.get("type") if tokenizer_type is None: tokenizer_type = "OpenNMTTokenizer" tokenizer_params = config else: tokenizer_params = config.get("params", {}) tokenizer_class = _TOKENIZERS_REGISTRY.get(tokenizer_type) if tokenizer_class is None: raise ValueError("%s is not in list of accepted tokenizers: %s" % ( tokenizer_type, ", ".join(sorted(_TOKENIZERS_REGISTRY.class_names)))) tokenizer = tokenizer_class(**tokenizer_params) else: raise ValueError("Invalid tokenization configuration: %s" % str(config)) else: # If the tokenization was not configured, we assume that an external tokenization # was used and we don't include the tokenizer in the exported graph. tokenizer = SpaceTokenizer(in_graph=False) return tokenizer
Example #8
Source File: yaml_cache.py From vmaas with GNU General Public License v2.0 | 5 votes |
def load_yaml(self): """Load cache from YAML file.""" with open(self.filename, "r") as stream: try: # FIXME: workaround using UnsafeLoader because https://github.com/yaml/pyyaml/issues/380 data = yaml.load(stream, Loader=yaml.UnsafeLoader) except yaml.YAMLError as err: print(err) for key, val in data.items(): setattr(self, key, val) return self
Example #9
Source File: config.py From sockeye with Apache License 2.0 | 5 votes |
def load(fname: str) -> 'Config': """ Returns a Config object loaded from a file. :param fname: Name of file to load the Config from. :return: Configuration. """ with open(fname) as inp: obj = yaml.load(inp, Loader=yaml.UnsafeLoader) # type: ignore return obj
Example #10
Source File: __init__.py From satpy with GNU General Public License v3.0 | 5 votes |
def _load_config(self, composite_configs, **kwargs): if not isinstance(composite_configs, (list, tuple)): composite_configs = [composite_configs] conf = {} for composite_config in composite_configs: with open(composite_config) as conf_file: conf = recursive_dict_update(conf, yaml.load(conf_file, Loader=UnsafeLoader)) try: sensor_name = conf['sensor_name'] except KeyError: LOG.debug('No "sensor_name" tag found in %s, skipping.', composite_config) return sensor_id = sensor_name.split('/')[-1] sensor_deps = sensor_name.split('/')[:-1] compositors = self.compositors.setdefault(sensor_id, DatasetDict()) modifiers = self.modifiers.setdefault(sensor_id, {}) for sensor_dep in reversed(sensor_deps): if sensor_dep not in self.compositors or sensor_dep not in self.modifiers: self.load_sensor_composites(sensor_dep) if sensor_deps: compositors.update(self.compositors[sensor_deps[-1]]) modifiers.update(self.modifiers[sensor_deps[-1]]) for composite_type in ['modifiers', 'composites']: if composite_type not in conf: continue for composite_name in conf[composite_type]: self._process_composite_config(composite_name, conf, composite_type, sensor_id, composite_config, **kwargs)
Example #11
Source File: __init__.py From satpy with GNU General Public License v3.0 | 5 votes |
def add_config_to_tree(self, *decision_dict): """Add configuration to tree.""" conf = {} for config_file in decision_dict: if os.path.isfile(config_file): with open(config_file) as fd: enhancement_config = yaml.load(fd, Loader=UnsafeLoader) if enhancement_config is None: # empty file continue enhancement_section = enhancement_config.get( self.prefix, {}) if not enhancement_section: LOG.debug("Config '{}' has no '{}' section or it is empty".format(config_file, self.prefix)) continue conf = recursive_dict_update(conf, enhancement_section) elif isinstance(config_file, dict): conf = recursive_dict_update(conf, config_file) else: LOG.debug("Loading enhancement config string") d = yaml.load(config_file, Loader=UnsafeLoader) if not isinstance(d, dict): raise ValueError( "YAML file doesn't exist or string is not YAML dict: {}".format(config_file)) conf = recursive_dict_update(conf, d) self._build_tree(conf)
Example #12
Source File: config.py From satpy with GNU General Public License v3.0 | 5 votes |
def check_yaml_configs(configs, key): """Get a diagnostic for the yaml *configs*. *key* is the section to look for to get a name for the config at hand. """ diagnostic = {} for i in configs: for fname in i: with open(fname) as stream: try: res = yaml.load(stream, Loader=UnsafeLoader) msg = 'ok' except yaml.YAMLError as err: stream.seek(0) res = yaml.load(stream, Loader=BaseLoader) if err.context == 'while constructing a Python object': msg = err.problem else: msg = 'error' finally: try: diagnostic[res[key]['name']] = msg except (KeyError, TypeError): # this object doesn't have a 'name' pass return diagnostic