Python loguru.logger.success() Examples
The following are 5 code examples of loguru.logger.success(). They are drawn from open-source projects; the source file, project, and license are noted above each example. You may also want to check out all available functions/classes of the module loguru.logger.
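loguru ships a custom SUCCESS level with severity 25, sitting between INFO (20) and WARNING (30), and logger.success() is the shortcut for logging a message at that level. As a minimal sketch of basic usage before the project examples below (the message text is illustrative, not from any of the examples):

from loguru import logger

# SUCCESS has severity 25, between INFO (20) and WARNING (30), so a sink
# configured with level="INFO" will also display these messages.
logger.success("Database backup completed")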
Example #1
Source File: classifier.py From CharGer with GNU General Public License v3.0
def match_clinvar(self) -> None:
    """Match the input variant with the ClinVar table.

    Update :attr:`CharGerResult.clinvar` if the variant matches a ClinVar
    record by calling :meth:`_match_clinvar_one_variant`.
    """
    if self.config.clinvar_table is None:
        logger.info("Skip matching ClinVar")
        return
    logger.info(
        f"Match input variants with ClinVar table at {self.config.clinvar_table}"
    )
    clinvar_match_num = 0
    with TabixFile(str(self.config.clinvar_table), encoding="utf8") as tabix:
        cols = tabix.header[0][len("#") :].split("\t")
        for result in self.results:
            record = self._match_clinvar_one_variant(result.variant, tabix, cols)
            if record is not None:
                result.clinvar = record
                clinvar_match_num += 1
    logger.success(
        f"Matched {clinvar_match_num:,d} out of {len(self.input_variants):,d} input variants to a ClinVar record"
    )
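Distilled from the example above, a minimal sketch of the logging pattern it uses: logger.info for progress, logger.success for the final summary, with the ",d" format spec adding thousands separators. The counts here are made-up placeholders:

from loguru import logger

matched, total = 1234, 56789
logger.info("Matching input variants with the ClinVar table...")
logger.success(f"Matched {matched:,d} out of {total:,d} input variants")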
Example #2
Source File: test_threading.py From loguru with MIT License
def test_heavily_threaded_logging(capsys):
    logger.remove()

    def function():
        i = logger.add(NonSafeSink(0.1), format="{message}", catch=False)
        logger.debug("AAA")
        logger.info("BBB")
        logger.success("CCC")
        logger.remove(i)

    threads = [Thread(target=function) for _ in range(10)]

    for thread in threads:
        thread.start()

    for thread in threads:
        thread.join()

    logger.remove()

    out, err = capsys.readouterr()
    assert out == ""
    assert err == ""
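The test above relies on logger.add() returning an integer handler id that logger.remove() accepts, which is what lets each thread tear down only its own sink. A minimal sketch of that add/remove round trip, using sys.stderr in place of the test's NonSafeSink helper:

import sys
from loguru import logger

handler_id = logger.add(sys.stderr, format="{message}")  # add() returns a handler id
logger.success("visible through the sink added above")
logger.remove(handler_id)  # removes only that sink; remove() with no argument removes all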
Example #3
Source File: classifier.py From CharGer with GNU General Public License v3.0
def _read_input_variants(self) -> None:
    """Read input VCF and set up the result template.

    Load :attr:`input_variants` from :attr:`self.config.input <.CharGerConfig.input>`.
    Also populate :attr:`results` matching the input variant.
    """
    if self.config.input is None:
        raise ValueError("No input file is given in the config")
    logger.info(f"Read input VCF from {self.config.input}")
    # TODO: Skip variants with filter, or with high allele frequency
    # num_skipped_variants: Dict[str, int] = {"has_filter": 0}
    for variant in Variant.read_and_parse_vcf(self.config.input):
        # # Skip the variant with filter (not PASS)
        # if variant.filter:
        #     logger.warning(
        #         f"{variant} has filter {','.join(variant.filter)}. Skipped"
        #     )
        #     num_skipped_variants["has_filter"] += 1
        #     continue
        self.input_variants.append(variant)
        # We also create the result template
        self.results.append(CharGerResult(variant))
    logger.success(
        f"Read total {len(self.input_variants):,d} variants from the input VCF"
    )
Example #4
Source File: config.py From google-music-scripts with MIT License
def configure_logging(
    modifier=0, *, username=None, debug=False, log_to_stdout=True, log_to_file=False
):
    logger.remove()

    if debug:
        logger.enable('audio_metadata')
        logger.enable('google_music')
        logger.enable('google_music-proto')
        logger.enable('google_music_utils')

    verbosity = 3 + modifier

    if verbosity < 0:
        verbosity = 0
    elif verbosity > 8:
        verbosity = 8

    log_level = VERBOSITY_LOG_LEVELS[verbosity]

    if log_to_stdout:
        logger.add(
            sys.stdout,
            level=log_level,
            format=LOG_FORMAT,
            backtrace=False
        )

    if log_to_file:
        log_dir = ensure_log_dir(username=username)
        log_file = (log_dir / time.strftime('%Y-%m-%d_%H-%M-%S')).with_suffix('.log')

        logger.success("Logging to file: {}", log_file)

        logger.add(
            log_file,
            level=log_level,
            format=LOG_FORMAT,
            backtrace=False,
            encoding='utf8',
            newline='\n'
        )
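Note that this example passes the log file as a positional argument, logger.success("Logging to file: {}", log_file), letting loguru apply str.format-style formatting rather than building an f-string up front. It also shows the level= argument of logger.add(), which sets a per-sink minimum severity and interacts with SUCCESS's rank of 25. A minimal sketch of that threshold behavior (the sink and messages here are illustrative):

import sys
from loguru import logger

logger.remove()  # drop the default sink
logger.add(sys.stderr, level="SUCCESS")  # per-sink minimum severity

logger.info("hidden: INFO (20) is below the SUCCESS (25) threshold")
logger.success("shown: meets the threshold")
logger.warning("shown: WARNING (30) is above it")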
Example #5
Source File: train.py From bert-sentiment with MIT License
def train(
    root=True,
    binary=False,
    bert="bert-large-uncased",
    epochs=30,
    batch_size=32,
    save=False,
):
    trainset = SSTDataset("train", root=root, binary=binary)
    devset = SSTDataset("dev", root=root, binary=binary)
    testset = SSTDataset("test", root=root, binary=binary)

    config = BertConfig.from_pretrained(bert)
    if not binary:
        config.num_labels = 5
    model = BertForSequenceClassification.from_pretrained(bert, config=config)
    model = model.to(device)

    lossfn = torch.nn.CrossEntropyLoss()
    optimizer = torch.optim.Adam(model.parameters(), lr=1e-5)

    for epoch in range(1, epochs):
        train_loss, train_acc = train_one_epoch(
            model, lossfn, optimizer, trainset, batch_size=batch_size
        )
        val_loss, val_acc = evaluate_one_epoch(
            model, lossfn, optimizer, devset, batch_size=batch_size
        )
        test_loss, test_acc = evaluate_one_epoch(
            model, lossfn, optimizer, testset, batch_size=batch_size
        )
        logger.info(f"epoch={epoch}")
        logger.info(
            f"train_loss={train_loss:.4f}, val_loss={val_loss:.4f}, test_loss={test_loss:.4f}"
        )
        logger.info(
            f"train_acc={train_acc:.3f}, val_acc={val_acc:.3f}, test_acc={test_acc:.3f}"
        )

        if save:
            label = "binary" if binary else "fine"
            nodes = "root" if root else "all"
            torch.save(model, f"{bert}__{nodes}__{label}__e{epoch}.pickle")

    logger.success("Done!")