Python loguru.logger.info() Examples
The following are 30 code examples of loguru.logger.info(). You can vote up the ones you like or vote down the ones you don't like, and go to the original project or source file by following the links above each example. You may also want to check out all available functions/classes of the module loguru.logger, or try the search function.
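Before the examples, a minimal self-contained sketch of calling loguru.logger.info() may help (the "app.log" sink name below is only an illustrative placeholder, not taken from any project above); both styles shown here, brace-style keyword formatting and f-strings, appear throughout the examples that follow.

from loguru import logger

# loguru ships with a default stderr sink; adding a file sink is optional.
logger.add("app.log", level="INFO")  # "app.log" is a hypothetical file name

user_id = 42

# Brace-style deferred formatting: loguru interpolates the keyword arguments
# into the message template when the record is emitted.
logger.info("User {user} logged in", user=user_id)

# Plain f-strings work as well, as many of the examples below show.
logger.info(f"User {user_id} logged in")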
Example #1
Source File: join_list.py From bot with MIT License | 6 votes |
async def join_expired(chat_id: int, message_id: int):
    users = await join_list.check_list(chat_id=chat_id, message_id=message_id)
    if not users:
        logger.info(
            "All users from join-list in chat {chat} and message {message} already answer for question",
            chat=chat_id,
            message=message_id,
        )
        return

    for user_id in users:
        await join_list.pop_user_from_list(chat_id=chat_id, message_id=message_id, user_id=user_id)
        logger.info(
            "Kick chat member {user} from chat {chat} "
            "in due to user do not answer to the question in message {message}.",
            user=user_id,
            chat=chat_id,
            message=message_id,
        )
        await bot.kick_chat_member(chat_id=chat_id, user_id=user_id)
        await bot.unban_chat_member(chat_id=chat_id, user_id=user_id)

    with suppress(MessageToDeleteNotFound):
        await bot.delete_message(chat_id, message_id)
Example #2
Source File: Base_Logging.py From makemework with MIT License | 6 votes |
def write(self, msg, level='info'):
    "Write out a message"
    fname = inspect.stack()[2][3]  # May be use a entry-exit decorator instead
    d = {'caller_func': fname}
    if level.lower() == 'debug':
        logger.debug("{module} | {msg}", module=d['caller_func'], msg=msg)
    elif level.lower() == 'info':
        logger.info("{module} | {msg}", module=d['caller_func'], msg=msg)
    elif level.lower() == 'warn' or level.lower() == 'warning':
        logger.warning("{module} | {msg}", module=d['caller_func'], msg=msg)
    elif level.lower() == 'error':
        logger.error("{module} | {msg}", module=d['caller_func'], msg=msg)
    elif level.lower() == 'critical':
        logger.critical("{module} | {msg}", module=d['caller_func'], msg=msg)
    else:
        logger.critical("Unknown level passed for the msg: {}", msg)
Example #3
Source File: cmd.py From Projectors with GNU General Public License v3.0 | 6 votes |
def release(self):
    """Create a zipfile release with the current version number defined in bl_info dict in __init__.py"""
    # Builds dir
    builds = Path('.', 'builds')
    if not builds.exists():
        builds.mkdir()

    # Extract the version number from the __init__.py file.
    regex = r"\"version\":\s*(\(\d\,\s*\d\,\s*\d\))"
    with Path('__init__.py').open('r') as f:
        string = f.read().replace("\n", '')
    match = re.findall(regex, string, re.MULTILINE)[0]
    log.debug(match)
    log.info(f'Create release version: {match}')
    postfix = '.'.join([str(x) for x in eval(match)])

    # Zip all needed file into a realease.
    zip_file = builds / f'Projectors {postfix}.zip'
    with zipfile.ZipFile(zip_file, 'w') as zf:
        for f in Path('.').glob('*.py'):
            zf.write(f)
        zf.write('README.md')
        zf.write('LICENSE')

    return f'A realease zipfile was created: {zip_file}'
Example #4
Source File: video.py From stagesepx with MIT License | 6 votes |
def load_frames(self):
    # TODO full frames list can be very huge, for some devices
    logger.info(f"start loading {self.path} to memory ...")

    data: typing.List[VideoFrame] = []
    with toolbox.video_capture(self.path) as cap:
        success, frame = cap.read()
        while success:
            frame_object = VideoFrame.init(cap, frame)
            data.append(frame_object)
            success, frame = cap.read()

    # calculate memory cost
    each_cost = data[0].data.nbytes
    logger.debug(f"single frame cost: {each_cost} bytes")
    total_cost = each_cost * self.frame_count
    logger.debug(f"total frame cost: {total_cost} bytes")
    logger.info(
        f"frames loaded. frame count: {self.frame_count}. memory cost: {total_cost} bytes"
    )

    # lock the order
    self.data = tuple(data)
    # fix the length ( the last frame may be broken sometimes )
    self.frame_count = len(data)
Example #5
Source File: common.py From virtex with MIT License | 6 votes |
def cycle(dataloader, device, start_iteration: int = 0):
    r"""
    A generator to yield batches of data from dataloader infinitely.

    Internally, it sets the ``epoch`` for dataloader sampler to shuffle the
    examples. One may optionally provide the starting iteration to make sure
    the shuffling seed is different and continues naturally.
    """
    iteration = start_iteration

    while True:
        # Set the `epoch` of sampler as current iteration. This is just for
        # determinisitic shuffling after every epoch, so it is just a seed and
        # need not necessarily be the "epoch".
        logger.info(f"Beginning new epoch, setting shuffle seed {iteration}")
        dataloader.sampler.set_epoch(iteration)

        for batch in dataloader:
            for key in batch:
                batch[key] = batch[key].to(device)
            yield batch

        iteration += 1
Example #6
Source File: make.py From httprunner with Apache License 2.0 | 6 votes |
def format_pytest_with_black(*python_paths: Text) -> NoReturn:
    logger.info("format pytest cases with black ...")
    try:
        if is_support_multiprocessing() or len(python_paths) <= 1:
            subprocess.run(["black", *python_paths])
        else:
            logger.warning(
                f"this system does not support multiprocessing well, format files one by one ..."
            )
            [subprocess.run(["black", path]) for path in python_paths]
    except subprocess.CalledProcessError as ex:
        capture_exception(ex)
        logger.error(ex)
        sys.exit(1)
    except FileNotFoundError:
        err_msg = """
missing dependency tool: black
install black manually and try again:
$ pip install black
"""
        logger.error(err_msg)
        sys.exit(1)
Example #7
Source File: runner.py From httprunner with Apache License 2.0 | 6 votes |
def __run_step(self, step: TStep) -> Dict:
    """run teststep, teststep maybe a request or referenced testcase"""
    logger.info(f"run step begin: {step.name} >>>>>>")

    if step.request:
        step_data = self.__run_step_request(step)
    elif step.testcase:
        step_data = self.__run_step_testcase(step)
    else:
        raise ParamsError(
            f"teststep is neither a request nor a referenced testcase: {step.dict()}"
        )

    self.__step_datas.append(step_data)
    logger.info(f"run step end: {step.name} <<<<<<\n")
    return step_data.export_vars
Example #8
Source File: compat.py From httprunner with Apache License 2.0 | 6 votes |
def ensure_testcase_v3_api(api_content: Dict) -> Dict:
    logger.info("convert api in v2 to testcase format v3")
    teststep = {
        "request": _sort_request_by_custom_order(api_content["request"]),
    }
    teststep.update(_ensure_step_attachment(api_content))
    teststep = _sort_step_by_custom_order(teststep)

    config = {"name": api_content["name"]}
    extract_variable_names: List = list(teststep.get("extract", {}).keys())
    if extract_variable_names:
        config["export"] = extract_variable_names

    return {
        "config": config,
        "teststeps": [teststep],
    }
Example #9
Source File: core.py From httprunner with Apache License 2.0 | 6 votes |
def gen_testcase(self, file_type="pytest"):
    logger.info(f"Start to generate testcase from {self.har_file_path}")
    harfile = os.path.splitext(self.har_file_path)[0]

    try:
        testcase = self._make_testcase()
    except Exception as ex:
        capture_exception(ex)
        raise

    if file_type == "JSON":
        output_testcase_file = f"{harfile}.json"
        utils.dump_json(testcase, output_testcase_file)
    elif file_type == "YAML":
        output_testcase_file = f"{harfile}.yml"
        utils.dump_yaml(testcase, output_testcase_file)
    else:
        # default to generate pytest file
        testcase["config"]["path"] = self.har_file_path
        output_testcase_file = make_testcase(testcase)
        format_pytest_with_black(output_testcase_file)

    logger.info(f"generated testcase: {output_testcase_file}")
Example #10
Source File: test_cli.py From stagesepx with MIT License | 6 votes |
def test_cli():
    logger.info("checking main")
    subprocess.check_call(["python", "-m", "stagesepx.cli"])

    logger.info("checking one_step ...")
    subprocess.check_call(["stagesepx", "one_step", VIDEO_PATH])
    subprocess.check_call(["stagesepx", "one_step", VIDEO_PATH, "output"])

    logger.info("checking keras trainer ...")
    subprocess.check_call(["stagesepx", "train", "output", "output.h5"])
    # try to train
    subprocess.check_call(
        ["stagesepx", "train", "output", "output.h5", "--epochs", "1"]
    )
    # new
    subprocess.check_call(["stagesepx", "analyse", VIDEO_PATH, "output"])
    shutil.rmtree("output")
Example #11
Source File: classifier.py From CharGer with GNU General Public License v3.0 | 6 votes |
def _read_pp2_gene_list(self) -> None:
    """Read gene list for PP2 module.

    Load :attr:`pp2_genes` from
    :attr:`self.config.PP2_gene_list <.CharGerConfig.PP2_gene_list>`.
    Skip PP2 module if not provided.
    """
    gene_list_pth = self.config.PP2_gene_list
    # Disable PP2 module if no list is provided
    if gene_list_pth is None:
        logger.warning(
            "CharGer cannot make PP2 calls without the given gene list. "
            "Disable PP2 module"
        )
        self._acmg_module_availability["PP2"] = ModuleAvailability.INVALID_SETUP
        return

    logger.info(f"Read PP2 gene list from {gene_list_pth}")
    self.pp2_genes = set(l.strip() for l in read_lines(gene_list_pth))
    logger.info(f"Marked {len(self.pp2_genes):,d} genes for PP2")
Example #12
Source File: classifier.py From CharGer with GNU General Public License v3.0 | 6 votes |
def match_clinvar(self) -> None:
    """Match the input variant with the ClinVar table.

    Update :attr:`CharGerResult.clinvar` the variant matches a ClinVar record
    by calling :meth:`_match_clinvar_one_variant`.
    """
    if self.config.clinvar_table is None:
        logger.info("Skip matching ClinVar")
        return

    logger.info(
        f"Match input variants with ClinVar table at {self.config.clinvar_table}"
    )
    clinvar_match_num = 0
    with TabixFile(str(self.config.clinvar_table), encoding="utf8") as tabix:
        cols = tabix.header[0][len("#") :].split("\t")
        for result in self.results:
            record = self._match_clinvar_one_variant(result.variant, tabix, cols)
            if record is not None:
                result.clinvar = record
                clinvar_match_num += 1
    logger.success(
        f"Matched {clinvar_match_num:,d} out of {len(self.input_variants):,d} input variants to a ClinVar record"
    )
Example #13
Source File: classifier.py From CharGer with GNU General Public License v3.0 | 6 votes |
def run_acmg_modules(self) -> None:
    """Run all ACMG modules.

    See :mod:`~charger.acmg_modules` for all the currently implemented modules.
    """
    logger.info("Run all ACMG modules")

    def run_or_skip(module_name: str):
        return self._run_or_skip_module(
            module_name, self._acmg_module_availability[module_name]
        )

    # PVS1
    if run_or_skip("PVS1"):
        for result in self.results:
            run_pvs1(result, self.inheritance_genes)

    # PM4
    if run_or_skip("PM4"):
        for result in self.results:
            run_pm4(result, self.inheritance_genes)
Example #14
Source File: assets.py From veros with MIT License | 6 votes |
def _get_asset(self, key):
    url = self._asset_config[key]['url']
    md5 = self._asset_config[key].get('md5')

    target_filename = os.path.basename(urlparse.urlparse(url).path)
    target_path = os.path.join(self._asset_dir, target_filename)
    target_lock = target_path + '.lock'

    with FileLock(target_lock):
        if not os.path.isfile(target_path) or (md5 is not None and _filehash(target_path) != md5):
            logger.info('Downloading asset {} ...', target_filename)
            _download_file(url, target_path)

            if md5 is not None and _filehash(target_path) != md5:
                raise AssetError('Mismatching MD5 checksum on asset %s' % target_filename)

    return target_path
Example #15
Source File: isoneutral.py From veros with MIT License | 6 votes |
def check_isoneutral_slope_crit(vs):
    """
    check linear stability criterion from Griffies et al
    """
    epsln = 1e-20
    if vs.enable_neutral_diffusion:
        ft1 = 1.0 / (4.0 * vs.K_iso_0 * vs.dt_tracer + epsln)
        delta1a = np.min(vs.dxt[2:-2, np.newaxis, np.newaxis] * np.abs(vs.cost[np.newaxis, 2:-2, np.newaxis]) \
                         * vs.dzt[np.newaxis, np.newaxis, :] * ft1)
        delta1b = np.min(vs.dyt[np.newaxis, 2:-2, np.newaxis] * vs.dzt[np.newaxis, np.newaxis, :] * ft1)
        delta_iso1 = min(
            vs.dzt[0] * ft1 * vs.dxt[-1] * abs(vs.cost[-1]),
            min(delta1a, delta1b)
        )

        logger.info('Diffusion grid factor delta_iso1 = {}', float(delta_iso1))
        if delta_iso1 < vs.iso_slopec:
            raise RuntimeError('Without latitudinal filtering, delta_iso1 is the steepest '
                               'isoneutral slope available for linear stability of '
                               'Redi and GM. Maximum allowable isoneutral slope is '
                               'specified as iso_slopec = {}.'
                               .format(vs.iso_slopec))
Example #16
Source File: superuser.py From bot with MIT License | 6 votes |
async def create_super_user(user_id: int, remove: bool) -> bool:
    user = await User.query.where(User.id == user_id).gino.first()
    if not user:
        logger.error("User is not registered in bot")
        raise ValueError("User is not registered in bot")

    logger.info(
        "Loaded user {user}. It's registered at {register_date}.",
        user=user.id,
        register_date=user.created_at,
    )

    await user.update(is_superuser=not remove).apply()

    if remove:
        logger.warning("User {user} now IS NOT superuser", user=user_id)
    else:
        logger.warning("User {user} now IS superuser", user=user_id)

    return True
Example #17
Source File: console.py From CharGer with GNU General Public License v3.0 | 5 votes |
def parse_console(args=None) -> CharGerConfig:
    """
    Create a :class:`~charger.config.CharGerConfig` object based on
    the command-line arguments or the given `args`.
    """
    parser = create_console_parser()
    config = parser.parse_args(args, namespace=CharGerConfig())
    console_parameters = " ".join(map(quote, args or sys.argv[1:]))
    logger.info(f"Running CharGer v{__version__} with parameters: {console_parameters}")
    return config
Example #18
Source File: progress.py From veros with MIT License | 5 votes |
def flush(self):
    report_time = time.convert_time(self._time, 'seconds', self._time_unit)
    total_time = time.convert_time(self._total, 'seconds', self._time_unit)

    if self._time > self._start_time:
        rate_in_seconds = (perf_counter() - self._start) / (self._time - self._start_time)
    else:
        rate_in_seconds = 0

    rate_in_seconds_per_year = rate_in_seconds / time.convert_time(1, 'seconds', 'years')
    rate, rate_unit = time.format_time(rate_in_seconds_per_year)
    eta, eta_unit = time.format_time((self._total - self._time) * rate_in_seconds)

    if self._start_time < self._total:
        percentage = 100 * (self._time - self._start_time) / (self._total - self._start_time)
    else:
        percentage = 100

    logger.info(
        BAR_FORMAT,
        time=report_time,
        total=total_time,
        unit=self._time_unit[0],
        percentage=percentage,
        iteration=self._iteration,
        rate=rate,
        rate_unit='{}/(model year)'.format(rate_unit[0]),
        eta=eta,
        eta_unit=eta_unit[0],
    )
Example #19
Source File: classifier.py From CharGer with GNU General Public License v3.0 | 5 votes |
def _run_or_skip_module(
    module_name: str, module_avail: "ModuleAvailability"
) -> bool:
    if module_avail is ModuleAvailability.ACTIVE:
        logger.info("Running {name} module", name=module_name)
        return True
    else:
        logger.info("Skipped {name} module", name=module_name)
        return False
Example #20
Source File: wave_propagation.py From veros with MIT License | 5 votes |
def set_timestep(self, vs):
    if vs.time < 90 * 86400:
        if vs.dt_tracer != 1800.:
            vs.dt_tracer = vs.dt_mom = 1800.
            logger.info('Setting time step to 30m')
    else:
        if vs.dt_tracer != 3600.:
            vs.dt_tracer = vs.dt_mom = 3600.
            logger.info('Setting time step to 1h')
Example #21
Source File: cmd.py From Projectors with GNU General Public License v3.0 | 5 votes |
def test(self, versions_dir=None):
    """ This function allows running the test suite agains different version of Blender.
    !!MacOS only!!
    """
    versions_dir = versions_dir if versions_dir else blender_versions_dir
    binaries = blender_binaries(versions_dir)

    # 1) Mimic the Blender User Script directory.
    # 2) Copy the addon into the temporally created structure.
    # 3) Use the BLENDER_USER_SCRIPTS environment variable to point Blender to the created scripts directory.
    with tempfile.TemporaryDirectory() as tempdir:
        tempdir = Path(tempdir)
        addon_dir = tempdir / 'scripts' / 'addons' / 'Projectors'
        addon_dir.mkdir(parents=True)
        scripts_dir = addon_dir.parent.parent

        # Copy addon into temp dir.
        copy_tree(str(Path(__file__).parent), str(addon_dir))

        # Set the environment variable to the temp scripts dir.
        os.environ['BLENDER_USER_SCRIPTS'] = str(scripts_dir)
        log.debug(
            f'BLENDER_USER_SCRIPTS: {os.environ.get("BLENDER_USER_SCRIPTS")}')

        # Run the tests against all Blender versions.
        for name, path in binaries.items():
            print('\n'*3)
            log.info(f'Testing against: {name}')
            print('=='*50)
            subprocess.run([str(path.resolve()), '--addons', 'Projectors',
                            '--factory-startup', '-noaudio', '-b', '-P', 'tests.py'])

    log.debug(f'Temp dir { tempdir } was deleted: {not tempdir.exists()}')
    return 'Finished Testing'
Example #22
Source File: classifier.py From CharGer with GNU General Public License v3.0 | 5 votes |
def _read_pathogenic_variants(self) -> None:
    """Read known pathogenic variants.

    Load :attr:`pathogenic_variants` from
    :attr:`self.config.pathogenic_variant <.CharGerConfig.pathogenic_variant>`.
    """
    if self.config.pathogenic_variant is None:
        return

    logger.info(f"Read pathogenic VCF from {self.config.pathogenic_variant}")
    self.pathogenic_variants = list(
        Variant.read_and_parse_vcf(self.config.pathogenic_variant)
    )
    logger.info(
        f"Read total {len(self.pathogenic_variants):,d} pathogenic variants from the VCF"
    )
Example #23
Source File: classifier.py From CharGer with GNU General Public License v3.0 | 5 votes |
def run_charger_modules(self) -> None:
    """Run all CharGer customized modules.

    See :mod:`~charger.custom_modules` for all the currently implemented modules.
    """
    logger.info("Run all CharGer modules")

    def run_or_skip(module_name: str):
        return self._run_or_skip_module(
            module_name, self._charger_module_availability[module_name]
        )

    # PSC1
    if run_or_skip("PSC1"):
        for result in self.results:
            run_psc1(result, self.inheritance_genes)

    # PMC1
    if run_or_skip("PMC1"):
        for result in self.results:
            run_pmc1(result, self.inheritance_genes)

    # PPC1
    if run_or_skip("PPC1"):
        for result in self.results:
            run_ppc1(result, self.inheritance_genes)

    # PPC
    if run_or_skip("PPC2"):
        for result in self.results:
            run_ppc2(result, self.inheritance_genes)
Example #24
Source File: veros.py From veros with MIT License | 5 votes |
def setup(self):
    vs = self.state

    with vs.timers['setup']:
        logger.info('Setting up everything')

        self.set_parameter(vs)

        for setting, value in self.override_settings.items():
            setattr(vs, setting, value)

        settings.check_setting_conflicts(vs)
        distributed.validate_decomposition(vs)
        vs.allocate_variables()

        self.set_grid(vs)
        numerics.calc_grid(vs)

        self.set_coriolis(vs)
        numerics.calc_beta(vs)

        self.set_topography(vs)
        numerics.calc_topo(vs)

        self.set_initial_conditions(vs)
        numerics.calc_initial_conditions(vs)

        streamfunction.streamfunction_init(vs)
        eke.init_eke(vs)

        for plugin in self._plugin_interfaces:
            plugin.setup_entrypoint(vs)

        vs.create_diagnostics()
        self.set_diagnostics(vs)
        diagnostics.initialize(vs)
        diagnostics.read_restart(vs)

        self.set_forcing(vs)
        isoneutral.check_isoneutral_slope_crit(vs)
Example #25
Source File: __init__.py From veros with MIT License | 5 votes |
def read_restart(vs):
    if not vs.restart_input_filename:
        return

    if vs.force_overwrite:
        raise RuntimeError('To prevent data loss, force_overwrite cannot be used in restart runs')

    logger.info('Reading restarts')
    for diagnostic in vs.diagnostics.values():
        diagnostic.read_restart(vs, vs.restart_input_filename.format(**vars(vs)))
Example #26
Source File: __init__.py From veros with MIT License | 5 votes |
def initialize(vs):
    for name, diagnostic in vs.diagnostics.items():
        diagnostic.initialize(vs)
        if diagnostic.sampling_frequency:
            logger.info(' Running diagnostic "{0}" every {1[0]:.1f} {1[1]}'
                        .format(name, time.format_time(diagnostic.sampling_frequency)))
        if diagnostic.output_frequency:
            logger.info(' Writing output for diagnostic "{0}" every {1[0]:.1f} {1[1]}'
                        .format(name, time.format_time(diagnostic.output_frequency)))
Example #27
Source File: diagnostic.py From veros with MIT License | 5 votes |
def read_h5_restart(self, vs, var_meta, restart_filename):
    if not os.path.isfile(restart_filename):
        raise IOError('restart file {} not found'.format(restart_filename))

    logger.info(' Reading restart data for diagnostic {} from {}',
                self.name, restart_filename)

    with h5tools.threaded_io(vs, restart_filename, 'r') as infile:
        variables = {}
        for key, var in infile[self.name].items():
            if np.isscalar(var):
                variables[key] = var
                continue

            local_shape = distributed.get_local_size(vs, var.shape, var_meta[key].dims,
                                                     include_overlap=True)
            gidx, lidx = distributed.get_chunk_slices(vs, var_meta[key].dims[:var.ndim],
                                                      include_overlap=True)
            variables[key] = np.empty(local_shape, dtype=str(var.dtype))

            if runtime_settings.backend == 'bohrium':
                variables[key][lidx] = var[gidx].astype(variables[key].dtype)
            else:
                variables[key][lidx] = var[gidx]

            distributed.exchange_overlap(vs, variables[key], var_meta[key].dims)

        attributes = {key: var.item() for key, var in infile[self.name].attrs.items()}

    return attributes, variables
Example #28
Source File: snapshot.py From veros with MIT License | 5 votes |
def output(self, vs):
    logger.info(' Writing snapshot at {0[0]:.2f} {0[1]}', time.format_time(vs.time))

    if not os.path.isfile(self.get_output_file_name(vs)):
        self.initialize(vs)

    var_meta = {var: vs.variables[var] for var in self.output_variables
                if vs.variables[var].time_dependent}
    var_data = {var: getattr(vs, var) for var in var_meta.keys()}
    self.write_output(vs, var_meta, var_data)
Example #29
Source File: player.py From fitch with MIT License | 5 votes |
def __init__(self, device_id: str):
    self.device_id = device_id
    self.mnt = MNTDevice(device_id)
    self.cmd_builder = CommandBuilder()

    logger.info("action player inited")
Example #30
Source File: scipy.py From veros with MIT License | 5 votes |
def __init__(self, vs):
    self._matrix = self._assemble_poisson_matrix(vs)
    jacobi_precon = self._jacobi_preconditioner(vs, self._matrix)
    self._matrix = jacobi_precon * self._matrix
    self._rhs_scale = jacobi_precon.diagonal()
    self._extra_args = {}

    logger.info('Computing ILU preconditioner...')
    ilu_preconditioner = spalg.spilu(self._matrix.tocsc(), drop_tol=1e-6, fill_factor=100)
    self._extra_args['M'] = spalg.LinearOperator(self._matrix.shape, ilu_preconditioner.solve)