Python progressbar.FormatLabel() Examples
The following are 14 code examples of progressbar.FormatLabel(). You can vote up the ones you like or vote down the ones you don't like, and go to the original project or source file by following the links above each example. You may also want to check out all available functions/classes of the module progressbar, or try the search function.
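Before the project examples, here is a minimal sketch of the typical pattern: FormatLabel() takes a printf-style template that is filled in from the bar's state. The variable names shown (%(value)d, %(max_value)d, %(elapsed)s) and the max_value/context-manager API assume the progressbar2 package; older releases of progressbar use %(max)d, maxval=, and explicit start()/finish() calls instead, as several of the examples below do.

import time

import progressbar  # assumes the progressbar2 package

# FormatLabel renders a printf-style template against the bar's state
# variables (e.g. %(value)d, %(max_value)d, %(elapsed)s in progressbar2).
widgets = [
    progressbar.FormatLabel('Processed %(value)d of %(max_value)d items '),
    progressbar.Bar(),
    progressbar.FormatLabel(' elapsed: %(elapsed)s'),
]

with progressbar.ProgressBar(widgets=widgets, max_value=50) as bar:
    for i in range(50):
        time.sleep(0.05)  # stand-in for real work
        bar.update(i + 1)

The examples below mix both APIs, which is why some call pbar.start() and pbar.finish() with maxval= while others pass max_value= or use a with block.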
Example #1
Source File: utils.py From HoneyBot with MIT License | 6 votes |
def capture_on_interface(interface, name, timeout=60):
    """
    :param interface: The name of the interface on which to capture traffic
    :param name: The name of the capture file
    :param timeout: A limit in seconds specifying how long to capture traffic
    """
    if timeout < 15:
        logger.error("Timeout must be over 15 seconds.")
        return
    if not sys.warnoptions:
        warnings.simplefilter("ignore")
    start = time.time()
    widgets = [
        progressbar.Bar(marker=progressbar.RotatingMarker()),
        ' ',
        progressbar.FormatLabel('Packets Captured: %(value)d'),
        ' ',
        progressbar.Timer(),
    ]
    progress = progressbar.ProgressBar(widgets=widgets)
    capture = pyshark.LiveCapture(interface=interface, output_file=os.path.join('tmp', name))
    pcap_size = 0
    for i, packet in enumerate(capture.sniff_continuously()):
        progress.update(i)
        if os.path.getsize(os.path.join('tmp', name)) != pcap_size:
            pcap_size = os.path.getsize(os.path.join('tmp', name))
        if not isinstance(packet, pyshark.packet.packet.Packet):
            continue
        if time.time() - start > timeout:
            break
        if pcap_size > const.PT_MAX_BYTES:
            break
    capture.clear()
    capture.close()
    return pcap_size
Example #2
Source File: knn_missing_data.py From Generative-ConvACs with MIT License | 6 votes |
def knn_masked_data(trX, trY, missing_data_dir, input_shape, k):
    raw_im_data = np.loadtxt(join(script_dir, missing_data_dir, 'index.txt'), delimiter=' ', dtype=str)
    raw_mask_data = np.loadtxt(join(script_dir, missing_data_dir, 'index_mask.txt'), delimiter=' ', dtype=str)
    # Using 'brute' method since we only want to do one query per classifier
    # so this will be quicker as it avoids overhead of creating a search tree
    knn_m = KNeighborsClassifier(algorithm='brute', n_neighbors=k)
    prob_Y_hat = np.zeros((raw_im_data.shape[0], int(np.max(trY) + 1)))
    total_images = raw_im_data.shape[0]
    pbar = progressbar.ProgressBar(widgets=[progressbar.FormatLabel('\rProcessed %(value)d of %(max)d Images '), progressbar.Bar()],
                                   maxval=total_images, term_width=50).start()
    for i in range(total_images):
        mask_im = load_image(join(script_dir, missing_data_dir, raw_mask_data[i][0]), input_shape, 1).reshape(np.prod(input_shape))
        mask = np.logical_not(mask_im > eps)  # since mask is 1 at missing locations
        v_im = load_image(join(script_dir, missing_data_dir, raw_im_data[i][0]), input_shape, 255).reshape(np.prod(input_shape))
        rep_mask = np.tile(mask, (trX.shape[0], 1))
        # Corrupt whole training set according to the current mask
        corr_trX = np.multiply(trX, rep_mask)
        knn_m.fit(corr_trX, trY)
        prob_Y_hat[i, :] = knn_m.predict_proba(v_im.reshape(1, -1))
        pbar.update(i)
    pbar.finish()
    return prob_Y_hat
Example #3
Source File: pyrdp-convert.py From pyrdp with GNU General Public License v3.0 | 6 votes |
def processReplay(self, infile: Path):
    widgets = [
        progressbar.FormatLabel('Encoding MP4 '),
        progressbar.BouncingBar(),
        progressbar.FormatLabel(' Elapsed: %(elapsed)s'),
    ]
    with progressbar.ProgressBar(widgets=widgets) as progress:
        print(f"[*] Converting '{infile}' to MP4.")
        outfile = self.prefix + infile.stem + '.mp4'
        sink = Mp4EventHandler(outfile, progress=lambda: progress.update(0))
        fd = open(infile, "rb")
        replay = Replay(fd, handler=sink)
        print(f"\n[+] Successfully wrote '{outfile}'")
        sink.cleanup()
        fd.close()
Example #4
Source File: progress.py From desmod with MIT License | 6 votes |
def _get_progressbar_widgets(
    sim_index: Optional[int], timescale: TimeValue, know_stop_time: bool
) -> List[progressbar.widgets.WidgetBase]:
    widgets = []

    if sim_index is not None:
        widgets.append(f'Sim {sim_index:3}|')

    magnitude, units = timescale
    if magnitude == 1:
        sim_time_format = f'%(value)6.0f {units}|'
    else:
        sim_time_format = f'{magnitude}x%(value)6.0f {units}|'
    widgets.append(progressbar.FormatLabel(sim_time_format))

    widgets.append(progressbar.Percentage())

    if know_stop_time:
        widgets.append(progressbar.Bar())
    else:
        widgets.append(progressbar.BouncingBar())

    widgets.append(progressbar.ETA())

    return widgets
Example #5
Source File: progress.py From desmod with MIT License | 6 votes |
def _get_overall_pbar(
    num_simulations: int, max_width: int, fd: IO
) -> progressbar.ProgressBar:
    pbar = progressbar.ProgressBar(
        fd=fd,
        min_value=0,
        max_value=num_simulations,
        widgets=[
            progressbar.FormatLabel('%(value)s of %(max_value)s '),
            'simulations (',
            progressbar.Percentage(),
            ') ',
            progressbar.Bar(),
            progressbar.ETA(),
        ],
    )
    if max_width and pbar.term_width > max_width:
        pbar.term_width = max_width
    return pbar
Example #6
Source File: filehunt.py From PANhunt with BSD 3-Clause "New" or "Revised" License | 5 votes |
def find_all_regexs_in_files(text_or_zip_files, regexs, search_extensions, hunt_type, gauge_update_function=None):
    """ Searches files in doc_files list for regular expressions"""

    if not gauge_update_function:
        pbar_widgets = ['%s Hunt: ' % hunt_type, progressbar.Percentage(), ' ', progressbar.Bar(marker=progressbar.RotatingMarker()), ' ', progressbar.ETA(), progressbar.FormatLabel(' %ss:0' % hunt_type)]
        pbar = progressbar.ProgressBar(widgets=pbar_widgets).start()
    else:
        gauge_update_function(caption='%s Hunt: ' % hunt_type)

    total_files = len(text_or_zip_files)
    files_completed = 0
    matches_found = 0

    for afile in text_or_zip_files:
        matches = afile.check_regexs(regexs, search_extensions)
        matches_found += len(matches)
        files_completed += 1
        if not gauge_update_function:
            pbar_widgets[6] = progressbar.FormatLabel(' %ss:%s' % (hunt_type, matches_found))
            pbar.update(files_completed * 100.0 / total_files)
        else:
            gauge_update_function(value=files_completed * 100.0 / total_files)

    if not gauge_update_function:
        pbar.finish()

    return total_files, matches_found
Example #7
Source File: es2csv.py From es2csv with Apache License 2.0 | 5 votes |
def write_to_csv(self):
    if self.num_results > 0:
        self.num_results = sum(1 for line in codecs.open(self.tmp_file, mode='r', encoding='utf-8'))
        if self.num_results > 0:
            output_file = codecs.open(self.opts.output_file, mode='a', encoding='utf-8')
            csv_writer = csv.DictWriter(output_file, fieldnames=self.csv_headers)
            csv_writer.writeheader()
            timer = 0
            widgets = ['Write to csv ',
                       progressbar.Bar(left='[', marker='#', right=']'),
                       progressbar.FormatLabel(' [%(value)i/%(max)i] ['),
                       progressbar.Percentage(),
                       progressbar.FormatLabel('] [%(elapsed)s] ['),
                       progressbar.ETA(), '] [',
                       progressbar.FileTransferSpeed(unit='lines'), ']']
            bar = progressbar.ProgressBar(widgets=widgets, maxval=self.num_results).start()

            for line in codecs.open(self.tmp_file, mode='r', encoding='utf-8'):
                timer += 1
                bar.update(timer)
                csv_writer.writerow(json.loads(line))
            output_file.close()
            bar.finish()
        else:
            print('There is no docs with selected field(s): {}.'.format(','.join(self.opts.fields)))
        os.remove(self.tmp_file)
Example #8
Source File: filehunt.py From PassHunt with BSD 3-Clause "New" or "Revised" License | 5 votes |
def find_all_regexs_in_files(text_or_zip_files, regexs, search_extensions, hunt_type, gauge_update_function=None):
    """ Searches files in doc_files list for regular expressions"""

    if not gauge_update_function:
        pbar_widgets = ['%s Hunt: ' % hunt_type, progressbar.Percentage(), ' ', progressbar.Bar(marker=progressbar.RotatingMarker()), ' ', progressbar.ETA(), progressbar.FormatLabel(' %ss:0' % hunt_type)]
        pbar = progressbar.ProgressBar(widgets=pbar_widgets).start()
    else:
        gauge_update_function(caption='%s Hunt: ' % hunt_type)

    total_files = len(text_or_zip_files)
    files_completed = 0
    matches_found = 0

    for afile in text_or_zip_files:
        matches = afile.check_regexs(regexs, search_extensions)
        matches_found += len(matches)
        files_completed += 1
        if not gauge_update_function:
            pbar_widgets[6] = progressbar.FormatLabel(' %ss:%s' % (hunt_type, matches_found))
            pbar.update(files_completed * 100.0 / total_files)
        else:
            gauge_update_function(value=files_completed * 100.0 / total_files)

    if not gauge_update_function:
        pbar.finish()

    return total_files, matches_found
Example #9
Source File: filehunt.py From PANhunt with BSD 3-Clause "New" or "Revised" License | 4 votes |
def check_pst_regexs(self, regexs, search_extensions, hunt_type, gauge_update_function=None):
    """ Searches a pst file for regular expressions in messages and attachments using regular expressions"""

    all_extensions = search_extensions['TEXT'] + search_extensions['ZIP'] + search_extensions['SPECIAL']

    if not gauge_update_function:
        pbar_widgets = ['%s Hunt %s: ' % (hunt_type, unicode2ascii(self.filename)), progressbar.Percentage(), ' ', progressbar.Bar(marker=progressbar.RotatingMarker()), ' ', progressbar.ETA(), progressbar.FormatLabel(' %ss:0' % hunt_type)]
        pbar = progressbar.ProgressBar(widgets=pbar_widgets).start()
    else:
        gauge_update_function(caption='%s Hunt: ' % hunt_type)

    try:
        apst = pst.PST(self.path)
        if apst.header.validPST:

            total_messages = apst.get_total_message_count()
            total_attachments = apst.get_total_attachment_count()
            total_items = total_messages + total_attachments
            items_completed = 0

            for folder in apst.folder_generator():
                for message in apst.message_generator(folder):
                    if message.Subject:
                        message_path = os.path.join(folder.path, message.Subject)
                    else:
                        message_path = os.path.join(folder.path, u'[NoSubject]')
                    if message.Body:
                        self.check_text_regexs(message.Body, regexs, message_path)
                    if message.HasAttachments:
                        for subattachment in message.subattachments:
                            if get_ext(subattachment.Filename) in search_extensions['TEXT'] + search_extensions['ZIP']:
                                attachment = message.get_attachment(subattachment)
                                self.check_attachment_regexs(attachment, regexs, search_extensions, message_path)
                            items_completed += 1
                    items_completed += 1
                    if not gauge_update_function:
                        pbar_widgets[6] = progressbar.FormatLabel(' %ss:%s' % (hunt_type, len(self.matches)))
                        pbar.update(items_completed * 100.0 / total_items)
                    else:
                        gauge_update_function(value=items_completed * 100.0 / total_items)

        apst.close()

    except IOError:
        self.set_error(sys.exc_info()[1])
    except pst.PSTException:
        self.set_error(sys.exc_info()[1])

    if not gauge_update_function:
        pbar.finish()

    return self.matches
Example #10
Source File: filehunt.py From PANhunt with BSD 3-Clause "New" or "Revised" License | 4 votes |
def find_all_files_in_directory(AFileClass, root_dir, excluded_directories, search_extensions, gauge_update_function=None):
    """Recursively searches a directory for files. search_extensions is a dictionary of extension lists"""

    global TEXT_FILE_SIZE_LIMIT

    all_extensions = [ext for ext_list in search_extensions.values() for ext in ext_list]

    extension_types = {}
    for ext_type, ext_list in search_extensions.iteritems():
        for ext in ext_list:
            extension_types[ext] = ext_type

    if not gauge_update_function:
        pbar_widgets = ['Doc Hunt: ', progressbar.Percentage(), ' ', progressbar.Bar(marker=progressbar.RotatingMarker()), ' ', progressbar.ETA(), progressbar.FormatLabel(' Docs:0')]
        pbar = progressbar.ProgressBar(widgets=pbar_widgets).start()
    else:
        gauge_update_function(caption='Doc Hunt: ')

    doc_files = []
    root_dir_dirs = None
    root_items_completed = 0
    docs_found = 0

    for root, sub_dirs, files in os.walk(root_dir):
        sub_dirs[:] = [check_dir for check_dir in sub_dirs if os.path.join(root, check_dir).lower() not in excluded_directories]
        if not root_dir_dirs:
            root_dir_dirs = [os.path.join(root, sub_dir) for sub_dir in sub_dirs]
            root_total_items = len(root_dir_dirs) + len(files)
        if root in root_dir_dirs:
            root_items_completed += 1
            if not gauge_update_function:
                pbar_widgets[6] = progressbar.FormatLabel(' Docs:%s' % docs_found)
                pbar.update(root_items_completed * 100.0 / root_total_items)
            else:
                gauge_update_function(value=root_items_completed * 100.0 / root_total_items)

        for filename in files:
            if root == root_dir:
                root_items_completed += 1
            afile = AFileClass(filename, root)  # AFile or PANFile
            if afile.ext.lower() in all_extensions:
                afile.set_file_stats()
                afile.type = extension_types[afile.ext.lower()]
                if afile.type in ('TEXT', 'SPECIAL') and afile.size > TEXT_FILE_SIZE_LIMIT:
                    afile.type = 'OTHER'
                    afile.set_error('File size {1} over limit of {0} for checking'.format(get_friendly_size(TEXT_FILE_SIZE_LIMIT), afile.size_friendly()))
                doc_files.append(afile)
                if not afile.errors:
                    docs_found += 1
            if not gauge_update_function:
                pbar_widgets[6] = progressbar.FormatLabel(' Docs:%s' % docs_found)
                pbar.update(root_items_completed * 100.0 / root_total_items)
            else:
                gauge_update_function(value=root_items_completed * 100.0 / root_total_items)

    if not gauge_update_function:
        pbar.finish()

    return doc_files
Example #11
Source File: generate_missing_data.py From Generative-ConvACs with MIT License | 4 votes |
def convert_dataset(args):
    try:
        if args.min_rects > args.max_rects:
            raise ValueError('min_rect must be less than or equal to max_rect.')
        if args.min_width > args.max_width:
            raise ValueError('min_width must be less than or equal to max_width.')
        try:
            os.makedirs(args.output_dir)
        except OSError as exc:  # Python >2.5
            if exc.errno == errno.EEXIST and os.path.isdir(args.output_dir):
                pass
            else:
                raise ValueError('output_dir argument is not a valid path.')
        total_images = len(args.filenames)
        params = zip(args.filenames, [args] * total_images)
        pool = Pool(initializer=init_worker)
        pbar = progressbar.ProgressBar(widgets=[progressbar.FormatLabel('\rProcessed %(value)d of %(max)d Images '), progressbar.Bar()],
                                       maxval=total_images, term_width=50).start()
        try:
            results = pool.imap_unordered(corrupt_source_image, params, chunksize=max(int(math.sqrt(len(args.filenames)))/2, 10))
            for i in range(len(args.filenames)):
                next(results)
                pbar.update(i+1)
            pool.close()
            pool.join()
            pbar.finish()
        except KeyboardInterrupt:
            pool.terminate()
            pool.join()
            pbar.finish()
            raise
    except ValueError as e:
        print
        print 'Bad parameters:', e
        raise e
    except KeyboardInterrupt:
        print
        if __name__ == '__main__':
            print 'User stopped generation!'
        raise
    except:
        print
        print "Unexpected error:", sys.exc_info()[0]
        raise

# Main routine
Example #12
Source File: utilities.py From dynamite-nsm with GNU General Public License v3.0 | 4 votes |
def run_subprocess_with_status(process, expected_lines=None):
    """
    Run a subprocess inside a wrapper, that hides the output, and replaces with a progressbar

    :param process: The subprocess.Popen instance
    :param expected_lines: The number of stdout lines to expect
    :return: True, if exited with POSIX 0
    """

    i = 0
    widgets = [
        '\033[92m',
        '{} '.format(datetime.strftime(datetime.utcnow(), '%Y-%m-%d %H:%M:%S')),
        '\033[0m',
        '\033[0;36m',
        'PROCESS_TRACKER ',
        '\033[0m',
        ' | ',
        progressbar.Percentage(),
        ' ',
        progressbar.Bar(),
        ' ',
        progressbar.FormatLabel(''),
        ' ',
        progressbar.ETA()
    ]
    over_max_value = False
    try:
        pb = progressbar.ProgressBar(widgets=widgets, max_value=expected_lines)
    except TypeError:
        pb = progressbar.ProgressBar(widgets=widgets, maxval=expected_lines)
    pb.start()
    while True:
        output = process.stdout.readline().decode()
        if output == '' and process.poll() is not None:
            break
        if output:
            i += 1
            try:
                if not over_max_value:
                    widgets[11] = '<{0}...>'.format(str(output).replace('\n', '').replace('\t', '')[0:40])
                    pb.update(i)
            except ValueError:
                if not over_max_value:
                    pb.finish()
                    over_max_value = True
            # print(i, process.poll(), output)
    if not over_max_value:
        pb.finish()
    return process.poll()
Example #13
Source File: filehunt.py From PassHunt with BSD 3-Clause "New" or "Revised" License | 4 votes |
def check_pst_regexs(self, regexs, search_extensions, hunt_type, gauge_update_function=None):
    """ Searches a pst file for regular expressions in messages and attachments using regular expressions"""

    all_extensions = search_extensions['TEXT'] + search_extensions['ZIP'] + search_extensions['SPECIAL']

    if not gauge_update_function:
        pbar_widgets = ['%s Hunt %s: ' % (hunt_type, unicode2ascii(self.filename)), progressbar.Percentage(), ' ', progressbar.Bar(marker=progressbar.RotatingMarker()), ' ', progressbar.ETA(), progressbar.FormatLabel(' %ss:0' % hunt_type)]
        pbar = progressbar.ProgressBar(widgets=pbar_widgets).start()
    else:
        gauge_update_function(caption='%s Hunt: ' % hunt_type)

    try:
        apst = pst.PST(self.path)

        total_messages = apst.get_total_message_count()
        total_attachments = apst.get_total_attachment_count()
        total_items = total_messages + total_attachments
        items_completed = 0

        for folder in apst.folder_generator():
            for message in apst.message_generator(folder):
                if message.Subject:
                    message_path = os.path.join(folder.path, message.Subject)
                else:
                    message_path = os.path.join(folder.path, u'[NoSubject]')
                if message.Body:
                    self.check_text_regexs(message.Body, regexs, message_path)
                if message.HasAttachments:
                    for subattachment in message.subattachments:
                        if get_ext(subattachment.Filename) in search_extensions['TEXT'] + search_extensions['ZIP']:
                            attachment = message.get_attachment(subattachment)
                            self.check_attachment_regexs(attachment, regexs, search_extensions, message_path)
                        items_completed += 1
                items_completed += 1
                if not gauge_update_function:
                    pbar_widgets[6] = progressbar.FormatLabel(' %ss:%s' % (hunt_type, len(self.matches)))
                    pbar.update(items_completed * 100.0 / total_items)
                else:
                    gauge_update_function(value=items_completed * 100.0 / total_items)

        apst.close()

    except IOError:
        self.set_error(sys.exc_info()[1])
    except pst.PSTException:
        self.set_error(sys.exc_info()[1])

    if not gauge_update_function:
        pbar.finish()

    return self.matches
Example #14
Source File: filehunt.py From PassHunt with BSD 3-Clause "New" or "Revised" License | 4 votes |
def find_all_files_in_directory(AFileClass, root_dir, excluded_directories, search_extensions, gauge_update_function=None):
    """Recursively searches a directory for files. search_extensions is a dictionary of extension lists"""

    global TEXT_FILE_SIZE_LIMIT

    all_extensions = [ext for ext_list in search_extensions.values() for ext in ext_list]

    extension_types = {}
    for ext_type, ext_list in search_extensions.iteritems():
        for ext in ext_list:
            extension_types[ext] = ext_type

    if not gauge_update_function:
        pbar_widgets = ['Doc Hunt: ', progressbar.Percentage(), ' ', progressbar.Bar(marker=progressbar.RotatingMarker()), ' ', progressbar.ETA(), progressbar.FormatLabel(' Docs:0')]
        pbar = progressbar.ProgressBar(widgets=pbar_widgets).start()
    else:
        gauge_update_function(caption='Doc Hunt: ')

    doc_files = []
    root_dir_dirs = None
    root_items_completed = 0
    docs_found = 0

    for root, sub_dirs, files in os.walk(root_dir):
        sub_dirs[:] = [check_dir for check_dir in sub_dirs if os.path.join(root, check_dir).lower() not in excluded_directories]
        if not root_dir_dirs:
            root_dir_dirs = [os.path.join(root, sub_dir) for sub_dir in sub_dirs]
            root_total_items = len(root_dir_dirs) + len(files)
        if root in root_dir_dirs:
            root_items_completed += 1
            if not gauge_update_function:
                pbar_widgets[6] = progressbar.FormatLabel(' Docs:%s' % docs_found)
                pbar.update(root_items_completed * 100.0 / root_total_items)
            else:
                gauge_update_function(value=root_items_completed * 100.0 / root_total_items)

        for filename in files:
            if root == root_dir:
                root_items_completed += 1
            afile = AFileClass(filename, root)  # AFile or PANFile
            if afile.ext.lower() in all_extensions:
                afile.set_file_stats()
                afile.type = extension_types[afile.ext.lower()]
                if afile.type in ('TEXT', 'SPECIAL') and afile.size > TEXT_FILE_SIZE_LIMIT:
                    afile.type = 'OTHER'
                    afile.set_error('File size {1} over limit of {0} for checking'.format(get_friendly_size(TEXT_FILE_SIZE_LIMIT), afile.size_friendly()))
                doc_files.append(afile)
                if not afile.errors:
                    docs_found += 1
            if not gauge_update_function:
                pbar_widgets[6] = progressbar.FormatLabel(' Docs:%s' % docs_found)
                pbar.update(root_items_completed * 100.0 / root_total_items)
            else:
                gauge_update_function(value=root_items_completed * 100.0 / root_total_items)

    if not gauge_update_function:
        pbar.finish()

    return doc_files