Python progressbar.RotatingMarker() Examples
The following are 24 code examples of progressbar.RotatingMarker(), collected from open-source projects. Each example lists its original project, source file, and license. You may also want to check out the other available functions and classes of the progressbar module.
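RotatingMarker() is a widget: used on its own or passed to progressbar.Bar(marker=...), it redraws a spinner character on every update to show that work is still in progress. Below is a minimal, self-contained sketch (not taken from any of the projects listed here) using the legacy progressbar keyword maxval; the progressbar2 package spells it max_value instead.

import time
import progressbar

# Hypothetical example: count to 100 with a rotating marker inside the bar.
widgets = ['Working: ', progressbar.Percentage(), ' ',
           progressbar.Bar(marker=progressbar.RotatingMarker()), ' ',
           progressbar.ETA()]
pbar = progressbar.ProgressBar(widgets=widgets, maxval=100).start()
for i in range(100):
    time.sleep(0.02)  # simulated work
    pbar.update(i + 1)
pbar.finish()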
Example #1
Source File: dataset.py From zhusuan with MIT License | 6 votes |
def show_progress(block_num, block_size, total_size):
    global pbar
    if pbar is None:
        if total_size > 0:
            prefixes = ('', 'Ki', 'Mi', 'Gi', 'Ti', 'Pi', 'Ei', 'Zi', 'Yi')
            power = min(int(math.log(total_size, 2) / 10), len(prefixes) - 1)
            scaled = float(total_size) / (2 ** (10 * power))
            total_size_str = '{:.1f} {}B'.format(scaled, prefixes[power])
            try:
                marker = '█'
            except UnicodeEncodeError:
                marker = '*'
            widgets = [
                progressbar.Percentage(),
                ' ', progressbar.DataSize(),
                ' / ', total_size_str,
                ' ', progressbar.Bar(marker=marker),
                ' ', progressbar.ETA(),
                ' ', progressbar.AdaptiveTransferSpeed(),
            ]
            pbar = progressbar.ProgressBar(widgets=widgets,
                                           max_value=total_size)
        else:
            widgets = [
                progressbar.DataSize(),
                ' ', progressbar.Bar(marker=progressbar.RotatingMarker()),
                ' ', progressbar.Timer(),
                ' ', progressbar.AdaptiveTransferSpeed(),
            ]
            pbar = progressbar.ProgressBar(widgets=widgets,
                                           max_value=progressbar.UnknownLength)

    downloaded = block_num * block_size
    if downloaded < total_size:
        pbar.update(downloaded)
    else:
        pbar.finish()
        pbar = None
Example #2
Source File: utils.py From HoneyBot with MIT License | 6 votes |
def capture_on_interface(interface, name, timeout=60):
    """
    :param interface: The name of the interface on which to capture traffic
    :param name: The name of the capture file
    :param timeout: A limit in seconds specifying how long to capture traffic
    """
    if timeout < 15:
        logger.error("Timeout must be over 15 seconds.")
        return
    if not sys.warnoptions:
        warnings.simplefilter("ignore")
    start = time.time()
    widgets = [
        progressbar.Bar(marker=progressbar.RotatingMarker()),
        ' ',
        progressbar.FormatLabel('Packets Captured: %(value)d'),
        ' ',
        progressbar.Timer(),
    ]
    progress = progressbar.ProgressBar(widgets=widgets)
    capture = pyshark.LiveCapture(interface=interface, output_file=os.path.join('tmp', name))
    pcap_size = 0
    for i, packet in enumerate(capture.sniff_continuously()):
        progress.update(i)
        if os.path.getsize(os.path.join('tmp', name)) != pcap_size:
            pcap_size = os.path.getsize(os.path.join('tmp', name))
        if not isinstance(packet, pyshark.packet.packet.Packet):
            continue
        if time.time() - start > timeout:
            break
        if pcap_size > const.PT_MAX_BYTES:
            break
    capture.clear()
    capture.close()
    return pcap_size
Example #3
Source File: zbx_deleteMonitors.py From zabbix-scripts with BSD 3-Clause "New" or "Revised" License | 6 votes |
def deleteHostsByHostgroup(groupname):
    hostgroup = zapi.hostgroup.get(output=['groupid'], filter={'name': groupname})
    if hostgroup.__len__() != 1:
        logger.error('Hostgroup not found: %s\n\tFound this: %s' % (groupname, hostgroup))
    groupid = int(hostgroup[0]['groupid'])
    hosts = zapi.host.get(output=['name', 'hostid'], groupids=groupid)
    total = len(hosts)
    logger.info('Hosts found: %d' % (total))
    if (args.run):
        x = 0
        bar = ProgressBar(maxval=total, widgets=[Percentage(), ReverseBar(), ETA(), RotatingMarker(), Timer()]).start()
        logger.echo = False
        for host in hosts:
            x = x + 1
            bar.update(x)
            logger.debug('(%d/%d) >> Removing >> %s' % (x, total, host))
            out = zapi.globo.deleteMonitors(host['name'])
        bar.finish()
        logger.echo = True
    else:
        logger.info('No host removed due to --no-run arg. Full list of hosts:')
        for host in hosts:
            logger.info('%s' % host['name'])
    return
Example #4
Source File: zbx_clone.py From zabbix-scripts with BSD 3-Clause "New" or "Revised" License | 6 votes |
def hosts_disable_all():
    """Host status 0 = enabled, host status 1 = disabled"""
    logger.info('Disabling all hosts, in blocks of 1000')
    hosts = zapi.host.get(output=['hostid'], search={'status': 0})
    maxval = int(ceil(hosts.__len__())/1000+1)
    bar = ProgressBar(maxval=maxval, widgets=[Percentage(), ReverseBar(), ETA(), RotatingMarker(), Timer()]).start()
    i = 0
    for i in xrange(maxval):
        block = hosts[:1000]
        del hosts[:1000]
        result = zapi.host.massupdate(hosts=[x for x in block], status=1)
        i += 1
        bar.update(i)
    bar.finish()
    logger.info('Done')
    return
Example #5
Source File: zbx_clone.py From zabbix-scripts with BSD 3-Clause "New" or "Revised" License | 6 votes |
def proxy_passive_to_active():
    """Proxy status 5 = active, proxy status 6 = passive"""
    logger.info('Change all proxys to active')
    proxys = zapi.proxy.get(output=['shorten', 'host'], filter={'status': 6})
    if (proxys.__len__() == 0):
        logger.info('Done')
        return
    bar = ProgressBar(maxval=proxys.__len__(), widgets=[Percentage(), ReverseBar(), ETA(), RotatingMarker(), Timer()]).start()
    i = 0
    for x in proxys:
        i += 1
        proxyid = x['proxyid']
        result = zapi.proxy.update(proxyid=proxyid, status=5)
        logger.echo = False
        logger.debug('Changed from passive to active proxy: %s' % (x['host']))
        bar.update(i)
    bar.finish()
    logger.echo = True
    logger.info('Done')
    return
Example #6
Source File: filehunt.py From PassHunt with BSD 3-Clause "New" or "Revised" License | 5 votes |
def find_all_regexs_in_files(text_or_zip_files, regexs, search_extensions, hunt_type, gauge_update_function=None):
    """ Searches files in doc_files list for regular expressions"""
    if not gauge_update_function:
        pbar_widgets = ['%s Hunt: ' % hunt_type, progressbar.Percentage(), ' ', progressbar.Bar(marker=progressbar.RotatingMarker()), ' ', progressbar.ETA(), progressbar.FormatLabel(' %ss:0' % hunt_type)]
        pbar = progressbar.ProgressBar(widgets=pbar_widgets).start()
    else:
        gauge_update_function(caption='%s Hunt: ' % hunt_type)

    total_files = len(text_or_zip_files)
    files_completed = 0
    matches_found = 0

    for afile in text_or_zip_files:
        matches = afile.check_regexs(regexs, search_extensions)
        matches_found += len(matches)
        files_completed += 1
        if not gauge_update_function:
            pbar_widgets[6] = progressbar.FormatLabel(' %ss:%s' % (hunt_type, matches_found))
            pbar.update(files_completed * 100.0 / total_files)
        else:
            gauge_update_function(value=files_completed * 100.0 / total_files)

    if not gauge_update_function:
        pbar.finish()

    return total_files, matches_found
Example #7
Source File: pst.py From PassHunt with BSD 3-Clause "New" or "Revised" License | 5 votes |
def get_simple_progressbar(title):
    pbar_widgets = [title, progressbar.Percentage(), ' ', progressbar.Bar(marker=progressbar.RotatingMarker()), ' ', progressbar.ETA()]
    pbar = progressbar.ProgressBar(widgets=pbar_widgets).start()
    return pbar
Example #8
Source File: rop.py From angrop with BSD 2-Clause "Simplified" License | 5 votes |
def _addresses_to_check_with_caching(self, show_progress=True):
    num_addrs = len(list(self._addresses_to_check()))
    widgets = ['ROP: ', progressbar.Percentage(), ' ',
               progressbar.Bar(marker=progressbar.RotatingMarker()),
               ' ', progressbar.ETA(), ' ', progressbar.FileTransferSpeed()]
    progress = progressbar.ProgressBar(widgets=widgets, maxval=num_addrs)
    if show_progress:
        progress.start()
    self._cache = dict()
    seen = dict()
    for i, a in enumerate(self._addresses_to_check()):
        if show_progress:
            progress.update(i)
        try:
            bl = self.project.factory.block(a)
            if bl.size > self._max_block_size:
                continue
            block_data = bl.bytes
        except (SimEngineError, SimMemoryError):
            continue
        if block_data in seen:
            self._cache[seen[block_data]].add(a)
            continue
        else:
            if self._is_jumpkind_valid(bl.vex.jumpkind) and \
                    len(bl.vex.constant_jump_targets) == 0 and \
                    not self._block_has_ip_relative(a, bl):
                seen[block_data] = a
                self._cache[a] = set()
                yield a
    if show_progress:
        progress.finish()
Example #9
Source File: fb-video-dl.py From fb-video-dl with GNU Lesser General Public License v3.0 | 5 votes |
def dnl_vid(url, filename, size):
    try:
        file = open(filename, 'wb')
    except IOError:
        sys.exit('cannot access file ' + filename)
    size = int(size)
    dsize = 0
    widgets = ['progress: ', pb.Percentage(), ' ', pb.Bar(marker=pb.RotatingMarker()), ' ', pb.ETA(), ' ', pb.FileTransferSpeed()]
    pbar = pb.ProgressBar(widgets=widgets, maxval=size).start()
    try:
        h_url = urllib2.urlopen(url)
    except urllib2.URLError:
        sys.exit('error : cannot open url')
    try:
        while True:
            info = h_url.read(8192)
            if len(info) < 1:
                break
            dsize += len(info)
            file.write(info)
            pbar += len(info)
        pbar.finish()
    except IOError:
        sys.exit('error : unable to download the video')
    print 'done'
    pass
Example #10
Source File: ZabbixTuner.py From ZabbixTuner with GNU General Public License v3.0 | 5 votes |
def desabilitaItensNaoSuportados():
    query = {
        "output": "extend",
        "filter": {
            "state": 1
        },
        "monitored": True
    }
    filtro = input('Qual a busca para key_? [NULL = ENTER]')
    if filtro.__len__() > 0:
        query['search'] = {'key_': filtro}
    limite = input('Qual o limite de itens? [NULL = ENTER]')
    if limite.__len__() > 0:
        try:
            query['limit'] = int(limite)
        except:
            print('Limite invalido')
            input("Pressione ENTER para voltar")
            main()
    opcao = input("Confirma operação? [s/n]")
    if opcao == 's' or opcao == 'S':
        itens = zapi.item.get(query)
        print('Encontramos {} itens'.format(itens.__len__()))
        bar = ProgressBar(maxval=itens.__len__(),
                          widgets=[Percentage(), ReverseBar(), ETA(), RotatingMarker(), Timer()]).start()
        i = 0
        for x in itens:
            zapi.item.update({"itemid": x['itemid'], "status": 1})
            i += 1
            bar.update(i)
        bar.finish()
        print("Itens desabilitados!!!")
    print()
    input("Pressione ENTER para continuar")
    main()
Example #11
Source File: baidufuse2.py From baidu-fuse with GNU General Public License v2.0 | 5 votes |
def __call__(self, *args, **kwargs):
    if self.first_call:
        self.widgets = [progressbar.Percentage(), ' ',
                        progressbar.Bar(marker=progressbar.RotatingMarker('>')),
                        ' ', progressbar.FileTransferSpeed()]
        self.pbar = progressbar.ProgressBar(widgets=self.widgets, maxval=kwargs['size']).start()
        self.first_call = False

    if kwargs['size'] <= kwargs['progress']:
        self.pbar.finish()
    else:
        self.pbar.update(kwargs['progress'])
Example #12
Source File: baidufuse.py From baidu-fuse with GNU General Public License v2.0 | 5 votes |
def __call__(self, *args, **kwargs):
    if self.first_call:
        self.widgets = [progressbar.Percentage(), ' ',
                        progressbar.Bar(marker=progressbar.RotatingMarker('>')),
                        ' ', progressbar.FileTransferSpeed()]
        self.pbar = progressbar.ProgressBar(widgets=self.widgets, maxval=kwargs['size']).start()
        self.first_call = False

    if kwargs['size'] <= kwargs['progress']:
        self.pbar.finish()
    else:
        self.pbar.update(kwargs['progress'])
Example #13
Source File: conversion_utils.py From ont_fast5_api with Mozilla Public License 2.0 | 5 votes |
def get_progress_bar(num_reads):
    bar_format = [RotatingMarker(), " ", SimpleProgress(), Bar(), Percentage(), " ", ETA()]
    progress_bar = ProgressBar(maxval=num_reads, widgets=bar_format)
    bad_progressbar_version = False
    try:
        progress_bar.currval
    except AttributeError as e:
        bad_progressbar_version = True
        pass
    if bad_progressbar_version:
        raise RuntimeError('Wrong progressbar package detected, likely '
                           '"progressbar2". Please uninstall that package and '
                           'install "progressbar33" instead.')
    return progress_bar.start()
Example #14
Source File: zbx_clone.py From zabbix-scripts with BSD 3-Clause "New" or "Revised" License | 5 votes |
def discovery_disable_all(status=0):
    """Change the status of all *auto* discoveries. Status 0 = enable, status 1 = disable."""
    logger.info('Disabling all network discoveries')
    druleids = zapi.drule.get(output=['druleid', 'iprange', 'name', 'proxy_hostid', 'status'],
                              selectDChecks='extend',
                              filter={'status': 0})
    if (druleids.__len__() == 0):
        logger.info('Done')
        return
    bar = ProgressBar(maxval=druleids.__len__(), widgets=[Percentage(), ReverseBar(), ETA(), RotatingMarker(), Timer()]).start()
    i = 0
    for x in druleids:
        params_disable = {
            'druleid': x['druleid'],
            'iprange': x['iprange'],
            'name': x['name'],
            'dchecks': x['dchecks'],
            'status': 1
        }
        out = zapi.drule.update(**params_disable)
        logger.echo = False
        if out:
            logger.debug('\tNew status: %s (%s) --> %d' % (x['name'], out['druleids'], status))
        else:
            logger.warning('\tFAILED to change status: %s (%s) --> %d' % (x['name'], out['druleids'], status))
        i += 1
        bar.update(i)
    logger.echo = True
    bar.finish()
    logger.info('Done')
    return
Example #15
Source File: move_items.py From zabbix-scripts with BSD 3-Clause "New" or "Revised" License | 5 votes |
def createSQL(table, values, name='insert'):
    '''
    Generate the SQL insert line, breaking each insert to up to ~1k values
    and up to ~1k insert's (~1M values total for each SQL file)
    '''
    logger.info('Generating SQL file')
    queryInsert = 'INSERT INTO %s (itemid,clock,num,value_min,value_avg,value_max) VALUES' % table
    i = 0  # Controls the progress bar
    x = 0  # Controls number of inserts in one line
    y = 0  # Controls number of lines in one file
    z = 0  # Controls number of file name
    valuesLen = values.__len__()
    sqlFile = '%s.sql.%d' % (name, z)
    logger.debug('Total itens for %s: %d' % (name, valuesLen))
    if valuesLen > 0:
        bar = ProgressBar(maxval=valuesLen, widgets=[Percentage(), ReverseBar(), ETA(), RotatingMarker(), Timer()]).start()
        for value in values:
            i += 1
            x += 1
            if x != 1:  # First line only
                sqlInsert = '%s,%s' % (sqlInsert, value)
            else:
                sqlInsert = value
            if y >= 1000:  # If there is more than 1k lines, write to new file
                z += 1
                y = 0
            if x >= 1000 or i == valuesLen:  # If there is more than 1k values or we finished our list, write to file
                sqlFile = '%s.sql.%d' % (name, z)
                fileAppend(f=sqlFile, content='%s %s;\n' % (queryInsert, sqlInsert))
                x = 0
                y += 1
                sqlInsert = ''
            if args.loglevel.upper() != 'DEBUG':  # Dont print progressbar if in debug mode
                bar.update(i)
        bar.finish()
    else:
        logger.warning('No values received')
Example #16
Source File: downloader.py From chakin with MIT License | 5 votes |
def download(number=-1, name="", save_dir='./'):
    """Download pre-trained word vector
    :param number: integer, default ``None``
    :param save_dir: str, default './'
    :return: file path for downloaded file
    """
    df = load_datasets()

    if number > -1:
        row = df.iloc[[number]]
    elif name:
        row = df.loc[df["Name"] == name]

    url = ''.join(row.URL)
    if not url:
        print('The word vector you specified was not found. Please specify correct name.')

    widgets = ['Test: ', Percentage(), ' ', Bar(marker=RotatingMarker()), ' ', ETA(), ' ', FileTransferSpeed()]
    pbar = ProgressBar(widgets=widgets)

    def dlProgress(count, blockSize, totalSize):
        if pbar.max_value is None:
            pbar.max_value = totalSize
            pbar.start()

        pbar.update(min(count * blockSize, totalSize))

    file_name = url.split('/')[-1]
    if not os.path.exists(save_dir):
        os.makedirs(save_dir)
    save_path = os.path.join(save_dir, file_name)

    path, _ = urlretrieve(url, save_path, reporthook=dlProgress)
    pbar.finish()
    return path
Example #17
Source File: filehunt.py From PANhunt with BSD 3-Clause "New" or "Revised" License | 5 votes |
def find_all_regexs_in_files(text_or_zip_files, regexs, search_extensions, hunt_type, gauge_update_function=None):
    """ Searches files in doc_files list for regular expressions"""
    if not gauge_update_function:
        pbar_widgets = ['%s Hunt: ' % hunt_type, progressbar.Percentage(), ' ', progressbar.Bar(marker=progressbar.RotatingMarker()), ' ', progressbar.ETA(), progressbar.FormatLabel(' %ss:0' % hunt_type)]
        pbar = progressbar.ProgressBar(widgets=pbar_widgets).start()
    else:
        gauge_update_function(caption='%s Hunt: ' % hunt_type)

    total_files = len(text_or_zip_files)
    files_completed = 0
    matches_found = 0

    for afile in text_or_zip_files:
        matches = afile.check_regexs(regexs, search_extensions)
        matches_found += len(matches)
        files_completed += 1
        if not gauge_update_function:
            pbar_widgets[6] = progressbar.FormatLabel(' %ss:%s' % (hunt_type, matches_found))
            pbar.update(files_completed * 100.0 / total_files)
        else:
            gauge_update_function(value=files_completed * 100.0 / total_files)

    if not gauge_update_function:
        pbar.finish()

    return total_files, matches_found
Example #18
Source File: pst.py From PANhunt with BSD 3-Clause "New" or "Revised" License | 5 votes |
def get_simple_progressbar(title):
    pbar_widgets = [title, progressbar.Percentage(), ' ', progressbar.Bar(marker=progressbar.RotatingMarker()), ' ', progressbar.ETA()]
    pbar = progressbar.ProgressBar(widgets=pbar_widgets).start()
    return pbar
Example #19
Source File: move_items.py From zabbix-scripts with BSD 3-Clause "New" or "Revised" License | 4 votes |
def main():
    '''
    Controls general flow of operations
    '''
    # If it exists, use the cached data of hosts and items
    if (os.path.isfile(move_items_file)):
        with open(move_items_file) as infile:
            hosts = json.load(infile)
            logger.info('Cache loaded from file (%s)' % move_items_file)
    else:
        hosts = getItems()
        with open(move_items_file, 'w') as outfile:
            json.dump(hosts, outfile)
            logger.info('Cache written to file (%s)' % move_items_file)

    for host in hosts:
        logger.info('Geting trends data of host: %s' % host['name'])
        host['trends'] = list()
        host['trends_uint'] = list()
        if host['itens'].__len__() > 0:
            bar = ProgressBar(maxval=host['itens'].__len__(), widgets=[Percentage(), ReverseBar(), ETA(), RotatingMarker(), Timer()]).start()
            i = 0
            for item in host['itens']:
                temp = getTrends(hostname=host['name'], item=item)
                i += 1
                if args.loglevel.upper() != 'DEBUG':
                    bar.update(i)
                if temp['table'] == 'trends':
                    for value in temp['values']:
                        host['trends'].append('(%d, %d, %d, %d, %d, %d)' % (int(item['itemid']), int(value[1]), int(value[2]), int(value[3]), int(value[4]), int(value[5])))
                elif temp['table'] == 'trends_uint':
                    for value in temp['values']:
                        host['trends_uint'].append('(%d, %d, %d, %d, %d, %d)' % (int(item['itemid']), int(value[1]), int(value[2]), int(value[3]), int(value[4]), int(value[5])))
                else:
                    logger.warning('Unknown value type: %s' % temp['table'])
            bar.finish()

        '''
        Now, we send in blocks of up to ~1M values to generate the SQL files
        '''
        if host['trends'].__len__() > 0:
            createSQL(table='trends', values=host['trends'], name=host['name'])
        elif host['trends_uint'].__len__() > 0:
            createSQL(table='trends_uint', values=host['trends_uint'], name=host['name'])
        else:
            logger.warning('No data from %s found to be sent.' % host['name'])

    # Start DB connection
Example #20
Source File: girlscout.py From angr with BSD 2-Clause "Simplified" License | 4 votes |
def _full_code_scan(self):
    """
    Perform a full code scan on the target binary.
    """
    # We gotta time this function
    start_time = datetime.now()

    traced_address = set()
    self.functions = set()
    self.call_map = networkx.DiGraph()
    self.cfg = networkx.DiGraph()
    initial_state = self.project.factory.blank_state(mode="fastpath")
    initial_options = initial_state.options - {o.TRACK_CONSTRAINTS} - o.refs
    initial_options |= {o.SUPER_FASTPATH}
    # initial_options.remove(o.COW_STATES)
    initial_state.options = initial_options
    # Sadly, not all calls to functions are explicitly made by call
    # instruction - they could be a jmp or b, or something else. So we
    # should record all exits from a single function, and then add
    # necessary calling edges in our call map during the post-processing
    # phase.
    function_exits = defaultdict(set)

    widgets = [progressbar.Percentage(),
               ' ',
               progressbar.Bar(marker=progressbar.RotatingMarker()),
               ' ',
               progressbar.Timer(),
               ' ',
               progressbar.ETA()
               ]
    pb = progressbar.ProgressBar(widgets=widgets, maxval=10000 * 100).start()

    while True:
        next_addr = self._get_next_code_addr(initial_state)
        percentage = self._seg_list.occupied_size * 100.0 / (self._valid_memory_region_size)
        if percentage > 100.0:
            percentage = 100.0
        pb.update(percentage * 10000)
        if next_addr is not None:
            l.info("Analyzing %xh, progress %0.04f%%", next_addr, percentage)
        else:
            l.info('No more addr to analyze. Progress %0.04f%%', percentage)
            break
        self.call_map.add_node(next_addr)
        self._scan_code(traced_address, function_exits, initial_state, next_addr)

    pb.finish()
    end_time = datetime.now()
    l.info("A full code scan takes %d seconds.", (end_time - start_time).seconds)
Example #21
Source File: filehunt.py From PANhunt with BSD 3-Clause "New" or "Revised" License | 4 votes |
def find_all_files_in_directory(AFileClass, root_dir, excluded_directories, search_extensions, gauge_update_function=None):
    """Recursively searches a directory for files. search_extensions is a dictionary of extension lists"""
    global TEXT_FILE_SIZE_LIMIT

    all_extensions = [ext for ext_list in search_extensions.values() for ext in ext_list]

    extension_types = {}
    for ext_type, ext_list in search_extensions.iteritems():
        for ext in ext_list:
            extension_types[ext] = ext_type

    if not gauge_update_function:
        pbar_widgets = ['Doc Hunt: ', progressbar.Percentage(), ' ', progressbar.Bar(marker=progressbar.RotatingMarker()), ' ', progressbar.ETA(), progressbar.FormatLabel(' Docs:0')]
        pbar = progressbar.ProgressBar(widgets=pbar_widgets).start()
    else:
        gauge_update_function(caption='Doc Hunt: ')

    doc_files = []
    root_dir_dirs = None
    root_items_completed = 0
    docs_found = 0

    for root, sub_dirs, files in os.walk(root_dir):
        sub_dirs[:] = [check_dir for check_dir in sub_dirs if os.path.join(root, check_dir).lower() not in excluded_directories]
        if not root_dir_dirs:
            root_dir_dirs = [os.path.join(root, sub_dir) for sub_dir in sub_dirs]
            root_total_items = len(root_dir_dirs) + len(files)
        if root in root_dir_dirs:
            root_items_completed += 1
            if not gauge_update_function:
                pbar_widgets[6] = progressbar.FormatLabel(' Docs:%s' % docs_found)
                pbar.update(root_items_completed * 100.0 / root_total_items)
            else:
                gauge_update_function(value=root_items_completed * 100.0 / root_total_items)
        for filename in files:
            if root == root_dir:
                root_items_completed += 1
            afile = AFileClass(filename, root)  # AFile or PANFile
            if afile.ext.lower() in all_extensions:
                afile.set_file_stats()
                afile.type = extension_types[afile.ext.lower()]
                if afile.type in ('TEXT', 'SPECIAL') and afile.size > TEXT_FILE_SIZE_LIMIT:
                    afile.type = 'OTHER'
                    afile.set_error('File size {1} over limit of {0} for checking'.format(get_friendly_size(TEXT_FILE_SIZE_LIMIT), afile.size_friendly()))
                doc_files.append(afile)
                if not afile.errors:
                    docs_found += 1
            if not gauge_update_function:
                pbar_widgets[6] = progressbar.FormatLabel(' Docs:%s' % docs_found)
                pbar.update(root_items_completed * 100.0 / root_total_items)
            else:
                gauge_update_function(value=root_items_completed * 100.0 / root_total_items)

    if not gauge_update_function:
        pbar.finish()

    return doc_files
Example #22
Source File: filehunt.py From PANhunt with BSD 3-Clause "New" or "Revised" License | 4 votes |
def check_pst_regexs(self, regexs, search_extensions, hunt_type, gauge_update_function=None):
    """ Searches a pst file for regular expressions in messages and attachments using regular expressions"""

    all_extensions = search_extensions['TEXT'] + search_extensions['ZIP'] + search_extensions['SPECIAL']

    if not gauge_update_function:
        pbar_widgets = ['%s Hunt %s: ' % (hunt_type, unicode2ascii(self.filename)), progressbar.Percentage(), ' ', progressbar.Bar(marker=progressbar.RotatingMarker()), ' ', progressbar.ETA(), progressbar.FormatLabel(' %ss:0' % hunt_type)]
        pbar = progressbar.ProgressBar(widgets=pbar_widgets).start()
    else:
        gauge_update_function(caption='%s Hunt: ' % hunt_type)

    try:
        apst = pst.PST(self.path)
        if apst.header.validPST:
            total_messages = apst.get_total_message_count()
            total_attachments = apst.get_total_attachment_count()
            total_items = total_messages + total_attachments
            items_completed = 0

            for folder in apst.folder_generator():
                for message in apst.message_generator(folder):
                    if message.Subject:
                        message_path = os.path.join(folder.path, message.Subject)
                    else:
                        message_path = os.path.join(folder.path, u'[NoSubject]')
                    if message.Body:
                        self.check_text_regexs(message.Body, regexs, message_path)
                    if message.HasAttachments:
                        for subattachment in message.subattachments:
                            if get_ext(subattachment.Filename) in search_extensions['TEXT'] + search_extensions['ZIP']:
                                attachment = message.get_attachment(subattachment)
                                self.check_attachment_regexs(attachment, regexs, search_extensions, message_path)
                            items_completed += 1
                    items_completed += 1
                    if not gauge_update_function:
                        pbar_widgets[6] = progressbar.FormatLabel(' %ss:%s' % (hunt_type, len(self.matches)))
                        pbar.update(items_completed * 100.0 / total_items)
                    else:
                        gauge_update_function(value=items_completed * 100.0 / total_items)

        apst.close()

    except IOError:
        self.set_error(sys.exc_info()[1])
    except pst.PSTException:
        self.set_error(sys.exc_info()[1])

    if not gauge_update_function:
        pbar.finish()

    return self.matches
Example #23
Source File: filehunt.py From PassHunt with BSD 3-Clause "New" or "Revised" License | 4 votes |
def check_pst_regexs(self, regexs, search_extensions, hunt_type, gauge_update_function=None):
    """ Searches a pst file for regular expressions in messages and attachments using regular expressions"""

    all_extensions = search_extensions['TEXT'] + search_extensions['ZIP'] + search_extensions['SPECIAL']

    if not gauge_update_function:
        pbar_widgets = ['%s Hunt %s: ' % (hunt_type, unicode2ascii(self.filename)), progressbar.Percentage(), ' ', progressbar.Bar(marker=progressbar.RotatingMarker()), ' ', progressbar.ETA(), progressbar.FormatLabel(' %ss:0' % hunt_type)]
        pbar = progressbar.ProgressBar(widgets=pbar_widgets).start()
    else:
        gauge_update_function(caption='%s Hunt: ' % hunt_type)

    try:
        apst = pst.PST(self.path)
        total_messages = apst.get_total_message_count()
        total_attachments = apst.get_total_attachment_count()
        total_items = total_messages + total_attachments
        items_completed = 0

        for folder in apst.folder_generator():
            for message in apst.message_generator(folder):
                if message.Subject:
                    message_path = os.path.join(folder.path, message.Subject)
                else:
                    message_path = os.path.join(folder.path, u'[NoSubject]')
                if message.Body:
                    self.check_text_regexs(message.Body, regexs, message_path)
                if message.HasAttachments:
                    for subattachment in message.subattachments:
                        if get_ext(subattachment.Filename) in search_extensions['TEXT'] + search_extensions['ZIP']:
                            attachment = message.get_attachment(subattachment)
                            self.check_attachment_regexs(attachment, regexs, search_extensions, message_path)
                        items_completed += 1
                items_completed += 1
                if not gauge_update_function:
                    pbar_widgets[6] = progressbar.FormatLabel(' %ss:%s' % (hunt_type, len(self.matches)))
                    pbar.update(items_completed * 100.0 / total_items)
                else:
                    gauge_update_function(value=items_completed * 100.0 / total_items)

        apst.close()

    except IOError:
        self.set_error(sys.exc_info()[1])
    except pst.PSTException:
        self.set_error(sys.exc_info()[1])

    if not gauge_update_function:
        pbar.finish()

    return self.matches
Example #24
Source File: filehunt.py From PassHunt with BSD 3-Clause "New" or "Revised" License | 4 votes |
def find_all_files_in_directory(AFileClass, root_dir, excluded_directories, search_extensions, gauge_update_function=None):
    """Recursively searches a directory for files. search_extensions is a dictionary of extension lists"""
    global TEXT_FILE_SIZE_LIMIT

    all_extensions = [ext for ext_list in search_extensions.values() for ext in ext_list]

    extension_types = {}
    for ext_type, ext_list in search_extensions.iteritems():
        for ext in ext_list:
            extension_types[ext] = ext_type

    if not gauge_update_function:
        pbar_widgets = ['Doc Hunt: ', progressbar.Percentage(), ' ', progressbar.Bar(marker=progressbar.RotatingMarker()), ' ', progressbar.ETA(), progressbar.FormatLabel(' Docs:0')]
        pbar = progressbar.ProgressBar(widgets=pbar_widgets).start()
    else:
        gauge_update_function(caption='Doc Hunt: ')

    doc_files = []
    root_dir_dirs = None
    root_items_completed = 0
    docs_found = 0

    for root, sub_dirs, files in os.walk(root_dir):
        sub_dirs[:] = [check_dir for check_dir in sub_dirs if os.path.join(root, check_dir).lower() not in excluded_directories]
        if not root_dir_dirs:
            root_dir_dirs = [os.path.join(root, sub_dir) for sub_dir in sub_dirs]
            root_total_items = len(root_dir_dirs) + len(files)
        if root in root_dir_dirs:
            root_items_completed += 1
            if not gauge_update_function:
                pbar_widgets[6] = progressbar.FormatLabel(' Docs:%s' % docs_found)
                pbar.update(root_items_completed * 100.0 / root_total_items)
            else:
                gauge_update_function(value=root_items_completed * 100.0 / root_total_items)
        for filename in files:
            if root == root_dir:
                root_items_completed += 1
            afile = AFileClass(filename, root)  # AFile or PANFile
            if afile.ext.lower() in all_extensions:
                afile.set_file_stats()
                afile.type = extension_types[afile.ext.lower()]
                if afile.type in ('TEXT', 'SPECIAL') and afile.size > TEXT_FILE_SIZE_LIMIT:
                    afile.type = 'OTHER'
                    afile.set_error('File size {1} over limit of {0} for checking'.format(get_friendly_size(TEXT_FILE_SIZE_LIMIT), afile.size_friendly()))
                doc_files.append(afile)
                if not afile.errors:
                    docs_found += 1
            if not gauge_update_function:
                pbar_widgets[6] = progressbar.FormatLabel(' Docs:%s' % docs_found)
                pbar.update(root_items_completed * 100.0 / root_total_items)
            else:
                gauge_update_function(value=root_items_completed * 100.0 / root_total_items)

    if not gauge_update_function:
        pbar.finish()

    return doc_files