Python concurrent.futures Examples

The following are 30 code examples that use the concurrent.futures module. There is no concurrent.futures.append() function; what these snippets share is the common idiom of collecting Future objects (and other results) in a list with append(). Each example is taken from the open source project and source file named above it. You may also want to check out all available functions/classes of the module concurrent.futures, or try the search function.
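Most of these examples follow the same shape: create an executor, submit work, append each returned Future to a list, then wait on the futures. Here is a minimal, self-contained sketch of that pattern; the fetch function and its inputs are placeholders rather than code from any of the projects below.

import concurrent.futures

def fetch(item):
    # Stand-in for real work such as an HTTP request or a file read.
    return item * 2

items = [1, 2, 3, 4]
futures = []
with concurrent.futures.ThreadPoolExecutor(max_workers=4) as executor:
    for item in items:
        futures.append(executor.submit(fetch, item))
    # as_completed yields each future as soon as it finishes.
    for future in concurrent.futures.as_completed(futures):
        print(future.result())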
Example #1
Source File: cum.py    From cum with Apache License 2.0
def latest(alias, relative):
    """List most recent chapter addition for series."""
    query = db.session.query(db.Series)
    if alias:
        query = query.filter_by(following=True, alias=alias)
    else:
        query = query.filter_by(following=True)
    query = query.order_by(db.Series.alias).all()
    updates = []
    for series in query:
        if series.last_added is None:
            time = 'never'
        elif relative:
            time = utility.time_to_relative(series.last_added)
        else:
            time = series.last_added.strftime('%Y-%m-%d %H:%M')
        updates.append((series.alias, time))
    output.even_columns(updates, separator_width=3) 
Example #2
Source File: main.py    From halive with BSD 3-Clause "New" or "Revised" License
def get_urls(inputfiles):
    """
    This function takes as input the list of files containing the hostnames
    and normalizes the format of the hostnames in order to be able to perform
    valid HTTP/HTTPS requests.

    Args:
    inputfiles -- list of inputfiles

    Returns:
    urls       -- list of normalized URLs which can be queried
    """
    urls = []
    scheme_rgx = re.compile(r'^https?://')
    for ifile in inputfiles:
        urls.append(ifile.read().splitlines())
    urls = set([n for l in urls for n in l])
    urls = list(filter(None, urls))
    for i in range(len(urls)):
        if not scheme_rgx.match(urls[i]):
            urls[i] = 'http://' + urls[i]
    return urls 
Example #3
Source File: __init__.py    From benchexec with Apache License 2.0
def append(self, resultFile, resultElem, all_columns=False):
        """
        Append the result for one run. Needs to be called before collect_data().
        """
        self._xml_results += [
            (result, resultFile) for result in _get_run_tags_from_xml(resultElem)
        ]
        for attrib, values in RunSetResult._extract_attributes_from_result(
            resultFile, resultElem
        ).items():
            self.attributes[attrib].extend(values)

        if not self.columns:
            self.columns = RunSetResult._extract_existing_columns_from_result(
                resultFile, resultElem, all_columns
            ) 
Example #4
Source File: __init__.py    From benchexec with Apache License 2.0
def insert_logfile_names(resultFile, resultElem):
    # get folder of logfiles (truncate end of XML file name and append .logfiles instead)
    log_folder = resultFile[0 : resultFile.rfind(".results.")] + ".logfiles/"

    # append begin of filename
    runSetName = resultElem.get("name")
    if runSetName is not None:
        blockname = resultElem.get("block")
        if blockname is None:
            log_folder += runSetName + "."
        elif blockname == runSetName:
            pass  # real runSetName is empty
        else:
            assert runSetName.endswith("." + blockname)
            runSetName = runSetName[: -(1 + len(blockname))]  # remove last chars
            log_folder += runSetName + "."

    # for each file: append original filename and insert log_file_name into sourcefileElement
    for sourcefile in _get_run_tags_from_xml(resultElem):
        if "logfile" in sourcefile.attrib:
            log_file = urllib.parse.urljoin(resultFile, sourcefile.get("logfile"))
        else:
            log_file = log_folder + os.path.basename(sourcefile.get("name")) + ".log"
        sourcefile.set("logfile", log_file) 
Example #5
Source File: aem_hacker.py    From aem-hacker with MIT License
def serve(self):
        try:
            token, key, value = self.path.split('/')[1:4]
        except ValueError:  # the path did not split into /token/key/value
            self.send_response(200)
            return

        if self.token != token:
            self.send_response(200)
            return

        if key in self.d:
            self.d[key].append(value)
        else:
            self.d[key] = [value, ]

        self.send_response(200) 
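The if/else accumulation in serve() above is the textbook use case for dict.setdefault or collections.defaultdict. A minimal equivalent sketch (hypothetical, not part of aem-hacker):

from collections import defaultdict

d = defaultdict(list)
for key, value in [('a', 1), ('a', 2), ('b', 3)]:
    d[key].append(value)  # the list is created automatically on first access
# The same with a plain dict:
plain = {}
for key, value in [('a', 1), ('a', 2), ('b', 3)]:
    plain.setdefault(key, []).append(value)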
Example #6
Source File: run.py    From seed_rl with Apache License 2.0
def main(_):
  tf_config = os.environ.get('TF_CONFIG', None)
  logging.info(tf_config)
  config = json.loads(tf_config)
  job_type = config.get('task', {}).get('type')
  os.environ.update({'PYTHONPATH': '/'})
  executor = concurrent.futures.ThreadPoolExecutor(
      max_workers=FLAGS.actors_per_worker)
  futures = []
  if job_type == 'master':
    futures.append(run_learner(executor, config))
  else:
    assert job_type == 'worker', 'Unexpected task type: {}'.format(job_type)
    for actor_id in range(FLAGS.actors_per_worker):
      futures.append(run_actor(executor, config, actor_id))
  for f in futures:
    f.result() 
Example #7
Source File: foolslide.py    From cum with Apache License 2.0
def get_chapters(self, chapter_object):
        """Queries the series details API and creates a chapter object for each
        chapter listed.
        """
        response = requests.get(self.api_hook_details).json()
        chapters = []
        for chapter in response['chapters']:
            if int(chapter['chapter']['subchapter']) > 0:
                chapter_number = '.'.join([chapter['chapter']['chapter'],
                                           chapter['chapter']['subchapter']])
            else:
                chapter_number = chapter['chapter']['chapter']
            kwargs = {
                'name': self.name,
                'alias': self.alias,
                'chapter': chapter_number,
                'api_id': chapter['chapter']['id'],
                'url': chapter['chapter']['href'],
                'title': chapter['chapter']['name'],
                'groups': [team['name'] for team in chapter['teams']]
            }
            chapter = chapter_object(**kwargs)
            chapters.append(chapter)
        return chapters 
Example #8
Source File: towerlib.py    From towerlib with MIT License
def _get_paginated_response(self, url, params=None):
        url = self.add_slash(url)
        response_data = self._get_first_page(url, params)
        count = response_data.get('count', 0)
        page_count = int(math.ceil(float(count) / PAGINATION_LIMIT))
        self._logger.debug('Calculated that there are {} pages to get'.format(page_count))
        for result in response_data.get('results', []):
            yield result
        if page_count:
            with concurrent.futures.ThreadPoolExecutor(max_workers=25) as executor:
                futures = []
                if not params:
                    params = {}
                for index in range(page_count, 1, -1):
                    params.update({'page': index})
                    futures.append(executor.submit(self.session.get, url, params=params.copy()))
                for future in concurrent.futures.as_completed(futures):
                    try:
                        response = future.result()
                        response_data = response.json()
                        response.close()
                        for result in response_data.get('results'):
                            yield result
                    except Exception:  # pylint: disable=broad-except
                        self._logger.exception('Future failed...') 
Example #9
Source File: utils.py    From resolwe with Apache License 2.0
def paralelize(
    objects: Sequence[Any],
    worker: Callable[[Sequence[Any]], Any],
    max_threads: int = 10,
) -> Sequence[concurrent.futures.Future]:
    """Paralelize tasks using connector on list of URLS.

    URLs are split into up-to num_threads chunks and each chunk is processed
    in its own thread. Connectors in worker method MUST be duplicated to ensure
    thread safety.

    :returns: collection of instance of Future objects, each one corresponding
        to one thread. It is caller responsibility to check if threads have
        finished successfully.
    """
    number_of_chunks = min(len(objects), max_threads)
    objects_chunks = chunks(objects, number_of_chunks)

    futures = []
    with concurrent.futures.ThreadPoolExecutor(max_workers=max_threads) as executor:
        for objects_chunk in objects_chunks:
            futures.append(executor.submit(worker, objects_chunk))
    return futures 
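A hypothetical call might look like the following; the chunks() helper that splits a sequence into n roughly equal parts is assumed to come from the same module, and the worker is a stand-in. Because paralelize() only returns after its with block exits, every future is already done, but exceptions still surface on result():

# Hypothetical usage; process_chunk is a stand-in worker.
def process_chunk(chunk):
    return [len(item) for item in chunk]

futures = paralelize(['a', 'bb', 'ccc', 'dddd'], worker=process_chunk,
                     max_threads=2)
results = [f.result() for f in futures]  # raises if any worker failed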
Example #10
Source File: dynastyscans.py    From cum with Apache License 2.0
def get_chapters(self):
        chapters = []
        for t in self.json['taggings']:
            if 'permalink' in t and 'title' in t:
                name_parts = re.search(name_re, t['title'])
                if not name_parts:
                    name_parts = re.search(fallback_re, t['title'])
                    chapter = name_parts.group('num')
                elif name_parts.group('type') == 'Special':
                    chapter = 'Special ' + name_parts.group('num')
                else:
                    chapter = name_parts.group('num')
                title = name_parts.group('title')
                url = urljoin('https://dynasty-scans.com/chapters/',
                              t['permalink'])
                c = DynastyScansChapter(name=self.name, alias=self.alias,
                                        chapter=chapter, url=url, title=title)
                chapters.append(c)
        return chapters 
Example #11
Source File: views.py    From promgen with MIT License
def get_context_data(self, **kwargs):
        context = super(HostList, self).get_context_data(**kwargs)
        context['host_groups'] = collections.defaultdict(list)
        for host in context['object_list']:
            context['host_groups'][host.name].append(host)
        context['host_groups'] = dict(context['host_groups'])
        return context 
Example #12
Source File: _test_process_executor.py    From loky with BSD 3-Clause "New" or "Revised" License
def test_memory_leak_protection(self):
        self.executor.shutdown(wait=True)

        executor = self.executor_type(1, context=self.context)

        def _leak_some_memory(size=int(3e6), delay=0.001):
            """function that leaks some memory """
            from loky import process_executor
            process_executor._MEMORY_LEAK_CHECK_DELAY = 0.1
            if getattr(os, '_loky_leak', None) is None:
                os._loky_leak = []

            os._loky_leak.append(b"\x00" * size)

            # Leave enough time for the memory leak detector to kick-in:
            # by default the process does not check its memory usage
            # more than once per second.
            time.sleep(delay)

            leaked_size = sum(len(buffer) for buffer in os._loky_leak)
            return os.getpid(), leaked_size

        with pytest.warns(UserWarning, match='memory leak'):
            futures = []
            for i in range(300):
                # Total run time should be 3s which is way over the 1s cooldown
                # period between two consecutive memory checks in the worker.
                futures.append(executor.submit(_leak_some_memory))

            executor.shutdown(wait=True)
            results = [f.result() for f in futures]

            # The pid of the worker has changed when restarting the worker
            first_pid, last_pid = results[0][0], results[-1][0]
            assert first_pid != last_pid

            # The restart happened after 100 MB of leak over the
            # default process size + what has leaked since the last
            # memory check.
            for _, leak_size in results:
                assert leak_size / 1e6 < 650 
Example #13
Source File: _test_process_executor.py    From loky with BSD 3-Clause "New" or "Revised" License
def test_submit_from_callback(self):
        collected = defaultdict(list)
        executor = self.executor

        def _collect_and_submit_next(future):
            name, count = future.result()
            collected[name].append(count)
            if count > 0:
                future = executor.submit(self.return_inputs, name, count - 1)
                future.add_done_callback(_collect_and_submit_next)

        # Start 3 concurrent callbacks chains
        fa = executor.submit(self.return_inputs, 'chain a', 100)
        fa.add_done_callback(_collect_and_submit_next)
        fb = executor.submit(self.return_inputs, 'chain b', 50)
        fb.add_done_callback(_collect_and_submit_next)
        fc = executor.submit(self.return_inputs, 'chain c', 60)
        fc.add_done_callback(_collect_and_submit_next)
        assert fa.result() == ('chain a', 100)
        assert fb.result() == ('chain b', 50)
        assert fc.result() == ('chain c', 60)

        # Wait a maximum of 5s for the asynchronous callback chains to complete
        patience = 500
        while True:
            if (collected['chain a'] == list(range(100, -1, -1)) and
                    collected['chain b'] == list(range(50, -1, -1)) and
                    collected['chain c'] == list(range(60, -1, -1))):
                # the recursive callback chains have completed successfully
                break
            elif patience < 0:
                raise AssertionError("callback submit chains stalled at: %r"
                                     % collected)
            else:
                patience -= 1
                time.sleep(0.01) 
Example #14
Source File: _test_process_executor.py    From loky with BSD 3-Clause "New" or "Revised" License
def test_thread_safety(self):
        # Check that our process-pool executor can be shared to schedule work
        # by concurrent threads
        threads = []
        results = [None] * 10
        for i in range(len(results)):
            threads.append(Thread(target=self._test_thread_safety,
                                  args=(i, results)))
        for t in threads:
            t.start()
        for t in threads:
            t.join()
        for result in results:
            if result != "ok":
                raise AssertionError(result) 
Example #15
Source File: _test_process_executor.py    From loky with BSD 3-Clause "New" or "Revised" License
def test_reference_cycle_collection(self):
        # make the parallel call create a reference cycle and make
        # a weak reference to be able to track the garbage collected objects
        self.executor.shutdown(wait=True)

        executor = self.executor_type(1, context=self.context)

        def _create_cyclic_reference(delay=0.001):
            """function that creates a cyclic reference"""
            from loky import process_executor
            process_executor._USE_PSUTIL = False
            process_executor._MEMORY_LEAK_CHECK_DELAY = 0.1

            class A:
                def __init__(self, size=int(1e6)):
                    self.data = b"\x00" * size
                    self.a = self
            if getattr(os, '_loky_cyclic_weakrefs', None) is None:
                os._loky_cyclic_weakrefs = []

            a = A()
            time.sleep(delay)
            os._loky_cyclic_weakrefs.append(weakref.ref(a))
            return sum(1 for r in os._loky_cyclic_weakrefs if r() is not None)

        futures = []
        for i in range(300):
            # Total run time should be 3s which is way over the 1s cooldown
            # period between two consecutive memory checks in the worker.
            futures.append(executor.submit(_create_cyclic_reference))

        executor.shutdown(wait=True)

        max_active_refs_count = max(f.result() for f in futures)
        assert max_active_refs_count < 150
        assert max_active_refs_count != 1 
Example #16
Source File: run.py    From seed_rl with Apache License 2.0
def run_actor(executor, config, actor_id):
  """Runs actor job using executor."""
  master_addr = config.get('cluster').get('master')[0]
  args = [
      'python', get_py_main(),
      '--run_mode=actor',
      '--server_address={}'.format(master_addr),
      '--num_actors={}'.format(FLAGS.workers * FLAGS.actors_per_worker)
  ]
  worker_index = config.get('task').get('index')
  args.append('--task={}'.format(worker_index * FLAGS.actors_per_worker +
                                 actor_id))
  if '--' in sys.argv:
    args.extend(sys.argv[sys.argv.index('--') + 1:])
  return executor.submit(subprocess.check_call, args) 
Example #17
Source File: manage.py    From searchlight with Apache License 2.0
def methods_of(obj):
    """Get all callable methods of an object that don't start with underscore

    returns a list of tuples of the form (method_name, method)
    """
    result = []
    for i in dir(obj):
        if callable(getattr(obj, i)) and not i.startswith('_'):
            result.append((i, getattr(obj, i)))
    return result 
Example #18
Source File: manage.py    From searchlight with Apache License 2.0
def add_command_parsers(subparsers):
    """Adds any commands and subparsers for their actions. This code's
    from the Glance equivalent.
    """
    for command_name, cls in COMMANDS.items():
        command_object = cls()

        parser = subparsers.add_parser(command_name)
        parser.set_defaults(command_object=command_object)

        command_subparsers = parser.add_subparsers(dest='action')

        for (action, action_fn) in methods_of(command_object):
            parser = command_subparsers.add_parser(action)

            action_kwargs = []
            for args, kwargs in getattr(action_fn, 'args', []):
                if kwargs['dest'].startswith('action_kwarg_'):
                    action_kwargs.append(
                        kwargs['dest'][len('action_kwarg_'):])
                else:
                    action_kwargs.append(kwargs['dest'])
                    kwargs['dest'] = 'action_kwarg_' + kwargs['dest']

                parser.add_argument(*args, **kwargs)

            parser.set_defaults(action_fn=action_fn)
            parser.set_defaults(action_kwargs=action_kwargs)

            parser.add_argument('action_args', nargs='*') 
Example #19
Source File: async.py    From django-gateone with GNU General Public License v3.0
def add_task(self, funcs):
        """
        Adds the given *funcs* to this schedule.
        """
        if not isinstance(funcs, list):
            funcs = [funcs] # Make it a list
        self.funcs.append(funcs) 
Example #20
Source File: async.py    From django-gateone with GNU General Public License v3.0
def call_singleton(self, function, identifier, *args, **kwargs):
        """
        Executes *function* if no other function with the given *identifier*
        is already running.  If a function is currently running with the given
        *identifier* the passed *function* will be called when the first
        function is complete.

        In other words, functions called via this method will be executed in
        sequence with each function being called after the first is complete.

        The function will be passed any given *args* and *kwargs* just like
        :meth:`AsyncRunner.call`.

        If 'callback' is passed as a keyword argument (*kwargs*) it will be
        called with the result when complete.
        """
        callback = kwargs.pop('callback', None)
        if identifier in ONE_CALLS:
            ONE_CALLS[identifier]['queue'].append(
                (function, args, kwargs, callback))
        else:
            from collections import deque
            future = self.executor.submit(safe_call, function, *args, **kwargs)
            ONE_CALLS[identifier] = {
                'future': future,
                'queue': deque()
            }
            if callback:
                done_callback(
                    ONE_CALLS[identifier]['future'],
                    lambda f: callback(f.result()))
            completed = partial(_call_complete, self, identifier)
            done_callback(ONE_CALLS[identifier]['future'], completed)
        #print 'ONE_CALLS',ONE_CALLS
        #print 'identifier',identifier
        return ONE_CALLS[identifier]['future'] 
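A hypothetical use, assuming runner is an instance of the AsyncRunner class this method belongs to: because both calls share an identifier, the second sync_user run is queued and only starts once the first completes.

def sync_user(user_id):
    return {'id': user_id}

# Hypothetical; `runner` is an AsyncRunner instance from this module.
runner.call_singleton(sync_user, 'sync-user-42', 42)
runner.call_singleton(sync_user, 'sync-user-42', 42,
                      callback=lambda result: print(result))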
Example #21
Source File: async.py    From django-gateone with GNU General Public License v3.0
def callback_when_complete(futures, callback):
    """
    Calls *callback* after all *futures* (list) have completed running.
    """
    counter = count(1)
    io_loop = IOLoop.current()
    results = []
    def add_one(f):
        c = next(counter)  # Python 3: iterators have no .next() method
        results.append(f.result())
        if c >= len(futures):
            return callback(results)
    for future in futures:
        io_loop.add_future(future, add_one) 
Example #22
Source File: async.py    From django-gateone with GNU General Public License v3.0
def append_results(results, function, *args, **kwargs):
    """
    Calls *function* with the given *args* and *kwargs* then appends the result
    to *results* (which must be a list).  If we're not in the main process the
    given *function* will be called using `safe_call`.
    """
    if os.getpid() != PID:
        results.append(safe_call(function, *args, **kwargs))
    else:
        results.append(function(*args, **kwargs)) 
Example #23
Source File: Devo_v2.py    From content with MIT License
def alert_to_incident(alert):
    alert_severity = float(1)
    alert_name = alert['context'].split('.')[-1]
    alert_description = None
    alert_occurred = demisto_ISO(float(alert['eventdate']))
    alert_labels = []

    if demisto.get(alert['extraData'], 'alertPriority'):
        alert_severity = SEVERITY_LEVELS_MAP[str(alert['extraData']['alertPriority']).lower()]

    if demisto.get(alert['extraData'], 'alertName'):
        alert_name = alert['extraData']['alertName']

    if demisto.get(alert['extraData'], 'alertDescription'):
        alert_description = alert['extraData']['alertDescription']

    new_alert: Dict = {
        'devo.metadata.alert': {}
    }
    for key in alert:
        if key == 'extraData':
            continue
        new_alert['devo.metadata.alert'][key] = alert[key]
        alert_labels.append({'type': f'devo.metadata.alert.{key}', 'value': str(alert[key])})

    for key in alert['extraData']:
        new_alert[key] = alert['extraData'][key]
        alert_labels.append({'type': f'{key}', 'value': str(alert['extraData'][key])})

    incident = {
        'name': alert_name,
        'severity': alert_severity,
        'details': alert_description,
        'occurred': alert_occurred,
        'labels': alert_labels,
        'rawJSON': json.dumps(new_alert)
    }

    return incident 
Example #24
Source File: get.py    From twint with MIT License
def Multi(feed, config, conn):
    logme.debug(__name__+':Multi')
    count = 0
    try:
        with concurrent.futures.ThreadPoolExecutor(max_workers=20) as executor:
            loop = asyncio.get_event_loop()
            futures = []
            for tweet in feed:
                count += 1
                if config.Favorites or config.Profile_full:
                    logme.debug(__name__+':Multi:Favorites-profileFull')
                    link = tweet.find("a")["href"]
                    url = f"https://twitter.com{link}&lang=en"
                elif config.User_full:
                    logme.debug(__name__+':Multi:userFull')
                    username = tweet.find("a")["name"]
                    url = f"http://twitter.com/{username}?lang=en"
                else:
                    logme.debug(__name__+':Multi:else-url')
                    link = tweet.find("a", "tweet-timestamp js-permalink js-nav js-tooltip")["href"]
                    url = f"https://twitter.com{link}?lang=en"

                if config.User_full:
                    logme.debug(__name__+':Multi:user-full-Run')
                    futures.append(loop.run_in_executor(executor, await User(url,
                        config, conn)))
                else:
                    logme.debug(__name__+':Multi:notUser-full-Run')
                    futures.append(loop.run_in_executor(executor, await Tweet(url,
                        config, conn)))
            logme.debug(__name__+':Multi:asyncioGather')
            await asyncio.gather(*futures)
    except Exception as e:
        # TODO: fix error not error
        # print(str(e) + " [x] get.Multi")
        # will return "'NoneType' object is not callable"
        # but still works
        # logme.critical(__name__+':Multi:' + str(e))
        pass

    return count 
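Note the unusual construction above: User() and Tweet() are coroutines, so await runs them to completion first and run_in_executor() is handed their return value, which is what the swallowed "'NoneType' object is not callable" error mentioned in the except block refers to. The conventional way to fan out coroutines needs no executor at all; a minimal sketch (hypothetical, not the twint implementation):

import asyncio

async def fetch_page(url):
    await asyncio.sleep(0)  # stand-in for real network I/O
    return url

async def gather_all(urls):
    # Coroutines are gathered directly; run_in_executor is only needed
    # for blocking, synchronous callables.
    return await asyncio.gather(*(fetch_page(u) for u in urls))

print(asyncio.run(gather_all(['https://example.com/a', 'https://example.com/b'])))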
Example #25
Source File: cloudasset.py    From forseti-security with Apache License 2.0
def _download_cloudasset_data(config, inventory_index_id):
    """Download cloud asset data.

    Args:
        config (InventoryConfig): Inventory config.
        inventory_index_id (int): The inventory index ID for this export.

    Yields:
        str: GCS path of the cloud asset file.
    """
    root_resources = []
    if config.use_composite_root():
        root_resources.extend(config.get_composite_root_resources())
    else:
        root_resources.append(config.get_root_resource_id())
    cloudasset_client = cloudasset.CloudAssetClient(
        config.get_api_quota_configs())
    with concurrent.futures.ThreadPoolExecutor(max_workers=4) as executor:
        futures = []
        for root_id in root_resources:
            for content_type in CONTENT_TYPES:
                futures.append(executor.submit(_export_assets,
                                               cloudasset_client,
                                               config,
                                               root_id,
                                               content_type,
                                               inventory_index_id))

        for future in concurrent.futures.as_completed(futures):
            yield future.result() 
Example #26
Source File: unzipStructure.py    From TCDTIMITprocessing with GNU General Public License v3.0
def createZipList(rootDir):
    zipList = []
    for root, dirs, files in os.walk(rootDir):
        for fname in files:
            if ".zip" in fname:
                path = ''.join([root, os.sep, fname])
                #print('Found zip: %s' % path)
                zipList.append(path)
    return zipList

# define func to unzip one file 
Example #27
Source File: cloudasset.py    From forseti-security with Apache License 2.0
def _stream_cloudasset_worker(cai_data, engine, output_queue):
    """Worker to stream data from GCS into sqlite temporary table.

    Args:
        cai_data (file): An open file like pipe.
        engine (sqlalchemy.engine.Engine): Database engine to write data to.
        output_queue (collections.deque): A queue storing the results of this
            thread.
    """
    # Codecs transforms the raw byte stream into an iterable of lines.
    cai_iter = codecs.getreader('utf-8')(cai_data)
    rows = cai_temporary_storage.CaiDataAccess.populate_cai_data(
        cai_iter, engine)
    LOGGER.info('%s assets imported to database.', rows)
    output_queue.append(rows) 
Example #28
Source File: cum.py    From cum with Apache License 2.0
def update(fast):
    """Gather new chapters from followed series."""
    pool = concurrent.futures.ThreadPoolExecutor(config.get().download_threads)
    futures = []
    warnings = []
    aliases = {}
    query = db.session.query(db.Series).filter_by(following=True).all()
    if fast:
        skip_count = 0
        for series in query.copy():
            if not series.needs_update:
                skip_count += 1
                query.remove(series)
        output.series('Updating {} series ({} skipped)'
                      .format(len(query), skip_count))
    else:
        output.series('Updating {} series'.format(len(query)))
    for follow in query:
        fut = pool.submit(utility.series_by_url, follow.url)
        futures.append(fut)
        aliases[fut] = follow.alias
    with click.progressbar(length=len(futures), show_pos=True,
                           fill_char='>', empty_char=' ') as bar:
        for future in concurrent.futures.as_completed(futures):
            try:
                series = future.result()
            except exceptions.ConnectionError:
                warnings.append('Unable to update {} (connection error)'
                                .format(aliases[future]))
            except exceptions.ScrapingError:
                warnings.append('Unable to update {} (scraping error)'
                                .format(aliases[future]))
            except exceptions.LoginError as e:
                warnings.append('Unable to update {} ({})'
                                .format(aliases[future], e.message))
            else:
                series.update()
            bar.update(1)
    for w in warnings:
        output.warning(w)
    utility.list_new() 
Example #29
Source File: check_https.py    From httpswatch with MIT License
def new_check(self):
        c = Check()
        self.checks.append(c)
        return c 
Example #30
Source File: UploadForm.py    From fuxploider with GNU General Public License v3.0
def detectValidExtensions(self, extensions, maxN, extList=None):
        """Detect valid extensions for this upload form (sending legit files with legit mime types)."""
        self.logger.info("### Starting detection of valid extensions ...")
        n = 0
        if extList:
            tmpExtList = []
            for e in extList:
                tmpExtList.append((e, getMime(extensions, e)))
        else:
            tmpExtList = extensions
        validExtensions = []  # unused?

        extensionsToTest = tmpExtList[0:maxN]
        with concurrent.futures.ThreadPoolExecutor(max_workers=self.threads) as executor:
            futures = []
            try:
                for ext in extensionsToTest:
                    f = executor.submit(
                        self.uploadFile,
                        "." + ext[0],
                        ext[1],
                        os.urandom(self.size)
                    )
                    f.ext = ext
                    f.add_done_callback(self.detectValidExtension)
                    futures.append(f)
                for future in concurrent.futures.as_completed(futures):
                    a = future.result()
                    n += 1
            except KeyboardInterrupt:
                self.shouldLog = False
                executor.shutdown(wait=False)
                self.stopThreads = True
                executor._threads.clear()
                concurrent.futures.thread._threads_queues.clear()
        return n