Python click.progressbar() Examples

The following are 30 code examples of click.progressbar(), drawn from open-source projects. You can go to the original project or source file by following the link above each example. You may also want to check out all available functions/classes of the module click, or try the search function.
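As a quick orientation before the examples, here is a minimal sketch of the two ways click.progressbar() is usually driven; the labels and the fake work are illustrative only:

import time
import click

# Mode 1: wrap an iterable; the bar advances once per yielded item.
with click.progressbar(range(100), label='Processing items') as bar:
    for item in bar:
        time.sleep(0.01)  # stand-in for real per-item work

# Mode 2: pass only a total length and advance the bar manually.
with click.progressbar(length=1000, label='Downloading') as bar:
    for chunk in (256, 256, 256, 232):
        bar.update(chunk)  # advance by an arbitrary number of steps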
Example #1
Source File: cmd_delete.py    From python-alerta-client with Apache License 2.0
def cli(obj, ids, query, filters):
    """Delete alerts."""
    client = obj['client']
    if ids:
        total = len(ids)
    else:
        if not (query or filters):
            click.confirm('Deleting all alerts. Do you want to continue?', abort=True)
        if query:
            query = [('q', query)]
        else:
            query = build_query(filters)
        total, _, _ = client.get_count(query)
        ids = [a.id for a in client.get_alerts(query)]

    with click.progressbar(ids, label='Deleting {} alerts'.format(total)) as bar:
        for id in bar:
            client.delete_alert(id) 
Example #2
Source File: rdfgen.py    From ontobio with BSD 3-Clause "New" or "Revised" License
def convert(association, ontology, output, association_file):
    click.echo("converting {}".format(association))

    rdfWriter = assoc_rdfgen.TurtleRdfWriter(label=os.path.basename(output.name))
    rdfTransformer = assoc_rdfgen.CamRdfTransform(writer=rdfWriter)
    parser_config = assocparser.AssocParserConfig(ontology=make_ontology(ontology))
    parser = _association_parser(association, parser_config)

    with open(association_file) as af:
        lines = sum(1 for line in af)

    with open(association_file) as af:
        associations = parser.association_generator(file=af)
        with click.progressbar(iterable=associations, length=lines) as assocs:
            for assoc in assocs:
                rdfTransformer.provenance()
                rdfTransformer.translate(assoc)

        click.echo("Writing ttl to disk")
        rdfWriter.serialize(destination=output) 
Example #3
Source File: validate.py    From ontobio with BSD 3-Clause "New" or "Revised" License
def produce_gpi(dataset, target_dir, gaf_path, ontology_graph):
    gafparser = GafParser()
    gafparser.config = assocparser.AssocParserConfig(
        ontology=ontology_graph
    )
    with open(gaf_path) as sg:
        lines = sum(1 for line in sg)

    gpi_path = os.path.join(os.path.split(gaf_path)[0], "{}.gpi".format(dataset))
    with open(gaf_path) as gf, open(gpi_path, "w") as gpi:
        click.echo("Using {} as the gaf to build gpi with".format(gaf_path))
        bridge = gafgpibridge.GafGpiBridge()
        gpiwriter = entitywriter.GpiWriter(file=gpi)
        gpi_cache = set()

        with click.progressbar(iterable=gafparser.association_generator(file=gf), length=lines) as associations:
            for association in associations:
                entity = bridge.convert_association(association)
                if entity not in gpi_cache and entity is not None:
                    # If the entity is not in the cache, add it and write it out
                    gpi_cache.add(entity)
                    gpiwriter.write_entity(entity)

    return gpi_path 
Example #4
Source File: base.py    From cum with Apache License 2.0
def progress_bar(self, arg):
        """Returns a pre-configured Click progress bar to use with downloads.
        If the chapter uses separate page downloads, page download progress is
        shown (e.g. '7/20').
        """
        if self.uses_pages:
            iterable = arg
            length = None
        else:
            iterable = None
            length = arg

        click.echo('{c.alias} {c.chapter}'.format(c=self))
        return click.progressbar(iterable=iterable, length=length,
                                 fill_char='>', empty_char=' ',
                                 show_pos=self.uses_pages, show_percent=True) 
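For reference, the same dual-mode pattern can be written as a free function; this is a self-contained sketch (the names are illustrative, not part of cum):

import click

def make_progress_bar(arg, uses_pages):
    # When page counts are known, iterate the pages themselves;
    # otherwise drive a fixed-length bar manually, as above.
    return click.progressbar(iterable=arg if uses_pages else None,
                             length=None if uses_pages else arg,
                             fill_char='>', empty_char=' ',
                             show_pos=uses_pages, show_percent=True)

with make_progress_bar(range(20), uses_pages=True) as bar:
    for page in bar:
        pass  # download the page here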
Example #5
Source File: __init__.py    From openag_python with GNU General Public License v3.0
def load_fixture(fixture_file):
    """
    Populate the database from a JSON file. Reads the JSON file FIXTURE_FILE
    and uses it to populate the database. Fixture files should consist of a
    dictionary mapping database names to arrays of objects to store in those
    databases.
    """
    utils.check_for_local_server()
    local_url = config["local_server"]["url"]
    server = Server(local_url)
    fixture = json.load(fixture_file)
    for db_name, _items in fixture.items():
        db = server[db_name]
        with click.progressbar(
            _items, label=db_name, length=len(_items)
        ) as items:
            for item in items:
                item_id = item["_id"]
                if item_id in db:
                    old_item = db[item_id]
                    item["_rev"] = old_item["_rev"]
                    if item == old_item:
                        continue
                db[item_id] = item 
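To illustrate the structure the docstring describes, a fixture file is a JSON object mapping database names to lists of documents; a minimal hypothetical fixture (database and field names are made up) could be generated like this:

import json

# Hypothetical fixture content: each key is a database name, each value
# a list of documents keyed by "_id".
fixture = {
    "environmental_data_point": [
        {"_id": "point1", "variable": "air_temperature", "value": 21.5},
        {"_id": "point2", "variable": "air_humidity", "value": 0.4},
    ],
}
with open("fixture.json", "w") as f:
    json.dump(fixture, f, indent=2)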
Example #6
Source File: cmd_update.py    From python-alerta-client with Apache License 2.0
def cli(obj, ids, query, filters, attributes):
    """Update alert attributes."""
    client = obj['client']
    if ids:
        total = len(ids)
    else:
        if query:
            query = [('q', query)]
        else:
            query = build_query(filters)
        total, _, _ = client.get_count(query)
        ids = [a.id for a in client.get_alerts(query)]

    with click.progressbar(ids, label='Updating {} alerts'.format(total)) as bar:
        for id in bar:
            client.update_attributes(id, dict(a.split('=') for a in attributes)) 
Example #7
Source File: cmd_tag.py    From python-alerta-client with Apache License 2.0
def cli(obj, ids, query, filters, tags):
    """Add tags to alerts."""
    client = obj['client']
    if ids:
        total = len(ids)
    else:
        if query:
            query = [('q', query)]
        else:
            query = build_query(filters)
        total, _, _ = client.get_count(query)
        ids = [a.id for a in client.get_alerts(query)]

    with click.progressbar(ids, label='Tagging {} alerts'.format(total)) as bar:
        for id in bar:
            client.tag_alert(id, tags) 
Example #8
Source File: downloader.py    From web2board with GNU Lesser General Public License v3.0
def start(self):
        itercontent = self._request.iter_content(chunk_size=self.CHUNK_SIZE)
        f = open(self._destination, "wb")
        chunks = int(ceil(self.get_size() / float(self.CHUNK_SIZE)))

        if util.is_ci():
            click.echo("Downloading...")
            for _ in range(0, chunks):
                f.write(next(itercontent))
        else:
            with click.progressbar(length=chunks, label="Downloading") as pb:
                for _ in pb:
                    f.write(next(itercontent))
        f.close()
        self._request.close()

        self._preserve_filemtime(self.get_lmtime()) 
Example #9
Source File: cmd_note.py    From python-alerta-client with Apache License 2.0
def cli(obj, ids, alert_ids, query, filters, text, delete):
    """Add or delete note to alerts."""
    client = obj['client']
    if delete:
        client.delete_alert_note(*delete)
    else:
        if alert_ids:
            total = len(alert_ids)
        else:
            if query:
                query = [('q', query)]
            else:
                query = build_query(filters)
            total, _, _ = client.get_count(query)
            alert_ids = [a.id for a in client.get_alerts(query)]

        with click.progressbar(alert_ids, label='Add note to {} alerts'.format(total)) as bar:
            for id in bar:
                client.alert_note(id, text=text) 
Example #10
Source File: cmd_untag.py    From python-alerta-client with Apache License 2.0
def cli(obj, ids, query, filters, tags):
    """Remove tags from alerts."""
    client = obj['client']
    if ids:
        total = len(ids)
    else:
        if query:
            query = [('q', query)]
        else:
            query = build_query(filters)
        total, _, _ = client.get_count(query)
        ids = [a.id for a in client.get_alerts(query)]

    with click.progressbar(ids, label='Untagging {} alerts'.format(total)) as bar:
        for id in bar:
            client.untag_alert(id, tags) 
Example #11
Source File: synchronizers.py    From pdm with MIT License
def progressbar(self, label: str, total: int):
        bar = progressbar(
            length=total,
            fill_char=stream.green(self.BAR_FILLED_CHAR),
            empty_char=self.BAR_EMPTY_CHAR,
            show_percent=False,
            show_pos=True,
            label=label,
            bar_template="%(label)s %(bar)s %(info)s",
        )
        if self.parallel:
            executor = ThreadPoolExecutor()
        else:
            executor = DummyExecutor()
        with executor:
            try:
                yield bar, executor
            except KeyboardInterrupt:
                pass 
Example #12
Source File: cli.py    From invenio-app-ils with MIT License
def index_opendefinition(loader, path, whitelist_status, force):
    """Index JSON-based vocabularies in Elasticsearch."""
    if not force:
        click.confirm(
            "Are you sure you want to index the vocabularies?",
            abort=True
        )
    index_count = 0
    click.echo('indexing licenses from loader {} and path {}...'.format(
        loader,
        path
    ))
    if whitelist_status:
        whitelist_status = whitelist_status.split(",")
    vocabularies = load_vocabularies(
        "opendefinition", loader, path, whitelist_status
    )
    cfg = current_app.config["RECORDS_REST_ENDPOINTS"][VOCABULARY_PID_TYPE]
    indexer = cfg["indexer_class"]()
    with click.progressbar(vocabularies) as bar:
        for vocabulary in bar:
            indexer.index(vocabulary)
    index_count += len(vocabularies)
    click.echo('indexed {} licenses'.format(index_count)) 
Example #13
Source File: cli.py    From invenio-app-ils with MIT License
def index_json(filenames, force):
    """Index JSON-based vocabularies in Elasticsearch."""
    if not force:
        click.confirm(
            "Are you sure you want to index the vocabularies?",
            abort=True
        )
    source = "json"
    index_count = 0
    for filename in filenames:
        click.echo('indexing vocabularies in {}...'.format(filename))
        vocabularies = load_vocabularies(source, filename)
        cfg = current_app.config["RECORDS_REST_ENDPOINTS"][VOCABULARY_PID_TYPE]
        indexer = cfg["indexer_class"]()
        with click.progressbar(vocabularies) as bar:
            for vocabulary in bar:
                indexer.index(vocabulary)
        index_count += len(vocabularies)
    click.echo('indexed {} vocabularies'.format(index_count)) 
Example #14
Source File: bruteforce_default_credentials.py    From aemscan with MIT License
def run(url):
    response = requests.get(url + '/', auth=('baseline', 'request'))
    if response.status_code != 401:
        click.echo(click.style('AEM authentication is not available', fg='red'))
        return

    click.echo(click.style('AEM authentication is available', fg='green'))
    with open(os.path.dirname(__file__) + '/../data/aem-default-creds.txt', 'r') as f:
        creds = [line.strip() for line in f]  # string.strip was removed in Python 3
        found = []
        with click.progressbar(creds, label='Checking default credentials') as bar:
            for line in bar:
                (login, password) = line.split(':')
                response = requests.post(url + '/', auth=(login, password))
                if response.status_code == 200:
                    found.append(line)
    if found:
        click.echo(click.style('Found {} default credentials!'.format(len(found)), fg='green'))
        for item in found:
            click.echo(click.style('{}'.format(item), fg='green')) 
Example #15
Source File: migrate.py    From indico-plugins with MIT License
def run(self):
        models = {model: self.make_query(model).count() for model in StoredFileMixin.__subclasses__()}
        models = {model: total for model, total in models.iteritems() if total}
        labels = {model: cformat('Processing %{blue!}{}%{reset} (%{cyan}{}%{reset} rows)').format(model.__name__, total)
                  for model, total in models.iteritems()}
        max_length = max(len(x) for x in labels.itervalues())
        labels = {model: label.ljust(max_length) for model, label in labels.iteritems()}
        for model, total in sorted(models.items(), key=itemgetter(1)):
            with click.progressbar(self.query_chunked(model, 1000), length=total, label=labels[model],
                                   show_percent=True, show_pos=True) as objects:
                for obj in self.flush_rclone_iterator(objects, 1000):
                    try:
                        self.process_obj(obj)
                    except Exception as exc:
                        click.echo(cformat('\n%{red!}Error processing %{reset}%{yellow}{}%{red!}: %{reset}%{yellow!}{}')
                                   .format(obj, exc))
        click.secho('All done!', fg='green')
        click.echo('Add the following entries to your STORAGE_BACKENDS:')
        for bucket, data in sorted(self.buckets.viewitems(), key=itemgetter(0)):
            click.echo("'{}': 's3-readonly:host={},bucket={}',".format(
                data['backend'], self.s3_endpoint.replace('https://', ''), bucket)) 
Example #16
Source File: utils.py    From python-alerta-client with Apache License 2.0
def action_progressbar(client, action, ids, label, text=None, timeout=None):
    skipped = 0

    def show_skipped(id):
        if not id and skipped:
            return '(skipped {})'.format(skipped)

    with click.progressbar(ids, label=label, show_eta=True, item_show_func=show_skipped) as bar:
        for id in bar:
            try:
                client.action(id, action=action, text=text, timeout=timeout)
            except Exception:
                skipped += 1 
Example #17
Source File: tohinz_wrapper.py    From EvadeML-Zoo with MIT License
def generate_carlini_li_examples(sess, model, x, y, X, Y, attack_params, verbose, attack_log_fpath):
    model_wrapper = wrap_to_tohinz_model(model, X, Y)

    if 'batch_size' in attack_params:
        batch_size = attack_params['batch_size']
        del attack_params['batch_size']
    else:
        batch_size = 10

    accepted_params = ['targeted', 'learning_rate', 'max_iterations', 'abort_early', 'initial_const', 'largest_const', 'reduce_const', 'decrease_factor', 'const_factor', 'confidence']
    for k in attack_params:
        if k not in accepted_params:
            raise NotImplementedError("Unsuporrted params in Carlini Li: %s" % k)

    attack = CarliniLi(sess, model_wrapper, **attack_params)
    
    X_adv_list = []

    with click.progressbar(range(0, len(X)), file=sys.stderr, show_pos=True, 
                           width=40, bar_template='  [%(bar)s] Carlini Li Attacking %(info)s', 
                           fill_char='>', empty_char='-') as bar:
        for i in bar:
            if i % batch_size == 0:
                X_sub = X[i:min(i+batch_size, len(X)),:]
                Y_sub = Y[i:min(i+batch_size, len(X)),:]
                if not verbose:
                    disablePrint(attack_log_fpath)
                X_adv_sub = attack.attack(X_sub - 0.5, Y_sub) + 0.5
                if not verbose:
                    enablePrint()
                X_adv_list.append(X_adv_sub)

    X_adv = np.vstack(X_adv_list)
    return X_adv 
Example #18
Source File: label.py    From redisgraph-bulk-loader with BSD 3-Clause "New" or "Revised" License
def process_entities(self):
        entities_created = 0
        with click.progressbar(self.reader, length=self.entities_count, label=self.entity_str) as reader:
            for row in reader:
                self.validate_row(row)

                # Update the node identifier dictionary if necessary
                if self.config.store_node_identifiers:
                    id_field = row[self.id]
                    if self.id_namespace is not None:
                        id_field = self.id_namespace + '.' + str(id_field)
                    self.update_node_dictionary(id_field)

                row_binary = self.pack_props(row)
                row_binary_len = len(row_binary)
                # If the addition of this entity will make the binary token grow too large,
                # send the buffer now.
                # TODO how much of this can be made uniform w/ relations and moved to Querybuffer?
                if self.binary_size + row_binary_len > self.config.max_token_size:
                    self.query_buffer.labels.append(self.to_binary())
                    self.query_buffer.send_buffer()
                    self.reset_partial_binary()
                    # Push the label onto the query buffer again, as there are more entities to process.
                    self.query_buffer.labels.append(self.to_binary())

                self.query_buffer.node_count += 1
                entities_created += 1
                self.binary_size += row_binary_len
                self.binary_entities.append(row_binary)
            self.query_buffer.labels.append(self.to_binary())
        self.infile.close()
        print("%d nodes created with label '%s'" % (entities_created, self.entity_str)) 
Example #19
Source File: relation_type.py    From redisgraph-bulk-loader with BSD 3-Clause "New" or "Revised" License
def process_entities(self):
        entities_created = 0
        with click.progressbar(self.reader, length=self.entities_count, label=self.entity_str) as reader:
            for row in reader:
                self.validate_row(row)
                try:
                    start_id = row[self.start_id]
                    if self.start_namespace:
                        start_id = self.start_namespace + '.' + str(start_id)
                    end_id = row[self.end_id]
                    if self.end_namespace:
                        end_id = self.end_namespace + '.' + str(end_id)

                    src = self.query_buffer.nodes[start_id]
                    dest = self.query_buffer.nodes[end_id]
                except KeyError as e:
                    print("Relationship specified a non-existent identifier. src: %s; dest: %s" % (row[self.start_id], row[self.end_id]))
                    if self.config.skip_invalid_edges is False:
                        raise e
                    continue
                fmt = "=QQ" # 8-byte unsigned ints for src and dest
                row_binary = struct.pack(fmt, src, dest) + self.pack_props(row)
                row_binary_len = len(row_binary)
                # If the addition of this entity will make the binary token grow too large,
                # send the buffer now.
                if self.binary_size + row_binary_len > self.config.max_token_size:
                    self.query_buffer.reltypes.append(self.to_binary())
                    self.query_buffer.send_buffer()
                    self.reset_partial_binary()
                    # Push the reltype onto the query buffer again, as there are more entities to process.
                    self.query_buffer.reltypes.append(self.to_binary())

                self.query_buffer.relation_count += 1
                entities_created += 1
                self.binary_size += row_binary_len
                self.binary_entities.append(row_binary)
            self.query_buffer.reltypes.append(self.to_binary())
        self.infile.close()
        print("%d relations created for type '%s'" % (entities_created, self.entity_str)) 
Example #20
Source File: __init__.py    From elasticsearch_loader with MIT License
def load(lines, config):
    bulks = grouper(lines, config['bulk_size'] * 3)
    if config['progress']:
        bulks = [x for x in bulks]  # materialize so the bar knows the total length
    with click.progressbar(bulks) as pbar:
        for i, bulk in enumerate(pbar):
            try:
                single_bulk_to_es(bulk, config, config['with_retry'])
            except Exception as e:
                log('warn', 'Chunk {i} got exception ({e}) while processing'.format(e=e, i=i))
                raise 
Example #21
Source File: us_equity_pricing.py    From zipline-chinese with Apache License 2.0
def write(self, filename, calendar, assets, show_progress=False):
        """
        Parameters
        ----------
        filename : str
            The location at which we should write our output.
        calendar : pandas.DatetimeIndex
            Calendar to use to compute asset calendar offsets.
        assets : pandas.Int64Index
            The assets for which to write data.
        show_progress : bool
            Whether or not to show a progress bar while writing.

        Returns
        -------
        table : bcolz.ctable
            The newly-written table.
        """
        _iterator = self.gen_tables(assets)
        if show_progress:
            pbar = progressbar(
                _iterator,
                length=len(assets),
                item_show_func=lambda i: i if i is None else str(i[0]),
                label="Merging asset files:",
            )
            with pbar as pbar_iterator:
                return self._write_internal(filename, calendar, pbar_iterator)
        return self._write_internal(filename, calendar, _iterator) 
Example #22
Source File: carlini_wrapper.py    From EvadeML-Zoo with MIT License
def generate_carlini_li_examples(sess, model, x, y, X, Y, attack_params, verbose, attack_log_fpath):
    model_wrapper = wrap_to_carlini_model(model, X, Y)

    if 'batch_size' in attack_params:
        batch_size = attack_params['batch_size']
        del attack_params['batch_size']
    else:
        batch_size = 10

    accepted_params = ['targeted', 'learning_rate', 'max_iterations', 'abort_early', 'initial_const', 'largest_const', 'reduce_const', 'decrease_factor', 'const_factor', 'confidence']
    for k in attack_params:
        if k not in accepted_params:
            raise NotImplementedError("Unsuporrted params in Carlini Li: %s" % k)

    attack = CarliniLi(sess, model_wrapper, **attack_params)
    
    X_adv_list = []

    with click.progressbar(range(0, len(X)), file=sys.stderr, show_pos=True, 
                           width=40, bar_template='  [%(bar)s] Carlini Li Attacking %(info)s', 
                           fill_char='>', empty_char='-') as bar:
        for i in bar:
            if i % batch_size == 0:
                X_sub = X[i:min(i+batch_size, len(X)),:]
                Y_sub = Y[i:min(i+batch_size, len(X)),:]
                if not verbose:
                    disablePrint(attack_log_fpath)
                X_adv_sub = attack.attack(X_sub - 0.5, Y_sub) + 0.5
                if not verbose:
                    enablePrint()
                X_adv_list.append(X_adv_sub)

    X_adv = np.vstack(X_adv_list)
    return X_adv 
Example #23
Source File: checkpoint_downloader.py    From luminoth with BSD 3-Clause "New" or "Revised" License
def download_checkpoint(network, network_filename, checkpoint_path,
                        checkpoint_filename):
    tarball_filename = BASE_NETWORK_FILENAMES[network]
    url = TENSORFLOW_OFFICIAL_ENDPOINT + tarball_filename
    response = requests.get(url, stream=True)
    total_size = int(response.headers.get('Content-Length'))
    tarball_path = os.path.join(checkpoint_path, tarball_filename)
    tmp_tarball = tf.gfile.Open(tarball_path, 'wb')
    tf.logging.info('Downloading {} checkpoint.'.format(network_filename))
    with click.progressbar(length=total_size) as bar:
        for data in response.iter_content(chunk_size=4096):
            tmp_tarball.write(data)
            bar.update(len(data))
    tmp_tarball.flush()

    tf.logging.info('Saving checkpoint to {}'.format(checkpoint_path))
    # Open saved tarball as readable binary
    tmp_tarball = tf.gfile.Open(tarball_path, 'rb')
    # Open tarfile object
    tar_obj = tarfile.open(fileobj=tmp_tarball)
    # Get checkpoint file name
    checkpoint_file_name = tar_obj.getnames()[0]
    # Create buffer with extracted network checkpoint
    checkpoint_fp = tar_obj.extractfile(checkpoint_file_name)
    # Define where to save.
    checkpoint_file = tf.gfile.Open(checkpoint_filename, 'wb')
    # Write extracted checkpoint to file
    checkpoint_file.write(checkpoint_fp.read())
    checkpoint_file.flush()
    checkpoint_file.close()
    tmp_tarball.close()
    # Remove temp tarball
    tf.gfile.Remove(tarball_path) 
Example #24
Source File: cleverhans_wrapper.py    From EvadeML-Zoo with MIT License
def generate_jsma_examples(sess, model, x, y, X, Y, attack_params, verbose, attack_log_fpath):
    """
    Targeted attack, with target classes in Y.
    """
    Y_target = Y

    nb_classes = Y.shape[1]

    jsma = SaliencyMapMethod(model, back='tf', sess=sess)
    jsma_params = {'theta': 1., 'gamma': 0.1,
                   'nb_classes': nb_classes, 'clip_min': 0.,
                   'clip_max': 1., 'targets': y,
                   'y_val': None}
    jsma_params = override_params(jsma_params, attack_params)

    adv_x_list = []

    with click.progressbar(range(0, len(X)), file=sys.stderr, show_pos=True, 
                           width=40, bar_template='  [%(bar)s] JSMA Attacking %(info)s', 
                           fill_char='>', empty_char='-') as bar:
        # Loop over the samples we want to perturb into adversarial examples
        for sample_ind in bar:
            sample = X[sample_ind:(sample_ind+1)]

            jsma_params['y_val'] = Y_target[[sample_ind],]
            adv_x = jsma.generate_np(sample, **jsma_params)
            adv_x_list.append(adv_x)

    return np.vstack(adv_x_list) 
Example #25
Source File: deepfool_wrapper.py    From EvadeML-Zoo with MIT License
def generate_deepfool_examples(sess, model, x, y, X, Y, attack_params, verbose, attack_log_fpath):
    """
    Untargeted attack. Y is not needed.
    """

    # TODO: insert a uint8 filter to f.
    f, grad_fs = prepare_attack(sess, model, x, y, X, Y)

    params = {'num_classes': 10, 'overshoot': 0.02, 'max_iter': 50}
    params = override_params(params, attack_params)

    adv_x_list = []
    aux_info = {}
    aux_info['r_tot'] = []
    aux_info['loop_i'] = []
    aux_info['k_i'] = []

    with click.progressbar(range(0, len(X)), file=sys.stderr, show_pos=True,
                           width=40, bar_template='  [%(bar)s] DeepFool Attacking %(info)s',
                           fill_char='>', empty_char='-') as bar:
        # Loop over the samples we want to perturb into adversarial examples
        for i in bar:
            image = X[i:i+1,:,:,:]

            if not verbose:
                disablePrint(attack_log_fpath)

            r_tot, loop_i, k_i, pert_image = deepfool(image, f, grad_fs, **params)

            if not verbose:
                enablePrint()

            adv_x_list.append(pert_image)

            aux_info['r_tot'].append(r_tot)
            aux_info['loop_i'].append(loop_i)
            aux_info['k_i'].append(k_i)

    return np.vstack(adv_x_list), aux_info 
Example #26
Source File: tohinz_wrapper.py    From EvadeML-Zoo with MIT License
def generate_carlini_l0_examples(sess, model, x, y, X, Y, attack_params, verbose, attack_log_fpath):
    model_wrapper = wrap_to_tohinz_model(model, X, Y)

    if 'batch_size' in attack_params:
        batch_size = attack_params['batch_size']
        del attack_params['batch_size']
    else:
        batch_size = 10

    accepted_params = ['targeted', 'learning_rate', 'max_iterations', 'abort_early', 'initial_const', 'largest_const', 'reduce_const', 'decrease_factor', 'const_factor', 'independent_channels', 'confidence']
    for k in attack_params:
        if k not in accepted_params:
            raise NotImplementedError("Unsuporrted params in Carlini L0: %s" % k)

    attack = CarliniL0(sess, model_wrapper, **attack_params)

    X_adv_list = []

    with click.progressbar(range(0, len(X)), file=sys.stderr, show_pos=True, 
                           width=40, bar_template='  [%(bar)s] Carlini L0 Attacking %(info)s', 
                           fill_char='>', empty_char='-') as bar:
        for i in bar:
            if i % batch_size == 0:
                X_sub = X[i:min(i+batch_size, len(X)),:]
                Y_sub = Y[i:min(i+batch_size, len(X)),:]
                if not verbose:
                    disablePrint(attack_log_fpath)
                X_adv_sub = attack.attack(X_sub - 0.5, Y_sub) + 0.5
                if not verbose:
                    enablePrint()
                X_adv_list.append(X_adv_sub)

    X_adv = np.vstack(X_adv_list)
    return X_adv 
Example #27
Source File: __init__.py    From InplusTrader_Linux with MIT License
def _init(self):
        trading_length = len(self._env.config.base.trading_calendar)
        self.progress_bar = click.progressbar(length=trading_length, show_eta=False) 
Example #28
Source File: cli.py    From hangar-py with Apache License 2.0
def import_data(ctx, repo: Repository, column, path, branch, plugin, overwrite):
    """Import file or directory of files at PATH to COLUMN in the staging area.

    If passing in a directory, all files in the directory will be imported, if
    passing in a file, just that files specified will be imported.
    """
    # TODO: ignore warning through env variable
    from types import GeneratorType
    from hangar import external
    from hangar.records.heads import get_staging_branch_head

    kwargs = parse_custom_arguments(ctx.args)
    if branch is None:
        branch = get_staging_branch_head(repo._env.branchenv)
    elif branch not in repo.list_branches():
        raise click.ClickException(f'Branch name: {branch} does not exist, Exiting.')
    click.echo(f'Writing to branch: {branch}')

    co = repo.checkout(write=True, branch=branch)
    try:
        active_aset = co.columns.get(column)
        p = Path(path)
        files = [f.resolve() for f in p.iterdir()] if p.is_dir() else [p.resolve()]
        with active_aset as aset, click.progressbar(files) as filesBar:
            for f in filesBar:
                ext = ''.join(f.suffixes).strip('.')  # multi-suffix files (tar.bz2)
                loaded = external.load(f, plugin=plugin, extension=ext, **kwargs)
                if not isinstance(loaded, GeneratorType):
                    loaded = [loaded]
                for arr, fname in loaded:
                    if (not overwrite) and (fname in aset):
                        continue
                    try:
                        aset[fname] = arr
                    except ValueError as e:
                        click.echo(e)
    except (ValueError, KeyError) as e:
        raise click.ClickException(e)
    finally:
        co.close() 
Example #29
Source File: log.py    From kraken with Apache License 2.0
def progressbar(*args, **kwargs):
    """
    Slight extension to click's progressbar that disables output when the log
    level is set below 30.
    """
    import logging
    logger = logging.getLogger(__name__)
    bar = click.progressbar(*args, **kwargs)
    if logger.getEffectiveLevel() < 30:
        bar.is_hidden = True  # type: ignore
    return bar 
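Because the wrapper forwards all positional and keyword arguments, it can be called exactly like click.progressbar; a minimal hypothetical usage:

# Hypothetical usage: same API as click.progressbar, but the bar stays
# hidden whenever the logger's effective level is below WARNING (30).
with progressbar(range(500), label='transcribing') as bar:
    for item in bar:
        pass  # per-item work here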
Example #30
Source File: scan_useful_paths.py    From aemscan with MIT License
def run(url):
    with open(os.path.dirname(__file__) + '/../data/aem-paths.txt', 'r') as f:
        paths = [line.strip() for line in f]  # string.strip was removed in Python 3
        found = []
        with click.progressbar(paths, label='Scanning useful paths') as bar:
            for path in bar:
                response = requests.head(url + path)
                if response.status_code == 200:
                    found.append(path)
    if found:
        click.echo(click.style('Found {} paths:'.format(len(found)), fg='green'))
        for item in found:
            click.echo(click.style('{}'.format(item), fg='green'))