Python typing.Collection() Examples

The following are 30 code examples of typing.Collection(), drawn from open source projects. The original project and source file are listed above each example. You may also want to check out all available functions/classes of the typing module.
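As a quick refresher, typing.Collection is the generic alias of collections.abc.Collection: anything that is sized, iterable, and supports membership tests (lists, tuples, sets, dicts, frozensets) satisfies it, while a one-shot generator does not. A minimal, self-contained sketch (not taken from any of the projects below):

from typing import Collection

def describe(items: Collection[str]) -> str:
    # A Collection supports len(), iteration, and the "in" operator.
    return f"{len(items)} item(s): {', '.join(sorted(items))}"

print(describe(["a", "b"]))    # a list is a Collection
print(describe({"a", "b"}))    # a set is a Collection
# describe(x for x in "ab")    # a generator has no len(), so it is not a Collection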
Example #1
Source File: service.py    From grpclib with BSD 3-Clause "New" or "Revised" License
def extend(cls, services: 'Collection[IServable]') -> 'List[IServable]':
        """
        Extends services list with reflection service:

        .. code-block:: python3

            from grpclib.reflection.service import ServerReflection

            services = [Greeter()]
            services = ServerReflection.extend(services)

            server = Server(services)
            ...

        Returns new services list with reflection support added.
        """
        service_names = []
        for service in services:
            service_names.append(_service_name(service))
        services = list(services)
        services.append(cls(_service_names=service_names))
        services.append(_ServerReflectionV1Alpha(_service_names=service_names))
        return services 
Example #2
Source File: bounce_lib.py    From paasta with Apache License 2.0
def crossover_bounce(
    new_config: BounceMethodConfigDict,
    new_app_running: bool,
    happy_new_tasks: Collection,
    old_non_draining_tasks: Sequence,
    margin_factor=1.0,
) -> BounceMethodResult:
    """Starts a new app if necessary; slowly kills old apps as instances of the new app become happy.

    See the docstring for brutal_bounce() for parameters and return value.
    """

    assert margin_factor > 0
    assert margin_factor <= 1

    needed_count = max(
        int(math.ceil(new_config["instances"] * margin_factor)) - len(happy_new_tasks),
        0,
    )

    return {
        "create_app": not new_app_running,
        "tasks_to_drain": set(old_non_draining_tasks[needed_count:]),
    } 
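For context, here is a minimal stand-alone sketch of the draining arithmetic above with hypothetical numbers (the values are illustrative, not the paasta API): with 10 requested instances, a margin_factor of 0.8 and 5 already-happy new tasks, needed_count is max(ceil(10 * 0.8) - 5, 0) = 3, so every old task beyond the first 3 is marked for draining.

import math

instances = 10          # hypothetical new_config["instances"]
margin_factor = 0.8
happy_new_tasks = ["n1", "n2", "n3", "n4", "n5"]
old_non_draining_tasks = ["o1", "o2", "o3", "o4", "o5", "o6"]

needed_count = max(int(math.ceil(instances * margin_factor)) - len(happy_new_tasks), 0)
print(needed_count)                                 # 3
print(set(old_non_draining_tasks[needed_count:]))   # {'o4', 'o5', 'o6'}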
Example #3
Source File: game.py    From dcs_liberation with GNU Lesser General Public License v3.0
def pass_turn(self, no_action=False, ignored_cps: typing.Collection[ControlPoint] = None):
        logging.info("Pass turn")
        for event in self.events:
            if self.settings.version == "dev":
                # don't damage player CPs by skipping their events in dev mode
                if isinstance(event, UnitsDeliveryEvent):
                    event.skip()
            else:
                event.skip()

        for cp in self.theater.enemy_points():
            self._commision_units(cp)
        self._budget_player()

        if not no_action:
            for cp in self.theater.player_points():
                cp.base.affect_strength(+PLAYER_BASE_STRENGTH_RECOVERY)

        self.ignored_cps = []
        if ignored_cps:
            self.ignored_cps = ignored_cps

        self.events = []  # type: typing.List[Event]
        self._generate_events()
        #self._generate_globalinterceptions() 
Example #4
Source File: structures.py    From pytest with MIT License
def param(
        cls,
        *values: object,
        marks: "Union[MarkDecorator, typing.Collection[Union[MarkDecorator, Mark]]]" = (),
        id: Optional[str] = None
    ) -> "ParameterSet":
        if isinstance(marks, MarkDecorator):
            marks = (marks,)
        else:
            # TODO(py36): Change to collections.abc.Collection.
            assert isinstance(marks, (collections.abc.Sequence, set))

        if id is not None:
            if not isinstance(id, str):
                raise TypeError(
                    "Expected id to be a string, got {}: {!r}".format(type(id), id)
                )
            id = ascii_escaped(id)
        return cls(values, marks, id) 
Example #5
Source File: rpc_utils.py    From RLs with Apache License 2.0
def _process_vector_observation(
    obs_index: int,
    shape: Tuple[int, ...],
    agent_info_list: Collection[
        AgentInfoProto
    ],  # pylint: disable=unsubscriptable-object
) -> np.ndarray:
    if len(agent_info_list) == 0:
        return np.zeros((0, shape[0]), dtype=np.float32)
    np_obs = np.array(
        [
            agent_obs.observations[obs_index].float_data.data
            for agent_obs in agent_info_list
        ],
        dtype=np.float32,
    )
    _raise_on_nan_and_inf(np_obs, "observations")
    return np_obs 
Example #6
Source File: __init__.py    From pytest with MIT License
def param(
    *values: object,
    marks: "Union[MarkDecorator, typing.Collection[Union[MarkDecorator, Mark]]]" = (),
    id: Optional[str] = None
) -> ParameterSet:
    """Specify a parameter in `pytest.mark.parametrize`_ calls or
    :ref:`parametrized fixtures <fixture-parametrize-marks>`.

    .. code-block:: python

        @pytest.mark.parametrize(
            "test_input,expected",
            [("3+5", 8), pytest.param("6*9", 42, marks=pytest.mark.xfail),],
        )
        def test_eval(test_input, expected):
            assert eval(test_input) == expected

    :param values: variable args of the values of the parameter set, in order.
    :keyword marks: a single mark or a list of marks to be applied to this parameter set.
    :keyword str id: the id to attribute to this parameter set.
    """
    return ParameterSet.param(*values, marks=marks, id=id) 
Example #7
Source File: max_likelihood.py    From copulae with MIT License
def copula_log_lik(self, param: Union[float, Collection[float]]) -> float:
        """
        Calculates the log likelihood after setting the new parameters (inserted from the optimizer) of the copula

        Parameters
        ----------
        param: ndarray
            Parameters of the copula

        Returns
        -------
        float
            Negative log likelihood of the copula

        """
        try:
            self.copula.params = param
            return -self.copula.log_lik(self.data, to_pobs=False)
        except ValueError:  # error encountered when setting invalid parameters
            return np.inf 
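The pattern above -- set the candidate parameters, return the negative log-likelihood, and return inf for invalid values -- is exactly what a numerical optimizer expects from its objective function. A self-contained sketch of that pattern with a toy Gaussian likelihood (this is not the copulae API; scipy and numpy are assumed to be available):

import numpy as np
from scipy.optimize import minimize

data = np.random.default_rng(0).normal(loc=2.0, scale=1.5, size=500)

def neg_log_lik(param):
    mu, sigma = param
    if sigma <= 0:
        return np.inf  # invalid parameters, mirroring the ValueError branch above
    return -np.sum(-0.5 * np.log(2 * np.pi * sigma ** 2) - (data - mu) ** 2 / (2 * sigma ** 2))

result = minimize(neg_log_lik, x0=[0.0, 1.0], method="Nelder-Mead")
print(result.x)  # roughly [2.0, 1.5]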
Example #8
Source File: utils.py    From bioconda-utils with MIT License
def ellipsize_recipes(recipes: Collection[str], recipe_folder: str,
                      n: int = 5, m: int = 50) -> str:
    """Logging helper showing recipe list

    Args:
      recipes: List of recipes
      recipe_folder: Folder name to strip from recipes.
      n: Show at most this number of recipes, with "..." if more are found.
      m: Don't show anything if more recipes than this
         (pointless to show first 5 of 5000)
    Returns:
      A string like " (htslib, samtools, ...)" or ""
    """
    if not recipes or len(recipes) > m:
        return ""
    if len(recipes) > n:
        if not isinstance(recipes, Sequence):
            recipes = list(recipes)
        recipes = recipes[:n]
        append = ", ..."
    else:
        append = ""
    return ' (' + ', '.join(recipe.lstrip(recipe_folder).lstrip('/')
                            for recipe in recipes) + append + ')' 
Example #9
Source File: testing.py    From grpclib with BSD 3-Clause "New" or "Revised" License
def __init__(
        self,
        services: Collection['IServable'],
        codec: Optional[CodecBase] = None,
        status_details_codec: Optional[StatusDetailsCodecBase] = None,
    ) -> None:
        """
        :param services: list of services you want to test

        :param codec: instance of a codec to encode and decode messages,
            if omitted ``ProtoCodec`` is used by default

        :param status_details_codec: instance of a status details codec to
            encode and decode error details in a trailing metadata, if omitted
            ``ProtoStatusDetailsCodec`` is used by default
        """
        self._services = services
        self._codec = codec
        self._status_details_codec = status_details_codec 
Example #10
Source File: stackdriver_task_handler.py    From airflow with Apache License 2.0
def __init__(
        self,
        gcp_key_path: Optional[str] = None,
        # See: https://github.com/PyCQA/pylint/issues/2377
        scopes: Optional[Collection[str]] = _DEFAULT_SCOPESS,  # pylint: disable=unsubscriptable-object
        name: str = DEFAULT_LOGGER_NAME,
        transport: Type[Transport] = BackgroundThreadTransport,
        resource: Resource = _GLOBAL_RESOURCE,
        labels: Optional[Dict[str, str]] = None,
    ):
        super().__init__()
        self.gcp_key_path: Optional[str] = gcp_key_path
        # See: https://github.com/PyCQA/pylint/issues/2377
        self.scopes: Optional[Collection[str]] = scopes  # pylint: disable=unsubscriptable-object
        self.name: str = name
        self.transport_type: Type[Transport] = transport
        self.resource: Resource = resource
        self.labels: Optional[Dict[str, str]] = labels
        self.task_instance_labels: Optional[Dict[str, str]] = {} 
Example #11
Source File: utils.py    From paasta with Apache License 2.0
def read_service_instance_names(
    service: str, instance_type: str, cluster: str, soa_dir: str
) -> Collection[Tuple[str, str]]:
    instance_list = []
    conf_file = f"{instance_type}-{cluster}"
    config = service_configuration_lib.read_extra_service_information(
        service, conf_file, soa_dir=soa_dir, deepcopy=False,
    )
    config = filter_templates_from_config(config)
    if instance_type == "tron":
        for job_name, job in config.items():
            action_names = list(job.get("actions", {}).keys())
            for name in action_names:
                instance = f"{job_name}.{name}"
                instance_list.append((service, instance))
    else:
        for instance in config:
            instance_list.append((service, instance))
    return instance_list 
Example #12
Source File: presentation_schema.py    From talk-generator with MIT License
def __init__(
        self,
        presentation_schema,
        presentation_context,
        seed_generator: SlideSeedGenerator,
        num_slides: int,
        used_elements: Optional[Collection[Union[str, ImageData]]] = None,
        prohibited_generators: Optional[Collection[SlideGeneratorData]] = None,
        int_seed: Optional[int] = None,
    ):
        self.presentation_schema = presentation_schema
        self.presentation_context = presentation_context
        self.seed_generator: SlideSeedGenerator = seed_generator
        self.num_slides = num_slides
        self.used_elements = used_elements
        self.prohibited_generators = prohibited_generators
        self.int_seed = int_seed 
Example #13
Source File: status.py    From paasta with Apache License 2.0
def get_smartstack_status_human(
    registration: str, expected_backends_per_location: int, locations: Collection[Any],
) -> List[str]:
    if len(locations) == 0:
        return [f"Smartstack: ERROR - {registration} is NOT in smartstack at all!"]

    output = ["Smartstack:"]
    output.append(f"  Haproxy Service Name: {registration}")
    output.append(f"  Backends:")
    for location in locations:
        backend_status = haproxy_backend_report(
            expected_backends_per_location, location.running_backends_count
        )
        output.append(f"    {location.name} - {backend_status}")

        if location.backends:
            backends_table = build_smartstack_backends_table(location.backends)
            output.extend([f"      {line}" for line in backends_table])

    return output 
Example #14
Source File: setup_marathon_job.py    From paasta with Apache License 2.0
def undrain_tasks(
    to_undrain: Collection[MarathonTask],
    leave_draining: Collection[MarathonTask],
    drain_method: drain_lib.DrainMethod,
    log_deploy_error: LogDeployError,
) -> None:
    # If any tasks on the new app happen to be draining (e.g. someone reverts to an older version with
    # `paasta mark-for-deployment`), then we should undrain them.

    async def undrain_task(task: MarathonTask) -> None:
        if task not in leave_draining:
            if task.state == "TASK_UNREACHABLE":
                return
            try:
                await drain_method.stop_draining(task)
            except Exception:
                log_deploy_error(
                    f"Ignoring exception during stop_draining of task {task.id}: {traceback.format_exc()}"
                )

    if to_undrain:
        a_sync.block(
            asyncio.wait,
            [asyncio.ensure_future(undrain_task(task)) for task in to_undrain],
        ) 
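The a_sync.block(asyncio.wait, ...) call above fans the undrain coroutines out concurrently and blocks until they all finish. A minimal stand-alone sketch of the same fan-out pattern using plain asyncio (the task ids and the sleep are placeholders, not the paasta drain logic):

import asyncio

async def undrain_task(task_id: str) -> None:
    await asyncio.sleep(0.1)  # placeholder for the real stop_draining call
    print(f"undrained {task_id}")

async def main() -> None:
    await asyncio.wait([asyncio.ensure_future(undrain_task(t)) for t in ("t1", "t2", "t3")])

asyncio.run(main())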
Example #15
Source File: mesos_tools.py    From paasta with Apache License 2.0
def filter_not_running_tasks(tasks: Collection[Task]) -> List[Task]:
    """ Filters those tasks where it's state is *not* TASK_RUNNING.
    :param tasks: a list of mesos.cli.Task
    :return filtered: a list of tasks *not* running
    """
    return [task for task in tasks if not is_task_running(task)] 
Example #16
Source File: utils.py    From paasta with Apache License 2.0
def mean(iterable: Collection[float]) -> float:
    """
    Returns the average value of an iterable
    """
    return sum(iterable) / len(iterable) 
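Collection is a good fit here because the body needs both iteration (for sum) and len(); a one-shot iterator such as a generator would satisfy Iterable but not Collection, and would also fail at runtime on len(). A quick illustration, assuming the mean() above is in scope:

print(mean([1.0, 2.0, 3.0]))   # 2.0 -- lists, tuples and sets are Collections
print(mean((1.0, 2.0, 6.0)))   # 3.0
# mean(x for x in [1.0, 2.0])  # a generator is not a Collection: len() would raise TypeError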
Example #17
Source File: rpc_utils.py    From RLs with Apache License 2.0
def _process_visual_observation(
    obs_index: int,
    shape: Tuple[int, int, int],
    agent_info_list: Collection[
        AgentInfoProto
    ],  # pylint: disable=unsubscriptable-object
) -> np.ndarray:
    if len(agent_info_list) == 0:
        return np.zeros((0, shape[0], shape[1], shape[2]), dtype=np.float32)

    batched_visual = [
        observation_to_np_array(agent_obs.observations[obs_index], shape)
        for agent_obs in agent_info_list
    ]
    return np.array(batched_visual, dtype=np.float32) 
Example #18
Source File: native_scheduler.py    From paasta with Apache License 2.0
def group_tasks_by_version(
        self, task_ids: Collection[str]
    ) -> Mapping[str, Collection[str]]:
        d: Dict[str, List[str]] = {}
        for task_id in task_ids:
            version = task_id.rsplit(".", 1)[0]
            d.setdefault(version, []).append(task_id)
        return d 
Example #19
Source File: mesos_tools.py    From paasta with Apache License 2.0
async def get_all_running_tasks() -> Collection[Task]:
    """ Will include all running tasks; for now orphans are not included
    """
    framework_tasks = await get_current_tasks("")
    mesos_master = get_mesos_master()
    framework_tasks += await mesos_master.orphan_tasks()
    running_tasks = filter_running_tasks(framework_tasks)
    return running_tasks 
Example #20
Source File: mesos_tools.py    From paasta with Apache License 2.0
def select_tasks_by_id(tasks: Collection[Task], job_id: str = "") -> List[Task]:
    """Returns a list of the tasks with a given job_id.

    :param tasks: a list of mesos.Task.
    :param job_id: the job id.
    :return tasks: a list of mesos.Task.
    """
    return [task for task in tasks if job_id in task["id"]] 
Example #21
Source File: db.py    From dcs_liberation with GNU Lesser General Public License v3.0
def choose_units(for_task: Task, factor: float, count: int, country: str) -> typing.Collection[UnitType]:
    suitable_unittypes = find_unittype(for_task, country)
    suitable_unittypes = [x for x in suitable_unittypes if x not in helicopter_map.values()]
    suitable_unittypes.sort(key=lambda x: PRICES[x])

    idx = int(len(suitable_unittypes) * factor)
    variety = int(count + count * factor / 2)

    index_start = min(idx, len(suitable_unittypes) - variety)
    index_end = min(idx + variety, len(suitable_unittypes))
    return list(set(suitable_unittypes[index_start:index_end])) 
Example #22
Source File: smartstack_tools.py    From paasta with Apache License 2.0
def are_services_up_on_ip_port(
    synapse_host: str,
    synapse_port: int,
    synapse_haproxy_url_format: str,
    services: Collection[str],
    host_ip: str,
    host_port: int,
) -> bool:
    backends = get_multiple_backends(
        services,
        synapse_host=synapse_host,
        synapse_port=synapse_port,
        synapse_haproxy_url_format=synapse_haproxy_url_format,
    )
    backends_by_ip_port: DefaultDict[
        Tuple[str, int], List[HaproxyBackend]
    ] = collections.defaultdict(list)

    for backend in backends:
        ip, port, _ = ip_port_hostname_from_svname(backend["svname"])
        backends_by_ip_port[ip, port].append(backend)

    backends_on_ip = backends_by_ip_port[host_ip, host_port]
    # any backend being up is okay because a previous backend
    # may have had the same IP and synapse only removes them completely
    # after some time
    services_with_atleast_one_backend_up = {service: False for service in services}
    for service in services:
        for be in backends_on_ip:
            if be["pxname"] == service and backend_is_up(be):
                services_with_atleast_one_backend_up[service] = True
    return all(services_with_atleast_one_backend_up.values()) 
Example #23
Source File: smartstack_tools.py    From paasta with Apache License 2.0
def get_replication_for_services(
    synapse_host: str,
    synapse_port: int,
    synapse_haproxy_url_format: str,
    services: Collection[str],
) -> Dict[str, int]:
    """Returns the replication level for the provided services

    This check is intended to be used with an haproxy load balancer, and
    relies on the implementation details of that choice.

    :param synapse_host: The host that this check should contact for replication information.
    :param synapse_port: The port number that this check should contact for replication information.
    :param synapse_haproxy_url_format: The format of the synapse haproxy URL.
    :param services: A list of strings that are the service names
                          that should be checked for replication.

    :returns available_instance_counts: A dictionary mapping the service names
                                  to an integer number of available
                                  replicas
    :returns None: If it cannot connect to the specified synapse host and port
    """
    backends = get_multiple_backends(
        services=services,
        synapse_host=synapse_host,
        synapse_port=synapse_port,
        synapse_haproxy_url_format=synapse_haproxy_url_format,
    )

    counter = collections.Counter([b["pxname"] for b in backends if backend_is_up(b)])
    return {sn: counter[sn] for sn in services} 
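A self-contained sketch of the counting step at the end, with hypothetical haproxy backend dicts and a simplified stand-in for backend_is_up (not the paasta implementation):

import collections

backends = [
    {"pxname": "service_a.main", "status": "UP"},
    {"pxname": "service_a.main", "status": "UP"},
    {"pxname": "service_b.main", "status": "DOWN"},
]

def backend_is_up(backend):  # simplified stand-in
    return backend["status"].startswith("UP")

services = ["service_a.main", "service_b.main"]
counter = collections.Counter(b["pxname"] for b in backends if backend_is_up(b))
print({sn: counter[sn] for sn in services})  # {'service_a.main': 2, 'service_b.main': 0}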
Example #24
Source File: bounce_lib.py    From paasta with Apache License 2.0
def upthendown_bounce(
    new_config: BounceMethodConfigDict,
    new_app_running: bool,
    happy_new_tasks: Collection,
    old_non_draining_tasks: Sequence,
    margin_factor=1.0,
) -> BounceMethodResult:
    """Starts a new app if necessary; only kills old apps once all the requested tasks for the new version are running.

    See the docstring for brutal_bounce() for parameters and return value.
    """
    if new_app_running and len(happy_new_tasks) == new_config["instances"]:
        return {"create_app": False, "tasks_to_drain": set(old_non_draining_tasks)}
    else:
        return {"create_app": not new_app_running, "tasks_to_drain": set()} 
Example #25
Source File: slide_topic_generators.py    From talk-generator with MIT License
def __init__(self, topics: Collection[str], _):
        self._topics = topics 
Example #26
Source File: content_generators.py    From talk-generator with MIT License
def copyright_free_prefixed_generator_from_word(prefixes: Union[str, Collection[str]]):
    if isinstance(prefixes, str):
        return PrefixedGenerator(prefixes, copyright_free_generator_from_word)
    generators = [
        (1, PrefixedGenerator(p, copyright_free_generator_from_word)) for p in prefixes
    ]
    return CombinedGenerator(*generators) 
Example #27
Source File: content_generators.py    From talk-generator with MIT License
def copyright_free_prefixed_generator(prefixes: Union[str, Collection[str]]):
    return SeededGenerator(copyright_free_prefixed_generator_from_word(prefixes)) 
Example #28
Source File: slide_generator_data.py    From talk-generator with MIT License
def fix_indices(values: Collection[int], num_slides: int):
    return [value % num_slides if value < 0 else value for value in values] 
Example #29
Source File: mentions.py    From fonduer with MIT License
def __init__(
        self, n_min: int = 1, n_max: int = 5, split_tokens: Collection[str] = []
    ) -> None:
        """Initialize MentionNgrams."""
        Ngrams.__init__(self, n_min=n_min, n_max=n_max, split_tokens=split_tokens) 
Example #30
Source File: mentions.py    From fonduer with MIT License
def __init__(
        self, n_min: int = 1, n_max: int = 5, split_tokens: Collection[str] = []
    ) -> None:
        """Initialize Ngrams."""
        MentionSpace.__init__(self)
        self.n_min = n_min
        self.n_max = n_max
        self.split_rgx = (
            r"(" + r"|".join(map(re.escape, sorted(split_tokens, reverse=True))) + r")"
            if split_tokens and len(split_tokens) > 0
            else None
        )